1 /*
2  * Copyright 2012, The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *     http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 //#define LOG_NDEBUG 0
18 #include "hidl/HidlSupport.h"
19 #define LOG_TAG "MediaCodec"
20 #include <utils/Log.h>
21 
22 #include <dlfcn.h>
23 #include <inttypes.h>
24 #include <future>
25 #include <random>
26 #include <set>
27 #include <string>
28 
29 #include <C2Buffer.h>
30 
31 #include "include/SoftwareRenderer.h"
32 
33 #include <android_media_codec.h>
34 
35 #include <android/api-level.h>
36 #include <android/content/pm/IPackageManagerNative.h>
37 #include <android/hardware/cas/native/1.0/IDescrambler.h>
38 #include <android/hardware/media/omx/1.0/IGraphicBufferSource.h>
39 
40 #include <aidl/android/media/BnResourceManagerClient.h>
41 #include <aidl/android/media/IResourceManagerService.h>
42 #include <android/binder_ibinder.h>
43 #include <android/binder_manager.h>
44 #include <android/dlext.h>
45 #include <android-base/stringprintf.h>
46 #include <binder/IMemory.h>
47 #include <binder/IServiceManager.h>
48 #include <binder/MemoryDealer.h>
49 #include <cutils/properties.h>
50 #include <gui/BufferQueue.h>
51 #include <gui/Surface.h>
52 #include <hidlmemory/FrameworkUtils.h>
53 #include <mediadrm/ICrypto.h>
54 #include <media/IOMX.h>
55 #include <media/MediaCodecBuffer.h>
56 #include <media/MediaCodecInfo.h>
57 #include <media/MediaMetricsItem.h>
58 #include <media/MediaResource.h>
59 #include <media/NdkMediaErrorPriv.h>
60 #include <media/NdkMediaFormat.h>
61 #include <media/NdkMediaFormatPriv.h>
62 #include <media/formatshaper/FormatShaper.h>
63 #include <media/stagefright/foundation/ABuffer.h>
64 #include <media/stagefright/foundation/ADebug.h>
65 #include <media/stagefright/foundation/AMessage.h>
66 #include <media/stagefright/foundation/AString.h>
67 #include <media/stagefright/foundation/AUtils.h>
68 #include <media/stagefright/foundation/avc_utils.h>
69 #include <media/stagefright/foundation/hexdump.h>
70 #include <media/stagefright/ACodec.h>
71 #include <media/stagefright/BatteryChecker.h>
72 #include <media/stagefright/BufferProducerWrapper.h>
73 #include <media/stagefright/CCodec.h>
74 #include <media/stagefright/CryptoAsync.h>
75 #include <media/stagefright/MediaCodec.h>
76 #include <media/stagefright/MediaCodecConstants.h>
77 #include <media/stagefright/MediaCodecList.h>
78 #include <media/stagefright/MediaDefs.h>
79 #include <media/stagefright/MediaErrors.h>
80 #include <media/stagefright/OMXClient.h>
81 #include <media/stagefright/PersistentSurface.h>
82 #include <media/stagefright/RenderedFrameInfo.h>
83 #include <media/stagefright/SurfaceUtils.h>
84 #include <nativeloader/dlext_namespaces.h>
85 #include <private/android_filesystem_config.h>
86 #include <server_configurable_flags/get_flags.h>
87 #include <utils/Singleton.h>
88 
89 namespace android {
90 
91 using Status = ::ndk::ScopedAStatus;
92 using aidl::android::media::BnResourceManagerClient;
93 using aidl::android::media::IResourceManagerClient;
94 using aidl::android::media::IResourceManagerService;
95 using aidl::android::media::ClientInfoParcel;
96 using server_configurable_flags::GetServerConfigurableFlag;
97 using FreezeEvent = VideoRenderQualityTracker::FreezeEvent;
98 using JudderEvent = VideoRenderQualityTracker::JudderEvent;
99 
100 // key for media statistics
101 static const char *kCodecKeyName = "codec";
102 // attrs for media statistics
103 // NB: these are matched with public Java API constants defined
104 // in frameworks/base/media/java/android/media/MediaCodec.java
105 // These must be kept synchronized with the constants there.
106 static const char *kCodecLogSessionId = "android.media.mediacodec.log-session-id";
107 static const char *kCodecCodec = "android.media.mediacodec.codec";  /* e.g. OMX.google.aac.decoder */
108 static const char *kCodecId = "android.media.mediacodec.id";
109 static const char *kCodecMime = "android.media.mediacodec.mime";    /* e.g. audio/mime */
110 static const char *kCodecMode = "android.media.mediacodec.mode";    /* audio, video */
111 static const char *kCodecModeVideo = "video";            /* values returned for kCodecMode */
112 static const char *kCodecModeAudio = "audio";
113 static const char *kCodecModeImage = "image";
114 static const char *kCodecModeUnknown = "unknown";
115 static const char *kCodecEncoder = "android.media.mediacodec.encoder"; /* 0,1 */
116 static const char *kCodecHardware = "android.media.mediacodec.hardware"; /* 0,1 */
117 static const char *kCodecSecure = "android.media.mediacodec.secure";   /* 0, 1 */
118 static const char *kCodecTunneled = "android.media.mediacodec.tunneled"; /* 0,1 */
119 static const char *kCodecWidth = "android.media.mediacodec.width";     /* 0..n */
120 static const char *kCodecHeight = "android.media.mediacodec.height";   /* 0..n */
121 static const char *kCodecRotation = "android.media.mediacodec.rotation-degrees";  /* 0/90/180/270 */
122 static const char *kCodecColorFormat = "android.media.mediacodec.color-format";
123 static const char *kCodecFrameRate = "android.media.mediacodec.frame-rate";
124 static const char *kCodecCaptureRate = "android.media.mediacodec.capture-rate";
125 static const char *kCodecOperatingRate = "android.media.mediacodec.operating-rate";
126 static const char *kCodecPriority = "android.media.mediacodec.priority";
127 
128 // Min/Max QP before shaping
129 static const char *kCodecOriginalVideoQPIMin = "android.media.mediacodec.original-video-qp-i-min";
130 static const char *kCodecOriginalVideoQPIMax = "android.media.mediacodec.original-video-qp-i-max";
131 static const char *kCodecOriginalVideoQPPMin = "android.media.mediacodec.original-video-qp-p-min";
132 static const char *kCodecOriginalVideoQPPMax = "android.media.mediacodec.original-video-qp-p-max";
133 static const char *kCodecOriginalVideoQPBMin = "android.media.mediacodec.original-video-qp-b-min";
134 static const char *kCodecOriginalVideoQPBMax = "android.media.mediacodec.original-video-qp-b-max";
135 
136 // Min/Max QP after shaping
137 static const char *kCodecRequestedVideoQPIMin = "android.media.mediacodec.video-qp-i-min";
138 static const char *kCodecRequestedVideoQPIMax = "android.media.mediacodec.video-qp-i-max";
139 static const char *kCodecRequestedVideoQPPMin = "android.media.mediacodec.video-qp-p-min";
140 static const char *kCodecRequestedVideoQPPMax = "android.media.mediacodec.video-qp-p-max";
141 static const char *kCodecRequestedVideoQPBMin = "android.media.mediacodec.video-qp-b-min";
142 static const char *kCodecRequestedVideoQPBMax = "android.media.mediacodec.video-qp-b-max";
143 
144 // NB: These are not yet exposed as public Java API constants.
145 static const char *kCodecCrypto = "android.media.mediacodec.crypto";   /* 0,1 */
146 static const char *kCodecProfile = "android.media.mediacodec.profile";  /* 0..n */
147 static const char *kCodecLevel = "android.media.mediacodec.level";  /* 0..n */
148 static const char *kCodecBitrateMode = "android.media.mediacodec.bitrate_mode";  /* CQ/VBR/CBR */
149 static const char *kCodecBitrate = "android.media.mediacodec.bitrate";  /* 0..n */
150 static const char *kCodecOriginalBitrate = "android.media.mediacodec.original.bitrate";  /* 0..n */
151 static const char *kCodecMaxWidth = "android.media.mediacodec.maxwidth";  /* 0..n */
152 static const char *kCodecMaxHeight = "android.media.mediacodec.maxheight";  /* 0..n */
153 static const char *kCodecError = "android.media.mediacodec.errcode";
154 static const char *kCodecLifetimeMs = "android.media.mediacodec.lifetimeMs";   /* 0..n ms*/
155 static const char *kCodecErrorState = "android.media.mediacodec.errstate";
156 static const char *kCodecLatencyMax = "android.media.mediacodec.latency.max";   /* in us */
157 static const char *kCodecLatencyMin = "android.media.mediacodec.latency.min";   /* in us */
158 static const char *kCodecLatencyAvg = "android.media.mediacodec.latency.avg";   /* in us */
159 static const char *kCodecLatencyCount = "android.media.mediacodec.latency.n";
160 static const char *kCodecLatencyHist = "android.media.mediacodec.latency.hist"; /* in us */
161 static const char *kCodecLatencyUnknown = "android.media.mediacodec.latency.unknown";
162 static const char *kCodecQueueSecureInputBufferError = "android.media.mediacodec.queueSecureInputBufferError";
163 static const char *kCodecQueueInputBufferError = "android.media.mediacodec.queueInputBufferError";
164 static const char *kCodecComponentColorFormat = "android.media.mediacodec.component-color-format";
165 
166 static const char *kCodecNumLowLatencyModeOn = "android.media.mediacodec.low-latency.on";  /* 0..n */
167 static const char *kCodecNumLowLatencyModeOff = "android.media.mediacodec.low-latency.off";  /* 0..n */
168 static const char *kCodecFirstFrameIndexLowLatencyModeOn = "android.media.mediacodec.low-latency.first-frame";  /* 0..n */
169 static const char *kCodecChannelCount = "android.media.mediacodec.channelCount";
170 static const char *kCodecSampleRate = "android.media.mediacodec.sampleRate";
171 static const char *kCodecVideoEncodedBytes = "android.media.mediacodec.vencode.bytes";
172 static const char *kCodecVideoEncodedFrames = "android.media.mediacodec.vencode.frames";
173 static const char *kCodecVideoInputBytes = "android.media.mediacodec.video.input.bytes";
174 static const char *kCodecVideoInputFrames = "android.media.mediacodec.video.input.frames";
175 static const char *kCodecVideoEncodedDurationUs = "android.media.mediacodec.vencode.durationUs";
176 // HDR metrics
177 static const char *kCodecConfigColorStandard = "android.media.mediacodec.config-color-standard";
178 static const char *kCodecConfigColorRange = "android.media.mediacodec.config-color-range";
179 static const char *kCodecConfigColorTransfer = "android.media.mediacodec.config-color-transfer";
180 static const char *kCodecParsedColorStandard = "android.media.mediacodec.parsed-color-standard";
181 static const char *kCodecParsedColorRange = "android.media.mediacodec.parsed-color-range";
182 static const char *kCodecParsedColorTransfer = "android.media.mediacodec.parsed-color-transfer";
183 static const char *kCodecHdrStaticInfo = "android.media.mediacodec.hdr-static-info";
184 static const char *kCodecHdr10PlusInfo = "android.media.mediacodec.hdr10-plus-info";
185 static const char *kCodecHdrFormat = "android.media.mediacodec.hdr-format";
186 // array/sync/async/block modes
187 static const char *kCodecArrayMode = "android.media.mediacodec.array-mode";
188 static const char *kCodecOperationMode = "android.media.mediacodec.operation-mode";
189 static const char *kCodecOutputSurface = "android.media.mediacodec.output-surface";
190 // max size configured by the app
191 static const char *kCodecAppMaxInputSize = "android.media.mediacodec.app-max-input-size";
192 // max size actually used
193 static const char *kCodecUsedMaxInputSize = "android.media.mediacodec.used-max-input-size";
194 // max size suggested by the codec
195 static const char *kCodecCodecMaxInputSize = "android.media.mediacodec.codec-max-input-size";
196 static const char *kCodecFlushCount = "android.media.mediacodec.flush-count";
197 static const char *kCodecSetSurfaceCount = "android.media.mediacodec.set-surface-count";
198 static const char *kCodecResolutionChangeCount = "android.media.mediacodec.resolution-change-count";
199 
200 // the kCodecRecent* fields appear only in getMetrics() results
201 static const char *kCodecRecentLatencyMax = "android.media.mediacodec.recent.max";      /* in us */
202 static const char *kCodecRecentLatencyMin = "android.media.mediacodec.recent.min";      /* in us */
203 static const char *kCodecRecentLatencyAvg = "android.media.mediacodec.recent.avg";      /* in us */
204 static const char *kCodecRecentLatencyCount = "android.media.mediacodec.recent.n";
205 static const char *kCodecRecentLatencyHist = "android.media.mediacodec.recent.hist";    /* in us */
206 
207 /* -1: shaper disabled
208    >=0: number of fields changed */
209 static const char *kCodecShapingEnhanced = "android.media.mediacodec.shaped";
210 
211 // Render metrics
212 static const char *kCodecPlaybackDurationSec = "android.media.mediacodec.playback-duration-sec";
213 static const char *kCodecFirstRenderTimeUs = "android.media.mediacodec.first-render-time-us";
214 static const char *kCodecLastRenderTimeUs = "android.media.mediacodec.last-render-time-us";
215 static const char *kCodecFramesReleased = "android.media.mediacodec.frames-released";
216 static const char *kCodecFramesRendered = "android.media.mediacodec.frames-rendered";
217 static const char *kCodecFramesDropped = "android.media.mediacodec.frames-dropped";
218 static const char *kCodecFramesSkipped = "android.media.mediacodec.frames-skipped";
219 static const char *kCodecFramerateContent = "android.media.mediacodec.framerate-content";
220 static const char *kCodecFramerateDesired = "android.media.mediacodec.framerate-desired";
221 static const char *kCodecFramerateActual = "android.media.mediacodec.framerate-actual";
222 // Freeze
223 static const char *kCodecFreezeCount = "android.media.mediacodec.freeze-count";
224 static const char *kCodecFreezeScore = "android.media.mediacodec.freeze-score";
225 static const char *kCodecFreezeRate = "android.media.mediacodec.freeze-rate";
226 static const char *kCodecFreezeDurationMsAvg = "android.media.mediacodec.freeze-duration-ms-avg";
227 static const char *kCodecFreezeDurationMsMax = "android.media.mediacodec.freeze-duration-ms-max";
228 static const char *kCodecFreezeDurationMsHistogram =
229         "android.media.mediacodec.freeze-duration-ms-histogram";
230 static const char *kCodecFreezeDurationMsHistogramBuckets =
231         "android.media.mediacodec.freeze-duration-ms-histogram-buckets";
232 static const char *kCodecFreezeDistanceMsAvg = "android.media.mediacodec.freeze-distance-ms-avg";
233 static const char *kCodecFreezeDistanceMsHistogram =
234         "android.media.mediacodec.freeze-distance-ms-histogram";
235 static const char *kCodecFreezeDistanceMsHistogramBuckets =
236         "android.media.mediacodec.freeze-distance-ms-histogram-buckets";
237 // Judder
238 static const char *kCodecJudderCount = "android.media.mediacodec.judder-count";
239 static const char *kCodecJudderScore = "android.media.mediacodec.judder-score";
240 static const char *kCodecJudderRate = "android.media.mediacodec.judder-rate";
241 static const char *kCodecJudderScoreAvg = "android.media.mediacodec.judder-score-avg";
242 static const char *kCodecJudderScoreMax = "android.media.mediacodec.judder-score-max";
243 static const char *kCodecJudderScoreHistogram = "android.media.mediacodec.judder-score-histogram";
244 static const char *kCodecJudderScoreHistogramBuckets =
245         "android.media.mediacodec.judder-score-histogram-buckets";
246 // Freeze event
247 static const char *kCodecFreezeEventCount = "android.media.mediacodec.freeze-event-count";
248 static const char *kFreezeEventKeyName = "videofreeze";
249 static const char *kFreezeEventInitialTimeUs = "android.media.mediacodec.freeze.initial-time-us";
250 static const char *kFreezeEventDurationMs = "android.media.mediacodec.freeze.duration-ms";
251 static const char *kFreezeEventCount = "android.media.mediacodec.freeze.count";
252 static const char *kFreezeEventAvgDurationMs = "android.media.mediacodec.freeze.avg-duration-ms";
253 static const char *kFreezeEventAvgDistanceMs = "android.media.mediacodec.freeze.avg-distance-ms";
254 static const char *kFreezeEventDetailsDurationMs =
255         "android.media.mediacodec.freeze.details-duration-ms";
256 static const char *kFreezeEventDetailsDistanceMs =
257         "android.media.mediacodec.freeze.details-distance-ms";
258 // Judder event
259 static const char *kCodecJudderEventCount = "android.media.mediacodec.judder-event-count";
260 static const char *kJudderEventKeyName = "videojudder";
261 static const char *kJudderEventInitialTimeUs = "android.media.mediacodec.judder.initial-time-us";
262 static const char *kJudderEventDurationMs = "android.media.mediacodec.judder.duration-ms";
263 static const char *kJudderEventCount = "android.media.mediacodec.judder.count";
264 static const char *kJudderEventAvgScore = "android.media.mediacodec.judder.avg-score";
265 static const char *kJudderEventAvgDistanceMs = "android.media.mediacodec.judder.avg-distance-ms";
266 static const char *kJudderEventDetailsActualDurationUs =
267         "android.media.mediacodec.judder.details-actual-duration-us";
268 static const char *kJudderEventDetailsContentDurationUs =
269         "android.media.mediacodec.judder.details-content-duration-us";
270 static const char *kJudderEventDetailsDistanceMs =
271         "android.media.mediacodec.judder.details-distance-ms";
272 
273 // XXX suppress until we get our representation right
274 static bool kEmitHistogram = false;
275 
276 typedef WrapperObject<std::vector<AccessUnitInfo>> BufferInfosWrapper;
277 
278 // Multi access unit helpers
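// Note on the flag merging in generateFlagsFromAccessUnitInfo() below: when several
// access units are batched into one buffer, BUFFER_FLAG_DECODE_ONLY and
// BUFFER_FLAG_CODEC_CONFIG are kept only if they are set on every access unit, while
// any other flag (e.g. BUFFER_FLAG_END_OF_STREAM) is kept if it is set on any of them.
// For example, access units flagged {DECODE_ONLY} and {DECODE_ONLY | END_OF_STREAM}
// merge to DECODE_ONLY | END_OF_STREAM, whereas {DECODE_ONLY} and {no flag} merge to
// no flag, because DECODE_ONLY was not set on all of them.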
279 static status_t generateFlagsFromAccessUnitInfo(
280         sp<AMessage> &msg, const sp<BufferInfosWrapper> &bufferInfos) {
281     msg->setInt64("timeUs", bufferInfos->value[0].mTimestamp);
282     msg->setInt32("flags", bufferInfos->value[0].mFlags);
283     // will prevent any access-unit info copy.
284     if (bufferInfos->value.size() > 1) {
285         uint32_t bufferFlags = 0;
286         uint32_t flagsInAllAU = BUFFER_FLAG_DECODE_ONLY | BUFFER_FLAG_CODEC_CONFIG;
287         uint32_t andFlags = flagsInAllAU;
288         int infoIdx = 0;
289         bool foundEndOfStream = false;
290         for ( ; infoIdx < bufferInfos->value.size() && !foundEndOfStream; ++infoIdx) {
291             bufferFlags |= bufferInfos->value[infoIdx].mFlags;
292             andFlags &= bufferInfos->value[infoIdx].mFlags;
293             if (bufferFlags & BUFFER_FLAG_END_OF_STREAM) {
294                 foundEndOfStream = true;
295             }
296         }
297         bufferFlags = bufferFlags & (andFlags | (~flagsInAllAU));
298         if (infoIdx != bufferInfos->value.size()) {
299             ALOGE("Error: incorrect access-units");
300             return -EINVAL;
301         }
302         msg->setInt32("flags", bufferFlags);
303     }
304     return OK;
305 }
306 
307 static int64_t getId(IResourceManagerClient const * client) {
308     return (int64_t) client;
309 }
310 
311 static int64_t getId(const std::shared_ptr<IResourceManagerClient> &client) {
312     return getId(client.get());
313 }
314 
315 static bool isResourceError(status_t err) {
316     return (err == NO_MEMORY);
317 }
318 
319 static bool areRenderMetricsEnabled() {
320     std::string v = GetServerConfigurableFlag("media_native", "render_metrics_enabled", "false");
321     return v == "true";
322 }
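// Note: render metrics are gated by a server-configurable flag in the "media_native"
// namespace; for local experiments it can typically be toggled with something like
//   adb shell device_config put media_native render_metrics_enabled true
// (illustrative command; the exact mechanism depends on the device configuration).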
323 
324 static const int kMaxRetry = 2;
325 static const int kMaxReclaimWaitTimeInUs = 500000;  // 0.5s
326 static const int kNumBuffersAlign = 16;
327 
328 static const C2MemoryUsage kDefaultReadWriteUsage{
329     C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE};
330 
331 ////////////////////////////////////////////////////////////////////////////////
332 
333 /*
334  * Implementation of the IResourceManagerClient interface that facilitates
335  * MediaCodec reclaim for the ResourceManagerService.
336  */
337 struct ResourceManagerClient : public BnResourceManagerClient {
338     explicit ResourceManagerClient(MediaCodec* codec, int32_t pid, int32_t uid) :
339             mMediaCodec(codec), mPid(pid), mUid(uid) {}
340 
341     Status reclaimResource(bool* _aidl_return) override {
342         sp<MediaCodec> codec = mMediaCodec.promote();
343         if (codec == NULL) {
344             // Codec is already gone, so remove the resources as well
345             ::ndk::SpAIBinder binder(AServiceManager_waitForService("media.resource_manager"));
346             std::shared_ptr<IResourceManagerService> service =
347                     IResourceManagerService::fromBinder(binder);
348             if (service == nullptr) {
349                 ALOGE("MediaCodec::ResourceManagerClient unable to find ResourceManagerService");
350                 *_aidl_return = false;
351                 return Status::fromStatus(STATUS_INVALID_OPERATION);
352             }
353             ClientInfoParcel clientInfo{.pid = static_cast<int32_t>(mPid),
354                                         .uid = static_cast<int32_t>(mUid),
355                                         .id = getId(this)};
356             service->removeClient(clientInfo);
357             *_aidl_return = true;
358             return Status::ok();
359         }
360         status_t err = codec->reclaim();
361         if (err == WOULD_BLOCK) {
362             ALOGD("Wait for the client to release codec.");
363             usleep(kMaxReclaimWaitTimeInUs);
364             ALOGD("Try to reclaim again.");
365             err = codec->reclaim(true /* force */);
366         }
367         if (err != OK) {
368             ALOGW("ResourceManagerClient failed to release codec with err %d", err);
369         }
370         *_aidl_return = (err == OK);
371         return Status::ok();
372     }
373 
374     Status getName(::std::string* _aidl_return) override {
375         _aidl_return->clear();
376         sp<MediaCodec> codec = mMediaCodec.promote();
377         if (codec == NULL) {
378             // codec is already gone.
379             return Status::ok();
380         }
381 
382         AString name;
383         if (codec->getName(&name) == OK) {
384             *_aidl_return = name.c_str();
385         }
386         return Status::ok();
387     }
388 
389     virtual ~ResourceManagerClient() {}
390 
391 private:
392     wp<MediaCodec> mMediaCodec;
393     int32_t mPid;
394     int32_t mUid;
395 
396     DISALLOW_EVIL_CONSTRUCTORS(ResourceManagerClient);
397 };
398 
399 /*
400  * Proxy that communicates with the ResourceManagerService
401  * on behalf of MediaCodec.
402  */
403 struct MediaCodec::ResourceManagerServiceProxy :
404     public std::enable_shared_from_this<ResourceManagerServiceProxy> {
405 
406     // BinderDiedContext defines the cookie that is passed as DeathRecipient.
407     // Since this can maintain more context than a raw pointer, we can
408     // validate that the ResourceManagerServiceProxy is still in scope
409     // before dereferencing it upon binder death.
410     struct BinderDiedContext {
411         std::weak_ptr<ResourceManagerServiceProxy> mRMServiceProxy;
412     };
413 
414     ResourceManagerServiceProxy(pid_t pid, uid_t uid,
415             const std::shared_ptr<IResourceManagerClient> &client);
416     ~ResourceManagerServiceProxy();
417     status_t init();
418     void addResource(const MediaResourceParcel &resource);
419     void removeResource(const MediaResourceParcel &resource);
420     void removeClient();
421     void markClientForPendingRemoval();
422     bool reclaimResource(const std::vector<MediaResourceParcel> &resources);
423     void notifyClientCreated();
424     void notifyClientStarted(ClientConfigParcel& clientConfig);
425     void notifyClientStopped(ClientConfigParcel& clientConfig);
426     void notifyClientConfigChanged(ClientConfigParcel& clientConfig);
427 
428     inline void setCodecName(const char* name) {
429         mCodecName = name;
430     }
431 
432     inline void setImportance(int importance) {
433         mImportance = importance;
434     }
435 
436 private:
437     // To get the binder interface to ResourceManagerService.
438     void getService() {
439         std::scoped_lock lock{mLock};
440         getService_l();
441     }
442 
443     std::shared_ptr<IResourceManagerService> getService_l();
444 
445     // To add/register all the resources currently added/registered with
446     // the ResourceManagerService.
447     // This function will be called right after the death of the Resource
448     // Manager to make sure that the newly started ResourceManagerService
449     // knows about the current resource usage.
450     void reRegisterAllResources_l();
451 
452     void deinit() {
453         std::scoped_lock lock{mLock};
454         // Unregistering from DeathRecipient notification.
455         if (mService != nullptr) {
456             AIBinder_unlinkToDeath(mService->asBinder().get(), mDeathRecipient.get(), mCookie);
457             mService = nullptr;
458         }
459     }
460 
461     // For binder death handling
462     static void BinderDiedCallback(void* cookie);
463     static void BinderUnlinkedCallback(void* cookie);
464 
465     void binderDied() {
466         std::scoped_lock lock{mLock};
467         ALOGE("ResourceManagerService died.");
468         mService = nullptr;
469         mBinderDied = true;
470         // start an async operation that will reconnect with the RM and
471         // re-register all the resources.
472         mGetServiceFuture = std::async(std::launch::async, [this] { getService(); });
473     }
474 
475     /**
476      * Get the ClientInfo to communicate with the ResourceManager.
477      *
478      * ClientInfo includes:
479      *   - {pid, uid} of the process
480      *   - identifier for the client
481      *   - name of the client/codec
482      *   - importance associated with the client
483      */
484     inline ClientInfoParcel getClientInfo() const {
485         ClientInfoParcel clientInfo{.pid = static_cast<int32_t>(mPid),
486                                     .uid = static_cast<int32_t>(mUid),
487                                     .id = getId(mClient),
488                                     .name = mCodecName,
489                                     .importance = mImportance};
490         return clientInfo;
491     }
492 
493 private:
494     std::mutex  mLock;
495     bool        mBinderDied = false;
496     pid_t       mPid;
497     uid_t       mUid;
498     int         mImportance = 0;
499     std::string mCodecName;
500     /**
501      * Reconnecting with the ResourceManagerService, after its binder interface dies,
502      * is done asynchronously. It also makes sure that all the resources
503      * associated with this proxy (MediaCodec) are re-added to the new instance
504      * of the ResourceManagerService so that the resource state persists.
505      * We must store a reference to the future to guarantee a truly asynchronous operation.
506      */
507     std::future<void> mGetServiceFuture;
508     // To maintain the list of all the resources currently added/registered with
509     // the ResourceManagerService.
510     std::set<MediaResourceParcel> mMediaResourceParcel;
511     std::shared_ptr<IResourceManagerClient> mClient;
512     ::ndk::ScopedAIBinder_DeathRecipient mDeathRecipient;
513     std::shared_ptr<IResourceManagerService> mService;
514     BinderDiedContext* mCookie;
515 };
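// Typical lifecycle of this proxy as driven by MediaCodec (an illustrative sketch;
// the codec name and resource parcel below are placeholders):
//
//   auto proxy = std::make_shared<ResourceManagerServiceProxy>(pid, uid, client);
//   if (proxy->init() == OK) {
//       proxy->setCodecName("c2.android.avc.decoder");  // placeholder name
//       proxy->addResource(resourceParcel);             // e.g. on configure/start
//       ...
//       proxy->removeClient();                          // on release
//   }
//
// If the ResourceManagerService dies, binderDied() schedules an asynchronous
// reconnect and reRegisterAllResources_l() replays every resource added so far.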
516 
517 MediaCodec::ResourceManagerServiceProxy::ResourceManagerServiceProxy(
518         pid_t pid, uid_t uid, const std::shared_ptr<IResourceManagerClient> &client) :
519     mPid(pid), mUid(uid), mClient(client),
520     mDeathRecipient(::ndk::ScopedAIBinder_DeathRecipient(
521             AIBinder_DeathRecipient_new(BinderDiedCallback))),
522     mCookie(nullptr) {
523     if (mUid == MediaCodec::kNoUid) {
524         mUid = AIBinder_getCallingUid();
525     }
526     if (mPid == MediaCodec::kNoPid) {
527         mPid = AIBinder_getCallingPid();
528     }
529     // Setting callback notification when DeathRecipient gets deleted.
530     AIBinder_DeathRecipient_setOnUnlinked(mDeathRecipient.get(), BinderUnlinkedCallback);
531 }
532 
533 MediaCodec::ResourceManagerServiceProxy::~ResourceManagerServiceProxy() {
534     deinit();
535 }
536 
537 status_t MediaCodec::ResourceManagerServiceProxy::init() {
538     std::scoped_lock lock{mLock};
539 
540     int callerPid = AIBinder_getCallingPid();
541     int callerUid = AIBinder_getCallingUid();
542 
543     if (mPid != callerPid || mUid != callerUid) {
544         // Media processes don't need special permissions to act on behalf of other processes.
545         if (callerUid != AID_MEDIA) {
546             char const * permission = "android.permission.MEDIA_RESOURCE_OVERRIDE_PID";
547             if (!checkCallingPermission(String16(permission))) {
548                 ALOGW("%s is required to override the caller's PID for media resource management.",
549                         permission);
550                 return PERMISSION_DENIED;
551             }
552         }
553     }
554 
555     mService = getService_l();
556     if (mService == nullptr) {
557         return DEAD_OBJECT;
558     }
559 
560     // Kill clients pending removal.
561     mService->reclaimResourcesFromClientsPendingRemoval(mPid);
562     return OK;
563 }
564 
565 std::shared_ptr<IResourceManagerService> MediaCodec::ResourceManagerServiceProxy::getService_l() {
566     if (mService != nullptr) {
567         return mService;
568     }
569 
570     // Get binder interface to resource manager.
571     ::ndk::SpAIBinder binder(AServiceManager_waitForService("media.resource_manager"));
572     mService = IResourceManagerService::fromBinder(binder);
573     if (mService == nullptr) {
574         ALOGE("Failed to get ResourceManagerService");
575         return mService;
576     }
577 
578     // Create the context that is passed as cookie to the binder death notification.
579     // The context gets deleted at BinderUnlinkedCallback.
580     mCookie = new BinderDiedContext{.mRMServiceProxy = weak_from_this()};
581     // Register for the callbacks by linking to death notification.
582     AIBinder_linkToDeath(mService->asBinder().get(), mDeathRecipient.get(), mCookie);
583 
584     // If the RM was restarted, re-register all the resources.
585     if (mBinderDied) {
586         reRegisterAllResources_l();
587         mBinderDied = false;
588     }
589     return mService;
590 }
591 
592 void MediaCodec::ResourceManagerServiceProxy::reRegisterAllResources_l() {
593     if (mMediaResourceParcel.empty()) {
594         ALOGV("No resources to add");
595         return;
596     }
597 
598     if (mService == nullptr) {
599         ALOGW("Service isn't available");
600         return;
601     }
602 
603     std::vector<MediaResourceParcel> resources;
604     std::copy(mMediaResourceParcel.begin(), mMediaResourceParcel.end(),
605               std::back_inserter(resources));
606     mService->addResource(getClientInfo(), mClient, resources);
607 }
608 
609 void MediaCodec::ResourceManagerServiceProxy::BinderDiedCallback(void* cookie) {
610     BinderDiedContext* context = reinterpret_cast<BinderDiedContext*>(cookie);
611 
612     // Validate the context and check if the ResourceManagerServiceProxy object is still in scope.
613     if (context != nullptr) {
614         std::shared_ptr<ResourceManagerServiceProxy> thiz = context->mRMServiceProxy.lock();
615         if (thiz != nullptr) {
616             thiz->binderDied();
617         } else {
618             ALOGI("ResourceManagerServiceProxy is out of scope already");
619         }
620     }
621 }
622 
623 void MediaCodec::ResourceManagerServiceProxy::BinderUnlinkedCallback(void* cookie) {
624     BinderDiedContext* context = reinterpret_cast<BinderDiedContext*>(cookie);
625     // Since we don't need the context anymore, we are deleting it now.
626     delete context;
627 }
628 
629 void MediaCodec::ResourceManagerServiceProxy::addResource(
630         const MediaResourceParcel &resource) {
631     std::scoped_lock lock{mLock};
632     std::shared_ptr<IResourceManagerService> service = getService_l();
633     if (service == nullptr) {
634         ALOGW("Service isn't available");
635         return;
636     }
637     std::vector<MediaResourceParcel> resources;
638     resources.push_back(resource);
639     service->addResource(getClientInfo(), mClient, resources);
640     mMediaResourceParcel.emplace(resource);
641 }
642 
643 void MediaCodec::ResourceManagerServiceProxy::removeResource(
644         const MediaResourceParcel &resource) {
645     std::scoped_lock lock{mLock};
646     std::shared_ptr<IResourceManagerService> service = getService_l();
647     if (service == nullptr) {
648         ALOGW("Service isn't available");
649         return;
650     }
651     std::vector<MediaResourceParcel> resources;
652     resources.push_back(resource);
653     service->removeResource(getClientInfo(), resources);
654     mMediaResourceParcel.erase(resource);
655 }
656 
657 void MediaCodec::ResourceManagerServiceProxy::removeClient() {
658     std::scoped_lock lock{mLock};
659     std::shared_ptr<IResourceManagerService> service = getService_l();
660     if (service == nullptr) {
661         ALOGW("Service isn't available");
662         return;
663     }
664     service->removeClient(getClientInfo());
665     mMediaResourceParcel.clear();
666 }
667 
668 void MediaCodec::ResourceManagerServiceProxy::markClientForPendingRemoval() {
669     std::scoped_lock lock{mLock};
670     std::shared_ptr<IResourceManagerService> service = getService_l();
671     if (service == nullptr) {
672         ALOGW("Service isn't available");
673         return;
674     }
675     service->markClientForPendingRemoval(getClientInfo());
676     mMediaResourceParcel.clear();
677 }
678 
679 bool MediaCodec::ResourceManagerServiceProxy::reclaimResource(
680         const std::vector<MediaResourceParcel> &resources) {
681     std::scoped_lock lock{mLock};
682     std::shared_ptr<IResourceManagerService> service = getService_l();
683     if (service == nullptr) {
684         ALOGW("Service isn't available");
685         return false;
686     }
687     bool success;
688     Status status = service->reclaimResource(getClientInfo(), resources, &success);
689     return status.isOk() && success;
690 }
691 
692 void MediaCodec::ResourceManagerServiceProxy::notifyClientCreated() {
693     std::scoped_lock lock{mLock};
694     std::shared_ptr<IResourceManagerService> service = getService_l();
695     if (service == nullptr) {
696         ALOGW("Service isn't available");
697         return;
698     }
699     service->notifyClientCreated(getClientInfo());
700 }
701 
702 void MediaCodec::ResourceManagerServiceProxy::notifyClientStarted(
703         ClientConfigParcel& clientConfig) {
704     std::scoped_lock lock{mLock};
705     std::shared_ptr<IResourceManagerService> service = getService_l();
706     if (service == nullptr) {
707         ALOGW("Service isn't available");
708         return;
709     }
710     clientConfig.clientInfo = getClientInfo();
711     service->notifyClientStarted(clientConfig);
712 }
713 
714 void MediaCodec::ResourceManagerServiceProxy::notifyClientStopped(
715         ClientConfigParcel& clientConfig) {
716     std::scoped_lock lock{mLock};
717     std::shared_ptr<IResourceManagerService> service = getService_l();
718     if (service == nullptr) {
719         ALOGW("Service isn't available");
720         return;
721     }
722     clientConfig.clientInfo = getClientInfo();
723     service->notifyClientStopped(clientConfig);
724 }
725 
726 void MediaCodec::ResourceManagerServiceProxy::notifyClientConfigChanged(
727         ClientConfigParcel& clientConfig) {
728     std::scoped_lock lock{mLock};
729     std::shared_ptr<IResourceManagerService> service = getService_l();
730     if (service == nullptr) {
731         ALOGW("Service isn't available");
732         return;
733     }
734     clientConfig.clientInfo = getClientInfo();
735     service->notifyClientConfigChanged(clientConfig);
736 }
737 
738 ////////////////////////////////////////////////////////////////////////////////
739 
740 MediaCodec::BufferInfo::BufferInfo() : mOwnedByClient(false) {}
741 
742 ////////////////////////////////////////////////////////////////////////////////
743 
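// ReleaseSurface wraps a locally created BufferQueue whose consumer immediately
// acquires and releases every incoming frame, i.e. a surface that silently discards
// output. It gives the codec a place to dequeue into while it is being released or
// detached from the application's output surface.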
744 class MediaCodec::ReleaseSurface {
745 public:
746     explicit ReleaseSurface(uint64_t usage) {
747         BufferQueue::createBufferQueue(&mProducer, &mConsumer);
748         mSurface = new Surface(mProducer, false /* controlledByApp */);
749         struct ConsumerListener : public BnConsumerListener {
750             ConsumerListener(const sp<IGraphicBufferConsumer> &consumer) {
751                 mConsumer = consumer;
752             }
753             void onFrameAvailable(const BufferItem&) override {
754                 BufferItem buffer;
755                 // consume buffer
756                 sp<IGraphicBufferConsumer> consumer = mConsumer.promote();
757                 if (consumer != nullptr && consumer->acquireBuffer(&buffer, 0) == NO_ERROR) {
758                     consumer->releaseBuffer(buffer.mSlot, buffer.mFrameNumber, buffer.mFence);
759                 }
760             }
761 
762             wp<IGraphicBufferConsumer> mConsumer;
763             void onBuffersReleased() override {}
764             void onSidebandStreamChanged() override {}
765         };
766         sp<ConsumerListener> listener{new ConsumerListener(mConsumer)};
767         mConsumer->consumerConnect(listener, false);
768         mConsumer->setConsumerName(String8{"MediaCodec.release"});
769         mConsumer->setConsumerUsageBits(usage);
770     }
771 
772     const sp<Surface> &getSurface() {
773         return mSurface;
774     }
775 
776 private:
777     sp<IGraphicBufferProducer> mProducer;
778     sp<IGraphicBufferConsumer> mConsumer;
779     sp<Surface> mSurface;
780 };
781 
782 ////////////////////////////////////////////////////////////////////////////////
783 
784 namespace {
785 
786 enum {
787     kWhatFillThisBuffer      = 'fill',
788     kWhatDrainThisBuffer     = 'drai',
789     kWhatEOS                 = 'eos ',
790     kWhatStartCompleted      = 'Scom',
791     kWhatStopCompleted       = 'scom',
792     kWhatReleaseCompleted    = 'rcom',
793     kWhatFlushCompleted      = 'fcom',
794     kWhatError               = 'erro',
795     kWhatCryptoError         = 'ercp',
796     kWhatComponentAllocated  = 'cAll',
797     kWhatComponentConfigured = 'cCon',
798     kWhatInputSurfaceCreated = 'isfc',
799     kWhatInputSurfaceAccepted = 'isfa',
800     kWhatSignaledInputEOS    = 'seos',
801     kWhatOutputFramesRendered = 'outR',
802     kWhatOutputBuffersChanged = 'outC',
803     kWhatFirstTunnelFrameReady = 'ftfR',
804     kWhatPollForRenderedBuffers = 'plrb',
805     kWhatMetricsUpdated      = 'mtru',
806 };
807 
808 class CryptoAsyncCallback : public CryptoAsync::CryptoAsyncCallback {
809 public:
810 
811     explicit CryptoAsyncCallback(const sp<AMessage> & notify):mNotify(notify) {
812     }
813 
814     ~CryptoAsyncCallback() {}
815 
816     void onDecryptComplete(const sp<AMessage> &result) override {
817         (void)result;
818     }
819 
820     void onDecryptError(const std::list<sp<AMessage>> &errorMsgs) override {
821         // This may be a decrypt error or a queue error.
822         status_t errorCode;
823         for (auto &emsg : errorMsgs) {
824              sp<AMessage> notify(mNotify->dup());
825              if(emsg->findInt32("err", &errorCode)) {
826                  if (isCryptoError(errorCode)) {
827                      notify->setInt32("what", kWhatCryptoError);
828                  } else {
829                      notify->setInt32("what", kWhatError);
830                  }
831                  notify->extend(emsg);
832                  notify->post();
833              } else {
834                  ALOGW("Buffers with no errorCode are not expected");
835              }
836         }
837     }
838 private:
839     const sp<AMessage> mNotify;
840 };
841 
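// Forwards buffer release/detach (and, when enabled, attach) events from the output
// Surface to the codec's BufferChannel, tagged with the surface generation so the
// channel can ignore stale notifications from an earlier surface.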
842 class OnBufferReleasedListener : public ::android::SurfaceListener {
843 private:
844     uint32_t mGeneration;
845     std::weak_ptr<BufferChannelBase> mBufferChannel;
846 
847     void notifyBufferReleased() {
848         auto p = mBufferChannel.lock();
849         if (p) {
850             p->onBufferReleasedFromOutputSurface(mGeneration);
851         }
852     }
853 
854     void notifyBufferAttached() {
855         auto p = mBufferChannel.lock();
856         if (p) {
857             p->onBufferAttachedToOutputSurface(mGeneration);
858         }
859     }
860 
861 public:
862     explicit OnBufferReleasedListener(
863             uint32_t generation,
864             const std::shared_ptr<BufferChannelBase> &bufferChannel)
865             : mGeneration(generation), mBufferChannel(bufferChannel) {}
866 
867     virtual ~OnBufferReleasedListener() = default;
868 
869     void onBufferReleased() override {
870         notifyBufferReleased();
871     }
872 
873     void onBuffersDiscarded([[maybe_unused]] const std::vector<sp<GraphicBuffer>>& buffers)
874         override { }
875 
876     void onBufferDetached([[maybe_unused]] int slot) override {
877         notifyBufferReleased();
878     }
879 
880     bool needsReleaseNotify() override { return true; }
881 
882 #if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(BQ_CONSUMER_ATTACH_CALLBACK)
883     void onBufferAttached() override {
884         notifyBufferAttached();
885     }
886 
887     bool needsAttachNotify() override { return true; }
888 #endif
889 };
890 
891 class BufferCallback : public CodecBase::BufferCallback {
892 public:
893     explicit BufferCallback(const sp<AMessage> &notify);
894     virtual ~BufferCallback() = default;
895 
896     virtual void onInputBufferAvailable(
897             size_t index, const sp<MediaCodecBuffer> &buffer) override;
898     virtual void onOutputBufferAvailable(
899             size_t index, const sp<MediaCodecBuffer> &buffer) override;
900 private:
901     const sp<AMessage> mNotify;
902 };
903 
904 BufferCallback::BufferCallback(const sp<AMessage> &notify)
905     : mNotify(notify) {}
906 
907 void BufferCallback::onInputBufferAvailable(
908         size_t index, const sp<MediaCodecBuffer> &buffer) {
909     sp<AMessage> notify(mNotify->dup());
910     notify->setInt32("what", kWhatFillThisBuffer);
911     notify->setSize("index", index);
912     notify->setObject("buffer", buffer);
913     notify->post();
914 }
915 
916 void BufferCallback::onOutputBufferAvailable(
917         size_t index, const sp<MediaCodecBuffer> &buffer) {
918     sp<AMessage> notify(mNotify->dup());
919     notify->setInt32("what", kWhatDrainThisBuffer);
920     notify->setSize("index", index);
921     notify->setObject("buffer", buffer);
922     notify->post();
923 }
924 
925 class CodecCallback : public CodecBase::CodecCallback {
926 public:
927     explicit CodecCallback(const sp<AMessage> &notify);
928     virtual ~CodecCallback() = default;
929 
930     virtual void onEos(status_t err) override;
931     virtual void onStartCompleted() override;
932     virtual void onStopCompleted() override;
933     virtual void onReleaseCompleted() override;
934     virtual void onFlushCompleted() override;
935     virtual void onError(status_t err, enum ActionCode actionCode) override;
936     virtual void onComponentAllocated(const char *componentName) override;
937     virtual void onComponentConfigured(
938             const sp<AMessage> &inputFormat, const sp<AMessage> &outputFormat) override;
939     virtual void onInputSurfaceCreated(
940             const sp<AMessage> &inputFormat,
941             const sp<AMessage> &outputFormat,
942             const sp<BufferProducerWrapper> &inputSurface) override;
943     virtual void onInputSurfaceCreationFailed(status_t err) override;
944     virtual void onInputSurfaceAccepted(
945             const sp<AMessage> &inputFormat,
946             const sp<AMessage> &outputFormat) override;
947     virtual void onInputSurfaceDeclined(status_t err) override;
948     virtual void onSignaledInputEOS(status_t err) override;
949     virtual void onOutputFramesRendered(const std::list<RenderedFrameInfo> &done) override;
950     virtual void onOutputBuffersChanged() override;
951     virtual void onFirstTunnelFrameReady() override;
952     virtual void onMetricsUpdated(const sp<AMessage> &updatedMetrics) override;
953 private:
954     const sp<AMessage> mNotify;
955 };
956 
957 CodecCallback::CodecCallback(const sp<AMessage> &notify) : mNotify(notify) {}
958 
959 void CodecCallback::onEos(status_t err) {
960     sp<AMessage> notify(mNotify->dup());
961     notify->setInt32("what", kWhatEOS);
962     notify->setInt32("err", err);
963     notify->post();
964 }
965 
966 void CodecCallback::onStartCompleted() {
967     sp<AMessage> notify(mNotify->dup());
968     notify->setInt32("what", kWhatStartCompleted);
969     notify->post();
970 }
971 
972 void CodecCallback::onStopCompleted() {
973     sp<AMessage> notify(mNotify->dup());
974     notify->setInt32("what", kWhatStopCompleted);
975     notify->post();
976 }
977 
978 void CodecCallback::onReleaseCompleted() {
979     sp<AMessage> notify(mNotify->dup());
980     notify->setInt32("what", kWhatReleaseCompleted);
981     notify->post();
982 }
983 
984 void CodecCallback::onFlushCompleted() {
985     sp<AMessage> notify(mNotify->dup());
986     notify->setInt32("what", kWhatFlushCompleted);
987     notify->post();
988 }
989 
990 void CodecCallback::onError(status_t err, enum ActionCode actionCode) {
991     sp<AMessage> notify(mNotify->dup());
992     notify->setInt32("what", kWhatError);
993     notify->setInt32("err", err);
994     notify->setInt32("actionCode", actionCode);
995     notify->post();
996 }
997 
998 void CodecCallback::onComponentAllocated(const char *componentName) {
999     sp<AMessage> notify(mNotify->dup());
1000     notify->setInt32("what", kWhatComponentAllocated);
1001     notify->setString("componentName", componentName);
1002     notify->post();
1003 }
1004 
1005 void CodecCallback::onComponentConfigured(
1006         const sp<AMessage> &inputFormat, const sp<AMessage> &outputFormat) {
1007     sp<AMessage> notify(mNotify->dup());
1008     notify->setInt32("what", kWhatComponentConfigured);
1009     notify->setMessage("input-format", inputFormat);
1010     notify->setMessage("output-format", outputFormat);
1011     notify->post();
1012 }
1013 
1014 void CodecCallback::onInputSurfaceCreated(
1015         const sp<AMessage> &inputFormat,
1016         const sp<AMessage> &outputFormat,
1017         const sp<BufferProducerWrapper> &inputSurface) {
1018     sp<AMessage> notify(mNotify->dup());
1019     notify->setInt32("what", kWhatInputSurfaceCreated);
1020     notify->setMessage("input-format", inputFormat);
1021     notify->setMessage("output-format", outputFormat);
1022     notify->setObject("input-surface", inputSurface);
1023     notify->post();
1024 }
1025 
1026 void CodecCallback::onInputSurfaceCreationFailed(status_t err) {
1027     sp<AMessage> notify(mNotify->dup());
1028     notify->setInt32("what", kWhatInputSurfaceCreated);
1029     notify->setInt32("err", err);
1030     notify->post();
1031 }
1032 
1033 void CodecCallback::onInputSurfaceAccepted(
1034         const sp<AMessage> &inputFormat,
1035         const sp<AMessage> &outputFormat) {
1036     sp<AMessage> notify(mNotify->dup());
1037     notify->setInt32("what", kWhatInputSurfaceAccepted);
1038     notify->setMessage("input-format", inputFormat);
1039     notify->setMessage("output-format", outputFormat);
1040     notify->post();
1041 }
1042 
1043 void CodecCallback::onInputSurfaceDeclined(status_t err) {
1044     sp<AMessage> notify(mNotify->dup());
1045     notify->setInt32("what", kWhatInputSurfaceAccepted);
1046     notify->setInt32("err", err);
1047     notify->post();
1048 }
1049 
1050 void CodecCallback::onSignaledInputEOS(status_t err) {
1051     sp<AMessage> notify(mNotify->dup());
1052     notify->setInt32("what", kWhatSignaledInputEOS);
1053     if (err != OK) {
1054         notify->setInt32("err", err);
1055     }
1056     notify->post();
1057 }
1058 
1059 void CodecCallback::onOutputFramesRendered(const std::list<RenderedFrameInfo> &done) {
1060     sp<AMessage> notify(mNotify->dup());
1061     notify->setInt32("what", kWhatOutputFramesRendered);
1062     if (MediaCodec::CreateFramesRenderedMessage(done, notify)) {
1063         notify->post();
1064     }
1065 }
1066 
1067 void CodecCallback::onOutputBuffersChanged() {
1068     sp<AMessage> notify(mNotify->dup());
1069     notify->setInt32("what", kWhatOutputBuffersChanged);
1070     notify->post();
1071 }
1072 
1073 void CodecCallback::onFirstTunnelFrameReady() {
1074     sp<AMessage> notify(mNotify->dup());
1075     notify->setInt32("what", kWhatFirstTunnelFrameReady);
1076     notify->post();
1077 }
1078 
1079 void CodecCallback::onMetricsUpdated(const sp<AMessage> &updatedMetrics) {
1080     sp<AMessage> notify(mNotify->dup());
1081     notify->setInt32("what", kWhatMetricsUpdated);
1082     notify->setMessage("updated-metrics", updatedMetrics);
1083     notify->post();
1084 }
1085 
1086 static MediaResourceSubType toMediaResourceSubType(bool isHardware, MediaCodec::Domain domain) {
1087     switch (domain) {
1088     case MediaCodec::DOMAIN_VIDEO:
1089         return isHardware? MediaResourceSubType::kHwVideoCodec :
1090                            MediaResourceSubType::kSwVideoCodec;
1091     case MediaCodec::DOMAIN_AUDIO:
1092         return isHardware? MediaResourceSubType::kHwAudioCodec :
1093                            MediaResourceSubType::kSwAudioCodec;
1094     case MediaCodec::DOMAIN_IMAGE:
1095         return isHardware? MediaResourceSubType::kHwImageCodec :
1096                            MediaResourceSubType::kSwImageCodec;
1097     default:
1098         return MediaResourceSubType::kUnspecifiedSubType;
1099     }
1100 }
1101 
1102 static const char * toCodecMode(MediaCodec::Domain domain) {
1103     switch (domain) {
1104         case MediaCodec::DOMAIN_VIDEO: return kCodecModeVideo;
1105         case MediaCodec::DOMAIN_AUDIO: return kCodecModeAudio;
1106         case MediaCodec::DOMAIN_IMAGE: return kCodecModeImage;
1107         default:                       return kCodecModeUnknown;
1108     }
1109 }
1110 
1111 }  // namespace
1112 
1113 ////////////////////////////////////////////////////////////////////////////////
1114 
1115 // static
1116 sp<MediaCodec> MediaCodec::CreateByType(
1117         const sp<ALooper> &looper, const AString &mime, bool encoder, status_t *err, pid_t pid,
1118         uid_t uid) {
1119     sp<AMessage> format;
1120     return CreateByType(looper, mime, encoder, err, pid, uid, format);
1121 }
1122 
1123 sp<MediaCodec> MediaCodec::CreateByType(
1124         const sp<ALooper> &looper, const AString &mime, bool encoder, status_t *err, pid_t pid,
1125         uid_t uid, sp<AMessage> format) {
1126     Vector<AString> matchingCodecs;
1127 
1128     MediaCodecList::findMatchingCodecs(
1129             mime.c_str(),
1130             encoder,
1131             0,
1132             format,
1133             &matchingCodecs);
1134 
1135     if (err != NULL) {
1136         *err = NAME_NOT_FOUND;
1137     }
1138     for (size_t i = 0; i < matchingCodecs.size(); ++i) {
1139         sp<MediaCodec> codec = new MediaCodec(looper, pid, uid);
1140         AString componentName = matchingCodecs[i];
1141         status_t ret = codec->init(componentName);
1142         if (err != NULL) {
1143             *err = ret;
1144         }
1145         if (ret == OK) {
1146             return codec;
1147         }
1148         ALOGD("Allocating component '%s' failed (%d), try next one.",
1149                 componentName.c_str(), ret);
1150     }
1151     return NULL;
1152 }
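// Illustrative use of the factory above (a sketch; the MIME type is a placeholder
// and error handling is elided):
//
//   sp<ALooper> looper = new ALooper;
//   looper->setName("codec_looper");
//   looper->start();
//   status_t err = OK;
//   sp<MediaCodec> codec = MediaCodec::CreateByType(
//           looper, "video/avc", false /* encoder */, &err);
//   if (codec == NULL) {
//       ALOGE("no suitable decoder found (err %d)", err);
//   }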
1153 
1154 // static
1155 sp<MediaCodec> MediaCodec::CreateByComponentName(
1156         const sp<ALooper> &looper, const AString &name, status_t *err, pid_t pid, uid_t uid) {
1157     sp<MediaCodec> codec = new MediaCodec(looper, pid, uid);
1158 
1159     const status_t ret = codec->init(name);
1160     if (err != NULL) {
1161         *err = ret;
1162     }
1163     return ret == OK ? codec : NULL; // NULL deallocates codec.
1164 }
1165 
1166 // static
1167 sp<PersistentSurface> MediaCodec::CreatePersistentInputSurface() {
1168     sp<PersistentSurface> pluginSurface = CCodec::CreateInputSurface();
1169     if (pluginSurface != nullptr) {
1170         return pluginSurface;
1171     }
1172 
1173     OMXClient client;
1174     if (client.connect() != OK) {
1175         ALOGE("Failed to connect to OMX to create persistent input surface.");
1176         return NULL;
1177     }
1178 
1179     sp<IOMX> omx = client.interface();
1180 
1181     sp<IGraphicBufferProducer> bufferProducer;
1182     sp<hardware::media::omx::V1_0::IGraphicBufferSource> bufferSource;
1183 
1184     status_t err = omx->createInputSurface(&bufferProducer, &bufferSource);
1185 
1186     if (err != OK) {
1187         ALOGE("Failed to create persistent input surface.");
1188         return NULL;
1189     }
1190 
1191     return new PersistentSurface(bufferProducer, bufferSource);
1192 }
1193 
1194 // static
1195 status_t MediaCodec::getGloballyAvailableResources(std::vector<GlobalResourceInfo>& resources) {
1196     resources.clear();
1197     // Make sure codec availability feature is on.
1198     if (!android::media::codec::codec_availability()) {
1199         return ERROR_UNSUPPORTED;
1200     }
1201     // TODO: For now this is just an empty function.
1202     // The actual implementation should use the component store to query the
1203     // available resources from the HAL and populate |resources| with the result.
1204     return ERROR_UNSUPPORTED;
1205 }
1206 
1207 // GenerateCodecId generates a 64-bit random ID for each codec that is created.
1208 // The codec ID is composed of:
1209 //   - a process-unique random value in the high 32 bits
1210 //   - an atomic sequence counter in the low 32 bits
1211 //
1212 static uint64_t GenerateCodecId() {
1213     static std::atomic_uint64_t sId = [] {
1214         std::random_device rd;
1215         std::mt19937 gen(rd());
1216         std::uniform_int_distribution<uint32_t> distrib(0, UINT32_MAX);
1217         uint32_t randomID = distrib(gen);
1218         uint64_t id = randomID;
1219         return id << 32;
1220     }();
1221     return sId++;
1222 }
1223 
1224 MediaCodec::MediaCodec(
1225         const sp<ALooper> &looper, pid_t pid, uid_t uid,
1226         std::function<sp<CodecBase>(const AString &, const char *)> getCodecBase,
1227         std::function<status_t(const AString &, sp<MediaCodecInfo> *)> getCodecInfo)
1228     : mState(UNINITIALIZED),
1229       mReleasedByResourceManager(false),
1230       mLooper(looper),
1231       mCodec(NULL),
1232       mReplyID(0),
1233       mFlags(0),
1234       mStickyError(OK),
1235       mSoftRenderer(NULL),
1236       mDomain(DOMAIN_UNKNOWN),
1237       mWidth(0),
1238       mHeight(0),
1239       mRotationDegrees(0),
1240       mDequeueInputTimeoutGeneration(0),
1241       mDequeueInputReplyID(0),
1242       mDequeueOutputTimeoutGeneration(0),
1243       mDequeueOutputReplyID(0),
1244       mTunneledInputWidth(0),
1245       mTunneledInputHeight(0),
1246       mTunneled(false),
1247       mTunnelPeekState(TunnelPeekState::kLegacyMode),
1248       mTunnelPeekEnabled(false),
1249       mHaveInputSurface(false),
1250       mHavePendingInputBuffers(false),
1251       mCpuBoostRequested(false),
1252       mIsSurfaceToDisplay(false),
1253       mAreRenderMetricsEnabled(areRenderMetricsEnabled()),
1254       mVideoRenderQualityTracker(
1255               VideoRenderQualityTracker::Configuration::getFromServerConfigurableFlags(
1256                       GetServerConfigurableFlag)),
1257       mLatencyUnknown(0),
1258       mBytesEncoded(0),
1259       mEarliestEncodedPtsUs(INT64_MAX),
1260       mLatestEncodedPtsUs(INT64_MIN),
1261       mFramesEncoded(0),
1262       mNumLowLatencyEnables(0),
1263       mNumLowLatencyDisables(0),
1264       mIsLowLatencyModeOn(false),
1265       mIndexOfFirstFrameWhenLowLatencyOn(-1),
1266       mInputBufferCounter(0),
1267       mGetCodecBase(getCodecBase),
1268       mGetCodecInfo(getCodecInfo) {
1269     mCodecId = GenerateCodecId();
1270     mResourceManagerProxy = std::make_shared<ResourceManagerServiceProxy>(pid, uid,
1271             ::ndk::SharedRefBase::make<ResourceManagerClient>(this, pid, uid));
1272     if (!mGetCodecBase) {
1273         mGetCodecBase = [](const AString &name, const char *owner) {
1274             return GetCodecBase(name, owner);
1275         };
1276     }
1277     if (!mGetCodecInfo) {
1278         mGetCodecInfo = [&log = mErrorLog](const AString &name,
1279                                            sp<MediaCodecInfo> *info) -> status_t {
1280             *info = nullptr;
1281             const sp<IMediaCodecList> mcl = MediaCodecList::getInstance();
1282             if (!mcl) {
1283                 log.log(LOG_TAG, "Fatal error: failed to initialize MediaCodecList");
1284                 return NO_INIT;  // if called from Java should raise IOException
1285             }
1286             AString tmp = name;
1287             if (tmp.endsWith(".secure")) {
1288                 tmp.erase(tmp.size() - 7, 7);
1289             }
1290             for (const AString &codecName : { name, tmp }) {
1291                 ssize_t codecIdx = mcl->findCodecByName(codecName.c_str());
1292                 if (codecIdx < 0) {
1293                     continue;
1294                 }
1295                 *info = mcl->getCodecInfo(codecIdx);
1296                 return OK;
1297             }
1298             log.log(LOG_TAG, base::StringPrintf("Codec with name '%s' is not found on the device.",
1299                                   name.c_str()));
1300             return NAME_NOT_FOUND;
1301         };
1302     }
1303 
1304     // we want an empty metrics record for any early getMetrics() call
1305     // this should be the *only* initMediametrics() call that's not on the Looper thread
1306     initMediametrics();
1307 }
1308 
1309 MediaCodec::~MediaCodec() {
1310     CHECK_EQ(mState, UNINITIALIZED);
1311     mResourceManagerProxy->removeClient();
1312 
1313     flushMediametrics();  // this deletes mMetricsHandle
1314     // don't keep the last metrics handle around
1315     if (mLastMetricsHandle != 0) {
1316         mediametrics_delete(mLastMetricsHandle);
1317         mLastMetricsHandle = 0;
1318     }
1319 
1320     // clean any saved metrics info we stored as part of configure()
1321     if (mConfigureMsg != nullptr) {
1322         mediametrics_handle_t metricsHandle;
1323         if (mConfigureMsg->findInt64("metrics", &metricsHandle)) {
1324             mediametrics_delete(metricsHandle);
1325         }
1326     }
1327 }
1328 
1329 // Except when called from the constructor, this runs on the looper thread (and is therefore not mutexed).
1330 void MediaCodec::initMediametrics() {
1331     if (mMetricsHandle == 0) {
1332         mMetricsHandle = mediametrics_create(kCodecKeyName);
1333     }
1334 
1335     mLatencyHist.setup(kLatencyHistBuckets, kLatencyHistWidth, kLatencyHistFloor);
1336 
1337     {
1338         Mutex::Autolock al(mRecentLock);
1339         for (int i = 0; i<kRecentLatencyFrames; i++) {
1340             mRecentSamples[i] = kRecentSampleInvalid;
1341         }
1342         mRecentHead = 0;
1343     }
1344 
1345     {
1346         Mutex::Autolock al(mLatencyLock);
1347         mBuffersInFlight.clear();
1348         mNumLowLatencyEnables = 0;
1349         mNumLowLatencyDisables = 0;
1350         mIsLowLatencyModeOn = false;
1351         mIndexOfFirstFrameWhenLowLatencyOn = -1;
1352         mInputBufferCounter = 0;
1353     }
1354 
1355     mSubsessionCount = 0;
1356     mLifetimeStartNs = systemTime(SYSTEM_TIME_MONOTONIC);
1357     resetMetricsFields();
1358 }
1359 
1360 void MediaCodec::resetMetricsFields() {
1361     mHdrInfoFlags = 0;
1362 
1363     mApiUsageMetrics = ApiUsageMetrics();
1364     mReliabilityContextMetrics = ReliabilityContextMetrics();
1365 }
1366 
1367 // always called from the looper thread (and therefore not mutexed)
1368 void MediaCodec::resetSubsessionMetricsFields() {
1369     mBytesEncoded = 0;
1370     mFramesEncoded = 0;
1371     mFramesInput = 0;
1372     mBytesInput = 0;
1373     mEarliestEncodedPtsUs = INT64_MAX;
1374     mLatestEncodedPtsUs = INT64_MIN;
1375 }
1376 
1377 // always called from the looper thread
1378 void MediaCodec::updateMediametrics() {
1379     if (mMetricsHandle == 0) {
1380         ALOGV("no metrics handle found");
1381         return;
1382     }
1383 
1384     Mutex::Autolock _lock(mMetricsLock);
1385 
1386     mediametrics_setInt32(mMetricsHandle, kCodecArrayMode, mApiUsageMetrics.isArrayMode ? 1 : 0);
1387     mApiUsageMetrics.operationMode = (mFlags & kFlagIsAsync) ?
1388             ((mFlags & kFlagUseBlockModel) ? ApiUsageMetrics::kBlockMode
1389                     : ApiUsageMetrics::kAsynchronousMode)
1390             : ApiUsageMetrics::kSynchronousMode;
1391     mediametrics_setInt32(mMetricsHandle, kCodecOperationMode, mApiUsageMetrics.operationMode);
1392     mediametrics_setInt32(mMetricsHandle, kCodecOutputSurface,
1393             mApiUsageMetrics.isUsingOutputSurface ? 1 : 0);
1394 
1395     mediametrics_setInt32(mMetricsHandle, kCodecAppMaxInputSize,
1396             mApiUsageMetrics.inputBufferSize.appMax);
1397     mediametrics_setInt32(mMetricsHandle, kCodecUsedMaxInputSize,
1398             mApiUsageMetrics.inputBufferSize.usedMax);
1399     mediametrics_setInt32(mMetricsHandle, kCodecCodecMaxInputSize,
1400             mApiUsageMetrics.inputBufferSize.codecMax);
1401 
1402     mediametrics_setInt32(mMetricsHandle, kCodecFlushCount, mReliabilityContextMetrics.flushCount);
1403     mediametrics_setInt32(mMetricsHandle, kCodecSetSurfaceCount,
1404             mReliabilityContextMetrics.setOutputSurfaceCount);
1405     mediametrics_setInt32(mMetricsHandle, kCodecResolutionChangeCount,
1406             mReliabilityContextMetrics.resolutionChangeCount);
1407 
1408     // Video rendering quality metrics
1409     {
1410         const VideoRenderQualityMetrics &m = mVideoRenderQualityTracker.getMetrics();
1411         if (m.frameReleasedCount > 0) {
1412             mediametrics_setInt64(mMetricsHandle, kCodecFirstRenderTimeUs, m.firstRenderTimeUs);
1413             mediametrics_setInt64(mMetricsHandle, kCodecLastRenderTimeUs, m.lastRenderTimeUs);
1414             mediametrics_setInt64(mMetricsHandle, kCodecFramesReleased, m.frameReleasedCount);
1415             mediametrics_setInt64(mMetricsHandle, kCodecFramesRendered, m.frameRenderedCount);
1416             mediametrics_setInt64(mMetricsHandle, kCodecFramesSkipped, m.frameSkippedCount);
1417             mediametrics_setInt64(mMetricsHandle, kCodecFramesDropped, m.frameDroppedCount);
1418             mediametrics_setDouble(mMetricsHandle, kCodecFramerateContent, m.contentFrameRate);
1419             mediametrics_setDouble(mMetricsHandle, kCodecFramerateDesired, m.desiredFrameRate);
1420             mediametrics_setDouble(mMetricsHandle, kCodecFramerateActual, m.actualFrameRate);
1421         }
1422         if (m.freezeDurationMsHistogram.getCount() >= 1) {
1423             const MediaHistogram<int32_t> &h = m.freezeDurationMsHistogram;
1424             mediametrics_setInt64(mMetricsHandle, kCodecFreezeScore, m.freezeScore);
1425             mediametrics_setDouble(mMetricsHandle, kCodecFreezeRate, m.freezeRate);
1426             mediametrics_setInt64(mMetricsHandle, kCodecFreezeCount, h.getCount());
1427             mediametrics_setInt32(mMetricsHandle, kCodecFreezeDurationMsAvg, h.getAvg());
1428             mediametrics_setInt32(mMetricsHandle, kCodecFreezeDurationMsMax, h.getMax());
1429             mediametrics_setString(mMetricsHandle, kCodecFreezeDurationMsHistogram, h.emit());
1430             mediametrics_setString(mMetricsHandle, kCodecFreezeDurationMsHistogramBuckets,
1431                                    h.emitBuckets());
1432         }
1433         if (m.freezeDistanceMsHistogram.getCount() >= 1) {
1434             const MediaHistogram<int32_t> &h = m.freezeDistanceMsHistogram;
1435             mediametrics_setInt32(mMetricsHandle, kCodecFreezeDistanceMsAvg, h.getAvg());
1436             mediametrics_setString(mMetricsHandle, kCodecFreezeDistanceMsHistogram, h.emit());
1437             mediametrics_setString(mMetricsHandle, kCodecFreezeDistanceMsHistogramBuckets,
1438                                    h.emitBuckets());
1439         }
1440         if (m.judderScoreHistogram.getCount() >= 1) {
1441             const MediaHistogram<int32_t> &h = m.judderScoreHistogram;
1442             mediametrics_setInt64(mMetricsHandle, kCodecJudderScore, m.judderScore);
1443             mediametrics_setDouble(mMetricsHandle, kCodecJudderRate, m.judderRate);
1444             mediametrics_setInt64(mMetricsHandle, kCodecJudderCount, h.getCount());
1445             mediametrics_setInt32(mMetricsHandle, kCodecJudderScoreAvg, h.getAvg());
1446             mediametrics_setInt32(mMetricsHandle, kCodecJudderScoreMax, h.getMax());
1447             mediametrics_setString(mMetricsHandle, kCodecJudderScoreHistogram, h.emit());
1448             mediametrics_setString(mMetricsHandle, kCodecJudderScoreHistogramBuckets,
1449                                    h.emitBuckets());
1450         }
1451         if (m.freezeEventCount != 0) {
1452             mediametrics_setInt32(mMetricsHandle, kCodecFreezeEventCount, m.freezeEventCount);
1453         }
1454         if (m.judderEventCount != 0) {
1455             mediametrics_setInt32(mMetricsHandle, kCodecJudderEventCount, m.judderEventCount);
1456         }
1457     }
1458 
1459     if (mLatencyHist.getCount() != 0 ) {
1460         mediametrics_setInt64(mMetricsHandle, kCodecLatencyMax, mLatencyHist.getMax());
1461         mediametrics_setInt64(mMetricsHandle, kCodecLatencyMin, mLatencyHist.getMin());
1462         mediametrics_setInt64(mMetricsHandle, kCodecLatencyAvg, mLatencyHist.getAvg());
1463         mediametrics_setInt64(mMetricsHandle, kCodecLatencyCount, mLatencyHist.getCount());
1464 
1465         if (kEmitHistogram) {
1466             // and the histogram itself
1467             std::string hist = mLatencyHist.emit();
1468             mediametrics_setCString(mMetricsHandle, kCodecLatencyHist, hist.c_str());
1469         }
1470     }
1471     if (mLatencyUnknown > 0) {
1472         mediametrics_setInt64(mMetricsHandle, kCodecLatencyUnknown, mLatencyUnknown);
1473     }
1474     int64_t playbackDurationSec = mPlaybackDurationAccumulator.getDurationInSeconds();
1475     if (playbackDurationSec > 0) {
1476         mediametrics_setInt64(mMetricsHandle, kCodecPlaybackDurationSec, playbackDurationSec);
1477     }
1478     if (mLifetimeStartNs > 0) {
1479         nsecs_t lifetime = systemTime(SYSTEM_TIME_MONOTONIC) - mLifetimeStartNs;
1480         lifetime = lifetime / (1000 * 1000);    // emitted in ms, truncated not rounded
1481         mediametrics_setInt64(mMetricsHandle, kCodecLifetimeMs, lifetime);
1482     }
1483 
1484     if (mBytesEncoded) {
1485         Mutex::Autolock al(mOutputStatsLock);
1486 
1487         mediametrics_setInt64(mMetricsHandle, kCodecVideoEncodedBytes, mBytesEncoded);
1488         int64_t duration = 0;
1489         if (mLatestEncodedPtsUs > mEarliestEncodedPtsUs) {
1490             duration = mLatestEncodedPtsUs - mEarliestEncodedPtsUs;
1491         }
1492         mediametrics_setInt64(mMetricsHandle, kCodecVideoEncodedDurationUs, duration);
1493         mediametrics_setInt64(mMetricsHandle, kCodecVideoEncodedFrames, mFramesEncoded);
1494         mediametrics_setInt64(mMetricsHandle, kCodecVideoInputFrames, mFramesInput);
1495         mediametrics_setInt64(mMetricsHandle, kCodecVideoInputBytes, mBytesInput);
1496     }
1497 
1498     {
1499         Mutex::Autolock al(mLatencyLock);
1500         mediametrics_setInt64(mMetricsHandle, kCodecNumLowLatencyModeOn, mNumLowLatencyEnables);
1501         mediametrics_setInt64(mMetricsHandle, kCodecNumLowLatencyModeOff, mNumLowLatencyDisables);
1502         mediametrics_setInt64(mMetricsHandle, kCodecFirstFrameIndexLowLatencyModeOn,
1503                               mIndexOfFirstFrameWhenLowLatencyOn);
1504     }
1505 
1506 #if 0
1507     // enable for short term, only while debugging
1508     updateEphemeralMediametrics(mMetricsHandle);
1509 #endif
1510 }
1511 
1512 void MediaCodec::updateHdrMetrics(bool isConfig) {
1513     if ((mDomain != DOMAIN_VIDEO && mDomain != DOMAIN_IMAGE) || mMetricsHandle == 0) {
1514         return;
1515     }
1516 
1517     int32_t colorStandard = -1;
1518     if (mOutputFormat->findInt32(KEY_COLOR_STANDARD, &colorStandard)) {
1519         mediametrics_setInt32(mMetricsHandle,
1520                 isConfig ? kCodecConfigColorStandard : kCodecParsedColorStandard, colorStandard);
1521     }
1522     int32_t colorRange = -1;
1523     if (mOutputFormat->findInt32(KEY_COLOR_RANGE, &colorRange)) {
1524         mediametrics_setInt32(mMetricsHandle,
1525                 isConfig ? kCodecConfigColorRange : kCodecParsedColorRange, colorRange);
1526     }
1527     int32_t colorTransfer = -1;
1528     if (mOutputFormat->findInt32(KEY_COLOR_TRANSFER, &colorTransfer)) {
1529         mediametrics_setInt32(mMetricsHandle,
1530                 isConfig ? kCodecConfigColorTransfer : kCodecParsedColorTransfer, colorTransfer);
1531     }
1532     HDRStaticInfo info;
1533     if (ColorUtils::getHDRStaticInfoFromFormat(mOutputFormat, &info)
1534             && ColorUtils::isHDRStaticInfoValid(&info)) {
1535         mHdrInfoFlags |= kFlagHasHdrStaticInfo;
1536     }
1537     mediametrics_setInt32(mMetricsHandle, kCodecHdrStaticInfo,
1538             (mHdrInfoFlags & kFlagHasHdrStaticInfo) ? 1 : 0);
1539     sp<ABuffer> hdr10PlusInfo;
1540     if (mOutputFormat->findBuffer("hdr10-plus-info", &hdr10PlusInfo)
1541             && hdr10PlusInfo != nullptr && hdr10PlusInfo->size() > 0) {
1542         mHdrInfoFlags |= kFlagHasHdr10PlusInfo;
1543     }
1544     mediametrics_setInt32(mMetricsHandle, kCodecHdr10PlusInfo,
1545             (mHdrInfoFlags & kFlagHasHdr10PlusInfo) ? 1 : 0);
1546 
1547     // hdr format
1548     sp<AMessage> codedFormat = (mFlags & kFlagIsEncoder) ? mOutputFormat : mInputFormat;
1549 
1550     AString mime;
1551     int32_t profile = -1;
1552 
1553     if (codedFormat->findString("mime", &mime)
1554             && codedFormat->findInt32(KEY_PROFILE, &profile)
1555             && colorTransfer != -1) {
1556         hdr_format hdrFormat = getHdrFormat(mime, profile, colorTransfer);
1557         mediametrics_setInt32(mMetricsHandle, kCodecHdrFormat, static_cast<int>(hdrFormat));
1558     }
1559 }
1560 
1561 hdr_format MediaCodec::getHdrFormat(const AString &mime, const int32_t profile,
1562         const int32_t colorTransfer) {
1563     return (mFlags & kFlagIsEncoder)
1564             ? getHdrFormatForEncoder(mime, profile, colorTransfer)
1565             : getHdrFormatForDecoder(mime, profile, colorTransfer);
1566 }
1567 
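// Encoder HDR format: COLOR_TRANSFER_ST2084 maps to HDR10 or HDR10+ depending on the
// VP9/AV1/HEVC profile; COLOR_TRANSFER_HLG maps to HLG except for Dolby Vision, which is
// not reported yet.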
1568 hdr_format MediaCodec::getHdrFormatForEncoder(const AString &mime, const int32_t profile,
1569         const int32_t colorTransfer) {
1570     switch (colorTransfer) {
1571         case COLOR_TRANSFER_ST2084:
1572             if (mime.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_VP9)) {
1573                 switch (profile) {
1574                     case VP9Profile2HDR:
1575                         return HDR_FORMAT_HDR10;
1576                     case VP9Profile2HDR10Plus:
1577                         return HDR_FORMAT_HDR10PLUS;
1578                     default:
1579                         return HDR_FORMAT_NONE;
1580                 }
1581             } else if (mime.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_AV1)) {
1582                 switch (profile) {
1583                     case AV1ProfileMain10HDR10:
1584                         return HDR_FORMAT_HDR10;
1585                     case AV1ProfileMain10HDR10Plus:
1586                         return HDR_FORMAT_HDR10PLUS;
1587                     default:
1588                         return HDR_FORMAT_NONE;
1589                 }
1590             } else if (mime.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_HEVC)) {
1591                 switch (profile) {
1592                     case HEVCProfileMain10HDR10:
1593                         return HDR_FORMAT_HDR10;
1594                     case HEVCProfileMain10HDR10Plus:
1595                         return HDR_FORMAT_HDR10PLUS;
1596                     default:
1597                         return HDR_FORMAT_NONE;
1598                 }
1599             } else {
1600                 return HDR_FORMAT_NONE;
1601             }
1602         case COLOR_TRANSFER_HLG:
1603             if (!mime.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_DOLBY_VISION)) {
1604                 return HDR_FORMAT_HLG;
1605             } else {
1606                 // TODO: DOLBY format
1607                 return HDR_FORMAT_NONE;
1608             }
1609         default:
1610             return HDR_FORMAT_NONE;
1611     }
1612 }
1613 
1614 hdr_format MediaCodec::getHdrFormatForDecoder(const AString &mime, const int32_t profile,
1615         const int32_t colorTransfer) {
1616     switch (colorTransfer) {
1617         case COLOR_TRANSFER_ST2084:
1618             if (!(mHdrInfoFlags & kFlagHasHdrStaticInfo) || !profileSupport10Bits(mime, profile)) {
1619                 return HDR_FORMAT_NONE;
1620             }
1621             return mHdrInfoFlags & kFlagHasHdr10PlusInfo ? HDR_FORMAT_HDR10PLUS : HDR_FORMAT_HDR10;
1622         case COLOR_TRANSFER_HLG:
1623             if (!mime.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_DOLBY_VISION)) {
1624                 return HDR_FORMAT_HLG;
1625             }
1626             // TODO: DOLBY format
1627     }
1628     return HDR_FORMAT_NONE;
1629 }
1630 
1631 bool MediaCodec::profileSupport10Bits(const AString &mime, const int32_t profile) {
1632     if (mime.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_AV1)) {
1633         return true;
1634     } else if (mime.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_VP9)) {
1635         switch (profile) {
1636             case VP9Profile2:
1637             case VP9Profile3:
1638             case VP9Profile2HDR:
1639             case VP9Profile3HDR:
1640             case VP9Profile2HDR10Plus:
1641             case VP9Profile3HDR10Plus:
1642                 return true;
1643         }
1644     } else if (mime.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_HEVC)) {
1645         switch (profile) {
1646             case HEVCProfileMain10:
1647             case HEVCProfileMain10HDR10:
1648             case HEVCProfileMain10HDR10Plus:
1649                 return true;
1650         }
1651     }
1652     return false;
1653 }
1654 
1655 
1656 // Called to update the info passed back via getMetrics(), which is a
1657 // unique copy for that call, so there are no concurrent-access concerns.
1658 void MediaCodec::updateEphemeralMediametrics(mediametrics_handle_t item) {
1659     ALOGD("MediaCodec::updateEphemeralMediametrics()");
1660 
1661     if (item == 0) {
1662         return;
1663     }
1664 
1665     // build an empty histogram
1666     MediaHistogram<int64_t> recentHist;
1667     recentHist.setup(kLatencyHistBuckets, kLatencyHistWidth, kLatencyHistFloor);
1668 
1669     // stuff it with the samples in the ring buffer
1670     {
1671         Mutex::Autolock al(mRecentLock);
1672 
1673         for (int i = 0; i < kRecentLatencyFrames; i++) {
1674             if (mRecentSamples[i] != kRecentSampleInvalid) {
1675                 recentHist.insert(mRecentSamples[i]);
1676             }
1677         }
1678     }
1679 
1680     // spit the data (if any) into the supplied analytics record
1681     if (recentHist.getCount() != 0 ) {
1682         mediametrics_setInt64(item, kCodecRecentLatencyMax, recentHist.getMax());
1683         mediametrics_setInt64(item, kCodecRecentLatencyMin, recentHist.getMin());
1684         mediametrics_setInt64(item, kCodecRecentLatencyAvg, recentHist.getAvg());
1685         mediametrics_setInt64(item, kCodecRecentLatencyCount, recentHist.getCount());
1686 
1687         if (kEmitHistogram) {
1688             // and the histogram itself
1689             std::string hist = recentHist.emit();
1690             mediametrics_setCString(item, kCodecRecentLatencyHist, hist.c_str());
1691         }
1692     }
1693 }
1694 
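// Serializes a vector of int32_t values as a comma-separated string for metrics reporting.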
1695 static std::string emitVector(std::vector<int32_t> vector) {
1696     std::ostringstream sstr;
1697     for (size_t i = 0; i < vector.size(); ++i) {
1698         if (i != 0) {
1699             sstr << ',';
1700         }
1701         sstr << vector[i];
1702     }
1703     return sstr.str();
1704 }
1705 
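// Emits a standalone mediametrics record for a completed freeze event, including the
// average duration/distance and the per-freeze detail vectors.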
1706 static void reportToMediaMetricsIfValid(const FreezeEvent &e) {
1707     if (e.valid) {
1708         mediametrics_handle_t handle = mediametrics_create(kFreezeEventKeyName);
1709         mediametrics_setInt64(handle, kFreezeEventInitialTimeUs, e.initialTimeUs);
1710         mediametrics_setInt32(handle, kFreezeEventDurationMs, e.durationMs);
1711         mediametrics_setInt64(handle, kFreezeEventCount, e.count);
1712         mediametrics_setInt32(handle, kFreezeEventAvgDurationMs, e.sumDurationMs / e.count);
1713         mediametrics_setInt32(handle, kFreezeEventAvgDistanceMs, e.sumDistanceMs / e.count);
1714         mediametrics_setString(handle, kFreezeEventDetailsDurationMs,
1715                                emitVector(e.details.durationMs));
1716         mediametrics_setString(handle, kFreezeEventDetailsDistanceMs,
1717                                emitVector(e.details.distanceMs));
1718         mediametrics_selfRecord(handle);
1719         mediametrics_delete(handle);
1720     }
1721 }
1722 
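// Emits a standalone mediametrics record for a completed judder event, including the
// average score/distance and the per-judder detail vectors.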
1723 static void reportToMediaMetricsIfValid(const JudderEvent &e) {
1724     if (e.valid) {
1725         mediametrics_handle_t handle = mediametrics_create(kJudderEventKeyName);
1726         mediametrics_setInt64(handle, kJudderEventInitialTimeUs, e.initialTimeUs);
1727         mediametrics_setInt32(handle, kJudderEventDurationMs, e.durationMs);
1728         mediametrics_setInt64(handle, kJudderEventCount, e.count);
1729         mediametrics_setInt32(handle, kJudderEventAvgScore, e.sumScore / e.count);
1730         mediametrics_setInt32(handle, kJudderEventAvgDistanceMs, e.sumDistanceMs / e.count);
1731         mediametrics_setString(handle, kJudderEventDetailsActualDurationUs,
1732                                emitVector(e.details.actualRenderDurationUs));
1733         mediametrics_setString(handle, kJudderEventDetailsContentDurationUs,
1734                                emitVector(e.details.contentRenderDurationUs));
1735         mediametrics_setString(handle, kJudderEventDetailsDistanceMs,
1736                                emitVector(e.details.distanceMs));
1737         mediametrics_selfRecord(handle);
1738         mediametrics_delete(handle);
1739     }
1740 }
1741 
1742 // Except when called from the destructor, this runs on the looper thread.
1743 void MediaCodec::flushMediametrics() {
1744     ALOGV("flushMediametrics");
1745 
1746     // update does its own mutex locking
1747     updateMediametrics();
1748     resetMetricsFields();
1749 
1750     // ensure mutex while we do our own work
1751     Mutex::Autolock _lock(mMetricsLock);
1752     if (mMetricsHandle != 0) {
1753         if (mMetricsToUpload && mediametrics_count(mMetricsHandle) > 0) {
1754             mediametrics_selfRecord(mMetricsHandle);
1755         }
1756         // keep previous metrics handle for subsequent getMetrics() calls.
1757         // NOTE: There could be multiple error events, each flushing the metrics.
1758         // We keep the last non-empty metrics handle, so getMetrics() in the
1759         // next call will get the latest metrics prior to the errors.
1760         if (mLastMetricsHandle != 0) {
1761             mediametrics_delete(mLastMetricsHandle);
1762         }
1763         mLastMetricsHandle = mMetricsHandle;
1764         mMetricsHandle = 0;
1765     }
1766     // we no longer have anything pending upload
1767     mMetricsToUpload = false;
1768 
1769     // Freeze and judder events are reported separately
1770     reportToMediaMetricsIfValid(mVideoRenderQualityTracker.getAndResetFreezeEvent());
1771     reportToMediaMetricsIfValid(mVideoRenderQualityTracker.getAndResetJudderEvent());
1772 }
1773 
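// Tracks low-latency mode toggles requested via the "low-latency" parameter so they can be
// reported in metrics.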
1774 void MediaCodec::updateLowLatency(const sp<AMessage> &msg) {
1775     int32_t lowLatency = 0;
1776     if (msg->findInt32("low-latency", &lowLatency)) {
1777         Mutex::Autolock al(mLatencyLock);
1778         if (lowLatency > 0) {
1779             ++mNumLowLatencyEnables;
1780             // This is just an estimate since a low-latency mode change takes effect only at a key frame.
1781             mIsLowLatencyModeOn = true;
1782         } else if (lowLatency == 0) {
1783             ++mNumLowLatencyDisables;
1784             // This is just an estimate since a low-latency mode change takes effect only at a key frame.
1785             mIsLowLatencyModeOn = false;
1786         }
1787     }
1788 }
1789 
1790 void MediaCodec::updateCodecImportance(const sp<AMessage>& msg) {
1791     // Update the codec importance.
1792     int32_t importance = 0;
1793     if (msg->findInt32(KEY_IMPORTANCE, &importance)) {
1794         // Ignore negative importance values.
1795         if (importance >= 0) {
1796             // Notify RM about the change in the importance.
1797             mResourceManagerProxy->setImportance(importance);
1798             ClientConfigParcel clientConfig;
1799             initClientConfigParcel(clientConfig);
1800             mResourceManagerProxy->notifyClientConfigChanged(clientConfig);
1801         }
1802     }
1803 }
1804 
1805 constexpr const char *MediaCodec::asString(TunnelPeekState state, const char *default_string){
1806     switch(state) {
1807         case TunnelPeekState::kLegacyMode:
1808             return "LegacyMode";
1809         case TunnelPeekState::kEnabledNoBuffer:
1810             return "EnabledNoBuffer";
1811         case TunnelPeekState::kDisabledNoBuffer:
1812             return "DisabledNoBuffer";
1813         case TunnelPeekState::kBufferDecoded:
1814             return "BufferDecoded";
1815         case TunnelPeekState::kBufferRendered:
1816             return "BufferRendered";
1817         case TunnelPeekState::kDisabledQueued:
1818             return "DisabledQueued";
1819         case TunnelPeekState::kEnabledQueued:
1820             return "EnabledQueued";
1821         default:
1822             return default_string;
1823     }
1824 }
1825 
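// Advances the tunnel-peek state machine for a "tunnel-peek" parameter change; if a frame
// has already been decoded, asks the codec to render it immediately.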
1826 void MediaCodec::updateTunnelPeek(const sp<AMessage> &msg) {
1827     int32_t tunnelPeek = 0;
1828     if (!msg->findInt32("tunnel-peek", &tunnelPeek)){
1829         return;
1830     }
1831 
1832     TunnelPeekState previousState = mTunnelPeekState;
1833     if(tunnelPeek == 0){
1834         mTunnelPeekEnabled = false;
1835         switch (mTunnelPeekState) {
1836             case TunnelPeekState::kLegacyMode:
1837                 msg->setInt32("android._tunnel-peek-set-legacy", 0);
1838                 [[fallthrough]];
1839             case TunnelPeekState::kEnabledNoBuffer:
1840                 mTunnelPeekState = TunnelPeekState::kDisabledNoBuffer;
1841                 break;
1842             case TunnelPeekState::kEnabledQueued:
1843                 mTunnelPeekState = TunnelPeekState::kDisabledQueued;
1844                 break;
1845             default:
1846                 ALOGV("Ignoring tunnel-peek=%d for %s", tunnelPeek, asString(mTunnelPeekState));
1847                 return;
1848         }
1849     } else {
1850         mTunnelPeekEnabled = true;
1851         switch (mTunnelPeekState) {
1852             case TunnelPeekState::kLegacyMode:
1853                 msg->setInt32("android._tunnel-peek-set-legacy", 0);
1854                 [[fallthrough]];
1855             case TunnelPeekState::kDisabledNoBuffer:
1856                 mTunnelPeekState = TunnelPeekState::kEnabledNoBuffer;
1857                 break;
1858             case TunnelPeekState::kDisabledQueued:
1859                 mTunnelPeekState = TunnelPeekState::kEnabledQueued;
1860                 break;
1861             case TunnelPeekState::kBufferDecoded:
1862                 msg->setInt32("android._trigger-tunnel-peek", 1);
1863                 mTunnelPeekState = TunnelPeekState::kBufferRendered;
1864                 break;
1865             default:
1866                 ALOGV("Ignoring tunnel-peek=%d for %s", tunnelPeek, asString(mTunnelPeekState));
1867                 return;
1868         }
1869     }
1870 
1871     ALOGV("TunnelPeekState: %s -> %s", asString(previousState), asString(mTunnelPeekState));
1872 }
1873 
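// Handles kWhatOutputFramesRendered: accumulates playback duration and feeds per-frame
// render times to the video render quality tracker when output goes to the display.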
1874 void MediaCodec::processRenderedFrames(const sp<AMessage> &msg) {
1875     int what = 0;
1876     msg->findInt32("what", &what);
1877     if (msg->what() != kWhatCodecNotify && what != kWhatOutputFramesRendered) {
1878         static bool logged = false;
1879         if (!logged) {
1880             logged = true;
1881             ALOGE("processRenderedFrames: expected kWhatOutputFramesRendered (%d)", msg->what());
1882         }
1883         return;
1884     }
1885     // Rendered frames only matter if they're being sent to the display
1886     if (mIsSurfaceToDisplay) {
1887         int64_t renderTimeNs;
1888         for (size_t index = 0;
1889             msg->findInt64(AStringPrintf("%zu-system-nano", index).c_str(), &renderTimeNs);
1890             index++) {
1891             // Capture metrics for playback duration
1892             mPlaybackDurationAccumulator.onFrameRendered(renderTimeNs);
1893             // Capture metrics for quality
1894             int64_t mediaTimeUs = 0;
1895             if (!msg->findInt64(AStringPrintf("%zu-media-time-us", index).c_str(), &mediaTimeUs)) {
1896                 ALOGE("processRenderedFrames: no media time found");
1897                 continue;
1898             }
1899             // Tunneled frames use INT64_MAX to indicate end-of-stream, so don't report it as a
1900             // rendered frame.
1901             if (!mTunneled || mediaTimeUs != INT64_MAX) {
1902                 FreezeEvent freezeEvent;
1903                 JudderEvent judderEvent;
1904                 mVideoRenderQualityTracker.onFrameRendered(mediaTimeUs, renderTimeNs, &freezeEvent,
1905                                                            &judderEvent);
1906                 reportToMediaMetricsIfValid(freezeEvent);
1907                 reportToMediaMetricsIfValid(judderEvent);
1908             }
1909         }
1910     }
1911 }
1912 
1913 // Called when we send a buffer to the codec.
1914 void MediaCodec::statsBufferSent(int64_t presentationUs, const sp<MediaCodecBuffer> &buffer) {
1915 
1916     // only enqueue if we have a legitimate time
1917     if (presentationUs <= 0) {
1918         ALOGV("presentation time: %" PRId64, presentationUs);
1919         return;
1920     }
1921 
1922     if (mBatteryChecker != nullptr) {
1923         mBatteryChecker->onCodecActivity([this] () {
1924             mResourceManagerProxy->addResource(MediaResource::VideoBatteryResource(mIsHardware));
1925         });
1926     }
1927 
1928     // NOTE: these were erroneously restricted to video encoders, but we want them for all
1929     // codecs.
1930     if (android::media::codec::provider_->subsession_metrics()
1931             || (mDomain == DOMAIN_VIDEO && (mFlags & kFlagIsEncoder))) {
1932         mBytesInput += buffer->size();
1933         mFramesInput++;
1934     }
1935 
1936     // mutex access to mBuffersInFlight and other stats
1937     Mutex::Autolock al(mLatencyLock);
1938 
1939     // XXX: we *could* make sure that the time is later than the end of queue
1940     // as part of a consistency check...
1941     if (!mTunneled) {
1942         const int64_t nowNs = systemTime(SYSTEM_TIME_MONOTONIC);
1943         BufferFlightTiming_t startdata = { presentationUs, nowNs };
1944         mBuffersInFlight.push_back(startdata);
1945     }
1946 
1947     if (mIsLowLatencyModeOn && mIndexOfFirstFrameWhenLowLatencyOn < 0) {
1948         mIndexOfFirstFrameWhenLowLatencyOn = mInputBufferCounter;
1949     }
1950     ++mInputBufferCounter;
1951 }
1952 
1953 // Called when we get a buffer back from the codec; always runs on the looper thread.
1954 void MediaCodec::statsBufferReceived(int64_t presentationUs, const sp<MediaCodecBuffer> &buffer) {
1955 
1956     CHECK_NE(mState, UNINITIALIZED);
1957 
1958     // NOTE: these were erroneously restricted to video encoders, but we want them for all
1959     // codecs.
1960     if (android::media::codec::provider_->subsession_metrics()
1961             || (mDomain == DOMAIN_VIDEO && (mFlags & kFlagIsEncoder))) {
1962         int32_t flags = 0;
1963         (void) buffer->meta()->findInt32("flags", &flags);
1964 
1965         // Some of these frames we don't want to count:
1966         // a standalone EOS buffer has an invalid timestamp.
1967         if ((flags & (BUFFER_FLAG_CODECCONFIG|BUFFER_FLAG_EOS)) == 0) {
1968             mBytesEncoded += buffer->size();
1969             mFramesEncoded++;
1970 
1971             Mutex::Autolock al(mOutputStatsLock);
1972             int64_t timeUs = 0;
1973             if (buffer->meta()->findInt64("timeUs", &timeUs)) {
1974                 if (timeUs > mLatestEncodedPtsUs) {
1975                     mLatestEncodedPtsUs = timeUs;
1976                 }
1977                 // can't chain as an else-if or this never triggers
1978                 if (timeUs < mEarliestEncodedPtsUs) {
1979                     mEarliestEncodedPtsUs = timeUs;
1980                 }
1981             }
1982         }
1983     }
1984 
1985     // mutex access to mBuffersInFlight and other stats
1986     Mutex::Autolock al(mLatencyLock);
1987 
1988     // how long this buffer took for the round trip through the codec
1989     // NB: pipelining can/will make these times larger. e.g., if each packet
1990     // is always 2 msec and we have 3 in flight at any given time, we're going to
1991     // see "6 msec" as an answer.
1992 
1993     // ignore stuff with no presentation time
1994     if (presentationUs <= 0) {
1995         ALOGV("-- returned buffer timestamp %" PRId64 " <= 0, ignore it", presentationUs);
1996         mLatencyUnknown++;
1997         return;
1998     }
1999 
2000     if (mBatteryChecker != nullptr) {
2001         mBatteryChecker->onCodecActivity([this] () {
2002             mResourceManagerProxy->addResource(MediaResource::VideoBatteryResource(mIsHardware));
2003         });
2004     }
2005 
2006     BufferFlightTiming_t startdata;
2007     bool valid = false;
2008     while (mBuffersInFlight.size() > 0) {
2009         startdata = *mBuffersInFlight.begin();
2010         ALOGV("-- Looking at startdata. presentation %" PRId64 ", start %" PRId64,
2011               startdata.presentationUs, startdata.startedNs);
2012         if (startdata.presentationUs == presentationUs) {
2013             // a match
2014             ALOGV("-- match entry for %" PRId64 ", hits our frame of %" PRId64,
2015                   startdata.presentationUs, presentationUs);
2016             mBuffersInFlight.pop_front();
2017             valid = true;
2018             break;
2019         } else if (startdata.presentationUs < presentationUs) {
2020             // we must have missed the match for this, drop it and keep looking
2021             ALOGV("--  drop entry for %" PRId64 ", before our frame of %" PRId64,
2022                   startdata.presentationUs, presentationUs);
2023             mBuffersInFlight.pop_front();
2024             continue;
2025         } else {
2026             // head is after, so we don't have a frame for ourselves
2027             ALOGV("--  found entry for %" PRId64 ", AFTER our frame of %" PRId64
2028                   " we have nothing to pair with",
2029                   startdata.presentationUs, presentationUs);
2030             mLatencyUnknown++;
2031             return;
2032         }
2033     }
2034     if (!valid) {
2035         ALOGV("-- empty queue, so ignore that.");
2036         mLatencyUnknown++;
2037         return;
2038     }
2039 
2040     // now start our calculations
2041     const int64_t nowNs = systemTime(SYSTEM_TIME_MONOTONIC);
2042     int64_t latencyUs = (nowNs - startdata.startedNs + 500) / 1000;
2043 
2044     mLatencyHist.insert(latencyUs);
2045 
2046     // push into the recent samples
2047     {
2048         Mutex::Autolock al(mRecentLock);
2049 
2050         if (mRecentHead >= kRecentLatencyFrames) {
2051             mRecentHead = 0;
2052         }
2053         mRecentSamples[mRecentHead++] = latencyUs;
2054     }
2055 }
2056 
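// If the output buffer at |index| is flagged DECODE_ONLY, releases it back to the codec
// without surfacing it to the client and returns true.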
2057 bool MediaCodec::discardDecodeOnlyOutputBuffer(size_t index) {
2058     Mutex::Autolock al(mBufferLock);
2059     BufferInfo *info = &mPortBuffers[kPortIndexOutput][index];
2060     sp<MediaCodecBuffer> buffer = info->mData;
2061     int32_t flags;
2062     CHECK(buffer->meta()->findInt32("flags", &flags));
2063     if (flags & BUFFER_FLAG_DECODE_ONLY) {
2064         ALOGV("discardDecodeOnlyOutputBuffer: mPortBuffers[out][%zu] NOT owned by client", index);
2065         info->mOwnedByClient = false;
2066         info->mData.clear();
2067         mBufferChannel->discardBuffer(buffer);
2068         return true;
2069     }
2070     return false;
2071 }
2072 
2073 // static
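// Posts the message and blocks until a reply arrives; an "err" field in the reply, when
// present, becomes the return value.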
2074 status_t MediaCodec::PostAndAwaitResponse(
2075         const sp<AMessage> &msg, sp<AMessage> *response) {
2076     status_t err = msg->postAndAwaitResponse(response);
2077 
2078     if (err != OK) {
2079         return err;
2080     }
2081 
2082     if (!(*response)->findInt32("err", &err)) {
2083         err = OK;
2084     }
2085 
2086     return err;
2087 }
2088 
2089 void MediaCodec::PostReplyWithError(const sp<AMessage> &msg, int32_t err) {
2090     sp<AReplyToken> replyID;
2091     CHECK(msg->senderAwaitsResponse(&replyID));
2092     PostReplyWithError(replyID, err);
2093 }
2094 
2095 void MediaCodec::PostReplyWithError(const sp<AReplyToken> &replyID, int32_t err) {
2096     int32_t finalErr = err;
2097     if (mReleasedByResourceManager) {
2098         // override the err code if MediaCodec has been released by ResourceManager.
2099         finalErr = DEAD_OBJECT;
2100     }
2101 
2102     sp<AMessage> response = new AMessage;
2103     response->setInt32("err", finalErr);
2104     response->postReply(replyID);
2105 }
2106 
2107 static CodecBase *CreateCCodec() {
2108     return new CCodec;
2109 }
2110 
2111 //static
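// Maps a component to its backing implementation: Codec2 components (owner "codec2*" or
// name prefix "c2.") use CCodec; OMX components (owner "default" or name prefix "omx.")
// use ACodec.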
2112 sp<CodecBase> MediaCodec::GetCodecBase(const AString &name, const char *owner) {
2113     if (owner) {
2114         if (strcmp(owner, "default") == 0) {
2115             return new ACodec;
2116         } else if (strncmp(owner, "codec2", 6) == 0) {
2117             return CreateCCodec();
2118         }
2119     }
2120 
2121     if (name.startsWithIgnoreCase("c2.")) {
2122         return CreateCCodec();
2123     } else if (name.startsWithIgnoreCase("omx.")) {
2124         // at this time only ACodec specifies a mime type.
2125         return new ACodec;
2126     } else {
2127         return NULL;
2128     }
2129 }
2130 
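// Caches the MediaCodecList contents as a name -> MediaCodecInfo map, built once on
// first use.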
2131 struct CodecListCache {
2132     CodecListCache()
2133         : mCodecInfoMap{[] {
2134               const sp<IMediaCodecList> mcl = MediaCodecList::getInstance();
2135               size_t count = mcl->countCodecs();
2136               std::map<std::string, sp<MediaCodecInfo>> codecInfoMap;
2137               for (size_t i = 0; i < count; ++i) {
2138                   sp<MediaCodecInfo> info = mcl->getCodecInfo(i);
2139                   codecInfoMap.emplace(info->getCodecName(), info);
2140               }
2141               return codecInfoMap;
2142           }()} {
2143     }
2144 
2145     const std::map<std::string, sp<MediaCodecInfo>> mCodecInfoMap;
2146 };
2147 
2148 static const CodecListCache &GetCodecListCache() {
2149     static CodecListCache sCache{};
2150     return sCache;
2151 }
2152 
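// Resolves the codec info, picks the backing CodecBase, sets up the looper(s) and buffer
// channel, and allocates the component, retrying after resource reclamation on resource
// errors.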
2153 status_t MediaCodec::init(const AString &name) {
2154     status_t err = mResourceManagerProxy->init();
2155     if (err != OK) {
2156         mErrorLog.log(LOG_TAG, base::StringPrintf(
2157                 "Fatal error: failed to initialize ResourceManager (err=%d)", err));
2158         mCodec = NULL; // remove the codec
2159         return err;
2160     }
2161 
2162     // save init parameters for reset
2163     mInitName = name;
2164 
2165     // Current video decoders do not return from OMX_FillThisBuffer
2166     // quickly, violating the OpenMAX spec; until that is remedied,
2167     // we need a dedicated looper to keep the main event
2168     // queue free.
2169 
2170     mCodecInfo.clear();
2171 
2172     bool secureCodec = false;
2173     const char *owner = "";
2174     if (!name.startsWith("android.filter.")) {
2175         err = mGetCodecInfo(name, &mCodecInfo);
2176         if (err != OK) {
2177             mErrorLog.log(LOG_TAG, base::StringPrintf(
2178                     "Getting codec info with name '%s' failed (err=%d)", name.c_str(), err));
2179             mCodec = NULL;  // remove the codec.
2180             return err;
2181         }
2182         if (mCodecInfo == nullptr) {
2183             mErrorLog.log(LOG_TAG, base::StringPrintf(
2184                     "Getting codec info with name '%s' failed", name.c_str()));
2185             return NAME_NOT_FOUND;
2186         }
2187         secureCodec = name.endsWith(".secure");
2188         Vector<AString> mediaTypes;
2189         mCodecInfo->getSupportedMediaTypes(&mediaTypes);
2190         for (size_t i = 0; i < mediaTypes.size(); ++i) {
2191             if (mediaTypes[i].startsWith("video/")) {
2192                 mDomain = DOMAIN_VIDEO;
2193                 break;
2194             } else if (mediaTypes[i].startsWith("audio/")) {
2195                 mDomain = DOMAIN_AUDIO;
2196                 break;
2197             } else if (mediaTypes[i].startsWith("image/")) {
2198                 mDomain = DOMAIN_IMAGE;
2199                 break;
2200             }
2201         }
2202         owner = mCodecInfo->getOwnerName();
2203     }
2204 
2205     mCodec = mGetCodecBase(name, owner);
2206     if (mCodec == NULL) {
2207         mErrorLog.log(LOG_TAG, base::StringPrintf(
2208                 "Getting codec base with name '%s' (from '%s' HAL) failed", name.c_str(), owner));
2209         return NAME_NOT_FOUND;
2210     }
2211 
2212     if (mDomain == DOMAIN_VIDEO) {
2213         // video codec needs dedicated looper
2214         if (mCodecLooper == NULL) {
2215             status_t err = OK;
2216             mCodecLooper = new ALooper;
2217             mCodecLooper->setName("CodecLooper");
2218             err = mCodecLooper->start(false, false, ANDROID_PRIORITY_AUDIO);
2219             if (OK != err) {
2220                 mErrorLog.log(LOG_TAG, "Fatal error: codec looper failed to start");
2221                 return err;
2222             }
2223         }
2224 
2225         mCodecLooper->registerHandler(mCodec);
2226     } else {
2227         mLooper->registerHandler(mCodec);
2228     }
2229 
2230     mLooper->registerHandler(this);
2231 
2232     mCodec->setCallback(
2233             std::unique_ptr<CodecBase::CodecCallback>(
2234                     new CodecCallback(new AMessage(kWhatCodecNotify, this))));
2235     mBufferChannel = mCodec->getBufferChannel();
2236     mBufferChannel->setCallback(
2237             std::unique_ptr<CodecBase::BufferCallback>(
2238                     new BufferCallback(new AMessage(kWhatCodecNotify, this))));
2239     sp<AMessage> msg = new AMessage(kWhatInit, this);
2240     if (mCodecInfo) {
2241         msg->setObject("codecInfo", mCodecInfo);
2242         // name may be different from mCodecInfo->getCodecName() if we stripped
2243         // ".secure"
2244     }
2245     msg->setString("name", name);
2246 
2247     // initial naming setup covers the period before the first call to ::configure().
2248     // after that, we manage this through ::configure() and the setup message.
2249     if (mMetricsHandle != 0) {
2250         mediametrics_setCString(mMetricsHandle, kCodecCodec, name.c_str());
2251         mediametrics_setCString(mMetricsHandle, kCodecMode, toCodecMode(mDomain));
2252     }
2253 
2254     if (mDomain == DOMAIN_VIDEO) {
2255         mBatteryChecker = new BatteryChecker(new AMessage(kWhatCheckBatteryStats, this));
2256     }
2257 
2258     // If the ComponentName is not set yet, use the name passed by the user.
2259     if (mComponentName.empty()) {
2260         mIsHardware = !MediaCodecList::isSoftwareCodec(name);
2261         mResourceManagerProxy->setCodecName(name.c_str());
2262     }
2263 
2264     std::vector<MediaResourceParcel> resources;
2265     resources.push_back(MediaResource::CodecResource(secureCodec,
2266                                                      toMediaResourceSubType(mIsHardware, mDomain)));
2267 
2268     for (int i = 0; i <= kMaxRetry; ++i) {
2269         if (i > 0) {
2270             // Don't try to reclaim resources on the first attempt.
2271             if (!mResourceManagerProxy->reclaimResource(resources)) {
2272                 break;
2273             }
2274         }
2275 
2276         sp<AMessage> response;
2277         err = PostAndAwaitResponse(msg, &response);
2278         if (!isResourceError(err)) {
2279             break;
2280         }
2281     }
2282 
2283     if (OK == err) {
2284         // Notify the ResourceManager that this codec has been created
2285         // (initialized) successfully.
2286         mResourceManagerProxy->notifyClientCreated();
2287     }
2288     return err;
2289 }
2290 
2291 status_t MediaCodec::setCallback(const sp<AMessage> &callback) {
2292     sp<AMessage> msg = new AMessage(kWhatSetCallback, this);
2293     msg->setMessage("callback", callback);
2294 
2295     sp<AMessage> response;
2296     return PostAndAwaitResponse(msg, &response);
2297 }
2298 
2299 status_t MediaCodec::setOnFrameRenderedNotification(const sp<AMessage> &notify) {
2300     sp<AMessage> msg = new AMessage(kWhatSetNotification, this);
2301     msg->setMessage("on-frame-rendered", notify);
2302     return msg->post();
2303 }
2304 
2305 status_t MediaCodec::setOnFirstTunnelFrameReadyNotification(const sp<AMessage> &notify) {
2306     sp<AMessage> msg = new AMessage(kWhatSetNotification, this);
2307     msg->setMessage("first-tunnel-frame-ready", notify);
2308     return msg->post();
2309 }
2310 
2311 /*
2312  * MediaFormat Shaping forward declarations
2313  * including the property name we use for control.
2314  */
2315 static int enableMediaFormatShapingDefault = 1;
2316 static const char enableMediaFormatShapingProperty[] = "debug.stagefright.enableshaping";
2317 static void mapFormat(AString componentName, const sp<AMessage> &format, const char *kind,
2318                       bool reverse);
2319 
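// Builds the mediametrics record used by configure(): captures format-derived fields
// (resolution, rates, QP bounds, channel count/sample rate) and applies format shaping
// for encoders.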
2320 mediametrics_handle_t MediaCodec::createMediaMetrics(const sp<AMessage>& format,
2321                                                      uint32_t flags,
2322                                                      status_t* err) {
2323     *err = OK;
2324     mediametrics_handle_t nextMetricsHandle = mediametrics_create(kCodecKeyName);
2325     bool isEncoder = (flags & CONFIGURE_FLAG_ENCODE);
2326 
2327     // TODO: validity check log-session-id: it should be a 32-hex-digit.
2328     format->findString("log-session-id", &mLogSessionId);
2329 
2330     if (nextMetricsHandle != 0) {
2331         mediametrics_setInt64(nextMetricsHandle, kCodecId, mCodecId);
2332         int32_t profile = 0;
2333         if (format->findInt32("profile", &profile)) {
2334             mediametrics_setInt32(nextMetricsHandle, kCodecProfile, profile);
2335         }
2336         int32_t level = 0;
2337         if (format->findInt32("level", &level)) {
2338             mediametrics_setInt32(nextMetricsHandle, kCodecLevel, level);
2339         }
2340         mediametrics_setInt32(nextMetricsHandle, kCodecEncoder, isEncoder);
2341 
2342         if (!mLogSessionId.empty()) {
2343             mediametrics_setCString(nextMetricsHandle, kCodecLogSessionId, mLogSessionId.c_str());
2344         }
2345 
2346         // moved here from ::init()
2347         mediametrics_setCString(nextMetricsHandle, kCodecCodec, mInitName.c_str());
2348         mediametrics_setCString(nextMetricsHandle, kCodecMode, toCodecMode(mDomain));
2349     }
2350 
2351     if (mDomain == DOMAIN_VIDEO || mDomain == DOMAIN_IMAGE) {
2352         format->findInt32("width", &mWidth);
2353         format->findInt32("height", &mHeight);
2354         if (!format->findInt32("rotation-degrees", &mRotationDegrees)) {
2355             mRotationDegrees = 0;
2356         }
2357         if (nextMetricsHandle != 0) {
2358             mediametrics_setInt32(nextMetricsHandle, kCodecWidth, mWidth);
2359             mediametrics_setInt32(nextMetricsHandle, kCodecHeight, mHeight);
2360             mediametrics_setInt32(nextMetricsHandle, kCodecRotation, mRotationDegrees);
2361             int32_t maxWidth = 0;
2362             if (format->findInt32("max-width", &maxWidth)) {
2363                 mediametrics_setInt32(nextMetricsHandle, kCodecMaxWidth, maxWidth);
2364             }
2365             int32_t maxHeight = 0;
2366             if (format->findInt32("max-height", &maxHeight)) {
2367                 mediametrics_setInt32(nextMetricsHandle, kCodecMaxHeight, maxHeight);
2368             }
2369             int32_t colorFormat = -1;
2370             if (format->findInt32("color-format", &colorFormat)) {
2371                 mediametrics_setInt32(nextMetricsHandle, kCodecColorFormat, colorFormat);
2372             }
2373             int32_t appMaxInputSize = -1;
2374             if (format->findInt32(KEY_MAX_INPUT_SIZE, &appMaxInputSize)) {
2375                 mApiUsageMetrics.inputBufferSize.appMax = appMaxInputSize;
2376             }
2377             if (mDomain == DOMAIN_VIDEO) {
2378                 float frameRate = -1.0;
2379                 if (format->findFloat("frame-rate", &frameRate)) {
2380                     mediametrics_setDouble(nextMetricsHandle, kCodecFrameRate, frameRate);
2381                 }
2382                 float captureRate = -1.0;
2383                 if (format->findFloat("capture-rate", &captureRate)) {
2384                     mediametrics_setDouble(nextMetricsHandle, kCodecCaptureRate, captureRate);
2385                 }
2386                 float operatingRate = -1.0;
2387                 if (format->findFloat("operating-rate", &operatingRate)) {
2388                     mediametrics_setDouble(nextMetricsHandle, kCodecOperatingRate, operatingRate);
2389                 }
2390                 int32_t priority = -1;
2391                 if (format->findInt32("priority", &priority)) {
2392                     mediametrics_setInt32(nextMetricsHandle, kCodecPriority, priority);
2393                 }
2394             }
2395         }
2396 
2397         // Prevent possible integer overflow in downstream code.
2398         if (mWidth < 0 || mHeight < 0 ||
2399                (uint64_t)mWidth * mHeight > (uint64_t)INT32_MAX / 4) {
2400             mErrorLog.log(LOG_TAG, base::StringPrintf(
2401                     "Invalid size(s), width=%d, height=%d", mWidth, mHeight));
2402             mediametrics_delete(nextMetricsHandle);
2403             // Set the error code and return null handle.
2404             *err = BAD_VALUE;
2405             return 0;
2406         }
2407 
2408     } else {
2409         if (nextMetricsHandle != 0) {
2410             int32_t channelCount;
2411             if (format->findInt32(KEY_CHANNEL_COUNT, &channelCount)) {
2412                 mediametrics_setInt32(nextMetricsHandle, kCodecChannelCount, channelCount);
2413             }
2414             int32_t sampleRate;
2415             if (format->findInt32(KEY_SAMPLE_RATE, &sampleRate)) {
2416                 mediametrics_setInt32(nextMetricsHandle, kCodecSampleRate, sampleRate);
2417             }
2418         }
2419     }
2420 
2421     if (isEncoder) {
2422         int8_t enableShaping = property_get_bool(enableMediaFormatShapingProperty,
2423                                                  enableMediaFormatShapingDefault);
2424         if (!enableShaping) {
2425             ALOGI("format shaping disabled, property '%s'", enableMediaFormatShapingProperty);
2426             if (nextMetricsHandle != 0) {
2427                 mediametrics_setInt32(nextMetricsHandle, kCodecShapingEnhanced, -1);
2428             }
2429         } else {
2430             (void) shapeMediaFormat(format, flags, nextMetricsHandle);
2431             // XXX: do we want to do this regardless of shaping enablement?
2432             mapFormat(mComponentName, format, nullptr, false);
2433         }
2434     }
2435 
2436     // push min/max QP to MediaMetrics after shaping
2437     if (mDomain == DOMAIN_VIDEO && nextMetricsHandle != 0) {
2438         int32_t qpIMin = -1;
2439         if (format->findInt32("video-qp-i-min", &qpIMin)) {
2440             mediametrics_setInt32(nextMetricsHandle, kCodecRequestedVideoQPIMin, qpIMin);
2441         }
2442         int32_t qpIMax = -1;
2443         if (format->findInt32("video-qp-i-max", &qpIMax)) {
2444             mediametrics_setInt32(nextMetricsHandle, kCodecRequestedVideoQPIMax, qpIMax);
2445         }
2446         int32_t qpPMin = -1;
2447         if (format->findInt32("video-qp-p-min", &qpPMin)) {
2448             mediametrics_setInt32(nextMetricsHandle, kCodecRequestedVideoQPPMin, qpPMin);
2449         }
2450         int32_t qpPMax = -1;
2451         if (format->findInt32("video-qp-p-max", &qpPMax)) {
2452             mediametrics_setInt32(nextMetricsHandle, kCodecRequestedVideoQPPMax, qpPMax);
2453         }
2454         int32_t qpBMin = -1;
2455         if (format->findInt32("video-qp-b-min", &qpBMin)) {
2456             mediametrics_setInt32(nextMetricsHandle, kCodecRequestedVideoQPBMin, qpBMin);
2457         }
2458         int32_t qpBMax = -1;
2459         if (format->findInt32("video-qp-b-max", &qpBMax)) {
2460             mediametrics_setInt32(nextMetricsHandle, kCodecRequestedVideoQPBMax, qpBMax);
2461         }
2462     }
2463 
2464     updateLowLatency(format);
2465 
2466     return nextMetricsHandle;
2467 }
2468 
2469 status_t MediaCodec::configure(
2470         const sp<AMessage> &format,
2471         const sp<Surface> &nativeWindow,
2472         const sp<ICrypto> &crypto,
2473         uint32_t flags) {
2474     return configure(format, nativeWindow, crypto, NULL, flags);
2475 }
2476 
2477 status_t MediaCodec::configure(
2478         const sp<AMessage> &format,
2479         const sp<Surface> &surface,
2480         const sp<ICrypto> &crypto,
2481         const sp<IDescrambler> &descrambler,
2482         uint32_t flags) {
2483 
2484     // Update the codec importance.
2485     updateCodecImportance(format);
2486 
2487     // Create and set up metrics for this codec.
2488     status_t err = OK;
2489     mediametrics_handle_t nextMetricsHandle = createMediaMetrics(format, flags, &err);
2490     if (err != OK) {
2491         return err;
2492     }
2493 
2494     sp<AMessage> msg = new AMessage(kWhatConfigure, this);
2495     msg->setMessage("format", format);
2496     msg->setInt32("flags", flags);
2497     msg->setObject("surface", surface);
2498 
2499     if (crypto != NULL || descrambler != NULL) {
2500         if (crypto != NULL) {
2501             msg->setPointer("crypto", crypto.get());
2502         } else {
2503             msg->setPointer("descrambler", descrambler.get());
2504         }
2505         if (nextMetricsHandle != 0) {
2506             mediametrics_setInt32(nextMetricsHandle, kCodecCrypto, 1);
2507         }
2508     } else if (mFlags & kFlagIsSecure) {
2509         // We'll catch this later when we process the buffers.
2510         ALOGW("Crypto or descrambler should be given for secure codec");
2511     }
2512 
2513     if (mConfigureMsg != nullptr) {
2514         // if re-configuring, we have one of these from before.
2515         // Recover the space before we discard the old mConfigureMsg
2516         mediametrics_handle_t metricsHandle;
2517         if (mConfigureMsg->findInt64("metrics", &metricsHandle)) {
2518             mediametrics_delete(metricsHandle);
2519         }
2520     }
2521     msg->setInt64("metrics", nextMetricsHandle);
2522 
2523     // save msg for reset
2524     mConfigureMsg = msg;
2525 
2526     sp<AMessage> callback = mCallback;
2527 
2528     std::vector<MediaResourceParcel> resources;
2529     resources.push_back(MediaResource::CodecResource(mFlags & kFlagIsSecure,
2530             toMediaResourceSubType(mIsHardware, mDomain)));
2531     if (mDomain == DOMAIN_VIDEO || mDomain == DOMAIN_IMAGE) {
2532         // Don't know the buffer size at this point, but it's fine to use 1 because
2533         // the reclaimResource call doesn't consider the requester's buffer size for now.
2534         resources.push_back(MediaResource::GraphicMemoryResource(1));
2535     }
2536     for (int i = 0; i <= kMaxRetry; ++i) {
2537         sp<AMessage> response;
2538         err = PostAndAwaitResponse(msg, &response);
2539         if (err != OK && err != INVALID_OPERATION) {
2540             if (isResourceError(err) && !mResourceManagerProxy->reclaimResource(resources)) {
2541                 break;
2542             }
2543             // MediaCodec now sets its state to UNINITIALIZED upon any fatal error.
2544             // To maintain backward compatibility, do a reset() to put the codec
2545             // back into the INITIALIZED state.
2546             // But don't reset if the error is INVALID_OPERATION, which means
2547             // the configure failure is due to a wrong state.
2548 
2549             ALOGE("configure failed with err 0x%08x, resetting...", err);
2550             status_t err2 = reset();
2551             if (err2 != OK) {
2552                 ALOGE("retrying configure: failed to reset codec (%08x)", err2);
2553                 break;
2554             }
2555             if (callback != nullptr) {
2556                 err2 = setCallback(callback);
2557                 if (err2 != OK) {
2558                     ALOGE("retrying configure: failed to set callback (%08x)", err2);
2559                     break;
2560                 }
2561             }
2562         }
2563         if (!isResourceError(err)) {
2564             break;
2565         }
2566     }
2567 
2568     return err;
2569 }
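
#if 0
// Illustrative sketch (not part of the build): a minimal client-side use of the configure()
// entry point above. The mime type, dimensions, and the null surface/crypto arguments are
// placeholder assumptions, not canonical values.
static void exampleConfigureUsage() {
    sp<ALooper> looper = new ALooper;
    looper->start();
    status_t err = OK;
    sp<MediaCodec> codec = MediaCodec::CreateByType(looper, "video/avc", false /* encoder */, &err);
    if (codec == nullptr || err != OK) {
        return;
    }
    sp<AMessage> format = new AMessage;
    format->setString("mime", "video/avc");
    format->setInt32("width", 1280);
    format->setInt32("height", 720);
    // no output surface, no crypto, no special flags in this sketch
    err = codec->configure(format, nullptr /* surface */, nullptr /* crypto */, 0 /* flags */);
}
#endif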
2570 
2571 status_t MediaCodec::getRequiredResources(std::vector<InstanceResourceInfo>& resources) {
2572     resources.clear();
2573     // Make sure codec availability feature is on.
2574     if (!android::media::codec::codec_availability()) {
2575         return ERROR_UNSUPPORTED;
2576     }
2577     // Make sure that the codec was configured already.
2578     if (mState != CONFIGURED && mState != STARTING && mState != STARTED &&
2579         mState != FLUSHING && mState != FLUSHED) {
2580         ALOGE("Codec wasn't configured yet!");
2581         return INVALID_OPERATION;
2582     }
2583 
2584     if (!mRequiredResourceInfo.empty()) {
2585         resources = mRequiredResourceInfo;
2586         return OK;
2587     }
2588 
2589     // TODO: For now this is effectively a stub.
2590     // The actual implementation should use the component interface
2591     // (for example, through mCodec->getRequiredDeviceResources) to query
2592     // the required resources for this configuration, and fill in resources accordingly.
2593     return ERROR_UNSUPPORTED;
2594 }
2595 
2596 // Media Format Shaping support
2597 //
2598 
2599 static android::mediaformatshaper::FormatShaperOps_t *sShaperOps = NULL;
2600 static bool sIsHandheld = true;
2601 
2602 static bool connectFormatShaper() {
2603     static std::once_flag sCheckOnce;
2604 
2605     ALOGV("connectFormatShaper...");
2606 
2607     std::call_once(sCheckOnce, [&](){
2608 
2609         void *libHandle = NULL;
2610         nsecs_t loading_started = systemTime(SYSTEM_TIME_MONOTONIC);
2611 
2612         // prefer any copy in the mainline module
2613         //
2614         android_namespace_t *mediaNs = android_get_exported_namespace("com_android_media");
2615         AString libraryName = "libmediaformatshaper.so";
2616 
2617         if (mediaNs != NULL) {
2618             static const android_dlextinfo dlextinfo = {
2619                 .flags = ANDROID_DLEXT_USE_NAMESPACE,
2620                 .library_namespace = mediaNs,
2621             };
2622 
2623             AString libraryMainline = "/apex/com.android.media/";
2624 #if __LP64__
2625             libraryMainline.append("lib64/");
2626 #else
2627             libraryMainline.append("lib/");
2628 #endif
2629             libraryMainline.append(libraryName);
2630 
2631             libHandle = android_dlopen_ext(libraryMainline.c_str(), RTLD_NOW|RTLD_NODELETE,
2632                                                  &dlextinfo);
2633 
2634             if (libHandle != NULL) {
2635                 sShaperOps = (android::mediaformatshaper::FormatShaperOps_t*)
2636                                 dlsym(libHandle, "shaper_ops");
2637             } else {
2638                 ALOGW("connectFormatShaper: unable to load mainline formatshaper %s",
2639                       libraryMainline.c_str());
2640             }
2641         } else {
2642             ALOGV("connectFormatShaper: couldn't find media namespace.");
2643         }
2644 
2645         // fall back to the system partition, if present.
2646         //
2647         if (sShaperOps == NULL) {
2648 
2649             libHandle = dlopen(libraryName.c_str(), RTLD_NOW|RTLD_NODELETE);
2650 
2651             if (libHandle != NULL) {
2652                 sShaperOps = (android::mediaformatshaper::FormatShaperOps_t*)
2653                                 dlsym(libHandle, "shaper_ops");
2654             } else {
2655                 ALOGW("connectFormatShaper: unable to load formatshaper %s", libraryName.c_str());
2656             }
2657         }
2658 
2659         if (sShaperOps != nullptr
2660             && sShaperOps->version != android::mediaformatshaper::SHAPER_VERSION_V1) {
2661             ALOGW("connectFormatShaper: unhandled version ShaperOps: %d, DISABLED",
2662                   sShaperOps->version);
2663             sShaperOps = nullptr;
2664         }
2665 
2666         if (sShaperOps != nullptr) {
2667             ALOGV("connectFormatShaper: connected to library %s", libraryName.c_str());
2668         }
2669 
2670         nsecs_t loading_finished = systemTime(SYSTEM_TIME_MONOTONIC);
2671         ALOGV("connectFormatShaper: loaded libraries: %" PRId64 " us",
2672               (loading_finished - loading_started)/1000);
2673 
2674 
2675         // we also want to know whether this is a handheld device;
2676         // start with the assumption that the device is handheld.
2677         sIsHandheld = true;
2678         sp<IServiceManager> serviceMgr = defaultServiceManager();
2679         sp<content::pm::IPackageManagerNative> packageMgr;
2680         if (serviceMgr.get() != nullptr) {
2681             sp<IBinder> binder = serviceMgr->waitForService(String16("package_native"));
2682             packageMgr = interface_cast<content::pm::IPackageManagerNative>(binder);
2683         }
2684         // if we didn't get serviceMgr, we'll leave packageMgr as default null
2685         if (packageMgr != nullptr) {
2686 
2687             // MUST have these
2688             static const String16 featuresNeeded[] = {
2689                 String16("android.hardware.touchscreen")
2690             };
2691             // these must be present to be a handheld
2692             for (::android::String16 required : featuresNeeded) {
2693                 bool hasFeature = false;
2694                 binder::Status status = packageMgr->hasSystemFeature(required, 0, &hasFeature);
2695                 if (!status.isOk()) {
2696                     ALOGE("%s: hasSystemFeature failed: %s",
2697                         __func__, status.exceptionMessage().c_str());
2698                     continue;
2699                 }
2700                 ALOGV("feature %s says %d", String8(required).c_str(), hasFeature);
2701                 if (!hasFeature) {
2702                     ALOGV("... which means we are not handheld");
2703                     sIsHandheld = false;
2704                     break;
2705                 }
2706             }
2707 
2708             // MUST NOT have these
2709             static const String16 featuresDisallowed[] = {
2710                 String16("android.hardware.type.automotive"),
2711                 String16("android.hardware.type.television"),
2712                 String16("android.hardware.type.watch")
2713             };
2714             // any of these present -- we aren't a handheld
2715             for (::android::String16 forbidden : featuresDisallowed) {
2716                 bool hasFeature = false;
2717                 binder::Status status = packageMgr->hasSystemFeature(forbidden, 0, &hasFeature);
2718                 if (!status.isOk()) {
2719                     ALOGE("%s: hasSystemFeature failed: %s",
2720                         __func__, status.exceptionMessage().c_str());
2721                     continue;
2722                 }
2723                 ALOGV("feature %s says %d", String8(forbidden).c_str(), hasFeature);
2724                 if (hasFeature) {
2725                     ALOGV("... which means we are not handheld");
2726                     sIsHandheld = false;
2727                     break;
2728                 }
2729             }
2730         }
2731 
2732     });
2733 
2734     return true;
2735 }
2736 
2737 
2738 #if 0
2739 // a construct to force the above dlopen() to run very early.
2740 // goal: so the dlopen() doesn't happen on the critical path of latency-sensitive apps
2741 // failure of this means that cold start of those apps is slower by the time it takes to dlopen()
2742 // TODO(b/183454066): tradeoffs between memory of early loading vs latency of late loading
2743 //
2744 static bool forceEarlyLoadingShaper = connectFormatShaper();
2745 #endif
2746 
2747 // parse the codec's properties: mappings, whether it meets minimum quality, etc.,
2748 // and pass them into the video quality code
2749 //
2750 static void loadCodecProperties(mediaformatshaper::shaperHandle_t shaperHandle,
2751                                   sp<MediaCodecInfo> codecInfo, AString mediaType) {
2752 
2753     sp<MediaCodecInfo::Capabilities> capabilities =
2754                     codecInfo->getCapabilitiesFor(mediaType.c_str());
2755     if (capabilities == nullptr) {
2756         ALOGI("no capabilities as part of the codec?");
2757     } else {
2758         const sp<AMessage> &details = capabilities->getDetails();
2759         AString mapTarget;
2760         int count = details->countEntries();
2761         for(int ix = 0; ix < count; ix++) {
2762             AMessage::Type entryType;
2763             const char *mapSrc = details->getEntryNameAt(ix, &entryType);
2764             // XXX: re-use ix from getEntryAt() to avoid additional findXXX() invocation
2765             //
2766             static const char *featurePrefix = "feature-";
2767             static const int featurePrefixLen = strlen(featurePrefix);
2768             static const char *tuningPrefix = "tuning-";
2769             static const int tuningPrefixLen = strlen(tuningPrefix);
2770             static const char *mappingPrefix = "mapping-";
2771             static const int mappingPrefixLen = strlen(mappingPrefix);
2772 
2773             if (mapSrc == NULL) {
2774                 continue;
2775             } else if (!strncmp(mapSrc, featurePrefix, featurePrefixLen)) {
2776                 int32_t intValue;
2777                 if (details->findInt32(mapSrc, &intValue)) {
2778                     ALOGV("-- feature '%s' -> %d", mapSrc, intValue);
2779                     (void)(sShaperOps->setFeature)(shaperHandle, &mapSrc[featurePrefixLen],
2780                                                    intValue);
2781                 }
2782                 continue;
2783             } else if (!strncmp(mapSrc, tuningPrefix, tuningPrefixLen)) {
2784                 AString value;
2785                 if (details->findString(mapSrc, &value)) {
2786                     ALOGV("-- tuning '%s' -> '%s'", mapSrc, value.c_str());
2787                     (void)(sShaperOps->setTuning)(shaperHandle, &mapSrc[tuningPrefixLen],
2788                                                    value.c_str());
2789                 }
2790                 continue;
2791             } else if (!strncmp(mapSrc, mappingPrefix, mappingPrefixLen)) {
2792                 AString target;
2793                 if (details->findString(mapSrc, &target)) {
2794                     ALOGV("-- mapping %s: map %s to %s", mapSrc, &mapSrc[mappingPrefixLen],
2795                           target.c_str());
2796                     // key is really "kind-key"
2797                     // separate that, so setMap() sees the triple (kind, key, value)
2798                     const char *kind = &mapSrc[mappingPrefixLen];
2799                     const char *sep = strchr(kind, '-');
2800                     if (sep != NULL) {
2801                         const char *key = sep + 1;
2802                         std::string xkind = std::string(kind, sep - kind);
2803                         (void)(sShaperOps->setMap)(shaperHandle, xkind.c_str(),
2804                                                    key, target.c_str());
2805                     }
2806                 }
2807             }
2808         }
2809     }
2810 
2811     // we also carry in the codec description whether we are on a handheld device.
2812     // this info is eventually used by both the Codec and the C2 machinery to inform
2813     // the underlying codec whether to do any shaping.
2814     //
2815     if (sIsHandheld) {
2816         // set only if we are indeed a handheld device (or, in the future, 'any eligible device');
2817         // it is missing on devices that aren't eligible for minimum quality enforcement.
2818         (void)(sShaperOps->setFeature)(shaperHandle, "_vq_eligible.device", 1);
2819         // strictly speaking, it's a tuning, but tunings are strings while features store ints
2820         (void)(sShaperOps->setFeature)(shaperHandle, "_quality.target", 1 /* S_HANDHELD */);
2821     }
2822 }
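
#if 0
// Illustrative sketch (not part of the build): the decomposition performed for "mapping-"
// capability entries in loadCodecProperties() above, shown with a hypothetical entry name;
// real keys come from the codec's capability details.
static void exampleMappingDecomposition() {
    const char *mapSrc = "mapping-tuning-bitrate";      // hypothetical detail entry name
    const char *kind = mapSrc + strlen("mapping-");     // "tuning-bitrate"
    const char *sep = strchr(kind, '-');                // points at "-bitrate"
    if (sep != NULL) {
        std::string xkind(kind, sep - kind);            // kind = "tuning"
        const char *key = sep + 1;                      // key  = "bitrate"
        // setMap() would then receive the triple ("tuning", "bitrate", <target key>)
        (void)xkind;
        (void)key;
    }
}
#endif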
2823 
2824 status_t MediaCodec::setupFormatShaper(AString mediaType) {
2825     ALOGV("setupFormatShaper: initializing shaper data for codec %s mediaType %s",
2826           mComponentName.c_str(), mediaType.c_str());
2827 
2828     nsecs_t mapping_started = systemTime(SYSTEM_TIME_MONOTONIC);
2829 
2830     // someone might have beaten us to it.
2831     mediaformatshaper::shaperHandle_t shaperHandle;
2832     shaperHandle = sShaperOps->findShaper(mComponentName.c_str(), mediaType.c_str());
2833     if (shaperHandle != nullptr) {
2834         ALOGV("shaperhandle %p -- no initialization needed", shaperHandle);
2835         return OK;
2836     }
2837 
2838     // we get to build & register one
2839     shaperHandle = sShaperOps->createShaper(mComponentName.c_str(), mediaType.c_str());
2840     if (shaperHandle == nullptr) {
2841         ALOGW("unable to create a shaper for codec %s mediaType %s",
2842               mComponentName.c_str(), mediaType.c_str());
2843         return OK;
2844     }
2845 
2846     (void) loadCodecProperties(shaperHandle, mCodecInfo, mediaType);
2847 
2848     shaperHandle = sShaperOps->registerShaper(shaperHandle,
2849                                               mComponentName.c_str(), mediaType.c_str());
2850 
2851     nsecs_t mapping_finished = systemTime(SYSTEM_TIME_MONOTONIC);
2852     ALOGV("setupFormatShaper: populated shaper node for codec %s: %" PRId64 " us",
2853           mComponentName.c_str(), (mapping_finished - mapping_started)/1000);
2854 
2855     return OK;
2856 }
2857 
2858 
2859 // Format Shaping
2860 //      Mapping and Manipulation of encoding parameters
2861 //
2862 //      All of these decisions are pushed into the shaper instead of here within MediaCodec.
2863 //      This includes decisions based on whether the codec implements minimum quality bars
2864 //      itself or needs to be shaped outside of the codec.
2865 //      This keeps all those decisions in one place.
2866 //      It also means that we push some extra decision information into the shaper (is this a
2867 //      handheld device or one that is otherwise eligible for minimum quality manipulation,
2868 //      which generational quality target is in force, etc.).  This allows those values to be
2869 //      cached in the per-codec structures, which are built once per process instead of for
2870 //      each codec instantiation.
2871 //
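//      In condensed form, the flow implemented by shapeMediaFormat() below is:
//        connectFormatShaper()             -- dlopen the shaper library once per process
//        findShaper()/setupFormatShaper()  -- locate or build the per-(codec, mediaType) node
//        shapeFormat()                     -- let the shaper rewrite an NDK copy of the format
//        changesFrom()/extend()            -- fold only the resulting deltas back into the format
//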
2872 
2873 status_t MediaCodec::shapeMediaFormat(
2874             const sp<AMessage> &format,
2875             uint32_t flags,
2876             mediametrics_handle_t metricsHandle) {
2877     ALOGV("shapeMediaFormat entry");
2878 
2879     if (!(flags & CONFIGURE_FLAG_ENCODE)) {
2880         ALOGW("shapeMediaFormat: not encoder");
2881         return OK;
2882     }
2883     if (mCodecInfo == NULL) {
2884         ALOGW("shapeMediaFormat: no codecinfo");
2885         return OK;
2886     }
2887 
2888     AString mediaType;
2889     if (!format->findString("mime", &mediaType)) {
2890         ALOGW("shapeMediaFormat: no mediaType information");
2891         return OK;
2892     }
2893 
2894     // make sure we have the function entry points for the shaper library
2895     //
2896 
2897     connectFormatShaper();
2898     if (sShaperOps == nullptr) {
2899         ALOGW("shapeMediaFormat: no MediaFormatShaper hooks available");
2900         return OK;
2901     }
2902 
2903     // find the shaper information for this codec+mediaType pair
2904     //
2905     mediaformatshaper::shaperHandle_t shaperHandle;
2906     shaperHandle = sShaperOps->findShaper(mComponentName.c_str(), mediaType.c_str());
2907     if (shaperHandle == nullptr)  {
2908         setupFormatShaper(mediaType);
2909         shaperHandle = sShaperOps->findShaper(mComponentName.c_str(), mediaType.c_str());
2910     }
2911     if (shaperHandle == nullptr) {
2912         ALOGW("shapeMediaFormat: no handler for codec %s mediatype %s",
2913               mComponentName.c_str(), mediaType.c_str());
2914         return OK;
2915     }
2916 
2917     // run the shaper
2918     //
2919 
2920     ALOGV("Shaping input: %s", format->debugString(0).c_str());
2921 
2922     sp<AMessage> updatedFormat = format->dup();
2923     AMediaFormat *updatedNdkFormat = AMediaFormat_fromMsg(&updatedFormat);
2924 
2925     int result = (*sShaperOps->shapeFormat)(shaperHandle, updatedNdkFormat, flags);
2926     if (result == 0) {
2927         AMediaFormat_getFormat(updatedNdkFormat, &updatedFormat);
2928 
2929         sp<AMessage> deltas = updatedFormat->changesFrom(format, false /* deep */);
2930         size_t changeCount = deltas->countEntries();
2931         ALOGD("shapeMediaFormat: deltas(%zu): %s", changeCount, deltas->debugString(2).c_str());
2932         if (metricsHandle != 0) {
2933             mediametrics_setInt32(metricsHandle, kCodecShapingEnhanced, changeCount);
2934         }
2935         if (changeCount > 0) {
2936             if (metricsHandle != 0) {
2937                 // save some old properties before we fold in the new ones
2938                 int32_t bitrate;
2939                 if (format->findInt32(KEY_BIT_RATE, &bitrate)) {
2940                     mediametrics_setInt32(metricsHandle, kCodecOriginalBitrate, bitrate);
2941                 }
2942                 int32_t qpIMin = -1;
2943                 if (format->findInt32("original-video-qp-i-min", &qpIMin)) {
2944                     mediametrics_setInt32(metricsHandle, kCodecOriginalVideoQPIMin, qpIMin);
2945                 }
2946                 int32_t qpIMax = -1;
2947                 if (format->findInt32("original-video-qp-i-max", &qpIMax)) {
2948                     mediametrics_setInt32(metricsHandle, kCodecOriginalVideoQPIMax, qpIMax);
2949                 }
2950                 int32_t qpPMin = -1;
2951                 if (format->findInt32("original-video-qp-p-min", &qpPMin)) {
2952                     mediametrics_setInt32(metricsHandle, kCodecOriginalVideoQPPMin, qpPMin);
2953                 }
2954                 int32_t qpPMax = -1;
2955                 if (format->findInt32("original-video-qp-p-max", &qpPMax)) {
2956                     mediametrics_setInt32(metricsHandle, kCodecOriginalVideoQPPMax, qpPMax);
2957                 }
2958                 int32_t qpBMin = -1;
2959                 if (format->findInt32("original-video-qp-b-min", &qpBMin)) {
2960                     mediametrics_setInt32(metricsHandle, kCodecOriginalVideoQPBMin, qpBMin);
2961                 }
2962                 int32_t qpBMax = -1;
2963                 if (format->findInt32("original-video-qp-b-max", &qpBMax)) {
2964                     mediametrics_setInt32(metricsHandle, kCodecOriginalVideoQPBMax, qpBMax);
2965                 }
2966             }
2967             // NB: for any field in both format and deltas, the deltas copy wins
2968             format->extend(deltas);
2969         }
2970     }
2971 
2972     AMediaFormat_delete(updatedNdkFormat);
2973     return OK;
2974 }
2975 
2976 static void mapFormat(AString componentName, const sp<AMessage> &format, const char *kind,
2977                       bool reverse) {
2978     AString mediaType;
2979     if (!format->findString("mime", &mediaType)) {
2980         ALOGV("mapFormat: no mediaType information");
2981         return;
2982     }
2983     ALOGV("mapFormat: codec %s mediatype %s kind %s reverse %d", componentName.c_str(),
2984           mediaType.c_str(), kind ? kind : "<all>", reverse);
2985 
2986     // make sure we have the function entry points for the shaper library
2987     //
2988 
2989 #if 0
2990     // let's play the faster "only do mapping if we've already loaded the library" approach
2991     connectFormatShaper();
2992 #endif
2993     if (sShaperOps == nullptr) {
2994         ALOGV("mapFormat: no MediaFormatShaper hooks available");
2995         return;
2996     }
2997 
2998     // find the shaper information for this codec+mediaType pair
2999     //
3000     mediaformatshaper::shaperHandle_t shaperHandle;
3001     shaperHandle = sShaperOps->findShaper(componentName.c_str(), mediaType.c_str());
3002     if (shaperHandle == nullptr) {
3003         ALOGV("mapFormat: no shaper handle");
3004         return;
3005     }
3006 
3007     const char **mappings;
3008     if (reverse)
3009         mappings = sShaperOps->getReverseMappings(shaperHandle, kind);
3010     else
3011         mappings = sShaperOps->getMappings(shaperHandle, kind);
3012 
3013     if (mappings == nullptr) {
3014         ALOGV("no mappings returned");
3015         return;
3016     }
3017 
3018     ALOGV("Pre-mapping: %s",  format->debugString(2).c_str());
3019     // do the mapping
3020     //
3021     int entries = format->countEntries();
3022     for (int i = 0; ; i += 2) {
3023         if (mappings[i] == nullptr) {
3024             break;
3025         }
3026 
3027         size_t ix = format->findEntryByName(mappings[i]);
3028         if (ix < entries) {
3029             ALOGV("map '%s' to '%s'", mappings[i], mappings[i+1]);
3030             status_t status = format->setEntryNameAt(ix, mappings[i+1]);
3031             if (status != OK) {
3032                 ALOGW("Unable to map from '%s' to '%s': status %d",
3033                       mappings[i], mappings[i+1], status);
3034             }
3035         }
3036     }
3037     ALOGV("Post-mapping: %s",  format->debugString(2).c_str());
3038 
3039 
3040     // reclaim the mapping memory
3041     for (int i = 0; ; i += 2) {
3042         if (mappings[i] == nullptr) {
3043             break;
3044         }
3045         free((void*)mappings[i]);
3046         free((void*)mappings[i + 1]);
3047     }
3048     free(mappings);
3049     mappings = nullptr;
3050 }
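
// Note (keys are hypothetical): as consumed above, getMappings()/getReverseMappings() return
// a heap-allocated, flat array of (from-key, to-key) C-string pairs terminated by a null
// entry, for example:
//   { "vendor.qp-i-min", "video-qp-i-min", "vendor.qp-i-max", "video-qp-i-max", nullptr }
// mapFormat() renames matching AMessage entries and then frees the strings and the array.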
3051 
3052 //
3053 // end of Format Shaping hooks within MediaCodec
3054 //
3055 
3056 status_t MediaCodec::releaseCrypto()
3057 {
3058     ALOGV("releaseCrypto");
3059 
3060     sp<AMessage> msg = new AMessage(kWhatDrmReleaseCrypto, this);
3061 
3062     sp<AMessage> response;
3063     status_t status = msg->postAndAwaitResponse(&response);
3064 
3065     if (status == OK && response != NULL) {
3066         CHECK(response->findInt32("status", &status));
3067         ALOGV("releaseCrypto ret: %d ", status);
3068     }
3069     else {
3070         ALOGE("releaseCrypto err: %d", status);
3071     }
3072 
3073     return status;
3074 }
3075 
3076 void MediaCodec::onReleaseCrypto(const sp<AMessage>& msg)
3077 {
3078     status_t status = INVALID_OPERATION;
3079     if (mCrypto != NULL) {
3080         ALOGV("onReleaseCrypto: mCrypto: %p (%d)", mCrypto.get(), mCrypto->getStrongCount());
3081         mBufferChannel->setCrypto(NULL);
3082         // TODO change to ALOGV
3083         ALOGD("onReleaseCrypto: [before clear]  mCrypto: %p (%d)",
3084                 mCrypto.get(), mCrypto->getStrongCount());
3085         mCrypto.clear();
3086 
3087         status = OK;
3088     }
3089     else {
3090         ALOGW("onReleaseCrypto: No mCrypto. err: %d", status);
3091     }
3092 
3093     sp<AMessage> response = new AMessage;
3094     response->setInt32("status", status);
3095 
3096     sp<AReplyToken> replyID;
3097     CHECK(msg->senderAwaitsResponse(&replyID));
3098     response->postReply(replyID);
3099 }
3100 
3101 status_t MediaCodec::setInputSurface(
3102         const sp<PersistentSurface> &surface) {
3103     sp<AMessage> msg = new AMessage(kWhatSetInputSurface, this);
3104     msg->setObject("input-surface", surface.get());
3105 
3106     sp<AMessage> response;
3107     return PostAndAwaitResponse(msg, &response);
3108 }
3109 
3110 status_t MediaCodec::detachOutputSurface() {
3111     sp<AMessage> msg = new AMessage(kWhatDetachSurface, this);
3112 
3113     sp<AMessage> response;
3114     return PostAndAwaitResponse(msg, &response);
3115 }
3116 
3117 status_t MediaCodec::setSurface(const sp<Surface> &surface) {
3118     sp<AMessage> msg = new AMessage(kWhatSetSurface, this);
3119     msg->setObject("surface", surface);
3120 
3121     sp<AMessage> response;
3122     return PostAndAwaitResponse(msg, &response);
3123 }
3124 
3125 status_t MediaCodec::createInputSurface(
3126         sp<IGraphicBufferProducer>* bufferProducer) {
3127     sp<AMessage> msg = new AMessage(kWhatCreateInputSurface, this);
3128 
3129     sp<AMessage> response;
3130     status_t err = PostAndAwaitResponse(msg, &response);
3131     if (err == NO_ERROR) {
3132         // unwrap the sp<IGraphicBufferProducer>
3133         sp<RefBase> obj;
3134         bool found = response->findObject("input-surface", &obj);
3135         CHECK(found);
3136         sp<BufferProducerWrapper> wrapper(
3137                 static_cast<BufferProducerWrapper*>(obj.get()));
3138         *bufferProducer = wrapper->getBufferProducer();
3139     } else {
3140         ALOGW("createInputSurface failed, err=%d", err);
3141     }
3142     return err;
3143 }
3144 
3145 uint64_t MediaCodec::getGraphicBufferSize() {
3146     if (mDomain != DOMAIN_VIDEO && mDomain != DOMAIN_IMAGE) {
3147         return 0;
3148     }
3149 
3150     uint64_t size = 0;
3151     size_t portNum = sizeof(mPortBuffers) / sizeof((mPortBuffers)[0]);
3152     for (size_t i = 0; i < portNum; ++i) {
3153         // TODO: this is just an estimate; we should get the real buffer size from ACodec.
3154         size += mPortBuffers[i].size() * mWidth * mHeight * 3 / 2;
3155     }
3156     return size;
3157 }
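
// Note (illustrative values): the estimate above assumes YUV420, i.e. width * height * 3 / 2
// bytes per buffer. For a hypothetical 1920x1080 session with 8 input and 8 output buffers,
// that is 16 * 3,110,400 bytes, or roughly 47.5 MiB.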
3158 
3159 status_t MediaCodec::start() {
3160     sp<AMessage> msg = new AMessage(kWhatStart, this);
3161 
3162     sp<AMessage> callback;
3163 
3164     status_t err;
3165     std::vector<MediaResourceParcel> resources;
3166     resources.push_back(MediaResource::CodecResource(mFlags & kFlagIsSecure,
3167             toMediaResourceSubType(mIsHardware, mDomain)));
3168     if (mDomain == DOMAIN_VIDEO || mDomain == DOMAIN_IMAGE) {
3169         // Don't know the buffer size at this point, but it's fine to use 1 because
3170         // the reclaimResource call doesn't consider the requester's buffer size for now.
3171         resources.push_back(MediaResource::GraphicMemoryResource(1));
3172     }
3173     for (int i = 0; i <= kMaxRetry; ++i) {
3174         if (i > 0) {
3175             // Don't try to reclaim resource for the first time.
3176             if (!mResourceManagerProxy->reclaimResource(resources)) {
3177                 break;
3178             }
3179             // Recover codec from previous error before retry start.
3180             err = reset();
3181             if (err != OK) {
3182                 ALOGE("retrying start: failed to reset codec");
3183                 break;
3184             }
3185             if (callback != nullptr) {
3186                 err = setCallback(callback);
3187                 if (err != OK) {
3188                     ALOGE("retrying start: failed to set callback");
3189                     break;
3190                 }
3191                 ALOGD("succeeded to set callback for reclaim");
3192             }
3193             sp<AMessage> response;
3194             err = PostAndAwaitResponse(mConfigureMsg, &response);
3195             if (err != OK) {
3196                 ALOGE("retrying start: failed to configure codec");
3197                 break;
3198             }
3199         }
3200 
3201         // Keep callback message after the first iteration if necessary.
3202         if (i == 0 && mCallback != nullptr && mFlags & kFlagIsAsync) {
3203             callback = mCallback;
3204             ALOGD("keep callback message for reclaim");
3205         }
3206 
3207         sp<AMessage> response;
3208         err = PostAndAwaitResponse(msg, &response);
3209         if (!isResourceError(err)) {
3210             break;
3211         }
3212     }
3213     return err;
3214 }
3215 
3216 status_t MediaCodec::stop() {
3217     sp<AMessage> msg = new AMessage(kWhatStop, this);
3218 
3219     sp<AMessage> response;
3220     return PostAndAwaitResponse(msg, &response);
3221 }
3222 
3223 bool MediaCodec::hasPendingBuffer(int portIndex) {
3224     return std::any_of(
3225             mPortBuffers[portIndex].begin(), mPortBuffers[portIndex].end(),
3226             [](const BufferInfo &info) { return info.mOwnedByClient; });
3227 }
3228 
3229 bool MediaCodec::hasPendingBuffer() {
3230     return hasPendingBuffer(kPortIndexInput) || hasPendingBuffer(kPortIndexOutput);
3231 }
3232 
3233 status_t MediaCodec::reclaim(bool force) {
3234     ALOGD("MediaCodec::reclaim(%p) %s", this, mInitName.c_str());
3235     sp<AMessage> msg = new AMessage(kWhatRelease, this);
3236     msg->setInt32("reclaimed", 1);
3237     msg->setInt32("force", force ? 1 : 0);
3238 
3239     sp<AMessage> response;
3240     status_t ret = PostAndAwaitResponse(msg, &response);
3241     if (ret == -ENOENT) {
3242         ALOGD("MediaCodec looper is gone, skip reclaim");
3243         ret = OK;
3244     }
3245     return ret;
3246 }
3247 
3248 status_t MediaCodec::release() {
3249     sp<AMessage> msg = new AMessage(kWhatRelease, this);
3250     sp<AMessage> response;
3251     return PostAndAwaitResponse(msg, &response);
3252 }
3253 
3254 status_t MediaCodec::releaseAsync(const sp<AMessage> &notify) {
3255     sp<AMessage> msg = new AMessage(kWhatRelease, this);
3256     msg->setMessage("async", notify);
3257     sp<AMessage> response;
3258     return PostAndAwaitResponse(msg, &response);
3259 }
3260 
3261 status_t MediaCodec::reset() {
3262     /* When the external-facing MediaCodec object is created,
3263        it is already initialized.  Thus, reset is essentially
3264        release() followed by init(), plus clearing the state. */
3265 
3266     status_t err = release();
3267 
3268     // unregister handlers
3269     if (mCodec != NULL) {
3270         if (mCodecLooper != NULL) {
3271             mCodecLooper->unregisterHandler(mCodec->id());
3272         } else {
3273             mLooper->unregisterHandler(mCodec->id());
3274         }
3275         mCodec = NULL;
3276     }
3277     mLooper->unregisterHandler(id());
3278 
3279     mFlags = 0;    // clear all flags
3280     mStickyError = OK;
3281 
3282     // reset state not reset by setState(UNINITIALIZED)
3283     mDequeueInputReplyID = 0;
3284     mDequeueOutputReplyID = 0;
3285     mDequeueInputTimeoutGeneration = 0;
3286     mDequeueOutputTimeoutGeneration = 0;
3287     mHaveInputSurface = false;
3288 
3289     if (err == OK) {
3290         err = init(mInitName);
3291     }
3292     return err;
3293 }
3294 
3295 status_t MediaCodec::queueInputBuffer(
3296         size_t index,
3297         size_t offset,
3298         size_t size,
3299         int64_t presentationTimeUs,
3300         uint32_t flags,
3301         AString *errorDetailMsg) {
3302     if (errorDetailMsg != NULL) {
3303         errorDetailMsg->clear();
3304     }
3305 
3306     sp<AMessage> msg = new AMessage(kWhatQueueInputBuffer, this);
3307     msg->setSize("index", index);
3308     msg->setSize("offset", offset);
3309     msg->setSize("size", size);
3310     msg->setInt64("timeUs", presentationTimeUs);
3311     msg->setInt32("flags", flags);
3312     msg->setPointer("errorDetailMsg", errorDetailMsg);
3313     sp<AMessage> response;
3314     return PostAndAwaitResponse(msg, &response);
3315 }
3316 
3317 status_t MediaCodec::queueInputBuffers(
3318         size_t index,
3319         size_t offset,
3320         size_t size,
3321         const sp<BufferInfosWrapper> &infos,
3322         AString *errorDetailMsg) {
3323     sp<AMessage> msg = new AMessage(kWhatQueueInputBuffer, this);
3324     uint32_t bufferFlags = 0;
3325     uint32_t flagsinAllAU = BUFFER_FLAG_DECODE_ONLY | BUFFER_FLAG_CODECCONFIG;
3326     uint32_t andFlags = flagsinAllAU;
3327     if (infos == nullptr || infos->value.empty()) {
3328         ALOGE("ERROR: Large Audio frame with no BufferInfo");
3329         return BAD_VALUE;
3330     }
3331     int infoIdx = 0;
3332     std::vector<AccessUnitInfo> &accessUnitInfo = infos->value;
3333     int64_t minTimeUs = accessUnitInfo.front().mTimestamp;
3334     bool foundEndOfStream = false;
3335     for ( ; infoIdx < accessUnitInfo.size() && !foundEndOfStream; ++infoIdx) {
3336         bufferFlags |= accessUnitInfo[infoIdx].mFlags;
3337         andFlags &= accessUnitInfo[infoIdx].mFlags;
3338         if (bufferFlags & BUFFER_FLAG_END_OF_STREAM) {
3339             foundEndOfStream = true;
3340         }
3341     }
3342     bufferFlags = bufferFlags & (andFlags | (~flagsinAllAU));
3343     if (infoIdx != accessUnitInfo.size()) {
3344         ALOGE("queueInputBuffers has incorrect access-units");
3345         return -EINVAL;
3346     }
3347     msg->setSize("index", index);
3348     msg->setSize("offset", offset);
3349     msg->setSize("size", size);
3350     msg->setInt64("timeUs", minTimeUs);
3351     // Make this represent the flags for the entire buffer;
3352     // the decodeOnly flag is set only when all access units are decodeOnly
3353     msg->setInt32("flags", bufferFlags);
3354     msg->setObject("accessUnitInfo", infos);
3355     msg->setPointer("errorDetailMsg", errorDetailMsg);
3356     sp<AMessage> response;
3357     return PostAndAwaitResponse(msg, &response);
3358 }
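
// Worked example (flag values are hypothetical): for the per-access-unit flag aggregation
// above (the same scheme is used in queueSecureInputBuffers() below), three access units
// flagged {DECODE_ONLY, 0, DECODE_ONLY | END_OF_STREAM} yield:
//   OR of all flags  = DECODE_ONLY | END_OF_STREAM
//   AND of all flags = 0                      (not every access unit is decode-only)
//   result = (OR) & (AND | ~(DECODE_ONLY | CODECCONFIG)) = END_OF_STREAM
// i.e. DECODE_ONLY and CODECCONFIG survive only when every access unit carries them, while
// flags such as END_OF_STREAM survive if any access unit carries them.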
3359 
3360 status_t MediaCodec::queueSecureInputBuffer(
3361         size_t index,
3362         size_t offset,
3363         const CryptoPlugin::SubSample *subSamples,
3364         size_t numSubSamples,
3365         const uint8_t key[16],
3366         const uint8_t iv[16],
3367         CryptoPlugin::Mode mode,
3368         const CryptoPlugin::Pattern &pattern,
3369         int64_t presentationTimeUs,
3370         uint32_t flags,
3371         AString *errorDetailMsg) {
3372     if (errorDetailMsg != NULL) {
3373         errorDetailMsg->clear();
3374     }
3375 
3376     sp<AMessage> msg = new AMessage(kWhatQueueInputBuffer, this);
3377     msg->setSize("index", index);
3378     msg->setSize("offset", offset);
3379     msg->setPointer("subSamples", (void *)subSamples);
3380     msg->setSize("numSubSamples", numSubSamples);
3381     msg->setPointer("key", (void *)key);
3382     msg->setPointer("iv", (void *)iv);
3383     msg->setInt32("mode", mode);
3384     msg->setInt32("encryptBlocks", pattern.mEncryptBlocks);
3385     msg->setInt32("skipBlocks", pattern.mSkipBlocks);
3386     msg->setInt64("timeUs", presentationTimeUs);
3387     msg->setInt32("flags", flags);
3388     msg->setPointer("errorDetailMsg", errorDetailMsg);
3389 
3390     sp<AMessage> response;
3391     status_t err = PostAndAwaitResponse(msg, &response);
3392 
3393     return err;
3394 }
3395 
3396 status_t MediaCodec::queueSecureInputBuffers(
3397         size_t index,
3398         size_t offset,
3399         size_t size,
3400         const sp<BufferInfosWrapper> &auInfo,
3401         const sp<CryptoInfosWrapper> &cryptoInfos,
3402         AString *errorDetailMsg) {
3403     if (errorDetailMsg != NULL) {
3404         errorDetailMsg->clear();
3405     }
3406     sp<AMessage> msg = new AMessage(kWhatQueueInputBuffer, this);
3407     uint32_t bufferFlags = 0;
3408     uint32_t flagsinAllAU = BUFFER_FLAG_DECODE_ONLY | BUFFER_FLAG_CODECCONFIG;
3409     uint32_t andFlags = flagsinAllAU;
3410     if (auInfo == nullptr
3411             || auInfo->value.empty()
3412             || cryptoInfos == nullptr
3413             || cryptoInfos->value.empty()) {
3414         ALOGE("ERROR: Large Audio frame with no BufferInfo/CryptoInfo");
3415         return BAD_VALUE;
3416     }
3417     int infoIdx = 0;
3418     std::vector<AccessUnitInfo> &accessUnitInfo = auInfo->value;
3419     int64_t minTimeUs = accessUnitInfo.front().mTimestamp;
3420     bool foundEndOfStream = false;
3421     for ( ; infoIdx < accessUnitInfo.size() && !foundEndOfStream; ++infoIdx) {
3422         bufferFlags |= accessUnitInfo[infoIdx].mFlags;
3423         andFlags &= accessUnitInfo[infoIdx].mFlags;
3424         if (bufferFlags & BUFFER_FLAG_END_OF_STREAM) {
3425             foundEndOfStream = true;
3426         }
3427     }
3428     bufferFlags = bufferFlags & (andFlags | (~flagsinAllAU));
3429     if (infoIdx != accessUnitInfo.size()) {
3430         ALOGE("queueInputBuffers has incorrect access-units");
3431         return -EINVAL;
3432     }
3433     msg->setSize("index", index);
3434     msg->setSize("offset", offset);
3435     msg->setSize("ssize", size);
3436     msg->setInt64("timeUs", minTimeUs);
3437     msg->setInt32("flags", bufferFlags);
3438     msg->setObject("accessUnitInfo", auInfo);
3439     msg->setObject("cryptoInfos", cryptoInfos);
3440     msg->setPointer("errorDetailMsg", errorDetailMsg);
3441 
3442     sp<AMessage> response;
3443     status_t err = PostAndAwaitResponse(msg, &response);
3444 
3445     return err;
3446 }
3447 
3448 status_t MediaCodec::queueBuffer(
3449         size_t index,
3450         const std::shared_ptr<C2Buffer> &buffer,
3451         const sp<BufferInfosWrapper> &bufferInfos,
3452         const sp<AMessage> &tunings,
3453         AString *errorDetailMsg) {
3454     if (errorDetailMsg != NULL) {
3455         errorDetailMsg->clear();
3456     }
3457     if (bufferInfos == nullptr || bufferInfos->value.empty()) {
3458         return BAD_VALUE;
3459     }
3460     status_t err = OK;
3461     sp<AMessage> msg = new AMessage(kWhatQueueInputBuffer, this);
3462     msg->setSize("index", index);
3463     sp<WrapperObject<std::shared_ptr<C2Buffer>>> obj{
3464         new WrapperObject<std::shared_ptr<C2Buffer>>{buffer}};
3465     msg->setObject("c2buffer", obj);
3466     if (OK != (err = generateFlagsFromAccessUnitInfo(msg, bufferInfos))) {
3467         return err;
3468     }
3469     msg->setObject("accessUnitInfo", bufferInfos);
3470     if (tunings && tunings->countEntries() > 0) {
3471         msg->setMessage("tunings", tunings);
3472     }
3473     msg->setPointer("errorDetailMsg", errorDetailMsg);
3474     sp<AMessage> response;
3475     err = PostAndAwaitResponse(msg, &response);
3476 
3477     return err;
3478 }
3479 
3480 status_t MediaCodec::queueEncryptedBuffer(
3481         size_t index,
3482         const sp<hardware::HidlMemory> &buffer,
3483         size_t offset,
3484         size_t size,
3485         const sp<BufferInfosWrapper> &bufferInfos,
3486         const sp<CryptoInfosWrapper> &cryptoInfos,
3487         const sp<AMessage> &tunings,
3488         AString *errorDetailMsg) {
3489     if (errorDetailMsg != NULL) {
3490         errorDetailMsg->clear();
3491     }
3492     if (bufferInfos == nullptr || bufferInfos->value.empty()) {
3493         return BAD_VALUE;
3494     }
3495     status_t err = OK;
3496     sp<AMessage> msg = new AMessage(kWhatQueueInputBuffer, this);
3497     msg->setSize("index", index);
3498     sp<WrapperObject<sp<hardware::HidlMemory>>> memory{
3499         new WrapperObject<sp<hardware::HidlMemory>>{buffer}};
3500     msg->setObject("memory", memory);
3501     msg->setSize("offset", offset);
3502     if (cryptoInfos != nullptr) {
3503         msg->setSize("ssize", size);
3504         msg->setObject("cryptoInfos", cryptoInfos);
3505     } else {
3506         msg->setSize("size", size);
3507     }
3508     msg->setObject("accessUnitInfo", bufferInfos);
3509     if (OK != (err = generateFlagsFromAccessUnitInfo(msg, bufferInfos))) {
3510         return err;
3511     }
3512     if (tunings && tunings->countEntries() > 0) {
3513         msg->setMessage("tunings", tunings);
3514     }
3515     msg->setPointer("errorDetailMsg", errorDetailMsg);
3516 
3517     sp<AMessage> response;
3518     err = PostAndAwaitResponse(msg, &response);
3519 
3520     return err;
3521 }
3522 
3523 status_t MediaCodec::dequeueInputBuffer(size_t *index, int64_t timeoutUs) {
3524     sp<AMessage> msg = new AMessage(kWhatDequeueInputBuffer, this);
3525     msg->setInt64("timeoutUs", timeoutUs);
3526 
3527     sp<AMessage> response;
3528     status_t err;
3529     if ((err = PostAndAwaitResponse(msg, &response)) != OK) {
3530         return err;
3531     }
3532 
3533     CHECK(response->findSize("index", index));
3534 
3535     return OK;
3536 }
3537 
3538 status_t MediaCodec::dequeueOutputBuffer(
3539         size_t *index,
3540         size_t *offset,
3541         size_t *size,
3542         int64_t *presentationTimeUs,
3543         uint32_t *flags,
3544         int64_t timeoutUs) {
3545     sp<AMessage> msg = new AMessage(kWhatDequeueOutputBuffer, this);
3546     msg->setInt64("timeoutUs", timeoutUs);
3547 
3548     sp<AMessage> response;
3549     status_t err;
3550     if ((err = PostAndAwaitResponse(msg, &response)) != OK) {
3551         return err;
3552     }
3553 
3554     CHECK(response->findSize("index", index));
3555     CHECK(response->findSize("offset", offset));
3556     CHECK(response->findSize("size", size));
3557     CHECK(response->findInt64("timeUs", presentationTimeUs));
3558     CHECK(response->findInt32("flags", (int32_t *)flags));
3559 
3560     return OK;
3561 }
3562 
3563 status_t MediaCodec::renderOutputBufferAndRelease(size_t index) {
3564     sp<AMessage> msg = new AMessage(kWhatReleaseOutputBuffer, this);
3565     msg->setSize("index", index);
3566     msg->setInt32("render", true);
3567 
3568     sp<AMessage> response;
3569     return PostAndAwaitResponse(msg, &response);
3570 }
3571 
3572 status_t MediaCodec::renderOutputBufferAndRelease(size_t index, int64_t timestampNs) {
3573     sp<AMessage> msg = new AMessage(kWhatReleaseOutputBuffer, this);
3574     msg->setSize("index", index);
3575     msg->setInt32("render", true);
3576     msg->setInt64("timestampNs", timestampNs);
3577 
3578     sp<AMessage> response;
3579     return PostAndAwaitResponse(msg, &response);
3580 }
3581 
3582 status_t MediaCodec::releaseOutputBuffer(size_t index) {
3583     sp<AMessage> msg = new AMessage(kWhatReleaseOutputBuffer, this);
3584     msg->setSize("index", index);
3585 
3586     sp<AMessage> response;
3587     return PostAndAwaitResponse(msg, &response);
3588 }
3589 
3590 status_t MediaCodec::signalEndOfInputStream() {
3591     sp<AMessage> msg = new AMessage(kWhatSignalEndOfInputStream, this);
3592 
3593     sp<AMessage> response;
3594     return PostAndAwaitResponse(msg, &response);
3595 }
3596 
3597 status_t MediaCodec::getOutputFormat(sp<AMessage> *format) const {
3598     sp<AMessage> msg = new AMessage(kWhatGetOutputFormat, this);
3599 
3600     sp<AMessage> response;
3601     status_t err;
3602     if ((err = PostAndAwaitResponse(msg, &response)) != OK) {
3603         return err;
3604     }
3605 
3606     CHECK(response->findMessage("format", format));
3607 
3608     return OK;
3609 }
3610 
3611 status_t MediaCodec::getInputFormat(sp<AMessage> *format) const {
3612     sp<AMessage> msg = new AMessage(kWhatGetInputFormat, this);
3613 
3614     sp<AMessage> response;
3615     status_t err;
3616     if ((err = PostAndAwaitResponse(msg, &response)) != OK) {
3617         return err;
3618     }
3619 
3620     CHECK(response->findMessage("format", format));
3621 
3622     return OK;
3623 }
3624 
3625 status_t MediaCodec::getName(AString *name) const {
3626     sp<AMessage> msg = new AMessage(kWhatGetName, this);
3627 
3628     sp<AMessage> response;
3629     status_t err;
3630     if ((err = PostAndAwaitResponse(msg, &response)) != OK) {
3631         return err;
3632     }
3633 
3634     CHECK(response->findString("name", name));
3635 
3636     return OK;
3637 }
3638 
3639 status_t MediaCodec::getCodecInfo(sp<MediaCodecInfo> *codecInfo) const {
3640     sp<AMessage> msg = new AMessage(kWhatGetCodecInfo, this);
3641 
3642     sp<AMessage> response;
3643     status_t err;
3644     if ((err = PostAndAwaitResponse(msg, &response)) != OK) {
3645         return err;
3646     }
3647 
3648     sp<RefBase> obj;
3649     CHECK(response->findObject("codecInfo", &obj));
3650     *codecInfo = static_cast<MediaCodecInfo *>(obj.get());
3651 
3652     return OK;
3653 }
3654 
3655 // this is the user-callable entry point
3656 status_t MediaCodec::getMetrics(mediametrics_handle_t &reply) {
3657 
3658     reply = 0;
3659 
3660     sp<AMessage> msg = new AMessage(kWhatGetMetrics, this);
3661     sp<AMessage> response;
3662     status_t err;
3663     if ((err = PostAndAwaitResponse(msg, &response)) != OK) {
3664         return err;
3665     }
3666 
3667     CHECK(response->findInt64("metrics", &reply));
3668 
3669     return OK;
3670 }
3671 
3672 // runs on the looper thread (for mutex purposes)
3673 void MediaCodec::onGetMetrics(const sp<AMessage>& msg) {
3674 
3675     mediametrics_handle_t results = 0;
3676 
3677     sp<AReplyToken> replyID;
3678     CHECK(msg->senderAwaitsResponse(&replyID));
3679 
3680     if (mMetricsHandle != 0) {
3681         updateMediametrics();
3682         results = mediametrics_dup(mMetricsHandle);
3683         updateEphemeralMediametrics(results);
3684     } else if (mLastMetricsHandle != 0) {
3685         // After error, mMetricsHandle is cleared, but we keep the last
3686         // metrics around so that it can be queried by getMetrics().
3687         results = mediametrics_dup(mLastMetricsHandle);
3688     } else {
3689         results = mediametrics_dup(mMetricsHandle);
3690     }
3691 
3692     sp<AMessage> response = new AMessage;
3693     response->setInt64("metrics", results);
3694     response->postReply(replyID);
3695 }
3696 
3697 status_t MediaCodec::getInputBuffers(Vector<sp<MediaCodecBuffer> > *buffers) const {
3698     sp<AMessage> msg = new AMessage(kWhatGetBuffers, this);
3699     msg->setInt32("portIndex", kPortIndexInput);
3700     msg->setPointer("buffers", buffers);
3701 
3702     sp<AMessage> response;
3703     return PostAndAwaitResponse(msg, &response);
3704 }
3705 
3706 status_t MediaCodec::getOutputBuffers(Vector<sp<MediaCodecBuffer> > *buffers) const {
3707     sp<AMessage> msg = new AMessage(kWhatGetBuffers, this);
3708     msg->setInt32("portIndex", kPortIndexOutput);
3709     msg->setPointer("buffers", buffers);
3710 
3711     sp<AMessage> response;
3712     return PostAndAwaitResponse(msg, &response);
3713 }
3714 
3715 status_t MediaCodec::getOutputBuffer(size_t index, sp<MediaCodecBuffer> *buffer) {
3716     sp<AMessage> format;
3717     return getBufferAndFormat(kPortIndexOutput, index, buffer, &format);
3718 }
3719 
3720 status_t MediaCodec::getOutputFormat(size_t index, sp<AMessage> *format) {
3721     sp<MediaCodecBuffer> buffer;
3722     return getBufferAndFormat(kPortIndexOutput, index, &buffer, format);
3723 }
3724 
3725 status_t MediaCodec::getInputBuffer(size_t index, sp<MediaCodecBuffer> *buffer) {
3726     sp<AMessage> format;
3727     return getBufferAndFormat(kPortIndexInput, index, buffer, &format);
3728 }
3729 
3730 bool MediaCodec::isExecuting() const {
3731     return mState == STARTED || mState == FLUSHED;
3732 }
3733 
3734 status_t MediaCodec::getBufferAndFormat(
3735         size_t portIndex, size_t index,
3736         sp<MediaCodecBuffer> *buffer, sp<AMessage> *format) {
3737     // use mutex instead of a context switch
3738     if (mReleasedByResourceManager) {
3739         mErrorLog.log(LOG_TAG, "resource already released");
3740         return DEAD_OBJECT;
3741     }
3742 
3743     if (buffer == NULL) {
3744         mErrorLog.log(LOG_TAG, "null buffer");
3745         return INVALID_OPERATION;
3746     }
3747 
3748     if (format == NULL) {
3749         mErrorLog.log(LOG_TAG, "null format");
3750         return INVALID_OPERATION;
3751     }
3752 
3753     buffer->clear();
3754     format->clear();
3755 
3756     if (!isExecuting()) {
3757         mErrorLog.log(LOG_TAG, base::StringPrintf(
3758                 "Invalid to call %s; only valid in Executing states",
3759                 apiStateString().c_str()));
3760         return INVALID_OPERATION;
3761     }
3762 
3763     // we do not want mPortBuffers to change during this section
3764     // we also don't want mOwnedByClient to change during this
3765     Mutex::Autolock al(mBufferLock);
3766 
3767     std::vector<BufferInfo> &buffers = mPortBuffers[portIndex];
3768     if (index >= buffers.size()) {
3769         ALOGE("getBufferAndFormat - trying to get buffer with "
3770               "bad index (index=%zu buffer_size=%zu)", index, buffers.size());
3771         mErrorLog.log(LOG_TAG, base::StringPrintf("Bad index (index=%zu)", index));
3772         return INVALID_OPERATION;
3773     }
3774 
3775     const BufferInfo &info = buffers[index];
3776     if (!info.mOwnedByClient) {
3777         ALOGE("getBufferAndFormat - invalid operation "
3778               "(the index %zu is not owned by client)", index);
3779         mErrorLog.log(LOG_TAG, base::StringPrintf("index %zu is not owned by client", index));
3780         return INVALID_OPERATION;
3781     }
3782 
3783     *buffer = info.mData;
3784     *format = info.mData->format();
3785 
3786     return OK;
3787 }
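// A minimal usage sketch (not part of this file): in synchronous mode, a client
// that has obtained a buffer index from dequeueOutputBuffer() typically pairs it
// with the accessors above. `codec` and `index` are hypothetical names here.
//
//     sp<MediaCodecBuffer> outBuffer;
//     sp<AMessage> outFormat;
//     if (codec->getOutputBuffer(index, &outBuffer) == OK
//             && codec->getOutputFormat(index, &outFormat) == OK) {
//         // outBuffer is owned by the client until it is queued or released;
//         // outFormat is the format attached to that output buffer.
//     }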
3788 
3789 status_t MediaCodec::flush() {
3790     sp<AMessage> msg = new AMessage(kWhatFlush, this);
3791 
3792     sp<AMessage> response;
3793     return PostAndAwaitResponse(msg, &response);
3794 }
3795 
3796 status_t MediaCodec::requestIDRFrame() {
3797     (new AMessage(kWhatRequestIDRFrame, this))->post();
3798 
3799     return OK;
3800 }
3801 
3802 status_t MediaCodec::querySupportedVendorParameters(std::vector<std::string> *names) {
3803     return mCodec->querySupportedParameters(names);
3804 }
3805 
3806 status_t MediaCodec::describeParameter(const std::string &name, CodecParameterDescriptor *desc) {
3807     return mCodec->describeParameter(name, desc);
3808 }
3809 
3810 status_t MediaCodec::subscribeToVendorParameters(const std::vector<std::string> &names) {
3811     return mCodec->subscribeToParameters(names);
3812 }
3813 
3814 status_t MediaCodec::unsubscribeFromVendorParameters(const std::vector<std::string> &names) {
3815     return mCodec->unsubscribeFromParameters(names);
3816 }
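// A rough usage sketch (not part of this file): the four vendor-parameter
// helpers above are thin pass-throughs to the underlying CodecBase. A client
// might combine them as follows; `codec` and the local names are hypothetical.
//
//     std::vector<std::string> names;
//     if (codec->querySupportedVendorParameters(&names) == OK && !names.empty()) {
//         CodecParameterDescriptor desc;
//         if (codec->describeParameter(names[0], &desc) == OK) {
//             codec->subscribeToVendorParameters({names[0]});
//             // ... later, stop receiving updates for it:
//             codec->unsubscribeFromVendorParameters({names[0]});
//         }
//     }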
3817 
3818 void MediaCodec::requestActivityNotification(const sp<AMessage> &notify) {
3819     sp<AMessage> msg = new AMessage(kWhatRequestActivityNotification, this);
3820     msg->setMessage("notify", notify);
3821     msg->post();
3822 }
3823 
3824 void MediaCodec::requestCpuBoostIfNeeded() {
3825     if (mCpuBoostRequested) {
3826         return;
3827     }
3828     int32_t colorFormat;
3829     if (mOutputFormat->contains("hdr-static-info")
3830             && mOutputFormat->findInt32("color-format", &colorFormat)
3831             // check the format for OMX only; for C2 the format is always opaque since
3832             // software rendering doesn't go through the client
3833             && ((mSoftRenderer != NULL && colorFormat == OMX_COLOR_FormatYUV420Planar16)
3834                     || mOwnerName.equalsIgnoreCase("codec2::software"))) {
3835         int32_t left, top, right, bottom, width, height;
3836         int64_t totalPixel = 0;
3837         if (mOutputFormat->findRect("crop", &left, &top, &right, &bottom)) {
3838             totalPixel = (right - left + 1) * (bottom - top + 1);
3839         } else if (mOutputFormat->findInt32("width", &width)
3840                 && mOutputFormat->findInt32("height", &height)) {
3841             totalPixel = width * height;
3842         }
3843         if (totalPixel >= 1920 * 1080) {
3844             mResourceManagerProxy->addResource(MediaResource::CpuBoostResource());
3845             mCpuBoostRequested = true;
3846         }
3847     }
3848 }
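// For reference, the boost threshold above corresponds to full HD. The crop
// rect is inclusive, so a crop of (0, 0, 1919, 1079) yields
// (1919 - 0 + 1) * (1079 - 0 + 1) = 1920 * 1080 = 2,073,600 pixels, which
// satisfies the `totalPixel >= 1920 * 1080` check and requests the CPU boost.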
3849 
3850 BatteryChecker::BatteryChecker(const sp<AMessage> &msg, int64_t timeoutUs)
3851     : mTimeoutUs(timeoutUs)
3852     , mLastActivityTimeUs(-1ll)
3853     , mBatteryStatNotified(false)
3854     , mBatteryCheckerGeneration(0)
3855     , mIsExecuting(false)
3856     , mBatteryCheckerMsg(msg) {}
3857 
3858 void BatteryChecker::onCodecActivity(std::function<void()> batteryOnCb) {
3859     if (!isExecuting()) {
3860         // ignore if not executing
3861         return;
3862     }
3863     if (!mBatteryStatNotified) {
3864         batteryOnCb();
3865         mBatteryStatNotified = true;
3866         sp<AMessage> msg = mBatteryCheckerMsg->dup();
3867         msg->setInt32("generation", mBatteryCheckerGeneration);
3868 
3869         // post checker and clear last activity time
3870         msg->post(mTimeoutUs);
3871         mLastActivityTimeUs = -1ll;
3872     } else {
3873         // update last activity time
3874         mLastActivityTimeUs = ALooper::GetNowUs();
3875     }
3876 }
3877 
3878 void BatteryChecker::onCheckBatteryTimer(
3879         const sp<AMessage> &msg, std::function<void()> batteryOffCb) {
3880     // ignore if this checker already expired because the client resource was removed
3881     int32_t generation;
3882     if (!msg->findInt32("generation", &generation)
3883             || generation != mBatteryCheckerGeneration) {
3884         return;
3885     }
3886 
3887     if (mLastActivityTimeUs < 0ll) {
3888         // timed out while inactive; do not repost the checker
3889         batteryOffCb();
3890         mBatteryStatNotified = false;
3891     } else {
3892         // repost checker and clear last activity time
3893         msg->post(mTimeoutUs + mLastActivityTimeUs - ALooper::GetNowUs());
3894         mLastActivityTimeUs = -1ll;
3895     }
3896 }
3897 
3898 void BatteryChecker::onClientRemoved() {
3899     mBatteryStatNotified = false;
3900     mBatteryCheckerGeneration++;
3901 }
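// The generation counter acts as a cheap cancellation token for the timer
// messages posted in onCodecActivity(): any message still in flight carries
// the old generation, so onCheckBatteryTimer() drops it after onClientRemoved()
// has bumped mBatteryCheckerGeneration. A hypothetical sequence:
//
//     checker->onCodecActivity(batteryOn);   // posts a timer with generation N
//     checker->onClientRemoved();            // generation becomes N + 1
//     // when the stale timer fires, findInt32("generation") yields N != N + 1,
//     // so onCheckBatteryTimer() returns without calling batteryOffCb.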
3902 
3903 ////////////////////////////////////////////////////////////////////////////////
3904 
3905 void MediaCodec::cancelPendingDequeueOperations() {
3906     if (mFlags & kFlagDequeueInputPending) {
3907         mErrorLog.log(LOG_TAG, "Pending dequeue input buffer request cancelled");
3908         PostReplyWithError(mDequeueInputReplyID, INVALID_OPERATION);
3909 
3910         ++mDequeueInputTimeoutGeneration;
3911         mDequeueInputReplyID = 0;
3912         mFlags &= ~kFlagDequeueInputPending;
3913     }
3914 
3915     if (mFlags & kFlagDequeueOutputPending) {
3916         mErrorLog.log(LOG_TAG, "Pending dequeue output buffer request cancelled");
3917         PostReplyWithError(mDequeueOutputReplyID, INVALID_OPERATION);
3918 
3919         ++mDequeueOutputTimeoutGeneration;
3920         mDequeueOutputReplyID = 0;
3921         mFlags &= ~kFlagDequeueOutputPending;
3922     }
3923 }
3924 
3925 bool MediaCodec::handleDequeueInputBuffer(const sp<AReplyToken> &replyID, bool newRequest) {
3926     if (!isExecuting()) {
3927         mErrorLog.log(LOG_TAG, base::StringPrintf(
3928                 "Invalid to call %s; only valid in executing state",
3929                 apiStateString().c_str()));
3930         PostReplyWithError(replyID, INVALID_OPERATION);
3931     } else if (mFlags & kFlagIsAsync) {
3932         mErrorLog.log(LOG_TAG, "Invalid to call in async mode");
3933         PostReplyWithError(replyID, INVALID_OPERATION);
3934     } else if (newRequest && (mFlags & kFlagDequeueInputPending)) {
3935         mErrorLog.log(LOG_TAG, "Invalid to call while another dequeue input request is pending");
3936         PostReplyWithError(replyID, INVALID_OPERATION);
3937         return true;
3938     } else if (mFlags & kFlagStickyError) {
3939         PostReplyWithError(replyID, getStickyError());
3940         return true;
3941     }
3942 
3943     ssize_t index = dequeuePortBuffer(kPortIndexInput);
3944 
3945     if (index < 0) {
3946         CHECK_EQ(index, -EAGAIN);
3947         return false;
3948     }
3949 
3950     sp<AMessage> response = new AMessage;
3951     response->setSize("index", index);
3952     response->postReply(replyID);
3953 
3954     return true;
3955 }
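// From the client's side, the -EAGAIN path above is what a synchronous
// dequeueInputBuffer() call reports when no buffer becomes available before the
// timeout. A rough sketch, with `codec` being a hypothetical started instance:
//
//     size_t index;
//     status_t err = codec->dequeueInputBuffer(&index, 10000 /* timeoutUs */);
//     if (err == OK) {
//         // fill the buffer at `index` and queue it back with queueInputBuffer()
//     } else if (err == -EAGAIN) {
//         // no input buffer became available within the timeout; retry later
//     }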
3956 
3957 // always called from the looper thread
3958 MediaCodec::DequeueOutputResult MediaCodec::handleDequeueOutputBuffer(
3959         const sp<AReplyToken> &replyID, bool newRequest) {
3960     if (!isExecuting()) {
3961         mErrorLog.log(LOG_TAG, base::StringPrintf(
3962                 "Invalid to call %s; only valid in executing state",
3963                 apiStateString().c_str()));
3964         PostReplyWithError(replyID, INVALID_OPERATION);
3965     } else if (mFlags & kFlagIsAsync) {
3966         mErrorLog.log(LOG_TAG, "Invalid to call in async mode");
3967         PostReplyWithError(replyID, INVALID_OPERATION);
3968     } else if (newRequest && (mFlags & kFlagDequeueOutputPending)) {
3969         mErrorLog.log(LOG_TAG, "Invalid to call while another dequeue output request is pending");
3970         PostReplyWithError(replyID, INVALID_OPERATION);
3971     } else if (mFlags & kFlagStickyError) {
3972         PostReplyWithError(replyID, getStickyError());
3973     } else if (mFlags & kFlagOutputBuffersChanged) {
3974         PostReplyWithError(replyID, INFO_OUTPUT_BUFFERS_CHANGED);
3975         mFlags &= ~kFlagOutputBuffersChanged;
3976     } else {
3977         sp<AMessage> response = new AMessage;
3978         BufferInfo *info = peekNextPortBuffer(kPortIndexOutput);
3979         if (!info) {
3980             return DequeueOutputResult::kNoBuffer;
3981         }
3982 
3983         // In synchronous mode, an output format change should be handled at
3984         // dequeue time so that the event is reported in the correct order.
3985 
3986         const sp<MediaCodecBuffer> &buffer = info->mData;
3987         handleOutputFormatChangeIfNeeded(buffer);
3988         if (mFlags & kFlagOutputFormatChanged) {
3989             PostReplyWithError(replyID, INFO_FORMAT_CHANGED);
3990             mFlags &= ~kFlagOutputFormatChanged;
3991             return DequeueOutputResult::kRepliedWithError;
3992         }
3993 
3994         ssize_t index = dequeuePortBuffer(kPortIndexOutput);
3995         if (discardDecodeOnlyOutputBuffer(index)) {
3996             return DequeueOutputResult::kDiscardedBuffer;
3997         }
3998 
3999         response->setSize("index", index);
4000         response->setSize("offset", buffer->offset());
4001         response->setSize("size", buffer->size());
4002 
4003         int64_t timeUs;
4004         CHECK(buffer->meta()->findInt64("timeUs", &timeUs));
4005 
4006         response->setInt64("timeUs", timeUs);
4007 
4008         int32_t flags;
4009         CHECK(buffer->meta()->findInt32("flags", &flags));
4010 
4011         response->setInt32("flags", flags);
4012 
4013         // NOTE: we must account for the stats of an output buffer only after we
4014         // have already handled a potential output format change that could have
4015         // started a new subsession.
4016         statsBufferReceived(timeUs, buffer);
4017 
4018         response->postReply(replyID);
4019         return DequeueOutputResult::kSuccess;
4020     }
4021 
4022     return DequeueOutputResult::kRepliedWithError;
4023 }
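// From the client's side, the info codes replied above surface as special
// return values of a synchronous dequeueOutputBuffer() call and must be handled
// before treating the result as a real buffer index. A rough sketch (`codec`
// and the locals are hypothetical):
//
//     size_t index, offset, size;
//     int64_t timeUs;
//     uint32_t flags;
//     status_t err = codec->dequeueOutputBuffer(
//             &index, &offset, &size, &timeUs, &flags, 10000 /* timeoutUs */);
//     if (err == INFO_FORMAT_CHANGED) {
//         // re-query the output format before consuming further buffers
//     } else if (err == INFO_OUTPUT_BUFFERS_CHANGED) {
//         // refresh any cached output buffer references
//     } else if (err == OK) {
//         // consume the output described by index/offset/size/timeUs/flags
//     }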
4024 
4025 
4026 inline void MediaCodec::initClientConfigParcel(ClientConfigParcel& clientConfig) {
4027     clientConfig.codecType = toMediaResourceSubType(mIsHardware, mDomain);
4028     clientConfig.isEncoder = mFlags & kFlagIsEncoder;
4029     clientConfig.width = mWidth;
4030     clientConfig.height = mHeight;
4031     clientConfig.timeStamp = systemTime(SYSTEM_TIME_MONOTONIC) / 1000LL;
4032     clientConfig.id = mCodecId;
4033 }
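// Note on units: systemTime(SYSTEM_TIME_MONOTONIC) returns nanoseconds, so the
// division by 1000 above stores clientConfig.timeStamp in microseconds; e.g. a
// raw reading of 1234567890 ns is recorded as 1234567 us. The same parcel is
// later passed to notifyClientStarted()/notifyClientStopped() on the resource
// manager proxy.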
4034 
4035 void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
4036     switch (msg->what()) {
4037         case kWhatCodecNotify:
4038         {
4039             int32_t what;
4040             CHECK(msg->findInt32("what", &what));
4041             AString codecErrorState;
4042             switch (what) {
4043                 case kWhatError:
4044                 case kWhatCryptoError:
4045                 {
4046                     int32_t err, actionCode;
4047                     CHECK(msg->findInt32("err", &err));
4048                     CHECK(msg->findInt32("actionCode", &actionCode));
4049 
4050                     ALOGE("Codec reported err %#x/%s, actionCode %d, while in state %d/%s",
4051                                               err, StrMediaError(err).c_str(), actionCode,
4052                                               mState, stateString(mState).c_str());
4053                     if (err == DEAD_OBJECT) {
4054                         mFlags |= kFlagSawMediaServerDie;
4055                         mFlags &= ~kFlagIsComponentAllocated;
4056                     }
4057                     bool sendErrorResponse = true;
4058                     std::string origin;
4059                     if (what == kWhatCryptoError) {
4060                         origin = "kWhatCryptoError:";
4061                     } else {
4062                         origin = "kWhatError:";
4063                         //TODO: add a new error state
4064                     }
4065                     codecErrorState = kCodecErrorState;
4066                     origin += stateString(mState);
4067                     if (mCryptoAsync) {
4068                         //TODO: do some bookkeeping on the buffers
4069                         mCryptoAsync->stop();
4070                     }
4071                     switch (mState) {
4072                         case INITIALIZING:
4073                         {
4074                             // A resource error during the INITIALIZING state needs to be
4075                             // logged through metrics so that such occurrences can be tracked.
4076                             if (isResourceError(err)) {
4077                                 mediametrics_setInt32(mMetricsHandle, kCodecError, err);
4078                                 mediametrics_setCString(mMetricsHandle, kCodecErrorState,
4079                                                         stateString(mState).c_str());
4080                                 flushMediametrics();
4081                                 initMediametrics();
4082                             }
4083                             setState(UNINITIALIZED);
4084                             break;
4085                         }
4086 
4087                         case CONFIGURING:
4088                         {
4089                             if (actionCode == ACTION_CODE_FATAL) {
4090                                 mediametrics_setInt32(mMetricsHandle, kCodecError, err);
4091                                 mediametrics_setCString(mMetricsHandle, kCodecErrorState,
4092                                                         stateString(mState).c_str());
4093                                 flushMediametrics();
4094                                 initMediametrics();
4095                             }
4096                             setState(actionCode == ACTION_CODE_FATAL ?
4097                                     UNINITIALIZED : INITIALIZED);
4098                             break;
4099                         }
4100 
4101                         case STARTING:
4102                         {
4103                             if (actionCode == ACTION_CODE_FATAL) {
4104                                 mediametrics_setInt32(mMetricsHandle, kCodecError, err);
4105                                 mediametrics_setCString(mMetricsHandle, kCodecErrorState,
4106                                                         stateString(mState).c_str());
4107                                 flushMediametrics();
4108                                 initMediametrics();
4109                             }
4110                             setState(actionCode == ACTION_CODE_FATAL ?
4111                                     UNINITIALIZED : CONFIGURED);
4112                             break;
4113                         }
4114 
4115                         case RELEASING:
4116                         {
4117                             // Ignore the error, assuming we'll still get
4118                             // the shutdown complete notification. If we
4119                             // don't, we'll time out and force the release.
4120                             sendErrorResponse = false;
4121                             FALLTHROUGH_INTENDED;
4122                         }
4123                         case STOPPING:
4124                         {
4125                             if (mFlags & kFlagSawMediaServerDie) {
4126                                 if (mState == RELEASING && !mReplyID) {
4127                                     ALOGD("Releasing asynchronously, so nothing to reply here.");
4128                                 }
4129                                 // MediaServer died, there definitely won't
4130                                 // be a shutdown complete notification after
4131                                 // all.
4132 
4133                                 // note that we may be directly going from
4134                                 // STOPPING->UNINITIALIZED, instead of the
4135                                 // usual STOPPING->INITIALIZED state.
4136                                 setState(UNINITIALIZED);
4137                                 if (mState == RELEASING) {
4138                                     mComponentName.clear();
4139                                 }
4140                                 if (mReplyID) {
4141                                     postPendingRepliesAndDeferredMessages(origin + ":dead");
4142                                 } else {
4143                                     ALOGD("no pending replies: %s:dead following %s",
4144                                           origin.c_str(), mLastReplyOrigin.c_str());
4145                                 }
4146                                 sendErrorResponse = false;
4147                             } else if (!mReplyID) {
4148                                 sendErrorResponse = false;
4149                             }
4150                             break;
4151                         }
4152 
4153                         case FLUSHING:
4154                         {
4155                             if (actionCode == ACTION_CODE_FATAL) {
4156                                 mediametrics_setInt32(mMetricsHandle, kCodecError, err);
4157                                 mediametrics_setCString(mMetricsHandle, kCodecErrorState,
4158                                                         stateString(mState).c_str());
4159                                 flushMediametrics();
4160                                 initMediametrics();
4161 
4162                                 setState(UNINITIALIZED);
4163                             } else {
4164                                 setState((mFlags & kFlagIsAsync) ? FLUSHED : STARTED);
4165                             }
4166                             break;
4167                         }
4168 
4169                         case FLUSHED:
4170                         case STARTED:
4171                         {
4172                             sendErrorResponse = (mReplyID != nullptr);
4173 
4174                             setStickyError(err);
4175                             postActivityNotificationIfPossible();
4176 
4177                             cancelPendingDequeueOperations();
4178 
4179                             if (mFlags & kFlagIsAsync) {
4180                                 if (what == kWhatError) {
4181                                     onError(err, actionCode);
4182                                 } else if (what == kWhatCryptoError) {
4183                                     onCryptoError(msg);
4184                                 }
4185                             }
4186                             switch (actionCode) {
4187                             case ACTION_CODE_TRANSIENT:
4188                                 break;
4189                             case ACTION_CODE_RECOVERABLE:
4190                                 setState(INITIALIZED);
4191                                 break;
4192                             default:
4193                                 mediametrics_setInt32(mMetricsHandle, kCodecError, err);
4194                                 mediametrics_setCString(mMetricsHandle, kCodecErrorState,
4195                                                         stateString(mState).c_str());
4196                                 flushMediametrics();
4197                                 initMediametrics();
4198                                 setState(UNINITIALIZED);
4199                                 break;
4200                             }
4201                             break;
4202                         }
4203 
4204                         default:
4205                         {
4206                             sendErrorResponse = (mReplyID != nullptr);
4207 
4208                             setStickyError(err);
4209                             postActivityNotificationIfPossible();
4210 
4211                             // any actionCode in an uninitialized state is treated as fatal.
4212                             if (mState == UNINITIALIZED) {
4213                                 actionCode = ACTION_CODE_FATAL;
4214                             }
4215                             if (mFlags & kFlagIsAsync) {
4216                                 if (what == kWhatError) {
4217                                     onError(err, actionCode);
4218                                 } else if (what == kWhatCryptoError) {
4219                                     onCryptoError(msg);
4220                                 }
4221                             }
4222                             switch (actionCode) {
4223                             case ACTION_CODE_TRANSIENT:
4224                                 break;
4225                             case ACTION_CODE_RECOVERABLE:
4226                                 setState(INITIALIZED);
4227                                 break;
4228                             default:
4229                                 setState(UNINITIALIZED);
4230                                 break;
4231                             }
4232                             break;
4233                         }
4234                     }
4235 
4236                     if (sendErrorResponse) {
4237                         // TRICKY: replicate PostReplyWithError logic for
4238                         //         err code override
4239                         int32_t finalErr = err;
4240                         if (mReleasedByResourceManager) {
4241                             // override the err code if MediaCodec has been
4242                             // released by ResourceManager.
4243                             finalErr = DEAD_OBJECT;
4244                         }
4245                         postPendingRepliesAndDeferredMessages(origin, finalErr);
4246                     }
4247                     break;
4248                 }
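                // Informal summary of the error transitions handled above:
                // INITIALIZING -> UNINITIALIZED; CONFIGURING -> UNINITIALIZED (fatal)
                // or INITIALIZED; STARTING -> UNINITIALIZED (fatal) or CONFIGURED;
                // FLUSHING -> UNINITIALIZED (fatal) or FLUSHED/STARTED; STOPPING and
                // RELEASING only move to UNINITIALIZED when the mediaserver died;
                // FLUSHED/STARTED and the default case stay put (transient), go to
                // INITIALIZED (recoverable), or drop to UNINITIALIZED otherwise.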
4249 
4250                 case kWhatComponentAllocated:
4251                 {
4252                     if (mState == RELEASING || mState == UNINITIALIZED) {
4253                     // In case a kWhatError or kWhatRelease message already came in and
4254                     // was replied to, log a warning and ignore.
4255                         ALOGW("allocate interrupted by error or release, current state %d/%s",
4256                               mState, stateString(mState).c_str());
4257                         break;
4258                     }
4259                     CHECK_EQ(mState, INITIALIZING);
4260                     setState(INITIALIZED);
4261                     mFlags |= kFlagIsComponentAllocated;
4262 
4263                     CHECK(msg->findString("componentName", &mComponentName));
4264 
4265                     if (mComponentName.c_str()) {
4266                         mIsHardware = !MediaCodecList::isSoftwareCodec(mComponentName);
4267                         mediametrics_setCString(mMetricsHandle, kCodecCodec,
4268                                                 mComponentName.c_str());
4269                         // Update the codec name.
4270                         mResourceManagerProxy->setCodecName(mComponentName.c_str());
4271                     }
4272 
4273                     const char *owner = mCodecInfo ? mCodecInfo->getOwnerName() : "";
4274                     if (mComponentName.startsWith("OMX.google.")
4275                             && strncmp(owner, "default", 8) == 0) {
4276                         mFlags |= kFlagUsesSoftwareRenderer;
4277                     } else {
4278                         mFlags &= ~kFlagUsesSoftwareRenderer;
4279                     }
4280                     mOwnerName = owner;
4281 
4282                     if (mComponentName.endsWith(".secure")) {
4283                         mFlags |= kFlagIsSecure;
4284                         mediametrics_setInt32(mMetricsHandle, kCodecSecure, 1);
4285                     } else {
4286                         mFlags &= ~kFlagIsSecure;
4287                         mediametrics_setInt32(mMetricsHandle, kCodecSecure, 0);
4288                     }
4289 
4290                     mediametrics_setInt32(mMetricsHandle, kCodecHardware,
4291                                           MediaCodecList::isSoftwareCodec(mComponentName) ? 0 : 1);
4292 
4293                     mResourceManagerProxy->addResource(MediaResource::CodecResource(
4294                             mFlags & kFlagIsSecure, toMediaResourceSubType(mIsHardware, mDomain)));
4295 
4296                     postPendingRepliesAndDeferredMessages("kWhatComponentAllocated");
4297                     break;
4298                 }
4299 
4300                 case kWhatComponentConfigured:
4301                 {
4302                     if (mState == RELEASING || mState == UNINITIALIZED || mState == INITIALIZED) {
4303                     // In case a kWhatError or kWhatRelease message already came in and
4304                     // was replied to, log a warning and ignore.
4305                         ALOGW("configure interrupted by error or release, current state %d/%s",
4306                               mState, stateString(mState).c_str());
4307                         break;
4308                     }
4309                     CHECK_EQ(mState, CONFIGURING);
4310 
4311                     // reset input surface flag
4312                     mHaveInputSurface = false;
4313 
4314                     CHECK(msg->findMessage("input-format", &mInputFormat));
4315                     CHECK(msg->findMessage("output-format", &mOutputFormat));
4316 
4317                     // only confirm the opt-in behavior here, to minimize any behavioral change
4318                     if (mSurface != nullptr && !mAllowFrameDroppingBySurface) {
4319                         // signal the frame-dropping mode in the input format, as this may also
4320                         // be meaningful (and confusing) for an encoder in a transcoding scenario
4321                         mInputFormat->setInt32(KEY_ALLOW_FRAME_DROP, mAllowFrameDroppingBySurface);
4322                     }
4323                     sp<AMessage> interestingFormat =
4324                             (mFlags & kFlagIsEncoder) ? mOutputFormat : mInputFormat;
4325                     ALOGV("[%s] configured as input format: %s, output format: %s",
4326                             mComponentName.c_str(),
4327                             mInputFormat->debugString(4).c_str(),
4328                             mOutputFormat->debugString(4).c_str());
4329                     int32_t usingSwRenderer;
4330                     if (mOutputFormat->findInt32("using-sw-renderer", &usingSwRenderer)
4331                             && usingSwRenderer) {
4332                         mFlags |= kFlagUsesSoftwareRenderer;
4333                     }
4334                     setState(CONFIGURED);
4335                     postPendingRepliesAndDeferredMessages("kWhatComponentConfigured");
4336 
4337                     // augment our media metrics info, now that we know more things
4338                     // such as what the codec extracted from any CSD passed in.
4339                     if (mMetricsHandle != 0) {
4340                         sp<AMessage> format;
4341                         if (mConfigureMsg != NULL &&
4342                             mConfigureMsg->findMessage("format", &format)) {
4343                                 // format includes: mime
4344                                 AString mime;
4345                                 if (format->findString("mime", &mime)) {
4346                                     mediametrics_setCString(mMetricsHandle, kCodecMime,
4347                                                             mime.c_str());
4348                                 }
4349                             }
4350                         // perhaps video only?
4351                         int32_t profile = 0;
4352                         if (interestingFormat->findInt32("profile", &profile)) {
4353                             mediametrics_setInt32(mMetricsHandle, kCodecProfile, profile);
4354                         }
4355                         int32_t level = 0;
4356                         if (interestingFormat->findInt32("level", &level)) {
4357                             mediametrics_setInt32(mMetricsHandle, kCodecLevel, level);
4358                         }
4359                         sp<AMessage> uncompressedFormat =
4360                                 (mFlags & kFlagIsEncoder) ? mInputFormat : mOutputFormat;
4361                         int32_t componentColorFormat  = -1;
4362                         if (uncompressedFormat->findInt32("android._color-format",
4363                                 &componentColorFormat)) {
4364                             mediametrics_setInt32(mMetricsHandle,
4365                                     kCodecComponentColorFormat, componentColorFormat);
4366                         }
4367                         updateHdrMetrics(true /* isConfig */);
4368                         int32_t codecMaxInputSize = -1;
4369                         if (mInputFormat->findInt32(KEY_MAX_INPUT_SIZE, &codecMaxInputSize)) {
4370                             mApiUsageMetrics.inputBufferSize.codecMax = codecMaxInputSize;
4371                         }
4372                         // bitrate and bitrate mode, encoder only
4373                         if (mFlags & kFlagIsEncoder) {
4374                             // encoder specific values
4375                             int32_t bitrate_mode = -1;
4376                             if (mOutputFormat->findInt32(KEY_BITRATE_MODE, &bitrate_mode)) {
4377                                     mediametrics_setCString(mMetricsHandle, kCodecBitrateMode,
4378                                           asString_BitrateMode(bitrate_mode));
4379                             }
4380                             int32_t bitrate = -1;
4381                             if (mOutputFormat->findInt32(KEY_BIT_RATE, &bitrate)) {
4382                                     mediametrics_setInt32(mMetricsHandle, kCodecBitrate, bitrate);
4383                             }
4384                         } else {
4385                             // decoder specific values
4386                         }
4387                     }
4388                     break;
4389                 }
4390 
4391                 case kWhatInputSurfaceCreated:
4392                 {
4393                     if (mState != CONFIGURED) {
4394                         // state transitioned unexpectedly; we should have replied already.
4395                         ALOGD("received kWhatInputSurfaceCreated message in state %s",
4396                                 stateString(mState).c_str());
4397                         break;
4398                     }
4399                     // response to initiateCreateInputSurface()
4400                     status_t err = NO_ERROR;
4401                     sp<AMessage> response = new AMessage;
4402                     if (!msg->findInt32("err", &err)) {
4403                         sp<RefBase> obj;
4404                         msg->findObject("input-surface", &obj);
4405                         CHECK(msg->findMessage("input-format", &mInputFormat));
4406                         CHECK(msg->findMessage("output-format", &mOutputFormat));
4407                         ALOGV("[%s] input surface created as input format: %s, output format: %s",
4408                                 mComponentName.c_str(),
4409                                 mInputFormat->debugString(4).c_str(),
4410                                 mOutputFormat->debugString(4).c_str());
4411                         CHECK(obj != NULL);
4412                         response->setObject("input-surface", obj);
4413                         mHaveInputSurface = true;
4414                     } else {
4415                         response->setInt32("err", err);
4416                     }
4417                     postPendingRepliesAndDeferredMessages("kWhatInputSurfaceCreated", response);
4418                     break;
4419                 }
4420 
4421                 case kWhatInputSurfaceAccepted:
4422                 {
4423                     if (mState != CONFIGURED) {
4424                         // state transitioned unexpectedly; we should have replied already.
4425                         ALOGD("received kWhatInputSurfaceAccepted message in state %s",
4426                                 stateString(mState).c_str());
4427                         break;
4428                     }
4429                     // response to initiateSetInputSurface()
4430                     status_t err = NO_ERROR;
4431                     sp<AMessage> response = new AMessage();
4432                     if (!msg->findInt32("err", &err)) {
4433                         CHECK(msg->findMessage("input-format", &mInputFormat));
4434                         CHECK(msg->findMessage("output-format", &mOutputFormat));
4435                         mHaveInputSurface = true;
4436                     } else {
4437                         response->setInt32("err", err);
4438                     }
4439                     postPendingRepliesAndDeferredMessages("kWhatInputSurfaceAccepted", response);
4440                     break;
4441                 }
4442 
4443                 case kWhatSignaledInputEOS:
4444                 {
4445                     if (!isExecuting()) {
4446                         // state transitioned unexpectedly; we should have replied already.
4447                         ALOGD("received kWhatSignaledInputEOS message in state %s",
4448                                 stateString(mState).c_str());
4449                         break;
4450                     }
4451                     // response to signalEndOfInputStream()
4452                     sp<AMessage> response = new AMessage;
4453                     status_t err;
4454                     if (msg->findInt32("err", &err)) {
4455                         response->setInt32("err", err);
4456                     }
4457                     postPendingRepliesAndDeferredMessages("kWhatSignaledInputEOS", response);
4458                     break;
4459                 }
4460 
4461                 case kWhatStartCompleted:
4462                 {
4463                     if (mState == RELEASING || mState == UNINITIALIZED) {
4464                         // In case a kWhatRelease message came in and replied,
4465                         // we log a warning and ignore.
4466                         ALOGW("start interrupted by release, current state %d/%s",
4467                               mState, stateString(mState).c_str());
4468                         break;
4469                     }
4470 
4471                     CHECK_EQ(mState, STARTING);
4472                     if (mDomain == DOMAIN_VIDEO || mDomain == DOMAIN_IMAGE) {
4473                         mResourceManagerProxy->addResource(
4474                                 MediaResource::GraphicMemoryResource(getGraphicBufferSize()));
4475                     }
4476                     // Notify the RM that the codec is in use (has been started).
4477                     ClientConfigParcel clientConfig;
4478                     initClientConfigParcel(clientConfig);
4479                     mResourceManagerProxy->notifyClientStarted(clientConfig);
4480 
4481                     setState(STARTED);
4482                     postPendingRepliesAndDeferredMessages("kWhatStartCompleted");
4483 
4484                     // Now that the codec has started, default the peek behavior to undefined
4485                     // for backwards compatibility with older releases. Later, if an app
4486                     // explicitly enables or disables peek, this parameter is turned off and
4487                     // the legacy undefined behavior is disallowed.
4488                     // See updateTunnelPeek called in onSetParameters for more details.
4489                     if (mTunneled && mTunnelPeekState == TunnelPeekState::kLegacyMode) {
4490                         sp<AMessage> params = new AMessage;
4491                         params->setInt32("android._tunnel-peek-set-legacy", 1);
4492                         mCodec->signalSetParameters(params);
4493                     }
4494                     break;
4495                 }
4496 
4497                 case kWhatOutputBuffersChanged:
4498                 {
4499                     mFlags |= kFlagOutputBuffersChanged;
4500                     postActivityNotificationIfPossible();
4501                     break;
4502                 }
4503 
4504                 case kWhatOutputFramesRendered:
4505                 {
4506                     // ignore these in all states except running
4507                     if (mState != STARTED) {
4508                         break;
4509                     }
4510                     TunnelPeekState previousState = mTunnelPeekState;
4511                     if (mTunnelPeekState != TunnelPeekState::kLegacyMode) {
4512                         mTunnelPeekState = TunnelPeekState::kBufferRendered;
4513                         ALOGV("TunnelPeekState: %s -> %s",
4514                                 asString(previousState),
4515                                 asString(TunnelPeekState::kBufferRendered));
4516                     }
4517                     processRenderedFrames(msg);
4518                     // check that we have a notification set
4519                     if (mOnFrameRenderedNotification != NULL) {
4520                         sp<AMessage> notify = mOnFrameRenderedNotification->dup();
4521                         notify->setMessage("data", msg);
4522                         notify->post();
4523                     }
4524                     break;
4525                 }
4526 
4527                 case kWhatFirstTunnelFrameReady:
4528                 {
4529                     if (mState != STARTED) {
4530                         break;
4531                     }
4532                     TunnelPeekState previousState = mTunnelPeekState;
4533                     switch(mTunnelPeekState) {
4534                         case TunnelPeekState::kDisabledNoBuffer:
4535                         case TunnelPeekState::kDisabledQueued:
4536                             mTunnelPeekState = TunnelPeekState::kBufferDecoded;
4537                             ALOGV("First tunnel frame ready");
4538                             ALOGV("TunnelPeekState: %s -> %s",
4539                                   asString(previousState),
4540                                   asString(mTunnelPeekState));
4541                             break;
4542                         case TunnelPeekState::kEnabledNoBuffer:
4543                         case TunnelPeekState::kEnabledQueued:
4544                             {
4545                                 sp<AMessage> parameters = new AMessage();
4546                                 parameters->setInt32("android._trigger-tunnel-peek", 1);
4547                                 mCodec->signalSetParameters(parameters);
4548                             }
4549                             mTunnelPeekState = TunnelPeekState::kBufferRendered;
4550                             ALOGV("First tunnel frame ready");
4551                             ALOGV("TunnelPeekState: %s -> %s",
4552                                   asString(previousState),
4553                                   asString(mTunnelPeekState));
4554                             break;
4555                         default:
4556                             ALOGV("Ignoring first tunnel frame ready, TunnelPeekState: %s",
4557                                   asString(mTunnelPeekState));
4558                             break;
4559                     }
4560 
4561                     if (mOnFirstTunnelFrameReadyNotification != nullptr) {
4562                         sp<AMessage> notify = mOnFirstTunnelFrameReadyNotification->dup();
4563                         notify->setMessage("data", msg);
4564                         notify->post();
4565                     }
4566                     break;
4567                 }
4568 
4569                 case kWhatFillThisBuffer:
4570                 {
4571                     /* size_t index = */updateBuffers(kPortIndexInput, msg);
4572 
4573                     bool inStateToReturnBuffers =
4574                         mState == FLUSHING || mState == STOPPING || mState == RELEASING;
4575                     if (android::media::codec::provider_->codec_buffer_state_cleanup()) {
4576                         // Late callbacks from the codec could arrive here
4577                         // after the codec is already stopped or released.
4578                         inStateToReturnBuffers = mState == FLUSHING ||
4579                                                  mState == STOPPING || mState == INITIALIZED ||
4580                                                  mState == RELEASING || mState == UNINITIALIZED;
4581                     }
4582                     if (inStateToReturnBuffers) {
4583                         returnBuffersToCodecOnPort(kPortIndexInput);
4584                         break;
4585                     }
4586 
4587                     if (!mCSD.empty()) {
4588                         ssize_t index = dequeuePortBuffer(kPortIndexInput);
4589                         CHECK_GE(index, 0);
4590 
4591                         // If codec-specific data was specified as part of
4592                         // the format in the call to configure() and there
4593                         // is more CSD left, we submit it here; clients only
4594                         // get access to input buffers once this data has
4595                         // been exhausted.
4596 
4597                         status_t err = queueCSDInputBuffer(index);
4598 
4599                         if (err != OK) {
4600                             ALOGE("queueCSDInputBuffer failed w/ error %d",
4601                                   err);
4602 
4603                             setStickyError(err);
4604                             postActivityNotificationIfPossible();
4605 
4606                             cancelPendingDequeueOperations();
4607                         }
4608                         break;
4609                     }
4610                     if (!mLeftover.empty()) {
4611                         ssize_t index = dequeuePortBuffer(kPortIndexInput);
4612                         CHECK_GE(index, 0);
4613 
4614                         status_t err = handleLeftover(index);
4615                         if (err != OK) {
4616                             setStickyError(err);
4617                             postActivityNotificationIfPossible();
4618                             cancelPendingDequeueOperations();
4619                         }
4620                         break;
4621                     }
4622 
4623                     if (mFlags & kFlagIsAsync) {
4624                         if (!mHaveInputSurface) {
4625                             if (mState == FLUSHED) {
4626                                 mHavePendingInputBuffers = true;
4627                             } else {
4628                                 onInputBufferAvailable();
4629                             }
4630                         }
4631                     } else if (mFlags & kFlagDequeueInputPending) {
4632                         CHECK(handleDequeueInputBuffer(mDequeueInputReplyID));
4633 
4634                         ++mDequeueInputTimeoutGeneration;
4635                         mFlags &= ~kFlagDequeueInputPending;
4636                         mDequeueInputReplyID = 0;
4637                     } else {
4638                         postActivityNotificationIfPossible();
4639                     }
4640                     break;
4641                 }
4642 
4643                 case kWhatDrainThisBuffer:
4644                 {
4645                     if ((mFlags & kFlagUseBlockModel) == 0 && mTunneled) {
4646                         sp<RefBase> obj;
4647                         CHECK(msg->findObject("buffer", &obj));
4648                         sp<MediaCodecBuffer> buffer = static_cast<MediaCodecBuffer *>(obj.get());
4649                         if (mFlags & kFlagIsAsync) {
4650                             // In asynchronous mode, output format change is processed immediately.
4651                             handleOutputFormatChangeIfNeeded(buffer);
4652                         } else {
4653                             postActivityNotificationIfPossible();
4654                         }
4655                         mBufferChannel->discardBuffer(buffer);
4656                         break;
4657                     }
4658 
4659                     /* size_t index = */updateBuffers(kPortIndexOutput, msg);
4660 
4661                     bool inStateToReturnBuffers =
4662                         mState == FLUSHING || mState == STOPPING || mState == RELEASING;
4663                     if (android::media::codec::provider_->codec_buffer_state_cleanup()) {
4664                         // Late callbacks from the codec could arrive here
4665                         // after the codec is already stopped or released.
4666                         inStateToReturnBuffers = mState == FLUSHING ||
4667                                                  mState == STOPPING || mState == INITIALIZED ||
4668                                                  mState == RELEASING || mState == UNINITIALIZED;
4669                     }
4670                     if (inStateToReturnBuffers) {
4671                         returnBuffersToCodecOnPort(kPortIndexOutput);
4672                         break;
4673                     }
4674 
4675                     if (mFlags & kFlagIsAsync) {
4676                         sp<RefBase> obj;
4677                         CHECK(msg->findObject("buffer", &obj));
4678                         sp<MediaCodecBuffer> buffer = static_cast<MediaCodecBuffer *>(obj.get());
4679 
4680                         // In asynchronous mode, output format change is processed immediately.
4681                         handleOutputFormatChangeIfNeeded(buffer);
4682                         onOutputBufferAvailable();
4683                     } else if (mFlags & kFlagDequeueOutputPending) {
4684                         DequeueOutputResult dequeueResult =
4685                             handleDequeueOutputBuffer(mDequeueOutputReplyID);
4686                         switch (dequeueResult) {
4687                             case DequeueOutputResult::kNoBuffer:
4688                                 TRESPASS();
4689                                 break;
4690                             case DequeueOutputResult::kDiscardedBuffer:
4691                                 break;
4692                             case DequeueOutputResult::kRepliedWithError:
4693                                 [[fallthrough]];
4694                             case DequeueOutputResult::kSuccess:
4695                             {
4696                                 ++mDequeueOutputTimeoutGeneration;
4697                                 mFlags &= ~kFlagDequeueOutputPending;
4698                                 mDequeueOutputReplyID = 0;
4699                                 break;
4700                             }
4701                             default:
4702                                 TRESPASS();
4703                         }
4704                     } else {
4705                         postActivityNotificationIfPossible();
4706                     }
4707 
4708                     break;
4709                 }
4710 
4711                 case kWhatMetricsUpdated:
4712                 {
4713                     sp<AMessage> updatedMetrics;
4714                     CHECK(msg->findMessage("updated-metrics", &updatedMetrics));
4715 
4716                     size_t numEntries = updatedMetrics->countEntries();
4717                     AMessage::Type type;
4718                     for (size_t i = 0; i < numEntries; ++i) {
4719                         const char *name = updatedMetrics->getEntryNameAt(i, &type);
4720                         AMessage::ItemData itemData = updatedMetrics->getEntryAt(i);
4721                         switch (type) {
4722                             case AMessage::kTypeInt32: {
4723                                 int32_t metricValue;
4724                                 itemData.find(&metricValue);
4725                                 mediametrics_setInt32(mMetricsHandle, name, metricValue);
4726                                 break;
4727                             }
4728                             case AMessage::kTypeInt64: {
4729                                 int64_t metricValue;
4730                                 itemData.find(&metricValue);
4731                                 mediametrics_setInt64(mMetricsHandle, name, metricValue);
4732                                 break;
4733                             }
4734                             case AMessage::kTypeDouble: {
4735                                 double metricValue;
4736                                 itemData.find(&metricValue);
4737                                 mediametrics_setDouble(mMetricsHandle, name, metricValue);
4738                                 break;
4739                             }
4740                             case AMessage::kTypeString: {
4741                                 AString metricValue;
4742                                 itemData.find(&metricValue);
4743                                 mediametrics_setCString(mMetricsHandle, name, metricValue.c_str());
4744                                 break;
4745                             }
4746                             // TODO: add support for other types
4747                             default:
4748                                 ALOGW("Updated metrics type not supported.");
4749                         }
4750                     }
4751                     break;
4752                 }
4753 
4754                 case kWhatEOS:
4755                 {
4756                     // We already notify the client of this by using the
4757                     // corresponding flag in "onOutputBufferReady".
4758                     break;
4759                 }
4760 
4761                 case kWhatStopCompleted:
4762                 {
4763                     if (mState != STOPPING) {
4764                         ALOGW("Received kWhatStopCompleted in state %d/%s",
4765                               mState, stateString(mState).c_str());
4766                         break;
4767                     }
4768 
4769                     if (mIsSurfaceToDisplay) {
4770                         mVideoRenderQualityTracker.resetForDiscontinuity();
4771                     }
4772 
4773                     // Notify the RM that the codec has been stopped.
4774                     ClientConfigParcel clientConfig;
4775                     initClientConfigParcel(clientConfig);
4776                     mResourceManagerProxy->notifyClientStopped(clientConfig);
4777 
4778                     setState(INITIALIZED);
4779                     if (mReplyID) {
4780                         postPendingRepliesAndDeferredMessages("kWhatStopCompleted");
4781                     } else {
4782                         ALOGW("kWhatStopCompleted: presumably an error occurred earlier, "
4783                               "but the operation completed anyway. (last reply origin=%s)",
4784                               mLastReplyOrigin.c_str());
4785                     }
4786                     break;
4787                 }
4788 
4789                 case kWhatReleaseCompleted:
4790                 {
4791                     if (mState != RELEASING) {
4792                         ALOGW("Received kWhatReleaseCompleted in state %d/%s",
4793                               mState, stateString(mState).c_str());
4794                         break;
4795                     }
4796                     setState(UNINITIALIZED);
4797                     mComponentName.clear();
4798 
4799                     mFlags &= ~kFlagIsComponentAllocated;
4800 
4801                     // battery off, since we're removing all resources including battery-on
4802                     if (mBatteryChecker != nullptr) {
4803                         mBatteryChecker->onClientRemoved();
4804                     }
4805 
4806                     mResourceManagerProxy->removeClient();
4807                     mDetachedSurface.reset();
4808 
4809                     if (mReplyID != nullptr) {
4810                         postPendingRepliesAndDeferredMessages("kWhatReleaseCompleted");
4811                     }
4812                     if (mAsyncReleaseCompleteNotification != nullptr) {
4813                         flushMediametrics();
4814                         mAsyncReleaseCompleteNotification->post();
4815                         mAsyncReleaseCompleteNotification.clear();
4816                     }
4817                     break;
4818                 }
4819 
4820                 case kWhatFlushCompleted:
4821                 {
4822                     if (mState != FLUSHING) {
4823                         ALOGW("received FlushCompleted message in state %d/%s",
4824                                 mState, stateString(mState).c_str());
4825                         break;
4826                     }
4827 
4828                     if (mIsSurfaceToDisplay) {
4829                         mVideoRenderQualityTracker.resetForDiscontinuity();
4830                     }
4831 
4832                     if (mFlags & kFlagIsAsync) {
4833                         setState(FLUSHED);
4834                     } else {
4835                         setState(STARTED);
4836                         mCodec->signalResume();
4837                     }
4838                     mReliabilityContextMetrics.flushCount++;
4839 
4840                     postPendingRepliesAndDeferredMessages("kWhatFlushCompleted");
4841                     break;
4842                 }
4843 
4844                 default:
4845                     TRESPASS();
4846             }
4847             break;
4848         }
4849 
4850         case kWhatInit:
4851         {
4852             if (mState != UNINITIALIZED) {
4853                 PostReplyWithError(msg, INVALID_OPERATION);
4854                 break;
4855             }
4856 
4857             if (mReplyID) {
4858                 mDeferredMessages.push_back(msg);
4859                 break;
4860             }
4861             sp<AReplyToken> replyID;
4862             CHECK(msg->senderAwaitsResponse(&replyID));
4863 
4864             mReplyID = replyID;
4865             setState(INITIALIZING);
4866 
4867             sp<RefBase> codecInfo;
4868             (void)msg->findObject("codecInfo", &codecInfo);
4869             AString name;
4870             CHECK(msg->findString("name", &name));
4871 
4872             sp<AMessage> format = new AMessage;
4873             if (codecInfo) {
4874                 format->setObject("codecInfo", codecInfo);
4875             }
4876             format->setString("componentName", name);
4877 
4878             mCodec->initiateAllocateComponent(format);
4879             break;
4880         }
4881 
4882         case kWhatSetNotification:
4883         {
4884             sp<AMessage> notify;
4885             if (msg->findMessage("on-frame-rendered", &notify)) {
4886                 mOnFrameRenderedNotification = notify;
4887             }
4888             if (msg->findMessage("first-tunnel-frame-ready", &notify)) {
4889                 mOnFirstTunnelFrameReadyNotification = notify;
4890             }
4891             break;
4892         }
4893 
4894         case kWhatSetCallback:
4895         {
4896             sp<AReplyToken> replyID;
4897             CHECK(msg->senderAwaitsResponse(&replyID));
4898 
4899             if (mState == UNINITIALIZED
4900                     || mState == INITIALIZING
4901                     || isExecuting()) {
4902                 // callback can't be set after codec is executing,
4903                 // or before it's initialized (as the callback
4904                 // will be cleared when it goes to INITIALIZED)
4905                 mErrorLog.log(LOG_TAG, base::StringPrintf(
4906                         "setCallback() is valid only at Initialized state; currently %s",
4907                         apiStateString().c_str()));
4908                 PostReplyWithError(replyID, INVALID_OPERATION);
4909                 break;
4910             }
4911 
4912             sp<AMessage> callback;
4913             CHECK(msg->findMessage("callback", &callback));
4914 
4915             mCallback = callback;
4916 
4917             if (mCallback != NULL) {
4918                 ALOGI("MediaCodec will operate in async mode");
4919                 mFlags |= kFlagIsAsync;
4920             } else {
4921                 mFlags &= ~kFlagIsAsync;
4922             }
4923 
4924             sp<AMessage> response = new AMessage;
4925             response->postReply(replyID);
4926             break;
4927         }
4928 
4929         case kWhatGetMetrics:
4930         {
4931             onGetMetrics(msg);
4932             break;
4933         }
4934 
4935 
4936         case kWhatConfigure:
4937         {
4938             if (mState != INITIALIZED) {
4939                 mErrorLog.log(LOG_TAG, base::StringPrintf(
4940                         "configure() is valid only at Initialized state; currently %s",
4941                         apiStateString().c_str()));
4942                 PostReplyWithError(msg, INVALID_OPERATION);
4943                 break;
4944             }
4945 
4946             if (mReplyID) {
4947                 mDeferredMessages.push_back(msg);
4948                 break;
4949             }
4950             sp<AReplyToken> replyID;
4951             CHECK(msg->senderAwaitsResponse(&replyID));
4952 
4953             sp<RefBase> obj;
4954             CHECK(msg->findObject("surface", &obj));
4955 
4956             sp<AMessage> format;
4957             CHECK(msg->findMessage("format", &format));
4958 
4959             // start with a copy of the passed metrics info for use in this run
4960             mediametrics_handle_t handle;
4961             CHECK(msg->findInt64("metrics", &handle));
4962             if (handle != 0) {
4963                 if (mMetricsHandle != 0) {
4964                     flushMediametrics();
4965                 }
4966                 mMetricsHandle = mediametrics_dup(handle);
4967                 // and set some additional metrics values
4968                 initMediametrics();
4969             }
4970 
4971             // from this point forward, in this configure/use/release lifecycle, we want to
4972             // upload our data
4973             mMetricsToUpload = true;
4974 
4975             int32_t push;
4976             if (msg->findInt32("push-blank-buffers-on-shutdown", &push) && push != 0) {
4977                 mFlags |= kFlagPushBlankBuffersOnShutdown;
4978             }
4979 
4980             uint32_t flags;
4981             CHECK(msg->findInt32("flags", (int32_t *)&flags));
4982 
4983             if (android::media::codec::provider_->null_output_surface_support()) {
4984                 if (obj == nullptr
4985                         && (flags & CONFIGURE_FLAG_DETACHED_SURFACE)
4986                         && !(flags & CONFIGURE_FLAG_ENCODE)) {
4987                     sp<Surface> surface = getOrCreateDetachedSurface();
4988                     if (surface == nullptr) {
4989                         mErrorLog.log(
4990                                 LOG_TAG, "Detached surface mode is not supported by this codec");
4991                         PostReplyWithError(replyID, INVALID_OPERATION);
                             break;
4992                     }
4993                     obj = surface;
4994                 }
4995             }
4996 
4997             if (obj != NULL) {
4998                 if (!format->findInt32(KEY_ALLOW_FRAME_DROP, &mAllowFrameDroppingBySurface)) {
4999                     // allow frame dropping by surface by default
5000                     mAllowFrameDroppingBySurface = true;
5001                 }
5002 
5003                 format->setObject("native-window", obj);
5004                 status_t err = handleSetSurface(static_cast<Surface *>(obj.get()));
5005                 if (err != OK) {
5006                     PostReplyWithError(replyID, err);
5007                     break;
5008                 }
5009                 uint32_t generation = mSurfaceGeneration;
5010                 format->setInt32("native-window-generation", generation);
5011             } else {
5012                 // we are not using a surface, so this variable is unused; initialize it sensibly anyway
5013                 mAllowFrameDroppingBySurface = false;
5014 
5015                 handleSetSurface(NULL);
5016             }
5017 
5018             mApiUsageMetrics.isUsingOutputSurface = true;
5019 
5020             if (flags & CONFIGURE_FLAG_USE_BLOCK_MODEL ||
5021                 flags & CONFIGURE_FLAG_USE_CRYPTO_ASYNC) {
5022                 if (!(mFlags & kFlagIsAsync)) {
5023                     mErrorLog.log(LOG_TAG,
5024                             "Block model and crypto-async are only valid with a callback set (async mode)");
5025                     PostReplyWithError(replyID, INVALID_OPERATION);
5026                     break;
5027                 }
5028                 if (flags & CONFIGURE_FLAG_USE_BLOCK_MODEL) {
5029                     mFlags |= kFlagUseBlockModel;
5030                 }
5031                 if (flags & CONFIGURE_FLAG_USE_CRYPTO_ASYNC) {
5032                     mFlags |= kFlagUseCryptoAsync;
5033                     if ((mFlags & kFlagUseBlockModel)) {
5034                         ALOGW("CryptoAsync not yet enabled for block model, "
5035                                 "falling back to normal");
5036                     }
5037                 }
5038             }
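                 // Validate large-audio-frame (buffer batching) parameters: these are
                 // currently only supported for C2 (non-OMX) audio codecs in async mode.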
5039             int32_t largeFrameParamMax = 0, largeFrameParamThreshold = 0;
5040             if (format->findInt32(KEY_BUFFER_BATCH_MAX_OUTPUT_SIZE, &largeFrameParamMax) ||
5041                     format->findInt32(KEY_BUFFER_BATCH_THRESHOLD_OUTPUT_SIZE,
5042                     &largeFrameParamThreshold)) {
5043                 if (largeFrameParamMax > 0 || largeFrameParamThreshold > 0) {
5044                     if (mComponentName.startsWith("OMX")) {
5045                         mErrorLog.log(LOG_TAG,
5046                                 "Large Frame params are not supported on OMX codecs. "
5047                                 "Currently they are only supported on C2 audio codecs.");
5048                         PostReplyWithError(replyID, INVALID_OPERATION);
5049                         break;
5050                     }
5051                     AString mime;
5052                     CHECK(format->findString("mime", &mime));
5053                     if (!mime.startsWith("audio")) {
5054                         mErrorLog.log(LOG_TAG,
5055                                 "Large Frame params only work with audio codecs");
5056                         PostReplyWithError(replyID, INVALID_OPERATION);
5057                         break;
5058                     }
5059                     if (!(mFlags & kFlagIsAsync)) {
5060                         mErrorLog.log(LOG_TAG,
5061                                 "Large Frame audio config works only with async mode");
5062                         PostReplyWithError(replyID, INVALID_OPERATION);
5063                         break;
5064                     }
5065                 }
5066             }
5067 
5068             mReplyID = replyID;
5069             setState(CONFIGURING);
5070 
5071             void *crypto;
5072             if (!msg->findPointer("crypto", &crypto)) {
5073                 crypto = NULL;
5074             }
5075 
5076             ALOGV("kWhatConfigure: Old mCrypto: %p (%d)",
5077                     mCrypto.get(), (mCrypto != NULL ? mCrypto->getStrongCount() : 0));
5078 
5079             mCrypto = static_cast<ICrypto *>(crypto);
5080             mBufferChannel->setCrypto(mCrypto);
5081 
5082             ALOGV("kWhatConfigure: New mCrypto: %p (%d)",
5083                     mCrypto.get(), (mCrypto != NULL ? mCrypto->getStrongCount() : 0));
5084 
5085             void *descrambler;
5086             if (!msg->findPointer("descrambler", &descrambler)) {
5087                 descrambler = NULL;
5088             }
5089 
5090             mDescrambler = static_cast<IDescrambler *>(descrambler);
5091             mBufferChannel->setDescrambler(mDescrambler);
5092             if ((mFlags & kFlagUseCryptoAsync) && mCrypto) {
5093                 // kFlagUseCryptoAsync is set, but do not use CryptoAsync for block model;
5094                 // the flag is kept so the error can be propagated in onCryptoError().
5095                 // TODO (b/274628160): Enable use of CONFIGURE_FLAG_USE_CRYPTO_ASYNC
5096                 //                     with CONFIGURE_FLAG_USE_BLOCK_MODEL
5097                 if (!(mFlags & kFlagUseBlockModel)) {
5098                     mCryptoAsync = new CryptoAsync(mBufferChannel);
5099                     mCryptoAsync->setCallback(std::make_unique<CryptoAsyncCallback>(
5100                             new AMessage(kWhatCodecNotify, this)));
5101                     mCryptoLooper = new ALooper();
5102                     mCryptoLooper->setName("CryptoAsyncLooper");
5103                     mCryptoLooper->registerHandler(mCryptoAsync);
5104                     status_t err = mCryptoLooper->start();
5105                     if (err != OK) {
5106                         ALOGE("Crypto Looper failed to start");
5107                         mCryptoAsync = nullptr;
5108                         mCryptoLooper = nullptr;
5109                     }
5110                 }
5111             }
5112 
5113             format->setInt32("flags", flags);
5114             if (flags & CONFIGURE_FLAG_ENCODE) {
5115                 format->setInt32("encoder", true);
5116                 mFlags |= kFlagIsEncoder;
5117             }
5118 
5119             extractCSD(format);
5120 
5121             int32_t tunneled;
5122             if (format->findInt32("feature-tunneled-playback", &tunneled) && tunneled != 0) {
5123                 ALOGI("Configuring TUNNELED video playback.");
5124                 mTunneled = true;
5125             } else {
5126                 mTunneled = false;
5127             }
5128             mediametrics_setInt32(mMetricsHandle, kCodecTunneled, mTunneled ? 1 : 0);
5129 
5130             int32_t background = 0;
5131             if (format->findInt32("android._background-mode", &background) && background) {
5132                 androidSetThreadPriority(gettid(), ANDROID_PRIORITY_BACKGROUND);
5133             }
5134 
5135             mCodec->initiateConfigureComponent(format);
5136             break;
5137         }
5138 
5139         case kWhatDetachSurface:
5140         {
5141             // detach surface is equivalent to setSurface(mDetachedSurface)
5142             sp<Surface> surface = getOrCreateDetachedSurface();
5143 
5144             if (surface == nullptr) {
5145                 sp<AReplyToken> replyID;
5146                 CHECK(msg->senderAwaitsResponse(&replyID));
5147                 mErrorLog.log(LOG_TAG, "Detaching surface is not supported by the codec.");
5148                 PostReplyWithError(replyID, INVALID_OPERATION);
5149                 break;
5150             }
5151 
5152             msg->setObject("surface", surface);
5153         }
5154         [[fallthrough]];
5155 
5156         case kWhatSetSurface:
5157         {
5158             sp<AReplyToken> replyID;
5159             CHECK(msg->senderAwaitsResponse(&replyID));
5160 
5161             status_t err = OK;
5162 
5163             switch (mState) {
5164                 case CONFIGURED:
5165                 case STARTED:
5166                 case FLUSHED:
5167                 {
5168                     sp<RefBase> obj;
5169                     (void)msg->findObject("surface", &obj);
5170                     sp<Surface> surface = static_cast<Surface *>(obj.get());
5171                     if (mSurface == NULL) {
5172                         // do not support setting surface if it was not set
5173                         mErrorLog.log(LOG_TAG, base::StringPrintf(
5174                                       "Cannot %s surface if the codec is not configured with "
5175                                       "a surface already",
5176                                       msg->what() == kWhatDetachSurface ? "detach" : "set"));
5177                         err = INVALID_OPERATION;
5178                     } else if (obj == NULL) {
5179                         // do not support unsetting surface
5180                         mErrorLog.log(LOG_TAG, "Unsetting surface is not supported");
5181                         err = BAD_VALUE;
5182                     } else if (android::media::codec::provider_->null_output_surface_support()) {
5183                         err = handleSetSurface(surface, true /* callCodec */);
5184                     } else {
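                             // Legacy surface-switch path: connect to the new surface first,
                             // and only disconnect the old one after the codec accepts the switch.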
5185                         uint32_t generation;
5186                         err = connectToSurface(surface, &generation);
5187                         if (err == ALREADY_EXISTS) {
5188                             // reconnecting to same surface
5189                             err = OK;
5190                         } else {
5191                             if (err == OK) {
5192                                 if (mFlags & kFlagUsesSoftwareRenderer) {
5193                                     if (mSoftRenderer != NULL
5194                                             && (mFlags & kFlagPushBlankBuffersOnShutdown)) {
5195                                         pushBlankBuffersToNativeWindow(mSurface.get());
5196                                     }
5197                                     surface->setDequeueTimeout(-1);
5198                                     mSoftRenderer = new SoftwareRenderer(surface);
5199                                     // TODO: check if this was successful
5200                                 } else {
5201                                     err = mCodec->setSurface(surface, generation);
5202                                 }
5203                             }
5204                             if (err == OK) {
5205                                 (void)disconnectFromSurface();
5206                                 mSurface = surface;
5207                                 mSurfaceGeneration = generation;
5208                             }
5209                             mReliabilityContextMetrics.setOutputSurfaceCount++;
5210                         }
5211                     }
5212                     break;
5213                 }
5214 
5215                 default:
5216                     mErrorLog.log(LOG_TAG, base::StringPrintf(
5217                             "%sSurface() is valid only at Executing states; currently %s",
5218                             msg->what() == kWhatDetachSurface ? "detach" : "set",
5219                             apiStateString().c_str()));
5220                     err = INVALID_OPERATION;
5221                     break;
5222             }
5223 
5224             PostReplyWithError(replyID, err);
5225             break;
5226         }
5227 
5228         case kWhatCreateInputSurface:
5229         case kWhatSetInputSurface:
5230         {
5231             // Must be configured, but can't have been started yet.
5232             if (mState != CONFIGURED) {
5233                 mErrorLog.log(LOG_TAG, base::StringPrintf(
5234                         "setInputSurface() is valid only at Configured state; currently %s",
5235                         apiStateString().c_str()));
5236                 PostReplyWithError(msg, INVALID_OPERATION);
5237                 break;
5238             }
5239 
5240             if (mReplyID) {
5241                 mDeferredMessages.push_back(msg);
5242                 break;
5243             }
5244             sp<AReplyToken> replyID;
5245             CHECK(msg->senderAwaitsResponse(&replyID));
5246 
5247             mReplyID = replyID;
5248             if (msg->what() == kWhatCreateInputSurface) {
5249                 mCodec->initiateCreateInputSurface();
5250             } else {
5251                 sp<RefBase> obj;
5252                 CHECK(msg->findObject("input-surface", &obj));
5253 
5254                 mCodec->initiateSetInputSurface(
5255                         static_cast<PersistentSurface *>(obj.get()));
5256             }
5257             break;
5258         }
5259         case kWhatStart:
5260         {
5261             if (mState == FLUSHED) {
5262                 setState(STARTED);
5263                 if (mHavePendingInputBuffers) {
5264                     onInputBufferAvailable();
5265                     mHavePendingInputBuffers = false;
5266                 }
5267                 mCodec->signalResume();
5268                 PostReplyWithError(msg, OK);
5269                 break;
5270             } else if (mState != CONFIGURED) {
5271                 mErrorLog.log(LOG_TAG, base::StringPrintf(
5272                         "start() is valid only at Configured state; currently %s",
5273                         apiStateString().c_str()));
5274                 PostReplyWithError(msg, INVALID_OPERATION);
5275                 break;
5276             }
5277 
5278             if (mReplyID) {
5279                 mDeferredMessages.push_back(msg);
5280                 break;
5281             }
5282             sp<AReplyToken> replyID;
5283             CHECK(msg->senderAwaitsResponse(&replyID));
5284             TunnelPeekState previousState = mTunnelPeekState;
5285             if (previousState != TunnelPeekState::kLegacyMode) {
5286                 mTunnelPeekState = mTunnelPeekEnabled ? TunnelPeekState::kEnabledNoBuffer :
5287                     TunnelPeekState::kDisabledNoBuffer;
5288                 ALOGV("TunnelPeekState: %s -> %s",
5289                         asString(previousState),
5290                         asString(mTunnelPeekState));
5291             }
5292 
5293             mReplyID = replyID;
5294             setState(STARTING);
5295 
5296             mCodec->initiateStart();
5297             break;
5298         }
5299 
5300         case kWhatStop: {
5301             if (mReplyID) {
5302                 mDeferredMessages.push_back(msg);
5303                 break;
5304             }
5305             [[fallthrough]];
5306         }
5307         case kWhatRelease:
5308         {
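                 // stop() keeps the component allocated and targets INITIALIZED;
                 // release() frees the component and targets UNINITIALIZED.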
5309             State targetState =
5310                 (msg->what() == kWhatStop) ? INITIALIZED : UNINITIALIZED;
5311 
5312             if ((mState == RELEASING && targetState == UNINITIALIZED)
5313                     || (mState == STOPPING && targetState == INITIALIZED)) {
5314                 mDeferredMessages.push_back(msg);
5315                 break;
5316             }
5317 
5318             sp<AReplyToken> replyID;
5319             CHECK(msg->senderAwaitsResponse(&replyID));
5320             if (mCryptoAsync) {
5321                 mCryptoAsync->stop();
5322             }
5323             sp<AMessage> asyncNotify;
5324             (void)msg->findMessage("async", &asyncNotify);
5325             // post asyncNotify if going out of scope.
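                 // (RAII guard: any early break below still posts the async notification
                 // to the client, unless clear() is called first.)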
5326             struct AsyncNotifyPost {
5327                 AsyncNotifyPost(const sp<AMessage> &asyncNotify) : mAsyncNotify(asyncNotify) {}
5328                 ~AsyncNotifyPost() {
5329                     if (mAsyncNotify) {
5330                         mAsyncNotify->post();
5331                     }
5332                 }
5333                 void clear() { mAsyncNotify.clear(); }
5334             private:
5335                 sp<AMessage> mAsyncNotify;
5336             } asyncNotifyPost{asyncNotify};
5337 
5338             // already stopped/released
5339             if (mState == UNINITIALIZED && mReleasedByResourceManager) {
5340                 sp<AMessage> response = new AMessage;
5341                 response->setInt32("err", OK);
5342                 response->postReply(replyID);
5343                 break;
5344             }
5345 
5346             int32_t reclaimed = 0;
5347             msg->findInt32("reclaimed", &reclaimed);
5348             if (reclaimed) {
5349                 if (!mReleasedByResourceManager) {
5350                     // notify the async client
5351                     if (mFlags & kFlagIsAsync) {
5352                         onError(DEAD_OBJECT, ACTION_CODE_FATAL);
5353                     }
5354                     mErrorLog.log(LOG_TAG, "Released by resource manager");
5355                     mReleasedByResourceManager = true;
5356                 }
5357 
5358                 int32_t force = 0;
5359                 msg->findInt32("force", &force);
5360                 if (!force && hasPendingBuffer()) {
5361                     ALOGW("Can't reclaim codec right now due to pending buffers.");
5362 
5363                     // return WOULD_BLOCK to ask resource manager to retry later.
5364                     sp<AMessage> response = new AMessage;
5365                     response->setInt32("err", WOULD_BLOCK);
5366                     response->postReply(replyID);
5367 
5368                     break;
5369                 }
5370             }
5371 
5372             bool isReleasingAllocatedComponent =
5373                     (mFlags & kFlagIsComponentAllocated) && targetState == UNINITIALIZED;
5374             if (!isReleasingAllocatedComponent // See 1
5375                     && mState != INITIALIZED
5376                     && mState != CONFIGURED && !isExecuting()) {
5377                 // 1) Permit release to shut down the component if allocated.
5378                 //
5379                 // 2) We may already be in the "UNINITIALIZED" state and may have
5380                 // shut down the encoder/decoder without the client being aware
5381                 // of it, if mediaserver died while we were being stopped. The
5382                 // client would assume that after stop() returned, it would be
5383                 // safe to call release(), and it should be in this case: there
5384                 // is no harm in allowing a release() if we're already
5385                 // uninitialized.
5386                 sp<AMessage> response = new AMessage;
5387                 // TODO: we shouldn't throw an exception for stop/release. Change this to wait until
5388                 // the previous stop/release completes and then reply with OK.
5389                 status_t err = mState == targetState ? OK : INVALID_OPERATION;
5390                 response->setInt32("err", err);
5391                 // TODO: mErrorLog
5392                 if (err == OK && targetState == UNINITIALIZED) {
5393                     mComponentName.clear();
5394                 }
5395                 response->postReply(replyID);
5396                 break;
5397             }
5398 
5399             // If we're flushing, configuring, or starting but
5400             // received a release request, post the reply for the pending call
5401             // first, and consider it done. The reply token will be replaced
5402             // after this, and we'll no longer be able to reply.
5403             if (mState == FLUSHING || mState == CONFIGURING || mState == STARTING) {
5404                 // mReplyID is always set in these states.
5405                 postPendingRepliesAndDeferredMessages(
5406                         std::string("kWhatRelease:") + stateString(mState));
5407             }
5408             // If we're stopping but received a release request, post the reply
5409             // for the pending call if necessary. Note that the reply may have been
5410             // already posted due to an error.
5411             if (mState == STOPPING && mReplyID) {
5412                 postPendingRepliesAndDeferredMessages("kWhatRelease:STOPPING");
5413             }
5414 
5415             if (mFlags & kFlagSawMediaServerDie) {
5416                 // It's dead, Jim. Don't expect initiateShutdown to yield
5417                 // any useful results now...
5418                 // Any pending reply would have been handled at kWhatError.
5419                 setState(UNINITIALIZED);
5420                 if (targetState == UNINITIALIZED) {
5421                     mComponentName.clear();
5422                 }
5423                 (new AMessage)->postReply(replyID);
5424                 break;
5425             }
5426 
5427             // If we already have an error, component may not be able to
5428             // complete the shutdown properly. If we're stopping, post the
5429             // reply now with an error to unblock the client, client can
5430             // release after the failure (instead of ANR).
5431             if (msg->what() == kWhatStop && (mFlags & kFlagStickyError)) {
5432                 // Any pending reply would have been handled at kWhatError.
5433                 PostReplyWithError(replyID, getStickyError());
5434                 break;
5435             }
5436 
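                 // For an async release while an output surface is attached, switch the
                 // codec to a detached/placeholder surface first so the client's surface
                 // is freed right away; if the switch fails, fall back to a synchronous
                 // release.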
5437             bool forceSync = false;
5438             if (asyncNotify != nullptr && mSurface != NULL) {
5439                 if (android::media::codec::provider_->null_output_surface_support()) {
5440                     if (handleSetSurface(getOrCreateDetachedSurface(), true /* callCodec */,
5441                                          true /* onShutDown */) != OK) {
5442                         // We were not able to detach the surface, so force
5443                         // synchronous release.
5444                         forceSync = true;
5445                     }
5446                 } else {
5447                     if (!mDetachedSurface) {
5448                         uint64_t usage = 0;
5449                         if (mSurface->getConsumerUsage(&usage) != OK) {
5450                             usage = 0;
5451                         }
5452                         mDetachedSurface.reset(new ReleaseSurface(usage));
5453                     }
5454                     if (mSurface != mDetachedSurface->getSurface()) {
5455                         uint32_t generation;
5456                         status_t err =
5457                             connectToSurface(mDetachedSurface->getSurface(), &generation);
5458                         ALOGW_IF(err != OK, "error connecting to release surface: err = %d", err);
5459                         if (err == OK && !(mFlags & kFlagUsesSoftwareRenderer)) {
5460                             err = mCodec->setSurface(mDetachedSurface->getSurface(), generation);
5461                             ALOGW_IF(err != OK, "error setting release surface: err = %d", err);
5462                         }
5463                         if (err == OK) {
5464                             (void)disconnectFromSurface();
5465                             mSurface = mDetachedSurface->getSurface();
5466                             mSurfaceGeneration = generation;
5467                         } else {
5468                             // We were not able to switch the surface, so force
5469                             // synchronous release.
5470                             forceSync = true;
5471                         }
5472                     }
5473                 }
5474             }
5475 
5476             if (mReplyID) {
5477                 // State transition replies are handled above, so this reply
5478                 // would not be related to state transition. As we are
5479                 // shutting down the component, just fail the operation.
5480                 postPendingRepliesAndDeferredMessages("kWhatRelease:reply", UNKNOWN_ERROR);
5481             }
5482             mReplyID = replyID;
5483             setState(msg->what() == kWhatStop ? STOPPING : RELEASING);
5484 
5485             mCodec->initiateShutdown(
5486                     msg->what() == kWhatStop /* keepComponentAllocated */);
5487 
5488             returnBuffersToCodec(reclaimed);
5489 
5490             if (mSoftRenderer != NULL && (mFlags & kFlagPushBlankBuffersOnShutdown)) {
5491                 pushBlankBuffersToNativeWindow(mSurface.get());
5492             }
5493 
5494             if (asyncNotify != nullptr) {
5495                 if (!forceSync) {
5496                     mResourceManagerProxy->markClientForPendingRemoval();
5497                     postPendingRepliesAndDeferredMessages("kWhatRelease:async");
5498                 }
5499                 asyncNotifyPost.clear();
5500                 mAsyncReleaseCompleteNotification = asyncNotify;
5501             }
5502 
5503             break;
5504         }
5505 
5506         case kWhatDequeueInputBuffer:
5507         {
5508             sp<AReplyToken> replyID;
5509             CHECK(msg->senderAwaitsResponse(&replyID));
5510 
5511             if (mFlags & kFlagIsAsync) {
5512                 mErrorLog.log(LOG_TAG, "dequeueInputBuffer can't be used in async mode");
5513                 PostReplyWithError(replyID, INVALID_OPERATION);
5514                 break;
5515             }
5516 
5517             if (mHaveInputSurface) {
5518                 mErrorLog.log(LOG_TAG, "dequeueInputBuffer can't be used with input surface");
5519                 PostReplyWithError(replyID, INVALID_OPERATION);
5520                 break;
5521             }
5522 
5523             if (handleDequeueInputBuffer(replyID, true /* new request */)) {
5524                 break;
5525             }
5526 
5527             int64_t timeoutUs;
5528             CHECK(msg->findInt64("timeoutUs", &timeoutUs));
5529 
5530             if (timeoutUs == 0LL) {
5531                 PostReplyWithError(replyID, -EAGAIN);
5532                 break;
5533             }
5534 
5535             mFlags |= kFlagDequeueInputPending;
5536             mDequeueInputReplyID = replyID;
5537 
5538             if (timeoutUs > 0LL) {
5539                 sp<AMessage> timeoutMsg =
5540                     new AMessage(kWhatDequeueInputTimedOut, this);
5541                 timeoutMsg->setInt32(
5542                         "generation", ++mDequeueInputTimeoutGeneration);
5543                 timeoutMsg->post(timeoutUs);
5544             }
5545             break;
5546         }
5547 
5548         case kWhatDequeueInputTimedOut:
5549         {
5550             int32_t generation;
5551             CHECK(msg->findInt32("generation", &generation));
5552 
5553             if (generation != mDequeueInputTimeoutGeneration) {
5554                 // Obsolete
5555                 break;
5556             }
5557 
5558             CHECK(mFlags & kFlagDequeueInputPending);
5559 
5560             PostReplyWithError(mDequeueInputReplyID, -EAGAIN);
5561 
5562             mFlags &= ~kFlagDequeueInputPending;
5563             mDequeueInputReplyID = 0;
5564             break;
5565         }
5566 
5567         case kWhatQueueInputBuffer:
5568         {
5569             sp<AReplyToken> replyID;
5570             CHECK(msg->senderAwaitsResponse(&replyID));
5571 
5572             if (!isExecuting()) {
5573                 mErrorLog.log(LOG_TAG, base::StringPrintf(
5574                         "queueInputBuffer() is valid only at Executing states; currently %s",
5575                         apiStateString().c_str()));
5576                 PostReplyWithError(replyID, INVALID_OPERATION);
5577                 break;
5578             } else if (mFlags & kFlagStickyError) {
5579                 PostReplyWithError(replyID, getStickyError());
5580                 break;
5581             }
5582 
5583             status_t err = UNKNOWN_ERROR;
5584             if (!mLeftover.empty()) {
5585                 mLeftover.push_back(msg);
5586                 size_t index;
5587                 msg->findSize("index", &index);
5588                 err = handleLeftover(index);
5589             } else {
5590                 err = onQueueInputBuffer(msg);
5591             }
5592 
5593             PostReplyWithError(replyID, err);
5594             break;
5595         }
5596 
5597         case kWhatDequeueOutputBuffer:
5598         {
5599             sp<AReplyToken> replyID;
5600             CHECK(msg->senderAwaitsResponse(&replyID));
5601 
5602             if (mFlags & kFlagIsAsync) {
5603                 mErrorLog.log(LOG_TAG, "dequeueOutputBuffer can't be used in async mode");
5604                 PostReplyWithError(replyID, INVALID_OPERATION);
5605                 break;
5606             }
5607 
5608             DequeueOutputResult dequeueResult =
5609                 handleDequeueOutputBuffer(replyID, true /* new request */);
5610             switch (dequeueResult) {
5611                 case DequeueOutputResult::kNoBuffer:
5612                     [[fallthrough]];
5613                 case DequeueOutputResult::kDiscardedBuffer:
5614                 {
5615                     int64_t timeoutUs;
5616                     CHECK(msg->findInt64("timeoutUs", &timeoutUs));
5617 
5618                     if (timeoutUs == 0LL) {
5619                         PostReplyWithError(replyID, -EAGAIN);
5620                         break;
5621                     }
5622 
5623                     mFlags |= kFlagDequeueOutputPending;
5624                     mDequeueOutputReplyID = replyID;
5625 
5626                     if (timeoutUs > 0LL) {
5627                         sp<AMessage> timeoutMsg =
5628                             new AMessage(kWhatDequeueOutputTimedOut, this);
5629                         timeoutMsg->setInt32(
5630                                 "generation", ++mDequeueOutputTimeoutGeneration);
5631                         timeoutMsg->post(timeoutUs);
5632                     }
5633                     break;
5634                 }
5635                 case DequeueOutputResult::kRepliedWithError:
5636                     [[fallthrough]];
5637                 case DequeueOutputResult::kSuccess:
5638                     break;
5639                 default:
5640                     TRESPASS();
5641             }
5642             break;
5643         }
5644 
5645         case kWhatDequeueOutputTimedOut:
5646         {
5647             int32_t generation;
5648             CHECK(msg->findInt32("generation", &generation));
5649 
5650             if (generation != mDequeueOutputTimeoutGeneration) {
5651                 // Obsolete
5652                 break;
5653             }
5654 
5655             CHECK(mFlags & kFlagDequeueOutputPending);
5656 
5657             PostReplyWithError(mDequeueOutputReplyID, -EAGAIN);
5658 
5659             mFlags &= ~kFlagDequeueOutputPending;
5660             mDequeueOutputReplyID = 0;
5661             break;
5662         }
5663 
5664         case kWhatReleaseOutputBuffer:
5665         {
5666             sp<AReplyToken> replyID;
5667             CHECK(msg->senderAwaitsResponse(&replyID));
5668 
5669             if (!isExecuting()) {
5670                 mErrorLog.log(LOG_TAG, base::StringPrintf(
5671                         "releaseOutputBuffer() is valid only at Executing states; currently %s",
5672                         apiStateString().c_str()));
5673                 PostReplyWithError(replyID, INVALID_OPERATION);
5674                 break;
5675             } else if (mFlags & kFlagStickyError) {
5676                 PostReplyWithError(replyID, getStickyError());
5677                 break;
5678             }
5679 
5680             status_t err = onReleaseOutputBuffer(msg);
5681 
5682             PostReplyWithError(replyID, err);
5683             break;
5684         }
5685 
5686         case kWhatPollForRenderedBuffers:
5687         {
5688             if (isExecuting()) {
5689                 mBufferChannel->pollForRenderedBuffers();
5690             }
5691             break;
5692         }
5693 
5694         case kWhatSignalEndOfInputStream:
5695         {
5696             if (!isExecuting()) {
5697                 mErrorLog.log(LOG_TAG, base::StringPrintf(
5698                         "signalEndOfInputStream() is valid only at Executing states; currently %s",
5699                         apiStateString().c_str()));
5700                 PostReplyWithError(msg, INVALID_OPERATION);
5701                 break;
5702             } else if (!mHaveInputSurface) {
5703                 mErrorLog.log(
5704                         LOG_TAG, "signalEndOfInputStream() called without an input surface set");
5705                 PostReplyWithError(msg, INVALID_OPERATION);
5706                 break;
5707             } else if (mFlags & kFlagStickyError) {
5708                 PostReplyWithError(msg, getStickyError());
5709                 break;
5710             }
5711 
5712             if (mReplyID) {
5713                 mDeferredMessages.push_back(msg);
5714                 break;
5715             }
5716             sp<AReplyToken> replyID;
5717             CHECK(msg->senderAwaitsResponse(&replyID));
5718 
5719             mReplyID = replyID;
5720             mCodec->signalEndOfInputStream();
5721             break;
5722         }
5723 
5724         case kWhatGetBuffers:
5725         {
5726             sp<AReplyToken> replyID;
5727             CHECK(msg->senderAwaitsResponse(&replyID));
5728             if (!isExecuting()) {
5729                 mErrorLog.log(LOG_TAG, base::StringPrintf(
5730                         "getInput/OutputBuffers() is valid only at Executing states; currently %s",
5731                         apiStateString().c_str()));
5732                 PostReplyWithError(replyID, INVALID_OPERATION);
5733                 break;
5734             } else if (mFlags & kFlagIsAsync) {
5735                 mErrorLog.log(LOG_TAG, "getInput/OutputBuffers() is not supported with callbacks");
5736                 PostReplyWithError(replyID, INVALID_OPERATION);
5737                 break;
5738             } else if (mFlags & kFlagStickyError) {
5739                 PostReplyWithError(replyID, getStickyError());
5740                 break;
5741             }
5742 
5743             int32_t portIndex;
5744             CHECK(msg->findInt32("portIndex", &portIndex));
5745 
5746             Vector<sp<MediaCodecBuffer> > *dstBuffers;
5747             CHECK(msg->findPointer("buffers", (void **)&dstBuffers));
5748 
5749             dstBuffers->clear();
5750             // If we're using input surface (either non-persistent created by
5751             // createInputSurface(), or persistent set by setInputSurface()),
5752             // give the client an empty input buffers array.
5753             if (portIndex != kPortIndexInput || !mHaveInputSurface) {
5754                 if (portIndex == kPortIndexInput) {
5755                     mBufferChannel->getInputBufferArray(dstBuffers);
5756                 } else {
5757                     mBufferChannel->getOutputBufferArray(dstBuffers);
5758                 }
5759             }
5760 
5761             mApiUsageMetrics.isArrayMode = true;
5762 
5763             (new AMessage)->postReply(replyID);
5764             break;
5765         }
5766 
5767         case kWhatFlush:
5768         {
5769             if (!isExecuting()) {
5770                 mErrorLog.log(LOG_TAG, base::StringPrintf(
5771                         "flush() is valid only at Executing states; currently %s",
5772                         apiStateString().c_str()));
5773                 PostReplyWithError(msg, INVALID_OPERATION);
5774                 break;
5775             } else if (mFlags & kFlagStickyError) {
5776                 PostReplyWithError(msg, getStickyError());
5777                 break;
5778             }
5779 
5780             if (mReplyID) {
5781                 mDeferredMessages.push_back(msg);
5782                 break;
5783             }
5784             sp<AReplyToken> replyID;
5785             CHECK(msg->senderAwaitsResponse(&replyID));
5786 
5787             mReplyID = replyID;
5788             // TODO: skip flushing if already FLUSHED
5789             setState(FLUSHING);
5790             if (mCryptoAsync) {
5791                 std::list<sp<AMessage>> pendingBuffers;
5792                 mCryptoAsync->stop(&pendingBuffers);
5793                 // TODO: do something with these buffers
5794             }
5795             mCodec->signalFlush();
5796             returnBuffersToCodec();
5797             TunnelPeekState previousState = mTunnelPeekState;
5798             if (previousState != TunnelPeekState::kLegacyMode) {
5799                 mTunnelPeekState = mTunnelPeekEnabled ? TunnelPeekState::kEnabledNoBuffer :
5800                     TunnelPeekState::kDisabledNoBuffer;
5801                 ALOGV("TunnelPeekState: %s -> %s",
5802                         asString(previousState),
5803                         asString(mTunnelPeekState));
5804             }
5805             break;
5806         }
5807 
5808         case kWhatGetInputFormat:
5809         case kWhatGetOutputFormat:
5810         {
5811             sp<AMessage> format =
5812                 (msg->what() == kWhatGetOutputFormat ? mOutputFormat : mInputFormat);
5813 
5814             sp<AReplyToken> replyID;
5815             CHECK(msg->senderAwaitsResponse(&replyID));
5816 
5817             if (mState != CONFIGURED && mState != STARTING &&
5818                     mState != STARTED && mState != FLUSHING &&
5819                     mState != FLUSHED) {
5820                 mErrorLog.log(LOG_TAG, base::StringPrintf(
5821                         "getInput/OutputFormat() is valid at Executing states "
5822                         "and Configured state; currently %s",
5823                         apiStateString().c_str()));
5824                 PostReplyWithError(replyID, INVALID_OPERATION);
5825                 break;
5826             } else if (format == NULL) {
5827                 mErrorLog.log(LOG_TAG, "Fatal error: format is not initialized");
5828                 PostReplyWithError(replyID, INVALID_OPERATION);
5829                 break;
5830             } else if (mFlags & kFlagStickyError) {
5831                 PostReplyWithError(replyID, getStickyError());
5832                 break;
5833             }
5834 
5835             sp<AMessage> response = new AMessage;
5836             response->setMessage("format", format);
5837             response->postReply(replyID);
5838             break;
5839         }
5840 
5841         case kWhatRequestIDRFrame:
5842         {
5843             mCodec->signalRequestIDRFrame();
5844             break;
5845         }
5846 
5847         case kWhatRequestActivityNotification:
5848         {
5849             CHECK(mActivityNotify == NULL);
5850             CHECK(msg->findMessage("notify", &mActivityNotify));
5851 
5852             postActivityNotificationIfPossible();
5853             break;
5854         }
5855 
5856         case kWhatGetName:
5857         {
5858             sp<AReplyToken> replyID;
5859             CHECK(msg->senderAwaitsResponse(&replyID));
5860 
5861             if (mComponentName.empty()) {
5862                 mErrorLog.log(LOG_TAG, "Fatal error: name is not set");
5863                 PostReplyWithError(replyID, INVALID_OPERATION);
5864                 break;
5865             }
5866 
5867             sp<AMessage> response = new AMessage;
5868             response->setString("name", mComponentName.c_str());
5869             response->postReply(replyID);
5870             break;
5871         }
5872 
5873         case kWhatGetCodecInfo:
5874         {
5875             sp<AReplyToken> replyID;
5876             CHECK(msg->senderAwaitsResponse(&replyID));
5877 
5878             sp<AMessage> response = new AMessage;
5879             response->setObject("codecInfo", mCodecInfo);
5880             response->postReply(replyID);
5881             break;
5882         }
5883 
5884         case kWhatSetParameters:
5885         {
5886             sp<AReplyToken> replyID;
5887             CHECK(msg->senderAwaitsResponse(&replyID));
5888 
5889             sp<AMessage> params;
5890             CHECK(msg->findMessage("params", &params));
5891 
5892             status_t err = onSetParameters(params);
5893 
5894             PostReplyWithError(replyID, err);
5895             break;
5896         }
5897 
5898         case kWhatDrmReleaseCrypto:
5899         {
5900             onReleaseCrypto(msg);
5901             break;
5902         }
5903 
5904         case kWhatCheckBatteryStats:
5905         {
5906             if (mBatteryChecker != nullptr) {
5907                 mBatteryChecker->onCheckBatteryTimer(msg, [this] () {
5908                     mResourceManagerProxy->removeResource(
5909                             MediaResource::VideoBatteryResource(mIsHardware));
5910                 });
5911             }
5912             break;
5913         }
5914 
5915         default:
5916             TRESPASS();
5917     }
5918 }
5919 
5920 // always called from the looper thread
5921 void MediaCodec::handleOutputFormatChangeIfNeeded(const sp<MediaCodecBuffer> &buffer) {
5922     sp<AMessage> format = buffer->format();
5923     if (mOutputFormat == format) {
5924         return;
5925     }
5926     if (mFlags & kFlagUseBlockModel) {
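             // In block model, attach the set of keys that differ between the old and
             // new output formats to the buffer so the client can see what changed.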
5927         sp<AMessage> diff1 = mOutputFormat->changesFrom(format);
5928         sp<AMessage> diff2 = format->changesFrom(mOutputFormat);
5929         std::set<std::string> keys;
5930         size_t numEntries = diff1->countEntries();
5931         AMessage::Type type;
5932         for (size_t i = 0; i < numEntries; ++i) {
5933             keys.emplace(diff1->getEntryNameAt(i, &type));
5934         }
5935         numEntries = diff2->countEntries();
5936         for (size_t i = 0; i < numEntries; ++i) {
5937             keys.emplace(diff2->getEntryNameAt(i, &type));
5938         }
5939         sp<WrapperObject<std::set<std::string>>> changedKeys{
5940             new WrapperObject<std::set<std::string>>{std::move(keys)}};
5941         buffer->meta()->setObject("changedKeys", changedKeys);
5942     }
5943     mOutputFormat = format;
5944     mapFormat(mComponentName, format, nullptr, true);
5945     ALOGV("[%s] output format changed to: %s",
5946             mComponentName.c_str(), mOutputFormat->debugString(4).c_str());
5947 
5948     if (mSoftRenderer == NULL &&
5949             mSurface != NULL &&
5950             (mFlags & kFlagUsesSoftwareRenderer)) {
5951         AString mime;
5952         CHECK(mOutputFormat->findString("mime", &mime));
5953 
5954         // TODO: propagate color aspects to software renderer to allow better
5955         // color conversion to RGB. For now, just mark dataspace for YUV
5956         // rendering.
5957         int32_t dataSpace;
5958         if (mOutputFormat->findInt32("android._dataspace", &dataSpace)) {
5959             ALOGD("[%s] setting dataspace on output surface to %#x",
5960                     mComponentName.c_str(), dataSpace);
5961             int err = native_window_set_buffers_data_space(
5962                     mSurface.get(), (android_dataspace)dataSpace);
5963             ALOGW_IF(err != 0, "failed to set dataspace on surface (%d)", err);
5964         }
5965         if (mOutputFormat->contains("hdr-static-info")) {
5966             HDRStaticInfo info;
5967             if (ColorUtils::getHDRStaticInfoFromFormat(mOutputFormat, &info)) {
5968                 setNativeWindowHdrMetadata(mSurface.get(), &info);
5969             }
5970         }
5971 
5972         sp<ABuffer> hdr10PlusInfo;
5973         if (mOutputFormat->findBuffer("hdr10-plus-info", &hdr10PlusInfo)
5974                 && hdr10PlusInfo != nullptr && hdr10PlusInfo->size() > 0) {
5975             native_window_set_buffers_hdr10_plus_metadata(mSurface.get(),
5976                     hdr10PlusInfo->size(), hdr10PlusInfo->data());
5977         }
5978 
5979         if (mime.startsWithIgnoreCase("video/")) {
5980             mSurface->setDequeueTimeout(-1);
5981             mSoftRenderer = new SoftwareRenderer(mSurface, mRotationDegrees);
5982         }
5983     }
5984 
5985     requestCpuBoostIfNeeded();
5986 
5987     if (mFlags & kFlagIsEncoder) {
5988         // Before we announce the format change we should
5989         // collect codec specific data and amend the output
5990         // format as necessary.
5991         int32_t flags = 0;
5992         (void) buffer->meta()->findInt32("flags", &flags);
5993         if ((flags & BUFFER_FLAG_CODECCONFIG) && !(mFlags & kFlagIsSecure)
5994                 && !mOwnerName.startsWith("codec2::")) {
5995             status_t err =
5996                 amendOutputFormatWithCodecSpecificData(buffer);
5997 
5998             if (err != OK) {
5999                 ALOGE("Codec spit out malformed codec "
6000                       "specific data!");
6001             }
6002         }
6003     }
6004 
6005     // Update the width and the height.
6006     int32_t left = 0, top = 0, right = 0, bottom = 0, width = 0, height = 0;
6007     bool newSubsession = false;
6008     if (android::media::codec::provider_->subsession_metrics()
6009             && mOutputFormat->findInt32("width", &width)
6010             && mOutputFormat->findInt32("height", &height)
6011             && (width != mWidth || height != mHeight)) {
6012         // consider a new subsession if the width or height changes.
6013         newSubsession = true;
6014     }
6015     // TODO: properly detect new audio subsession
6016 
6017     // Only consider a new subsession if we already have output (from a previous subsession).
6018     if (newSubsession && mMetricsToUpload && mBytesEncoded > 0) {
6019         handleStartingANewSubsession();
6020     }
6021 
6022     if (mFlags & kFlagIsAsync) {
6023         onOutputFormatChanged();
6024     } else {
6025         mFlags |= kFlagOutputFormatChanged;
6026         postActivityNotificationIfPossible();
6027     }
6028 
6029     bool resolutionChanged = false;
6030     if (mOutputFormat->findRect("crop", &left, &top, &right, &bottom)) {
6031         mWidth = right - left + 1;
6032         mHeight = bottom - top + 1;
6033         resolutionChanged = true;
6034     } else if (mOutputFormat->findInt32("width", &width) &&
6035                mOutputFormat->findInt32("height", &height)) {
6036         mWidth = width;
6037         mHeight = height;
6038         resolutionChanged = true;
6039     }
6040 
6041     // Notify mCrypto and the RM of video resolution changes
6042     if (resolutionChanged) {
6043         if (mCrypto != NULL) {
6044             mCrypto->notifyResolution(mWidth, mHeight);
6045         }
6046         ClientConfigParcel clientConfig;
6047         initClientConfigParcel(clientConfig);
6048         mResourceManagerProxy->notifyClientConfigChanged(clientConfig);
6049         mReliabilityContextMetrics.resolutionChangeCount++;
6050     }
6051 
6052     updateHdrMetrics(false /* isConfig */);
6053 }
6054 
6055 // always called from the looper thread (and therefore not mutexed)
6056 void MediaCodec::handleStartingANewSubsession() {
6057     // create a new metrics item for the subsession with the new resolution.
6058     // TODO: properly account input counts for the previous and the new
6059     // subsessions. We only find out that a new subsession started from the
6060     // output format, but by that time we already accounted the input counts
6061     // to the previous subsession.
6062     flushMediametrics(); // this deletes mMetricsHandle, but stores it in mLastMetricsHandle
6063 
6064     // hence mLastMetricsHandle has the metrics item for the previous subsession.
6065     if ((mFlags & kFlagIsAsync) && mCallback != nullptr) {
6066         sp<AMessage> msg = mCallback->dup();
6067         msg->setInt32("callbackID", CB_METRICS_FLUSHED);
6068         std::unique_ptr<mediametrics::Item> flushedMetrics(
6069                 mediametrics::Item::convert(mediametrics_dup(mLastMetricsHandle)));
6070         msg->setObject("metrics", new WrapperObject<std::unique_ptr<mediametrics::Item>>(
6071                 std::move(flushedMetrics)));
6072         msg->post();
6073     }
6074 
6075     // reuse/continue old metrics item for the new subsession.
6076     mMetricsHandle = mediametrics_dup(mLastMetricsHandle);
6077     mMetricsToUpload = true;
6078     // TODO: configured width/height for the new subsession should be the
6079     // previous width/height.
6080     mSubsessionCount++;
6081     resetSubsessionMetricsFields();
6082 }
6083 
6084 void MediaCodec::extractCSD(const sp<AMessage> &format) {
6085     mCSD.clear();
6086 
6087     size_t i = 0;
6088     for (;;) {
6089         sp<ABuffer> csd;
6090         if (!format->findBuffer(base::StringPrintf("csd-%zu", i).c_str(), &csd)) {
6091             break;
6092         }
6093         if (csd->size() == 0) {
6094             ALOGW("csd-%zu size is 0", i);
6095         } else {
6096             mCSD.push_back(csd);
6097         }
6098         ++i;
6099     }
6100 
6101     ALOGV("Found %zu pieces of codec specific data.", mCSD.size());
6102 }
6103 
6104 status_t MediaCodec::queueCSDInputBuffer(size_t bufferIndex) {
6105     CHECK(!mCSD.empty());
6106 
6107     sp<ABuffer> csd = *mCSD.begin();
6108     mCSD.erase(mCSD.begin());
6109     std::shared_ptr<C2Buffer> c2Buffer;
6110     sp<hardware::HidlMemory> memory;
6111 
6112     if (mFlags & kFlagUseBlockModel) {
6113         if (hasCryptoOrDescrambler()) {
6114             constexpr size_t kInitialDealerCapacity = 1048576;  // 1MB
6115             thread_local sp<MemoryDealer> sDealer = new MemoryDealer(
6116                     kInitialDealerCapacity, "CSD(1MB)");
6117             sp<IMemory> mem = sDealer->allocate(csd->size());
6118             if (mem == nullptr) {
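                     // Allocation failed: grow the dealer to at least twice its current
                     // size (and large enough to hold the CSD twice), then retry once.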
6119                 size_t newDealerCapacity = sDealer->getMemoryHeap()->getSize() * 2;
6120                 while (csd->size() * 2 > newDealerCapacity) {
6121                     newDealerCapacity *= 2;
6122                 }
6123                 sDealer = new MemoryDealer(
6124                         newDealerCapacity,
6125                         base::StringPrintf("CSD(%zuMB)", newDealerCapacity / 1048576).c_str());
6126                 mem = sDealer->allocate(csd->size());
6127             }
6128             memcpy(mem->unsecurePointer(), csd->data(), csd->size());
6129             ssize_t heapOffset;
6130             memory = hardware::fromHeap(mem->getMemory(&heapOffset, nullptr));
6131         } else {
6132             std::shared_ptr<C2LinearBlock> block =
6133                 FetchLinearBlock(csd->size(), {std::string{mComponentName.c_str()}});
6134             C2WriteView view{block->map().get()};
6135             if (view.error() != C2_OK) {
6136                 mErrorLog.log(LOG_TAG, "Fatal error: failed to allocate and map a block");
6137                 return -EINVAL;
6138             }
6139             if (csd->size() > view.capacity()) {
6140                 mErrorLog.log(LOG_TAG, base::StringPrintf(
6141                         "Fatal error: allocated block is too small "
6142                         "(csd size %zu; block cap %u)",
6143                         csd->size(), view.capacity()));
6144                 return -EINVAL;
6145             }
6146             memcpy(view.base(), csd->data(), csd->size());
6147             c2Buffer = C2Buffer::CreateLinearBuffer(block->share(0, csd->size(), C2Fence{}));
6148         }
6149     } else {
6150         const BufferInfo &info = mPortBuffers[kPortIndexInput][bufferIndex];
6151         const sp<MediaCodecBuffer> &codecInputData = info.mData;
6152 
6153         if (csd->size() > codecInputData->capacity()) {
6154             mErrorLog.log(LOG_TAG, base::StringPrintf(
6155                     "CSD is too large to fit in input buffer "
6156                     "(csd size %zu; buffer cap %zu)",
6157                     csd->size(), codecInputData->capacity()));
6158             return -EINVAL;
6159         }
6160         if (codecInputData->data() == NULL) {
6161             mErrorLog.log(LOG_TAG, base::StringPrintf(
6162                     "Fatal error: input buffer %zu is not properly allocated", bufferIndex));
6163             return -EINVAL;
6164         }
6165 
6166         memcpy(codecInputData->data(), csd->data(), csd->size());
6167     }
6168 
6169     AString errorDetailMsg;
6170 
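    // Queue the CSD like a regular input buffer flagged as codec config; in block model
    // the payload travels as the attached C2Buffer or HidlMemory object instead of the
    // input buffer contents.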
6171     sp<AMessage> msg = new AMessage(kWhatQueueInputBuffer, this);
6172     msg->setSize("index", bufferIndex);
6173     msg->setSize("offset", 0);
6174     msg->setSize("size", csd->size());
6175     msg->setInt64("timeUs", 0LL);
6176     msg->setInt32("flags", BUFFER_FLAG_CODECCONFIG);
6177     msg->setPointer("errorDetailMsg", &errorDetailMsg);
6178     if (c2Buffer) {
6179         sp<WrapperObject<std::shared_ptr<C2Buffer>>> obj{
6180             new WrapperObject<std::shared_ptr<C2Buffer>>{c2Buffer}};
6181         msg->setObject("c2buffer", obj);
6182     } else if (memory) {
6183         sp<WrapperObject<sp<hardware::HidlMemory>>> obj{
6184             new WrapperObject<sp<hardware::HidlMemory>>{memory}};
6185         msg->setObject("memory", obj);
6186     }
6187 
6188     return onQueueInputBuffer(msg);
6189 }
6190 
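// Transitions the codec state machine. Dropping back to INITIALIZED/UNINITIALIZED also
// releases per-session state (renderer, crypto, surface, cached formats and flags), and
// every transition updates the battery checker and cancels pending dequeue operations.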
6191 void MediaCodec::setState(State newState) {
6192     if (newState == INITIALIZED || newState == UNINITIALIZED) {
6193         delete mSoftRenderer;
6194         mSoftRenderer = NULL;
6195 
6196         if ( mCrypto != NULL ) {
6197             ALOGV("setState: ~mCrypto: %p (%d)",
6198                     mCrypto.get(), (mCrypto != NULL ? mCrypto->getStrongCount() : 0));
6199         }
6200         mCrypto.clear();
6201         mDescrambler.clear();
6202         handleSetSurface(NULL);
6203 
6204         mInputFormat.clear();
6205         mOutputFormat.clear();
6206         if (android::media::codec::provider_->codec_buffer_state_cleanup()) {
6207             mCSD.clear();
6208             mLeftover.clear();
6209         }
6210         mFlags &= ~kFlagOutputFormatChanged;
6211         mFlags &= ~kFlagOutputBuffersChanged;
6212         mFlags &= ~kFlagStickyError;
6213         mFlags &= ~kFlagIsEncoder;
6214         mFlags &= ~kFlagIsAsync;
6215         mStickyError = OK;
6216 
6217         mActivityNotify.clear();
6218         mCallback.clear();
6219         mErrorLog.clear();
6220     }
6221 
6222     if (android::media::codec::provider_->set_state_early()) {
6223         mState = newState;
6224     }
6225 
6226     if (newState == UNINITIALIZED) {
6227         // return any straggling buffers, e.g. if we got here on an error
6228         returnBuffersToCodec();
6229 
6230         // The component is gone; mediaserver is probably back up already,
6231         // but it should definitely be back up if we try to instantiate
6232         // another component... and the cycle continues.
6233         mFlags &= ~kFlagSawMediaServerDie;
6234     }
6235 
6236     if (!android::media::codec::provider_->set_state_early()) {
6237         mState = newState;
6238     }
6239 
6240     if (mBatteryChecker != nullptr) {
6241         mBatteryChecker->setExecuting(isExecuting());
6242     }
6243 
6244     cancelPendingDequeueOperations();
6245 }
6246 
6247 void MediaCodec::returnBuffersToCodec(bool isReclaim) {
6248     returnBuffersToCodecOnPort(kPortIndexInput, isReclaim);
6249     returnBuffersToCodecOnPort(kPortIndexOutput, isReclaim);
6250 }
6251 
6252 void MediaCodec::returnBuffersToCodecOnPort(int32_t portIndex, bool isReclaim) {
6253     CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);
6254     Mutex::Autolock al(mBufferLock);
6255 
6256     if (portIndex == kPortIndexInput) {
6257         mLeftover.clear();
6258     }
6259     for (size_t i = 0; i < mPortBuffers[portIndex].size(); ++i) {
6260         BufferInfo *info = &mPortBuffers[portIndex][i];
6261 
6262         if (info->mData != nullptr) {
6263             sp<MediaCodecBuffer> buffer = info->mData;
6264             if (isReclaim && info->mOwnedByClient) {
6265                 ALOGD("port %d buffer %zu still owned by client when codec is reclaimed",
6266                         portIndex, i);
6267             } else {
6268                 ALOGV("returnBuffersToCodecOnPort: mPortBuffers[%s][%zu] NOT owned by client",
6269                       portIndex == kPortIndexInput ? "in" : "out", i);
6270                 info->mOwnedByClient = false;
6271                 info->mData.clear();
6272             }
6273             mBufferChannel->discardBuffer(buffer);
6274         }
6275     }
6276 
6277     mAvailPortBuffers[portIndex].clear();
6278 }
6279 
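// Records a buffer reported by the buffer channel at the given index, growing the port's
// buffer table if needed, and marks that index as available.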
6280 size_t MediaCodec::updateBuffers(
6281         int32_t portIndex, const sp<AMessage> &msg) {
6282     CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);
6283     size_t index;
6284     CHECK(msg->findSize("index", &index));
6285     sp<RefBase> obj;
6286     CHECK(msg->findObject("buffer", &obj));
6287     sp<MediaCodecBuffer> buffer = static_cast<MediaCodecBuffer *>(obj.get());
6288 
6289     {
6290         Mutex::Autolock al(mBufferLock);
6291         if (mPortBuffers[portIndex].size() <= index) {
6292             mPortBuffers[portIndex].resize(align(index + 1, kNumBuffersAlign));
6293         }
6294         mPortBuffers[portIndex][index].mData = buffer;
6295     }
6296     mAvailPortBuffers[portIndex].push_back(index);
6297 
6298     return index;
6299 }
6300 
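// Handles kWhatQueueInputBuffer: validates the index and client ownership, fabricates a
// single clear subsample when the simple queueInputBuffer API is used with crypto, attaches
// block-model payloads (C2Buffer / HidlMemory), and finally hands the buffer to the buffer
// channel via queueInputBuffer or one of the secure variants.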
6301 status_t MediaCodec::onQueueInputBuffer(const sp<AMessage> &msg) {
6302     size_t index;
6303     size_t offset = 0;
6304     size_t size = 0;
6305     int64_t timeUs = 0;
6306     uint32_t flags = 0;
6307     CHECK(msg->findSize("index", &index));
6308     CHECK(msg->findInt64("timeUs", &timeUs));
6309     CHECK(msg->findInt32("flags", (int32_t *)&flags));
6310     std::shared_ptr<C2Buffer> c2Buffer;
6311     sp<hardware::HidlMemory> memory;
6312     sp<RefBase> obj;
6313     if (msg->findObject("c2buffer", &obj)) {
6314         CHECK(obj);
6315         c2Buffer = static_cast<WrapperObject<std::shared_ptr<C2Buffer>> *>(obj.get())->value;
6316     } else if (msg->findObject("memory", &obj)) {
6317         CHECK(obj);
6318         memory = static_cast<WrapperObject<sp<hardware::HidlMemory>> *>(obj.get())->value;
6319         CHECK(msg->findSize("offset", &offset));
6320     } else {
6321         CHECK(msg->findSize("offset", &offset));
6322     }
6323     const CryptoPlugin::SubSample *subSamples;
6324     size_t numSubSamples = 0;
6325     const uint8_t *key = NULL;
6326     const uint8_t *iv = NULL;
6327     CryptoPlugin::Mode mode = CryptoPlugin::kMode_Unencrypted;
6328 
6329     // We allow the simpler queueInputBuffer API to be used even in
6330     // secure mode, by fabricating a single unencrypted subSample.
6331     CryptoPlugin::SubSample ss;
6332     CryptoPlugin::Pattern pattern;
6333 
6334     if (android::media::codec::provider_->secure_codecs_require_crypto()
6335             && (mFlags & kFlagIsSecure) && !hasCryptoOrDescrambler()) {
6336         mErrorLog.log(LOG_TAG, "Crypto or descrambler must be given for secure codec");
6337         return INVALID_OPERATION;
6338     }
6339 
6340     if (msg->findSize("size", &size)) {
6341         if (hasCryptoOrDescrambler()) {
6342             ss.mNumBytesOfClearData = size;
6343             ss.mNumBytesOfEncryptedData = 0;
6344 
6345             subSamples = &ss;
6346             numSubSamples = 1;
6347             pattern.mEncryptBlocks = 0;
6348             pattern.mSkipBlocks = 0;
6349         }
6350     } else if (!c2Buffer) {
6351         if (!hasCryptoOrDescrambler()) {
6352             ALOGE("[%s] queuing secure buffer without mCrypto or mDescrambler!",
6353                     mComponentName.c_str());
6354             mErrorLog.log(LOG_TAG, "queuing secure buffer without mCrypto or mDescrambler!");
6355             return -EINVAL;
6356         }
6357         sp<RefBase> obj;
6358         if (msg->findObject("cryptoInfos", &obj)) {
6359             CHECK(msg->findSize("ssize", &size));
6360         } else {
6361             CHECK(msg->findPointer("subSamples", (void **)&subSamples));
6362             CHECK(msg->findSize("numSubSamples", &numSubSamples));
6363             CHECK(msg->findPointer("key", (void **)&key));
6364             CHECK(msg->findPointer("iv", (void **)&iv));
6365             CHECK(msg->findInt32("encryptBlocks", (int32_t *)&pattern.mEncryptBlocks));
6366             CHECK(msg->findInt32("skipBlocks", (int32_t *)&pattern.mSkipBlocks));
6367 
6368             int32_t tmp;
6369             CHECK(msg->findInt32("mode", &tmp));
6370 
6371             mode = (CryptoPlugin::Mode)tmp;
6372             size = 0;
6373             for (size_t i = 0; i < numSubSamples; ++i) {
6374                 size += subSamples[i].mNumBytesOfClearData;
6375                 size += subSamples[i].mNumBytesOfEncryptedData;
6376             }
6377         }
6378     }
6379 
6380     if (index >= mPortBuffers[kPortIndexInput].size()) {
6381         mErrorLog.log(LOG_TAG, base::StringPrintf(
6382                 "index out of range (index=%zu, size=%zu)", index, mPortBuffers[kPortIndexInput].size()));
6383         return -ERANGE;
6384     }
6385 
6386     BufferInfo *info = &mPortBuffers[kPortIndexInput][index];
6387     sp<MediaCodecBuffer> buffer = info->mData;
6388     if (buffer == nullptr) {
6389         mErrorLog.log(LOG_TAG, base::StringPrintf(
6390                 "Fatal error: failed to fetch buffer for index %zu", index));
6391         return -EACCES;
6392     }
6393     if (!info->mOwnedByClient) {
6394         mErrorLog.log(LOG_TAG, base::StringPrintf(
6395                 "client does not own the buffer #%zu", index));
6396         return -EACCES;
6397     }
6398     auto setInputBufferParams = [this, &msg, &buffer]
6399         (int64_t timeUs, uint32_t flags = 0) -> status_t {
6400         status_t err = OK;
6401         sp<RefBase> obj;
6402         if (msg->findObject("accessUnitInfo", &obj)) {
6403             buffer->meta()->setObject("accessUnitInfo", obj);
6404         }
6405         buffer->meta()->setInt64("timeUs", timeUs);
6406         if (flags & BUFFER_FLAG_EOS) {
6407             buffer->meta()->setInt32("eos", true);
6408         }
6409 
6410         if (flags & BUFFER_FLAG_CODECCONFIG) {
6411             buffer->meta()->setInt32("csd", true);
6412         }
6413         bool isBufferDecodeOnly = ((flags & BUFFER_FLAG_DECODE_ONLY) != 0);
6414         if (isBufferDecodeOnly) {
6415             buffer->meta()->setInt32("decode-only", true);
6416         }
6417         if (mTunneled && !isBufferDecodeOnly && !(flags & BUFFER_FLAG_CODECCONFIG)) {
6418             TunnelPeekState previousState = mTunnelPeekState;
6419             switch (mTunnelPeekState) {
6420                 case TunnelPeekState::kEnabledNoBuffer:
6421                     buffer->meta()->setInt32("tunnel-first-frame", 1);
6422                     mTunnelPeekState = TunnelPeekState::kEnabledQueued;
6423                     ALOGV("TunnelPeekState: %s -> %s",
6424                         asString(previousState),
6425                         asString(mTunnelPeekState));
6426                 break;
6427                 case TunnelPeekState::kDisabledNoBuffer:
6428                     buffer->meta()->setInt32("tunnel-first-frame", 1);
6429                     mTunnelPeekState = TunnelPeekState::kDisabledQueued;
6430                     ALOGV("TunnelPeekState: %s -> %s",
6431                         asString(previousState),
6432                         asString(mTunnelPeekState));
6433                 break;
6434                 default:
6435                     break;
6436             }
6437         }
6438         return err;
6439     };
6440     auto buildCryptoInfoAMessage = [&](const sp<AMessage> & cryptoInfo, int32_t action) {
6441         // set decrypt Action
6442         cryptoInfo->setInt32("action", action);
6443         cryptoInfo->setObject("buffer", buffer);
6444         cryptoInfo->setInt32("secure", mFlags & kFlagIsSecure);
6445         sp<RefBase> obj;
6446         if (msg->findObject("cryptoInfos", &obj)) {
6447             // this object is a standalone object when created (no copy required here)
6448             buffer->meta()->setObject("cryptoInfos", obj);
6449         } else {
6450             size_t key_len = (key != nullptr)? 16 : 0;
6451             size_t iv_len = (iv != nullptr)? 16 : 0;
6452             sp<ABuffer> shared_key;
6453             sp<ABuffer> shared_iv;
6454             if (key_len > 0) {
6455                 shared_key = ABuffer::CreateAsCopy((void*)key, key_len);
6456             }
6457             if (iv_len > 0) {
6458                 shared_iv = ABuffer::CreateAsCopy((void*)iv, iv_len);
6459             }
6460             sp<ABuffer> subSamples_buffer =
6461                 new ABuffer(sizeof(CryptoPlugin::SubSample) * numSubSamples);
6462             CryptoPlugin::SubSample * samples =
6463                (CryptoPlugin::SubSample *)(subSamples_buffer.get()->data());
6464             for (size_t s = 0; s < numSubSamples; ++s) {
6465                 samples[s].mNumBytesOfClearData = subSamples[s].mNumBytesOfClearData;
6466                 samples[s].mNumBytesOfEncryptedData = subSamples[s].mNumBytesOfEncryptedData;
6467             }
6468             cryptoInfo->setBuffer("key", shared_key);
6469             cryptoInfo->setBuffer("iv", shared_iv);
6470             cryptoInfo->setInt32("mode", (int)mode);
6471             cryptoInfo->setInt32("encryptBlocks", pattern.mEncryptBlocks);
6472             cryptoInfo->setInt32("skipBlocks", pattern.mSkipBlocks);
6473             cryptoInfo->setBuffer("subSamples", subSamples_buffer);
6474             cryptoInfo->setSize("numSubSamples", numSubSamples);
6475         }
6476     };
6477     if (c2Buffer || memory) {
6478         sp<AMessage> tunings = NULL;
6479         if (msg->findMessage("tunings", &tunings) && tunings != NULL) {
6480             onSetParameters(tunings);
6481         }
6482         status_t err = OK;
6483         if (c2Buffer) {
6484             err = mBufferChannel->attachBuffer(c2Buffer, buffer);
6485             // Drop a single-entry accessUnitInfo from the message to prevent an unnecessary copy.
6486             if (msg->findObject("accessUnitInfo", &obj)) {
6487                 sp<BufferInfosWrapper> infos{(BufferInfosWrapper*)(obj.get())};
6488                 if (infos->value.size() == 1) {
6489                    msg->removeEntryByName("accessUnitInfo");
6490                 }
6491             }
6492         } else if (memory) {
6493             AString errorDetailMsg;
6494             if (msg->findObject("cryptoInfos", &obj)) {
6495                 buffer->meta()->setSize("ssize", size);
6496                 buffer->meta()->setObject("cryptoInfos", obj);
6497                 if (msg->findObject("accessUnitInfo", &obj)) {
6498                     // the same reference is also attached to the buffer meta
6499                     // in setInputBufferParams
6500                     buffer->meta()->setObject("accessUnitInfo", obj);
6501                 }
6502                 err = mBufferChannel->attachEncryptedBuffers(
6503                     memory,
6504                     offset,
6505                     buffer,
6506                     (mFlags & kFlagIsSecure),
6507                     &errorDetailMsg);
6508             } else {
6509                 err = mBufferChannel->attachEncryptedBuffer(
6510                         memory, (mFlags & kFlagIsSecure), key, iv, mode, pattern,
6511                         offset, subSamples, numSubSamples, buffer, &errorDetailMsg);
6512             }
6513             if (err != OK && hasCryptoOrDescrambler()
6514                     && (mFlags & kFlagUseCryptoAsync)) {
6515                 // create error detail
6516                 sp<AMessage> cryptoErrorInfo = new AMessage();
6517                 buildCryptoInfoAMessage(cryptoErrorInfo, CryptoAsync::kActionDecrypt);
6518                 cryptoErrorInfo->setInt32("err", err);
6519                 cryptoErrorInfo->setInt32("actionCode", ACTION_CODE_FATAL);
6520                 cryptoErrorInfo->setString("errorDetail", errorDetailMsg);
6521                 onCryptoError(cryptoErrorInfo);
6522                 // we want the crypto error to be delivered via the callback,
6523                 // but the codec itself should raise an IllegalStateException.
6524                 err = INVALID_OPERATION;
6525             }
6526         } else {
6527             mErrorLog.log(LOG_TAG, "Fatal error: invalid queue request without a buffer");
6528             err = UNKNOWN_ERROR;
6529         }
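        // A block-model linear buffer larger than the attached input buffer is split here:
        // the attached portion is queued now, and the remainder is re-wrapped as a C2Buffer and
        // stashed in mLeftover for the next available input buffer (EOS is deferred with it).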
6530         if (err == OK && !buffer->asC2Buffer()
6531                 && c2Buffer && c2Buffer->data().type() == C2BufferData::LINEAR) {
6532             C2ConstLinearBlock block{c2Buffer->data().linearBlocks().front()};
6533             if (block.size() > buffer->size()) {
6534                 C2ConstLinearBlock leftover = block.subBlock(
6535                         block.offset() + buffer->size(), block.size() - buffer->size());
6536                 sp<WrapperObject<std::shared_ptr<C2Buffer>>> obj{
6537                     new WrapperObject<std::shared_ptr<C2Buffer>>{
6538                         C2Buffer::CreateLinearBuffer(leftover)}};
6539                 msg->setObject("c2buffer", obj);
6540                 mLeftover.push_front(msg);
6541                 // Not sending EOS if we have leftovers
6542                 flags &= ~BUFFER_FLAG_EOS;
6543             }
6544         }
6545         offset = buffer->offset();
6546         size = buffer->size();
6547         if (err != OK) {
6548             ALOGE("block model buffer attach failed: err = %s (%d)",
6549                   StrMediaError(err).c_str(), err);
6550             return err;
6551         }
6552     }
6553 
6554     if (offset + size > buffer->capacity()) {
6555         mErrorLog.log(LOG_TAG, base::StringPrintf(
6556                 "buffer offset and size go beyond the capacity: "
6557                 "offset=%zu, size=%zu, cap=%zu",
6558                 offset, size, buffer->capacity()));
6559         return -EINVAL;
6560     }
6561     buffer->setRange(offset, size);
6562     status_t err = OK;
6563     err = setInputBufferParams(timeUs, flags);
6564     if (err != OK) {
6565         return -EINVAL;
6566     }
6567 
6568     int32_t usedMaxInputSize = mApiUsageMetrics.inputBufferSize.usedMax;
6569     mApiUsageMetrics.inputBufferSize.usedMax = size > usedMaxInputSize ? size : usedMaxInputSize;
6570 
6571     if (hasCryptoOrDescrambler() && !c2Buffer && !memory) {
6572         AString *errorDetailMsg;
6573         CHECK(msg->findPointer("errorDetailMsg", (void **)&errorDetailMsg));
6574         // Notify mCrypto of video resolution changes
6575         if (mTunneled && mCrypto != NULL) {
6576             int32_t width, height;
6577             if (mInputFormat->findInt32("width", &width) &&
6578                 mInputFormat->findInt32("height", &height) && width > 0 && height > 0) {
6579                 if (width != mTunneledInputWidth || height != mTunneledInputHeight) {
6580                     mTunneledInputWidth = width;
6581                     mTunneledInputHeight = height;
6582                     mCrypto->notifyResolution(width, height);
6583                 }
6584             }
6585         }
6586         if (mCryptoAsync) {
6587             // prepare a message and enqueue
6588             sp<AMessage> cryptoInfo = new AMessage();
6589             buildCryptoInfoAMessage(cryptoInfo, CryptoAsync::kActionDecrypt);
6590             mCryptoAsync->decrypt(cryptoInfo);
6591         } else if (msg->findObject("cryptoInfos", &obj)) {
6592                 buffer->meta()->setObject("cryptoInfos", obj);
6593                 err = mBufferChannel->queueSecureInputBuffers(
6594                         buffer,
6595                         (mFlags & kFlagIsSecure),
6596                         errorDetailMsg);
6597         } else {
6598             err = mBufferChannel->queueSecureInputBuffer(
6599                 buffer,
6600                 (mFlags & kFlagIsSecure),
6601                 key,
6602                 iv,
6603                 mode,
6604                 pattern,
6605                 subSamples,
6606                 numSubSamples,
6607                 errorDetailMsg);
6608         }
6609         if (err != OK) {
6610             mediametrics_setInt32(mMetricsHandle, kCodecQueueSecureInputBufferError, err);
6611             ALOGW("queueSecureInputBuffer error: %d", err);
6612         }
6613     } else {
6614         err = mBufferChannel->queueInputBuffer(buffer);
6615         if (err != OK) {
6616             mediametrics_setInt32(mMetricsHandle, kCodecQueueInputBufferError, err);
6617             ALOGW("queueInputBuffer error: %d", err);
6618         }
6619     }
6620 
6621     if (err == OK) {
6622         if (mTunneled && (flags & (BUFFER_FLAG_DECODE_ONLY | BUFFER_FLAG_END_OF_STREAM)) == 0) {
6623             mVideoRenderQualityTracker.onTunnelFrameQueued(timeUs);
6624         }
6625 
6626         // synchronization boundary for getBufferAndFormat
6627         Mutex::Autolock al(mBufferLock);
6628         ALOGV("onQueueInputBuffer: mPortBuffers[in][%zu] NOT owned by client", index);
6629         info->mOwnedByClient = false;
6630         info->mData.clear();
6631 
6632         statsBufferSent(timeUs, buffer);
6633     }
6634 
6635     return err;
6636 }
6637 
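// Re-queues the remainder of a previously split block-model buffer using the newly
// available input buffer index.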
6638 status_t MediaCodec::handleLeftover(size_t index) {
6639     if (mLeftover.empty()) {
6640         return OK;
6641     }
6642     sp<AMessage> msg = mLeftover.front();
6643     mLeftover.pop_front();
6644     msg->setSize("index", index);
6645     ALOGV("handleLeftover(%zu)", index);
6646     return onQueueInputBuffer(msg);
6647 }
6648 
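// Flattens render-tracking entries into msg as "<index>-media-time-us" / "<index>-system-nano"
// pairs, skipping dropped frames, and returns the number of entries written.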
6649 template<typename T>
6650 static size_t CreateFramesRenderedMessageInternal(const std::list<T> &done, sp<AMessage> &msg) {
6651     size_t index = 0;
6652     for (typename std::list<T>::const_iterator it = done.cbegin(); it != done.cend(); ++it) {
6653         if (it->getRenderTimeNs() < 0) {
6654             continue; // dropped frame from tracking
6655         }
6656         msg->setInt64(base::StringPrintf("%zu-media-time-us", index).c_str(), it->getMediaTimeUs());
6657         msg->setInt64(base::StringPrintf("%zu-system-nano", index).c_str(), it->getRenderTimeNs());
6658         ++index;
6659     }
6660     return index;
6661 }
6662 
6663 //static
6664 size_t MediaCodec::CreateFramesRenderedMessage(
6665         const std::list<RenderedFrameInfo> &done, sp<AMessage> &msg) {
6666     return CreateFramesRenderedMessageInternal(done, msg);
6667 }
6668 
6669 //static
6670 size_t MediaCodec::CreateFramesRenderedMessage(
6671         const std::list<FrameRenderTracker::Info> &done, sp<AMessage> &msg) {
6672     return CreateFramesRenderedMessageInternal(done, msg);
6673 }
6674 
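// Handles releaseOutputBuffer(): validates index and ownership, then either renders the buffer
// (software renderer or buffer channel, scheduling a later poll for render metrics when the
// output surface goes to the display) or discards it, updating video render quality tracking.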
6675 status_t MediaCodec::onReleaseOutputBuffer(const sp<AMessage> &msg) {
6676     size_t index;
6677     CHECK(msg->findSize("index", &index));
6678 
6679     int32_t render;
6680     if (!msg->findInt32("render", &render)) {
6681         render = 0;
6682     }
6683 
6684     if (!isExecuting()) {
6685         mErrorLog.log(LOG_TAG, base::StringPrintf(
6686                 "releaseOutputBuffer() is valid at Executing states; currently %s",
6687                 apiStateString().c_str()));
6688         return -EINVAL;
6689     }
6690 
6691     if (index >= mPortBuffers[kPortIndexOutput].size()) {
6692         mErrorLog.log(LOG_TAG, base::StringPrintf(
6693                 "index out of range (index=%zu, size=%zu)", index, mPortBuffers[kPortIndexOutput].size()));
6694         return -ERANGE;
6695     }
6696 
6697     BufferInfo *info = &mPortBuffers[kPortIndexOutput][index];
6698 
6699     if (!info->mOwnedByClient) {
6700         mErrorLog.log(LOG_TAG, base::StringPrintf(
6701                 "client does not own the buffer #%zu", index));
6702         return -EACCES;
6703     }
6704     if (info->mData == nullptr) {
6705         mErrorLog.log(LOG_TAG, base::StringPrintf(
6706                 "Fatal error: null buffer for index %zu", index));
6707         return -EACCES;
6708     }
6709 
6710     // synchronization boundary for getBufferAndFormat
6711     sp<MediaCodecBuffer> buffer;
6712     {
6713         Mutex::Autolock al(mBufferLock);
6714         ALOGV("onReleaseOutputBuffer: mPortBuffers[out][%zu] NOT owned by client", index);
6715         info->mOwnedByClient = false;
6716         buffer = info->mData;
6717         info->mData.clear();
6718     }
6719 
6720     if (render && buffer->size() != 0) {
6721         int64_t mediaTimeUs = INT64_MIN;
6722         buffer->meta()->findInt64("timeUs", &mediaTimeUs);
6723 
6724         bool noRenderTime = false;
6725         int64_t renderTimeNs = 0;
6726         if (!msg->findInt64("timestampNs", &renderTimeNs)) {
6727             // use media timestamp if client did not request a specific render timestamp
6728             ALOGV("using buffer PTS of %lld", (long long)mediaTimeUs);
6729             renderTimeNs = mediaTimeUs * 1000;
6730             noRenderTime = true;
6731         }
6732 
6733         if (mSoftRenderer != NULL) {
6734             std::list<FrameRenderTracker::Info> doneFrames = mSoftRenderer->render(
6735                     buffer->data(), buffer->size(), mediaTimeUs, renderTimeNs,
6736                     mPortBuffers[kPortIndexOutput].size(), buffer->format());
6737 
6738             // if we are running, notify rendered frames
6739             if (!doneFrames.empty() && mState == STARTED && mOnFrameRenderedNotification != NULL) {
6740                 sp<AMessage> notify = mOnFrameRenderedNotification->dup();
6741                 sp<AMessage> data = new AMessage;
6742                 if (CreateFramesRenderedMessage(doneFrames, data)) {
6743                     notify->setMessage("data", data);
6744                     notify->post();
6745                 }
6746             }
6747         }
6748 
6749         // If rendering to the screen, then schedule a time in the future to poll to see if this
6750         // frame was ever rendered to seed onFrameRendered callbacks.
6751         if (mAreRenderMetricsEnabled && mIsSurfaceToDisplay) {
6752             if (mediaTimeUs != INT64_MIN) {
6753                 noRenderTime ? mVideoRenderQualityTracker.onFrameReleased(mediaTimeUs)
6754                              : mVideoRenderQualityTracker.onFrameReleased(mediaTimeUs,
6755                                                                           renderTimeNs);
6756             }
6757             // can't initialize this in the constructor because the Looper parent class needs to be
6758             // initialized first
6759             if (mMsgPollForRenderedBuffers == nullptr) {
6760                 mMsgPollForRenderedBuffers = new AMessage(kWhatPollForRenderedBuffers, this);
6761             }
6762             // Schedule the poll to occur 100ms after the render time - should be safe for
6763             // determining if the frame was ever rendered. If no render time was specified, the
6764             // presentation timestamp is used instead, which almost certainly occurs in the past,
6765             // since it's almost always a zero-based offset from the start of the stream. In these
6766             // scenarios, we expect the frame to be rendered with no delay.
6767             int64_t nowUs = ALooper::GetNowUs();
6768             int64_t renderTimeUs = renderTimeNs / 1000;
6769             int64_t delayUs = renderTimeUs < nowUs ? 0 : renderTimeUs - nowUs;
6770             delayUs += 100 * 1000; /* 100ms in microseconds */
6771             status_t err =
6772                     mMsgPollForRenderedBuffers->postUnique(/* token= */ mMsgPollForRenderedBuffers,
6773                                                            delayUs);
6774             if (err != OK) {
6775                 ALOGE("unexpected failure to post pollForRenderedBuffers: %d", err);
6776             }
6777         }
6778         status_t err = mBufferChannel->renderOutputBuffer(buffer, renderTimeNs);
6779 
6780         if (err == NO_INIT) {
6781             mErrorLog.log(LOG_TAG, "rendering to non-initialized(obsolete) surface");
6782             return err;
6783         }
6784         if (err != OK) {
6785             ALOGI("rendering output error %d", err);
6786         }
6787     } else {
6788         if (mIsSurfaceToDisplay && buffer->size() != 0) {
6789             int64_t mediaTimeUs = INT64_MIN;
6790             if (buffer->meta()->findInt64("timeUs", &mediaTimeUs)) {
6791                 mVideoRenderQualityTracker.onFrameSkipped(mediaTimeUs);
6792             }
6793         }
6794         mBufferChannel->discardBuffer(buffer);
6795     }
6796 
6797     return OK;
6798 }
6799 
6800 MediaCodec::BufferInfo *MediaCodec::peekNextPortBuffer(int32_t portIndex) {
6801     CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);
6802 
6803     std::list<size_t> *availBuffers = &mAvailPortBuffers[portIndex];
6804 
6805     if (availBuffers->empty()) {
6806         return nullptr;
6807     }
6808 
6809     return &mPortBuffers[portIndex][*availBuffers->begin()];
6810 }
6811 
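// Hands the next available buffer on the given port to the client: marks it client-owned and
// copies image-data and crop info from the buffer's format into its meta.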
6812 ssize_t MediaCodec::dequeuePortBuffer(int32_t portIndex) {
6813     CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);
6814 
6815     BufferInfo *info = peekNextPortBuffer(portIndex);
6816     if (!info) {
6817         return -EAGAIN;
6818     }
6819 
6820     std::list<size_t> *availBuffers = &mAvailPortBuffers[portIndex];
6821     size_t index = *availBuffers->begin();
6822     CHECK_EQ(info, &mPortBuffers[portIndex][index]);
6823     availBuffers->erase(availBuffers->begin());
6824 
6825     {
6826         Mutex::Autolock al(mBufferLock);
6827         ALOGV("dequeuePortBuffer: mPortBuffers[%s][%zu] checking if not owned by client",
6828               portIndex == kPortIndexInput ? "in" : "out", index);
6829         CHECK(!info->mOwnedByClient);
6830         info->mOwnedByClient = true;
6831 
6832         // set image-data
6833         if (info->mData->format() != NULL) {
6834             sp<ABuffer> imageData;
6835             if (info->mData->format()->findBuffer("image-data", &imageData)) {
6836                 info->mData->meta()->setBuffer("image-data", imageData);
6837             }
6838             int32_t left, top, right, bottom;
6839             if (info->mData->format()->findRect("crop", &left, &top, &right, &bottom)) {
6840                 info->mData->meta()->setRect("crop-rect", left, top, right, bottom);
6841             }
6842         }
6843     }
6844 
6845     return index;
6846 }
6847 
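// Lazily creates a placeholder ReleaseSurface used as the output surface while a video
// decoder is detached from a client-provided surface; encoders and non-video codecs get none.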
6848 sp<Surface> MediaCodec::getOrCreateDetachedSurface() {
6849     if (mDomain != DOMAIN_VIDEO || (mFlags & kFlagIsEncoder)) {
6850         return nullptr;
6851     }
6852 
6853     if (!mDetachedSurface) {
6854         uint64_t usage = 0;
6855         if (!mSurface || mSurface->getConsumerUsage(&usage) != OK) {
6856             // By default prepare buffer to be displayed on any of the common surfaces
6857             usage = (GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_HW_COMPOSER);
6858         }
6859         mDetachedSurface.reset(new ReleaseSurface(usage));
6860     }
6861 
6862     return mDetachedSurface->getSurface();
6863 }
6864 
6865 status_t MediaCodec::connectToSurface(const sp<Surface> &surface, uint32_t *generation) {
6866     status_t err = OK;
6867     if (surface != NULL) {
6868         uint64_t oldId, newId;
6869         if (mSurface != NULL
6870                 && surface->getUniqueId(&newId) == NO_ERROR
6871                 && mSurface->getUniqueId(&oldId) == NO_ERROR
6872                 && newId == oldId) {
6873             ALOGI("[%s] connecting to the same surface. Nothing to do.", mComponentName.c_str());
6874             return ALREADY_EXISTS;
6875         }
6876 
6877         // in case we don't connect, ensure that we don't signal the surface is
6878         // connected to the screen
6879         mIsSurfaceToDisplay = false;
6880 
6881         err = nativeWindowConnect(surface.get(), "connectToSurface");
6882         if (err == OK) {
6883             // Require a fresh set of buffers after each connect by using a unique generation
6884             // number. Rely on the fact that the maximum process id supported by Linux is 2^22.
6885             // PID is never 0 so we don't have to worry that we use the default generation of 0.
6886             // TODO: come up with a unique scheme if other producers also set the generation number.
6887             static uint32_t sSurfaceGeneration = 0;
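            // Generation layout: bits 31..10 carry the (truncated) pid, bits 9..0 a wrapping
            // per-process counter, so the value is never 0 and changes on every connect.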
6888             *generation = (getpid() << 10) | (++sSurfaceGeneration & ((1 << 10) - 1));
6889             surface->setGenerationNumber(*generation);
6890             ALOGI("[%s] setting surface generation to %u", mComponentName.c_str(), *generation);
6891 
6892             // HACK: clear any free buffers. Remove when connect will automatically do this.
6893             // This is needed as the consumer may be holding onto stale frames that it can reattach
6894             // to this surface after disconnect/connect, and those free frames would inherit the new
6895             // generation number. Disconnecting after setting a unique generation prevents this.
6896             nativeWindowDisconnect(surface.get(), "connectToSurface(reconnect)");
6897             sp<SurfaceListener> listener =
6898                     new OnBufferReleasedListener(*generation, mBufferChannel);
6899             err = surfaceConnectWithListener(
6900                     surface, listener, "connectToSurface(reconnect-with-listener)");
6901         }
6902 
6903         if (err != OK) {
6904             *generation = 0;
6905             ALOGE("nativeWindowConnect/surfaceConnectWithListener returned an error: %s (%d)",
6906                     strerror(-err), err);
6907         } else {
6908             if (!mAllowFrameDroppingBySurface) {
6909                 disableLegacyBufferDropPostQ(surface);
6910             }
6911             // keep track whether or not the buffers of the connected surface go to the screen
6912             int result = 0;
6913             surface->query(NATIVE_WINDOW_QUEUES_TO_WINDOW_COMPOSER, &result);
6914             mIsSurfaceToDisplay = result != 0;
6915         }
6916     }
6917     // do not return ALREADY_EXISTS unless surfaces are the same
6918     return err == ALREADY_EXISTS ? BAD_VALUE : err;
6919 }
6920 
6921 status_t MediaCodec::disconnectFromSurface() {
6922     status_t err = OK;
6923     if (mSurface != NULL) {
6924         // Resetting generation is not technically needed, but there is no need to keep it either
6925         mSurface->setGenerationNumber(0);
6926         err = nativeWindowDisconnect(mSurface.get(), "disconnectFromSurface");
6927         if (err != OK) {
6928             ALOGW("nativeWindowDisconnect returned an error: %s (%d)", strerror(-err), err);
6929         }
6930         // assume disconnected even on error
6931         mSurface.clear();
6932         mSurfaceGeneration = 0;
6933         mIsSurfaceToDisplay = false;
6934     }
6935     return err;
6936 }
6937 
6938 status_t MediaCodec::handleSetSurface(const sp<Surface> &surface, bool callCodec, bool onShutDown) {
6939     uint32_t generation;
6940     status_t err = OK;
6941     if (surface != nullptr) {
6942         err = connectToSurface(surface, &generation);
6943         if (err == ALREADY_EXISTS) {
6944             // reconnecting to same surface
6945             return OK;
6946         }
6947 
6948         if (err == OK && callCodec) {
6949             if (mFlags & kFlagUsesSoftwareRenderer) {
6950                 if (mSoftRenderer != NULL
6951                         && (mFlags & kFlagPushBlankBuffersOnShutdown)) {
6952                     pushBlankBuffersToNativeWindow(mSurface.get());
6953                 }
6954                 // do not create a new software renderer on shutdown (release)
6955                 // as it will not be used anyway
6956                 if (!onShutDown) {
6957                     surface->setDequeueTimeout(-1);
6958                     mSoftRenderer = new SoftwareRenderer(surface);
6959                     // TODO: check if this was successful
6960                 }
6961             } else {
6962                 err = mCodec->setSurface(surface, generation);
6963             }
6964 
6965             mReliabilityContextMetrics.setOutputSurfaceCount++;
6966         }
6967     }
6968 
6969     if (err == OK) {
6970         if (mSurface != NULL) {
6971             (void)disconnectFromSurface();
6972         }
6973 
6974         if (surface != NULL) {
6975             mSurface = surface;
6976             mSurfaceGeneration = generation;
6977         }
6978     }
6979 
6980     return err;
6981 }
6982 
6983 status_t MediaCodec::handleSetSurface(const sp<Surface> &surface) {
6984     if (android::media::codec::provider_->null_output_surface_support()) {
6985         return handleSetSurface(surface, false /* callCodec */);
6986     }
6987 
6988     status_t err = OK;
6989     if (mSurface != NULL) {
6990         (void)disconnectFromSurface();
6991     }
6992     if (surface != NULL) {
6993         uint32_t generation;
6994         err = connectToSurface(surface, &generation);
6995         if (err == OK) {
6996             mSurface = surface;
6997             mSurfaceGeneration = generation;
6998         }
6999     }
7000     return err;
7001 }
7002 
7003 void MediaCodec::onInputBufferAvailable() {
7004     int32_t index;
7005     while ((index = dequeuePortBuffer(kPortIndexInput)) >= 0) {
7006         sp<AMessage> msg = mCallback->dup();
7007         msg->setInt32("callbackID", CB_INPUT_AVAILABLE);
7008         msg->setInt32("index", index);
7009         msg->post();
7010     }
7011 }
7012 
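// Drains available output buffers to the async callback: decode-only buffers are discarded,
// and buffers carrying access-unit info are reported as CB_LARGE_FRAME_OUTPUT_AVAILABLE
// instead of CB_OUTPUT_AVAILABLE.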
7013 void MediaCodec::onOutputBufferAvailable() {
7014     int32_t index;
7015     while ((index = dequeuePortBuffer(kPortIndexOutput)) >= 0) {
7016         if (discardDecodeOnlyOutputBuffer(index)) {
7017             continue;
7018         }
7019         sp<AMessage> msg = mCallback->dup();
7020         const sp<MediaCodecBuffer> &buffer =
7021             mPortBuffers[kPortIndexOutput][index].mData;
7022         int32_t outputCallbackID = CB_OUTPUT_AVAILABLE;
7023         sp<RefBase> accessUnitInfoObj;
7024         msg->setInt32("index", index);
7025         msg->setSize("offset", buffer->offset());
7026         msg->setSize("size", buffer->size());
7027 
7028         int64_t timeUs;
7029         CHECK(buffer->meta()->findInt64("timeUs", &timeUs));
7030 
7031         msg->setInt64("timeUs", timeUs);
7032 
7033         int32_t flags;
7034         CHECK(buffer->meta()->findInt32("flags", &flags));
7035 
7036         msg->setInt32("flags", flags);
7037         buffer->meta()->findObject("accessUnitInfo", &accessUnitInfoObj);
7038         if (accessUnitInfoObj) {
7039             outputCallbackID = CB_LARGE_FRAME_OUTPUT_AVAILABLE;
7040             msg->setObject("accessUnitInfo", accessUnitInfoObj);
7041             sp<BufferInfosWrapper> auInfo(
7042                     (decltype(auInfo.get()))accessUnitInfoObj.get());
7043             auInfo->value.back().mFlags |= flags & BUFFER_FLAG_END_OF_STREAM;
7044         }
7045         msg->setInt32("callbackID", outputCallbackID);
7046 
7047         statsBufferReceived(timeUs, buffer);
7048 
7049         msg->post();
7050     }
7051 }
7052 void MediaCodec::onCryptoError(const sp<AMessage> & msg) {
7053     if (mCallback != NULL) {
7054         sp<AMessage> cb_msg = mCallback->dup();
7055         cb_msg->setInt32("callbackID", CB_CRYPTO_ERROR);
7056         cb_msg->extend(msg);
7057         cb_msg->post();
7058     }
7059 }
7060 void MediaCodec::onError(status_t err, int32_t actionCode, const char *detail) {
7061     if (mCallback != NULL) {
7062         sp<AMessage> msg = mCallback->dup();
7063         msg->setInt32("callbackID", CB_ERROR);
7064         msg->setInt32("err", err);
7065         msg->setInt32("actionCode", actionCode);
7066 
7067         if (detail != NULL) {
7068             msg->setString("detail", detail);
7069         }
7070 
7071         msg->post();
7072     }
7073 }
7074 
7075 void MediaCodec::onOutputFormatChanged() {
7076     if (mCallback != NULL) {
7077         sp<AMessage> msg = mCallback->dup();
7078         msg->setInt32("callbackID", CB_OUTPUT_FORMAT_CHANGED);
7079         msg->setMessage("format", mOutputFormat);
7080         msg->post();
7081     }
7082 }
7083 
7084 void MediaCodec::onRequiredResourcesChanged(
7085         const std::vector<InstanceResourceInfo>& resourceInfo) {
7086     mRequiredResourceInfo = resourceInfo;
7087     // Make sure codec availability feature is on.
7088     if (mCallback != nullptr && android::media::codec::codec_availability()) {
7089         // Post the callback
7090         sp<AMessage> msg = mCallback->dup();
7091         msg->setInt32("callbackID", CB_REQUIRED_RESOURCES_CHANGED);
7092         msg->post();
7093     }
7094 }
7095 
7096 void MediaCodec::postActivityNotificationIfPossible() {
7097     if (mActivityNotify == NULL) {
7098         return;
7099     }
7100 
7101     bool isErrorOrOutputChanged =
7102             (mFlags & (kFlagStickyError
7103                     | kFlagOutputBuffersChanged
7104                     | kFlagOutputFormatChanged));
7105 
7106     if (isErrorOrOutputChanged
7107             || !mAvailPortBuffers[kPortIndexInput].empty()
7108             || !mAvailPortBuffers[kPortIndexOutput].empty()) {
7109         mActivityNotify->setInt32("input-buffers",
7110                 mAvailPortBuffers[kPortIndexInput].size());
7111 
7112         if (isErrorOrOutputChanged) {
7113             // we want consumer to dequeue as many times as it can
7114             mActivityNotify->setInt32("output-buffers", INT32_MAX);
7115         } else {
7116             mActivityNotify->setInt32("output-buffers",
7117                     mAvailPortBuffers[kPortIndexOutput].size());
7118         }
7119         mActivityNotify->post();
7120         mActivityNotify.clear();
7121     }
7122 }
7123 
7124 status_t MediaCodec::setParameters(const sp<AMessage> &params) {
7125     sp<AMessage> msg = new AMessage(kWhatSetParameters, this);
7126     msg->setMessage("params", params);
7127 
7128     sp<AMessage> response;
7129     return PostAndAwaitResponse(msg, &response);
7130 }
7131 
7132 status_t MediaCodec::onSetParameters(const sp<AMessage> &params) {
7133     if (mState == UNINITIALIZED || mState == INITIALIZING) {
7134         return NO_INIT;
7135     }
7136     updateLowLatency(params);
7137     updateCodecImportance(params);
7138     mapFormat(mComponentName, params, nullptr, false);
7139     updateTunnelPeek(params);
7140     mCodec->signalSetParameters(params);
7141 
7142     return OK;
7143 }
7144 
7145 status_t MediaCodec::amendOutputFormatWithCodecSpecificData(
7146         const sp<MediaCodecBuffer> &buffer) {
7147     AString mime;
7148     CHECK(mOutputFormat->findString("mime", &mime));
7149 
7150     if (!strcasecmp(mime.c_str(), MEDIA_MIMETYPE_VIDEO_AVC)) {
7151         // Codec specific data should be SPS and PPS in a single buffer,
7152         // each prefixed by a startcode (0x00 0x00 0x00 0x01).
7153         // We separate the two and put them into the output format
7154         // under the keys "csd-0" and "csd-1".
7155 
7156         unsigned csdIndex = 0;
7157 
7158         const uint8_t *data = buffer->data();
7159         size_t size = buffer->size();
7160 
7161         const uint8_t *nalStart;
7162         size_t nalSize;
7163         while (getNextNALUnit(&data, &size, &nalStart, &nalSize, true) == OK) {
7164             sp<ABuffer> csd = new ABuffer(nalSize + 4);
7165             memcpy(csd->data(), "\x00\x00\x00\x01", 4);
7166             memcpy(csd->data() + 4, nalStart, nalSize);
7167 
7168             mOutputFormat->setBuffer(
7169                     base::StringPrintf("csd-%u", csdIndex).c_str(), csd);
7170 
7171             ++csdIndex;
7172         }
7173 
7174         if (csdIndex != 2) {
7175             mErrorLog.log(LOG_TAG, base::StringPrintf(
7176                     "codec config data contains %u NAL units; expected 2.", csdIndex));
7177             return ERROR_MALFORMED;
7178         }
7179     } else {
7180         // For everything else we just stash the codec specific data into
7181         // the output format as a single piece of csd under "csd-0".
7182         sp<ABuffer> csd = new ABuffer(buffer->size());
7183         memcpy(csd->data(), buffer->data(), buffer->size());
7184         csd->setRange(0, buffer->size());
7185         mOutputFormat->setBuffer("csd-0", csd);
7186     }
7187 
7188     return OK;
7189 }
7190 
7191 void MediaCodec::postPendingRepliesAndDeferredMessages(
7192         std::string origin, status_t err /* = OK */) {
7193     sp<AMessage> response{new AMessage};
7194     if (err != OK) {
7195         response->setInt32("err", err);
7196     }
7197     postPendingRepliesAndDeferredMessages(origin, response);
7198 }
7199 
7200 void MediaCodec::postPendingRepliesAndDeferredMessages(
7201         std::string origin, const sp<AMessage> &response) {
7202     LOG_ALWAYS_FATAL_IF(
7203             !mReplyID,
7204             "postPendingRepliesAndDeferredMessages: mReplyID == null, from %s following %s",
7205             origin.c_str(),
7206             mLastReplyOrigin.c_str());
7207     mLastReplyOrigin = origin;
7208     response->postReply(mReplyID);
7209     mReplyID.clear();
7210     ALOGV_IF(!mDeferredMessages.empty(),
7211             "posting %zu deferred messages", mDeferredMessages.size());
7212     for (sp<AMessage> msg : mDeferredMessages) {
7213         msg->post();
7214     }
7215     mDeferredMessages.clear();
7216 }
7217 
7218 std::string MediaCodec::apiStateString() {
7219     const char *rval = NULL;
7220     char rawbuffer[16]; // room for "%d"
7221 
7222     switch (mState) {
7223         case UNINITIALIZED:
7224             rval = (mFlags & kFlagStickyError) ? "at Error state" : "at Released state";
7225             break;
7226         case INITIALIZING: rval = "while constructing"; break;
7227         case INITIALIZED: rval = "at Uninitialized state"; break;
7228         case CONFIGURING: rval = "during configure()"; break;
7229         case CONFIGURED: rval = "at Configured state"; break;
7230         case STARTING: rval = "during start()"; break;
7231         case STARTED: rval = "at Running state"; break;
7232         case FLUSHING: rval = "during flush()"; break;
7233         case FLUSHED: rval = "at Flushed state"; break;
7234         case STOPPING: rval = "during stop()"; break;
7235         case RELEASING: rval = "during release()"; break;
7236         default:
7237             snprintf(rawbuffer, sizeof(rawbuffer), "at %d", mState);
7238             rval = rawbuffer;
7239             break;
7240     }
7241     return rval;
7242 }
7243 
7244 std::string MediaCodec::stateString(State state) {
7245     const char *rval = NULL;
7246     char rawbuffer[16]; // room for "%d"
7247 
7248     switch (state) {
7249         case UNINITIALIZED: rval = "UNINITIALIZED"; break;
7250         case INITIALIZING: rval = "INITIALIZING"; break;
7251         case INITIALIZED: rval = "INITIALIZED"; break;
7252         case CONFIGURING: rval = "CONFIGURING"; break;
7253         case CONFIGURED: rval = "CONFIGURED"; break;
7254         case STARTING: rval = "STARTING"; break;
7255         case STARTED: rval = "STARTED"; break;
7256         case FLUSHING: rval = "FLUSHING"; break;
7257         case FLUSHED: rval = "FLUSHED"; break;
7258         case STOPPING: rval = "STOPPING"; break;
7259         case RELEASING: rval = "RELEASING"; break;
7260         default:
7261             snprintf(rawbuffer, sizeof(rawbuffer), "%d", state);
7262             rval = rawbuffer;
7263             break;
7264     }
7265     return rval;
7266 }
7267 
7268 // static
7269 status_t MediaCodec::CanFetchLinearBlock(
7270         const std::vector<std::string> &names, bool *isCompatible) {
7271     *isCompatible = false;
7272     if (names.size() == 0) {
7273         *isCompatible = true;
7274         return OK;
7275     }
7276     const CodecListCache &cache = GetCodecListCache();
7277     for (const std::string &name : names) {
7278         auto it = cache.mCodecInfoMap.find(name);
7279         if (it == cache.mCodecInfoMap.end()) {
7280             return NAME_NOT_FOUND;
7281         }
7282         const char *owner = it->second->getOwnerName();
7283         if (owner == nullptr || strncmp(owner, "default", 8) == 0) {
7284             *isCompatible = false;
7285             return OK;
7286         } else if (strncmp(owner, "codec2::", 8) != 0) {
7287             return NAME_NOT_FOUND;
7288         }
7289     }
7290     return CCodec::CanFetchLinearBlock(names, kDefaultReadWriteUsage, isCompatible);
7291 }
7292 
7293 // static
7294 std::shared_ptr<C2LinearBlock> MediaCodec::FetchLinearBlock(
7295         size_t capacity, const std::vector<std::string> &names) {
7296     return CCodec::FetchLinearBlock(capacity, kDefaultReadWriteUsage, names);
7297 }
7298 
7299 // static
7300 status_t MediaCodec::CanFetchGraphicBlock(
7301         const std::vector<std::string> &names, bool *isCompatible) {
7302     *isCompatible = false;
7303     if (names.size() == 0) {
7304         *isCompatible = true;
7305         return OK;
7306     }
7307     const CodecListCache &cache = GetCodecListCache();
7308     for (const std::string &name : names) {
7309         auto it = cache.mCodecInfoMap.find(name);
7310         if (it == cache.mCodecInfoMap.end()) {
7311             return NAME_NOT_FOUND;
7312         }
7313         const char *owner = it->second->getOwnerName();
7314         if (owner == nullptr || strncmp(owner, "default", 8) == 0) {
7315             *isCompatible = false;
7316             return OK;
7317         } else if (strncmp(owner, "codec2.", 7) != 0) {
7318             return NAME_NOT_FOUND;
7319         }
7320     }
7321     return CCodec::CanFetchGraphicBlock(names, isCompatible);
7322 }
7323 
7324 // static
7325 std::shared_ptr<C2GraphicBlock> MediaCodec::FetchGraphicBlock(
7326         int32_t width,
7327         int32_t height,
7328         int32_t format,
7329         uint64_t usage,
7330         const std::vector<std::string> &names) {
7331     return CCodec::FetchGraphicBlock(width, height, format, usage, names);
7332 }
7333 
7334 }  // namespace android
7335