/*
 * Copyright 2019 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

// TODO(b/129481165): remove the #pragma below and fix conversion issues
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wextra"

#undef LOG_TAG
#define LOG_TAG "VSyncPredictor"

#define ATRACE_TAG ATRACE_TAG_GRAPHICS

#include <algorithm>
#include <chrono>
#include <sstream>

#include <android-base/logging.h>
#include <android-base/stringprintf.h>
#include <common/FlagManager.h>
#include <common/trace.h>
#include <cutils/compiler.h>
#include <cutils/properties.h>
#include <ftl/concat.h>
#include <utils/Log.h>

#include "RefreshRateSelector.h"
#include "VSyncPredictor.h"

namespace android::scheduler {

using base::StringAppendF;

static auto constexpr kMaxPercent = 100u;

namespace {
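// Returns how many hardware vsync pulses make up one frame at the mode's peak refresh rate,
// i.e. the rounded ratio of the peak frame period to the vsync period (greater than 1 on VRR
// modes whose vsync rate is higher than the peak frame rate).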
int numVsyncsPerFrame(const ftl::NonNull<DisplayModePtr>& displayModePtr) {
    const auto idealPeakRefreshPeriod = displayModePtr->getPeakFps().getPeriodNsecs();
    const auto idealRefreshPeriod = displayModePtr->getVsyncRate().getPeriodNsecs();
    return static_cast<int>(std::round(static_cast<float>(idealPeakRefreshPeriod) /
                                       static_cast<float>(idealRefreshPeriod)));
}
} // namespace

VSyncPredictor::~VSyncPredictor() = default;

VSyncPredictor::VSyncPredictor(std::unique_ptr<Clock> clock, ftl::NonNull<DisplayModePtr> modePtr,
                               size_t historySize, size_t minimumSamplesForPrediction,
                               uint32_t outlierTolerancePercent)
      : mClock(std::move(clock)),
        mId(modePtr->getPhysicalDisplayId()),
        mTraceOn(property_get_bool("debug.sf.vsp_trace", false)),
        kHistorySize(historySize),
        kMinimumSamplesForPrediction(minimumSamplesForPrediction),
        kOutlierTolerancePercent(std::min(outlierTolerancePercent, kMaxPercent)),
        mDisplayModePtr(modePtr),
        mNumVsyncsForFrame(numVsyncsPerFrame(mDisplayModePtr)) {
    resetModel();
}

inline void VSyncPredictor::traceInt64If(const char* name, int64_t value) const {
    if (CC_UNLIKELY(mTraceOn)) {
        traceInt64(name, value);
    }
}

inline void VSyncPredictor::traceInt64(const char* name, int64_t value) const {
    SFTRACE_INT64(ftl::Concat(ftl::truncated<14>(name), " ", mId.value).c_str(), value);
}

inline size_t VSyncPredictor::next(size_t i) const {
    return (i + 1) % mTimestamps.size();
}

nsecs_t VSyncPredictor::idealPeriod() const {
    return mDisplayModePtr->getVsyncRate().getPeriodNsecs();
}

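// Checks whether a new hardware vsync sample is consistent with the current model: it must land
// roughly an integer number of ideal periods away from the last accepted sample, and it must not
// duplicate a sample already in the history.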
bool VSyncPredictor::validate(nsecs_t timestamp) const {
    SFTRACE_CALL();
    if (mLastTimestampIndex < 0 || mTimestamps.empty()) {
        SFTRACE_INSTANT("timestamp valid (first)");
        return true;
    }

    const auto aValidTimestamp = mTimestamps[mLastTimestampIndex];
    const auto percent =
            (timestamp - aValidTimestamp) % idealPeriod() * kMaxPercent / idealPeriod();
    if (percent >= kOutlierTolerancePercent &&
        percent <= (kMaxPercent - kOutlierTolerancePercent)) {
        SFTRACE_FORMAT_INSTANT("timestamp not aligned with model. aValidTimestamp %.2fms ago"
                               ", timestamp %.2fms ago, idealPeriod=%.2f percent=%d",
                               (mClock->now() - aValidTimestamp) / 1e6f,
                               (mClock->now() - timestamp) / 1e6f,
                               idealPeriod() / 1e6f, percent);
        return false;
    }

    const auto iter = std::min_element(mTimestamps.begin(), mTimestamps.end(),
                                       [timestamp](nsecs_t a, nsecs_t b) {
                                           return std::abs(timestamp - a) < std::abs(timestamp - b);
                                       });
    const auto distancePercent = std::abs(*iter - timestamp) * kMaxPercent / idealPeriod();
    if (distancePercent < kOutlierTolerancePercent) {
        // duplicate timestamp
        SFTRACE_FORMAT_INSTANT("duplicate timestamp");
        return false;
    }
    return true;
}

nsecs_t VSyncPredictor::currentPeriod() const {
    std::lock_guard lock(mMutex);
    return mRateMap.find(idealPeriod())->second.slope;
}

Period VSyncPredictor::minFramePeriod() const {
    if (!FlagManager::getInstance().vrr_config()) {
        return Period::fromNs(currentPeriod());
    }

    std::lock_guard lock(mMutex);
    return minFramePeriodLocked();
}

Period VSyncPredictor::minFramePeriodLocked() const {
    const auto slope = mRateMap.find(idealPeriod())->second.slope;
    return Period::fromNs(slope * mNumVsyncsForFrame);
}

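// Ingests a hardware vsync sample and, once enough samples are collected, refits the prediction
// model (period and intercept) with a simple linear regression over the retained history.
// Returns false if the sample was rejected or the fitted period strays too far from the ideal
// period.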
bool VSyncPredictor::addVsyncTimestamp(nsecs_t timestamp) {
    SFTRACE_CALL();

    std::lock_guard lock(mMutex);

    if (!validate(timestamp)) {
        // VSR could elect to ignore the incongruent timestamp or resetModel(). If ts is ignored,
        // don't insert this ts into mTimestamps ringbuffer. If we are still
        // in the learning phase we should just clear all timestamps and start
        // over.
        if (mTimestamps.size() < kMinimumSamplesForPrediction) {
            // Add the timestamp to mTimestamps before clearing it so we could
            // update mKnownTimestamp based on the new timestamp.
            mTimestamps.push_back(timestamp);

            // Do not clear timelines as we don't want to break the phase while
            // we are still learning.
            clearTimestamps(/* clearTimelines */ false);
        } else if (!mTimestamps.empty()) {
            mKnownTimestamp =
                    std::max(timestamp, *std::max_element(mTimestamps.begin(), mTimestamps.end()));
        } else {
            mKnownTimestamp = timestamp;
        }
        SFTRACE_FORMAT_INSTANT("timestamp rejected. mKnownTimestamp was %.2fms ago",
                               (mClock->now() - *mKnownTimestamp) / 1e6f);
        return false;
    }

    if (mTimestamps.size() != kHistorySize) {
        mTimestamps.push_back(timestamp);
        mLastTimestampIndex = next(mLastTimestampIndex);
    } else {
        mLastTimestampIndex = next(mLastTimestampIndex);
        mTimestamps[mLastTimestampIndex] = timestamp;
    }

    traceInt64If("VSP-ts", timestamp);

    const size_t numSamples = mTimestamps.size();
    if (numSamples < kMinimumSamplesForPrediction) {
        mRateMap[idealPeriod()] = {idealPeriod(), 0};
        return true;
    }

    // This is a 'simple linear regression' calculation of Y over X, with Y being the
    // vsync timestamps, and X being the ordinal of vsync count.
    // The calculated slope is the vsync period.
    // Formula for reference:
    // Sigma_i: means sum over all timestamps.
    // mean(variable): statistical mean of variable.
    // X: snapped ordinal of the timestamp
    // Y: vsync timestamp
    //
    //             Sigma_i( (X_i - mean(X)) * (Y_i - mean(Y)) )
    //     slope = ---------------------------------------------
    //                   Sigma_i( (X_i - mean(X))^2 )
    //
    //     intercept = mean(Y) - slope * mean(X)
    //
    std::vector<nsecs_t> vsyncTS(numSamples);
    std::vector<nsecs_t> ordinals(numSamples);

    // Normalizing to the oldest timestamp cuts down on error in calculating the intercept.
    const auto oldestTS = *std::min_element(mTimestamps.begin(), mTimestamps.end());
    auto it = mRateMap.find(idealPeriod());
    auto const currentPeriod = it->second.slope;

    // The mean of the ordinals must be precise for the intercept calculation, so scale them up for
    // fixed-point arithmetic.
    constexpr int64_t kScalingFactor = 1000;

    nsecs_t meanTS = 0;
    nsecs_t meanOrdinal = 0;

    for (size_t i = 0; i < numSamples; i++) {
        const auto timestamp = mTimestamps[i] - oldestTS;
        vsyncTS[i] = timestamp;
        meanTS += timestamp;

        const auto ordinal = currentPeriod == 0
                ? 0
                : (vsyncTS[i] + currentPeriod / 2) / currentPeriod * kScalingFactor;
        ordinals[i] = ordinal;
        meanOrdinal += ordinal;
    }

    meanTS /= numSamples;
    meanOrdinal /= numSamples;

    for (size_t i = 0; i < numSamples; i++) {
        vsyncTS[i] -= meanTS;
        ordinals[i] -= meanOrdinal;
    }

    nsecs_t top = 0;
    nsecs_t bottom = 0;
    for (size_t i = 0; i < numSamples; i++) {
        top += vsyncTS[i] * ordinals[i];
        bottom += ordinals[i] * ordinals[i];
    }

    if (CC_UNLIKELY(bottom == 0)) {
        it->second = {idealPeriod(), 0};
        clearTimestamps(/* clearTimelines */ true);
        return false;
    }

    nsecs_t const anticipatedPeriod = top * kScalingFactor / bottom;
    nsecs_t const intercept = meanTS - (anticipatedPeriod * meanOrdinal / kScalingFactor);

    auto const percent = std::abs(anticipatedPeriod - idealPeriod()) * kMaxPercent / idealPeriod();
    if (percent >= kOutlierTolerancePercent) {
        it->second = {idealPeriod(), 0};
        clearTimestamps(/* clearTimelines */ true);
        return false;
    }

    traceInt64If("VSP-period", anticipatedPeriod);
    traceInt64If("VSP-intercept", intercept);

    it->second = {anticipatedPeriod, intercept};

    ALOGV("model update ts %" PRIu64 ": %" PRId64 " slope: %" PRId64 " intercept: %" PRId64,
          mId.value, timestamp, anticipatedPeriod, intercept);
    return true;
}

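// Projects timePoint onto the fitted vsync grid and returns the first predicted vsync that is not
// earlier than timePoint. When no samples are available, extrapolates from the last known
// timestamp using the ideal period.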
nsecs_t VSyncPredictor::snapToVsync(nsecs_t timePoint) const {
    auto const [slope, intercept] = getVSyncPredictionModelLocked();

    if (mTimestamps.empty()) {
        traceInt64("VSP-mode", 1);
        auto const knownTimestamp = mKnownTimestamp ? *mKnownTimestamp : timePoint;
        auto const numPeriodsOut = ((timePoint - knownTimestamp) / idealPeriod()) + 1;
        return knownTimestamp + numPeriodsOut * idealPeriod();
    }

    auto const oldest = *std::min_element(mTimestamps.begin(), mTimestamps.end());

    // See b/145667109, the ordinal calculation must take into account the intercept.
    auto const zeroPoint = oldest + intercept;
    auto const ordinalRequest = (timePoint - zeroPoint + slope) / slope;
    auto const prediction = (ordinalRequest * slope) + intercept + oldest;

    traceInt64("VSP-mode", 0);
    traceInt64If("VSP-timePoint", timePoint);
    traceInt64If("VSP-prediction", prediction);

    auto const printer = [&, slope = slope, intercept = intercept] {
        std::stringstream str;
        str << "prediction made from: " << timePoint << " prediction: " << prediction << " (+"
            << prediction - timePoint << ") slope: " << slope << " intercept: " << intercept
            << " oldestTS: " << oldest << " ordinal: " << ordinalRequest;
        return str.str();
    };

    ALOGV("%s", printer().c_str());
    LOG_ALWAYS_FATAL_IF(prediction < timePoint, "VSyncPredictor: model miscalculation: %s",
                        printer().c_str());

    return prediction;
}

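// Returns the next expected vsync deadline after timePoint (and not earlier than lastVsyncOpt).
// Walks the active timelines from oldest to newest and takes the first one that still covers the
// requested vsync, updating mLastCommittedVsync with the result.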
nsecs_t VSyncPredictor::nextAnticipatedVSyncTimeFrom(nsecs_t timePoint,
                                                     std::optional<nsecs_t> lastVsyncOpt) {
    SFTRACE_CALL();
    std::lock_guard lock(mMutex);

    const auto now = TimePoint::fromNs(mClock->now());
    purgeTimelines(now);

    if (lastVsyncOpt && *lastVsyncOpt > timePoint) {
        timePoint = *lastVsyncOpt;
    }

    const auto model = getVSyncPredictionModelLocked();
    const auto threshold = model.slope / 2;
    std::optional<Period> minFramePeriodOpt;

    if (mNumVsyncsForFrame > 1) {
        minFramePeriodOpt = minFramePeriodLocked();
    }

    std::optional<TimePoint> vsyncOpt;
    for (auto& timeline : mTimelines) {
        vsyncOpt = timeline.nextAnticipatedVSyncTimeFrom(model, minFramePeriodOpt,
                                                         snapToVsync(timePoint), mMissedVsync,
                                                         lastVsyncOpt
                                                                 ? snapToVsync(*lastVsyncOpt -
                                                                               threshold)
                                                                 : lastVsyncOpt);
        if (vsyncOpt) {
            break;
        }
    }
    LOG_ALWAYS_FATAL_IF(!vsyncOpt);

    if (*vsyncOpt > mLastCommittedVsync) {
        mLastCommittedVsync = *vsyncOpt;
        SFTRACE_FORMAT_INSTANT("mLastCommittedVsync in %.2fms",
                               float(mLastCommittedVsync.ns() - mClock->now()) / 1e6f);
    }

    return vsyncOpt->ns();
}

/*
 * Returns whether a given vsync timestamp is in phase with a frame rate.
 * If the frame rate is not a divisor of the refresh rate, it is always considered in phase.
 * For example, if the vsync timestamps are (16.6, 33.3, 50.0, 66.6):
 * isVSyncInPhase(16.6, 30) = true
 * isVSyncInPhase(33.3, 30) = false
 * isVSyncInPhase(50.0, 30) = true
 */
bool VSyncPredictor::isVSyncInPhase(nsecs_t timePoint, Fps frameRate) {
    if (timePoint == 0) {
        return true;
    }

    std::lock_guard lock(mMutex);
    const auto model = getVSyncPredictionModelLocked();
    const nsecs_t period = model.slope;
    const nsecs_t justBeforeTimePoint = timePoint - period / 2;
    const auto now = TimePoint::fromNs(mClock->now());
    const auto vsync = snapToVsync(justBeforeTimePoint);

    purgeTimelines(now);

    for (auto& timeline : mTimelines) {
        const bool isVsyncValid = FlagManager::getInstance().vrr_bugfix_24q4()
                ? timeline.isWithin(TimePoint::fromNs(vsync)) ==
                        VsyncTimeline::VsyncOnTimeline::Unique
                : timeline.validUntil() && timeline.validUntil()->ns() > vsync;
        if (isVsyncValid) {
            return timeline.isVSyncInPhase(model, vsync, frameRate);
        }
    }

    // The last timeline should always be valid.
    return mTimelines.back().isVSyncInPhase(model, vsync, frameRate);
}

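// Updates the render rate. With applyImmediately, collapses to a single timeline at the new rate.
// Otherwise the current timeline is frozen at the last committed vsync (or the timelines are reset
// when jumping to a much higher rate with a far-ahead committed vsync) and a fresh timeline is
// appended, so vsyncs that were already handed out keep their phase.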
void VSyncPredictor::setRenderRate(Fps renderRate, bool applyImmediately) {
    SFTRACE_FORMAT("%s %s", __func__, to_string(renderRate).c_str());
    ALOGV("%s %s: RenderRate %s", __func__, to_string(mId).c_str(), to_string(renderRate).c_str());
    std::lock_guard lock(mMutex);
    const auto prevRenderRate = mRenderRateOpt;
    mRenderRateOpt = renderRate;
    const auto renderPeriodDelta =
            prevRenderRate ? prevRenderRate->getPeriodNsecs() - renderRate.getPeriodNsecs() : 0;
    if (applyImmediately) {
        SFTRACE_FORMAT_INSTANT("applyImmediately");
        while (mTimelines.size() > 1) {
            mTimelines.pop_front();
        }

        mTimelines.front().setRenderRate(renderRate);
        return;
    }

    const bool newRenderRateIsHigher = renderPeriodDelta > renderRate.getPeriodNsecs() &&
            mLastCommittedVsync.ns() - mClock->now() > 2 * renderRate.getPeriodNsecs();
    if (newRenderRateIsHigher) {
        SFTRACE_FORMAT_INSTANT("newRenderRateIsHigher");
        mTimelines.clear();
        mLastCommittedVsync = TimePoint::fromNs(0);
    } else {
        if (FlagManager::getInstance().vrr_bugfix_24q4()) {
            // We need to freeze the timeline at the committed vsync, and
            // then use it with threshold adjustments when required to avoid
            // marginal errors when checking the vsync on the timeline.
            mTimelines.back().freeze(mLastCommittedVsync);
        } else {
            mTimelines.back().freeze(
                    TimePoint::fromNs(mLastCommittedVsync.ns() + mIdealPeriod.ns() / 2));
        }
    }
    mTimelines.emplace_back(mLastCommittedVsync, mIdealPeriod, renderRate);
    purgeTimelines(TimePoint::fromNs(mClock->now()));
}

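// Switches this predictor to a new mode of the same display: re-seeds the rate map for the new
// ideal period and clears the sample history. Timelines are preserved only on VRR displays when
// the mode id is unchanged.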
void VSyncPredictor::setDisplayModePtr(ftl::NonNull<DisplayModePtr> modePtr) {
    LOG_ALWAYS_FATAL_IF(mId != modePtr->getPhysicalDisplayId(),
                        "mode does not belong to the display");
    SFTRACE_FORMAT("%s %s", __func__, to_string(*modePtr).c_str());
    const auto timeout = modePtr->getVrrConfig()
            ? modePtr->getVrrConfig()->notifyExpectedPresentConfig
            : std::nullopt;
    ALOGV("%s %s: DisplayMode %s notifyExpectedPresentTimeout %s", __func__, to_string(mId).c_str(),
          to_string(*modePtr).c_str(),
          timeout ? std::to_string(timeout->timeoutNs).c_str() : "N/A");
    std::lock_guard lock(mMutex);

    // do not clear the timelines on VRR displays if we didn't change the mode
    const bool isVrr = modePtr->getVrrConfig().has_value();
    const bool clearTimelines = !isVrr || mDisplayModePtr->getId() != modePtr->getId();
    mDisplayModePtr = modePtr;
    mNumVsyncsForFrame = numVsyncsPerFrame(mDisplayModePtr);
    traceInt64("VSP-setPeriod", modePtr->getVsyncRate().getPeriodNsecs());

    static constexpr size_t kSizeLimit = 30;
    if (CC_UNLIKELY(mRateMap.size() == kSizeLimit)) {
        mRateMap.erase(mRateMap.begin());
    }

    if (mRateMap.find(idealPeriod()) == mRateMap.end()) {
        mRateMap[idealPeriod()] = {idealPeriod(), 0};
    }

    if (clearTimelines) {
        mTimelines.clear();
    }
    clearTimestamps(clearTimelines);
}

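// Enforces the minimum frame period across the queued expected-present times: any entry that
// would land closer than minFramePeriod to its predecessor is pushed back. If the last entry ends
// up later than expectedPresentTime, the timelines are shifted by the resulting phase and that
// phase is returned; otherwise returns 0.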
Duration VSyncPredictor::ensureMinFrameDurationIsKept(TimePoint expectedPresentTime,
                                                      TimePoint lastConfirmedPresentTime) {
    SFTRACE_FORMAT("%s mNumVsyncsForFrame=%d mPastExpectedPresentTimes.size()=%zu", __func__,
                   mNumVsyncsForFrame, mPastExpectedPresentTimes.size());

    if (mNumVsyncsForFrame <= 1) {
        return 0ns;
    }

    const auto currentPeriod = mRateMap.find(idealPeriod())->second.slope;
    const auto threshold = currentPeriod / 2;
    const auto minFramePeriod = minFramePeriodLocked();

    auto prev = lastConfirmedPresentTime.ns();
    for (auto& current : mPastExpectedPresentTimes) {
        SFTRACE_FORMAT_INSTANT("current %.2f past last signaled fence",
                               static_cast<float>(current.ns() - prev) / 1e6f);

        const auto minPeriodViolation = current.ns() - prev + threshold < minFramePeriod.ns();
        if (minPeriodViolation) {
            SFTRACE_NAME("minPeriodViolation");
            current = TimePoint::fromNs(prev + minFramePeriod.ns());
            prev = current.ns();
        } else {
            break;
        }
    }

    if (!mPastExpectedPresentTimes.empty()) {
        const auto phase = Duration(mPastExpectedPresentTimes.back() - expectedPresentTime);
        if (phase > 0ns) {
            for (auto& timeline : mTimelines) {
                timeline.shiftVsyncSequence(phase, minFramePeriod);
            }
            mPastExpectedPresentTimes.clear();
            return phase;
        }
    }

    return 0ns;
}

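// Called at the start of composition on VRR displays. Queues expectedPresentTime, drops queued
// entries already covered by the last signaled present fence and, unless that frame presented
// early, applies the minimum-frame-period adjustment, remembering the shifted vsync in
// mMissedVsync.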
void VSyncPredictor::onFrameBegin(TimePoint expectedPresentTime, FrameTime lastSignaledFrameTime) {
    SFTRACE_NAME("VSyncPredictor::onFrameBegin");
    std::lock_guard lock(mMutex);

    if (!mDisplayModePtr->getVrrConfig()) return;

    const auto [lastConfirmedPresentTime, lastConfirmedExpectedPresentTime] = lastSignaledFrameTime;
    if (CC_UNLIKELY(mTraceOn)) {
        SFTRACE_FORMAT_INSTANT("vsync is %.2f past last signaled fence",
                               static_cast<float>(expectedPresentTime.ns() -
                                                  lastConfirmedPresentTime.ns()) /
                                       1e6f);
    }
    const auto currentPeriod = mRateMap.find(idealPeriod())->second.slope;
    const auto threshold = currentPeriod / 2;
    mPastExpectedPresentTimes.push_back(expectedPresentTime);

    while (!mPastExpectedPresentTimes.empty()) {
        const auto front = mPastExpectedPresentTimes.front().ns();
        const bool frontIsBeforeConfirmed = front < lastConfirmedPresentTime.ns() + threshold;
        if (frontIsBeforeConfirmed) {
            SFTRACE_FORMAT_INSTANT("Discarding old vsync - %.2f before last signaled fence",
                                   static_cast<float>(lastConfirmedPresentTime.ns() - front) /
                                           1e6f);
            mPastExpectedPresentTimes.pop_front();
        } else {
            break;
        }
    }

    if (lastConfirmedExpectedPresentTime.ns() - lastConfirmedPresentTime.ns() > threshold) {
        SFTRACE_FORMAT_INSTANT("lastFramePresentedEarly");
        return;
    }

    const auto phase = ensureMinFrameDurationIsKept(expectedPresentTime, lastConfirmedPresentTime);
    if (phase > 0ns) {
        mMissedVsync = {expectedPresentTime, minFramePeriodLocked()};
    }
}

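// Called when a frame missed its expected present time on a VRR display. The actual present time
// is unknown, so it is assumed to slip by one vsync period before re-running the
// minimum-frame-period adjustment.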
void VSyncPredictor::onFrameMissed(TimePoint expectedPresentTime) {
    SFTRACE_NAME("VSyncPredictor::onFrameMissed");

    std::lock_guard lock(mMutex);
    if (!mDisplayModePtr->getVrrConfig()) return;

    // We don't know when the frame is going to be presented, so we assume it missed one vsync.
    const auto currentPeriod = mRateMap.find(idealPeriod())->second.slope;
    const auto lastConfirmedPresentTime =
            TimePoint::fromNs(expectedPresentTime.ns() + currentPeriod);

    const auto phase = ensureMinFrameDurationIsKept(expectedPresentTime, lastConfirmedPresentTime);
    if (phase > 0ns) {
        mMissedVsync = {expectedPresentTime, Duration::fromNs(0)};
    }
}

VSyncPredictor::Model VSyncPredictor::getVSyncPredictionModel() const {
    std::lock_guard lock(mMutex);
    return VSyncPredictor::getVSyncPredictionModelLocked();
}

VSyncPredictor::Model VSyncPredictor::getVSyncPredictionModelLocked() const {
    return mRateMap.find(idealPeriod())->second;
}

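// Drops the sample history while remembering the most recent timestamp in mKnownTimestamp. When
// clearTimelines is set, also collapses the timelines down to a single one and re-anchors
// mLastCommittedVsync at the next predicted vsync.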
void VSyncPredictor::clearTimestamps(bool clearTimelines) {
    SFTRACE_FORMAT("%s: clearTimelines=%d", __func__, clearTimelines);

    if (!mTimestamps.empty()) {
        auto const maxRb = *std::max_element(mTimestamps.begin(), mTimestamps.end());
        if (mKnownTimestamp) {
            mKnownTimestamp = std::max(*mKnownTimestamp, maxRb);
            SFTRACE_FORMAT_INSTANT("mKnownTimestamp was %.2fms ago",
                                   (mClock->now() - *mKnownTimestamp) / 1e6f);
        } else {
            mKnownTimestamp = maxRb;
            SFTRACE_FORMAT_INSTANT("mKnownTimestamp (maxRb) was %.2fms ago",
                                   (mClock->now() - *mKnownTimestamp) / 1e6f);
        }

        mTimestamps.clear();
        mLastTimestampIndex = 0;
    }

    mIdealPeriod = Period::fromNs(idealPeriod());
    if (mTimelines.empty()) {
        mLastCommittedVsync = TimePoint::fromNs(0);
        mTimelines.emplace_back(mLastCommittedVsync, mIdealPeriod, mRenderRateOpt);
    } else if (clearTimelines) {
        while (mTimelines.size() > 1) {
            mTimelines.pop_front();
        }
        mTimelines.front().setRenderRate(mRenderRateOpt);
        // set mLastCommittedVsync to a valid vsync but don't commit too much in the future
        const auto vsyncOpt = mTimelines.front().nextAnticipatedVSyncTimeFrom(
                getVSyncPredictionModelLocked(),
                /* minFramePeriodOpt */ std::nullopt,
                snapToVsync(mClock->now()), MissedVsync{},
                /* lastVsyncOpt */ std::nullopt);
        mLastCommittedVsync = *vsyncOpt;
    }
}

bool VSyncPredictor::needsMoreSamples() const {
    std::lock_guard lock(mMutex);
    return mTimestamps.size() < kMinimumSamplesForPrediction;
}

void VSyncPredictor::resetModel() {
    SFTRACE_CALL();
    std::lock_guard lock(mMutex);
    mRateMap[idealPeriod()] = {idealPeriod(), 0};
    clearTimestamps(/* clearTimelines */ true);
}

void VSyncPredictor::dump(std::string& result) const {
    std::lock_guard lock(mMutex);
    StringAppendF(&result, "\tmDisplayModePtr=%s\n", to_string(*mDisplayModePtr).c_str());
    StringAppendF(&result, "\tRefresh Rate Map:\n");
    for (const auto& [period, periodInterceptTuple] : mRateMap) {
        StringAppendF(&result,
                      "\t\tFor ideal period %.2fms: period = %.2fms, intercept = %" PRId64 "\n",
                      period / 1e6f, periodInterceptTuple.slope / 1e6f,
                      periodInterceptTuple.intercept);
    }
    StringAppendF(&result, "\tmTimelines.size()=%zu\n", mTimelines.size());
}

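// Removes timelines that can no longer produce valid vsyncs. If nothing has been committed for
// several render periods, the phase is considered broken and all timelines are replaced with a
// fresh one.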
void VSyncPredictor::purgeTimelines(android::TimePoint now) {
    const auto kEnoughFramesToBreakPhase = 5;
    if (mRenderRateOpt &&
        mLastCommittedVsync.ns() + mRenderRateOpt->getPeriodNsecs() * kEnoughFramesToBreakPhase <
                mClock->now()) {
        SFTRACE_FORMAT_INSTANT("kEnoughFramesToBreakPhase");
        mTimelines.clear();
        mLastCommittedVsync = TimePoint::fromNs(0);
        mTimelines.emplace_back(mLastCommittedVsync, mIdealPeriod, mRenderRateOpt);
        return;
    }

    while (mTimelines.size() > 1) {
        const auto validUntilOpt = mTimelines.front().validUntil();
        const bool isTimelineOutDated = FlagManager::getInstance().vrr_bugfix_24q4()
                ? mTimelines.front().isWithin(now) == VsyncTimeline::VsyncOnTimeline::Outside
                : validUntilOpt && *validUntilOpt < now;
        if (isTimelineOutDated) {
            mTimelines.pop_front();
        } else {
            break;
        }
    }
    LOG_ALWAYS_FATAL_IF(mTimelines.empty());
    LOG_ALWAYS_FATAL_IF(mTimelines.back().validUntil().has_value());
}

auto VSyncPredictor::VsyncTimeline::makeVsyncSequence(TimePoint knownVsync)
        -> std::optional<VsyncSequence> {
    if (knownVsync.ns() == 0) return std::nullopt;
    return std::make_optional<VsyncSequence>({knownVsync.ns(), 0});
}

VSyncPredictor::VsyncTimeline::VsyncTimeline(TimePoint knownVsync, Period idealPeriod,
                                             std::optional<Fps> renderRateOpt)
      : mIdealPeriod(idealPeriod),
        mRenderRateOpt(renderRateOpt),
        mLastVsyncSequence(makeVsyncSequence(knownVsync)) {}

void VSyncPredictor::VsyncTimeline::freeze(TimePoint lastVsync) {
    LOG_ALWAYS_FATAL_IF(mValidUntil.has_value());
    SFTRACE_FORMAT_INSTANT("renderRate %s valid for %.2f",
                           mRenderRateOpt ? to_string(*mRenderRateOpt).c_str() : "NA",
                           float(lastVsync.ns() - TimePoint::now().ns()) / 1e6f);
    mValidUntil = lastVsync;
}

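// Snaps the requested vsync onto this timeline's render-rate grid and, on VRR displays, applies
// fix-ups: compensates for a previously missed vsync, or skips one render period when the result
// would land within the minimum frame period of lastVsyncOpt. Returns nullopt when the vsync
// falls outside the range covered by this (frozen) timeline.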
std::optional<TimePoint> VSyncPredictor::VsyncTimeline::nextAnticipatedVSyncTimeFrom(
        Model model, std::optional<Period> minFramePeriodOpt, nsecs_t vsync,
        MissedVsync missedVsync, std::optional<nsecs_t> lastVsyncOpt) {
    SFTRACE_FORMAT("renderRate %s", mRenderRateOpt ? to_string(*mRenderRateOpt).c_str() : "NA");

    nsecs_t vsyncTime = snapToVsyncAlignedWithRenderRate(model, vsync);
    const auto threshold = model.slope / 2;
    const auto lastFrameMissed =
            lastVsyncOpt && std::abs(*lastVsyncOpt - missedVsync.vsync.ns()) < threshold;
    const auto mightBackpressure = minFramePeriodOpt && mRenderRateOpt &&
            mRenderRateOpt->getPeriod() < 2 * (*minFramePeriodOpt);
    if (FlagManager::getInstance().vrr_config()) {
        if (lastFrameMissed) {
            // If the missed vsync is the last vsync, we have already shifted the timeline.
            // Depending on whether we skipped the frame (onFrameMissed) or not (onFrameBegin), we
            // apply a different fixup when violating the minFramePeriod.
            // There is no need to shift the vsync timeline again.
            if (vsyncTime - missedVsync.vsync.ns() < minFramePeriodOpt->ns()) {
                vsyncTime += missedVsync.fixup.ns();
                SFTRACE_FORMAT_INSTANT("lastFrameMissed");
            }
        } else if (mightBackpressure && lastVsyncOpt) {
            if (!FlagManager::getInstance().vrr_bugfix_24q4()) {
                // lastVsyncOpt does not need to be corrected with the new rate, and
                // it should be used as is to avoid skipping a frame when changing rates are
                // aligned at vsync time.
                lastVsyncOpt = snapToVsyncAlignedWithRenderRate(model, *lastVsyncOpt);
            }
            const auto vsyncDiff = vsyncTime - *lastVsyncOpt;
            if (vsyncDiff <= minFramePeriodOpt->ns() - threshold) {
                // avoid a duplicate vsync
                SFTRACE_FORMAT_INSTANT("skipping a vsync to avoid duplicate frame. next in %.2f "
                                       "which is %.2f from prev. adjust by %.2f",
                                       static_cast<float>(vsyncTime - TimePoint::now().ns()) / 1e6f,
                                       static_cast<float>(vsyncDiff) / 1e6f,
                                       static_cast<float>(mRenderRateOpt->getPeriodNsecs()) / 1e6f);
                vsyncTime += mRenderRateOpt->getPeriodNsecs();
            }
        }
    }

    SFTRACE_FORMAT_INSTANT("vsync in %.2fms", float(vsyncTime - TimePoint::now().ns()) / 1e6f);
    const bool isVsyncInvalid = FlagManager::getInstance().vrr_bugfix_24q4()
            ? isWithin(TimePoint::fromNs(vsyncTime)) == VsyncOnTimeline::Outside
            : mValidUntil && vsyncTime > mValidUntil->ns();
    if (isVsyncInvalid) {
        SFTRACE_FORMAT_INSTANT("no longer valid for vsync in %.2f",
                               static_cast<float>(vsyncTime - TimePoint::now().ns()) / 1e6f);
        return std::nullopt;
    }

    return TimePoint::fromNs(vsyncTime);
}

auto VSyncPredictor::VsyncTimeline::getVsyncSequenceLocked(Model model, nsecs_t vsync)
        -> VsyncSequence {
    if (!mLastVsyncSequence) return {vsync, 0};

    const auto [lastVsyncTime, lastVsyncSequence] = *mLastVsyncSequence;
    const auto vsyncSequence = lastVsyncSequence +
            static_cast<int64_t>(std::round((vsync - lastVsyncTime) /
                                            static_cast<float>(model.slope)));
    return {vsync, vsyncSequence};
}

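// Advances the given vsync to the next one whose sequence number is a multiple of the render-rate
// divisor, so the returned vsync is aligned with the render rate rather than the raw vsync rate.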
nsecs_t VSyncPredictor::VsyncTimeline::snapToVsyncAlignedWithRenderRate(Model model,
                                                                        nsecs_t vsync) {
    // Update mLastVsyncSequence for the reference point.
    mLastVsyncSequence = getVsyncSequenceLocked(model, vsync);

    const auto renderRatePhase = [&]() -> int {
        if (!mRenderRateOpt) return 0;
        const auto divisor =
                RefreshRateSelector::getFrameRateDivisor(Fps::fromPeriodNsecs(mIdealPeriod.ns()),
                                                         *mRenderRateOpt);
        if (divisor <= 1) return 0;

        int mod = mLastVsyncSequence->seq % divisor;
        if (mod == 0) return 0;

        // This is actually a bug fix, but it is guarded with vrr_config since we found it with
        // this config.
        if (FlagManager::getInstance().vrr_config()) {
            if (mod < 0) mod += divisor;
        }

        return divisor - mod;
    }();

    if (renderRatePhase == 0) {
        return mLastVsyncSequence->vsyncTime;
    }

    return mLastVsyncSequence->vsyncTime + model.slope * renderRatePhase;
}

bool VSyncPredictor::VsyncTimeline::isVSyncInPhase(Model model, nsecs_t vsync, Fps frameRate) {
    const auto getVsyncIn = [](TimePoint now, nsecs_t timePoint) -> float {
        return ticks<std::milli, float>(TimePoint::fromNs(timePoint) - now);
    };

    Fps displayFps = !FlagManager::getInstance().vrr_bugfix_24q4() && mRenderRateOpt
            ? *mRenderRateOpt
            : Fps::fromPeriodNsecs(mIdealPeriod.ns());
    const auto divisor = RefreshRateSelector::getFrameRateDivisor(displayFps, frameRate);
    const auto now = TimePoint::now();

    if (divisor <= 1) {
        return true;
    }
    const auto vsyncSequence = getVsyncSequenceLocked(model, vsync);
    SFTRACE_FORMAT_INSTANT("vsync in: %.2f sequence: %" PRId64 " divisor: %zu",
                           getVsyncIn(now, vsyncSequence.vsyncTime), vsyncSequence.seq, divisor);
    return vsyncSequence.seq % divisor == 0;
}

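// Shifts this timeline's reference vsync forward by the given phase, but only when a single
// render period cannot absorb the shift without dropping below the minimum frame period.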
void VSyncPredictor::VsyncTimeline::shiftVsyncSequence(Duration phase, Period minFramePeriod) {
    if (mLastVsyncSequence) {
        const auto renderRate = mRenderRateOpt.value_or(Fps::fromPeriodNsecs(mIdealPeriod.ns()));
        const auto threshold = mIdealPeriod.ns() / 2;
        if (renderRate.getPeriodNsecs() - phase.ns() + threshold >= minFramePeriod.ns()) {
            SFTRACE_FORMAT_INSTANT("Not adjusting vsync by %.2f",
                                   static_cast<float>(phase.ns()) / 1e6f);
            return;
        }
        SFTRACE_FORMAT_INSTANT("adjusting vsync by %.2f", static_cast<float>(phase.ns()) / 1e6f);
        mLastVsyncSequence->vsyncTime += phase.ns();
    }
}

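// Classifies a vsync against this timeline: Unique if the timeline is not frozen or the vsync is
// clearly before the freeze point, Outside if it is clearly after it, and Shared if it lands
// within half an ideal period of the freeze point.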
VSyncPredictor::VsyncTimeline::VsyncOnTimeline VSyncPredictor::VsyncTimeline::isWithin(
        TimePoint vsync) {
    const auto threshold = mIdealPeriod.ns() / 2;
    if (!mValidUntil || vsync.ns() < mValidUntil->ns() - threshold) {
        // If mValidUntil is absent then the timeline is not frozen, and the
        // vsync should be unique to this timeline.
        return VsyncOnTimeline::Unique;
    }
    if (vsync.ns() > mValidUntil->ns() + threshold) {
        return VsyncOnTimeline::Outside;
    }
    return VsyncOnTimeline::Shared;
}

} // namespace android::scheduler

// TODO(b/129481165): remove the #pragma below and fix conversion issues
#pragma clang diagnostic pop // ignored "-Wextra"