1 /*
2 * Copyright (c) 2019, Alliance for Open Media. All rights reserved.
3 *
4 * This source code is subject to the terms of the BSD 2 Clause License and
5 * the Alliance for Open Media Patent License 1.0. If the BSD 2 Clause License
6 * was not distributed with this source code in the LICENSE file, you can
7 * obtain it at www.aomedia.org/license/software. If the Alliance for Open
8 * Media Patent License 1.0 was not distributed with this source code in the
9 * PATENTS file, you can obtain it at www.aomedia.org/license/patent.
10 */
11
12 #include <stdint.h>
13
14 #include "av1/common/blockd.h"
15 #include "config/aom_config.h"
16 #include "config/aom_scale_rtcd.h"
17
18 #include "aom/aom_codec.h"
19 #include "aom/aom_encoder.h"
20
21 #if CONFIG_MISMATCH_DEBUG
22 #include "aom_util/debug_util.h"
23 #endif // CONFIG_MISMATCH_DEBUG
24
25 #include "av1/common/av1_common_int.h"
26 #include "av1/common/reconinter.h"
27
28 #include "av1/encoder/encoder.h"
29 #include "av1/encoder/encode_strategy.h"
30 #include "av1/encoder/encodeframe.h"
31 #include "av1/encoder/encoder_alloc.h"
32 #include "av1/encoder/firstpass.h"
33 #include "av1/encoder/gop_structure.h"
34 #include "av1/encoder/pass2_strategy.h"
35 #include "av1/encoder/temporal_filter.h"
36 #if CONFIG_THREE_PASS
37 #include "av1/encoder/thirdpass.h"
38 #endif // CONFIG_THREE_PASS
39 #include "av1/encoder/tpl_model.h"
40
41 #if CONFIG_TUNE_VMAF
42 #include "av1/encoder/tune_vmaf.h"
43 #endif
44
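// Key-frame temporal filtering is compiled out of realtime-only builds.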
45 #define TEMPORAL_FILTER_KEY_FRAME (CONFIG_REALTIME_ONLY ? 0 : 1)
46
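// Helper to set the golden/bwdref/altref refresh flags in one call.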
static inline void set_refresh_frame_flags(
    RefreshFrameInfo *const refresh_frame, bool refresh_gf, bool refresh_bwdref,
    bool refresh_arf) {
50 refresh_frame->golden_frame = refresh_gf;
51 refresh_frame->bwd_ref_frame = refresh_bwdref;
52 refresh_frame->alt_ref_frame = refresh_arf;
53 }
54
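// Decide which reference buffers (golden / bwdref / altref) are refreshed for
// the given frame update type, then apply any pending external refresh
// overrides and the force_refresh_all request.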
void av1_configure_buffer_updates(AV1_COMP *const cpi,
                                  RefreshFrameInfo *const refresh_frame,
                                  const FRAME_UPDATE_TYPE type,
                                  const REFBUF_STATE refbuf_state,
                                  int force_refresh_all) {
60 // NOTE(weitinglin): Should we define another function to take care of
61 // cpi->rc.is_$Source_Type to make this function as it is in the comment?
62 const ExtRefreshFrameFlagsInfo *const ext_refresh_frame_flags =
63 &cpi->ext_flags.refresh_frame;
64 cpi->rc.is_src_frame_alt_ref = 0;
65
66 switch (type) {
67 case KF_UPDATE:
68 set_refresh_frame_flags(refresh_frame, true, true, true);
69 break;
70
71 case LF_UPDATE:
72 set_refresh_frame_flags(refresh_frame, false, false, false);
73 break;
74
75 case GF_UPDATE:
76 set_refresh_frame_flags(refresh_frame, true, false, false);
77 break;
78
79 case OVERLAY_UPDATE:
80 if (refbuf_state == REFBUF_RESET)
81 set_refresh_frame_flags(refresh_frame, true, true, true);
82 else
83 set_refresh_frame_flags(refresh_frame, true, false, false);
84
85 cpi->rc.is_src_frame_alt_ref = 1;
86 break;
87
88 case ARF_UPDATE:
89 // NOTE: BWDREF does not get updated along with ALTREF_FRAME.
90 if (refbuf_state == REFBUF_RESET)
91 set_refresh_frame_flags(refresh_frame, true, true, true);
92 else
93 set_refresh_frame_flags(refresh_frame, false, false, true);
94
95 break;
96
97 case INTNL_OVERLAY_UPDATE:
98 set_refresh_frame_flags(refresh_frame, false, false, false);
99 cpi->rc.is_src_frame_alt_ref = 1;
100 break;
101
102 case INTNL_ARF_UPDATE:
103 set_refresh_frame_flags(refresh_frame, false, true, false);
104 break;
105
106 default: assert(0); break;
107 }
108
109 if (ext_refresh_frame_flags->update_pending &&
110 (!is_stat_generation_stage(cpi))) {
111 set_refresh_frame_flags(refresh_frame,
112 ext_refresh_frame_flags->golden_frame,
113 ext_refresh_frame_flags->bwd_ref_frame,
114 ext_refresh_frame_flags->alt_ref_frame);
115 GF_GROUP *gf_group = &cpi->ppi->gf_group;
116 if (ext_refresh_frame_flags->golden_frame)
117 gf_group->update_type[cpi->gf_frame_index] = GF_UPDATE;
118 if (ext_refresh_frame_flags->alt_ref_frame)
119 gf_group->update_type[cpi->gf_frame_index] = ARF_UPDATE;
120 if (ext_refresh_frame_flags->bwd_ref_frame)
121 gf_group->update_type[cpi->gf_frame_index] = INTNL_ARF_UPDATE;
122 }
123
124 if (force_refresh_all)
125 set_refresh_frame_flags(refresh_frame, true, true, true);
126 }
127
static void set_additional_frame_flags(const AV1_COMMON *const cm,
                                       unsigned int *const frame_flags) {
130 if (frame_is_intra_only(cm)) {
131 *frame_flags |= FRAMEFLAGS_INTRAONLY;
132 }
133 if (frame_is_sframe(cm)) {
134 *frame_flags |= FRAMEFLAGS_SWITCH;
135 }
136 if (cm->features.error_resilient_mode) {
137 *frame_flags |= FRAMEFLAGS_ERROR_RESILIENT;
138 }
139 }
140
static void set_ext_overrides(AV1_COMMON *const cm,
                              EncodeFrameParams *const frame_params,
                              ExternalFlags *const ext_flags) {
  // Override the defaults with values supplied externally via the
  // av1_update_reference() and av1_update_entropy() calls.
  // Note: The overrides are valid only for the next frame passed
  // to av1_encode_lowlevel().
148
149 if (ext_flags->use_s_frame) {
150 frame_params->frame_type = S_FRAME;
151 }
152
153 if (ext_flags->refresh_frame_context_pending) {
154 cm->features.refresh_frame_context = ext_flags->refresh_frame_context;
155 ext_flags->refresh_frame_context_pending = 0;
156 }
157 cm->features.allow_ref_frame_mvs = ext_flags->use_ref_frame_mvs;
158
159 frame_params->error_resilient_mode = ext_flags->use_error_resilient;
  // A keyframe is already error resilient, and keyframes with
  // error_resilient_mode interfere with the use of show_existing_frame
  // when forward reference keyframes are enabled.
163 frame_params->error_resilient_mode &= frame_params->frame_type != KEY_FRAME;
164 // For bitstream conformance, s-frames must be error-resilient
165 frame_params->error_resilient_mode |= frame_params->frame_type == S_FRAME;
166 }
167
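// Choose the primary reference frame, i.e. the previously coded frame whose
// frame context the current frame inherits. Returns PRIMARY_REF_NONE for
// intra-only frames, error-resilient frames, or an external override.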
static int choose_primary_ref_frame(
    AV1_COMP *const cpi, const EncodeFrameParams *const frame_params) {
170 const AV1_COMMON *const cm = &cpi->common;
171
172 const int intra_only = frame_params->frame_type == KEY_FRAME ||
173 frame_params->frame_type == INTRA_ONLY_FRAME;
174 if (intra_only || frame_params->error_resilient_mode ||
175 cpi->ext_flags.use_primary_ref_none) {
176 return PRIMARY_REF_NONE;
177 }
178
179 #if !CONFIG_REALTIME_ONLY
180 if (cpi->use_ducky_encode) {
181 int wanted_fb = cpi->ppi->gf_group.primary_ref_idx[cpi->gf_frame_index];
182 for (int ref_frame = LAST_FRAME; ref_frame <= ALTREF_FRAME; ref_frame++) {
183 if (get_ref_frame_map_idx(cm, ref_frame) == wanted_fb)
184 return ref_frame - LAST_FRAME;
185 }
186
187 return PRIMARY_REF_NONE;
188 }
189 #endif // !CONFIG_REALTIME_ONLY
190
  // In the large-scale case, always use the Last frame's frame contexts.
192 // Note(yunqing): In other cases, primary_ref_frame is chosen based on
193 // cpi->ppi->gf_group.layer_depth[cpi->gf_frame_index], which also controls
194 // frame bit allocation.
195 if (cm->tiles.large_scale) return (LAST_FRAME - LAST_FRAME);
196
197 if (cpi->ppi->use_svc || cpi->ppi->rtc_ref.set_ref_frame_config)
198 return av1_svc_primary_ref_frame(cpi);
199
200 // Find the most recent reference frame with the same reference type as the
201 // current frame
202 const int current_ref_type = get_current_frame_ref_type(cpi);
203 int wanted_fb = cpi->ppi->fb_of_context_type[current_ref_type];
204 #if CONFIG_FPMT_TEST
205 if (cpi->ppi->fpmt_unit_test_cfg == PARALLEL_SIMULATION_ENCODE) {
206 GF_GROUP *const gf_group = &cpi->ppi->gf_group;
207 if (gf_group->update_type[cpi->gf_frame_index] == INTNL_ARF_UPDATE) {
208 int frame_level = gf_group->frame_parallel_level[cpi->gf_frame_index];
      // Bookkeep the wanted_fb of the frame_parallel_level 1 frame in an FP2
      // set.
210 if (frame_level == 1) {
211 cpi->wanted_fb = wanted_fb;
212 }
      // Use the wanted_fb of the level 1 frame in an FP2 set for a level 2
      // frame in the same set.
215 if (frame_level == 2 &&
216 gf_group->update_type[cpi->gf_frame_index - 1] == INTNL_ARF_UPDATE) {
217 assert(gf_group->frame_parallel_level[cpi->gf_frame_index - 1] == 1);
218 wanted_fb = cpi->wanted_fb;
219 }
220 }
221 }
222 #endif // CONFIG_FPMT_TEST
223 int primary_ref_frame = PRIMARY_REF_NONE;
224 for (int ref_frame = LAST_FRAME; ref_frame <= ALTREF_FRAME; ref_frame++) {
225 if (get_ref_frame_map_idx(cm, ref_frame) == wanted_fb) {
226 primary_ref_frame = ref_frame - LAST_FRAME;
227 }
228 }
229
230 return primary_ref_frame;
231 }
232
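// Update the encoder's frame-rate estimate from the incoming timestamps
// (the 10000000.0 constants below are the number of timestamp ticks per
// second).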
static void adjust_frame_rate(AV1_COMP *cpi, int64_t ts_start, int64_t ts_end) {
234 TimeStamps *time_stamps = &cpi->time_stamps;
235 int64_t this_duration;
236 int step = 0;
237
238 // Clear down mmx registers
239
240 if (cpi->ppi->use_svc && cpi->ppi->rtc_ref.set_ref_frame_config &&
241 cpi->svc.number_spatial_layers > 1) {
242 // ts_start is the timestamp for the current frame and ts_end is the
243 // expected next timestamp given the duration passed into codec_encode().
244 // See the setting in encoder_encode() in av1_cx_iface.c:
245 // ts_start = timebase_units_to_ticks(cpi_data.timestamp_ratio, ptsvol),
246 // ts_end = timebase_units_to_ticks(cpi_data.timestamp_ratio, ptsvol +
247 // duration). So the difference ts_end - ts_start is the duration passed
248 // in by the user. For spatial layers SVC set the framerate based directly
249 // on the duration, and bypass the adjustments below.
250 this_duration = ts_end - ts_start;
251 if (this_duration > 0) {
252 cpi->new_framerate = 10000000.0 / this_duration;
253 av1_new_framerate(cpi, cpi->new_framerate);
254 time_stamps->prev_ts_start = ts_start;
255 time_stamps->prev_ts_end = ts_end;
256 return;
257 }
258 }
259
260 if (ts_start == time_stamps->first_ts_start) {
261 this_duration = ts_end - ts_start;
262 step = 1;
263 } else {
264 int64_t last_duration =
265 time_stamps->prev_ts_end - time_stamps->prev_ts_start;
266
267 this_duration = ts_end - time_stamps->prev_ts_end;
268
269 // do a step update if the duration changes by 10%
270 if (last_duration)
271 step = (int)((this_duration - last_duration) * 10 / last_duration);
272 }
273
274 if (this_duration) {
275 if (step) {
276 cpi->new_framerate = 10000000.0 / this_duration;
277 av1_new_framerate(cpi, cpi->new_framerate);
278 } else {
279 // Average this frame's rate into the last second's average
280 // frame rate. If we haven't seen 1 second yet, then average
281 // over the whole interval seen.
282 const double interval =
283 AOMMIN((double)(ts_end - time_stamps->first_ts_start), 10000000.0);
284 double avg_duration = 10000000.0 / cpi->framerate;
285 avg_duration *= (interval - avg_duration + this_duration);
286 avg_duration /= interval;
287 cpi->new_framerate = (10000000.0 / avg_duration);
288 // For parallel frames update cpi->framerate with new_framerate
289 // during av1_post_encode_updates()
290 double framerate =
291 (cpi->ppi->gf_group.frame_parallel_level[cpi->gf_frame_index] > 0)
292 ? cpi->framerate
293 : cpi->new_framerate;
294 av1_new_framerate(cpi, framerate);
295 }
296 }
297
298 time_stamps->prev_ts_start = ts_start;
299 time_stamps->prev_ts_end = ts_end;
300 }
301
302 // Determine whether there is a forced keyframe pending in the lookahead buffer
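// Returns the lookahead index of the first frame flagged as a forced key
// frame (AOM_EFLAG_FORCE_KF), or -1 if no such frame exists up to up_to_index.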
int is_forced_keyframe_pending(struct lookahead_ctx *lookahead,
                               const int up_to_index,
                               const COMPRESSOR_STAGE compressor_stage) {
306 for (int i = 0; i <= up_to_index; i++) {
307 const struct lookahead_entry *e =
308 av1_lookahead_peek(lookahead, i, compressor_stage);
309 if (e == NULL) {
310 // We have reached the end of the lookahead buffer and not early-returned
311 // so there isn't a forced key-frame pending.
312 return -1;
313 } else if (e->flags == AOM_EFLAG_FORCE_KF) {
314 return i;
315 } else {
316 continue;
317 }
318 }
319 return -1; // Never reached
320 }
321
// Check if we should encode an ARF or internal ARF. If not, try a LAST frame.
// Do some setup associated with the chosen source.
// flush, pop_lookahead, last_source, and show_frame are outputs.
// Return the frame source, or NULL if we couldn't find one.
static struct lookahead_entry *choose_frame_source(
    AV1_COMP *const cpi, int *const flush, int *pop_lookahead,
    struct lookahead_entry **last_source, int *const show_frame) {
329 AV1_COMMON *const cm = &cpi->common;
330 const GF_GROUP *const gf_group = &cpi->ppi->gf_group;
331 struct lookahead_entry *source = NULL;
332
333 // Source index in lookahead buffer.
334 int src_index = gf_group->arf_src_offset[cpi->gf_frame_index];
335
336 // TODO(Aasaipriya): Forced key frames need to be fixed when rc_mode != AOM_Q
337 if (src_index &&
338 (is_forced_keyframe_pending(cpi->ppi->lookahead, src_index,
339 cpi->compressor_stage) != -1) &&
340 cpi->oxcf.rc_cfg.mode != AOM_Q && !is_stat_generation_stage(cpi)) {
341 src_index = 0;
342 *flush = 1;
343 }
344
345 // If the current frame is arf, then we should not pop from the lookahead
346 // buffer. If the current frame is not arf, then pop it. This assumes the
347 // first frame in the GF group is not arf. May need to change if it is not
348 // true.
349 *pop_lookahead = (src_index == 0);
350 // If this is a key frame and keyframe filtering is enabled with overlay,
351 // then do not pop.
352 if (*pop_lookahead && cpi->oxcf.kf_cfg.enable_keyframe_filtering > 1 &&
353 gf_group->update_type[cpi->gf_frame_index] == ARF_UPDATE &&
354 !is_stat_generation_stage(cpi) && cpi->ppi->lookahead) {
355 if (cpi->ppi->lookahead->read_ctxs[cpi->compressor_stage].sz &&
356 (*flush ||
357 cpi->ppi->lookahead->read_ctxs[cpi->compressor_stage].sz ==
358 cpi->ppi->lookahead->read_ctxs[cpi->compressor_stage].pop_sz)) {
359 *pop_lookahead = 0;
360 }
361 }
362
  // The LAP stage does not have ARFs or forward key-frames; hence, always
  // pop the lookahead here.
365 if (is_stat_generation_stage(cpi)) {
366 *pop_lookahead = 1;
367 src_index = 0;
368 }
369
370 *show_frame = *pop_lookahead;
371
372 #if CONFIG_FPMT_TEST
373 if (cpi->ppi->fpmt_unit_test_cfg == PARALLEL_ENCODE) {
374 #else
375 {
376 #endif // CONFIG_FPMT_TEST
377 // Future frame in parallel encode set
378 if (gf_group->src_offset[cpi->gf_frame_index] != 0 &&
379 !is_stat_generation_stage(cpi))
380 src_index = gf_group->src_offset[cpi->gf_frame_index];
381 }
382 if (*show_frame) {
383 // show frame, pop from buffer
384 // Get last frame source.
385 if (cm->current_frame.frame_number > 0) {
386 *last_source = av1_lookahead_peek(cpi->ppi->lookahead, src_index - 1,
387 cpi->compressor_stage);
388 }
389 // Read in the source frame.
390 source = av1_lookahead_peek(cpi->ppi->lookahead, src_index,
391 cpi->compressor_stage);
392 } else {
    // No-show frames are ARF frames.
394 source = av1_lookahead_peek(cpi->ppi->lookahead, src_index,
395 cpi->compressor_stage);
396 if (source != NULL) {
397 cm->showable_frame = 1;
398 }
399 }
400 return source;
401 }
402
403 // Don't allow a show_existing_frame to coincide with an error resilient or
404 // S-Frame. An exception can be made in the case of a keyframe, since it does
405 // not depend on any previous frames.
406 static int allow_show_existing(const AV1_COMP *const cpi,
407 unsigned int frame_flags) {
408 if (cpi->common.current_frame.frame_number == 0) return 0;
409
410 const struct lookahead_entry *lookahead_src =
411 av1_lookahead_peek(cpi->ppi->lookahead, 0, cpi->compressor_stage);
412 if (lookahead_src == NULL) return 1;
413
414 const int is_error_resilient =
415 cpi->oxcf.tool_cfg.error_resilient_mode ||
416 (lookahead_src->flags & AOM_EFLAG_ERROR_RESILIENT);
417 const int is_s_frame = cpi->oxcf.kf_cfg.enable_sframe ||
418 (lookahead_src->flags & AOM_EFLAG_SET_S_FRAME);
419 const int is_key_frame =
420 (cpi->rc.frames_to_key == 0) || (frame_flags & FRAMEFLAGS_KEY);
421 return !(is_error_resilient || is_s_frame) || is_key_frame;
422 }
423
424 // Update frame_flags to tell the encoder's caller what sort of frame was
425 // encoded.
426 static void update_frame_flags(const AV1_COMMON *const cm,
427 const RefreshFrameInfo *const refresh_frame,
428 unsigned int *frame_flags) {
429 if (encode_show_existing_frame(cm)) {
430 *frame_flags &= ~(uint32_t)FRAMEFLAGS_GOLDEN;
431 *frame_flags &= ~(uint32_t)FRAMEFLAGS_BWDREF;
432 *frame_flags &= ~(uint32_t)FRAMEFLAGS_ALTREF;
433 *frame_flags &= ~(uint32_t)FRAMEFLAGS_KEY;
434 return;
435 }
436
437 if (refresh_frame->golden_frame) {
438 *frame_flags |= FRAMEFLAGS_GOLDEN;
439 } else {
440 *frame_flags &= ~(uint32_t)FRAMEFLAGS_GOLDEN;
441 }
442
443 if (refresh_frame->alt_ref_frame) {
444 *frame_flags |= FRAMEFLAGS_ALTREF;
445 } else {
446 *frame_flags &= ~(uint32_t)FRAMEFLAGS_ALTREF;
447 }
448
449 if (refresh_frame->bwd_ref_frame) {
450 *frame_flags |= FRAMEFLAGS_BWDREF;
451 } else {
452 *frame_flags &= ~(uint32_t)FRAMEFLAGS_BWDREF;
453 }
454
455 if (cm->current_frame.frame_type == KEY_FRAME) {
456 *frame_flags |= FRAMEFLAGS_KEY;
457 } else {
458 *frame_flags &= ~(uint32_t)FRAMEFLAGS_KEY;
459 }
460 }
461
462 #define DUMP_REF_FRAME_IMAGES 0
463
464 #if DUMP_REF_FRAME_IMAGES == 1
465 static int dump_one_image(AV1_COMMON *cm,
466 const YV12_BUFFER_CONFIG *const ref_buf,
467 char *file_name) {
468 int h;
469 FILE *f_ref = NULL;
470
471 if (ref_buf == NULL) {
472 printf("Frame data buffer is NULL.\n");
473 return AOM_CODEC_MEM_ERROR;
474 }
475
476 if ((f_ref = fopen(file_name, "wb")) == NULL) {
477 printf("Unable to open file %s to write.\n", file_name);
478 return AOM_CODEC_MEM_ERROR;
479 }
480
481 // --- Y ---
482 for (h = 0; h < cm->height; ++h) {
483 fwrite(&ref_buf->y_buffer[h * ref_buf->y_stride], 1, cm->width, f_ref);
484 }
485 // --- U ---
486 for (h = 0; h < (cm->height >> 1); ++h) {
487 fwrite(&ref_buf->u_buffer[h * ref_buf->uv_stride], 1, (cm->width >> 1),
488 f_ref);
489 }
490 // --- V ---
491 for (h = 0; h < (cm->height >> 1); ++h) {
492 fwrite(&ref_buf->v_buffer[h * ref_buf->uv_stride], 1, (cm->width >> 1),
493 f_ref);
494 }
495
496 fclose(f_ref);
497
498 return AOM_CODEC_OK;
499 }
500
501 static void dump_ref_frame_images(AV1_COMP *cpi) {
502 AV1_COMMON *const cm = &cpi->common;
503 MV_REFERENCE_FRAME ref_frame;
504
505 for (ref_frame = LAST_FRAME; ref_frame <= ALTREF_FRAME; ++ref_frame) {
506 char file_name[256] = "";
507 snprintf(file_name, sizeof(file_name), "/tmp/enc_F%d_ref_%d.yuv",
508 cm->current_frame.frame_number, ref_frame);
509 dump_one_image(cm, get_ref_frame_yv12_buf(cpi, ref_frame), file_name);
510 }
511 }
512 #endif // DUMP_REF_FRAME_IMAGES == 1
513
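// Returns the index of the lowest set bit in refresh_frame_flags, i.e. the
// first reference slot that will be refreshed, or INVALID_IDX if no bit is
// set. For example, refresh_frame_flags == 0x0c gives 2.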
514 int av1_get_refresh_ref_frame_map(int refresh_frame_flags) {
515 int ref_map_index;
516
517 for (ref_map_index = 0; ref_map_index < REF_FRAMES; ++ref_map_index)
518 if ((refresh_frame_flags >> ref_map_index) & 1) break;
519
520 if (ref_map_index == REF_FRAMES) ref_map_index = INVALID_IDX;
521 return ref_map_index;
522 }
523
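// Returns the first reference map slot that does not hold a valid frame
// (disp_order == -1), or INVALID_IDX if every slot is occupied.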
524 static int get_free_ref_map_index(RefFrameMapPair ref_map_pairs[REF_FRAMES]) {
525 for (int idx = 0; idx < REF_FRAMES; ++idx)
526 if (ref_map_pairs[idx].disp_order == -1) return idx;
527 return INVALID_IDX;
528 }
529
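// Pick which occupied reference slot to overwrite: among references at least
// three frames behind the current display order, prefer the oldest frame that
// is not in pyramid level 1, falling back to the oldest level 1 (ARF) frame
// when necessary.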
530 static int get_refresh_idx(RefFrameMapPair ref_frame_map_pairs[REF_FRAMES],
531 int update_arf, GF_GROUP *gf_group, int gf_index,
532 int enable_refresh_skip, int cur_frame_disp) {
533 int arf_count = 0;
534 int oldest_arf_order = INT32_MAX;
535 int oldest_arf_idx = -1;
536
537 int oldest_frame_order = INT32_MAX;
538 int oldest_idx = -1;
539
540 for (int map_idx = 0; map_idx < REF_FRAMES; map_idx++) {
541 RefFrameMapPair ref_pair = ref_frame_map_pairs[map_idx];
542 if (ref_pair.disp_order == -1) continue;
543 const int frame_order = ref_pair.disp_order;
544 const int reference_frame_level = ref_pair.pyr_level;
545 // Keep future frames and three closest previous frames in output order.
546 if (frame_order > cur_frame_disp - 3) continue;
547
548 if (enable_refresh_skip) {
549 int skip_frame = 0;
550 // Prevent refreshing a frame in gf_group->skip_frame_refresh.
551 for (int i = 0; i < REF_FRAMES; i++) {
552 int frame_to_skip = gf_group->skip_frame_refresh[gf_index][i];
553 if (frame_to_skip == INVALID_IDX) break;
554 if (frame_order == frame_to_skip) {
555 skip_frame = 1;
556 break;
557 }
558 }
559 if (skip_frame) continue;
560 }
561
562 // Keep track of the oldest level 1 frame if the current frame is also level
563 // 1.
564 if (reference_frame_level == 1) {
565 // If there are more than 2 level 1 frames in the reference list,
566 // discard the oldest.
567 if (frame_order < oldest_arf_order) {
568 oldest_arf_order = frame_order;
569 oldest_arf_idx = map_idx;
570 }
571 arf_count++;
572 continue;
573 }
574
575 // Update the overall oldest reference frame.
576 if (frame_order < oldest_frame_order) {
577 oldest_frame_order = frame_order;
578 oldest_idx = map_idx;
579 }
580 }
581 if (update_arf && arf_count > 2) return oldest_arf_idx;
582 if (oldest_idx >= 0) return oldest_idx;
583 if (oldest_arf_idx >= 0) return oldest_arf_idx;
584 if (oldest_idx == -1) {
585 assert(arf_count > 2 && enable_refresh_skip);
586 return oldest_arf_idx;
587 }
588 assert(0 && "No valid refresh index found");
589 return -1;
590 }
591
592 // Computes the reference refresh index for INTNL_ARF_UPDATE frame.
593 int av1_calc_refresh_idx_for_intnl_arf(
594 AV1_COMP *cpi, RefFrameMapPair ref_frame_map_pairs[REF_FRAMES],
595 int gf_index) {
596 GF_GROUP *const gf_group = &cpi->ppi->gf_group;
597
598 // Search for the open slot to store the current frame.
599 int free_fb_index = get_free_ref_map_index(ref_frame_map_pairs);
600
601 // Use a free slot if available.
602 if (free_fb_index != INVALID_IDX) {
603 return free_fb_index;
604 } else {
605 int enable_refresh_skip = !is_one_pass_rt_params(cpi);
606 int refresh_idx =
607 get_refresh_idx(ref_frame_map_pairs, 0, gf_group, gf_index,
608 enable_refresh_skip, gf_group->display_idx[gf_index]);
609 return refresh_idx;
610 }
611 }
612
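// Computes the refresh_frame_flags bitmask for the current frame: bit i is
// set when reference slot i will be overwritten by the newly coded frame.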
613 int av1_get_refresh_frame_flags(
614 const AV1_COMP *const cpi, const EncodeFrameParams *const frame_params,
615 FRAME_UPDATE_TYPE frame_update_type, int gf_index, int cur_disp_order,
616 RefFrameMapPair ref_frame_map_pairs[REF_FRAMES]) {
617 const AV1_COMMON *const cm = &cpi->common;
618 const ExtRefreshFrameFlagsInfo *const ext_refresh_frame_flags =
619 &cpi->ext_flags.refresh_frame;
620
621 GF_GROUP *gf_group = &cpi->ppi->gf_group;
622 if (gf_group->refbuf_state[gf_index] == REFBUF_RESET)
623 return SELECT_ALL_BUF_SLOTS;
624
625 // TODO(jingning): Deprecate the following operations.
626 // Switch frames and shown key-frames overwrite all reference slots
627 if (frame_params->frame_type == S_FRAME) return SELECT_ALL_BUF_SLOTS;
628
629 // show_existing_frames don't actually send refresh_frame_flags so set the
630 // flags to 0 to keep things consistent.
631 if (frame_params->show_existing_frame) return 0;
632
633 const RTC_REF *const rtc_ref = &cpi->ppi->rtc_ref;
634 if (is_frame_droppable(rtc_ref, ext_refresh_frame_flags)) return 0;
635
636 #if !CONFIG_REALTIME_ONLY
637 if (cpi->use_ducky_encode &&
638 cpi->ducky_encode_info.frame_info.gop_mode == DUCKY_ENCODE_GOP_MODE_RCL) {
639 int new_fb_map_idx = cpi->ppi->gf_group.update_ref_idx[gf_index];
640 if (new_fb_map_idx == INVALID_IDX) return 0;
641 return 1 << new_fb_map_idx;
642 }
643 #endif // !CONFIG_REALTIME_ONLY
644
645 int refresh_mask = 0;
646 if (ext_refresh_frame_flags->update_pending) {
647 if (rtc_ref->set_ref_frame_config ||
648 use_rtc_reference_structure_one_layer(cpi)) {
649 for (unsigned int i = 0; i < INTER_REFS_PER_FRAME; i++) {
650 int ref_frame_map_idx = rtc_ref->ref_idx[i];
651 refresh_mask |= rtc_ref->refresh[ref_frame_map_idx]
652 << ref_frame_map_idx;
653 }
654 return refresh_mask;
655 }
656 // Unfortunately the encoder interface reflects the old refresh_*_frame
657 // flags so we have to replicate the old refresh_frame_flags logic here in
658 // order to preserve the behaviour of the flag overrides.
659 int ref_frame_map_idx = get_ref_frame_map_idx(cm, LAST_FRAME);
660 if (ref_frame_map_idx != INVALID_IDX)
661 refresh_mask |= ext_refresh_frame_flags->last_frame << ref_frame_map_idx;
662
663 ref_frame_map_idx = get_ref_frame_map_idx(cm, EXTREF_FRAME);
664 if (ref_frame_map_idx != INVALID_IDX)
665 refresh_mask |= ext_refresh_frame_flags->bwd_ref_frame
666 << ref_frame_map_idx;
667
668 ref_frame_map_idx = get_ref_frame_map_idx(cm, ALTREF2_FRAME);
669 if (ref_frame_map_idx != INVALID_IDX)
670 refresh_mask |= ext_refresh_frame_flags->alt2_ref_frame
671 << ref_frame_map_idx;
672
673 if (frame_update_type == OVERLAY_UPDATE) {
674 ref_frame_map_idx = get_ref_frame_map_idx(cm, ALTREF_FRAME);
675 if (ref_frame_map_idx != INVALID_IDX)
676 refresh_mask |= ext_refresh_frame_flags->golden_frame
677 << ref_frame_map_idx;
678 } else {
679 ref_frame_map_idx = get_ref_frame_map_idx(cm, GOLDEN_FRAME);
680 if (ref_frame_map_idx != INVALID_IDX)
681 refresh_mask |= ext_refresh_frame_flags->golden_frame
682 << ref_frame_map_idx;
683
684 ref_frame_map_idx = get_ref_frame_map_idx(cm, ALTREF_FRAME);
685 if (ref_frame_map_idx != INVALID_IDX)
686 refresh_mask |= ext_refresh_frame_flags->alt_ref_frame
687 << ref_frame_map_idx;
688 }
689 return refresh_mask;
690 }
691
692 // Search for the open slot to store the current frame.
693 int free_fb_index = get_free_ref_map_index(ref_frame_map_pairs);
694
695 // No refresh necessary for these frame types.
696 if (frame_update_type == OVERLAY_UPDATE ||
697 frame_update_type == INTNL_OVERLAY_UPDATE)
698 return refresh_mask;
699
700 // If there is an open slot, refresh that one instead of replacing a
701 // reference.
702 if (free_fb_index != INVALID_IDX) {
703 refresh_mask = 1 << free_fb_index;
704 return refresh_mask;
705 }
706 const int enable_refresh_skip = !is_one_pass_rt_params(cpi);
707 const int update_arf = frame_update_type == ARF_UPDATE;
708 const int refresh_idx =
709 get_refresh_idx(ref_frame_map_pairs, update_arf, &cpi->ppi->gf_group,
710 gf_index, enable_refresh_skip, cur_disp_order);
711 return 1 << refresh_idx;
712 }
713
714 #if !CONFIG_REALTIME_ONLY
715 // Apply temporal filtering to source frames and encode the filtered frame.
716 // If the current frame does not require filtering, this function is identical
717 // to av1_encode() except that tpl is not performed.
718 static int denoise_and_encode(AV1_COMP *const cpi, uint8_t *const dest,
719 size_t dest_size,
720 EncodeFrameInput *const frame_input,
721 const EncodeFrameParams *const frame_params,
722 size_t *const frame_size) {
723 #if CONFIG_COLLECT_COMPONENT_TIMING
724 if (cpi->oxcf.pass == 2) start_timing(cpi, denoise_and_encode_time);
725 #endif
726 const AV1EncoderConfig *const oxcf = &cpi->oxcf;
727 AV1_COMMON *const cm = &cpi->common;
728
729 GF_GROUP *const gf_group = &cpi->ppi->gf_group;
730 FRAME_UPDATE_TYPE update_type =
731 get_frame_update_type(&cpi->ppi->gf_group, cpi->gf_frame_index);
732 const int is_second_arf =
733 av1_gop_is_second_arf(gf_group, cpi->gf_frame_index);
734
735 // Decide whether to apply temporal filtering to the source frame.
736 int apply_filtering =
737 av1_is_temporal_filter_on(oxcf) && !is_stat_generation_stage(cpi);
738 if (update_type != KF_UPDATE && update_type != ARF_UPDATE && !is_second_arf) {
739 apply_filtering = 0;
740 }
741 if (apply_filtering) {
742 if (frame_params->frame_type == KEY_FRAME) {
743 // TODO(angiebird): Move the noise level check to av1_tf_info_filtering.
744 // Decide whether it is allowed to perform key frame filtering
745 int allow_kf_filtering = oxcf->kf_cfg.enable_keyframe_filtering &&
746 !frame_params->show_existing_frame &&
747 !is_lossless_requested(&oxcf->rc_cfg);
748 if (allow_kf_filtering) {
749 double y_noise_level = 0.0;
750 av1_estimate_noise_level(
751 frame_input->source, &y_noise_level, AOM_PLANE_Y, AOM_PLANE_Y,
752 cm->seq_params->bit_depth, NOISE_ESTIMATION_EDGE_THRESHOLD);
753 apply_filtering = y_noise_level > 0;
754 } else {
755 apply_filtering = 0;
756 }
757 // If we are doing kf filtering, set up a few things.
758 if (apply_filtering) {
759 av1_setup_past_independence(cm);
760 }
761 } else if (is_second_arf) {
762 apply_filtering = cpi->sf.hl_sf.second_alt_ref_filtering;
763 }
764 }
765
766 #if CONFIG_COLLECT_COMPONENT_TIMING
767 if (cpi->oxcf.pass == 2) start_timing(cpi, apply_filtering_time);
768 #endif
769 // Save the pointer to the original source image.
770 YV12_BUFFER_CONFIG *source_buffer = frame_input->source;
771 // apply filtering to frame
772 if (apply_filtering) {
773 int show_existing_alt_ref = 0;
774 FRAME_DIFF frame_diff;
775 int top_index = 0;
776 int bottom_index = 0;
777 const int q_index = av1_rc_pick_q_and_bounds(
778 cpi, cpi->oxcf.frm_dim_cfg.width, cpi->oxcf.frm_dim_cfg.height,
779 cpi->gf_frame_index, &bottom_index, &top_index);
780
781 // TODO(bohanli): figure out why we need frame_type in cm here.
782 cm->current_frame.frame_type = frame_params->frame_type;
783 if (update_type == KF_UPDATE || update_type == ARF_UPDATE) {
784 YV12_BUFFER_CONFIG *tf_buf = av1_tf_info_get_filtered_buf(
785 &cpi->ppi->tf_info, cpi->gf_frame_index, &frame_diff);
786 if (tf_buf != NULL) {
787 frame_input->source = tf_buf;
788 show_existing_alt_ref = av1_check_show_filtered_frame(
789 tf_buf, &frame_diff, q_index, cm->seq_params->bit_depth);
790 if (show_existing_alt_ref) {
791 cpi->common.showable_frame |= 1;
792 } else {
793 cpi->common.showable_frame = 0;
794 }
795 }
796 if (gf_group->frame_type[cpi->gf_frame_index] != KEY_FRAME) {
797 cpi->ppi->show_existing_alt_ref = show_existing_alt_ref;
798 }
799 }
800
801 if (is_second_arf) {
802 // Allocate the memory for tf_buf_second_arf buffer, only when it is
803 // required.
804 int ret = aom_realloc_frame_buffer(
805 &cpi->ppi->tf_info.tf_buf_second_arf, oxcf->frm_dim_cfg.width,
806 oxcf->frm_dim_cfg.height, cm->seq_params->subsampling_x,
807 cm->seq_params->subsampling_y, cm->seq_params->use_highbitdepth,
808 cpi->oxcf.border_in_pixels, cm->features.byte_alignment, NULL, NULL,
809 NULL, cpi->alloc_pyramid, 0);
810 if (ret)
811 aom_internal_error(cm->error, AOM_CODEC_MEM_ERROR,
812 "Failed to allocate tf_buf_second_arf");
813
814 YV12_BUFFER_CONFIG *tf_buf_second_arf =
815 &cpi->ppi->tf_info.tf_buf_second_arf;
      // Temporal filtering was not applied to the second ARF ahead of time in
      // av1_tf_info_filtering().
818 const int arf_src_index = gf_group->arf_src_offset[cpi->gf_frame_index];
819 // Right now, we are still using tf_buf_second_arf due to
820 // implementation complexity.
821 // TODO(angiebird): Reuse tf_info->tf_buf here.
822 av1_temporal_filter(cpi, arf_src_index, cpi->gf_frame_index, &frame_diff,
823 tf_buf_second_arf);
824 show_existing_alt_ref = av1_check_show_filtered_frame(
825 tf_buf_second_arf, &frame_diff, q_index, cm->seq_params->bit_depth);
826 if (show_existing_alt_ref) {
827 aom_extend_frame_borders(tf_buf_second_arf, av1_num_planes(cm));
828 frame_input->source = tf_buf_second_arf;
829 }
      // Currently, INTNL_ARF_UPDATE frames only do show_existing.
831 cpi->common.showable_frame |= 1;
832 }
833
    // Copy source metadata to the temporally filtered frame.
835 if (source_buffer->metadata &&
836 aom_copy_metadata_to_frame_buffer(frame_input->source,
837 source_buffer->metadata)) {
838 aom_internal_error(
839 cm->error, AOM_CODEC_MEM_ERROR,
840 "Failed to copy source metadata to the temporal filtered frame");
841 }
842 }
843 #if CONFIG_COLLECT_COMPONENT_TIMING
844 if (cpi->oxcf.pass == 2) end_timing(cpi, apply_filtering_time);
845 #endif
846
847 int set_mv_params = frame_params->frame_type == KEY_FRAME ||
848 update_type == ARF_UPDATE || update_type == GF_UPDATE;
849 cm->show_frame = frame_params->show_frame;
850 cm->current_frame.frame_type = frame_params->frame_type;
851 // TODO(bohanli): Why is this? what part of it is necessary?
852 av1_set_frame_size(cpi, cm->width, cm->height);
853 if (set_mv_params) av1_set_mv_search_params(cpi);
854
855 #if CONFIG_RD_COMMAND
856 if (frame_params->frame_type == KEY_FRAME) {
857 char filepath[] = "rd_command.txt";
858 av1_read_rd_command(filepath, &cpi->rd_command);
859 }
860 #endif // CONFIG_RD_COMMAND
861 if (cpi->gf_frame_index == 0 && !is_stat_generation_stage(cpi)) {
862 // perform tpl after filtering
863 int allow_tpl =
864 oxcf->gf_cfg.lag_in_frames > 1 && oxcf->algo_cfg.enable_tpl_model;
865 if (gf_group->size > MAX_LENGTH_TPL_FRAME_STATS) {
866 allow_tpl = 0;
867 }
868 if (frame_params->frame_type != KEY_FRAME) {
      // In rare cases, it's possible to have a non-ARF/GF update_type here.
      // We should set allow_tpl to zero in that situation.
871 allow_tpl =
872 allow_tpl && (update_type == ARF_UPDATE || update_type == GF_UPDATE ||
873 (cpi->use_ducky_encode &&
874 cpi->ducky_encode_info.frame_info.gop_mode ==
875 DUCKY_ENCODE_GOP_MODE_RCL));
876 }
877
878 if (allow_tpl) {
879 if (!cpi->skip_tpl_setup_stats) {
880 av1_tpl_preload_rc_estimate(cpi, frame_params);
881 av1_tpl_setup_stats(cpi, 0, frame_params);
882 #if CONFIG_BITRATE_ACCURACY && !CONFIG_THREE_PASS
883 assert(cpi->gf_frame_index == 0);
884 av1_vbr_rc_update_q_index_list(&cpi->vbr_rc_info, &cpi->ppi->tpl_data,
885 gf_group, cm->seq_params->bit_depth);
886 #endif
887 }
888 } else {
889 av1_init_tpl_stats(&cpi->ppi->tpl_data);
890 }
891 #if CONFIG_BITRATE_ACCURACY && CONFIG_THREE_PASS
892 if (cpi->oxcf.pass == AOM_RC_SECOND_PASS &&
893 cpi->second_pass_log_stream != NULL) {
894 TPL_INFO *tpl_info;
895 AOM_CHECK_MEM_ERROR(cm->error, tpl_info, aom_malloc(sizeof(*tpl_info)));
896 av1_pack_tpl_info(tpl_info, gf_group, &cpi->ppi->tpl_data);
897 av1_write_tpl_info(tpl_info, cpi->second_pass_log_stream,
898 cpi->common.error);
899 aom_free(tpl_info);
900 }
901 #endif // CONFIG_BITRATE_ACCURACY && CONFIG_THREE_PASS
902 }
903
904 if (av1_encode(cpi, dest, dest_size, frame_input, frame_params, frame_size) !=
905 AOM_CODEC_OK) {
906 return AOM_CODEC_ERROR;
907 }
908
909 // Set frame_input source to true source for psnr calculation.
910 if (apply_filtering && is_psnr_calc_enabled(cpi)) {
911 cpi->source = av1_realloc_and_scale_if_required(
912 cm, source_buffer, &cpi->scaled_source, cm->features.interp_filter, 0,
913 false, true, cpi->oxcf.border_in_pixels, cpi->alloc_pyramid);
914 cpi->unscaled_source = source_buffer;
915 }
916 #if CONFIG_COLLECT_COMPONENT_TIMING
917 if (cpi->oxcf.pass == 2) end_timing(cpi, denoise_and_encode_time);
918 #endif
919 return AOM_CODEC_OK;
920 }
921 #endif // !CONFIG_REALTIME_ONLY
922
923 /*!\cond */
924 // Struct to keep track of relevant reference frame data.
925 typedef struct {
926 int map_idx;
927 int disp_order;
928 int pyr_level;
929 int used;
930 } RefBufMapData;
931 /*!\endcond */
932
933 // Comparison function to sort reference frames in ascending display order.
934 static int compare_map_idx_pair_asc(const void *a, const void *b) {
935 if (((RefBufMapData *)a)->disp_order == ((RefBufMapData *)b)->disp_order) {
936 return 0;
937 } else if (((const RefBufMapData *)a)->disp_order >
938 ((const RefBufMapData *)b)->disp_order) {
939 return 1;
940 } else {
941 return -1;
942 }
943 }
944
945 // Checks to see if a particular reference frame is already in the reference
946 // frame map.
947 static int is_in_ref_map(RefBufMapData *map, int disp_order, int n_frames) {
948 for (int i = 0; i < n_frames; i++) {
949 if (disp_order == map[i].disp_order) return 1;
950 }
951 return 0;
952 }
953
954 // Add a reference buffer index to a named reference slot.
955 static void add_ref_to_slot(RefBufMapData *ref, int *const remapped_ref_idx,
956 int frame) {
957 remapped_ref_idx[frame - LAST_FRAME] = ref->map_idx;
958 ref->used = 1;
959 }
960
961 // Threshold dictating when we are allowed to start considering
962 // leaving lowest level frames unmapped.
963 #define LOW_LEVEL_FRAMES_TR 5
964
965 // Find which reference buffer should be left out of the named mapping.
966 // This is because there are 8 reference buffers and only 7 named slots.
967 static void set_unmapped_ref(RefBufMapData *buffer_map, int n_bufs,
968 int n_min_level_refs, int min_level,
969 int cur_frame_disp) {
970 int max_dist = 0;
971 int unmapped_idx = -1;
972 if (n_bufs <= ALTREF_FRAME) return;
973 for (int i = 0; i < n_bufs; i++) {
974 if (buffer_map[i].used) continue;
975 if (buffer_map[i].pyr_level != min_level ||
976 n_min_level_refs >= LOW_LEVEL_FRAMES_TR) {
977 int dist = abs(cur_frame_disp - buffer_map[i].disp_order);
978 if (dist > max_dist) {
979 max_dist = dist;
980 unmapped_idx = i;
981 }
982 }
983 }
984 assert(unmapped_idx >= 0 && "Unmapped reference not found");
985 buffer_map[unmapped_idx].used = 1;
986 }
987
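// Map the physical reference buffers in ref_frame_map_pairs onto the seven
// named reference slots (LAST_FRAME .. ALTREF_FRAME) for the current frame:
// GOLDEN/ALTREF are taken from the lowest pyramid level, LAST/LAST2/LAST3
// from the closest past frames, and BWDREF/ALTREF2 from future frames.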
988 void av1_get_ref_frames(RefFrameMapPair ref_frame_map_pairs[REF_FRAMES],
989 int cur_frame_disp, const AV1_COMP *cpi, int gf_index,
990 int is_parallel_encode,
991 int remapped_ref_idx[REF_FRAMES]) {
992 int buf_map_idx = 0;
993
994 // Initialize reference frame mappings.
995 for (int i = 0; i < REF_FRAMES; ++i) remapped_ref_idx[i] = INVALID_IDX;
996
997 #if !CONFIG_REALTIME_ONLY
998 if (cpi->use_ducky_encode &&
999 cpi->ducky_encode_info.frame_info.gop_mode == DUCKY_ENCODE_GOP_MODE_RCL) {
1000 for (int rf = LAST_FRAME; rf < REF_FRAMES; ++rf) {
1001 if (cpi->ppi->gf_group.ref_frame_list[gf_index][rf] != INVALID_IDX) {
1002 remapped_ref_idx[rf - LAST_FRAME] =
1003 cpi->ppi->gf_group.ref_frame_list[gf_index][rf];
1004 }
1005 }
1006
1007 int valid_rf_idx = 0;
1008 static const int ref_frame_type_order[REF_FRAMES - LAST_FRAME] = {
1009 GOLDEN_FRAME, ALTREF_FRAME, LAST_FRAME, BWDREF_FRAME,
1010 ALTREF2_FRAME, LAST2_FRAME, LAST3_FRAME
1011 };
1012 for (int i = 0; i < REF_FRAMES - LAST_FRAME; i++) {
1013 int rf = ref_frame_type_order[i];
1014 if (remapped_ref_idx[rf - LAST_FRAME] != INVALID_IDX) {
1015 valid_rf_idx = remapped_ref_idx[rf - LAST_FRAME];
1016 break;
1017 }
1018 }
1019
1020 for (int i = 0; i < REF_FRAMES; ++i) {
1021 if (remapped_ref_idx[i] == INVALID_IDX) {
1022 remapped_ref_idx[i] = valid_rf_idx;
1023 }
1024 }
1025
1026 return;
1027 }
1028 #endif // !CONFIG_REALTIME_ONLY
1029
1030 RefBufMapData buffer_map[REF_FRAMES];
1031 int n_bufs = 0;
1032 memset(buffer_map, 0, REF_FRAMES * sizeof(buffer_map[0]));
1033 int min_level = MAX_ARF_LAYERS;
1034 int max_level = 0;
1035 GF_GROUP *gf_group = &cpi->ppi->gf_group;
1036 int skip_ref_unmapping = 0;
1037 int is_one_pass_rt = is_one_pass_rt_params(cpi);
1038
1039 // Go through current reference buffers and store display order, pyr level,
1040 // and map index.
1041 for (int map_idx = 0; map_idx < REF_FRAMES; map_idx++) {
1042 // Get reference frame buffer.
1043 RefFrameMapPair ref_pair = ref_frame_map_pairs[map_idx];
1044 if (ref_pair.disp_order == -1) continue;
1045 const int frame_order = ref_pair.disp_order;
1046 // Avoid duplicates.
1047 if (is_in_ref_map(buffer_map, frame_order, n_bufs)) continue;
1048 const int reference_frame_level = ref_pair.pyr_level;
1049
1050 // Keep track of the lowest and highest levels that currently exist.
1051 if (reference_frame_level < min_level) min_level = reference_frame_level;
1052 if (reference_frame_level > max_level) max_level = reference_frame_level;
1053
1054 buffer_map[n_bufs].map_idx = map_idx;
1055 buffer_map[n_bufs].disp_order = frame_order;
1056 buffer_map[n_bufs].pyr_level = reference_frame_level;
1057 buffer_map[n_bufs].used = 0;
1058 n_bufs++;
1059 }
1060
1061 // Sort frames in ascending display order.
1062 qsort(buffer_map, n_bufs, sizeof(buffer_map[0]), compare_map_idx_pair_asc);
1063
1064 int n_min_level_refs = 0;
1065 int closest_past_ref = -1;
1066 int golden_idx = -1;
1067 int altref_idx = -1;
1068
1069 // Find the GOLDEN_FRAME and BWDREF_FRAME.
1070 // Also collect various stats about the reference frames for the remaining
1071 // mappings.
1072 for (int i = n_bufs - 1; i >= 0; i--) {
1073 if (buffer_map[i].pyr_level == min_level) {
1074 // Keep track of the number of lowest level frames.
1075 n_min_level_refs++;
1076 if (buffer_map[i].disp_order < cur_frame_disp && golden_idx == -1 &&
1077 remapped_ref_idx[GOLDEN_FRAME - LAST_FRAME] == INVALID_IDX) {
1078 // Save index for GOLDEN.
1079 golden_idx = i;
1080 } else if (buffer_map[i].disp_order > cur_frame_disp &&
1081 altref_idx == -1 &&
1082 remapped_ref_idx[ALTREF_FRAME - LAST_FRAME] == INVALID_IDX) {
1083 // Save index for ALTREF.
1084 altref_idx = i;
1085 }
1086 } else if (buffer_map[i].disp_order == cur_frame_disp) {
1087 // Map the BWDREF_FRAME if this is the show_existing_frame.
1088 add_ref_to_slot(&buffer_map[i], remapped_ref_idx, BWDREF_FRAME);
1089 }
1090
1091 // During parallel encodes of lower layer frames, exclude the first frame
1092 // (frame_parallel_level 1) from being used for the reference assignment of
1093 // the second frame (frame_parallel_level 2).
1094 if (!is_one_pass_rt && gf_group->frame_parallel_level[gf_index] == 2 &&
1095 gf_group->frame_parallel_level[gf_index - 1] == 1 &&
1096 gf_group->update_type[gf_index - 1] == INTNL_ARF_UPDATE) {
1097 assert(gf_group->update_type[gf_index] == INTNL_ARF_UPDATE);
1098 #if CONFIG_FPMT_TEST
1099 is_parallel_encode = (cpi->ppi->fpmt_unit_test_cfg == PARALLEL_ENCODE)
1100 ? is_parallel_encode
1101 : 0;
1102 #endif // CONFIG_FPMT_TEST
      // If parallel cpis are active, use ref_idx_to_skip; otherwise, use the
      // display index.
1105 assert(IMPLIES(is_parallel_encode, cpi->ref_idx_to_skip != INVALID_IDX));
1106 assert(IMPLIES(!is_parallel_encode,
1107 gf_group->skip_frame_as_ref[gf_index] != INVALID_IDX));
1108 buffer_map[i].used = is_parallel_encode
1109 ? (buffer_map[i].map_idx == cpi->ref_idx_to_skip)
1110 : (buffer_map[i].disp_order ==
1111 gf_group->skip_frame_as_ref[gf_index]);
1112 // In case a ref frame is excluded from being used during assignment,
1113 // skip the call to set_unmapped_ref(). Applicable in steady state.
1114 if (buffer_map[i].used) skip_ref_unmapping = 1;
1115 }
1116
1117 // Keep track of where the frames change from being past frames to future
1118 // frames.
1119 if (buffer_map[i].disp_order < cur_frame_disp && closest_past_ref < 0)
1120 closest_past_ref = i;
1121 }
1122
1123 // Do not map GOLDEN and ALTREF based on their pyramid level if all reference
1124 // frames have the same level.
1125 if (n_min_level_refs <= n_bufs) {
1126 // Map the GOLDEN_FRAME.
1127 if (golden_idx > -1)
1128 add_ref_to_slot(&buffer_map[golden_idx], remapped_ref_idx, GOLDEN_FRAME);
1129 // Map the ALTREF_FRAME.
1130 if (altref_idx > -1)
1131 add_ref_to_slot(&buffer_map[altref_idx], remapped_ref_idx, ALTREF_FRAME);
1132 }
1133
1134 // Find the buffer to be excluded from the mapping.
1135 if (!skip_ref_unmapping)
1136 set_unmapped_ref(buffer_map, n_bufs, n_min_level_refs, min_level,
1137 cur_frame_disp);
1138
1139 // Place past frames in LAST_FRAME, LAST2_FRAME, and LAST3_FRAME.
1140 for (int frame = LAST_FRAME; frame < GOLDEN_FRAME; frame++) {
1141 // Continue if the current ref slot is already full.
1142 if (remapped_ref_idx[frame - LAST_FRAME] != INVALID_IDX) continue;
    // Find the next unmapped reference buffer
    // in decreasing output order relative to the current picture.
1145 int next_buf_max = 0;
1146 int next_disp_order = INT_MIN;
1147 for (buf_map_idx = n_bufs - 1; buf_map_idx >= 0; buf_map_idx--) {
1148 if (!buffer_map[buf_map_idx].used &&
1149 buffer_map[buf_map_idx].disp_order < cur_frame_disp &&
1150 buffer_map[buf_map_idx].disp_order > next_disp_order) {
1151 next_disp_order = buffer_map[buf_map_idx].disp_order;
1152 next_buf_max = buf_map_idx;
1153 }
1154 }
1155 buf_map_idx = next_buf_max;
1156 if (buf_map_idx < 0) break;
1157 if (buffer_map[buf_map_idx].used) break;
1158 add_ref_to_slot(&buffer_map[buf_map_idx], remapped_ref_idx, frame);
1159 }
1160
1161 // Place future frames (if there are any) in BWDREF_FRAME and ALTREF2_FRAME.
1162 for (int frame = BWDREF_FRAME; frame < REF_FRAMES; frame++) {
1163 // Continue if the current ref slot is already full.
1164 if (remapped_ref_idx[frame - LAST_FRAME] != INVALID_IDX) continue;
    // Find the next unmapped reference buffer
    // in increasing output order relative to the current picture.
1167 int next_buf_max = 0;
1168 int next_disp_order = INT_MAX;
1169 for (buf_map_idx = n_bufs - 1; buf_map_idx >= 0; buf_map_idx--) {
1170 if (!buffer_map[buf_map_idx].used &&
1171 buffer_map[buf_map_idx].disp_order > cur_frame_disp &&
1172 buffer_map[buf_map_idx].disp_order < next_disp_order) {
1173 next_disp_order = buffer_map[buf_map_idx].disp_order;
1174 next_buf_max = buf_map_idx;
1175 }
1176 }
1177 buf_map_idx = next_buf_max;
1178 if (buf_map_idx < 0) break;
1179 if (buffer_map[buf_map_idx].used) break;
1180 add_ref_to_slot(&buffer_map[buf_map_idx], remapped_ref_idx, frame);
1181 }
1182
1183 // Place remaining past frames.
1184 buf_map_idx = closest_past_ref;
1185 for (int frame = LAST_FRAME; frame < REF_FRAMES; frame++) {
1186 // Continue if the current ref slot is already full.
1187 if (remapped_ref_idx[frame - LAST_FRAME] != INVALID_IDX) continue;
1188 // Find the next unmapped reference buffer.
1189 for (; buf_map_idx >= 0; buf_map_idx--) {
1190 if (!buffer_map[buf_map_idx].used) break;
1191 }
1192 if (buf_map_idx < 0) break;
1193 if (buffer_map[buf_map_idx].used) break;
1194 add_ref_to_slot(&buffer_map[buf_map_idx], remapped_ref_idx, frame);
1195 }
1196
1197 // Place remaining future frames.
1198 buf_map_idx = n_bufs - 1;
1199 for (int frame = ALTREF_FRAME; frame >= LAST_FRAME; frame--) {
1200 // Continue if the current ref slot is already full.
1201 if (remapped_ref_idx[frame - LAST_FRAME] != INVALID_IDX) continue;
1202 // Find the next unmapped reference buffer.
1203 for (; buf_map_idx > closest_past_ref; buf_map_idx--) {
1204 if (!buffer_map[buf_map_idx].used) break;
1205 }
1206 if (buf_map_idx < 0) break;
1207 if (buffer_map[buf_map_idx].used) break;
1208 add_ref_to_slot(&buffer_map[buf_map_idx], remapped_ref_idx, frame);
1209 }
1210
1211 // Fill any slots that are empty (should only happen for the first 7 frames).
1212 for (int i = 0; i < REF_FRAMES; ++i)
1213 if (remapped_ref_idx[i] == INVALID_IDX) remapped_ref_idx[i] = 0;
1214 }
1215
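// Top-level per-frame encode driver: pull a source frame from the lookahead
// buffer, decide the frame type and reference structure, optionally apply
// temporal filtering, and write the coded frame into dest. Returns -1 when no
// source frame is available to encode yet.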
1216 int av1_encode_strategy(AV1_COMP *const cpi, size_t *const size,
1217 uint8_t *const dest, size_t dest_size,
1218 unsigned int *frame_flags, int64_t *const time_stamp,
1219 int64_t *const time_end,
1220 const aom_rational64_t *const timestamp_ratio,
1221 int *const pop_lookahead, int flush) {
1222 AV1EncoderConfig *const oxcf = &cpi->oxcf;
1223 AV1_COMMON *const cm = &cpi->common;
1224 GF_GROUP *gf_group = &cpi->ppi->gf_group;
1225 ExternalFlags *const ext_flags = &cpi->ext_flags;
1226 GFConfig *const gf_cfg = &oxcf->gf_cfg;
1227
1228 EncodeFrameInput frame_input;
1229 EncodeFrameParams frame_params;
1230 size_t frame_size;
1231 memset(&frame_input, 0, sizeof(frame_input));
1232 memset(&frame_params, 0, sizeof(frame_params));
1233 frame_size = 0;
1234
1235 #if CONFIG_BITRATE_ACCURACY && CONFIG_THREE_PASS
1236 VBR_RATECTRL_INFO *vbr_rc_info = &cpi->vbr_rc_info;
1237 if (oxcf->pass == AOM_RC_THIRD_PASS && vbr_rc_info->ready == 0) {
1238 THIRD_PASS_FRAME_INFO frame_info[MAX_THIRD_PASS_BUF];
1239 av1_open_second_pass_log(cpi, 1);
1240 FILE *second_pass_log_stream = cpi->second_pass_log_stream;
1241 fseek(second_pass_log_stream, 0, SEEK_END);
1242 size_t file_size = ftell(second_pass_log_stream);
1243 rewind(second_pass_log_stream);
1244 size_t read_size = 0;
1245 while (read_size < file_size) {
1246 THIRD_PASS_GOP_INFO gop_info;
1247 struct aom_internal_error_info *error = cpi->common.error;
1248 // Read in GOP information from the second pass file.
1249 av1_read_second_pass_gop_info(second_pass_log_stream, &gop_info, error);
1250 TPL_INFO *tpl_info;
1251 AOM_CHECK_MEM_ERROR(cm->error, tpl_info, aom_malloc(sizeof(*tpl_info)));
1252 av1_read_tpl_info(tpl_info, second_pass_log_stream, error);
1253 // Read in per-frame info from second-pass encoding
1254 av1_read_second_pass_per_frame_info(second_pass_log_stream, frame_info,
1255 gop_info.num_frames, error);
1256 av1_vbr_rc_append_tpl_info(vbr_rc_info, tpl_info);
1257 read_size = ftell(second_pass_log_stream);
1258 aom_free(tpl_info);
1259 }
1260 av1_close_second_pass_log(cpi);
1261 if (cpi->oxcf.rc_cfg.mode == AOM_Q) {
1262 vbr_rc_info->base_q_index = cpi->oxcf.rc_cfg.cq_level;
1263 av1_vbr_rc_compute_q_indices(
1264 vbr_rc_info->base_q_index, vbr_rc_info->total_frame_count,
1265 vbr_rc_info->qstep_ratio_list, cm->seq_params->bit_depth,
1266 vbr_rc_info->q_index_list);
1267 } else {
1268 vbr_rc_info->base_q_index = av1_vbr_rc_info_estimate_base_q(
1269 vbr_rc_info->total_bit_budget, cm->seq_params->bit_depth,
1270 vbr_rc_info->scale_factors, vbr_rc_info->total_frame_count,
1271 vbr_rc_info->update_type_list, vbr_rc_info->qstep_ratio_list,
1272 vbr_rc_info->txfm_stats_list, vbr_rc_info->q_index_list, NULL);
1273 }
1274 vbr_rc_info->ready = 1;
1275 #if CONFIG_RATECTRL_LOG
1276 rc_log_record_chunk_info(&cpi->rc_log, vbr_rc_info->base_q_index,
1277 vbr_rc_info->total_frame_count);
1278 #endif // CONFIG_RATECTRL_LOG
1279 }
1280 #endif // CONFIG_BITRATE_ACCURACY && CONFIG_THREE_PASS
1281
1282 // Check if we need to stuff more src frames
1283 if (flush == 0) {
1284 int srcbuf_size =
1285 av1_lookahead_depth(cpi->ppi->lookahead, cpi->compressor_stage);
1286 int pop_size =
1287 av1_lookahead_pop_sz(cpi->ppi->lookahead, cpi->compressor_stage);
1288
1289 // Continue buffering look ahead buffer.
1290 if (srcbuf_size < pop_size) return -1;
1291 }
1292
1293 if (!av1_lookahead_peek(cpi->ppi->lookahead, 0, cpi->compressor_stage)) {
1294 #if !CONFIG_REALTIME_ONLY
1295 if (flush && oxcf->pass == AOM_RC_FIRST_PASS &&
1296 !cpi->ppi->twopass.first_pass_done) {
1297 av1_end_first_pass(cpi); /* get last stats packet */
1298 cpi->ppi->twopass.first_pass_done = 1;
1299 }
1300 #endif
1301 return -1;
1302 }
1303
1304 // TODO(sarahparker) finish bit allocation for one pass pyramid
1305 if (has_no_stats_stage(cpi)) {
1306 gf_cfg->gf_max_pyr_height =
1307 AOMMIN(gf_cfg->gf_max_pyr_height, USE_ALTREF_FOR_ONE_PASS);
1308 gf_cfg->gf_min_pyr_height =
1309 AOMMIN(gf_cfg->gf_min_pyr_height, gf_cfg->gf_max_pyr_height);
1310 }
1311
1312 // Allocation of mi buffers.
1313 alloc_mb_mode_info_buffers(cpi);
1314
1315 cpi->skip_tpl_setup_stats = 0;
1316 #if !CONFIG_REALTIME_ONLY
1317 if (oxcf->pass != AOM_RC_FIRST_PASS) {
1318 TplParams *const tpl_data = &cpi->ppi->tpl_data;
1319 if (tpl_data->tpl_stats_pool[0] == NULL) {
1320 av1_setup_tpl_buffers(cpi->ppi, &cm->mi_params, oxcf->frm_dim_cfg.width,
1321 oxcf->frm_dim_cfg.height, 0,
1322 oxcf->gf_cfg.lag_in_frames);
1323 }
1324 }
1325 cpi->twopass_frame.this_frame = NULL;
1326 const int use_one_pass_rt_params = is_one_pass_rt_params(cpi);
1327 if (!use_one_pass_rt_params && !is_stat_generation_stage(cpi)) {
1328 #if CONFIG_COLLECT_COMPONENT_TIMING
1329 start_timing(cpi, av1_get_second_pass_params_time);
1330 #endif
1331
    // Initialise frame_level_rate_correction_factors with the values from
    // before the parallel frames.
1334 if (cpi->ppi->gf_group.frame_parallel_level[cpi->gf_frame_index] > 0) {
1335 for (int i = 0; i < RATE_FACTOR_LEVELS; i++) {
1336 cpi->rc.frame_level_rate_correction_factors[i] =
1337 #if CONFIG_FPMT_TEST
1338 (cpi->ppi->fpmt_unit_test_cfg == PARALLEL_SIMULATION_ENCODE)
1339 ? cpi->ppi->p_rc.temp_rate_correction_factors[i]
1340 :
1341 #endif // CONFIG_FPMT_TEST
1342 cpi->ppi->p_rc.rate_correction_factors[i];
1343 }
1344 }
1345
1346 // copy mv_stats from ppi to frame_level cpi.
1347 cpi->mv_stats = cpi->ppi->mv_stats;
1348 av1_get_second_pass_params(cpi, &frame_params, *frame_flags);
1349 #if CONFIG_COLLECT_COMPONENT_TIMING
1350 end_timing(cpi, av1_get_second_pass_params_time);
1351 #endif
1352 }
1353 #endif
1354
1355 if (!is_stat_generation_stage(cpi)) {
1356 // TODO(jingning): fwd key frame always uses show existing frame?
1357 if (gf_group->update_type[cpi->gf_frame_index] == OVERLAY_UPDATE &&
1358 gf_group->refbuf_state[cpi->gf_frame_index] == REFBUF_RESET) {
1359 frame_params.show_existing_frame = 1;
1360 } else {
1361 frame_params.show_existing_frame =
1362 (cpi->ppi->show_existing_alt_ref &&
1363 gf_group->update_type[cpi->gf_frame_index] == OVERLAY_UPDATE) ||
1364 gf_group->update_type[cpi->gf_frame_index] == INTNL_OVERLAY_UPDATE;
1365 }
1366 frame_params.show_existing_frame &= allow_show_existing(cpi, *frame_flags);
1367
1368 // Special handling to reset 'show_existing_frame' in case of dropped
1369 // frames.
1370 if (oxcf->rc_cfg.drop_frames_water_mark &&
1371 (gf_group->update_type[cpi->gf_frame_index] == OVERLAY_UPDATE ||
1372 gf_group->update_type[cpi->gf_frame_index] == INTNL_OVERLAY_UPDATE)) {
1373 // During the encode of an OVERLAY_UPDATE/INTNL_OVERLAY_UPDATE frame, loop
1374 // over the gf group to check if the corresponding
1375 // ARF_UPDATE/INTNL_ARF_UPDATE frame was dropped.
1376 int cur_disp_idx = gf_group->display_idx[cpi->gf_frame_index];
1377 for (int idx = 0; idx < cpi->gf_frame_index; idx++) {
1378 if (cur_disp_idx == gf_group->display_idx[idx]) {
1379 assert(IMPLIES(
1380 gf_group->update_type[cpi->gf_frame_index] == OVERLAY_UPDATE,
1381 gf_group->update_type[idx] == ARF_UPDATE));
1382 assert(IMPLIES(gf_group->update_type[cpi->gf_frame_index] ==
1383 INTNL_OVERLAY_UPDATE,
1384 gf_group->update_type[idx] == INTNL_ARF_UPDATE));
1385 // Reset show_existing_frame and set cpi->is_dropped_frame to true if
1386 // the frame was dropped during its first encode.
1387 if (gf_group->is_frame_dropped[idx]) {
1388 frame_params.show_existing_frame = 0;
1389 assert(!cpi->is_dropped_frame);
1390 cpi->is_dropped_frame = true;
1391 }
1392 break;
1393 }
1394 }
1395 }
1396
1397 // Reset show_existing_alt_ref decision to 0 after it is used.
1398 if (gf_group->update_type[cpi->gf_frame_index] == OVERLAY_UPDATE) {
1399 cpi->ppi->show_existing_alt_ref = 0;
1400 }
1401 } else {
1402 frame_params.show_existing_frame = 0;
1403 }
1404
1405 struct lookahead_entry *source = NULL;
1406 struct lookahead_entry *last_source = NULL;
1407 if (frame_params.show_existing_frame) {
1408 source = av1_lookahead_peek(cpi->ppi->lookahead, 0, cpi->compressor_stage);
1409 *pop_lookahead = 1;
1410 frame_params.show_frame = 1;
1411 } else {
1412 source = choose_frame_source(cpi, &flush, pop_lookahead, &last_source,
1413 &frame_params.show_frame);
1414 }
1415
1416 if (source == NULL) { // If no source was found, we can't encode a frame.
1417 #if !CONFIG_REALTIME_ONLY
1418 if (flush && oxcf->pass == AOM_RC_FIRST_PASS &&
1419 !cpi->ppi->twopass.first_pass_done) {
1420 av1_end_first_pass(cpi); /* get last stats packet */
1421 cpi->ppi->twopass.first_pass_done = 1;
1422 }
1423 #endif
1424 return -1;
1425 }
1426
  // Reset src_offset so that the actual encode call for this frame gets its
  // source.
1429 gf_group->src_offset[cpi->gf_frame_index] = 0;
1430
  // Source may be changed if temporally filtered later.
1432 frame_input.source = &source->img;
1433 if ((cpi->ppi->use_svc || cpi->rc.prev_frame_is_dropped) &&
1434 last_source != NULL)
1435 av1_svc_set_last_source(cpi, &frame_input, &last_source->img);
1436 else
1437 frame_input.last_source = last_source != NULL ? &last_source->img : NULL;
1438 frame_input.ts_duration = source->ts_end - source->ts_start;
1439 // Save unfiltered source. It is used in av1_get_second_pass_params().
1440 cpi->unfiltered_source = frame_input.source;
1441
1442 *time_stamp = source->ts_start;
1443 *time_end = source->ts_end;
1444 if (source->ts_start < cpi->time_stamps.first_ts_start) {
1445 cpi->time_stamps.first_ts_start = source->ts_start;
1446 cpi->time_stamps.prev_ts_end = source->ts_start;
1447 }
1448
1449 av1_apply_encoding_flags(cpi, source->flags);
1450 *frame_flags = (source->flags & AOM_EFLAG_FORCE_KF) ? FRAMEFLAGS_KEY : 0;
1451
1452 #if CONFIG_FPMT_TEST
1453 if (cpi->ppi->fpmt_unit_test_cfg == PARALLEL_SIMULATION_ENCODE) {
1454 if (cpi->ppi->gf_group.frame_parallel_level[cpi->gf_frame_index] > 0) {
1455 cpi->framerate = cpi->temp_framerate;
1456 }
1457 }
1458 #endif // CONFIG_FPMT_TEST
1459
  // Shown frames and arf-overlay frames need the frame rate considered.
1461 if (frame_params.show_frame)
1462 adjust_frame_rate(cpi, source->ts_start, source->ts_end);
1463
1464 if (!frame_params.show_existing_frame) {
1465 #if !CONFIG_REALTIME_ONLY
1466 if (cpi->film_grain_table) {
1467 cm->cur_frame->film_grain_params_present = aom_film_grain_table_lookup(
1468 cpi->film_grain_table, *time_stamp, *time_end, 0 /* =erase */,
1469 &cm->film_grain_params);
1470 } else {
1471 cm->cur_frame->film_grain_params_present =
1472 cm->seq_params->film_grain_params_present;
1473 }
1474 #endif
1475 // Only one operating point is supported for now.
1476 const int64_t pts64 = ticks_to_timebase_units(timestamp_ratio, *time_stamp);
1477 if (pts64 < 0 || pts64 > UINT32_MAX) return AOM_CODEC_ERROR;
1478
1479 cm->frame_presentation_time = (uint32_t)pts64;
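// The presentation time is signalled with at most 32 bits in the AV1 timing
// info, hence the range check on pts64 above.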
1480 }
1481
1482 #if CONFIG_COLLECT_COMPONENT_TIMING
1483 start_timing(cpi, av1_get_one_pass_rt_params_time);
1484 #endif
1485 #if CONFIG_REALTIME_ONLY
1486 av1_get_one_pass_rt_params(cpi, &frame_params.frame_type, &frame_input,
1487 *frame_flags);
1488 if (use_rtc_reference_structure_one_layer(cpi))
1489 av1_set_rtc_reference_structure_one_layer(cpi, cpi->gf_frame_index == 0);
1490 #else
1491 if (use_one_pass_rt_params) {
1492 av1_get_one_pass_rt_params(cpi, &frame_params.frame_type, &frame_input,
1493 *frame_flags);
1494 if (use_rtc_reference_structure_one_layer(cpi))
1495 av1_set_rtc_reference_structure_one_layer(cpi, cpi->gf_frame_index == 0);
1496 }
1497 #endif
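// In CONFIG_REALTIME_ONLY builds the one-pass RT parameter path above is
// taken unconditionally; otherwise it is used only when one-pass RT
// parameters were requested for this encode.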
1498 #if CONFIG_COLLECT_COMPONENT_TIMING
1499 end_timing(cpi, av1_get_one_pass_rt_params_time);
1500 #endif
1501
1502 FRAME_UPDATE_TYPE frame_update_type =
1503 get_frame_update_type(gf_group, cpi->gf_frame_index);
1504
1505 if (frame_params.show_existing_frame &&
1506 frame_params.frame_type != KEY_FRAME) {
1507 // Force show-existing frames to be INTER, except forward keyframes
1508 frame_params.frame_type = INTER_FRAME;
1509 }
1510
1511 // Per-frame encode speed. In theory this can vary, but things may have
1512 // been written assuming speed-level will not change within a sequence, so
1513 // this parameter should be used with caution.
1514 frame_params.speed = oxcf->speed;
1515
1516 #if !CONFIG_REALTIME_ONLY
1517 // Set forced key frames when necessary. For two-pass encoding / LAP mode,
1518 // this is already handled by av1_get_second_pass_params. However, when no
1519 // stats are available, we still need to check whether the new frame is a
1520 // key frame. For one-pass RT, av1_get_one_pass_rt_params already checks this.
1521 if (!use_one_pass_rt_params &&
1522 (is_stat_generation_stage(cpi) || has_no_stats_stage(cpi))) {
1523 // The current frame is coded as a key frame in any of the following cases:
1524 // 1) It is the first frame of the video.
1525 // 2) All-intra frame encoding is in use.
1526 // 3) A key frame is explicitly forced.
1527 const int kf_requested =
1528 (cm->current_frame.frame_number == 0 ||
1529 oxcf->kf_cfg.key_freq_max == 0 || (*frame_flags & FRAMEFLAGS_KEY));
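// Overlay and internal-overlay frames are excluded below: they re-show an
// already coded ARF and must not be turned into key frames.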
1530 if (kf_requested && frame_update_type != OVERLAY_UPDATE &&
1531 frame_update_type != INTNL_OVERLAY_UPDATE) {
1532 frame_params.frame_type = KEY_FRAME;
1533 } else if (is_stat_generation_stage(cpi)) {
1534 // For stats generation, set the frame type to inter here.
1535 frame_params.frame_type = INTER_FRAME;
1536 }
1537 }
1538 #endif
1539
1540 // Work out some encoding parameters specific to the pass:
1541 if (has_no_stats_stage(cpi) && oxcf->q_cfg.aq_mode == CYCLIC_REFRESH_AQ) {
1542 av1_cyclic_refresh_update_parameters(cpi);
1543 } else if (is_stat_generation_stage(cpi)) {
1544 cpi->td.mb.e_mbd.lossless[0] = is_lossless_requested(&oxcf->rc_cfg);
1545 } else if (is_stat_consumption_stage(cpi)) {
1546 #if CONFIG_MISMATCH_DEBUG
1547 mismatch_move_frame_idx_w();
1548 #endif
1549 #if TXCOEFF_COST_TIMER
1550 cm->txcoeff_cost_timer = 0;
1551 cm->txcoeff_cost_count = 0;
1552 #endif
1553 }
1554
1555 if (!is_stat_generation_stage(cpi))
1556 set_ext_overrides(cm, &frame_params, ext_flags);
1557
1558 // Shown key frames and S frames refresh all reference buffers.
1559 const int force_refresh_all =
1560 ((frame_params.frame_type == KEY_FRAME && frame_params.show_frame) ||
1561 frame_params.frame_type == S_FRAME) &&
1562 !frame_params.show_existing_frame;
1563
1564 av1_configure_buffer_updates(
1565 cpi, &frame_params.refresh_frame, frame_update_type,
1566 gf_group->refbuf_state[cpi->gf_frame_index], force_refresh_all);
1567
1568 if (!is_stat_generation_stage(cpi)) {
1569 const YV12_BUFFER_CONFIG *ref_frame_buf[INTER_REFS_PER_FRAME];
1570
1571 RefFrameMapPair ref_frame_map_pairs[REF_FRAMES];
1572 init_ref_map_pair(cpi, ref_frame_map_pairs);
1573 const int order_offset = gf_group->arf_src_offset[cpi->gf_frame_index];
1574 const int cur_frame_disp =
1575 cpi->common.current_frame.frame_number + order_offset;
1576
1577 int get_ref_frames = 0;
1578 #if CONFIG_FPMT_TEST
1579 get_ref_frames =
1580 (cpi->ppi->fpmt_unit_test_cfg == PARALLEL_SIMULATION_ENCODE) ? 1 : 0;
1581 #endif // CONFIG_FPMT_TEST
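// Recompute the remapped reference indices only for frames at frame-parallel
// level 0 (or when simulating parallel encode in the FPMT test); frames
// dispatched for parallel encode are expected to already have their mapping
// set up.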
1582 if (get_ref_frames ||
1583 gf_group->frame_parallel_level[cpi->gf_frame_index] == 0) {
1584 if (!ext_flags->refresh_frame.update_pending) {
1585 av1_get_ref_frames(ref_frame_map_pairs, cur_frame_disp, cpi,
1586 cpi->gf_frame_index, 1, cm->remapped_ref_idx);
1587 } else if (cpi->ppi->rtc_ref.set_ref_frame_config ||
1588 use_rtc_reference_structure_one_layer(cpi)) {
1589 for (unsigned int i = 0; i < INTER_REFS_PER_FRAME; i++)
1590 cm->remapped_ref_idx[i] = cpi->ppi->rtc_ref.ref_idx[i];
1591 }
1592 }
1593
1594 // Get the reference frames
1595 bool has_ref_frames = false;
1596 for (int i = 0; i < INTER_REFS_PER_FRAME; ++i) {
1597 const RefCntBuffer *ref_frame =
1598 get_ref_frame_buf(cm, ref_frame_priority_order[i]);
1599 ref_frame_buf[i] = ref_frame != NULL ? &ref_frame->buf : NULL;
1600 if (ref_frame != NULL) has_ref_frames = true;
1601 }
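// An INTER frame or S frame cannot be coded without at least one valid
// reference buffer, so report an error in that case.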
1602 if (!has_ref_frames && (frame_params.frame_type == INTER_FRAME ||
1603 frame_params.frame_type == S_FRAME)) {
1604 return AOM_CODEC_ERROR;
1605 }
1606
1607 // Work out which reference frame slots may be used.
1608 frame_params.ref_frame_flags =
1609 get_ref_frame_flags(&cpi->sf, is_one_pass_rt_params(cpi), ref_frame_buf,
1610 ext_flags->ref_frame_flags);
1611
1612 // Set primary_ref_frame of non-reference frames to PRIMARY_REF_NONE.
1613 if (cpi->ppi->gf_group.is_frame_non_ref[cpi->gf_frame_index]) {
1614 frame_params.primary_ref_frame = PRIMARY_REF_NONE;
1615 } else {
1616 frame_params.primary_ref_frame =
1617 choose_primary_ref_frame(cpi, &frame_params);
1618 }
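// primary_ref_frame indicates which reference frame's coding context (e.g.
// its CDFs) is inherited; PRIMARY_REF_NONE means the default context is used
// instead.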
1619
1620 frame_params.order_offset = gf_group->arf_src_offset[cpi->gf_frame_index];
1621
1622 // Call av1_get_refresh_frame_flags() if the refresh index is not available.
1623 if (!cpi->refresh_idx_available) {
1624 frame_params.refresh_frame_flags = av1_get_refresh_frame_flags(
1625 cpi, &frame_params, frame_update_type, cpi->gf_frame_index,
1626 cur_frame_disp, ref_frame_map_pairs);
1627 } else {
1628 assert(cpi->ref_refresh_index != INVALID_IDX);
1629 frame_params.refresh_frame_flags = (1 << cpi->ref_refresh_index);
1630 }
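// refresh_frame_flags is a bitmask over the REF_FRAMES (8) reference slots:
// bit i set means slot i is overwritten with the newly coded frame, e.g.
// 0x01 refreshes only slot 0 and 0xFF refreshes all slots. When a refresh
// index has already been chosen, exactly one bit is set.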
1631
1632 // Frames marked as is_frame_non_ref do not refresh any reference buffers.
1633 if (gf_group->is_frame_non_ref[cpi->gf_frame_index])
1634 frame_params.refresh_frame_flags = 0;
1635
1636 frame_params.existing_fb_idx_to_show = INVALID_IDX;
1637 // Find the frame buffer to show based on display order.
1638 if (frame_params.show_existing_frame) {
1639 for (int frame = 0; frame < REF_FRAMES; frame++) {
1640 const RefCntBuffer *const buf = cm->ref_frame_map[frame];
1641 if (buf == NULL) continue;
1642 const int frame_order = (int)buf->display_order_hint;
1643 if (frame_order == cur_frame_disp)
1644 frame_params.existing_fb_idx_to_show = frame;
1645 }
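// If no buffer with a matching display_order_hint was found,
// existing_fb_idx_to_show stays INVALID_IDX.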
1646 }
1647 }
1648
1649 // The way frame_params->remapped_ref_idx is set up here is a placeholder.
1650 // Currently, reference buffer assignment is done by update_ref_frame_map(),
1651 // which is called by the high-level strategy AFTER encoding a frame. It
1652 // modifies cm->remapped_ref_idx. If you want to use an alternative method
1653 // to determine reference buffer assignment, just put your assignments into
1654 // frame_params->remapped_ref_idx here and they will be used when encoding
1655 // this frame. If frame_params->remapped_ref_idx is set up independently of
1656 // cm->remapped_ref_idx then update_ref_frame_map() will have no effect.
1657 memcpy(frame_params.remapped_ref_idx, cm->remapped_ref_idx,
1658 REF_FRAMES * sizeof(*cm->remapped_ref_idx));
1659
1660 cpi->td.mb.rdmult_delta_qindex = cpi->td.mb.delta_qindex = 0;
1661
1662 if (!frame_params.show_existing_frame) {
1663 cm->quant_params.using_qmatrix = oxcf->q_cfg.using_qm;
1664 }
1665
1666 const int is_intra_frame = frame_params.frame_type == KEY_FRAME ||
1667 frame_params.frame_type == INTRA_ONLY_FRAME;
1668 FeatureFlags *const features = &cm->features;
1669 if (!is_stat_generation_stage(cpi) &&
1670 (oxcf->pass == AOM_RC_ONE_PASS || oxcf->pass >= AOM_RC_SECOND_PASS) &&
1671 is_intra_frame) {
1672 av1_set_screen_content_options(cpi, features);
1673 }
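// Screen-content tool decisions are only re-evaluated on intra frames
// (key frames and intra-only frames), and never during stats generation.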
1674
1675 #if CONFIG_REALTIME_ONLY
1676 if (av1_encode(cpi, dest, dest_size, &frame_input, &frame_params,
1677 &frame_size) != AOM_CODEC_OK) {
1678 return AOM_CODEC_ERROR;
1679 }
1680 #else
1681 if (has_no_stats_stage(cpi) && oxcf->mode == REALTIME &&
1682 gf_cfg->lag_in_frames == 0) {
1683 if (av1_encode(cpi, dest, dest_size, &frame_input, &frame_params,
1684 &frame_size) != AOM_CODEC_OK) {
1685 return AOM_CODEC_ERROR;
1686 }
1687 } else if (denoise_and_encode(cpi, dest, dest_size, &frame_input,
1688 &frame_params, &frame_size) != AOM_CODEC_OK) {
1689 return AOM_CODEC_ERROR;
1690 }
1691 #endif // CONFIG_REALTIME_ONLY
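// In non-realtime builds, frames normally go through denoise_and_encode(),
// which may apply temporal filtering before the actual encode; the direct
// av1_encode() call is reserved for the no-stats, zero-lag REALTIME path.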
1692
1693 // This is needed for the RTC temporal-filter case: use the true (unfiltered)
1694 // source in the PSNR calculation.
1695 if (is_psnr_calc_enabled(cpi) && cpi->sf.rt_sf.use_rtc_tf) {
1696 assert(cpi->orig_source.buffer_alloc_sz > 0);
1697 cpi->source = &cpi->orig_source;
1698 }
1699
1700 if (!is_stat_generation_stage(cpi)) {
1701 // First pass doesn't modify reference buffer assignment or produce frame
1702 // flags
1703 update_frame_flags(&cpi->common, &cpi->refresh_frame, frame_flags);
1704 set_additional_frame_flags(cm, frame_flags);
1705 }
1706
1707 #if !CONFIG_REALTIME_ONLY
1708 #if TXCOEFF_COST_TIMER
1709 if (!is_stat_generation_stage(cpi)) {
1710 cm->cum_txcoeff_cost_timer += cm->txcoeff_cost_timer;
1711 fprintf(stderr,
1712 "\ntxb coeff cost block number: %ld, frame time: %ld, cum time %ld "
1713 "in us\n",
1714 cm->txcoeff_cost_count, cm->txcoeff_cost_timer,
1715 cm->cum_txcoeff_cost_timer);
1716 }
1717 #endif
1718 #endif // !CONFIG_REALTIME_ONLY
1719
1720 #if CONFIG_TUNE_VMAF
1721 if (!is_stat_generation_stage(cpi) &&
1722 (oxcf->tune_cfg.tuning >= AOM_TUNE_VMAF_WITH_PREPROCESSING &&
1723 oxcf->tune_cfg.tuning <= AOM_TUNE_VMAF_NEG_MAX_GAIN)) {
1724 av1_update_vmaf_curve(cpi);
1725 }
1726 #endif
1727
1728 *size = frame_size;
1729
1730 // Leave a signal for the higher-level caller about whether this frame is droppable.
1731 if (*size > 0) {
1732 cpi->droppable =
1733 is_frame_droppable(&cpi->ppi->rtc_ref, &ext_flags->refresh_frame);
1734 }
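// A frame is considered droppable when it refreshes no reference buffers
// (see is_frame_droppable()), so the application can discard its packet
// without breaking the prediction chain of later frames.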
1735
1736 // For SVC, or when the frame dropper is enabled:
1737 // keep track of the (unscaled) source corresponding to the refresh of the
1738 // LAST reference (base temporal layer - TL0). Copy only for the top spatial
1739 // enhancement layer so that all spatial layers of the next superframe have
1740 // last_source aligned with the previous TL0 superframe. Avoid cases where
1741 // the resolution changes for the unscaled source (top spatial layer). This
1742 // only needs to be done for frames that are encoded (size > 0).
1743 if (*size > 0 &&
1744 (cpi->ppi->use_svc || cpi->oxcf.rc_cfg.drop_frames_water_mark > 0) &&
1745 cpi->svc.spatial_layer_id == cpi->svc.number_spatial_layers - 1 &&
1746 cpi->svc.temporal_layer_id == 0 &&
1747 cpi->unscaled_source->y_width == cpi->svc.source_last_TL0.y_width &&
1748 cpi->unscaled_source->y_height == cpi->svc.source_last_TL0.y_height) {
1749 aom_yv12_copy_y(cpi->unscaled_source, &cpi->svc.source_last_TL0, 1);
1750 aom_yv12_copy_u(cpi->unscaled_source, &cpi->svc.source_last_TL0, 1);
1751 aom_yv12_copy_v(cpi->unscaled_source, &cpi->svc.source_last_TL0, 1);
1752 }
1753
1754 return AOM_CODEC_OK;
1755 }
1756