/*
 * Copyright (c) 2020, Alliance for Open Media. All rights reserved.
 *
 * This source code is subject to the terms of the BSD 2 Clause License and
 * the Alliance for Open Media Patent License 1.0. If the BSD 2 Clause License
 * was not distributed with this source code in the LICENSE file, you can
 * obtain it at www.aomedia.org/license/software. If the Alliance for Open
 * Media Patent License 1.0 was not distributed with this source code in the
 * PATENTS file, you can obtain it at www.aomedia.org/license/patent.
 */

#ifndef AOM_AV1_ENCODER_ENCODER_ALLOC_H_
#define AOM_AV1_ENCODER_ENCODER_ALLOC_H_

#include "av1/encoder/block.h"
#include "av1/encoder/encodeframe_utils.h"
#include "av1/encoder/encoder.h"
#include "av1/encoder/encodetxb.h"
#include "av1/encoder/ethread.h"
#include "av1/encoder/global_motion_facade.h"
#include "av1/encoder/intra_mode_search_utils.h"
#include "av1/encoder/pickcdef.h"

#ifdef __cplusplus
extern "C" {
#endif

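// Frees the frame-level mbmi_ext buffer and resets its allocation size.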
static inline void dealloc_context_buffers_ext(
    MBMIExtFrameBufferInfo *mbmi_ext_info) {
  aom_free(mbmi_ext_info->frame_base);
  mbmi_ext_info->frame_base = NULL;
  mbmi_ext_info->alloc_size = 0;
}

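// Ensures the frame-level mbmi_ext buffer is large enough for the current
// mi grid (allocated at mi_alloc_bsize granularity) and updates the stride.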
static inline void alloc_context_buffers_ext(
    AV1_COMMON *cm, MBMIExtFrameBufferInfo *mbmi_ext_info) {
  const CommonModeInfoParams *const mi_params = &cm->mi_params;

  const int mi_alloc_size_1d = mi_size_wide[mi_params->mi_alloc_bsize];
  const int mi_alloc_rows =
      (mi_params->mi_rows + mi_alloc_size_1d - 1) / mi_alloc_size_1d;
  const int mi_alloc_cols =
      (mi_params->mi_cols + mi_alloc_size_1d - 1) / mi_alloc_size_1d;
  const int new_ext_mi_size = mi_alloc_rows * mi_alloc_cols;

  if (new_ext_mi_size > mbmi_ext_info->alloc_size) {
    dealloc_context_buffers_ext(mbmi_ext_info);
    CHECK_MEM_ERROR(
        cm, mbmi_ext_info->frame_base,
        aom_malloc(new_ext_mi_size * sizeof(*mbmi_ext_info->frame_base)));
    mbmi_ext_info->alloc_size = new_ext_mi_size;
  }
  // The stride needs to be updated regardless of whether a new allocation
  // happened or not.
  mbmi_ext_info->stride = mi_alloc_cols;
}

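// Sets up the mode-info grid parameters for the current frame size and
// allocates the encoder-side buffers that depend on them: the txb buffer,
// MV costs (skipped for allintra), the shared coefficient buffer, the SMS
// tree, and the first-pass mode context.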
static inline void alloc_compressor_data(AV1_COMP *cpi) {
  AV1_COMMON *cm = &cpi->common;
  CommonModeInfoParams *const mi_params = &cm->mi_params;

  // Setup mi_params
  mi_params->set_mb_mi(mi_params, cm->width, cm->height,
                       cpi->sf.part_sf.default_min_partition_size);

  if (!is_stat_generation_stage(cpi)) av1_alloc_txb_buf(cpi);

  aom_free(cpi->td.mv_costs_alloc);
  cpi->td.mv_costs_alloc = NULL;
  // Avoid the memory allocation of 'mv_costs_alloc' for allintra encoding
  // mode.
  if (cpi->oxcf.kf_cfg.key_freq_max != 0) {
    CHECK_MEM_ERROR(cm, cpi->td.mv_costs_alloc,
                    (MvCosts *)aom_calloc(1, sizeof(*cpi->td.mv_costs_alloc)));
    cpi->td.mb.mv_costs = cpi->td.mv_costs_alloc;
  }

  av1_setup_shared_coeff_buffer(cm->seq_params, &cpi->td.shared_coeff_buf,
                                cm->error);
  if (av1_setup_sms_tree(cpi, &cpi->td)) {
    aom_internal_error(cm->error, AOM_CODEC_MEM_ERROR,
                       "Failed to allocate SMS tree");
  }
  cpi->td.firstpass_ctx =
      av1_alloc_pmc(cpi, BLOCK_16X16, &cpi->td.shared_coeff_buf);
  if (!cpi->td.firstpass_ctx)
    aom_internal_error(cm->error, AOM_CODEC_MEM_ERROR,
                       "Failed to allocate PICK_MODE_CONTEXT");
}

// Allocate mbmi buffers which are used to store mode information at block
// level.
static inline void alloc_mb_mode_info_buffers(AV1_COMP *const cpi) {
  AV1_COMMON *const cm = &cpi->common;
  if (av1_alloc_context_buffers(cm, cm->width, cm->height,
                                cpi->sf.part_sf.default_min_partition_size)) {
    aom_internal_error(cm->error, AOM_CODEC_MEM_ERROR,
                       "Failed to allocate context buffers");
  }

  if (!is_stat_generation_stage(cpi))
    alloc_context_buffers_ext(cm, &cpi->mbmi_ext_info);
}

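// (Re)creates the mi-resolution encoder maps: the segmentation map, the
// cyclic-refresh map, and the map marking inactive areas.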
static inline void realloc_segmentation_maps(AV1_COMP *cpi) {
  AV1_COMMON *const cm = &cpi->common;
  CommonModeInfoParams *const mi_params = &cm->mi_params;

  // Create the encoder segmentation map and set all entries to 0
  aom_free(cpi->enc_seg.map);
  CHECK_MEM_ERROR(cm, cpi->enc_seg.map,
                  aom_calloc(mi_params->mi_rows * mi_params->mi_cols, 1));

  // Create a map used for cyclic background refresh.
  if (cpi->cyclic_refresh) av1_cyclic_refresh_free(cpi->cyclic_refresh);
  CHECK_MEM_ERROR(
      cm, cpi->cyclic_refresh,
      av1_cyclic_refresh_alloc(mi_params->mi_rows, mi_params->mi_cols));

  // Create a map used to mark inactive areas.
  aom_free(cpi->active_map.map);
  CHECK_MEM_ERROR(cm, cpi->active_map.map,
                  aom_calloc(mi_params->mi_rows * mi_params->mi_cols, 1));
}

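// Allocates the aligned weighted-source, mask, and above/left prediction
// buffers used by the OBMC search.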
static inline void alloc_obmc_buffers(OBMCBuffer *obmc_buffer,
                                      struct aom_internal_error_info *error) {
  AOM_CHECK_MEM_ERROR(
      error, obmc_buffer->wsrc,
      (int32_t *)aom_memalign(16, MAX_SB_SQUARE * sizeof(*obmc_buffer->wsrc)));
  AOM_CHECK_MEM_ERROR(
      error, obmc_buffer->mask,
      (int32_t *)aom_memalign(16, MAX_SB_SQUARE * sizeof(*obmc_buffer->mask)));
  AOM_CHECK_MEM_ERROR(
      error, obmc_buffer->above_pred,
      (uint8_t *)aom_memalign(
          16, MAX_MB_PLANE * MAX_SB_SQUARE * sizeof(*obmc_buffer->above_pred)));
  AOM_CHECK_MEM_ERROR(
      error, obmc_buffer->left_pred,
      (uint8_t *)aom_memalign(
          16, MAX_MB_PLANE * MAX_SB_SQUARE * sizeof(*obmc_buffer->left_pred)));
}

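// Frees the OBMC buffers and nulls the pointers so a repeated release is
// harmless.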
static inline void release_obmc_buffers(OBMCBuffer *obmc_buffer) {
  aom_free(obmc_buffer->mask);
  aom_free(obmc_buffer->above_pred);
  aom_free(obmc_buffer->left_pred);
  aom_free(obmc_buffer->wsrc);

  obmc_buffer->mask = NULL;
  obmc_buffer->above_pred = NULL;
  obmc_buffer->left_pred = NULL;
  obmc_buffer->wsrc = NULL;
}

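// Allocates the scratch buffers used by the compound prediction type RD
// search: the two component predictions, residual and difference buffers,
// and a best-mask scratch buffer.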
static inline void alloc_compound_type_rd_buffers(
    struct aom_internal_error_info *error, CompoundTypeRdBuffers *const bufs) {
  AOM_CHECK_MEM_ERROR(
      error, bufs->pred0,
      (uint8_t *)aom_memalign(16, 2 * MAX_SB_SQUARE * sizeof(*bufs->pred0)));
  AOM_CHECK_MEM_ERROR(
      error, bufs->pred1,
      (uint8_t *)aom_memalign(16, 2 * MAX_SB_SQUARE * sizeof(*bufs->pred1)));
  AOM_CHECK_MEM_ERROR(
      error, bufs->residual1,
      (int16_t *)aom_memalign(32, MAX_SB_SQUARE * sizeof(*bufs->residual1)));
  AOM_CHECK_MEM_ERROR(
      error, bufs->diff10,
      (int16_t *)aom_memalign(32, MAX_SB_SQUARE * sizeof(*bufs->diff10)));
  AOM_CHECK_MEM_ERROR(error, bufs->tmp_best_mask_buf,
                      (uint8_t *)aom_malloc(2 * MAX_SB_SQUARE *
                                            sizeof(*bufs->tmp_best_mask_buf)));
}

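// Frees the compound prediction type RD scratch buffers.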
static inline void release_compound_type_rd_buffers(
    CompoundTypeRdBuffers *const bufs) {
  aom_free(bufs->pred0);
  aom_free(bufs->pred1);
  aom_free(bufs->residual1);
  aom_free(bufs->diff10);
  aom_free(bufs->tmp_best_mask_buf);
  av1_zero(*bufs);  // Set all pointers to NULL for safety.
}

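// Frees the buffers owned by the compressor instance. Several of the calls
// below exist specifically so that error paths (temporal filtering, TPL,
// global motion, CDEF search) free buffers whose normal teardown code may
// never have run.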
static inline void dealloc_compressor_data(AV1_COMP *cpi) {
  AV1_COMMON *const cm = &cpi->common;
  TokenInfo *token_info = &cpi->token_info;
  AV1EncRowMultiThreadInfo *const enc_row_mt = &cpi->mt_info.enc_row_mt;
  const int num_planes = av1_num_planes(cm);
  dealloc_context_buffers_ext(&cpi->mbmi_ext_info);

  aom_free(cpi->tile_data);
  cpi->tile_data = NULL;
  cpi->allocated_tiles = 0;
  enc_row_mt->allocated_tile_cols = 0;
  enc_row_mt->allocated_tile_rows = 0;
  // Delete the segmentation map
  aom_free(cpi->enc_seg.map);
  cpi->enc_seg.map = NULL;

  av1_cyclic_refresh_free(cpi->cyclic_refresh);
  cpi->cyclic_refresh = NULL;

  aom_free(cpi->active_map.map);
  cpi->active_map.map = NULL;

  aom_free(cpi->ssim_rdmult_scaling_factors);
  cpi->ssim_rdmult_scaling_factors = NULL;

  aom_free(cpi->tpl_rdmult_scaling_factors);
  cpi->tpl_rdmult_scaling_factors = NULL;

#if CONFIG_TUNE_VMAF
  aom_free(cpi->vmaf_info.rdmult_scaling_factors);
  cpi->vmaf_info.rdmult_scaling_factors = NULL;
  aom_close_vmaf_model(cpi->vmaf_info.vmaf_model);
#endif

#if CONFIG_TUNE_BUTTERAUGLI
  aom_free(cpi->butteraugli_info.rdmult_scaling_factors);
  cpi->butteraugli_info.rdmult_scaling_factors = NULL;
  aom_free_frame_buffer(&cpi->butteraugli_info.source);
  aom_free_frame_buffer(&cpi->butteraugli_info.resized_source);
#endif

#if CONFIG_SALIENCY_MAP
  aom_free(cpi->saliency_map);
  aom_free(cpi->sm_scaling_factor);
#endif

  release_obmc_buffers(&cpi->td.mb.obmc_buffer);

  aom_free(cpi->td.mv_costs_alloc);
  cpi->td.mv_costs_alloc = NULL;
  aom_free(cpi->td.dv_costs_alloc);
  cpi->td.dv_costs_alloc = NULL;

  aom_free(cpi->td.mb.sb_stats_cache);
  cpi->td.mb.sb_stats_cache = NULL;

  aom_free(cpi->td.mb.sb_fp_stats);
  cpi->td.mb.sb_fp_stats = NULL;

#if CONFIG_PARTITION_SEARCH_ORDER
  aom_free(cpi->td.mb.rdcost);
  cpi->td.mb.rdcost = NULL;
#endif

  av1_free_pc_tree_recursive(cpi->td.pc_root, num_planes, 0, 0,
                             cpi->sf.part_sf.partition_search_type);
  cpi->td.pc_root = NULL;

  for (int i = 0; i < 2; i++)
    for (int j = 0; j < 2; j++) {
      aom_free(cpi->td.mb.intrabc_hash_info.hash_value_buffer[i][j]);
      cpi->td.mb.intrabc_hash_info.hash_value_buffer[i][j] = NULL;
    }

  av1_hash_table_destroy(&cpi->td.mb.intrabc_hash_info.intrabc_hash_table);

  aom_free(cm->tpl_mvs);
  cm->tpl_mvs = NULL;

  aom_free(cpi->td.pixel_gradient_info);
  cpi->td.pixel_gradient_info = NULL;

  aom_free(cpi->td.src_var_info_of_4x4_sub_blocks);
  cpi->td.src_var_info_of_4x4_sub_blocks = NULL;

  aom_free(cpi->td.vt64x64);
  cpi->td.vt64x64 = NULL;

  av1_free_pmc(cpi->td.firstpass_ctx, num_planes);
  cpi->td.firstpass_ctx = NULL;
  const int is_highbitdepth = cpi->tf_ctx.is_highbitdepth;
  // This call ensures that the buffers allocated by tf_alloc_and_reset_data()
  // in av1_temporal_filter() for single-threaded encode are freed in case an
  // error is encountered during temporal filtering (due to early termination,
  // tf_dealloc_data() in av1_temporal_filter() would not be invoked).
  tf_dealloc_data(&cpi->td.tf_data, is_highbitdepth);

  // This call ensures that tpl_tmp_buffers for single-threaded encode are
  // freed in case of an error during tpl.
  tpl_dealloc_temp_buffers(&cpi->td.tpl_tmp_buffers);

  // This call ensures that the global motion (gm) data buffers for
  // single-threaded encode are freed in case of an error during gm.
  gm_dealloc_data(&cpi->td.gm_data);

  // This call ensures that CDEF search context buffers are deallocated in
  // case of an error during cdef search.
  av1_cdef_dealloc_data(cpi->cdef_search_ctx);
  aom_free(cpi->cdef_search_ctx);
  cpi->cdef_search_ctx = NULL;

  av1_dealloc_mb_data(&cpi->td.mb, num_planes);

  av1_dealloc_mb_wiener_var_pred_buf(&cpi->td);

  av1_free_txb_buf(cpi);
  av1_free_context_buffers(cm);

  aom_free_frame_buffer(&cpi->last_frame_uf);
#if !CONFIG_REALTIME_ONLY
  av1_free_restoration_buffers(cm);
  av1_free_firstpass_data(&cpi->firstpass_data);
#endif

  if (!is_stat_generation_stage(cpi)) {
    av1_free_cdef_buffers(cm, &cpi->ppi->p_mt_info.cdef_worker,
                          &cpi->mt_info.cdef_sync);
  }

  for (int plane = 0; plane < num_planes; plane++) {
    aom_free(cpi->pick_lr_ctxt.rusi[plane]);
    cpi->pick_lr_ctxt.rusi[plane] = NULL;
  }
  aom_free(cpi->pick_lr_ctxt.dgd_avg);
  cpi->pick_lr_ctxt.dgd_avg = NULL;

  aom_free_frame_buffer(&cpi->trial_frame_rst);
  aom_free_frame_buffer(&cpi->scaled_source);
  aom_free_frame_buffer(&cpi->scaled_last_source);
  aom_free_frame_buffer(&cpi->orig_source);
  aom_free_frame_buffer(&cpi->svc.source_last_TL0);

  free_token_info(token_info);

  av1_free_shared_coeff_buffer(&cpi->td.shared_coeff_buf);
  av1_free_sms_tree(&cpi->td);

  aom_free(cpi->td.mb.palette_buffer);
  release_compound_type_rd_buffers(&cpi->td.mb.comp_rd_buffer);
  aom_free(cpi->td.mb.tmp_conv_dst);
  for (int j = 0; j < 2; ++j) {
    aom_free(cpi->td.mb.tmp_pred_bufs[j]);
  }

#if CONFIG_DENOISE && !CONFIG_REALTIME_ONLY
  if (cpi->denoise_and_model) {
    aom_denoise_and_model_free(cpi->denoise_and_model);
    cpi->denoise_and_model = NULL;
  }
#endif
#if !CONFIG_REALTIME_ONLY
  if (cpi->film_grain_table) {
    aom_film_grain_table_free(cpi->film_grain_table);
    aom_free(cpi->film_grain_table);
    cpi->film_grain_table = NULL;
  }
#endif

  if (cpi->ppi->use_svc) av1_free_svc_cyclic_refresh(cpi);
  aom_free(cpi->svc.layer_context);
  cpi->svc.layer_context = NULL;

  aom_free(cpi->consec_zero_mv);
  cpi->consec_zero_mv = NULL;
  cpi->consec_zero_mv_alloc_size = 0;

  aom_free(cpi->src_sad_blk_64x64);
  cpi->src_sad_blk_64x64 = NULL;

  aom_free(cpi->mb_weber_stats);
  cpi->mb_weber_stats = NULL;

  if (cpi->oxcf.enable_rate_guide_deltaq) {
    aom_free(cpi->prep_rate_estimates);
    cpi->prep_rate_estimates = NULL;

    aom_free(cpi->ext_rate_distribution);
    cpi->ext_rate_distribution = NULL;
  }

  aom_free(cpi->mb_delta_q);
  cpi->mb_delta_q = NULL;
}

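// Lazily allocates the pixel-level gradient cache used when gradient caching
// for HOG (histogram of oriented gradients) is enabled, and points the
// MACROBLOCK at it.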
static inline void allocate_gradient_info_for_hog(AV1_COMP *cpi) {
  if (!is_gradient_caching_for_hog_enabled(cpi)) return;

  PixelLevelGradientInfo *pixel_gradient_info = cpi->td.pixel_gradient_info;
  if (!pixel_gradient_info) {
    const AV1_COMMON *const cm = &cpi->common;
    const int plane_types = PLANE_TYPES >> cm->seq_params->monochrome;
    CHECK_MEM_ERROR(
        cm, pixel_gradient_info,
        aom_malloc(sizeof(*pixel_gradient_info) * plane_types * MAX_SB_SQUARE));
    cpi->td.pixel_gradient_info = pixel_gradient_info;
  }

  cpi->td.mb.pixel_gradient_info = pixel_gradient_info;
}

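// Lazily allocates the cache of source variances of the 4x4 sub-blocks of a
// superblock, and points the MACROBLOCK at it.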
static inline void allocate_src_var_of_4x4_sub_block_buf(AV1_COMP *cpi) {
  if (!is_src_var_for_4x4_sub_blocks_caching_enabled(cpi)) return;

  Block4x4VarInfo *source_variance_info =
      cpi->td.src_var_info_of_4x4_sub_blocks;
  if (!source_variance_info) {
    const AV1_COMMON *const cm = &cpi->common;
    const BLOCK_SIZE sb_size = cm->seq_params->sb_size;
    const int mi_count_in_sb = mi_size_wide[sb_size] * mi_size_high[sb_size];
    CHECK_MEM_ERROR(cm, source_variance_info,
                    aom_malloc(sizeof(*source_variance_info) * mi_count_in_sb));
    cpi->td.src_var_info_of_4x4_sub_blocks = source_variance_info;
  }

  cpi->td.mb.src_var_info_of_4x4_sub_blocks = source_variance_info;
}

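// (Re)allocates the 64x64 variance trees used by variance-based partitioning:
// one tree for a 64x64 superblock, four for a 128x128 superblock.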
static inline void variance_partition_alloc(AV1_COMP *cpi) {
  AV1_COMMON *const cm = &cpi->common;
  const int num_64x64_blocks = (cm->seq_params->sb_size == BLOCK_64X64) ? 1 : 4;
  if (cpi->td.vt64x64) {
    if (num_64x64_blocks != cpi->td.num_64x64_blocks) {
      aom_free(cpi->td.vt64x64);
      cpi->td.vt64x64 = NULL;
    }
  }
  if (!cpi->td.vt64x64) {
    CHECK_MEM_ERROR(cm, cpi->td.vt64x64,
                    aom_malloc(sizeof(*cpi->td.vt64x64) * num_64x64_blocks));
    cpi->td.num_64x64_blocks = num_64x64_blocks;
  }
}

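// Returns a source buffer scaled to scaled_width x scaled_height. If the
// requested size matches the unscaled source, that buffer is returned
// directly; otherwise cpi->scaled_source is (re)allocated and filled with a
// non-normative resize of the unscaled source.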
static inline YV12_BUFFER_CONFIG *realloc_and_scale_source(AV1_COMP *cpi,
                                                           int scaled_width,
                                                           int scaled_height) {
  AV1_COMMON *cm = &cpi->common;
  const int num_planes = av1_num_planes(cm);

  if (scaled_width == cpi->unscaled_source->y_crop_width &&
      scaled_height == cpi->unscaled_source->y_crop_height) {
    return cpi->unscaled_source;
  }

  if (aom_realloc_frame_buffer(
          &cpi->scaled_source, scaled_width, scaled_height,
          cm->seq_params->subsampling_x, cm->seq_params->subsampling_y,
          cm->seq_params->use_highbitdepth, AOM_BORDER_IN_PIXELS,
          cm->features.byte_alignment, NULL, NULL, NULL, cpi->alloc_pyramid,
          0))
    aom_internal_error(cm->error, AOM_CODEC_MEM_ERROR,
                       "Failed to reallocate scaled source buffer");
  assert(cpi->scaled_source.y_crop_width == scaled_width);
  assert(cpi->scaled_source.y_crop_height == scaled_height);
  if (!av1_resize_and_extend_frame_nonnormative(
          cpi->unscaled_source, &cpi->scaled_source,
          (int)cm->seq_params->bit_depth, num_planes))
    aom_internal_error(cm->error, AOM_CODEC_MEM_ERROR,
                       "Failed to reallocate buffers during resize");
  return &cpi->scaled_source;
}

// Deallocate the thread_data allocated for the encode worker threads.
static inline void free_thread_data(AV1_PRIMARY *ppi) {
  PrimaryMultiThreadInfo *const p_mt_info = &ppi->p_mt_info;
  const int num_tf_workers =
      AOMMIN(p_mt_info->num_mod_workers[MOD_TF], p_mt_info->num_workers);
  const int num_tpl_workers =
      AOMMIN(p_mt_info->num_mod_workers[MOD_TPL], p_mt_info->num_workers);
  const int is_highbitdepth = ppi->seq_params.use_highbitdepth;
  const int num_planes = ppi->seq_params.monochrome ? 1 : MAX_MB_PLANE;
  for (int t = 1; t < p_mt_info->num_workers; ++t) {
    EncWorkerData *const thread_data = &p_mt_info->tile_thr_data[t];
    thread_data->td = thread_data->original_td;
    ThreadData *const td = thread_data->td;
    if (!td) continue;
    aom_free(td->tctx);
    aom_free(td->palette_buffer);
    aom_free(td->tmp_conv_dst);
    release_compound_type_rd_buffers(&td->comp_rd_buffer);
    for (int j = 0; j < 2; ++j) {
      aom_free(td->tmp_pred_bufs[j]);
    }
    aom_free(td->pixel_gradient_info);
    aom_free(td->src_var_info_of_4x4_sub_blocks);
    release_obmc_buffers(&td->obmc_buffer);
    aom_free(td->vt64x64);

    for (int x = 0; x < 2; x++) {
      for (int y = 0; y < 2; y++) {
        aom_free(td->hash_value_buffer[x][y]);
        td->hash_value_buffer[x][y] = NULL;
      }
    }
    aom_free(td->mv_costs_alloc);
    td->mv_costs_alloc = NULL;
    aom_free(td->dv_costs_alloc);
    td->dv_costs_alloc = NULL;
    aom_free(td->counts);
    av1_free_pmc(td->firstpass_ctx, num_planes);
    td->firstpass_ctx = NULL;
    av1_free_shared_coeff_buffer(&td->shared_coeff_buf);
    av1_free_sms_tree(td);
    // This call ensures that the buffers allocated by
    // tf_alloc_and_reset_data() in prepare_tf_workers() for MT encode are
    // freed in case an error is encountered during temporal filtering (due to
    // early termination, tf_dealloc_thread_data() in av1_tf_do_filtering_mt()
    // would not be invoked).
    if (t < num_tf_workers) tf_dealloc_data(&td->tf_data, is_highbitdepth);
    // This call ensures that tpl_tmp_buffers for MT encode are freed in case
    // of an error during tpl.
    if (t < num_tpl_workers) tpl_dealloc_temp_buffers(&td->tpl_tmp_buffers);
    // This call ensures that the buffers in gm_data for MT encode are freed
    // in case of an error during gm.
    gm_dealloc_data(&td->gm_data);
    av1_dealloc_mb_data(&td->mb, num_planes);
    aom_free(td->mb.sb_stats_cache);
    td->mb.sb_stats_cache = NULL;
    aom_free(td->mb.sb_fp_stats);
    td->mb.sb_fp_stats = NULL;
#if CONFIG_PARTITION_SEARCH_ORDER
    aom_free(td->mb.rdcost);
    td->mb.rdcost = NULL;
#endif
    av1_free_pc_tree_recursive(td->pc_root, num_planes, 0, 0,
                               SEARCH_PARTITION);
    td->pc_root = NULL;
    av1_dealloc_mb_wiener_var_pred_buf(td);
    aom_free(td);
    thread_data->td = NULL;
    thread_data->original_td = NULL;
  }
}

#ifdef __cplusplus
}  // extern "C"
#endif

#endif  // AOM_AV1_ENCODER_ENCODER_ALLOC_H_