tor-browser

The Tor Browser
git clone https://git.dasho.dev/tor-browser.git
Log | Files | Refs | README | LICENSE

encoder_alloc.h (19028B)


      1 /*
      2 * Copyright (c) 2020, Alliance for Open Media. All rights reserved.
      3 *
      4 * This source code is subject to the terms of the BSD 2 Clause License and
      5 * the Alliance for Open Media Patent License 1.0. If the BSD 2 Clause License
      6 * was not distributed with this source code in the LICENSE file, you can
      7 * obtain it at www.aomedia.org/license/software. If the Alliance for Open
      8 * Media Patent License 1.0 was not distributed with this source code in the
      9 * PATENTS file, you can obtain it at www.aomedia.org/license/patent.
     10 */
     11 
     12 #ifndef AOM_AV1_ENCODER_ENCODER_ALLOC_H_
     13 #define AOM_AV1_ENCODER_ENCODER_ALLOC_H_
     14 
     15 #include "av1/encoder/block.h"
     16 #include "av1/encoder/encodeframe_utils.h"
     17 #include "av1/encoder/encoder.h"
     18 #include "av1/encoder/encodetxb.h"
     19 #include "av1/encoder/ethread.h"
     20 #include "av1/encoder/global_motion_facade.h"
     21 #include "av1/encoder/intra_mode_search_utils.h"
     22 #include "av1/encoder/pickcdef.h"
     23 
     24 #ifdef __cplusplus
     25 extern "C" {
     26 #endif
     27 
     28 static inline void dealloc_context_buffers_ext(
     29    MBMIExtFrameBufferInfo *mbmi_ext_info) {
     30  aom_free(mbmi_ext_info->frame_base);
     31  mbmi_ext_info->frame_base = NULL;
     32  mbmi_ext_info->alloc_size = 0;
     33 }
     34 
     35 static inline void alloc_context_buffers_ext(
     36    AV1_COMMON *cm, MBMIExtFrameBufferInfo *mbmi_ext_info) {
     37  const CommonModeInfoParams *const mi_params = &cm->mi_params;
     38 
     39  const int mi_alloc_size_1d = mi_size_wide[mi_params->mi_alloc_bsize];
     40  const int mi_alloc_rows =
     41      (mi_params->mi_rows + mi_alloc_size_1d - 1) / mi_alloc_size_1d;
     42  const int mi_alloc_cols =
     43      (mi_params->mi_cols + mi_alloc_size_1d - 1) / mi_alloc_size_1d;
     44  const int new_ext_mi_size = mi_alloc_rows * mi_alloc_cols;
     45 
     46  if (new_ext_mi_size > mbmi_ext_info->alloc_size) {
     47    dealloc_context_buffers_ext(mbmi_ext_info);
     48    CHECK_MEM_ERROR(
     49        cm, mbmi_ext_info->frame_base,
     50        aom_malloc(new_ext_mi_size * sizeof(*mbmi_ext_info->frame_base)));
     51    mbmi_ext_info->alloc_size = new_ext_mi_size;
     52  }
     53  // The stride needs to be updated regardless of whether new allocation
     54  // happened or not.
     55  mbmi_ext_info->stride = mi_alloc_cols;
     56 }
     57 
// Sets up the per-frame mode-info geometry and allocates the core encoder
// buffers owned by cpi->td: the txb buffer, the MV cost table, the shared
// coefficient buffer, the SMS tree and the first-pass PICK_MODE_CONTEXT.
// Allocation failures are reported through cm->error (AOM_CODEC_MEM_ERROR).
static inline void alloc_compressor_data(AV1_COMP *cpi) {
  AV1_COMMON *cm = &cpi->common;
  CommonModeInfoParams *const mi_params = &cm->mi_params;

  // Setup mi_params
  mi_params->set_mb_mi(mi_params, cm->width, cm->height,
                       cpi->sf.part_sf.default_min_partition_size);

  // The txb buffer is only needed outside the stats-gathering pass.
  if (!is_stat_generation_stage(cpi)) av1_alloc_txb_buf(cpi);

  // Drop any previous MV cost table; aom_free(NULL) is a no-op.
  aom_free(cpi->td.mv_costs_alloc);
  cpi->td.mv_costs_alloc = NULL;
  // Avoid the memory allocation of 'mv_costs_alloc' for allintra encoding
  // mode.
  if (cpi->oxcf.kf_cfg.key_freq_max != 0) {
    CHECK_MEM_ERROR(cm, cpi->td.mv_costs_alloc,
                    (MvCosts *)aom_calloc(1, sizeof(*cpi->td.mv_costs_alloc)));
    cpi->td.mb.mv_costs = cpi->td.mv_costs_alloc;
  }

  av1_setup_shared_coeff_buffer(cm->seq_params, &cpi->td.shared_coeff_buf,
                                cm->error);
  if (av1_setup_sms_tree(cpi, &cpi->td)) {
    aom_internal_error(cm->error, AOM_CODEC_MEM_ERROR,
                       "Failed to allocate SMS tree");
  }
  // First-pass partitioning works on fixed 16x16 blocks.
  cpi->td.firstpass_ctx =
      av1_alloc_pmc(cpi, BLOCK_16X16, &cpi->td.shared_coeff_buf);
  if (!cpi->td.firstpass_ctx)
    aom_internal_error(cm->error, AOM_CODEC_MEM_ERROR,
                       "Failed to allocate PICK_MODE_CONTEXT");
}
     90 
     91 // Allocate mbmi buffers which are used to store mode information at block
     92 // level.
     93 static inline void alloc_mb_mode_info_buffers(AV1_COMP *const cpi) {
     94  AV1_COMMON *const cm = &cpi->common;
     95  if (av1_alloc_context_buffers(cm, cm->width, cm->height,
     96                                cpi->sf.part_sf.default_min_partition_size)) {
     97    aom_internal_error(cm->error, AOM_CODEC_MEM_ERROR,
     98                       "Failed to allocate context buffers");
     99  }
    100 
    101  if (!is_stat_generation_stage(cpi))
    102    alloc_context_buffers_ext(cm, &cpi->mbmi_ext_info);
    103 }
    104 
    105 static inline void realloc_segmentation_maps(AV1_COMP *cpi) {
    106  AV1_COMMON *const cm = &cpi->common;
    107  CommonModeInfoParams *const mi_params = &cm->mi_params;
    108 
    109  // Create the encoder segmentation map and set all entries to 0
    110  aom_free(cpi->enc_seg.map);
    111  CHECK_MEM_ERROR(cm, cpi->enc_seg.map,
    112                  aom_calloc(mi_params->mi_rows * mi_params->mi_cols, 1));
    113 
    114  // Create a map used for cyclic background refresh.
    115  if (cpi->cyclic_refresh) av1_cyclic_refresh_free(cpi->cyclic_refresh);
    116  CHECK_MEM_ERROR(
    117      cm, cpi->cyclic_refresh,
    118      av1_cyclic_refresh_alloc(mi_params->mi_rows, mi_params->mi_cols));
    119 
    120  // Create a map used to mark inactive areas.
    121  aom_free(cpi->active_map.map);
    122  CHECK_MEM_ERROR(cm, cpi->active_map.map,
    123                  aom_calloc(mi_params->mi_rows * mi_params->mi_cols, 1));
    124 }
    125 
    126 static inline void alloc_obmc_buffers(OBMCBuffer *obmc_buffer,
    127                                      struct aom_internal_error_info *error) {
    128  AOM_CHECK_MEM_ERROR(
    129      error, obmc_buffer->wsrc,
    130      (int32_t *)aom_memalign(16, MAX_SB_SQUARE * sizeof(*obmc_buffer->wsrc)));
    131  AOM_CHECK_MEM_ERROR(
    132      error, obmc_buffer->mask,
    133      (int32_t *)aom_memalign(16, MAX_SB_SQUARE * sizeof(*obmc_buffer->mask)));
    134  AOM_CHECK_MEM_ERROR(
    135      error, obmc_buffer->above_pred,
    136      (uint8_t *)aom_memalign(
    137          16, MAX_MB_PLANE * MAX_SB_SQUARE * sizeof(*obmc_buffer->above_pred)));
    138  AOM_CHECK_MEM_ERROR(
    139      error, obmc_buffer->left_pred,
    140      (uint8_t *)aom_memalign(
    141          16, MAX_MB_PLANE * MAX_SB_SQUARE * sizeof(*obmc_buffer->left_pred)));
    142 }
    143 
    144 static inline void release_obmc_buffers(OBMCBuffer *obmc_buffer) {
    145  aom_free(obmc_buffer->mask);
    146  aom_free(obmc_buffer->above_pred);
    147  aom_free(obmc_buffer->left_pred);
    148  aom_free(obmc_buffer->wsrc);
    149 
    150  obmc_buffer->mask = NULL;
    151  obmc_buffer->above_pred = NULL;
    152  obmc_buffer->left_pred = NULL;
    153  obmc_buffer->wsrc = NULL;
    154 }
    155 
    156 static inline void alloc_compound_type_rd_buffers(
    157    struct aom_internal_error_info *error, CompoundTypeRdBuffers *const bufs) {
    158  AOM_CHECK_MEM_ERROR(
    159      error, bufs->pred0,
    160      (uint8_t *)aom_memalign(16, 2 * MAX_SB_SQUARE * sizeof(*bufs->pred0)));
    161  AOM_CHECK_MEM_ERROR(
    162      error, bufs->pred1,
    163      (uint8_t *)aom_memalign(16, 2 * MAX_SB_SQUARE * sizeof(*bufs->pred1)));
    164  AOM_CHECK_MEM_ERROR(
    165      error, bufs->residual1,
    166      (int16_t *)aom_memalign(32, MAX_SB_SQUARE * sizeof(*bufs->residual1)));
    167  AOM_CHECK_MEM_ERROR(
    168      error, bufs->diff10,
    169      (int16_t *)aom_memalign(32, MAX_SB_SQUARE * sizeof(*bufs->diff10)));
    170  AOM_CHECK_MEM_ERROR(error, bufs->tmp_best_mask_buf,
    171                      (uint8_t *)aom_malloc(2 * MAX_SB_SQUARE *
    172                                            sizeof(*bufs->tmp_best_mask_buf)));
    173 }
    174 
    175 static inline void release_compound_type_rd_buffers(
    176    CompoundTypeRdBuffers *const bufs) {
    177  aom_free(bufs->pred0);
    178  aom_free(bufs->pred1);
    179  aom_free(bufs->residual1);
    180  aom_free(bufs->diff10);
    181  aom_free(bufs->tmp_best_mask_buf);
    182  av1_zero(*bufs);  // Set all pointers to NULL for safety.
    183 }
    184 
// Releases every encoder-side buffer owned by this AV1_COMP: context and
// token buffers, segmentation/refresh/activity maps, per-thread search
// scratch, frame buffers and SVC layer state. Safe to call on a partially
// initialized compressor: aom_free(NULL) is a no-op and freed pointers are
// reset to NULL to guard against double free / use-after-free.
static inline void dealloc_compressor_data(AV1_COMP *cpi) {
  AV1_COMMON *const cm = &cpi->common;
  TokenInfo *token_info = &cpi->token_info;
  AV1EncRowMultiThreadInfo *const enc_row_mt = &cpi->mt_info.enc_row_mt;
  const int num_planes = av1_num_planes(cm);
  dealloc_context_buffers_ext(&cpi->mbmi_ext_info);

  aom_free(cpi->tile_data);
  cpi->tile_data = NULL;
  cpi->allocated_tiles = 0;
  enc_row_mt->allocated_tile_cols = 0;
  enc_row_mt->allocated_tile_rows = 0;

  // Delete segmentation map
  aom_free(cpi->enc_seg.map);
  cpi->enc_seg.map = NULL;

  av1_cyclic_refresh_free(cpi->cyclic_refresh);
  cpi->cyclic_refresh = NULL;

  aom_free(cpi->active_map.map);
  cpi->active_map.map = NULL;

  aom_free(cpi->roi.roi_map);
  cpi->roi.roi_map = NULL;

  aom_free(cpi->ssim_rdmult_scaling_factors);
  cpi->ssim_rdmult_scaling_factors = NULL;

  aom_free(cpi->tpl_rdmult_scaling_factors);
  cpi->tpl_rdmult_scaling_factors = NULL;

#if CONFIG_TUNE_VMAF
  aom_free(cpi->vmaf_info.rdmult_scaling_factors);
  cpi->vmaf_info.rdmult_scaling_factors = NULL;
  aom_close_vmaf_model(cpi->vmaf_info.vmaf_model);
#endif

#if CONFIG_TUNE_BUTTERAUGLI
  aom_free(cpi->butteraugli_info.rdmult_scaling_factors);
  cpi->butteraugli_info.rdmult_scaling_factors = NULL;
  aom_free_frame_buffer(&cpi->butteraugli_info.source);
  aom_free_frame_buffer(&cpi->butteraugli_info.resized_source);
#endif

#if CONFIG_SALIENCY_MAP
  aom_free(cpi->saliency_map);
  aom_free(cpi->sm_scaling_factor);
#endif

  release_obmc_buffers(&cpi->td.mb.obmc_buffer);

  aom_free(cpi->td.mv_costs_alloc);
  cpi->td.mv_costs_alloc = NULL;
  aom_free(cpi->td.dv_costs_alloc);
  cpi->td.dv_costs_alloc = NULL;

  aom_free(cpi->td.mb.sb_stats_cache);
  cpi->td.mb.sb_stats_cache = NULL;

  aom_free(cpi->td.mb.sb_fp_stats);
  cpi->td.mb.sb_fp_stats = NULL;

#if CONFIG_PARTITION_SEARCH_ORDER
  aom_free(cpi->td.mb.rdcost);
  cpi->td.mb.rdcost = NULL;
#endif

  av1_free_pc_tree_recursive(cpi->td.pc_root, num_planes, 0, 0,
                             cpi->sf.part_sf.partition_search_type);
  cpi->td.pc_root = NULL;

  // Two hash-value buffers used by intrabc hash-based search.
  for (int i = 0; i < 2; i++) {
    aom_free(cpi->td.mb.intrabc_hash_info.hash_value_buffer[i]);
    cpi->td.mb.intrabc_hash_info.hash_value_buffer[i] = NULL;
  }

  av1_hash_table_destroy(&cpi->td.mb.intrabc_hash_info.intrabc_hash_table);

  aom_free(cm->tpl_mvs);
  cm->tpl_mvs = NULL;

  aom_free(cpi->td.pixel_gradient_info);
  cpi->td.pixel_gradient_info = NULL;

  aom_free(cpi->td.src_var_info_of_4x4_sub_blocks);
  cpi->td.src_var_info_of_4x4_sub_blocks = NULL;

  aom_free(cpi->td.vt64x64);
  cpi->td.vt64x64 = NULL;

  av1_free_pmc(cpi->td.firstpass_ctx, num_planes);
  cpi->td.firstpass_ctx = NULL;

  const int is_highbitdepth = cpi->tf_ctx.is_highbitdepth;
  // This call ensures that the buffers allocated by tf_alloc_and_reset_data()
  // in av1_temporal_filter() for single-threaded encode are freed in case an
  // error is encountered during temporal filtering (due to early termination
  // tf_dealloc_data() in av1_temporal_filter() would not be invoked).
  tf_dealloc_data(&cpi->td.tf_data, is_highbitdepth);

  // This call ensures that tpl_tmp_buffers for single-threaded encode are freed
  // in case of an error during tpl.
  tpl_dealloc_temp_buffers(&cpi->td.tpl_tmp_buffers);

  // This call ensures that the global motion (gm) data buffers for
  // single-threaded encode are freed in case of an error during gm.
  gm_dealloc_data(&cpi->td.gm_data);

  // This call ensures that CDEF search context buffers are deallocated in case
  // of an error during cdef search.
  av1_cdef_dealloc_data(cpi->cdef_search_ctx);
  aom_free(cpi->cdef_search_ctx);
  cpi->cdef_search_ctx = NULL;

  av1_dealloc_mb_data(&cpi->td.mb, num_planes);

  av1_dealloc_mb_wiener_var_pred_buf(&cpi->td);

  av1_free_txb_buf(cpi);
  av1_free_context_buffers(cm);

  aom_free_frame_buffer(&cpi->last_frame_uf);
#if !CONFIG_REALTIME_ONLY
  av1_free_restoration_buffers(cm);
  av1_free_firstpass_data(&cpi->firstpass_data);
#endif

  if (!is_stat_generation_stage(cpi)) {
    av1_free_cdef_buffers(cm, &cpi->ppi->p_mt_info.cdef_worker,
                          &cpi->mt_info.cdef_sync);
  }

  // Loop-restoration search context: per-plane unit info plus the shared
  // average buffer.
  for (int plane = 0; plane < num_planes; plane++) {
    aom_free(cpi->pick_lr_ctxt.rusi[plane]);
    cpi->pick_lr_ctxt.rusi[plane] = NULL;
  }
  aom_free(cpi->pick_lr_ctxt.dgd_avg);
  cpi->pick_lr_ctxt.dgd_avg = NULL;

  aom_free_frame_buffer(&cpi->trial_frame_rst);
  aom_free_frame_buffer(&cpi->scaled_source);
  aom_free_frame_buffer(&cpi->scaled_last_source);
  aom_free_frame_buffer(&cpi->orig_source);
  aom_free_frame_buffer(&cpi->svc.source_last_TL0);

  free_token_info(token_info);

  av1_free_shared_coeff_buffer(&cpi->td.shared_coeff_buf);
  av1_free_sms_tree(&cpi->td);

  aom_free(cpi->td.mb.palette_buffer);
  release_compound_type_rd_buffers(&cpi->td.mb.comp_rd_buffer);
  aom_free(cpi->td.mb.tmp_conv_dst);
  for (int j = 0; j < 2; ++j) {
    aom_free(cpi->td.mb.tmp_pred_bufs[j]);
  }

#if CONFIG_DENOISE && !CONFIG_REALTIME_ONLY
  if (cpi->denoise_and_model) {
    aom_denoise_and_model_free(cpi->denoise_and_model);
    cpi->denoise_and_model = NULL;
  }
#endif
#if !CONFIG_REALTIME_ONLY
  if (cpi->film_grain_table) {
    aom_film_grain_table_free(cpi->film_grain_table);
    aom_free(cpi->film_grain_table);
    cpi->film_grain_table = NULL;
  }
#endif

  if (cpi->ppi->use_svc) av1_free_svc_cyclic_refresh(cpi);
  aom_free(cpi->svc.layer_context);
  cpi->svc.layer_context = NULL;

  aom_free(cpi->consec_zero_mv);
  cpi->consec_zero_mv = NULL;
  cpi->consec_zero_mv_alloc_size = 0;

  aom_free(cpi->src_sad_blk_64x64);
  cpi->src_sad_blk_64x64 = NULL;

  aom_free(cpi->mb_weber_stats);
  cpi->mb_weber_stats = NULL;

  if (cpi->oxcf.enable_rate_guide_deltaq) {
    aom_free(cpi->prep_rate_estimates);
    cpi->prep_rate_estimates = NULL;

    aom_free(cpi->ext_rate_distribution);
    cpi->ext_rate_distribution = NULL;
  }

  aom_free(cpi->mb_delta_q);
  cpi->mb_delta_q = NULL;
}
    382 
    383 static inline void allocate_gradient_info_for_hog(AV1_COMP *cpi) {
    384  if (!is_gradient_caching_for_hog_enabled(cpi)) return;
    385 
    386  PixelLevelGradientInfo *pixel_gradient_info = cpi->td.pixel_gradient_info;
    387  if (!pixel_gradient_info) {
    388    const AV1_COMMON *const cm = &cpi->common;
    389    const int plane_types = PLANE_TYPES >> cm->seq_params->monochrome;
    390    CHECK_MEM_ERROR(
    391        cm, pixel_gradient_info,
    392        aom_malloc(sizeof(*pixel_gradient_info) * plane_types * MAX_SB_SQUARE));
    393    cpi->td.pixel_gradient_info = pixel_gradient_info;
    394  }
    395 
    396  cpi->td.mb.pixel_gradient_info = pixel_gradient_info;
    397 }
    398 
    399 static inline void allocate_src_var_of_4x4_sub_block_buf(AV1_COMP *cpi) {
    400  if (!is_src_var_for_4x4_sub_blocks_caching_enabled(cpi)) return;
    401 
    402  Block4x4VarInfo *source_variance_info =
    403      cpi->td.src_var_info_of_4x4_sub_blocks;
    404  if (!source_variance_info) {
    405    const AV1_COMMON *const cm = &cpi->common;
    406    const BLOCK_SIZE sb_size = cm->seq_params->sb_size;
    407    const int mi_count_in_sb = mi_size_wide[sb_size] * mi_size_high[sb_size];
    408    CHECK_MEM_ERROR(cm, source_variance_info,
    409                    aom_malloc(sizeof(*source_variance_info) * mi_count_in_sb));
    410    cpi->td.src_var_info_of_4x4_sub_blocks = source_variance_info;
    411  }
    412 
    413  cpi->td.mb.src_var_info_of_4x4_sub_blocks = source_variance_info;
    414 }
    415 
    416 static inline void variance_partition_alloc(AV1_COMP *cpi) {
    417  AV1_COMMON *const cm = &cpi->common;
    418  const int num_64x64_blocks = (cm->seq_params->sb_size == BLOCK_64X64) ? 1 : 4;
    419  if (cpi->td.vt64x64) {
    420    if (num_64x64_blocks != cpi->td.num_64x64_blocks) {
    421      aom_free(cpi->td.vt64x64);
    422      cpi->td.vt64x64 = NULL;
    423    }
    424  }
    425  if (!cpi->td.vt64x64) {
    426    CHECK_MEM_ERROR(cm, cpi->td.vt64x64,
    427                    aom_malloc(sizeof(*cpi->td.vt64x64) * num_64x64_blocks));
    428    cpi->td.num_64x64_blocks = num_64x64_blocks;
    429  }
    430 }
    431 
// Returns the source frame at scaled_width x scaled_height. If the requested
// size already matches the unscaled source, that buffer is returned directly;
// otherwise cpi->scaled_source is (re)allocated and filled by non-normative
// resampling of the unscaled source. Failures are raised through cm->error
// with AOM_CODEC_MEM_ERROR.
static inline YV12_BUFFER_CONFIG *realloc_and_scale_source(AV1_COMP *cpi,
                                                          int scaled_width,
                                                          int scaled_height) {
  AV1_COMMON *cm = &cpi->common;
  const int num_planes = av1_num_planes(cm);

  // Fast path: no scaling needed.
  if (scaled_width == cpi->unscaled_source->y_crop_width &&
      scaled_height == cpi->unscaled_source->y_crop_height) {
    return cpi->unscaled_source;
  }

  if (aom_realloc_frame_buffer(
          &cpi->scaled_source, scaled_width, scaled_height,
          cm->seq_params->subsampling_x, cm->seq_params->subsampling_y,
          cm->seq_params->use_highbitdepth, AOM_BORDER_IN_PIXELS,
          cm->features.byte_alignment, NULL, NULL, NULL, cpi->alloc_pyramid, 0))
    aom_internal_error(cm->error, AOM_CODEC_MEM_ERROR,
                       "Failed to reallocate scaled source buffer");
  assert(cpi->scaled_source.y_crop_width == scaled_width);
  assert(cpi->scaled_source.y_crop_height == scaled_height);
  if (!av1_resize_and_extend_frame_nonnormative(
          cpi->unscaled_source, &cpi->scaled_source,
          (int)cm->seq_params->bit_depth, num_planes))
    aom_internal_error(cm->error, AOM_CODEC_MEM_ERROR,
                       "Failed to reallocate buffers during resize");
  return &cpi->scaled_source;
}
    459 
// Deallocate allocated thread_data.
// Frees the per-worker ThreadData of every secondary worker (t >= 1; the
// first worker shares the main thread's cpi->td, which is released by
// dealloc_compressor_data()). Mirrors the per-worker allocations, then frees
// the ThreadData struct itself and clears the tile_thr_data back-pointers.
static inline void free_thread_data(AV1_PRIMARY *ppi) {
  PrimaryMultiThreadInfo *const p_mt_info = &ppi->p_mt_info;
  // Temporal-filter and TPL scratch buffers only exist for the workers
  // assigned to those modules, so their counts bound the conditional frees.
  const int num_tf_workers =
      AOMMIN(p_mt_info->num_mod_workers[MOD_TF], p_mt_info->num_workers);
  const int num_tpl_workers =
      AOMMIN(p_mt_info->num_mod_workers[MOD_TPL], p_mt_info->num_workers);
  const int is_highbitdepth = ppi->seq_params.use_highbitdepth;
  const int num_planes = ppi->seq_params.monochrome ? 1 : MAX_MB_PLANE;
  for (int t = 1; t < p_mt_info->num_workers; ++t) {
    EncWorkerData *const thread_data = &p_mt_info->tile_thr_data[t];
    // Restore the original ThreadData pointer in case it was redirected.
    thread_data->td = thread_data->original_td;
    ThreadData *const td = thread_data->td;
    if (!td) continue;
    aom_free(td->tctx);
    aom_free(td->palette_buffer);
    aom_free(td->tmp_conv_dst);
    release_compound_type_rd_buffers(&td->comp_rd_buffer);
    for (int j = 0; j < 2; ++j) {
      aom_free(td->tmp_pred_bufs[j]);
    }
    aom_free(td->pixel_gradient_info);
    aom_free(td->src_var_info_of_4x4_sub_blocks);
    release_obmc_buffers(&td->obmc_buffer);
    aom_free(td->vt64x64);

    // Intrabc hash-value buffers (two per worker).
    for (int x = 0; x < 2; x++) {
      aom_free(td->hash_value_buffer[x]);
      td->hash_value_buffer[x] = NULL;
    }
    aom_free(td->mv_costs_alloc);
    td->mv_costs_alloc = NULL;
    aom_free(td->dv_costs_alloc);
    td->dv_costs_alloc = NULL;
    aom_free(td->counts);
    av1_free_pmc(td->firstpass_ctx, num_planes);
    td->firstpass_ctx = NULL;
    av1_free_shared_coeff_buffer(&td->shared_coeff_buf);
    av1_free_sms_tree(td);
    // This call ensures that the buffers allocated by tf_alloc_and_reset_data()
    // in prepare_tf_workers() for MT encode are freed in case an error is
    // encountered during temporal filtering (due to early termination
    // tf_dealloc_thread_data() in av1_tf_do_filtering_mt() would not be
    // invoked).
    if (t < num_tf_workers) tf_dealloc_data(&td->tf_data, is_highbitdepth);
    // This call ensures that tpl_tmp_buffers for MT encode are freed in case of
    // an error during tpl.
    if (t < num_tpl_workers) tpl_dealloc_temp_buffers(&td->tpl_tmp_buffers);
    // This call ensures that the buffers in gm_data for MT encode are freed in
    // case of an error during gm.
    gm_dealloc_data(&td->gm_data);
    av1_dealloc_mb_data(&td->mb, num_planes);
    aom_free(td->mb.sb_stats_cache);
    td->mb.sb_stats_cache = NULL;
    aom_free(td->mb.sb_fp_stats);
    td->mb.sb_fp_stats = NULL;
#if CONFIG_PARTITION_SEARCH_ORDER
    aom_free(td->mb.rdcost);
    td->mb.rdcost = NULL;
#endif
    av1_free_pc_tree_recursive(td->pc_root, num_planes, 0, 0, SEARCH_PARTITION);
    td->pc_root = NULL;
    av1_dealloc_mb_wiener_var_pred_buf(td);
    // Finally release the ThreadData itself and drop both back-pointers.
    aom_free(td);
    thread_data->td = NULL;
    thread_data->original_td = NULL;
  }
}
    528 
    529 #ifdef __cplusplus
    530 }  // extern "C"
    531 #endif
    532 
    533 #endif  // AOM_AV1_ENCODER_ENCODER_ALLOC_H_