1 /**************************************************************************
2 *
3 * Copyright 2017 Advanced Micro Devices, Inc.
4 *
5 * SPDX-License-Identifier: MIT
6 *
7 **************************************************************************/
8
9 #include "radeon_vcn_enc.h"
10 #include "ac_vcn_enc_av1_default_cdf.h"
11
12 #include "pipe/p_video_codec.h"
13 #include "radeon_video.h"
14 #include "radeonsi/si_pipe.h"
15 #include "util/u_memory.h"
16 #include "util/u_video.h"
17 #include "vl/vl_video_buffer.h"
18
19 static const unsigned index_to_shifts[4] = {24, 16, 8, 0};
20
21 /* set quality modes from the input */
radeon_vcn_enc_quality_modes(struct radeon_encoder * enc,struct pipe_enc_quality_modes * in)22 static void radeon_vcn_enc_quality_modes(struct radeon_encoder *enc,
23 struct pipe_enc_quality_modes *in)
24 {
25 rvcn_enc_quality_modes_t *p = &enc->enc_pic.quality_modes;
26
27 p->preset_mode = in->preset_mode > RENCODE_PRESET_MODE_HIGH_QUALITY
28 ? RENCODE_PRESET_MODE_HIGH_QUALITY
29 : in->preset_mode;
30
31 if (u_reduce_video_profile(enc->base.profile) != PIPE_VIDEO_FORMAT_AV1 &&
32 p->preset_mode == RENCODE_PRESET_MODE_HIGH_QUALITY)
33 p->preset_mode = RENCODE_PRESET_MODE_QUALITY;
34
35 p->pre_encode_mode = in->pre_encode_mode ? RENCODE_PREENCODE_MODE_4X
36 : RENCODE_PREENCODE_MODE_NONE;
37
38 if (enc->enc_pic.rc_session_init.rate_control_method == RENCODE_RATE_CONTROL_METHOD_QUALITY_VBR)
39 p->pre_encode_mode = RENCODE_PREENCODE_MODE_4X;
40
41 p->vbaq_mode = in->vbaq_mode ? RENCODE_VBAQ_AUTO : RENCODE_VBAQ_NONE;
42
43 if (enc->enc_pic.rc_session_init.rate_control_method == RENCODE_RATE_CONTROL_METHOD_NONE)
44 p->vbaq_mode = RENCODE_VBAQ_NONE;
45
46 enc->enc_pic.quality_params.vbaq_mode = p->vbaq_mode;
47 enc->enc_pic.quality_params.scene_change_sensitivity = 0;
48 enc->enc_pic.quality_params.scene_change_min_idr_interval = 0;
49 enc->enc_pic.quality_params.two_pass_search_center_map_mode =
50 (enc->enc_pic.quality_modes.pre_encode_mode &&
51 !enc->enc_pic.spec_misc.b_picture_enabled) ? 1 : 0;
52 enc->enc_pic.quality_params.vbaq_strength = 0;
53 }
54
55 /* to process invalid frame rate */
radeon_vcn_enc_invalid_frame_rate(uint32_t * den,uint32_t * num)56 static void radeon_vcn_enc_invalid_frame_rate(uint32_t *den, uint32_t *num)
57 {
58 if (*den == 0 || *num == 0) {
59 *den = 1;
60 *num = 30;
61 }
62 }
63
radeon_vcn_per_frame_integer(uint32_t bitrate,uint32_t den,uint32_t num)64 static uint32_t radeon_vcn_per_frame_integer(uint32_t bitrate, uint32_t den, uint32_t num)
65 {
66 uint64_t rate_den = (uint64_t)bitrate * (uint64_t)den;
67
68 return (uint32_t)(rate_den/num);
69 }
70
radeon_vcn_per_frame_frac(uint32_t bitrate,uint32_t den,uint32_t num)71 static uint32_t radeon_vcn_per_frame_frac(uint32_t bitrate, uint32_t den, uint32_t num)
72 {
73 uint64_t rate_den = (uint64_t)bitrate * (uint64_t)den;
74 uint64_t remainder = rate_den % num;
75
76 return (uint32_t)((remainder << 32) / num);
77 }
78
79 /* block length for av1 and hevc is the same, 64, for avc 16 */
radeon_vcn_enc_blocks_in_frame(struct radeon_encoder * enc,uint32_t * width_in_block,uint32_t * height_in_block)80 static uint32_t radeon_vcn_enc_blocks_in_frame(struct radeon_encoder *enc,
81 uint32_t *width_in_block,
82 uint32_t *height_in_block)
83 {
84 bool is_h264 = u_reduce_video_profile(enc->base.profile) == PIPE_VIDEO_FORMAT_MPEG4_AVC;
85 uint32_t block_length = is_h264 ? PIPE_H264_MB_SIZE : PIPE_H265_ENC_CTB_SIZE;
86
87 *width_in_block = PIPE_ALIGN_IN_BLOCK_SIZE(enc->base.width, block_length);
88 *height_in_block = PIPE_ALIGN_IN_BLOCK_SIZE(enc->base.height, block_length);
89
90 return block_length;
91 }
92
radeon_vcn_enc_get_intra_refresh_param(struct radeon_encoder * enc,bool need_filter_overlap,struct pipe_enc_intra_refresh * intra_refresh)93 static void radeon_vcn_enc_get_intra_refresh_param(struct radeon_encoder *enc,
94 bool need_filter_overlap,
95 struct pipe_enc_intra_refresh *intra_refresh)
96 {
97 uint32_t width_in_block, height_in_block;
98
99 enc->enc_pic.intra_refresh.intra_refresh_mode = RENCODE_INTRA_REFRESH_MODE_NONE;
100 /* some exceptions where intra-refresh is disabled:
101 * 1. if B frame is enabled
102 * 2. if SVC (number of temproal layers is larger than 1) is enabled
103 */
104 if (enc->enc_pic.spec_misc.b_picture_enabled || enc->enc_pic.num_temporal_layers > 1) {
105 enc->enc_pic.intra_refresh.region_size = 0;
106 enc->enc_pic.intra_refresh.offset = 0;
107 enc->enc_pic.need_sequence_header = 0;
108 return;
109 }
110
111 radeon_vcn_enc_blocks_in_frame(enc, &width_in_block, &height_in_block);
112
113 switch(intra_refresh->mode) {
114 case INTRA_REFRESH_MODE_UNIT_ROWS:
115 if (intra_refresh->offset < height_in_block)
116 enc->enc_pic.intra_refresh.intra_refresh_mode
117 = RENCODE_INTRA_REFRESH_MODE_CTB_MB_ROWS;
118 break;
119 case INTRA_REFRESH_MODE_UNIT_COLUMNS:
120 if (intra_refresh->offset < width_in_block)
121 enc->enc_pic.intra_refresh.intra_refresh_mode
122 = RENCODE_INTRA_REFRESH_MODE_CTB_MB_COLUMNS;
123 break;
124 case INTRA_REFRESH_MODE_NONE:
125 default:
126 break;
127 };
128
129 /* with loop filters (avc/hevc/av1) enabled the region_size has to increase 1 to
130 * get overlapped (av1 is enabling it all the time). The region_size and offset
131 * require to be in unit of MB or CTB or SB according to different codecs.
132 */
133 if (enc->enc_pic.intra_refresh.intra_refresh_mode != RENCODE_INTRA_REFRESH_MODE_NONE) {
134 enc->enc_pic.intra_refresh.region_size = (need_filter_overlap) ?
135 intra_refresh->region_size + 1 :
136 intra_refresh->region_size;
137 enc->enc_pic.intra_refresh.offset = intra_refresh->offset;
138 enc->enc_pic.need_sequence_header = !!(intra_refresh->need_sequence_header);
139 } else {
140 enc->enc_pic.intra_refresh.region_size = 0;
141 enc->enc_pic.intra_refresh.offset = 0;
142 enc->enc_pic.need_sequence_header = 0;
143 }
144 }
145
radeon_vcn_enc_get_roi_param(struct radeon_encoder * enc,struct pipe_enc_roi * roi)146 static void radeon_vcn_enc_get_roi_param(struct radeon_encoder *enc,
147 struct pipe_enc_roi *roi)
148 {
149 struct si_screen *sscreen = (struct si_screen *)enc->screen;
150 bool is_av1 = u_reduce_video_profile(enc->base.profile)
151 == PIPE_VIDEO_FORMAT_AV1;
152 rvcn_enc_qp_map_t *qp_map = &enc->enc_pic.enc_qp_map;
153
154 if (!roi->num)
155 enc->enc_pic.enc_qp_map.qp_map_type = RENCODE_QP_MAP_TYPE_NONE;
156 else {
157 uint32_t width_in_block, height_in_block;
158 uint32_t block_length;
159 int32_t i, j, pa_format = 0;
160
161 qp_map->version = sscreen->info.vcn_ip_version >= VCN_5_0_0
162 ? RENCODE_QP_MAP_VCN5 : RENCODE_QP_MAP_LEGACY;
163
164 /* rate control is using a different qp map type, in case of below
165 * vcn_5_0_0 */
166 if (enc->enc_pic.rc_session_init.rate_control_method &&
167 (qp_map->version == RENCODE_QP_MAP_LEGACY)) {
168 enc->enc_pic.enc_qp_map.qp_map_type = RENCODE_QP_MAP_TYPE_MAP_PA;
169 pa_format = 1;
170 }
171 else
172 enc->enc_pic.enc_qp_map.qp_map_type = RENCODE_QP_MAP_TYPE_DELTA;
173
174 block_length = radeon_vcn_enc_blocks_in_frame(enc, &width_in_block, &height_in_block);
175
176 qp_map->width_in_block = width_in_block;
177 qp_map->height_in_block = height_in_block;
178
179 for (i = RENCODE_QP_MAP_MAX_REGIONS; i >= roi->num; i--)
180 enc->enc_pic.enc_qp_map.map[i].is_valid = false;
181
182 /* reverse the map sequence */
183 for (j = 0; i >= 0; i--, j++) {
184 struct rvcn_enc_qp_map_region *map = &enc->enc_pic.enc_qp_map.map[j];
185 struct pipe_enc_region_in_roi *region = &roi->region[i];
186
187 map->is_valid = region->valid;
188 if (region->valid) {
189 int32_t av1_qi_value;
190 /* mapped av1 qi into the legacy qp range by dividing by 5 and
191 * rounding up in any rate control mode.
192 */
193 if (is_av1 && (pa_format || (qp_map->version == RENCODE_QP_MAP_VCN5))) {
194 if (region->qp_value > 0)
195 av1_qi_value = (region->qp_value + 2) / 5;
196 else if (region->qp_value < 0)
197 av1_qi_value = (region->qp_value - 2) / 5;
198 else
199 av1_qi_value = region->qp_value;
200 map->qp_delta = av1_qi_value;
201 } else
202 map->qp_delta = region->qp_value;
203
204 map->x_in_unit = CLAMP((region->x / block_length), 0, width_in_block - 1);
205 map->y_in_unit = CLAMP((region->y / block_length), 0, height_in_block - 1);
206 map->width_in_unit = CLAMP((region->width / block_length), 0, width_in_block);
207 map->height_in_unit = CLAMP((region->height / block_length), 0, width_in_block);
208 }
209 }
210 }
211 }
212
radeon_vcn_enc_get_latency_param(struct radeon_encoder * enc)213 static void radeon_vcn_enc_get_latency_param(struct radeon_encoder *enc)
214 {
215 struct si_screen *sscreen = (struct si_screen *)enc->screen;
216
217 enc->enc_pic.enc_latency.encode_latency =
218 sscreen->debug_flags & DBG(LOW_LATENCY_ENCODE) ? 1000 : 0;
219 }
220
radeon_vcn_enc_h264_get_cropping_param(struct radeon_encoder * enc,struct pipe_h264_enc_picture_desc * pic)221 static void radeon_vcn_enc_h264_get_cropping_param(struct radeon_encoder *enc,
222 struct pipe_h264_enc_picture_desc *pic)
223 {
224 if (pic->seq.enc_frame_cropping_flag) {
225 enc->enc_pic.crop_left = pic->seq.enc_frame_crop_left_offset;
226 enc->enc_pic.crop_right = pic->seq.enc_frame_crop_right_offset;
227 enc->enc_pic.crop_top = pic->seq.enc_frame_crop_top_offset;
228 enc->enc_pic.crop_bottom = pic->seq.enc_frame_crop_bottom_offset;
229 } else {
230 enc->enc_pic.crop_left = 0;
231 enc->enc_pic.crop_right = 0;
232 enc->enc_pic.crop_top = 0;
233 enc->enc_pic.crop_bottom = 0;
234 }
235 }
236
radeon_vcn_enc_h264_get_dbk_param(struct radeon_encoder * enc,struct pipe_h264_enc_picture_desc * pic)237 static void radeon_vcn_enc_h264_get_dbk_param(struct radeon_encoder *enc,
238 struct pipe_h264_enc_picture_desc *pic)
239 {
240 enc->enc_pic.h264_deblock.disable_deblocking_filter_idc =
241 CLAMP(pic->dbk.disable_deblocking_filter_idc, 0, 2);
242 enc->enc_pic.h264_deblock.alpha_c0_offset_div2 = pic->dbk.alpha_c0_offset_div2;
243 enc->enc_pic.h264_deblock.beta_offset_div2 = pic->dbk.beta_offset_div2;
244 enc->enc_pic.h264_deblock.cb_qp_offset = pic->pic_ctrl.chroma_qp_index_offset;
245 enc->enc_pic.h264_deblock.cr_qp_offset = pic->pic_ctrl.second_chroma_qp_index_offset;
246 }
247
radeon_vcn_enc_h264_get_spec_misc_param(struct radeon_encoder * enc,struct pipe_h264_enc_picture_desc * pic)248 static void radeon_vcn_enc_h264_get_spec_misc_param(struct radeon_encoder *enc,
249 struct pipe_h264_enc_picture_desc *pic)
250 {
251 struct si_screen *sscreen = (struct si_screen *)enc->screen;
252
253 enc->enc_pic.spec_misc.profile_idc = u_get_h264_profile_idc(enc->base.profile);
254 if (enc->enc_pic.spec_misc.profile_idc >= PIPE_VIDEO_PROFILE_MPEG4_AVC_MAIN &&
255 enc->enc_pic.spec_misc.profile_idc != PIPE_VIDEO_PROFILE_MPEG4_AVC_EXTENDED)
256 enc->enc_pic.spec_misc.cabac_enable = pic->pic_ctrl.enc_cabac_enable;
257 else
258 enc->enc_pic.spec_misc.cabac_enable = false;
259
260 enc->enc_pic.spec_misc.cabac_init_idc = enc->enc_pic.spec_misc.cabac_enable ?
261 pic->pic_ctrl.enc_cabac_init_idc : 0;
262 enc->enc_pic.spec_misc.deblocking_filter_control_present_flag =
263 pic->pic_ctrl.deblocking_filter_control_present_flag;
264 enc->enc_pic.spec_misc.redundant_pic_cnt_present_flag =
265 pic->pic_ctrl.redundant_pic_cnt_present_flag;
266 enc->enc_pic.spec_misc.b_picture_enabled = !!pic->seq.max_num_reorder_frames;
267 enc->enc_pic.spec_misc.constrained_intra_pred_flag =
268 pic->pic_ctrl.constrained_intra_pred_flag;
269 enc->enc_pic.spec_misc.half_pel_enabled = 1;
270 enc->enc_pic.spec_misc.quarter_pel_enabled = 1;
271 enc->enc_pic.spec_misc.weighted_bipred_idc = 0;
272 enc->enc_pic.spec_misc.transform_8x8_mode =
273 sscreen->info.vcn_ip_version >= VCN_5_0_0 &&
274 pic->pic_ctrl.transform_8x8_mode_flag;
275 enc->enc_pic.spec_misc.level_idc = pic->seq.level_idc;
276 }
277
radeon_vcn_enc_h264_get_rc_param(struct radeon_encoder * enc,struct pipe_h264_enc_picture_desc * pic)278 static void radeon_vcn_enc_h264_get_rc_param(struct radeon_encoder *enc,
279 struct pipe_h264_enc_picture_desc *pic)
280 {
281 uint32_t frame_rate_den, frame_rate_num, max_qp;
282
283 enc->enc_pic.num_temporal_layers = pic->seq.num_temporal_layers ? pic->seq.num_temporal_layers : 1;
284 for (int i = 0; i < enc->enc_pic.num_temporal_layers; i++) {
285 enc->enc_pic.rc_layer_init[i].target_bit_rate = pic->rate_ctrl[i].target_bitrate;
286 enc->enc_pic.rc_layer_init[i].peak_bit_rate = pic->rate_ctrl[i].peak_bitrate;
287 frame_rate_den = pic->rate_ctrl[i].frame_rate_den;
288 frame_rate_num = pic->rate_ctrl[i].frame_rate_num;
289 radeon_vcn_enc_invalid_frame_rate(&frame_rate_den, &frame_rate_num);
290 enc->enc_pic.rc_layer_init[i].frame_rate_den = frame_rate_den;
291 enc->enc_pic.rc_layer_init[i].frame_rate_num = frame_rate_num;
292 enc->enc_pic.rc_layer_init[i].vbv_buffer_size = pic->rate_ctrl[i].vbv_buffer_size;
293 enc->enc_pic.rc_layer_init[i].avg_target_bits_per_picture =
294 radeon_vcn_per_frame_integer(pic->rate_ctrl[i].target_bitrate,
295 frame_rate_den,
296 frame_rate_num);
297 enc->enc_pic.rc_layer_init[i].peak_bits_per_picture_integer =
298 radeon_vcn_per_frame_integer(pic->rate_ctrl[i].peak_bitrate,
299 frame_rate_den,
300 frame_rate_num);
301 enc->enc_pic.rc_layer_init[i].peak_bits_per_picture_fractional =
302 radeon_vcn_per_frame_frac(pic->rate_ctrl[i].peak_bitrate,
303 frame_rate_den,
304 frame_rate_num);
305 }
306 enc->enc_pic.rc_session_init.vbv_buffer_level = pic->rate_ctrl[0].vbv_buf_lv;
307 enc->enc_pic.rc_per_pic.qp_obs = pic->quant_i_frames;
308 enc->enc_pic.rc_per_pic.min_qp_app_obs = pic->rate_ctrl[0].min_qp;
309 enc->enc_pic.rc_per_pic.max_qp_app_obs = pic->rate_ctrl[0].max_qp ?
310 pic->rate_ctrl[0].max_qp : 51;
311 enc->enc_pic.rc_per_pic.qp_i = pic->quant_i_frames;
312 enc->enc_pic.rc_per_pic.qp_p = pic->quant_p_frames;
313 enc->enc_pic.rc_per_pic.qp_b = pic->quant_b_frames;
314 enc->enc_pic.rc_per_pic.min_qp_i = pic->rate_ctrl[0].min_qp;
315 enc->enc_pic.rc_per_pic.min_qp_p = pic->rate_ctrl[0].min_qp;
316 enc->enc_pic.rc_per_pic.min_qp_b = pic->rate_ctrl[0].min_qp;
317 max_qp = pic->rate_ctrl[0].max_qp ? pic->rate_ctrl[0].max_qp : 51;
318 enc->enc_pic.rc_per_pic.max_qp_i = max_qp;
319 enc->enc_pic.rc_per_pic.max_qp_p = max_qp;
320 enc->enc_pic.rc_per_pic.max_qp_b = max_qp;
321 enc->enc_pic.rc_per_pic.enabled_filler_data = 0;
322 enc->enc_pic.rc_per_pic.skip_frame_enable = pic->rate_ctrl[0].skip_frame_enable;
323 enc->enc_pic.rc_per_pic.enforce_hrd = pic->rate_ctrl[0].enforce_hrd;
324 enc->enc_pic.rc_per_pic.qvbr_quality_level = pic->rate_ctrl[0].vbr_quality_factor;
325
326 switch (pic->rate_ctrl[0].rate_ctrl_method) {
327 case PIPE_H2645_ENC_RATE_CONTROL_METHOD_DISABLE:
328 enc->enc_pic.rc_session_init.rate_control_method = RENCODE_RATE_CONTROL_METHOD_NONE;
329 break;
330 case PIPE_H2645_ENC_RATE_CONTROL_METHOD_CONSTANT_SKIP:
331 case PIPE_H2645_ENC_RATE_CONTROL_METHOD_CONSTANT:
332 enc->enc_pic.rc_session_init.rate_control_method = RENCODE_RATE_CONTROL_METHOD_CBR;
333 enc->enc_pic.rc_per_pic.enabled_filler_data = pic->rate_ctrl[0].fill_data_enable;
334 break;
335 case PIPE_H2645_ENC_RATE_CONTROL_METHOD_VARIABLE_SKIP:
336 case PIPE_H2645_ENC_RATE_CONTROL_METHOD_VARIABLE:
337 enc->enc_pic.rc_session_init.rate_control_method =
338 RENCODE_RATE_CONTROL_METHOD_PEAK_CONSTRAINED_VBR;
339 break;
340 case PIPE_H2645_ENC_RATE_CONTROL_METHOD_QUALITY_VARIABLE:
341 enc->enc_pic.rc_session_init.rate_control_method =
342 RENCODE_RATE_CONTROL_METHOD_QUALITY_VBR;
343 break;
344 default:
345 enc->enc_pic.rc_session_init.rate_control_method = RENCODE_RATE_CONTROL_METHOD_NONE;
346 }
347 enc->enc_pic.rc_per_pic.max_au_size_obs = pic->rate_ctrl[0].max_au_size;
348 enc->enc_pic.rc_per_pic.max_au_size_i = pic->rate_ctrl[0].max_au_size;
349 enc->enc_pic.rc_per_pic.max_au_size_p = pic->rate_ctrl[0].max_au_size;
350 enc->enc_pic.rc_per_pic.max_au_size_b = pic->rate_ctrl[0].max_au_size;
351 }
352
radeon_vcn_enc_h264_get_slice_ctrl_param(struct radeon_encoder * enc,struct pipe_h264_enc_picture_desc * pic)353 static void radeon_vcn_enc_h264_get_slice_ctrl_param(struct radeon_encoder *enc,
354 struct pipe_h264_enc_picture_desc *pic)
355 {
356 uint32_t num_mbs_total, num_mbs_in_slice;
357
358 num_mbs_total =
359 PIPE_ALIGN_IN_BLOCK_SIZE(enc->base.width, PIPE_H264_MB_SIZE) *
360 PIPE_ALIGN_IN_BLOCK_SIZE(enc->base.height, PIPE_H264_MB_SIZE);
361
362 if (pic->num_slice_descriptors <= 1) {
363 num_mbs_in_slice = num_mbs_total;
364 } else {
365 bool use_app_config = true;
366 num_mbs_in_slice = pic->slices_descriptors[0].num_macroblocks;
367
368 /* All slices must have equal size */
369 for (unsigned i = 1; i < pic->num_slice_descriptors - 1; i++) {
370 if (num_mbs_in_slice != pic->slices_descriptors[i].num_macroblocks)
371 use_app_config = false;
372 }
373 /* Except last one can be smaller */
374 if (pic->slices_descriptors[pic->num_slice_descriptors - 1].num_macroblocks > num_mbs_in_slice)
375 use_app_config = false;
376
377 if (!use_app_config) {
378 assert(num_mbs_total >= pic->num_slice_descriptors);
379 num_mbs_in_slice =
380 (num_mbs_total + pic->num_slice_descriptors - 1) / pic->num_slice_descriptors;
381 }
382 }
383
384 num_mbs_in_slice = MAX2(4, num_mbs_in_slice);
385
386 enc->enc_pic.slice_ctrl.slice_control_mode = RENCODE_H264_SLICE_CONTROL_MODE_FIXED_MBS;
387 enc->enc_pic.slice_ctrl.num_mbs_per_slice = num_mbs_in_slice;
388 }
389
radeon_vcn_enc_get_output_format_param(struct radeon_encoder * enc,bool full_range)390 static void radeon_vcn_enc_get_output_format_param(struct radeon_encoder *enc, bool full_range)
391 {
392 switch (enc->enc_pic.bit_depth_luma_minus8) {
393 case 2: /* 10 bits */
394 enc->enc_pic.enc_output_format.output_color_volume = RENCODE_COLOR_VOLUME_G22_BT709;
395 enc->enc_pic.enc_output_format.output_color_range = full_range ?
396 RENCODE_COLOR_RANGE_FULL : RENCODE_COLOR_RANGE_STUDIO;
397 enc->enc_pic.enc_output_format.output_chroma_location = RENCODE_CHROMA_LOCATION_INTERSTITIAL;
398 enc->enc_pic.enc_output_format.output_color_bit_depth = RENCODE_COLOR_BIT_DEPTH_10_BIT;
399 break;
400 default: /* 8 bits */
401 enc->enc_pic.enc_output_format.output_color_volume = RENCODE_COLOR_VOLUME_G22_BT709;
402 enc->enc_pic.enc_output_format.output_color_range = full_range ?
403 RENCODE_COLOR_RANGE_FULL : RENCODE_COLOR_RANGE_STUDIO;
404 enc->enc_pic.enc_output_format.output_chroma_location = RENCODE_CHROMA_LOCATION_INTERSTITIAL;
405 enc->enc_pic.enc_output_format.output_color_bit_depth = RENCODE_COLOR_BIT_DEPTH_8_BIT;
406 break;
407 }
408 }
409
radeon_vcn_enc_get_input_format_param(struct radeon_encoder * enc,struct pipe_picture_desc * pic_base)410 static void radeon_vcn_enc_get_input_format_param(struct radeon_encoder *enc,
411 struct pipe_picture_desc *pic_base)
412 {
413 switch (pic_base->input_format) {
414 case PIPE_FORMAT_P010:
415 enc->enc_pic.enc_input_format.input_color_bit_depth = RENCODE_COLOR_BIT_DEPTH_10_BIT;
416 enc->enc_pic.enc_input_format.input_color_packing_format = RENCODE_COLOR_PACKING_FORMAT_P010;
417 enc->enc_pic.enc_input_format.input_chroma_subsampling = RENCODE_CHROMA_SUBSAMPLING_4_2_0;
418 enc->enc_pic.enc_input_format.input_color_space = RENCODE_COLOR_SPACE_YUV;
419 break;
420 case PIPE_FORMAT_B8G8R8A8_UNORM:
421 case PIPE_FORMAT_B8G8R8X8_UNORM:
422 enc->enc_pic.enc_input_format.input_color_bit_depth = RENCODE_COLOR_BIT_DEPTH_8_BIT;
423 enc->enc_pic.enc_input_format.input_chroma_subsampling = RENCODE_CHROMA_SUBSAMPLING_4_4_4;
424 enc->enc_pic.enc_input_format.input_color_packing_format = RENCODE_COLOR_PACKING_FORMAT_A8R8G8B8;
425 enc->enc_pic.enc_input_format.input_color_space = RENCODE_COLOR_SPACE_RGB;
426 break;
427 case PIPE_FORMAT_R8G8B8A8_UNORM:
428 case PIPE_FORMAT_R8G8B8X8_UNORM:
429 enc->enc_pic.enc_input_format.input_color_bit_depth = RENCODE_COLOR_BIT_DEPTH_8_BIT;
430 enc->enc_pic.enc_input_format.input_chroma_subsampling = RENCODE_CHROMA_SUBSAMPLING_4_4_4;
431 enc->enc_pic.enc_input_format.input_color_packing_format = RENCODE_COLOR_PACKING_FORMAT_A8B8G8R8;
432 enc->enc_pic.enc_input_format.input_color_space = RENCODE_COLOR_SPACE_RGB;
433 break;
434 case PIPE_FORMAT_B10G10R10A2_UNORM:
435 case PIPE_FORMAT_B10G10R10X2_UNORM:
436 enc->enc_pic.enc_input_format.input_color_bit_depth = RENCODE_COLOR_BIT_DEPTH_10_BIT;
437 enc->enc_pic.enc_input_format.input_chroma_subsampling = RENCODE_CHROMA_SUBSAMPLING_4_4_4;
438 enc->enc_pic.enc_input_format.input_color_packing_format = RENCODE_COLOR_PACKING_FORMAT_A2R10G10B10;
439 enc->enc_pic.enc_input_format.input_color_space = RENCODE_COLOR_SPACE_RGB;
440 break;
441 case PIPE_FORMAT_R10G10B10A2_UNORM:
442 case PIPE_FORMAT_R10G10B10X2_UNORM:
443 enc->enc_pic.enc_input_format.input_color_bit_depth = RENCODE_COLOR_BIT_DEPTH_10_BIT;
444 enc->enc_pic.enc_input_format.input_chroma_subsampling = RENCODE_CHROMA_SUBSAMPLING_4_4_4;
445 enc->enc_pic.enc_input_format.input_color_packing_format = RENCODE_COLOR_PACKING_FORMAT_A2B10G10R10;
446 enc->enc_pic.enc_input_format.input_color_space = RENCODE_COLOR_SPACE_RGB;
447 break;
448 case PIPE_FORMAT_NV12: /* FALL THROUGH */
449 default:
450 enc->enc_pic.enc_input_format.input_color_bit_depth = RENCODE_COLOR_BIT_DEPTH_8_BIT;
451 enc->enc_pic.enc_input_format.input_color_packing_format = RENCODE_COLOR_PACKING_FORMAT_NV12;
452 enc->enc_pic.enc_input_format.input_chroma_subsampling = RENCODE_CHROMA_SUBSAMPLING_4_2_0;
453 enc->enc_pic.enc_input_format.input_color_space = RENCODE_COLOR_SPACE_YUV;
454 break;
455 }
456
457 enc->enc_pic.enc_input_format.input_color_volume = RENCODE_COLOR_VOLUME_G22_BT709;
458 enc->enc_pic.enc_input_format.input_color_range = pic_base->input_full_range ?
459 RENCODE_COLOR_RANGE_FULL : RENCODE_COLOR_RANGE_STUDIO;
460 enc->enc_pic.enc_input_format.input_chroma_location = RENCODE_CHROMA_LOCATION_INTERSTITIAL;
461 }
462
radeon_vcn_enc_h264_get_param(struct radeon_encoder * enc,struct pipe_h264_enc_picture_desc * pic)463 static void radeon_vcn_enc_h264_get_param(struct radeon_encoder *enc,
464 struct pipe_h264_enc_picture_desc *pic)
465 {
466 bool use_filter;
467
468 enc->enc_pic.h264.desc = pic;
469 enc->enc_pic.picture_type = pic->picture_type;
470 enc->enc_pic.bit_depth_luma_minus8 = 0;
471 enc->enc_pic.bit_depth_chroma_minus8 = 0;
472 enc->enc_pic.enc_params.reference_picture_index =
473 pic->ref_list0[0] == PIPE_H2645_LIST_REF_INVALID_ENTRY ? 0xffffffff : pic->ref_list0[0];
474 enc->enc_pic.h264_enc_params.l1_reference_picture0_index =
475 pic->ref_list1[0] == PIPE_H2645_LIST_REF_INVALID_ENTRY ? 0xffffffff : pic->ref_list1[0];
476 enc->enc_pic.h264_enc_params.input_picture_structure = RENCODE_H264_PICTURE_STRUCTURE_FRAME;
477 enc->enc_pic.h264_enc_params.interlaced_mode = RENCODE_H264_INTERLACING_MODE_PROGRESSIVE;
478 enc->enc_pic.h264_enc_params.l0_reference_picture1_index = 0xffffffff;
479 enc->enc_pic.enc_params.reconstructed_picture_index = pic->dpb_curr_pic;
480 enc->enc_pic.h264_enc_params.is_reference = !pic->not_referenced;
481 enc->enc_pic.h264_enc_params.is_long_term = pic->is_ltr;
482 enc->enc_pic.not_referenced = pic->not_referenced;
483
484 radeon_vcn_enc_h264_get_cropping_param(enc, pic);
485 radeon_vcn_enc_h264_get_dbk_param(enc, pic);
486 radeon_vcn_enc_h264_get_rc_param(enc, pic);
487 radeon_vcn_enc_h264_get_spec_misc_param(enc, pic);
488 radeon_vcn_enc_h264_get_slice_ctrl_param(enc, pic);
489 radeon_vcn_enc_get_input_format_param(enc, &pic->base);
490 radeon_vcn_enc_get_output_format_param(enc, pic->seq.video_full_range_flag);
491
492 use_filter = enc->enc_pic.h264_deblock.disable_deblocking_filter_idc != 1;
493 radeon_vcn_enc_get_intra_refresh_param(enc, use_filter, &pic->intra_refresh);
494 radeon_vcn_enc_get_roi_param(enc, &pic->roi);
495 radeon_vcn_enc_get_latency_param(enc);
496 radeon_vcn_enc_quality_modes(enc, &pic->quality_modes);
497 }
498
radeon_vcn_enc_hevc_get_cropping_param(struct radeon_encoder * enc,struct pipe_h265_enc_picture_desc * pic)499 static void radeon_vcn_enc_hevc_get_cropping_param(struct radeon_encoder *enc,
500 struct pipe_h265_enc_picture_desc *pic)
501 {
502 if (pic->seq.conformance_window_flag) {
503 enc->enc_pic.crop_left = pic->seq.conf_win_left_offset;
504 enc->enc_pic.crop_right = pic->seq.conf_win_right_offset;
505 enc->enc_pic.crop_top = pic->seq.conf_win_top_offset;
506 enc->enc_pic.crop_bottom = pic->seq.conf_win_bottom_offset;
507 } else {
508 enc->enc_pic.crop_left = 0;
509 enc->enc_pic.crop_right = 0;
510 enc->enc_pic.crop_top = 0;
511 enc->enc_pic.crop_bottom = 0;
512 }
513 }
514
radeon_vcn_enc_hevc_get_dbk_param(struct radeon_encoder * enc,struct pipe_h265_enc_picture_desc * pic)515 static void radeon_vcn_enc_hevc_get_dbk_param(struct radeon_encoder *enc,
516 struct pipe_h265_enc_picture_desc *pic)
517 {
518 struct si_screen *sscreen = (struct si_screen *)enc->screen;
519
520 enc->enc_pic.hevc_deblock.loop_filter_across_slices_enabled =
521 pic->pic.pps_loop_filter_across_slices_enabled_flag;
522 enc->enc_pic.hevc_deblock.deblocking_filter_disabled =
523 pic->slice.slice_deblocking_filter_disabled_flag;
524 enc->enc_pic.hevc_deblock.beta_offset_div2 = pic->slice.slice_beta_offset_div2;
525 enc->enc_pic.hevc_deblock.tc_offset_div2 = pic->slice.slice_tc_offset_div2;
526 enc->enc_pic.hevc_deblock.cb_qp_offset = pic->slice.slice_cb_qp_offset;
527 enc->enc_pic.hevc_deblock.cr_qp_offset = pic->slice.slice_cr_qp_offset;
528 enc->enc_pic.hevc_deblock.disable_sao =
529 sscreen->info.vcn_ip_version < VCN_2_0_0 ||
530 !pic->seq.sample_adaptive_offset_enabled_flag;
531 }
532
radeon_vcn_enc_hevc_get_spec_misc_param(struct radeon_encoder * enc,struct pipe_h265_enc_picture_desc * pic)533 static void radeon_vcn_enc_hevc_get_spec_misc_param(struct radeon_encoder *enc,
534 struct pipe_h265_enc_picture_desc *pic)
535 {
536 struct si_screen *sscreen = (struct si_screen *)enc->screen;
537
538 enc->enc_pic.hevc_spec_misc.log2_min_luma_coding_block_size_minus3 =
539 pic->seq.log2_min_luma_coding_block_size_minus3;
540 enc->enc_pic.hevc_spec_misc.amp_disabled = !pic->seq.amp_enabled_flag;
541 enc->enc_pic.hevc_spec_misc.strong_intra_smoothing_enabled =
542 pic->seq.strong_intra_smoothing_enabled_flag;
543 enc->enc_pic.hevc_spec_misc.constrained_intra_pred_flag =
544 pic->pic.constrained_intra_pred_flag;
545 enc->enc_pic.hevc_spec_misc.cabac_init_flag = pic->slice.cabac_init_flag;
546 enc->enc_pic.hevc_spec_misc.half_pel_enabled = 1;
547 enc->enc_pic.hevc_spec_misc.quarter_pel_enabled = 1;
548 enc->enc_pic.hevc_spec_misc.transform_skip_disabled =
549 sscreen->info.vcn_ip_version < VCN_3_0_0 ||
550 !pic->pic.transform_skip_enabled_flag;
551 enc->enc_pic.hevc_spec_misc.cu_qp_delta_enabled_flag =
552 (sscreen->info.vcn_ip_version >= VCN_2_0_0 &&
553 pic->pic.cu_qp_delta_enabled_flag) ||
554 enc->enc_pic.enc_qp_map.qp_map_type ||
555 enc->enc_pic.rc_session_init.rate_control_method;
556 }
557
radeon_vcn_enc_hevc_get_rc_param(struct radeon_encoder * enc,struct pipe_h265_enc_picture_desc * pic)558 static void radeon_vcn_enc_hevc_get_rc_param(struct radeon_encoder *enc,
559 struct pipe_h265_enc_picture_desc *pic)
560 {
561 uint32_t frame_rate_den, frame_rate_num, max_qp;
562
563 enc->enc_pic.rc_layer_init[0].target_bit_rate = pic->rc[0].target_bitrate;
564 enc->enc_pic.rc_layer_init[0].peak_bit_rate = pic->rc[0].peak_bitrate;
565 frame_rate_den = pic->rc[0].frame_rate_den;
566 frame_rate_num = pic->rc[0].frame_rate_num;
567 radeon_vcn_enc_invalid_frame_rate(&frame_rate_den, &frame_rate_num);
568 enc->enc_pic.rc_layer_init[0].frame_rate_den = frame_rate_den;
569 enc->enc_pic.rc_layer_init[0].frame_rate_num = frame_rate_num;
570 enc->enc_pic.rc_layer_init[0].vbv_buffer_size = pic->rc[0].vbv_buffer_size;
571 enc->enc_pic.rc_layer_init[0].avg_target_bits_per_picture =
572 radeon_vcn_per_frame_integer(pic->rc[0].target_bitrate,
573 frame_rate_den,
574 frame_rate_num);
575 enc->enc_pic.rc_layer_init[0].peak_bits_per_picture_integer =
576 radeon_vcn_per_frame_integer(pic->rc[0].peak_bitrate,
577 frame_rate_den,
578 frame_rate_num);
579 enc->enc_pic.rc_layer_init[0].peak_bits_per_picture_fractional =
580 radeon_vcn_per_frame_frac(pic->rc[0].peak_bitrate,
581 frame_rate_den,
582 frame_rate_num);
583 enc->enc_pic.rc_session_init.vbv_buffer_level = pic->rc[0].vbv_buf_lv;
584 enc->enc_pic.rc_per_pic.qp_obs = pic->rc[0].quant_i_frames;
585 enc->enc_pic.rc_per_pic.min_qp_app_obs = pic->rc[0].min_qp;
586 enc->enc_pic.rc_per_pic.max_qp_app_obs = pic->rc[0].max_qp ? pic->rc[0].max_qp : 51;
587 enc->enc_pic.rc_per_pic.qp_i = pic->rc[0].quant_i_frames;
588 enc->enc_pic.rc_per_pic.qp_p = pic->rc[0].quant_p_frames;
589 enc->enc_pic.rc_per_pic.min_qp_i = pic->rc[0].min_qp;
590 enc->enc_pic.rc_per_pic.min_qp_p = pic->rc[0].min_qp;
591 max_qp = pic->rc[0].max_qp ? pic->rc[0].max_qp : 51;
592 enc->enc_pic.rc_per_pic.max_qp_i = max_qp;
593 enc->enc_pic.rc_per_pic.max_qp_p = max_qp;
594 enc->enc_pic.rc_per_pic.enabled_filler_data = 0;
595 enc->enc_pic.rc_per_pic.skip_frame_enable = pic->rc[0].skip_frame_enable;
596 enc->enc_pic.rc_per_pic.enforce_hrd = pic->rc[0].enforce_hrd;
597 enc->enc_pic.rc_per_pic.qvbr_quality_level = pic->rc[0].vbr_quality_factor;
598 switch (pic->rc[0].rate_ctrl_method) {
599 case PIPE_H2645_ENC_RATE_CONTROL_METHOD_DISABLE:
600 enc->enc_pic.rc_session_init.rate_control_method = RENCODE_RATE_CONTROL_METHOD_NONE;
601 break;
602 case PIPE_H2645_ENC_RATE_CONTROL_METHOD_CONSTANT_SKIP:
603 case PIPE_H2645_ENC_RATE_CONTROL_METHOD_CONSTANT:
604 enc->enc_pic.rc_session_init.rate_control_method = RENCODE_RATE_CONTROL_METHOD_CBR;
605 enc->enc_pic.rc_per_pic.enabled_filler_data = pic->rc[0].fill_data_enable;
606 break;
607 case PIPE_H2645_ENC_RATE_CONTROL_METHOD_VARIABLE_SKIP:
608 case PIPE_H2645_ENC_RATE_CONTROL_METHOD_VARIABLE:
609 enc->enc_pic.rc_session_init.rate_control_method =
610 RENCODE_RATE_CONTROL_METHOD_PEAK_CONSTRAINED_VBR;
611 break;
612 case PIPE_H2645_ENC_RATE_CONTROL_METHOD_QUALITY_VARIABLE:
613 enc->enc_pic.rc_session_init.rate_control_method =
614 RENCODE_RATE_CONTROL_METHOD_QUALITY_VBR;
615 break;
616 default:
617 enc->enc_pic.rc_session_init.rate_control_method = RENCODE_RATE_CONTROL_METHOD_NONE;
618 }
619 enc->enc_pic.rc_per_pic.max_au_size_obs = pic->rc[0].max_au_size;
620 enc->enc_pic.rc_per_pic.max_au_size_i = pic->rc[0].max_au_size;
621 enc->enc_pic.rc_per_pic.max_au_size_p = pic->rc[0].max_au_size;
622 }
623
radeon_vcn_enc_hevc_get_slice_ctrl_param(struct radeon_encoder * enc,struct pipe_h265_enc_picture_desc * pic)624 static void radeon_vcn_enc_hevc_get_slice_ctrl_param(struct radeon_encoder *enc,
625 struct pipe_h265_enc_picture_desc *pic)
626 {
627 uint32_t num_ctbs_total, num_ctbs_in_slice;
628
629 num_ctbs_total =
630 PIPE_ALIGN_IN_BLOCK_SIZE(pic->seq.pic_width_in_luma_samples, PIPE_H265_ENC_CTB_SIZE) *
631 PIPE_ALIGN_IN_BLOCK_SIZE(pic->seq.pic_height_in_luma_samples, PIPE_H265_ENC_CTB_SIZE);
632
633 if (pic->num_slice_descriptors <= 1) {
634 num_ctbs_in_slice = num_ctbs_total;
635 } else {
636 bool use_app_config = true;
637 num_ctbs_in_slice = pic->slices_descriptors[0].num_ctu_in_slice;
638
639 /* All slices must have equal size */
640 for (unsigned i = 1; i < pic->num_slice_descriptors - 1; i++) {
641 if (num_ctbs_in_slice != pic->slices_descriptors[i].num_ctu_in_slice)
642 use_app_config = false;
643 }
644 /* Except last one can be smaller */
645 if (pic->slices_descriptors[pic->num_slice_descriptors - 1].num_ctu_in_slice > num_ctbs_in_slice)
646 use_app_config = false;
647
648 if (!use_app_config) {
649 assert(num_ctbs_total >= pic->num_slice_descriptors);
650 num_ctbs_in_slice =
651 (num_ctbs_total + pic->num_slice_descriptors - 1) / pic->num_slice_descriptors;
652 }
653 }
654
655 num_ctbs_in_slice = MAX2(4, num_ctbs_in_slice);
656
657 enc->enc_pic.hevc_slice_ctrl.slice_control_mode = RENCODE_HEVC_SLICE_CONTROL_MODE_FIXED_CTBS;
658 enc->enc_pic.hevc_slice_ctrl.fixed_ctbs_per_slice.num_ctbs_per_slice =
659 num_ctbs_in_slice;
660 enc->enc_pic.hevc_slice_ctrl.fixed_ctbs_per_slice.num_ctbs_per_slice_segment =
661 num_ctbs_in_slice;
662 }
663
radeon_vcn_enc_hevc_get_param(struct radeon_encoder * enc,struct pipe_h265_enc_picture_desc * pic)664 static void radeon_vcn_enc_hevc_get_param(struct radeon_encoder *enc,
665 struct pipe_h265_enc_picture_desc *pic)
666 {
667 enc->enc_pic.hevc.desc = pic;
668 enc->enc_pic.picture_type = pic->picture_type;
669 enc->enc_pic.enc_params.reference_picture_index =
670 pic->ref_list0[0] == PIPE_H2645_LIST_REF_INVALID_ENTRY ? 0xffffffff : pic->ref_list0[0];
671 enc->enc_pic.enc_params.reconstructed_picture_index = pic->dpb_curr_pic;
672 enc->enc_pic.num_temporal_layers = 1;
673 enc->enc_pic.bit_depth_luma_minus8 = pic->seq.bit_depth_luma_minus8;
674 enc->enc_pic.bit_depth_chroma_minus8 = pic->seq.bit_depth_chroma_minus8;
675 enc->enc_pic.nal_unit_type = pic->pic.nal_unit_type;
676 enc->enc_pic.temporal_id = pic->pic.temporal_id;
677
678 radeon_vcn_enc_hevc_get_cropping_param(enc, pic);
679 radeon_vcn_enc_hevc_get_dbk_param(enc, pic);
680 radeon_vcn_enc_hevc_get_rc_param(enc, pic);
681 radeon_vcn_enc_hevc_get_slice_ctrl_param(enc, pic);
682 radeon_vcn_enc_get_input_format_param(enc, &pic->base);
683 radeon_vcn_enc_get_output_format_param(enc, pic->seq.video_full_range_flag);
684 radeon_vcn_enc_get_intra_refresh_param(enc,
685 !(enc->enc_pic.hevc_deblock.deblocking_filter_disabled),
686 &pic->intra_refresh);
687 radeon_vcn_enc_get_roi_param(enc, &pic->roi);
688 radeon_vcn_enc_hevc_get_spec_misc_param(enc, pic);
689 radeon_vcn_enc_get_latency_param(enc);
690 radeon_vcn_enc_quality_modes(enc, &pic->quality_modes);
691 }
692
radeon_vcn_enc_av1_get_spec_misc_param(struct radeon_encoder * enc,struct pipe_av1_enc_picture_desc * pic)693 static void radeon_vcn_enc_av1_get_spec_misc_param(struct radeon_encoder *enc,
694 struct pipe_av1_enc_picture_desc *pic)
695 {
696 enc->enc_pic.av1_spec_misc.cdef_mode = pic->seq.seq_bits.enable_cdef;
697 enc->enc_pic.av1_spec_misc.disable_cdf_update = pic->disable_cdf_update;
698 enc->enc_pic.av1_spec_misc.disable_frame_end_update_cdf = pic->disable_frame_end_update_cdf;
699 enc->enc_pic.av1_spec_misc.palette_mode_enable = pic->palette_mode_enable;
700 enc->enc_pic.av1_spec_misc.cdef_bits = pic->cdef.cdef_bits;
701 enc->enc_pic.av1_spec_misc.cdef_damping_minus3 = pic->cdef.cdef_damping_minus_3;
702 for (int i = 0; i < (pic->cdef.cdef_bits << 1); i++ ){
703 enc->enc_pic.av1_spec_misc.cdef_y_pri_strength[i] = (pic->cdef.cdef_y_strengths[i] >> 2);
704 enc->enc_pic.av1_spec_misc.cdef_y_sec_strength[i] = (pic->cdef.cdef_y_strengths[i] & 0x3);
705 enc->enc_pic.av1_spec_misc.cdef_uv_pri_strength[i] = (pic->cdef.cdef_uv_strengths[i] >> 2);
706 enc->enc_pic.av1_spec_misc.cdef_uv_sec_strength[i] = (pic->cdef.cdef_uv_strengths[i] & 0x3);
707 }
708
709 enc->enc_pic.av1_spec_misc.delta_q_y_dc = pic->quantization.y_dc_delta_q;
710 enc->enc_pic.av1_spec_misc.delta_q_u_dc = pic->quantization.u_dc_delta_q;
711 enc->enc_pic.av1_spec_misc.delta_q_u_ac = pic->quantization.u_ac_delta_q;
712 enc->enc_pic.av1_spec_misc.delta_q_v_dc = pic->quantization.v_dc_delta_q;
713 enc->enc_pic.av1_spec_misc.delta_q_v_ac = pic->quantization.v_ac_delta_q;
714
715 if (enc->enc_pic.frame_type == PIPE_AV1_ENC_FRAME_TYPE_KEY)
716 enc->enc_pic.av1_spec_misc.separate_delta_q =
717 (pic->quantization.u_dc_delta_q != pic->quantization.v_dc_delta_q) ||
718 (pic->quantization.u_ac_delta_q != pic->quantization.v_ac_delta_q);
719
720 if (enc->enc_pic.disable_screen_content_tools) {
721 enc->enc_pic.force_integer_mv = 0;
722 enc->enc_pic.av1_spec_misc.palette_mode_enable = 0;
723 }
724
725 if (enc->enc_pic.force_integer_mv)
726 enc->enc_pic.av1_spec_misc.mv_precision = RENCODE_AV1_MV_PRECISION_FORCE_INTEGER_MV;
727 else
728 enc->enc_pic.av1_spec_misc.mv_precision = RENCODE_AV1_MV_PRECISION_ALLOW_HIGH_PRECISION;
729 }
730
radeon_vcn_enc_av1_timing_info(struct radeon_encoder * enc,struct pipe_av1_enc_picture_desc * pic)731 static void radeon_vcn_enc_av1_timing_info(struct radeon_encoder *enc,
732 struct pipe_av1_enc_picture_desc *pic)
733 {
734 if (pic->seq.seq_bits.timing_info_present_flag)
735 {
736 enc->enc_pic.av1_timing_info.num_units_in_display_tick =
737 pic->seq.num_units_in_display_tick;
738 enc->enc_pic.av1_timing_info.time_scale = pic->seq.time_scale;
739 enc->enc_pic.av1_timing_info.num_tick_per_picture_minus1 =
740 pic->seq.num_tick_per_picture_minus1;
741 }
742 }
743
radeon_vcn_enc_av1_color_description(struct radeon_encoder * enc,struct pipe_av1_enc_picture_desc * pic)744 static void radeon_vcn_enc_av1_color_description(struct radeon_encoder *enc,
745 struct pipe_av1_enc_picture_desc *pic)
746 {
747 if (pic->seq.seq_bits.color_description_present_flag)
748 {
749 enc->enc_pic.av1_color_description.color_primaries = pic->seq.color_config.color_primaries;
750 enc->enc_pic.av1_color_description.transfer_characteristics = pic->seq.color_config.transfer_characteristics;
751 enc->enc_pic.av1_color_description.maxtrix_coefficients = pic->seq.color_config.matrix_coefficients;
752 }
753 enc->enc_pic.av1_color_description.color_range = pic->seq.color_config.color_range;
754 enc->enc_pic.av1_color_description.chroma_sample_position = pic->seq.color_config.chroma_sample_position;
755 }
756
radeon_vcn_enc_av1_get_rc_param(struct radeon_encoder * enc,struct pipe_av1_enc_picture_desc * pic)757 static void radeon_vcn_enc_av1_get_rc_param(struct radeon_encoder *enc,
758 struct pipe_av1_enc_picture_desc *pic)
759 {
760 uint32_t frame_rate_den, frame_rate_num, min_qp, max_qp;
761
762 for (int i = 0; i < ARRAY_SIZE(enc->enc_pic.rc_layer_init); i++) {
763 enc->enc_pic.rc_layer_init[i].target_bit_rate = pic->rc[i].target_bitrate;
764 enc->enc_pic.rc_layer_init[i].peak_bit_rate = pic->rc[i].peak_bitrate;
765 frame_rate_den = pic->rc[i].frame_rate_den;
766 frame_rate_num = pic->rc[i].frame_rate_num;
767 radeon_vcn_enc_invalid_frame_rate(&frame_rate_den, &frame_rate_num);
768 enc->enc_pic.rc_layer_init[i].frame_rate_den = frame_rate_den;
769 enc->enc_pic.rc_layer_init[i].frame_rate_num = frame_rate_num;
770 enc->enc_pic.rc_layer_init[i].vbv_buffer_size = pic->rc[i].vbv_buffer_size;
771 enc->enc_pic.rc_layer_init[i].avg_target_bits_per_picture =
772 radeon_vcn_per_frame_integer(pic->rc[i].target_bitrate,
773 frame_rate_den,
774 frame_rate_num);
775 enc->enc_pic.rc_layer_init[i].peak_bits_per_picture_integer =
776 radeon_vcn_per_frame_integer(pic->rc[i].peak_bitrate,
777 frame_rate_den,
778 frame_rate_num);
779 enc->enc_pic.rc_layer_init[i].peak_bits_per_picture_fractional =
780 radeon_vcn_per_frame_frac(pic->rc[i].peak_bitrate,
781 frame_rate_den,
782 frame_rate_num);
783 }
784 enc->enc_pic.rc_session_init.vbv_buffer_level = pic->rc[0].vbv_buf_lv;
785 enc->enc_pic.rc_per_pic.qp_obs = pic->rc[0].qp;
786 enc->enc_pic.rc_per_pic.min_qp_app_obs = pic->rc[0].min_qp ? pic->rc[0].min_qp : 1;
787 enc->enc_pic.rc_per_pic.max_qp_app_obs = pic->rc[0].max_qp ? pic->rc[0].max_qp : 255;
788 enc->enc_pic.rc_per_pic.qp_i = pic->rc[0].qp;
789 enc->enc_pic.rc_per_pic.qp_p = pic->rc[0].qp_inter;
790 min_qp = pic->rc[0].min_qp ? pic->rc[0].min_qp : 1;
791 enc->enc_pic.rc_per_pic.min_qp_i = min_qp;
792 enc->enc_pic.rc_per_pic.min_qp_p = min_qp;
793 max_qp = pic->rc[0].max_qp ? pic->rc[0].max_qp : 255;
794 enc->enc_pic.rc_per_pic.max_qp_i = max_qp;
795 enc->enc_pic.rc_per_pic.max_qp_p = max_qp;
796 enc->enc_pic.rc_per_pic.enabled_filler_data = 0;
797 enc->enc_pic.rc_per_pic.skip_frame_enable = pic->rc[0].skip_frame_enable;
798 enc->enc_pic.rc_per_pic.enforce_hrd = pic->rc[0].enforce_hrd;
799 enc->enc_pic.rc_per_pic.qvbr_quality_level = (pic->rc[0].vbr_quality_factor + 2) / 5;
800 switch (pic->rc[0].rate_ctrl_method) {
801 case PIPE_H2645_ENC_RATE_CONTROL_METHOD_DISABLE:
802 enc->enc_pic.rc_session_init.rate_control_method = RENCODE_RATE_CONTROL_METHOD_NONE;
803 break;
804 case PIPE_H2645_ENC_RATE_CONTROL_METHOD_CONSTANT_SKIP:
805 case PIPE_H2645_ENC_RATE_CONTROL_METHOD_CONSTANT:
806 enc->enc_pic.rc_session_init.rate_control_method = RENCODE_RATE_CONTROL_METHOD_CBR;
807 enc->enc_pic.rc_per_pic.enabled_filler_data = pic->rc[0].fill_data_enable;
808 break;
809 case PIPE_H2645_ENC_RATE_CONTROL_METHOD_VARIABLE_SKIP:
810 case PIPE_H2645_ENC_RATE_CONTROL_METHOD_VARIABLE:
811 enc->enc_pic.rc_session_init.rate_control_method =
812 RENCODE_RATE_CONTROL_METHOD_PEAK_CONSTRAINED_VBR;
813 break;
814 case PIPE_H2645_ENC_RATE_CONTROL_METHOD_QUALITY_VARIABLE:
815 enc->enc_pic.rc_session_init.rate_control_method =
816 RENCODE_RATE_CONTROL_METHOD_QUALITY_VBR;
817 break;
818 default:
819 enc->enc_pic.rc_session_init.rate_control_method = RENCODE_RATE_CONTROL_METHOD_NONE;
820 }
821 enc->enc_pic.rc_per_pic.max_au_size_obs = pic->rc[0].max_au_size;
822 enc->enc_pic.rc_per_pic.max_au_size_i = pic->rc[0].max_au_size;
823 enc->enc_pic.rc_per_pic.max_au_size_p = pic->rc[0].max_au_size;
824 }
825
radeon_vcn_enc_av1_get_tile_config(struct radeon_encoder * enc,struct pipe_av1_enc_picture_desc * pic)826 static void radeon_vcn_enc_av1_get_tile_config(struct radeon_encoder *enc,
827 struct pipe_av1_enc_picture_desc *pic)
828 {
829 uint32_t num_tile_cols, num_tile_rows;
830
831 num_tile_cols = MIN2(RENCODE_AV1_TILE_CONFIG_MAX_NUM_COLS, pic->tile_cols);
832 num_tile_rows = MIN2(RENCODE_AV1_TILE_CONFIG_MAX_NUM_ROWS, pic->tile_rows);
833
834 enc->enc_pic.av1_tile_config.uniform_tile_spacing = !!(pic->uniform_tile_spacing);
835 enc->enc_pic.av1_tile_config.num_tile_cols = pic->tile_cols;
836 enc->enc_pic.av1_tile_config.num_tile_rows = pic->tile_rows;
837 enc->enc_pic.av1_tile_config.num_tile_groups = pic->num_tile_groups;
838 for (int i = 0; i < num_tile_cols; i++ )
839 enc->enc_pic.av1_tile_config.tile_widths[i] = pic->width_in_sbs_minus_1[i] + 1;
840 for (int i = 0; i < num_tile_rows; i++ )
841 enc->enc_pic.av1_tile_config.tile_height[i] = pic->height_in_sbs_minus_1[i] + 1;
842 for (int i = 0; i < num_tile_cols * num_tile_rows; i++ ) {
843 enc->enc_pic.av1_tile_config.tile_groups[i].start =
844 (uint32_t)pic->tile_groups[i].tile_group_start;
845 enc->enc_pic.av1_tile_config.tile_groups[i].end =
846 (uint32_t)pic->tile_groups[i].tile_group_end;
847 }
848 enc->enc_pic.av1_tile_config.context_update_tile_id = pic->context_update_tile_id;
849 }
850
radeon_vcn_enc_av1_get_meta_param(struct radeon_encoder * enc,struct pipe_av1_enc_picture_desc * pic)851 static void radeon_vcn_enc_av1_get_meta_param(struct radeon_encoder *enc,
852 struct pipe_av1_enc_picture_desc *pic)
853 {
854 memset (&enc->enc_pic.enc_sei, 0, sizeof(rvcn_enc_seidata_t));
855
856 if (!pic->metadata_flags.value) {
857 enc->enc_pic.enc_sei.flags.value = 0;
858 return;
859 }
860
861 if (pic->metadata_flags.hdr_cll) {
862 enc->enc_pic.enc_sei.flags.hdr_cll = 1;
863 enc->enc_pic.enc_sei.hdr_cll = (rvcn_enc_sei_hdr_cll_t) {
864 .max_cll = pic->metadata_hdr_cll.max_cll,
865 .max_fall = pic->metadata_hdr_cll.max_fall
866 };
867 }
868
869 if (pic->metadata_flags.hdr_mdcv) {
870 enc->enc_pic.enc_sei.flags.hdr_mdcv = 1;
871 for (int32_t i = 0; i < 3; i++) {
872 enc->enc_pic.enc_sei.hdr_mdcv.primary_chromaticity_x[i]
873 = pic->metadata_hdr_mdcv.primary_chromaticity_x[i];
874 enc->enc_pic.enc_sei.hdr_mdcv.primary_chromaticity_y[i]
875 = pic->metadata_hdr_mdcv.primary_chromaticity_y[i];
876 }
877 enc->enc_pic.enc_sei.hdr_mdcv.white_point_chromaticity_x =
878 pic->metadata_hdr_mdcv.white_point_chromaticity_x;
879 enc->enc_pic.enc_sei.hdr_mdcv.white_point_chromaticity_y =
880 pic->metadata_hdr_mdcv.white_point_chromaticity_y;
881 enc->enc_pic.enc_sei.hdr_mdcv.luminance_max =
882 pic->metadata_hdr_mdcv.luminance_max;
883 enc->enc_pic.enc_sei.hdr_mdcv.luminance_min =
884 pic->metadata_hdr_mdcv.luminance_min;
885 }
886 }
887
radeon_vcn_enc_av1_get_param(struct radeon_encoder * enc,struct pipe_av1_enc_picture_desc * pic)888 static void radeon_vcn_enc_av1_get_param(struct radeon_encoder *enc,
889 struct pipe_av1_enc_picture_desc *pic)
890 {
891 struct radeon_enc_pic *enc_pic = &enc->enc_pic;
892 enc_pic->frame_type = pic->frame_type;
893 enc_pic->frame_num = pic->frame_num;
894 enc_pic->bit_depth_luma_minus8 = enc_pic->bit_depth_chroma_minus8 =
895 pic->seq.bit_depth_minus8;
896 enc_pic->pic_width_in_luma_samples = pic->seq.pic_width_in_luma_samples;
897 enc_pic->pic_height_in_luma_samples = pic->seq.pic_height_in_luma_samples;
898 enc_pic->general_profile_idc = pic->seq.profile;
899 enc_pic->general_level_idc = pic->seq.level;
900 enc_pic->general_tier_flag = pic->seq.tier;
901
902 enc_pic->num_temporal_layers =
903 pic->seq.num_temporal_layers <= RENCODE_MAX_NUM_TEMPORAL_LAYERS ?
904 pic->seq.num_temporal_layers : RENCODE_MAX_NUM_TEMPORAL_LAYERS;
905
906 /* 1, 2 layer needs 1 reference, and 3, 4 layer needs 2 references */
907 enc->base.max_references = (enc_pic->num_temporal_layers + 1) / 2
908 + RENCODE_VCN4_AV1_MAX_NUM_LTR;
909 for (int i = 0; i < RENCDOE_AV1_REFS_PER_FRAME; i++)
910 enc_pic->av1_ref_frame_idx[i] = pic->ref_frame_idx[i];
911
912 for (int i = 0; i < RENCDOE_AV1_NUM_REF_FRAMES; i++)
913 enc_pic->av1_ref_list[i] = pic->ref_list[i];
914
915 enc_pic->av1_recon_frame = pic->recon_frame;
916 enc_pic->av1_ref_frame_ctrl_l0 = pic->ref_frame_ctrl_l0;
917
918 enc_pic->frame_id_numbers_present = pic->seq.seq_bits.frame_id_number_present_flag;
919 enc_pic->enable_error_resilient_mode = pic->error_resilient_mode;
920 enc_pic->force_integer_mv = pic->force_integer_mv;
921 enc_pic->enable_order_hint = pic->seq.seq_bits.enable_order_hint;
922 enc_pic->order_hint_bits = pic->seq.order_hint_bits;
923 enc_pic->enable_render_size = pic->enable_render_size;
924 enc_pic->render_width = pic->render_width;
925 enc_pic->render_height = pic->render_height;
926 enc_pic->enable_color_description = pic->seq.seq_bits.color_description_present_flag;
927 enc_pic->timing_info_present = pic->seq.seq_bits.timing_info_present_flag;
928 enc_pic->timing_info_equal_picture_interval = pic->seq.seq_bits.equal_picture_interval;
929 enc_pic->disable_screen_content_tools = !pic->allow_screen_content_tools;
930 enc_pic->is_obu_frame = pic->enable_frame_obu;
931 enc_pic->need_av1_seq = (pic->frame_type == PIPE_AV1_ENC_FRAME_TYPE_KEY);
932 enc_pic->av1_mark_long_term_reference = pic->long_term_reference;
933
934 radeon_vcn_enc_av1_get_spec_misc_param(enc, pic);
935 radeon_vcn_enc_av1_timing_info(enc, pic);
936 radeon_vcn_enc_av1_color_description(enc, pic);
937 radeon_vcn_enc_av1_get_rc_param(enc, pic);
938 radeon_vcn_enc_av1_get_tile_config(enc, pic);
939 radeon_vcn_enc_get_input_format_param(enc, &pic->base);
940 radeon_vcn_enc_get_output_format_param(enc, pic->seq.color_config.color_range);
941 /* loop filter enabled all the time */
942 radeon_vcn_enc_get_intra_refresh_param(enc,
943 true,
944 &pic->intra_refresh);
945 radeon_vcn_enc_get_roi_param(enc, &pic->roi);
946 radeon_vcn_enc_get_latency_param(enc);
947 radeon_vcn_enc_av1_get_meta_param(enc, pic);
948 radeon_vcn_enc_quality_modes(enc, &pic->quality_modes);
949 }
950
radeon_vcn_enc_get_param(struct radeon_encoder * enc,struct pipe_picture_desc * picture)951 static void radeon_vcn_enc_get_param(struct radeon_encoder *enc, struct pipe_picture_desc *picture)
952 {
953 enc->enc_pic.enc_params.allowed_max_bitstream_size = enc->bs_size - enc->bs_offset;
954
955 if (u_reduce_video_profile(picture->profile) == PIPE_VIDEO_FORMAT_MPEG4_AVC)
956 radeon_vcn_enc_h264_get_param(enc, (struct pipe_h264_enc_picture_desc *)picture);
957 else if (u_reduce_video_profile(picture->profile) == PIPE_VIDEO_FORMAT_HEVC)
958 radeon_vcn_enc_hevc_get_param(enc, (struct pipe_h265_enc_picture_desc *)picture);
959 else if (u_reduce_video_profile(picture->profile) == PIPE_VIDEO_FORMAT_AV1)
960 radeon_vcn_enc_av1_get_param(enc, (struct pipe_av1_enc_picture_desc *)picture);
961 }
962
flush(struct radeon_encoder * enc,unsigned flags,struct pipe_fence_handle ** fence)963 static int flush(struct radeon_encoder *enc, unsigned flags, struct pipe_fence_handle **fence)
964 {
965 return enc->ws->cs_flush(&enc->cs, flags, fence);
966 }
967
radeon_enc_flush(struct pipe_video_codec * encoder)968 static void radeon_enc_flush(struct pipe_video_codec *encoder)
969 {
970 struct radeon_encoder *enc = (struct radeon_encoder *)encoder;
971 flush(enc, PIPE_FLUSH_ASYNC, NULL);
972 }
973
radeon_enc_cs_flush(void * ctx,unsigned flags,struct pipe_fence_handle ** fence)974 static void radeon_enc_cs_flush(void *ctx, unsigned flags, struct pipe_fence_handle **fence)
975 {
976 // just ignored
977 }
978
979 /* configure reconstructed picture offset */
radeon_enc_rec_offset(rvcn_enc_reconstructed_picture_t * recon,uint32_t * offset,uint32_t luma_size,uint32_t chroma_size,bool is_av1)980 static void radeon_enc_rec_offset(rvcn_enc_reconstructed_picture_t *recon,
981 uint32_t *offset,
982 uint32_t luma_size,
983 uint32_t chroma_size,
984 bool is_av1)
985 {
986 if (offset) {
987 recon->luma_offset = *offset;
988 *offset += luma_size;
989 recon->chroma_offset = *offset;
990 *offset += chroma_size;
991 if (is_av1) {
992 recon->av1.av1_cdf_frame_context_offset = *offset;
993 *offset += RENCODE_AV1_FRAME_CONTEXT_CDF_TABLE_SIZE;
994 recon->av1.av1_cdef_algorithm_context_offset = *offset;
995 *offset += RENCODE_AV1_CDEF_ALGORITHM_FRAME_CONTEXT_SIZE;
996 }
997 } else {
998 recon->luma_offset = 0;
999 recon->chroma_offset = 0;
1000 recon->av1.av1_cdf_frame_context_offset = 0;
1001 recon->av1.av1_cdef_algorithm_context_offset = 0;
1002 }
1003 recon->chroma_v_offset = 0;
1004 }
1005
1006 /* configure reconstructed picture offset */
radeon_enc_rec_meta_offset(rvcn_enc_reconstructed_picture_t * recon,uint32_t * offset,uint32_t total_coloc_size,uint32_t alignment,bool has_b,bool is_h264,bool is_av1)1007 static void radeon_enc_rec_meta_offset(rvcn_enc_reconstructed_picture_t *recon,
1008 uint32_t *offset,
1009 uint32_t total_coloc_size,
1010 uint32_t alignment,
1011 bool has_b,
1012 bool is_h264,
1013 bool is_av1)
1014 {
1015 uint32_t context_offset = 0;
1016
1017 if (offset) {
1018 recon->frame_context_buffer_offset = *offset;
1019 recon->encode_metadata_offset = context_offset;
1020 context_offset += RENCODE_MAX_METADATA_BUFFER_SIZE_PER_FRAME;
1021 if (is_h264) {
1022 if (has_b) {
1023 recon->h264.colloc_buffer_offset = context_offset;
1024 context_offset += total_coloc_size;
1025 } else
1026 recon->h264.colloc_buffer_offset = RENCODE_INVALID_COLOC_OFFSET;
1027 }
1028
1029 if (is_av1) {
1030 recon->av1.av1_cdf_frame_context_offset = context_offset;
1031 context_offset += RENCODE_AV1_FRAME_CONTEXT_CDF_TABLE_SIZE;
1032 recon->av1.av1_cdef_algorithm_context_offset = context_offset;
1033 context_offset += RENCODE_AV1_CDEF_ALGORITHM_FRAME_CONTEXT_SIZE;
1034 }
1035 context_offset = align(context_offset, alignment);
1036 *offset += context_offset;
1037 } else {
1038 recon->frame_context_buffer_offset = 0;
1039 recon->encode_metadata_offset = 0;
1040 recon->av1.av1_cdf_frame_context_offset = 0;
1041 recon->av1.av1_cdef_algorithm_context_offset = 0;
1042 }
1043 }
1044
setup_cdf(struct radeon_encoder * enc)1045 static int setup_cdf(struct radeon_encoder *enc)
1046 {
1047 unsigned char *p_cdf = NULL;
1048
1049 if (!enc->cdf ||
1050 !si_vid_create_buffer(enc->screen,
1051 enc->cdf,
1052 VCN_ENC_AV1_DEFAULT_CDF_SIZE,
1053 PIPE_USAGE_DYNAMIC)) {
1054 RVID_ERR("Can't create CDF buffer.\n");
1055 goto error;
1056 }
1057
1058 p_cdf = enc->ws->buffer_map(enc->ws,
1059 enc->cdf->res->buf,
1060 &enc->cs,
1061 PIPE_MAP_READ_WRITE | RADEON_MAP_TEMPORARY);
1062 if (!p_cdf)
1063 goto error;
1064
1065 memcpy(p_cdf, rvcn_av1_cdf_default_table, VCN_ENC_AV1_DEFAULT_CDF_SIZE);
1066 enc->ws->buffer_unmap(enc->ws, enc->cdf->res->buf);
1067
1068 return 0;
1069
1070 error:
1071 return -1;
1072 }
1073
pre_encode_size(struct radeon_encoder * enc,uint32_t * offset)1074 static void pre_encode_size(struct radeon_encoder *enc,
1075 uint32_t *offset)
1076 {
1077 bool is_h264 = u_reduce_video_profile(enc->base.profile)
1078 == PIPE_VIDEO_FORMAT_MPEG4_AVC;
1079 uint32_t rec_alignment = is_h264 ? 16 : 64;
1080 uint32_t aligned_width = align(enc->base.width, rec_alignment);
1081 uint32_t aligned_height = align(enc->base.height, rec_alignment);
1082 struct radeon_enc_pic *enc_pic = &enc->enc_pic;
1083 bool has_b = enc_pic->spec_misc.b_picture_enabled; /* for h264 only */
1084 uint32_t pre_size = DIV_ROUND_UP((aligned_width >> 2), rec_alignment) *
1085 DIV_ROUND_UP((aligned_height >> 2), rec_alignment);
1086 uint32_t full_size = DIV_ROUND_UP(aligned_width, rec_alignment) *
1087 DIV_ROUND_UP(aligned_height, rec_alignment);
1088
1089 enc_pic->ctx_buf.two_pass_search_center_map_offset = *offset;
1090 if (is_h264 && !has_b)
1091 *offset += align((pre_size * 4 + full_size) * sizeof(uint32_t), enc->alignment);
1092 else if (!is_h264)
1093 *offset += align((pre_size * 52 + full_size) * sizeof(uint32_t), enc->alignment);
1094 }
1095
setup_dpb(struct radeon_encoder * enc,uint32_t num_reconstructed_pictures)1096 static int setup_dpb(struct radeon_encoder *enc, uint32_t num_reconstructed_pictures)
1097 {
1098 struct si_screen *sscreen = (struct si_screen *)enc->screen;
1099 bool is_h264 = u_reduce_video_profile(enc->base.profile)
1100 == PIPE_VIDEO_FORMAT_MPEG4_AVC;
1101 bool is_av1 = u_reduce_video_profile(enc->base.profile)
1102 == PIPE_VIDEO_FORMAT_AV1;
1103 uint32_t rec_alignment = is_h264 ? 16 : 64;
1104 uint32_t aligned_width = align(enc->base.width, rec_alignment);
1105 uint32_t aligned_height = align(enc->base.height, rec_alignment);
1106 uint32_t pitch = align(aligned_width, enc->alignment);
1107 uint32_t luma_size, chroma_size, offset;
1108 struct radeon_enc_pic *enc_pic = &enc->enc_pic;
1109 int i;
1110 bool has_b = enc_pic->spec_misc.b_picture_enabled; /* for h264 only */
1111 uint32_t aligned_dpb_height = MAX2(256, aligned_height);
1112 uint32_t total_coloc_bytes = (align((aligned_width / 16), 64) / 2)
1113 * (aligned_height / 16);
1114
1115 luma_size = align(pitch * aligned_dpb_height , enc->alignment);
1116 chroma_size = align(luma_size / 2 , enc->alignment);
1117 if (enc_pic->bit_depth_luma_minus8 || enc_pic->bit_depth_chroma_minus8) {
1118 luma_size *= 2;
1119 chroma_size *= 2;
1120 }
1121
1122 assert(num_reconstructed_pictures <= RENCODE_MAX_NUM_RECONSTRUCTED_PICTURES);
1123
1124 enc_pic->ctx_buf.rec_luma_pitch = pitch;
1125 enc_pic->ctx_buf.pre_encode_picture_luma_pitch = pitch;
1126 enc_pic->ctx_buf.num_reconstructed_pictures = num_reconstructed_pictures;
1127
1128 offset = 0;
1129 enc->metadata_size = 0;
1130 if (sscreen->info.vcn_ip_version < VCN_5_0_0) {
1131 enc_pic->ctx_buf.rec_chroma_pitch = pitch;
1132 enc_pic->ctx_buf.pre_encode_picture_chroma_pitch = pitch;
1133 if (has_b) {
1134 enc_pic->ctx_buf.colloc_buffer_offset = offset;
1135 offset += total_coloc_bytes;
1136 } else
1137 enc_pic->ctx_buf.colloc_buffer_offset = 0;
1138
1139 if (enc_pic->quality_modes.pre_encode_mode)
1140 pre_encode_size(enc, &offset);
1141 else
1142 enc_pic->ctx_buf.two_pass_search_center_map_offset = 0;
1143
1144 if (enc_pic->quality_modes.pre_encode_mode) {
1145 enc_pic->ctx_buf.pre_encode_input_picture.rgb.red_offset = offset;
1146 offset += luma_size;
1147 enc_pic->ctx_buf.pre_encode_input_picture.rgb.green_offset = offset;
1148 offset += luma_size;
1149 enc_pic->ctx_buf.pre_encode_input_picture.rgb.blue_offset = offset;
1150 offset += luma_size;
1151 }
1152
1153 if (is_av1) {
1154 enc_pic->ctx_buf.av1.av1_sdb_intermediate_context_offset = offset;
1155 offset += RENCODE_AV1_SDB_FRAME_CONTEXT_SIZE;
1156 }
1157
1158 for (i = 0; i < num_reconstructed_pictures; i++) {
1159 radeon_enc_rec_offset(&enc_pic->ctx_buf.reconstructed_pictures[i],
1160 &offset, luma_size, chroma_size, is_av1);
1161
1162 if (enc_pic->quality_modes.pre_encode_mode)
1163 radeon_enc_rec_offset(&enc_pic->ctx_buf.pre_encode_reconstructed_pictures[i],
1164 &offset, luma_size, chroma_size, is_av1);
1165 }
1166
1167 for (; i < RENCODE_MAX_NUM_RECONSTRUCTED_PICTURES; i++) {
1168 radeon_enc_rec_offset(&enc_pic->ctx_buf.reconstructed_pictures[i],
1169 NULL, 0, 0, false);
1170 if (enc_pic->quality_modes.pre_encode_mode)
1171 radeon_enc_rec_offset(&enc_pic->ctx_buf.pre_encode_reconstructed_pictures[i],
1172 NULL, 0, 0, false);
1173 }
1174
1175 enc->dpb_size = offset;
1176 } else { /* vcn 5.0 */
1177 enc_pic->ctx_buf.rec_chroma_pitch = pitch / 2;
1178 enc_pic->ctx_buf.pre_encode_picture_chroma_pitch = pitch / 2;
1179 /* dpb buffer */
1180 if (is_av1) {
1181 enc_pic->ctx_buf.av1.av1_sdb_intermediate_context_offset = offset;
1182 offset += RENCODE_AV1_SDB_FRAME_CONTEXT_SIZE;
1183 } else
1184 enc_pic->ctx_buf.av1.av1_sdb_intermediate_context_offset = 0;
1185
1186 if (enc_pic->quality_modes.pre_encode_mode) {
1187 enc_pic->ctx_buf.pre_encode_input_picture.rgb.red_offset = offset;
1188 offset += luma_size;
1189 enc_pic->ctx_buf.pre_encode_input_picture.rgb.green_offset = offset;
1190 offset += luma_size;
1191 enc_pic->ctx_buf.pre_encode_input_picture.rgb.blue_offset = offset;
1192 offset += luma_size;
1193 }
1194
1195 for (i = 0; i < num_reconstructed_pictures; i++) {
1196 radeon_enc_rec_offset(&enc_pic->ctx_buf.reconstructed_pictures[i],
1197 &offset, luma_size, chroma_size, false);
1198
1199 if (enc_pic->quality_modes.pre_encode_mode)
1200 radeon_enc_rec_offset(&enc_pic->ctx_buf.pre_encode_reconstructed_pictures[i],
1201 &offset, luma_size, chroma_size, false);
1202 }
1203
1204 for (; i < RENCODE_MAX_NUM_RECONSTRUCTED_PICTURES; i++) {
1205 radeon_enc_rec_offset(&enc_pic->ctx_buf.reconstructed_pictures[i],
1206 NULL, 0, 0, false);
1207 if (enc_pic->quality_modes.pre_encode_mode)
1208 radeon_enc_rec_offset(&enc_pic->ctx_buf.pre_encode_reconstructed_pictures[i],
1209 NULL, 0, 0, false);
1210 }
1211
1212 enc->dpb_size = offset;
1213
1214 /* meta buffer */
1215 offset = 0;
1216 if (enc_pic->quality_modes.pre_encode_mode)
1217 pre_encode_size(enc, &offset);
1218 else
1219 enc_pic->ctx_buf.two_pass_search_center_map_offset = 0;
1220
1221 for (i = 0; i < num_reconstructed_pictures; i++) {
1222 radeon_enc_rec_meta_offset(&enc_pic->ctx_buf.reconstructed_pictures[i],
1223 &offset, total_coloc_bytes, enc->alignment, has_b, is_h264, is_av1);
1224 if (enc_pic->quality_modes.pre_encode_mode)
1225 radeon_enc_rec_meta_offset(&enc_pic->ctx_buf.pre_encode_reconstructed_pictures[i],
1226 &offset, total_coloc_bytes, enc->alignment, has_b, is_h264, is_av1);
1227 }
1228 for (; i < RENCODE_MAX_NUM_RECONSTRUCTED_PICTURES; i++) {
1229 radeon_enc_rec_meta_offset(&enc_pic->ctx_buf.reconstructed_pictures[i],
1230 NULL, 0, 0, false, false, false);
1231 if (enc_pic->quality_modes.pre_encode_mode)
1232 radeon_enc_rec_meta_offset(&enc_pic->ctx_buf.pre_encode_reconstructed_pictures[i],
1233 NULL, 0, 0, false, false, false);
1234 }
1235 enc->metadata_size = offset;
1236 }
1237
1238 enc->dpb_slots = num_reconstructed_pictures;
1239
1240 return offset;
1241 }
1242
1243 /* each block (MB/CTB/SB) has one QP/QI value */
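/* A legacy map stores one dword per block, a unified map packs two 16-bit
 * entries per dword.  For example, a 1920x1080 AVC stream has 120x68
 * macroblocks: 120 * 68 * 4 = 32640 bytes for a legacy map versus
 * 60 * 68 * 4 = 16320 bytes for a unified map. */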
1244 static uint32_t roi_buffer_size(struct radeon_encoder *enc)
1245 {
1246 uint32_t pitch_size_in_dword = 0;
1247 rvcn_enc_qp_map_t *qp_map = &enc->enc_pic.enc_qp_map;
1248
1249 if (qp_map->version == RENCODE_QP_MAP_LEGACY) {
1250 pitch_size_in_dword = qp_map->width_in_block;
1251 qp_map->qp_map_pitch = qp_map->width_in_block;
1252 } else {
1253 /* two 16-bit map entries are packed into one dword */
1254 pitch_size_in_dword = DIV_ROUND_UP(qp_map->width_in_block, 2);
1255 qp_map->qp_map_pitch = pitch_size_in_dword * 2;
1256 }
1257
1258 return pitch_size_in_dword * qp_map->height_in_block * sizeof(uint32_t);
1259 }
1260
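/* Write the qp_delta of one region into the QP map, clipping the region
 * against the map dimensions.  A legacy map takes a signed 32-bit delta per
 * block, a unified map takes a shifted signed 16-bit delta. */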
1261 static void arrange_qp_map(void *start,
1262 struct rvcn_enc_qp_map_region *region,
1263 rvcn_enc_qp_map_t *map)
1264 {
1265 uint32_t i, j;
1266 uint32_t offset;
1267 uint32_t num_in_x = MIN2(region->x_in_unit + region->width_in_unit, map->width_in_block)
1268 - region->x_in_unit;
1269 uint32_t num_in_y = MIN2(region->y_in_unit + region->height_in_unit, map->height_in_block)
1270 - region->y_in_unit;
1271
1272 for (j = 0; j < num_in_y; j++) {
1273 for (i = 0; i < num_in_x; i++) {
1274 offset = region->x_in_unit + i + (region->y_in_unit + j) * map->qp_map_pitch;
1275 if (map->version == RENCODE_QP_MAP_LEGACY)
1276 *((uint32_t *)start + offset) = (int32_t)region->qp_delta;
1277 else
1278 *((int16_t *)start + offset) =
1279 (int16_t)(region->qp_delta << RENCODE_QP_MAP_UNIFIED_QP_BITS_SHIFT);
1280 }
1281 }
1282 }
1283
1284 /* Arrange ROI map values according to the input regions.
1285  * A region earlier in the list (lower sequence number) has higher
1286  * priority and may overlap a map region that comes later. */
1288 static int generate_roi_map(struct radeon_encoder *enc)
1289 {
1290 uint32_t width_in_block, height_in_block;
1291 uint32_t i;
1292 void *p_roi = NULL;
1293
1294 radeon_vcn_enc_blocks_in_frame(enc, &width_in_block, &height_in_block);
1295
1296 p_roi = enc->ws->buffer_map(enc->ws,
1297 enc->roi->res->buf,
1298 &enc->cs,
1299 PIPE_MAP_READ_WRITE | RADEON_MAP_TEMPORARY);
1300 if (!p_roi)
1301 goto error;
1302
1303 memset(p_roi, 0, enc->roi_size);
1304
1305 for (i = 0; i < ARRAY_SIZE(enc->enc_pic.enc_qp_map.map); i++) {
1306 struct rvcn_enc_qp_map_region *region = &enc->enc_pic.enc_qp_map.map[i];
1307 if (region->is_valid)
1308 arrange_qp_map(p_roi, region, &enc->enc_pic.enc_qp_map);
1309 }
1310
1311 enc->ws->buffer_unmap(enc->ws, enc->roi->res->buf);
1312 return 0;
1313 error:
1314 return -1;
1315 }
1316
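/* Per-frame setup: detect rate-control and per-picture QP changes for the
 * active codec, then lazily allocate the CDF (AV1 only), DPB, metadata
 * (VCN 5.0+), ROI and session buffers.  The very first call also creates
 * the stream handle and submits the initial session begin. */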
1317 static void radeon_enc_begin_frame(struct pipe_video_codec *encoder,
1318 struct pipe_video_buffer *source,
1319 struct pipe_picture_desc *picture)
1320 {
1321 struct radeon_encoder *enc = (struct radeon_encoder *)encoder;
1322 struct si_screen *sscreen = (struct si_screen *)enc->screen;
1323 struct vl_video_buffer *vid_buf = (struct vl_video_buffer *)source;
1324 unsigned dpb_slots = 0;
1325
1326 enc->need_rate_control = false;
1327 enc->need_rc_per_pic = false;
1328
1329 if (u_reduce_video_profile(enc->base.profile) == PIPE_VIDEO_FORMAT_MPEG4_AVC) {
1330 struct pipe_h264_enc_picture_desc *pic = (struct pipe_h264_enc_picture_desc *)picture;
1331 dpb_slots = MAX2(pic->seq.max_num_ref_frames + 1, pic->dpb_size);
1332 enc->need_rate_control =
1333 (enc->enc_pic.rc_layer_init[0].target_bit_rate != pic->rate_ctrl[0].target_bitrate) ||
1334 (enc->enc_pic.rc_layer_init[0].frame_rate_num != pic->rate_ctrl[0].frame_rate_num) ||
1335 (enc->enc_pic.rc_layer_init[0].frame_rate_den != pic->rate_ctrl[0].frame_rate_den);
1336
1337 if (enc->need_rate_control) {
1338 enc->enc_pic.rc_per_pic.qp_i = 0;
1339 enc->enc_pic.rc_per_pic.qp_p = 0;
1340 enc->enc_pic.rc_per_pic.qp_b = 0;
1341 }
1342
1343 enc->need_rc_per_pic =
1344 (!enc->enc_pic.rc_per_pic.qp_i && enc->enc_pic.rc_per_pic.qp_i != pic->quant_i_frames) ||
1345 (!enc->enc_pic.rc_per_pic.qp_p && enc->enc_pic.rc_per_pic.qp_p != pic->quant_p_frames) ||
1346 (!enc->enc_pic.rc_per_pic.qp_b && enc->enc_pic.rc_per_pic.qp_b != pic->quant_b_frames) ||
1347 (enc->enc_pic.rc_per_pic.qvbr_quality_level != pic->rate_ctrl[0].vbr_quality_factor);
1348 } else if (u_reduce_video_profile(picture->profile) == PIPE_VIDEO_FORMAT_HEVC) {
1349 struct pipe_h265_enc_picture_desc *pic = (struct pipe_h265_enc_picture_desc *)picture;
1350 dpb_slots = MAX2(pic->pic.num_ref_idx_l0_default_active_minus1 + 2, pic->dpb_size);
1351 enc->need_rate_control =
1352 (enc->enc_pic.rc_layer_init[0].target_bit_rate != pic->rc[0].target_bitrate) ||
1353 (enc->enc_pic.rc_layer_init[0].frame_rate_num != pic->rc[0].frame_rate_num) ||
1354 (enc->enc_pic.rc_layer_init[0].frame_rate_den != pic->rc[0].frame_rate_den);
1355
1356 if (enc->need_rate_control) {
1357 enc->enc_pic.rc_per_pic.qp_i = 0;
1358 enc->enc_pic.rc_per_pic.qp_p = 0;
1359 }
1360
1361 enc->need_rc_per_pic =
1362 (!enc->enc_pic.rc_per_pic.qp_i && enc->enc_pic.rc_per_pic.qp_i != pic->rc[0].quant_i_frames) ||
1363 (!enc->enc_pic.rc_per_pic.qp_p && enc->enc_pic.rc_per_pic.qp_p != pic->rc[0].quant_p_frames) ||
1364 (enc->enc_pic.rc_per_pic.qvbr_quality_level != pic->rc[0].vbr_quality_factor);
1365 } else if (u_reduce_video_profile(picture->profile) == PIPE_VIDEO_FORMAT_AV1) {
1366 struct pipe_av1_enc_picture_desc *pic = (struct pipe_av1_enc_picture_desc *)picture;
1367 enc->need_rate_control =
1368 (enc->enc_pic.rc_layer_init[0].target_bit_rate != pic->rc[0].target_bitrate) ||
1369 (enc->enc_pic.rc_layer_init[0].frame_rate_num != pic->rc[0].frame_rate_num) ||
1370 (enc->enc_pic.rc_layer_init[0].frame_rate_den != pic->rc[0].frame_rate_den);
1371
1372 if (enc->need_rate_control) {
1373 enc->enc_pic.rc_per_pic.qp_i = 0;
1374 enc->enc_pic.rc_per_pic.qp_p = 0;
1375 }
1376
1377 enc->need_rc_per_pic =
1378 (!enc->enc_pic.rc_per_pic.qp_i && enc->enc_pic.rc_per_pic.qp_i != pic->rc[0].qp) ||
1379 (!enc->enc_pic.rc_per_pic.qp_p && enc->enc_pic.rc_per_pic.qp_p != pic->rc[0].qp_inter) ||
1380 (enc->enc_pic.rc_per_pic.qvbr_quality_level != pic->rc[0].vbr_quality_factor);
1381
1382 if (!enc->cdf) {
1383 enc->cdf = CALLOC_STRUCT(rvid_buffer);
1384 if (setup_cdf(enc)) {
1385 RVID_ERR("Can't create cdf buffer.\n");
1386 goto error;
1387 }
1388 }
1389 }
1390
1391 radeon_vcn_enc_get_param(enc, picture);
1392 if (u_reduce_video_profile(picture->profile) == PIPE_VIDEO_FORMAT_AV1)
1393 dpb_slots = enc->base.max_references + 1;
1394 if (!enc->dpb) {
1395 enc->dpb = CALLOC_STRUCT(rvid_buffer);
1396 setup_dpb(enc, dpb_slots);
1397 if (!enc->dpb ||
1398 !si_vid_create_buffer(enc->screen, enc->dpb, enc->dpb_size, PIPE_USAGE_DEFAULT)) {
1399 RVID_ERR("Can't create DPB buffer.\n");
1400 goto error;
1401 }
1402 }
1403
1404 if ((sscreen->info.vcn_ip_version >= VCN_5_0_0) && enc->metadata_size && !enc->meta) {
1405 enc->meta = CALLOC_STRUCT(rvid_buffer);
1406 if (!enc->meta ||
1407 !si_vid_create_buffer(enc->screen, enc->meta, enc->metadata_size, PIPE_USAGE_DEFAULT)) {
1408 RVID_ERR("Can't create meta buffer.\n");
1409 goto error;
1410 }
1411 }
1412
1413 if (dpb_slots > enc->dpb_slots) {
1414 setup_dpb(enc, dpb_slots);
1415 if (!si_vid_resize_buffer(enc->base.context, &enc->cs, enc->dpb, enc->dpb_size, NULL)) {
1416 RVID_ERR("Can't resize DPB buffer.\n");
1417 goto error;
1418 }
1419 if (sscreen->info.vcn_ip_version >= VCN_5_0_0 && enc->metadata_size &&
1420 !si_vid_resize_buffer(enc->base.context, &enc->cs, enc->meta, enc->metadata_size, NULL)) {
1421 RVID_ERR("Can't resize meta buffer.\n");
1422 goto error;
1423 }
1424 }
1425
1426 /* the QP map (ROI) buffer may be created here; it is released when the encoder is destroyed */
1427 if (enc->enc_pic.enc_qp_map.qp_map_type != RENCODE_QP_MAP_TYPE_NONE) {
1428 if (!enc->roi) {
1429 enc->roi = CALLOC_STRUCT(rvid_buffer);
1430 enc->roi_size = roi_buffer_size(enc);
1431 if (!enc->roi || !enc->roi_size ||
1432 !si_vid_create_buffer(enc->screen, enc->roi, enc->roi_size, PIPE_USAGE_DYNAMIC)) {
1433 RVID_ERR("Can't create ROI buffer.\n");
1434 goto error;
1435 }
1436 }
1437 if (generate_roi_map(enc)) {
1438 RVID_ERR("Can't form roi map.\n");
1439 goto error;
1440 }
1441 }
1442
1443 if (source->buffer_format == PIPE_FORMAT_NV12 ||
1444 source->buffer_format == PIPE_FORMAT_P010 ||
1445 source->buffer_format == PIPE_FORMAT_P016) {
1446 enc->get_buffer(vid_buf->resources[0], &enc->handle, &enc->luma);
1447 enc->get_buffer(vid_buf->resources[1], NULL, &enc->chroma);
1448 }
1449 else {
1450 enc->get_buffer(vid_buf->resources[0], &enc->handle, &enc->luma);
1451 enc->chroma = NULL;
1452 }
1453
1454 enc->need_feedback = false;
1455
1456 if (!enc->stream_handle) {
1457 struct rvid_buffer fb;
1458 enc->stream_handle = si_vid_alloc_stream_handle();
1459 enc->si = CALLOC_STRUCT(rvid_buffer);
1460 if (!enc->si ||
1461 !enc->stream_handle ||
1462 !si_vid_create_buffer(enc->screen, enc->si, 128 * 1024, PIPE_USAGE_DEFAULT)) {
1463 RVID_ERR("Can't create session buffer.\n");
1464 goto error;
1465 }
1466 si_vid_create_buffer(enc->screen, &fb, 4096, PIPE_USAGE_STAGING);
1467 enc->fb = &fb;
1468 enc->begin(enc);
1469 flush(enc, PIPE_FLUSH_ASYNC, NULL);
1470 si_vid_destroy_buffer(&fb);
1471 enc->need_rate_control = false;
1472 enc->need_rc_per_pic = false;
1473 }
1474
1475 return;
1476
1477 error:
1478 RADEON_ENC_DESTROY_VIDEO_BUFFER(enc->dpb);
1479 RADEON_ENC_DESTROY_VIDEO_BUFFER(enc->si);
1480 RADEON_ENC_DESTROY_VIDEO_BUFFER(enc->cdf);
1481 RADEON_ENC_DESTROY_VIDEO_BUFFER(enc->roi);
1482 RADEON_ENC_DESTROY_VIDEO_BUFFER(enc->meta);
1483 }
1484
1485 static uint32_t radeon_vcn_enc_encode_h264_header(struct radeon_encoder *enc,
1486 struct pipe_enc_raw_header *header,
1487 uint8_t *out)
1488 {
1489 switch (header->type) {
1490 case PIPE_H264_NAL_SPS:
1491 return radeon_enc_write_sps(enc, out);
1492 case PIPE_H264_NAL_PPS:
1493 return radeon_enc_write_pps(enc, out);
1494 default:
1495 assert(header->buffer);
1496 memcpy(out, header->buffer, header->size);
1497 return header->size;
1498 }
1499 }
1500
1501 static uint32_t radeon_vcn_enc_encode_hevc_header(struct radeon_encoder *enc,
1502 struct pipe_enc_raw_header *header,
1503 uint8_t *out)
1504 {
1505 switch (header->type) {
1506 case PIPE_H265_NAL_VPS:
1507 return radeon_enc_write_vps(enc, out);
1508 case PIPE_H265_NAL_SPS:
1509 return radeon_enc_write_sps_hevc(enc, out);
1510 case PIPE_H265_NAL_PPS:
1511 return radeon_enc_write_pps_hevc(enc, out);
1512 default:
1513 assert(header->buffer);
1514 memcpy(out, header->buffer, header->size);
1515 return header->size;
1516 }
1517 }
1518
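/* Write the raw non-slice headers supplied by the state tracker directly
 * into the bitstream buffer and record one segment per codec unit; the
 * returned feedback data lets radeon_enc_get_feedback report per-unit
 * offsets later.  Returns NULL when there is nothing to pre-write
 * (no raw headers, or slice headers only). */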
1519 static void *radeon_vcn_enc_encode_headers(struct radeon_encoder *enc)
1520 {
1521 const bool is_h264 = u_reduce_video_profile(enc->base.profile) == PIPE_VIDEO_FORMAT_MPEG4_AVC;
1522 const bool is_hevc = u_reduce_video_profile(enc->base.profile) == PIPE_VIDEO_FORMAT_HEVC;
1523 struct util_dynarray *headers;
1524 unsigned num_slices = 0, num_headers = 0;
1525
1526 if (is_h264)
1527 headers = &enc->enc_pic.h264.desc->raw_headers;
1528 else if (is_hevc)
1529 headers = &enc->enc_pic.hevc.desc->raw_headers;
1530 else
1531 return NULL;
1532
1533 util_dynarray_foreach(headers, struct pipe_enc_raw_header, header) {
1534 if (header->is_slice)
1535 num_slices++;
1536 num_headers++;
1537 }
1538
1539 if (!num_headers || !num_slices || num_headers == num_slices)
1540 return NULL;
1541
1542 size_t segments_size =
1543 sizeof(struct rvcn_enc_output_unit_segment) * (num_headers - num_slices + 1);
1544 struct rvcn_enc_feedback_data *data =
1545 CALLOC_VARIANT_LENGTH_STRUCT(rvcn_enc_feedback_data, segments_size);
1546 if (!data)
1547 return NULL;
1548
1549 uint8_t *ptr = enc->ws->buffer_map(enc->ws, enc->bs_handle, &enc->cs,
1550 PIPE_MAP_WRITE | RADEON_MAP_TEMPORARY);
1551 if (!ptr) {
1552 RVID_ERR("Can't map bs buffer.\n");
1553 return NULL;
1554 }
1555
1556 unsigned offset = 0;
1557 struct rvcn_enc_output_unit_segment *slice_segment = NULL;
1558
1559 util_dynarray_foreach(headers, struct pipe_enc_raw_header, header) {
1560 if (header->is_slice) {
1561 if (slice_segment)
1562 continue;
1563 slice_segment = &data->segments[data->num_segments];
1564 slice_segment->is_slice = true;
1565 } else {
1566 unsigned size = 0;
1567 if (is_h264)
1568 size = radeon_vcn_enc_encode_h264_header(enc, header, ptr + offset);
1569 else if (is_hevc)
1570 size = radeon_vcn_enc_encode_hevc_header(enc, header, ptr + offset);
1571 data->segments[data->num_segments].size = size;
1572 data->segments[data->num_segments].offset = offset;
1573 offset += size;
1574 }
1575 data->num_segments++;
1576 }
1577
1578 enc->bs_offset = align(offset, 16);
1579 assert(enc->bs_offset < enc->bs_size);
1580
1581 assert(slice_segment);
1582 slice_segment->offset = enc->bs_offset;
1583
1584 enc->ws->buffer_unmap(enc->ws, enc->bs_handle);
1585
1586 return data;
1587 }
1588
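/* Encode one frame: bind the output bitstream buffer, allocate the feedback
 * buffer, pre-write the raw headers, optionally attach the statistics
 * buffer, and submit the encode commands. */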
1589 static void radeon_enc_encode_bitstream(struct pipe_video_codec *encoder,
1590 struct pipe_video_buffer *source,
1591 struct pipe_resource *destination, void **fb)
1592 {
1593 struct radeon_encoder *enc = (struct radeon_encoder *)encoder;
1594 struct vl_video_buffer *vid_buf = (struct vl_video_buffer *)source;
1595
1596 enc->get_buffer(destination, &enc->bs_handle, NULL);
1597 enc->bs_size = destination->width0;
1598 enc->bs_offset = 0;
1599
1600 *fb = enc->fb = CALLOC_STRUCT(rvid_buffer);
1601
1602 if (!si_vid_create_buffer(enc->screen, enc->fb, 4096, PIPE_USAGE_STAGING)) {
1603 RVID_ERR("Can't create feedback buffer.\n");
1604 return;
1605 }
1606
1607 enc->fb->user_data = radeon_vcn_enc_encode_headers(enc);
1608
1609 if (vid_buf->base.statistics_data) {
1610 enc->get_buffer(vid_buf->base.statistics_data, &enc->stats, NULL);
1611 if (enc->stats->size < sizeof(rvcn_encode_stats_type_0_t)) {
1612 RVID_ERR("Encoder statistics output buffer is too small.\n");
1613 enc->stats = NULL;
1614 }
1615 vid_buf->base.statistics_data = NULL;
1616 }
1617 else
1618 enc->stats = NULL;
1619
1620 enc->need_feedback = true;
1621 enc->encode(enc);
1622 }
1623
1624 static int radeon_enc_end_frame(struct pipe_video_codec *encoder, struct pipe_video_buffer *source,
1625 struct pipe_picture_desc *picture)
1626 {
1627 struct radeon_encoder *enc = (struct radeon_encoder *)encoder;
1628 return flush(enc, picture->flush_flags, picture->fence);
1629 }
1630
1631 static void radeon_enc_destroy(struct pipe_video_codec *encoder)
1632 {
1633 struct radeon_encoder *enc = (struct radeon_encoder *)encoder;
1634
1635 if (enc->stream_handle) {
1636 struct rvid_buffer fb;
1637 enc->need_feedback = false;
1638 si_vid_create_buffer(enc->screen, &fb, 512, PIPE_USAGE_STAGING);
1639 enc->fb = &fb;
1640 enc->destroy(enc);
1641 flush(enc, PIPE_FLUSH_ASYNC, NULL);
1642 RADEON_ENC_DESTROY_VIDEO_BUFFER(enc->si);
1643 si_vid_destroy_buffer(&fb);
1644 }
1645
1646 RADEON_ENC_DESTROY_VIDEO_BUFFER(enc->dpb);
1647 RADEON_ENC_DESTROY_VIDEO_BUFFER(enc->cdf);
1648 RADEON_ENC_DESTROY_VIDEO_BUFFER(enc->roi);
1649 RADEON_ENC_DESTROY_VIDEO_BUFFER(enc->meta);
1650 enc->ws->cs_destroy(&enc->cs);
1651 if (enc->ectx)
1652 enc->ectx->destroy(enc->ectx);
1653
1654 FREE(enc);
1655 }
1656
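/* Report the encoded size and the codec unit locations.  The dword indices
 * read below index into the firmware's feedback structure; segments recorded
 * by radeon_vcn_enc_encode_headers are returned as CODEC_UNIT_LOCATION
 * metadata. */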
1657 static void radeon_enc_get_feedback(struct pipe_video_codec *encoder, void *feedback,
1658 unsigned *size, struct pipe_enc_feedback_metadata *metadata)
1659 {
1660 struct radeon_encoder *enc = (struct radeon_encoder *)encoder;
1661 struct rvid_buffer *fb = feedback;
1662
1663 if (size) {
1664 uint32_t *ptr = enc->ws->buffer_map(enc->ws, fb->res->buf, &enc->cs,
1665 PIPE_MAP_READ_WRITE | RADEON_MAP_TEMPORARY);
1666 if (ptr[1])
1667 *size = ptr[6] - ptr[8];
1668 else
1669 *size = 0;
1670 enc->ws->buffer_unmap(enc->ws, fb->res->buf);
1671 }
1672
1673 metadata->present_metadata = PIPE_VIDEO_FEEDBACK_METADATA_TYPE_CODEC_UNIT_LOCATION;
1674
1675 if (fb->user_data) {
1676 struct rvcn_enc_feedback_data *data = fb->user_data;
1677 metadata->codec_unit_metadata_count = data->num_segments;
1678 for (unsigned i = 0; i < data->num_segments; i++) {
1679 metadata->codec_unit_metadata[i].offset = data->segments[i].offset;
1680 if (data->segments[i].is_slice) {
1681 metadata->codec_unit_metadata[i].size = *size;
1682 metadata->codec_unit_metadata[i].flags = 0;
1683 } else {
1684 metadata->codec_unit_metadata[i].size = data->segments[i].size;
1685 metadata->codec_unit_metadata[i].flags = PIPE_VIDEO_CODEC_UNIT_LOCATION_FLAG_SINGLE_NALU;
1686 }
1687 }
1688 FREE(fb->user_data);
1689 fb->user_data = NULL;
1690 } else {
1691 metadata->codec_unit_metadata_count = 1;
1692 metadata->codec_unit_metadata[0].offset = 0;
1693 metadata->codec_unit_metadata[0].size = *size;
1694 metadata->codec_unit_metadata[0].flags = 0;
1695 }
1696
1697 RADEON_ENC_DESTROY_VIDEO_BUFFER(fb);
1698 }
1699
1700 static int radeon_enc_get_feedback_fence(struct pipe_video_codec *encoder,
1701 struct pipe_fence_handle *fence,
1702 uint64_t timeout)
1703 {
1704 struct radeon_encoder *enc = (struct radeon_encoder *)encoder;
1705
1706 return enc->ws->fence_wait(enc->ws, fence, timeout);
1707 }
1708
1709 static void radeon_enc_destroy_fence(struct pipe_video_codec *encoder,
1710 struct pipe_fence_handle *fence)
1711 {
1712 struct radeon_encoder *enc = (struct radeon_encoder *)encoder;
1713
1714 enc->ws->fence_reference(enc->ws, &fence, NULL);
1715 }
1716
1717 struct pipe_video_codec *radeon_create_encoder(struct pipe_context *context,
1718 const struct pipe_video_codec *templ,
1719 struct radeon_winsys *ws,
1720 radeon_enc_get_buffer get_buffer)
1721 {
1722 struct si_screen *sscreen = (struct si_screen *)context->screen;
1723 struct si_context *sctx = (struct si_context *)context;
1724 struct radeon_encoder *enc;
1725
1726 enc = CALLOC_STRUCT(radeon_encoder);
1727
1728 if (!enc)
1729 return NULL;
1730
1731 if (sctx->vcn_has_ctx) {
1732 enc->ectx = pipe_create_multimedia_context(context->screen);
1733 if (!enc->ectx)
1734 sctx->vcn_has_ctx = false;
1735 }
1736
1737 enc->alignment = 256;
1738 enc->base = *templ;
1739 enc->base.context = (sctx->vcn_has_ctx)? enc->ectx : context;
1740 enc->base.destroy = radeon_enc_destroy;
1741 enc->base.begin_frame = radeon_enc_begin_frame;
1742 enc->base.encode_bitstream = radeon_enc_encode_bitstream;
1743 enc->base.end_frame = radeon_enc_end_frame;
1744 enc->base.flush = radeon_enc_flush;
1745 enc->base.get_feedback = radeon_enc_get_feedback;
1746 enc->base.get_feedback_fence = radeon_enc_get_feedback_fence;
1747 enc->base.destroy_fence = radeon_enc_destroy_fence;
1748 enc->get_buffer = get_buffer;
1749 enc->bits_in_shifter = 0;
1750 enc->screen = context->screen;
1751 enc->ws = ws;
1752
1753 if (!ws->cs_create(&enc->cs,
1754 (sctx->vcn_has_ctx) ? ((struct si_context *)enc->ectx)->ctx : sctx->ctx,
1755 AMD_IP_VCN_ENC, radeon_enc_cs_flush, enc)) {
1756 RVID_ERR("Can't get command submission context.\n");
1757 goto error;
1758 }
1759
1760 enc->enc_pic.use_rc_per_pic_ex = false;
1761
1762 if (sscreen->info.vcn_ip_version >= VCN_5_0_0) {
1763 radeon_enc_5_0_init(enc);
1764 if (sscreen->info.vcn_ip_version == VCN_5_0_0) {
1765 /* limit the tile splitting scheme to the legacy method */
1766 enc->enc_pic.av1_tile_splitting_legacy_flag = true;
1767 }
1768 }
1769 else if (sscreen->info.vcn_ip_version >= VCN_4_0_0) {
1770 if (sscreen->info.vcn_enc_minor_version >= 1)
1771 enc->enc_pic.use_rc_per_pic_ex = true;
1772 radeon_enc_4_0_init(enc);
1773 }
1774 else if (sscreen->info.vcn_ip_version >= VCN_3_0_0) {
1775 if (sscreen->info.vcn_enc_minor_version >= 24)
1776 enc->enc_pic.use_rc_per_pic_ex = true;
1777 radeon_enc_3_0_init(enc);
1778 }
1779 else if (sscreen->info.vcn_ip_version >= VCN_2_0_0) {
1780 if (sscreen->info.vcn_enc_minor_version >= 18)
1781 enc->enc_pic.use_rc_per_pic_ex = true;
1782 radeon_enc_2_0_init(enc);
1783 }
1784 else {
1785 if (sscreen->info.vcn_enc_minor_version >= 15)
1786 enc->enc_pic.use_rc_per_pic_ex = true;
1787 radeon_enc_1_2_init(enc);
1788 }
1789
1790 return &enc->base;
1791
1792 error:
1793 enc->ws->cs_destroy(&enc->cs);
1794 FREE(enc);
1795 return NULL;
1796 }
1797
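/* Register a buffer with the command stream and emit its GPU virtual
 * address at the given offset (high dword first, then low dword). */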
1798 void radeon_enc_add_buffer(struct radeon_encoder *enc, struct pb_buffer_lean *buf,
1799 unsigned usage, enum radeon_bo_domain domain, signed offset)
1800 {
1801 enc->ws->cs_add_buffer(&enc->cs, buf, usage | RADEON_USAGE_SYNCHRONIZED, domain);
1802 uint64_t addr;
1803 addr = enc->ws->buffer_get_virtual_address(buf);
1804 addr = addr + offset;
1805 RADEON_ENC_CS(addr >> 32);
1806 RADEON_ENC_CS(addr);
1807 }
1808
1809 void radeon_enc_set_emulation_prevention(struct radeon_encoder *enc, bool set)
1810 {
1811 if (set != enc->emulation_prevention) {
1812 enc->emulation_prevention = set;
1813 enc->num_zeros = 0;
1814 }
1815 }
1816
1817 void radeon_enc_set_output_buffer(struct radeon_encoder *enc, uint8_t *buffer)
1818 {
1819 enc->bits_buf = buffer;
1820 enc->bits_buf_pos = 0;
1821 }
1822
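/* Emit one header byte: either append it to the external buffer set by
 * radeon_enc_set_output_buffer, or pack it MSB-first into the current
 * command stream dword. */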
1823 void radeon_enc_output_one_byte(struct radeon_encoder *enc, unsigned char byte)
1824 {
1825 if (enc->bits_buf) {
1826 enc->bits_buf[enc->bits_buf_pos++] = byte;
1827 return;
1828 }
1829
1830 if (enc->byte_index == 0)
1831 enc->cs.current.buf[enc->cs.current.cdw] = 0;
1832 enc->cs.current.buf[enc->cs.current.cdw] |=
1833 ((unsigned int)(byte) << index_to_shifts[enc->byte_index]);
1834 enc->byte_index++;
1835
1836 if (enc->byte_index >= 4) {
1837 enc->byte_index = 0;
1838 enc->cs.current.cdw++;
1839 }
1840 }
1841
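/* Insert the 0x03 emulation prevention byte after two consecutive zero
 * bytes whenever the next byte is 0x00..0x03, as required by the H.264/HEVC
 * byte stream format. */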
1842 void radeon_enc_emulation_prevention(struct radeon_encoder *enc, unsigned char byte)
1843 {
1844 if (enc->emulation_prevention) {
1845 if ((enc->num_zeros >= 2) && ((byte == 0x00) || (byte == 0x01) ||
1846 (byte == 0x02) || (byte == 0x03))) {
1847 radeon_enc_output_one_byte(enc, 0x03);
1848 enc->bits_output += 8;
1849 enc->num_zeros = 0;
1850 }
1851 enc->num_zeros = (byte == 0 ? (enc->num_zeros + 1) : 0);
1852 }
1853 }
1854
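/* Pack num_bits bits of value MSB-first through a 32-bit shifter; each
 * completed byte goes through emulation prevention and is written out. */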
1855 void radeon_enc_code_fixed_bits(struct radeon_encoder *enc, unsigned int value,
1856 unsigned int num_bits)
1857 {
1858 unsigned int bits_to_pack = 0;
1859 enc->bits_size += num_bits;
1860
1861 while (num_bits > 0) {
1862 unsigned int value_to_pack = value & (0xffffffff >> (32 - num_bits));
1863 bits_to_pack =
1864 num_bits > (32 - enc->bits_in_shifter) ? (32 - enc->bits_in_shifter) : num_bits;
1865
1866 if (bits_to_pack < num_bits)
1867 value_to_pack = value_to_pack >> (num_bits - bits_to_pack);
1868
1869 enc->shifter |= value_to_pack << (32 - enc->bits_in_shifter - bits_to_pack);
1870 num_bits -= bits_to_pack;
1871 enc->bits_in_shifter += bits_to_pack;
1872
1873 while (enc->bits_in_shifter >= 8) {
1874 unsigned char output_byte = (unsigned char)(enc->shifter >> 24);
1875 enc->shifter <<= 8;
1876 radeon_enc_emulation_prevention(enc, output_byte);
1877 radeon_enc_output_one_byte(enc, output_byte);
1878 enc->bits_in_shifter -= 8;
1879 enc->bits_output += 8;
1880 }
1881 }
1882 }
1883
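/* AV1 uvlc(): emit (num_bits - 1) leading zeros, a one bit, then the low
 * (num_bits - 1) bits of value + 1.  For example, value = 2 gives
 * value + 1 = 3 (two bits) and is emitted as "011". */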
1884 void radeon_enc_code_uvlc(struct radeon_encoder *enc, unsigned int value)
1885 {
1886 uint32_t num_bits = 0;
1887 uint64_t value_plus1 = (uint64_t)value + 1;
1888 uint32_t num_leading_zeros = 0;
1889
1890 while ((uint64_t)1 << num_bits <= value_plus1)
1891 num_bits++;
1892
1893 num_leading_zeros = num_bits - 1;
1894 radeon_enc_code_fixed_bits(enc, 0, num_leading_zeros);
1895 radeon_enc_code_fixed_bits(enc, 1, 1);
1896 radeon_enc_code_fixed_bits(enc, (uint32_t)value_plus1, num_leading_zeros);
1897 }
1898
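/* Write value as a fixed-length leb128 of num_bytes bytes: 7 payload bits
 * per byte, continuation bit 0x80 on every byte except the last.  For
 * example, value = 300 with num_bytes = 2 is written as 0xac 0x02. */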
1899 void radeon_enc_code_leb128(uint8_t *buf, uint32_t value,
1900 uint32_t num_bytes)
1901 {
1902 uint8_t leb128_byte = 0;
1903 uint32_t i = 0;
1904
1905 do {
1906 leb128_byte = (value & 0x7f);
1907 value >>= 7;
1908 if (num_bytes > 1)
1909 leb128_byte |= 0x80;
1910
1911 *(buf + i) = leb128_byte;
1912 num_bytes--;
1913 i++;
1914 } while((leb128_byte & 0x80));
1915 }
1916
1917 void radeon_enc_reset(struct radeon_encoder *enc)
1918 {
1919 enc->emulation_prevention = false;
1920 enc->shifter = 0;
1921 enc->bits_in_shifter = 0;
1922 enc->bits_output = 0;
1923 enc->num_zeros = 0;
1924 enc->byte_index = 0;
1925 enc->bits_size = 0;
1926 enc->bits_buf = NULL;
1927 enc->bits_buf_pos = 0;
1928 }
1929
1930 void radeon_enc_byte_align(struct radeon_encoder *enc)
1931 {
1932 unsigned int num_padding_zeros = (32 - enc->bits_in_shifter) % 8;
1933
1934 if (num_padding_zeros > 0)
1935 radeon_enc_code_fixed_bits(enc, 0, num_padding_zeros);
1936 }
1937
1938 void radeon_enc_flush_headers(struct radeon_encoder *enc)
1939 {
1940 if (enc->bits_in_shifter != 0) {
1941 unsigned char output_byte = (unsigned char)(enc->shifter >> 24);
1942 radeon_enc_emulation_prevention(enc, output_byte);
1943 radeon_enc_output_one_byte(enc, output_byte);
1944 enc->bits_output += enc->bits_in_shifter;
1945 enc->shifter = 0;
1946 enc->bits_in_shifter = 0;
1947 enc->num_zeros = 0;
1948 }
1949
1950 if (enc->byte_index > 0) {
1951 enc->cs.current.cdw++;
1952 enc->byte_index = 0;
1953 }
1954 }
1955
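/* H.264/HEVC ue(v) Exp-Golomb coding.  For example, value = 3 gives
 * ue_code = 4 (three bits), emitted as two leading zeros followed by "100",
 * i.e. "00100". */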
1956 void radeon_enc_code_ue(struct radeon_encoder *enc, unsigned int value)
1957 {
1958 unsigned int x = 0;
1959 unsigned int ue_code = value + 1;
1960 value += 1;
1961
1962 while (value) {
1963 value = (value >> 1);
1964 x += 1;
1965 }
1966
1967 if (x > 1)
1968 radeon_enc_code_fixed_bits(enc, 0, x - 1);
1969 radeon_enc_code_fixed_bits(enc, ue_code, x);
1970 }
1971
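/* H.264/HEVC se(v): map the signed value to an unsigned code
 * (v > 0 -> 2v - 1, v < 0 -> -2v, 0 -> 0) and encode it with ue(v). */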
1972 void radeon_enc_code_se(struct radeon_encoder *enc, int value)
1973 {
1974 unsigned int v = 0;
1975
1976 if (value != 0)
1977 v = (value < 0 ? ((unsigned int)(0 - value) << 1) : (((unsigned int)(value) << 1) - 1));
1978
1979 radeon_enc_code_ue(enc, v);
1980 }
1981
1982 unsigned int radeon_enc_av1_tile_log2(unsigned int blk_size, unsigned int max)
1983 {
1984 unsigned int k;
1985
1986 assert(blk_size);
1987 for (k = 0; (blk_size << k) < max; k++) {}
1988
1989 return k;
1990 }
1991
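/* AV1 ns(n) coding of a value in [0, max).  With w = bits(max) and
 * m = (1 << w) - max, values below m use w - 1 bits, all others use w bits.
 * For example, max = 6 (w = 3, m = 2): value 1 -> "01", value 4 -> "110". */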
1992 void radeon_enc_code_ns(struct radeon_encoder *enc, unsigned int value, unsigned int max)
1993 {
1994 unsigned w = 0;
1995 unsigned m;
1996 unsigned max_num = max;
1997
1998 while (max_num) {
1999 max_num >>= 1;
2000 w++;
2001 }
2002
2003 m = (1 << w) - max;
2004
2005 assert(w > 1);
2006
2007 if (value < m)
2008 radeon_enc_code_fixed_bits(enc, value, (w - 1));
2009 else {
2010 unsigned diff = value - m;
2011 unsigned out = (((diff >> 1) + m) << 1) | (diff & 0x1);
2012 radeon_enc_code_fixed_bits(enc, out, w);
2013 }
2014 }
2015
2016 /* dummy function for re-using the same pipeline */
2017 void radeon_enc_dummy(struct radeon_encoder *enc) {}
2018
2019 /* this function must be paired with an AV1 header copy instruction; it is called at the end to patch the instruction size */
2020 static void radeon_enc_av1_bs_copy_end(struct radeon_encoder *enc, uint32_t bits)
2021 {
2022 assert(bits > 0);
2023 /* it must be dword aligned at the end */
2024 *enc->enc_pic.copy_start = DIV_ROUND_UP(bits, 32) * 4 + 12;
2025 *(enc->enc_pic.copy_start + 2) = bits;
2026 }
2027
2028 /* av1 bitstream instruction type */
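/* Start a new bitstream instruction packet: close any pending copy packet,
 * reserve the size dword, and for OBU_START also emit the OBU type.  Copy
 * packets have their size and bit count patched later by
 * radeon_enc_av1_bs_copy_end(). */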
2029 void radeon_enc_av1_bs_instruction_type(struct radeon_encoder *enc,
2030 uint32_t inst,
2031 uint32_t obu_type)
2032 {
2033 radeon_enc_flush_headers(enc);
2034
2035 if (enc->bits_output)
2036 radeon_enc_av1_bs_copy_end(enc, enc->bits_output);
2037
2038 enc->enc_pic.copy_start = &enc->cs.current.buf[enc->cs.current.cdw++];
2039 RADEON_ENC_CS(inst);
2040
2041 if (inst != RENCODE_HEADER_INSTRUCTION_COPY) {
2042 *enc->enc_pic.copy_start = 8;
2043 if (inst == RENCODE_AV1_BITSTREAM_INSTRUCTION_OBU_START) {
2044 *enc->enc_pic.copy_start += 4;
2045 RADEON_ENC_CS(obu_type);
2046 }
2047 } else
2048 RADEON_ENC_CS(0); /* allocate a dword for number of bits */
2049
2050 radeon_enc_reset(enc);
2051 }
2052
2053 uint32_t radeon_enc_value_bits(uint32_t value)
2054 {
2055 uint32_t i = 1;
2056
2057 while (value > 1) {
2058 i++;
2059 value >>= 1;
2060 }
2061
2062 return i;
2063 }
2064