1 // SPDX-License-Identifier: (GPL-2.0 OR BSD-3-Clause)
2 /*
3  * Wave5 series multi-standard codec IP - helper functions
4  *
5  * Copyright (C) 2021-2023 CHIPS&MEDIA INC
6  */
7 
8 #include <linux/bug.h>
9 #include <linux/pm_runtime.h>
10 #include <linux/delay.h>
11 #include "wave5-vpuapi.h"
12 #include "wave5-regdefine.h"
13 #include "wave5.h"
14 
15 #define DECODE_ALL_TEMPORAL_LAYERS 0
16 #define DECODE_ALL_SPATIAL_LAYERS 0
17 
/*
 * Load the given firmware image into the VPU and bring it up.
 *
 * If the VPU was already initialized, re-initialize it with the new
 * firmware and report -EBUSY so the caller knows it was running.
 */
static int wave5_initialize_vpu(struct device *dev, u8 *code, size_t size)
{
	struct vpu_device *vpu_dev = dev_get_drvdata(dev);
	int ret;

	ret = mutex_lock_interruptible(&vpu_dev->hw_lock);
	if (ret)
		return ret;

	if (wave5_vpu_is_init(vpu_dev)) {
		wave5_vpu_re_init(dev, (void *)code, size);
		ret = -EBUSY;
	} else {
		ret = wave5_vpu_reset(dev, SW_RESET_ON_BOOT);
		if (!ret)
			ret = wave5_vpu_init(dev, (void *)code, size);
	}

	mutex_unlock(&vpu_dev->hw_lock);
	return ret;
}
43 
/* Validate the firmware image arguments, then hand off to the loader. */
int wave5_vpu_init_with_bitcode(struct device *dev, u8 *bitcode, size_t size)
{
	if (!bitcode || !size)
		return -EINVAL;

	return wave5_initialize_vpu(dev, bitcode, size);
}
51 
/*
 * wave5_vpu_flush_instance() - flush all pending work of an instance
 * @inst: the decoder or encoder instance to flush
 *
 * Re-issues the FLUSH command while the firmware reports -EBUSY. While
 * busy, any already-decoded display frame is handed back to the firmware
 * so the flush can make progress. Gives up with -ETIMEDOUT after
 * MAX_FIRMWARE_CALL_RETRY attempts. Returns 0 or a negative errno.
 */
int wave5_vpu_flush_instance(struct vpu_instance *inst)
{
	int ret = 0;
	int retry = 0;

	ret = mutex_lock_interruptible(&inst->dev->hw_lock);
	if (ret)
		return ret;
	do {
		/*
		 * Repeat the FLUSH command until the firmware reports that the
		 * VPU isn't running anymore
		 */
		ret = wave5_vpu_hw_flush_instance(inst);
		if (ret < 0 && ret != -EBUSY) {
			dev_warn(inst->dev->dev, "Flush of %s instance with id: %d fail: %d\n",
				 inst->type == VPU_INST_TYPE_DEC ? "DECODER" : "ENCODER", inst->id,
				 ret);
			mutex_unlock(&inst->dev->hw_lock);
			return ret;
		}
		if (ret == -EBUSY && retry++ >= MAX_FIRMWARE_CALL_RETRY) {
			dev_warn(inst->dev->dev, "Flush of %s instance with id: %d timed out!\n",
				 inst->type == VPU_INST_TYPE_DEC ? "DECODER" : "ENCODER", inst->id);
			mutex_unlock(&inst->dev->hw_lock);
			return -ETIMEDOUT;
		} else if (ret == -EBUSY) {
			struct dec_output_info dec_info;

			/*
			 * Drop hw_lock before the call below:
			 * wave5_vpu_dec_get_output_info() takes it itself.
			 */
			mutex_unlock(&inst->dev->hw_lock);
			wave5_vpu_dec_get_output_info(inst, &dec_info);
			ret = mutex_lock_interruptible(&inst->dev->hw_lock);
			if (ret)
				return ret;
			/* Return the displayable frame so the VPU can drain. */
			if (dec_info.index_frame_display > 0)
				wave5_vpu_dec_set_disp_flag(inst, dec_info.index_frame_display);
		}
	} while (ret != 0);
	mutex_unlock(&inst->dev->hw_lock);

	return ret;
}
94 
/*
 * Report the firmware revision and (optionally) the product id.
 * Fails with -EINVAL if the VPU has not been initialized yet.
 */
int wave5_vpu_get_version_info(struct device *dev, u32 *revision, unsigned int *product_id)
{
	struct vpu_device *vpu_dev = dev_get_drvdata(dev);
	int ret;

	ret = mutex_lock_interruptible(&vpu_dev->hw_lock);
	if (ret)
		return ret;

	if (wave5_vpu_is_init(vpu_dev)) {
		if (product_id)
			*product_id = vpu_dev->product;
		ret = wave5_vpu_get_version(vpu_dev, revision);
	} else {
		ret = -EINVAL;
	}

	mutex_unlock(&vpu_dev->hw_lock);
	return ret;
}
117 
wave5_check_dec_open_param(struct vpu_instance * inst,struct dec_open_param * param)118 static int wave5_check_dec_open_param(struct vpu_instance *inst, struct dec_open_param *param)
119 {
120 	if (inst->id >= MAX_NUM_INSTANCE) {
121 		dev_err(inst->dev->dev, "Too many simultaneous instances: %d (max: %u)\n",
122 			inst->id, MAX_NUM_INSTANCE);
123 		return -EOPNOTSUPP;
124 	}
125 
126 	if (param->bitstream_buffer % 8) {
127 		dev_err(inst->dev->dev,
128 			"Bitstream buffer must be aligned to a multiple of 8\n");
129 		return -EINVAL;
130 	}
131 
132 	if (param->bitstream_buffer_size % 1024 ||
133 	    param->bitstream_buffer_size < MIN_BITSTREAM_BUFFER_SIZE) {
134 		dev_err(inst->dev->dev,
135 			"Bitstream buffer size must be aligned to a multiple of 1024 and have a minimum size of %d\n",
136 			MIN_BITSTREAM_BUFFER_SIZE);
137 		return -EINVAL;
138 	}
139 
140 	return 0;
141 }
142 
wave5_vpu_dec_open(struct vpu_instance * inst,struct dec_open_param * open_param)143 int wave5_vpu_dec_open(struct vpu_instance *inst, struct dec_open_param *open_param)
144 {
145 	struct dec_info *p_dec_info;
146 	int ret;
147 	struct vpu_device *vpu_dev = inst->dev;
148 	dma_addr_t buffer_addr;
149 	size_t buffer_size;
150 
151 	ret = wave5_check_dec_open_param(inst, open_param);
152 	if (ret)
153 		return ret;
154 
155 	ret = mutex_lock_interruptible(&vpu_dev->hw_lock);
156 	if (ret)
157 		return ret;
158 
159 	if (!wave5_vpu_is_init(vpu_dev)) {
160 		mutex_unlock(&vpu_dev->hw_lock);
161 		return -ENODEV;
162 	}
163 
164 	p_dec_info = &inst->codec_info->dec_info;
165 	memcpy(&p_dec_info->open_param, open_param, sizeof(struct dec_open_param));
166 
167 	buffer_addr = open_param->bitstream_buffer;
168 	buffer_size = open_param->bitstream_buffer_size;
169 	p_dec_info->stream_wr_ptr = buffer_addr;
170 	p_dec_info->stream_rd_ptr = buffer_addr;
171 	p_dec_info->stream_buf_start_addr = buffer_addr;
172 	p_dec_info->stream_buf_size = buffer_size;
173 	p_dec_info->stream_buf_end_addr = buffer_addr + buffer_size;
174 	p_dec_info->reorder_enable = TRUE;
175 	p_dec_info->temp_id_select_mode = TEMPORAL_ID_MODE_ABSOLUTE;
176 	p_dec_info->target_temp_id = DECODE_ALL_TEMPORAL_LAYERS;
177 	p_dec_info->target_spatial_id = DECODE_ALL_SPATIAL_LAYERS;
178 
179 	ret = wave5_vpu_build_up_dec_param(inst, open_param);
180 	mutex_unlock(&vpu_dev->hw_lock);
181 
182 	return ret;
183 }
184 
reset_auxiliary_buffers(struct vpu_instance * inst,unsigned int index)185 static int reset_auxiliary_buffers(struct vpu_instance *inst, unsigned int index)
186 {
187 	struct dec_info *p_dec_info = &inst->codec_info->dec_info;
188 
189 	if (index >= MAX_REG_FRAME)
190 		return 1;
191 
192 	if (p_dec_info->vb_mv[index].size == 0 && p_dec_info->vb_fbc_y_tbl[index].size == 0 &&
193 	    p_dec_info->vb_fbc_c_tbl[index].size == 0)
194 		return 1;
195 
196 	wave5_vdi_free_dma_memory(inst->dev, &p_dec_info->vb_mv[index]);
197 	wave5_vdi_free_dma_memory(inst->dev, &p_dec_info->vb_fbc_y_tbl[index]);
198 	wave5_vdi_free_dma_memory(inst->dev, &p_dec_info->vb_fbc_c_tbl[index]);
199 
200 	return 0;
201 }
202 
/*
 * wave5_vpu_dec_close() - tear down a decoder instance
 * @inst: the decoder instance to close
 * @fail_res: set to the firmware failure reason (0 on success)
 *
 * Sends FINISH_SEQ to the firmware, retrying while the VPU reports it is
 * still running, then releases the work/auxiliary/task DMA buffers.
 * Returns 0 on success or a negative errno.
 */
int wave5_vpu_dec_close(struct vpu_instance *inst, u32 *fail_res)
{
	struct dec_info *p_dec_info = &inst->codec_info->dec_info;
	int ret;
	int retry = 0;
	struct vpu_device *vpu_dev = inst->dev;
	int i;
	int inst_count = 0;
	struct vpu_instance *inst_elm;

	*fail_res = 0;
	if (!inst->codec_info)
		return -EINVAL;

	/* Keep the device powered for the duration of the teardown. */
	pm_runtime_resume_and_get(inst->dev->dev);

	ret = mutex_lock_interruptible(&vpu_dev->hw_lock);
	if (ret) {
		pm_runtime_put_sync(inst->dev->dev);
		return ret;
	}

	do {
		ret = wave5_vpu_dec_finish_seq(inst, fail_res);
		if (ret < 0 && *fail_res != WAVE5_SYSERR_VPU_STILL_RUNNING) {
			dev_warn(inst->dev->dev, "dec_finish_seq timed out\n");
			goto unlock_and_return;
		}

		/* Firmware still busy: retry a bounded number of times. */
		if (*fail_res == WAVE5_SYSERR_VPU_STILL_RUNNING &&
		    retry++ >= MAX_FIRMWARE_CALL_RETRY) {
			ret = -ETIMEDOUT;
			goto unlock_and_return;
		}
	} while (ret != 0);

	dev_dbg(inst->dev->dev, "%s: dec_finish_seq complete\n", __func__);

	wave5_vdi_free_dma_memory(vpu_dev, &p_dec_info->vb_work);

	/*
	 * Free the per-frame auxiliary buffers; the helper returns non-zero
	 * at the first index with nothing allocated, which ends the loop.
	 */
	for (i = 0 ; i < MAX_REG_FRAME; i++) {
		ret = reset_auxiliary_buffers(inst, i);
		if (ret) {
			ret = 0;
			break;
		}
	}

	wave5_vdi_free_dma_memory(vpu_dev, &p_dec_info->vb_task);

	/* If this was the last instance, stop using autosuspend. */
	list_for_each_entry(inst_elm, &vpu_dev->instances, list)
		inst_count++;
	if (inst_count == 1)
		pm_runtime_dont_use_autosuspend(vpu_dev->dev);

unlock_and_return:
	mutex_unlock(&vpu_dev->hw_lock);
	pm_runtime_put_sync(inst->dev->dev);
	return ret;
}
263 
wave5_vpu_dec_issue_seq_init(struct vpu_instance * inst)264 int wave5_vpu_dec_issue_seq_init(struct vpu_instance *inst)
265 {
266 	int ret;
267 	struct vpu_device *vpu_dev = inst->dev;
268 
269 	ret = mutex_lock_interruptible(&vpu_dev->hw_lock);
270 	if (ret)
271 		return ret;
272 
273 	ret = wave5_vpu_dec_init_seq(inst);
274 
275 	mutex_unlock(&vpu_dev->hw_lock);
276 
277 	return ret;
278 }
279 
wave5_vpu_dec_complete_seq_init(struct vpu_instance * inst,struct dec_initial_info * info)280 int wave5_vpu_dec_complete_seq_init(struct vpu_instance *inst, struct dec_initial_info *info)
281 {
282 	struct dec_info *p_dec_info = &inst->codec_info->dec_info;
283 	int ret;
284 	struct vpu_device *vpu_dev = inst->dev;
285 
286 	ret = mutex_lock_interruptible(&vpu_dev->hw_lock);
287 	if (ret)
288 		return ret;
289 
290 	ret = wave5_vpu_dec_get_seq_info(inst, info);
291 	if (!ret)
292 		p_dec_info->initial_info_obtained = true;
293 
294 	info->rd_ptr = wave5_dec_get_rd_ptr(inst);
295 	info->wr_ptr = p_dec_info->stream_wr_ptr;
296 
297 	p_dec_info->initial_info = *info;
298 
299 	mutex_unlock(&vpu_dev->hw_lock);
300 
301 	return ret;
302 }
303 
wave5_vpu_dec_register_frame_buffer_ex(struct vpu_instance * inst,int num_of_decoding_fbs,int num_of_display_fbs,int stride,int height)304 int wave5_vpu_dec_register_frame_buffer_ex(struct vpu_instance *inst, int num_of_decoding_fbs,
305 					   int num_of_display_fbs, int stride, int height)
306 {
307 	struct dec_info *p_dec_info;
308 	int ret;
309 	struct vpu_device *vpu_dev = inst->dev;
310 	struct frame_buffer *fb;
311 
312 	if (num_of_decoding_fbs >= WAVE5_MAX_FBS || num_of_display_fbs >= WAVE5_MAX_FBS)
313 		return -EINVAL;
314 
315 	p_dec_info = &inst->codec_info->dec_info;
316 	p_dec_info->num_of_decoding_fbs = num_of_decoding_fbs;
317 	p_dec_info->num_of_display_fbs = num_of_display_fbs;
318 	p_dec_info->stride = stride;
319 
320 	if (!p_dec_info->initial_info_obtained)
321 		return -EINVAL;
322 
323 	if (stride < p_dec_info->initial_info.pic_width || (stride % 8 != 0) ||
324 	    height < p_dec_info->initial_info.pic_height)
325 		return -EINVAL;
326 
327 	ret = mutex_lock_interruptible(&vpu_dev->hw_lock);
328 	if (ret)
329 		return ret;
330 
331 	fb = inst->frame_buf;
332 	ret = wave5_vpu_dec_register_framebuffer(inst, &fb[p_dec_info->num_of_decoding_fbs],
333 						 LINEAR_FRAME_MAP, p_dec_info->num_of_display_fbs);
334 	if (ret)
335 		goto err_out;
336 
337 	ret = wave5_vpu_dec_register_framebuffer(inst, &fb[0], COMPRESSED_FRAME_MAP,
338 						 p_dec_info->num_of_decoding_fbs);
339 
340 err_out:
341 	mutex_unlock(&vpu_dev->hw_lock);
342 
343 	return ret;
344 }
345 
/*
 * Report the current read/write pointers of the bitstream ring buffer and
 * how many bytes can still be written without overtaking the read pointer
 * (one byte is kept free to distinguish "full" from "empty").
 */
int wave5_vpu_dec_get_bitstream_buffer(struct vpu_instance *inst, dma_addr_t *prd_ptr,
				       dma_addr_t *pwr_ptr, size_t *size)
{
	struct vpu_device *vpu_dev = inst->dev;
	struct dec_info *p_dec_info = &inst->codec_info->dec_info;
	dma_addr_t rd_ptr, wr_ptr;
	int free_bytes;
	int ret;

	ret = mutex_lock_interruptible(&vpu_dev->hw_lock);
	if (ret)
		return ret;
	rd_ptr = wave5_dec_get_rd_ptr(inst);
	mutex_unlock(&vpu_dev->hw_lock);

	wr_ptr = p_dec_info->stream_wr_ptr;

	if (wr_ptr < rd_ptr)
		free_bytes = rd_ptr - wr_ptr;
	else
		free_bytes = (p_dec_info->stream_buf_end_addr - wr_ptr) +
			(rd_ptr - p_dec_info->stream_buf_start_addr);
	free_bytes--;

	if (prd_ptr)
		*prd_ptr = rd_ptr;
	if (pwr_ptr)
		*pwr_ptr = wr_ptr;
	if (size)
		*size = free_bytes;

	return 0;
}
382 
/*
 * wave5_vpu_dec_update_bitstream_buffer() - report newly written bitstream
 * @inst: decoder instance
 * @size: number of bytes appended at the write pointer; 0 signals
 *        end-of-stream to the firmware
 *
 * Advances the ring-buffer write pointer by @size (with wrap-around) and
 * notifies the firmware. Returns 0 or a negative errno.
 */
int wave5_vpu_dec_update_bitstream_buffer(struct vpu_instance *inst, size_t size)
{
	struct dec_info *p_dec_info;
	dma_addr_t wr_ptr;
	dma_addr_t rd_ptr;
	int ret;
	struct vpu_device *vpu_dev = inst->dev;

	if (!inst->codec_info)
		return -EINVAL;

	p_dec_info = &inst->codec_info->dec_info;
	wr_ptr = p_dec_info->stream_wr_ptr;
	rd_ptr = p_dec_info->stream_rd_ptr;

	if (size > 0) {
		/* The new data must not overrun the read pointer. */
		if (wr_ptr < rd_ptr && rd_ptr <= wr_ptr + size)
			return -EINVAL;

		wr_ptr += size;

		/* Wrap the write pointer at the end of the ring buffer. */
		if (wr_ptr > p_dec_info->stream_buf_end_addr) {
			u32 room = wr_ptr - p_dec_info->stream_buf_end_addr;

			wr_ptr = p_dec_info->stream_buf_start_addr;
			wr_ptr += room;
		} else if (wr_ptr == p_dec_info->stream_buf_end_addr) {
			wr_ptr = p_dec_info->stream_buf_start_addr;
		}

		p_dec_info->stream_wr_ptr = wr_ptr;
		p_dec_info->stream_rd_ptr = rd_ptr;
	}

	ret = mutex_lock_interruptible(&vpu_dev->hw_lock);
	if (ret)
		return ret;
	/* size == 0 raises the stream-end flag in the firmware. */
	ret = wave5_vpu_dec_set_bitstream_flag(inst, (size == 0));
	mutex_unlock(&vpu_dev->hw_lock);

	return ret;
}
425 
/* Kick off decoding of one frame; frame buffers must be registered. */
int wave5_vpu_dec_start_one_frame(struct vpu_instance *inst, u32 *res_fail)
{
	struct vpu_device *vpu_dev = inst->dev;
	int ret;

	/* A zero stride means no frame buffers have been registered yet. */
	if (!inst->codec_info->dec_info.stride)
		return -EINVAL;

	ret = mutex_lock_interruptible(&vpu_dev->hw_lock);
	if (ret)
		return ret;

	ret = wave5_vpu_decode(inst, res_fail);
	mutex_unlock(&vpu_dev->hw_lock);

	return ret;
}
445 
/*
 * Program a new bitstream read pointer into the hardware and mirror it in
 * the cached state; optionally move the write pointer to the same spot.
 */
int wave5_vpu_dec_set_rd_ptr(struct vpu_instance *inst, dma_addr_t addr, int update_wr_ptr)
{
	struct vpu_device *vpu_dev = inst->dev;
	struct dec_info *p_dec_info = &inst->codec_info->dec_info;
	int ret;

	ret = mutex_lock_interruptible(&vpu_dev->hw_lock);
	if (ret)
		return ret;

	ret = wave5_dec_set_rd_ptr(inst, addr);

	p_dec_info->stream_rd_ptr = addr;
	if (update_wr_ptr)
		p_dec_info->stream_wr_ptr = addr;

	mutex_unlock(&vpu_dev->hw_lock);

	return ret;
}
466 
/*
 * wave5_vpu_dec_get_rd_ptr() - read the current bitstream read pointer
 * @inst: decoder instance
 *
 * NOTE(review): when taking hw_lock is interrupted, the negative errno is
 * returned cast to dma_addr_t; callers need to recognize such values as
 * errors — confirm all call sites handle this.
 */
dma_addr_t wave5_vpu_dec_get_rd_ptr(struct vpu_instance *inst)
{
	int ret;
	dma_addr_t rd_ptr;

	ret = mutex_lock_interruptible(&inst->dev->hw_lock);
	if (ret)
		return ret;

	rd_ptr = wave5_dec_get_rd_ptr(inst);

	mutex_unlock(&inst->dev->hw_lock);

	return rd_ptr;
}
482 
/*
 * wave5_vpu_dec_get_output_info() - collect the result of a decode command
 * @inst: decoder instance
 * @info: filled in with the firmware's decode/display report
 *
 * Reads the decode result from the firmware, derives the decoded and
 * display rectangles, refreshes the cached stream read pointer and display
 * flags, and latches the pending sequence header when the firmware reports
 * a sequence change. Returns 0 or a negative errno; on error only the
 * rd_ptr/wr_ptr fields of @info are filled in.
 */
int wave5_vpu_dec_get_output_info(struct vpu_instance *inst, struct dec_output_info *info)
{
	struct dec_info *p_dec_info;
	int ret;
	struct vpu_rect rect_info;
	u32 val;
	u32 decoded_index;
	u32 disp_idx;
	u32 max_dec_index;
	struct vpu_device *vpu_dev = inst->dev;
	struct dec_output_info *disp_info;

	if (!info)
		return -EINVAL;

	p_dec_info = &inst->codec_info->dec_info;

	ret = mutex_lock_interruptible(&vpu_dev->hw_lock);
	if (ret)
		return ret;

	memset(info, 0, sizeof(*info));

	ret = wave5_vpu_dec_get_result(inst, info);
	if (ret) {
		info->rd_ptr = p_dec_info->stream_rd_ptr;
		info->wr_ptr = p_dec_info->stream_wr_ptr;
		goto err_out;
	}

	decoded_index = info->index_frame_decoded;

	/* calculate display frame region */
	val = 0;
	rect_info.left = 0;
	rect_info.right = 0;
	rect_info.top = 0;
	rect_info.bottom = 0;

	if (decoded_index < WAVE5_MAX_FBS) {
		/* HEVC/AVC use the crop rectangle from the sequence info. */
		if (inst->std == W_HEVC_DEC || inst->std == W_AVC_DEC)
			rect_info = p_dec_info->initial_info.pic_crop_rect;

		if (inst->std == W_HEVC_DEC)
			p_dec_info->dec_out_info[decoded_index].decoded_poc = info->decoded_poc;

		p_dec_info->dec_out_info[decoded_index].rc_decoded = rect_info;
	}
	info->rc_decoded = rect_info;

	disp_idx = info->index_frame_display;
	if (info->index_frame_display >= 0 && info->index_frame_display < WAVE5_MAX_FBS) {
		disp_info = &p_dec_info->dec_out_info[disp_idx];
		if (info->index_frame_display != info->index_frame_decoded) {
			/*
			 * when index_frame_decoded < 0, and index_frame_display >= 0
			 * info->dec_pic_width and info->dec_pic_height are still valid
			 * but those of p_dec_info->dec_out_info[disp_idx] are invalid in VP9
			 */
			info->disp_pic_width = disp_info->dec_pic_width;
			info->disp_pic_height = disp_info->dec_pic_height;
		} else {
			info->disp_pic_width = info->dec_pic_width;
			info->disp_pic_height = info->dec_pic_height;
		}

		info->rc_display = disp_info->rc_decoded;

	} else {
		/* No frame to display this iteration: zero the display info. */
		info->rc_display.left = 0;
		info->rc_display.right = 0;
		info->rc_display.top = 0;
		info->rc_display.bottom = 0;
		info->disp_pic_width = 0;
		info->disp_pic_height = 0;
	}

	p_dec_info->stream_rd_ptr = wave5_dec_get_rd_ptr(inst);
	p_dec_info->frame_display_flag = vpu_read_reg(vpu_dev, W5_RET_DEC_DISP_IDC);

	val = p_dec_info->num_of_decoding_fbs; //fb_offset

	max_dec_index = (p_dec_info->num_of_decoding_fbs > p_dec_info->num_of_display_fbs) ?
		p_dec_info->num_of_decoding_fbs : p_dec_info->num_of_display_fbs;

	/* Display buffers start at offset num_of_decoding_fbs (val). */
	if (info->index_frame_display >= 0 &&
	    info->index_frame_display < (int)max_dec_index)
		info->disp_frame = inst->frame_buf[val + info->index_frame_display];

	info->rd_ptr = p_dec_info->stream_rd_ptr;
	info->wr_ptr = p_dec_info->stream_wr_ptr;
	info->frame_display_flag = p_dec_info->frame_display_flag;

	info->sequence_no = p_dec_info->initial_info.sequence_no;
	/* Cache this report so a later display index can refer back to it. */
	if (decoded_index < WAVE5_MAX_FBS)
		p_dec_info->dec_out_info[decoded_index] = *info;

	if (disp_idx < WAVE5_MAX_FBS)
		info->disp_frame.sequence_no = info->sequence_no;

	if (info->sequence_changed) {
		/* A new sequence starts: switch to the pending header info. */
		memcpy((void *)&p_dec_info->initial_info, (void *)&p_dec_info->new_seq_info,
		       sizeof(struct dec_initial_info));
		p_dec_info->initial_info.sequence_no++;
	}

err_out:
	mutex_unlock(&vpu_dev->hw_lock);

	return ret;
}
594 
wave5_vpu_dec_clr_disp_flag(struct vpu_instance * inst,int index)595 int wave5_vpu_dec_clr_disp_flag(struct vpu_instance *inst, int index)
596 {
597 	struct dec_info *p_dec_info = &inst->codec_info->dec_info;
598 	int ret;
599 	struct vpu_device *vpu_dev = inst->dev;
600 
601 	if (index >= p_dec_info->num_of_display_fbs)
602 		return -EINVAL;
603 
604 	ret = mutex_lock_interruptible(&vpu_dev->hw_lock);
605 	if (ret)
606 		return ret;
607 	ret = wave5_dec_clr_disp_flag(inst, index);
608 	mutex_unlock(&vpu_dev->hw_lock);
609 
610 	return ret;
611 }
612 
wave5_vpu_dec_set_disp_flag(struct vpu_instance * inst,int index)613 int wave5_vpu_dec_set_disp_flag(struct vpu_instance *inst, int index)
614 {
615 	struct dec_info *p_dec_info = &inst->codec_info->dec_info;
616 	int ret = 0;
617 	struct vpu_device *vpu_dev = inst->dev;
618 
619 	if (index >= p_dec_info->num_of_display_fbs)
620 		return -EINVAL;
621 
622 	ret = mutex_lock_interruptible(&vpu_dev->hw_lock);
623 	if (ret)
624 		return ret;
625 	ret = wave5_dec_set_disp_flag(inst, index);
626 	mutex_unlock(&vpu_dev->hw_lock);
627 
628 	return ret;
629 }
630 
wave5_vpu_dec_reset_framebuffer(struct vpu_instance * inst,unsigned int index)631 int wave5_vpu_dec_reset_framebuffer(struct vpu_instance *inst, unsigned int index)
632 {
633 	if (index >= MAX_REG_FRAME)
634 		return -EINVAL;
635 
636 	if (inst->frame_vbuf[index].size == 0)
637 		return -EINVAL;
638 
639 	wave5_vdi_free_dma_memory(inst->dev, &inst->frame_vbuf[index]);
640 
641 	return 0;
642 }
643 
wave5_vpu_dec_give_command(struct vpu_instance * inst,enum codec_command cmd,void * parameter)644 int wave5_vpu_dec_give_command(struct vpu_instance *inst, enum codec_command cmd, void *parameter)
645 {
646 	struct dec_info *p_dec_info = &inst->codec_info->dec_info;
647 	int ret = 0;
648 
649 	switch (cmd) {
650 	case DEC_GET_QUEUE_STATUS: {
651 		struct queue_status_info *queue_info = parameter;
652 
653 		queue_info->instance_queue_count = p_dec_info->instance_queue_count;
654 		queue_info->report_queue_count = p_dec_info->report_queue_count;
655 		break;
656 	}
657 	case DEC_RESET_FRAMEBUF_INFO: {
658 		int i;
659 
660 		for (i = 0; i < MAX_REG_FRAME; i++) {
661 			ret = wave5_vpu_dec_reset_framebuffer(inst, i);
662 			if (ret)
663 				break;
664 		}
665 
666 		for (i = 0; i < MAX_REG_FRAME; i++) {
667 			ret = reset_auxiliary_buffers(inst, i);
668 			if (ret)
669 				break;
670 		}
671 
672 		wave5_vdi_free_dma_memory(inst->dev, &p_dec_info->vb_task);
673 		break;
674 	}
675 	case DEC_GET_SEQ_INFO: {
676 		struct dec_initial_info *seq_info = parameter;
677 
678 		*seq_info = p_dec_info->initial_info;
679 		break;
680 	}
681 
682 	default:
683 		return -EINVAL;
684 	}
685 
686 	return ret;
687 }
688 
wave5_vpu_enc_open(struct vpu_instance * inst,struct enc_open_param * open_param)689 int wave5_vpu_enc_open(struct vpu_instance *inst, struct enc_open_param *open_param)
690 {
691 	struct enc_info *p_enc_info;
692 	int ret;
693 	struct vpu_device *vpu_dev = inst->dev;
694 
695 	ret = wave5_vpu_enc_check_open_param(inst, open_param);
696 	if (ret)
697 		return ret;
698 
699 	ret = mutex_lock_interruptible(&vpu_dev->hw_lock);
700 	if (ret)
701 		return ret;
702 
703 	if (!wave5_vpu_is_init(vpu_dev)) {
704 		mutex_unlock(&vpu_dev->hw_lock);
705 		return -ENODEV;
706 	}
707 
708 	p_enc_info = &inst->codec_info->enc_info;
709 	p_enc_info->open_param = *open_param;
710 
711 	ret = wave5_vpu_build_up_enc_param(vpu_dev->dev, inst, open_param);
712 	mutex_unlock(&vpu_dev->hw_lock);
713 
714 	return ret;
715 }
716 
/*
 * wave5_vpu_enc_close() - tear down an encoder instance
 * @inst: the encoder instance to close
 * @fail_res: set to the firmware failure reason (0 on success)
 *
 * Sends FINISH_SEQ to the firmware, retrying while the VPU reports it is
 * still running, then releases all encoder DMA buffers. Returns 0 on
 * success or a negative errno.
 *
 * Fix: the three error paths called pm_runtime_resume_and_get() instead of
 * pm_runtime_put_sync(), leaking the runtime-PM usage count taken at entry
 * (twice per failed close). They now drop the reference, after releasing
 * hw_lock, matching wave5_vpu_dec_close().
 */
int wave5_vpu_enc_close(struct vpu_instance *inst, u32 *fail_res)
{
	struct enc_info *p_enc_info = &inst->codec_info->enc_info;
	int ret;
	int retry = 0;
	struct vpu_device *vpu_dev = inst->dev;
	int inst_count = 0;
	struct vpu_instance *inst_elm;

	*fail_res = 0;
	if (!inst->codec_info)
		return -EINVAL;

	/* Keep the device powered for the duration of the teardown. */
	pm_runtime_resume_and_get(inst->dev->dev);

	ret = mutex_lock_interruptible(&vpu_dev->hw_lock);
	if (ret) {
		pm_runtime_put_sync(inst->dev->dev);
		return ret;
	}

	do {
		ret = wave5_vpu_enc_finish_seq(inst, fail_res);
		if (ret < 0 && *fail_res != WAVE5_SYSERR_VPU_STILL_RUNNING) {
			dev_warn(inst->dev->dev, "enc_finish_seq timed out\n");
			mutex_unlock(&vpu_dev->hw_lock);
			pm_runtime_put_sync(inst->dev->dev);
			return ret;
		}

		/* Firmware still busy: retry a bounded number of times. */
		if (*fail_res == WAVE5_SYSERR_VPU_STILL_RUNNING &&
		    retry++ >= MAX_FIRMWARE_CALL_RETRY) {
			mutex_unlock(&vpu_dev->hw_lock);
			pm_runtime_put_sync(inst->dev->dev);
			return -ETIMEDOUT;
		}
	} while (ret != 0);

	dev_dbg(inst->dev->dev, "%s: enc_finish_seq complete\n", __func__);

	wave5_vdi_free_dma_memory(vpu_dev, &p_enc_info->vb_work);

	if (inst->std == W_HEVC_ENC || inst->std == W_AVC_ENC) {
		wave5_vdi_free_dma_memory(vpu_dev, &p_enc_info->vb_sub_sam_buf);
		wave5_vdi_free_dma_memory(vpu_dev, &p_enc_info->vb_mv);
		wave5_vdi_free_dma_memory(vpu_dev, &p_enc_info->vb_fbc_y_tbl);
		wave5_vdi_free_dma_memory(vpu_dev, &p_enc_info->vb_fbc_c_tbl);
	}

	wave5_vdi_free_dma_memory(vpu_dev, &p_enc_info->vb_task);

	/* If this was the last instance, stop using autosuspend. */
	list_for_each_entry(inst_elm, &vpu_dev->instances, list)
		inst_count++;
	if (inst_count == 1)
		pm_runtime_dont_use_autosuspend(vpu_dev->dev);

	mutex_unlock(&vpu_dev->hw_lock);
	pm_runtime_put_sync(inst->dev->dev);

	return 0;
}
778 
wave5_vpu_enc_register_frame_buffer(struct vpu_instance * inst,unsigned int num,unsigned int stride,int height,enum tiled_map_type map_type)779 int wave5_vpu_enc_register_frame_buffer(struct vpu_instance *inst, unsigned int num,
780 					unsigned int stride, int height,
781 					enum tiled_map_type map_type)
782 {
783 	struct enc_info *p_enc_info = &inst->codec_info->enc_info;
784 	int ret;
785 	struct vpu_device *vpu_dev = inst->dev;
786 	unsigned int size_luma, size_chroma;
787 	int i;
788 
789 	if (p_enc_info->stride)
790 		return -EINVAL;
791 
792 	if (!p_enc_info->initial_info_obtained)
793 		return -EINVAL;
794 
795 	if (num < p_enc_info->initial_info.min_frame_buffer_count)
796 		return -EINVAL;
797 
798 	if (stride == 0 || stride % 8 != 0)
799 		return -EINVAL;
800 
801 	if (height <= 0)
802 		return -EINVAL;
803 
804 	ret = mutex_lock_interruptible(&vpu_dev->hw_lock);
805 	if (ret)
806 		return ret;
807 
808 	p_enc_info->num_frame_buffers = num;
809 	p_enc_info->stride = stride;
810 
811 	size_luma = stride * height;
812 	size_chroma = ALIGN(stride / 2, 16) * height;
813 
814 	for (i = 0; i < num; i++) {
815 		if (!inst->frame_buf[i].update_fb_info)
816 			continue;
817 
818 		inst->frame_buf[i].update_fb_info = false;
819 		inst->frame_buf[i].stride = stride;
820 		inst->frame_buf[i].height = height;
821 		inst->frame_buf[i].map_type = COMPRESSED_FRAME_MAP;
822 		inst->frame_buf[i].buf_y_size = size_luma;
823 		inst->frame_buf[i].buf_cb = inst->frame_buf[i].buf_y + size_luma;
824 		inst->frame_buf[i].buf_cb_size = size_chroma;
825 		inst->frame_buf[i].buf_cr_size = 0;
826 	}
827 
828 	ret = wave5_vpu_enc_register_framebuffer(inst->dev->dev, inst, &inst->frame_buf[0],
829 						 COMPRESSED_FRAME_MAP,
830 						 p_enc_info->num_frame_buffers);
831 
832 	mutex_unlock(&vpu_dev->hw_lock);
833 
834 	return ret;
835 }
836 
wave5_check_enc_param(struct vpu_instance * inst,struct enc_param * param)837 static int wave5_check_enc_param(struct vpu_instance *inst, struct enc_param *param)
838 {
839 	struct enc_info *p_enc_info = &inst->codec_info->enc_info;
840 
841 	if (!param)
842 		return -EINVAL;
843 
844 	if (!param->source_frame)
845 		return -EINVAL;
846 
847 	if (p_enc_info->open_param.bit_rate == 0 && inst->std == W_HEVC_ENC) {
848 		if (param->pic_stream_buffer_addr % 16 || param->pic_stream_buffer_size == 0)
849 			return -EINVAL;
850 	}
851 	if (param->pic_stream_buffer_addr % 8 || param->pic_stream_buffer_size == 0)
852 		return -EINVAL;
853 
854 	return 0;
855 }
856 
/* Submit one source frame to the encoder firmware. */
int wave5_vpu_enc_start_one_frame(struct vpu_instance *inst, struct enc_param *param, u32 *fail_res)
{
	struct vpu_device *vpu_dev = inst->dev;
	struct enc_info *p_enc_info = &inst->codec_info->enc_info;
	int ret;

	*fail_res = 0;

	/* A zero stride means no frame buffers have been registered yet. */
	if (!p_enc_info->stride)
		return -EINVAL;

	ret = wave5_check_enc_param(inst, param);
	if (ret)
		return ret;

	ret = mutex_lock_interruptible(&vpu_dev->hw_lock);
	if (ret)
		return ret;

	/* Remember the timestamp so it can be returned with the result. */
	p_enc_info->pts_map[param->src_idx] = param->pts;

	ret = wave5_vpu_encode(inst, param, fail_res);
	mutex_unlock(&vpu_dev->hw_lock);

	return ret;
}
884 
wave5_vpu_enc_get_output_info(struct vpu_instance * inst,struct enc_output_info * info)885 int wave5_vpu_enc_get_output_info(struct vpu_instance *inst, struct enc_output_info *info)
886 {
887 	struct enc_info *p_enc_info = &inst->codec_info->enc_info;
888 	int ret;
889 	struct vpu_device *vpu_dev = inst->dev;
890 
891 	ret = mutex_lock_interruptible(&vpu_dev->hw_lock);
892 	if (ret)
893 		return ret;
894 
895 	ret = wave5_vpu_enc_get_result(inst, info);
896 	if (ret) {
897 		info->pts = 0;
898 		goto unlock;
899 	}
900 
901 	if (info->recon_frame_index >= 0)
902 		info->pts = p_enc_info->pts_map[info->enc_src_idx];
903 
904 unlock:
905 	mutex_unlock(&vpu_dev->hw_lock);
906 
907 	return ret;
908 }
909 
wave5_vpu_enc_give_command(struct vpu_instance * inst,enum codec_command cmd,void * parameter)910 int wave5_vpu_enc_give_command(struct vpu_instance *inst, enum codec_command cmd, void *parameter)
911 {
912 	struct enc_info *p_enc_info = &inst->codec_info->enc_info;
913 
914 	switch (cmd) {
915 	case ENABLE_ROTATION:
916 		p_enc_info->rotation_enable = true;
917 		break;
918 	case ENABLE_MIRRORING:
919 		p_enc_info->mirror_enable = true;
920 		break;
921 	case SET_MIRROR_DIRECTION: {
922 		enum mirror_direction mir_dir;
923 
924 		mir_dir = *(enum mirror_direction *)parameter;
925 		if (mir_dir != MIRDIR_NONE && mir_dir != MIRDIR_HOR &&
926 		    mir_dir != MIRDIR_VER && mir_dir != MIRDIR_HOR_VER)
927 			return -EINVAL;
928 		p_enc_info->mirror_direction = mir_dir;
929 		break;
930 	}
931 	case SET_ROTATION_ANGLE: {
932 		int angle;
933 
934 		angle = *(int *)parameter;
935 		if (angle && angle != 90 && angle != 180 && angle != 270)
936 			return -EINVAL;
937 		if (p_enc_info->initial_info_obtained && (angle == 90 || angle == 270))
938 			return -EINVAL;
939 		p_enc_info->rotation_angle = angle;
940 		break;
941 	}
942 	case ENC_GET_QUEUE_STATUS: {
943 		struct queue_status_info *queue_info = parameter;
944 
945 		queue_info->instance_queue_count = p_enc_info->instance_queue_count;
946 		queue_info->report_queue_count = p_enc_info->report_queue_count;
947 		break;
948 	}
949 	default:
950 		return -EINVAL;
951 	}
952 	return 0;
953 }
954 
wave5_vpu_enc_issue_seq_init(struct vpu_instance * inst)955 int wave5_vpu_enc_issue_seq_init(struct vpu_instance *inst)
956 {
957 	int ret;
958 	struct vpu_device *vpu_dev = inst->dev;
959 
960 	ret = mutex_lock_interruptible(&vpu_dev->hw_lock);
961 	if (ret)
962 		return ret;
963 
964 	ret = wave5_vpu_enc_init_seq(inst);
965 
966 	mutex_unlock(&vpu_dev->hw_lock);
967 
968 	return ret;
969 }
970 
wave5_vpu_enc_complete_seq_init(struct vpu_instance * inst,struct enc_initial_info * info)971 int wave5_vpu_enc_complete_seq_init(struct vpu_instance *inst, struct enc_initial_info *info)
972 {
973 	struct enc_info *p_enc_info = &inst->codec_info->enc_info;
974 	int ret;
975 	struct vpu_device *vpu_dev = inst->dev;
976 
977 	if (!info)
978 		return -EINVAL;
979 
980 	ret = mutex_lock_interruptible(&vpu_dev->hw_lock);
981 	if (ret)
982 		return ret;
983 
984 	ret = wave5_vpu_enc_get_seq_info(inst, info);
985 	if (ret) {
986 		p_enc_info->initial_info_obtained = false;
987 		mutex_unlock(&vpu_dev->hw_lock);
988 		return ret;
989 	}
990 
991 	p_enc_info->initial_info_obtained = true;
992 	p_enc_info->initial_info = *info;
993 
994 	mutex_unlock(&vpu_dev->hw_lock);
995 
996 	return 0;
997 }
998