1 // SPDX-License-Identifier: GPL-2.0
2 /*
3 * The Marvell camera core. This device appears in a number of settings,
4 * so it needs platform-specific support outside of the core.
5 *
6 * Copyright 2011 Jonathan Corbet [email protected]
7 * Copyright 2018 Lubomir Rintel <[email protected]>
8 */
9 #include <linux/kernel.h>
10 #include <linux/module.h>
11 #include <linux/fs.h>
12 #include <linux/mm.h>
13 #include <linux/i2c.h>
14 #include <linux/interrupt.h>
15 #include <linux/spinlock.h>
16 #include <linux/slab.h>
17 #include <linux/device.h>
18 #include <linux/wait.h>
19 #include <linux/list.h>
20 #include <linux/dma-mapping.h>
21 #include <linux/delay.h>
22 #include <linux/vmalloc.h>
23 #include <linux/io.h>
24 #include <linux/clk.h>
25 #include <linux/clk-provider.h>
26 #include <linux/videodev2.h>
27 #include <linux/pm_runtime.h>
28 #include <media/v4l2-device.h>
29 #include <media/v4l2-ioctl.h>
30 #include <media/v4l2-ctrls.h>
31 #include <media/v4l2-event.h>
32 #include <media/videobuf2-vmalloc.h>
33 #include <media/videobuf2-dma-contig.h>
34 #include <media/videobuf2-dma-sg.h>
35
36 #include "mcam-core.h"
37
38 #ifdef MCAM_MODE_VMALLOC
39 /*
40 * Internal DMA buffer management. Since the controller cannot do S/G I/O,
41 * we must have physically contiguous buffers to bring frames into.
42 * These parameters control how many buffers we use, whether we
43 * allocate them at load time (better chance of success, but nails down
44 * memory) or when somebody tries to use the camera (riskier), and,
45 * for load-time allocation, how big they should be.
46 *
47 * The controller can cycle through three buffers. We could use
48 * more by flipping pointers around, but it probably makes little
49 * sense.
50 */
51
52 static bool alloc_bufs_at_read;
53 module_param(alloc_bufs_at_read, bool, 0444);
54 MODULE_PARM_DESC(alloc_bufs_at_read,
55 "Non-zero value causes DMA buffers to be allocated when the video capture device is read, rather than at module load time. This saves memory, but decreases the chances of successfully getting those buffers. This parameter is only used in the vmalloc buffer mode");
56
57 static int n_dma_bufs = 3;
58 module_param(n_dma_bufs, uint, 0644);
59 MODULE_PARM_DESC(n_dma_bufs,
60 "The number of DMA buffers to allocate. Can be either two (saves memory, makes timing tighter) or three.");
61
62 static int dma_buf_size = VGA_WIDTH * VGA_HEIGHT * 2; /* Worst case */
63 module_param(dma_buf_size, uint, 0444);
64 MODULE_PARM_DESC(dma_buf_size,
65 "The size of the allocated DMA buffers. If actual operating parameters require larger buffers, an attempt to reallocate will be made.");
66 #else /* MCAM_MODE_VMALLOC */
67 static const bool alloc_bufs_at_read;
68 static const int n_dma_bufs = 3; /* Used by S/G_PARM */
69 #endif /* MCAM_MODE_VMALLOC */
70
71 static bool flip;
72 module_param(flip, bool, 0444);
73 MODULE_PARM_DESC(flip,
74 "If set, the sensor will be instructed to flip the image vertically.");
75
76 static int buffer_mode = -1;
77 module_param(buffer_mode, int, 0444);
78 MODULE_PARM_DESC(buffer_mode,
79 "Set the buffer mode to be used; default is to go with what the platform driver asks for. Set to 0 for vmalloc, 1 for DMA contiguous.");
80
/*
 * Status flags. Always manipulated with bit operations inside
 * cam->flags.  Bits 0-2 track buffer validity and so must stay equal
 * to the frame index they describe (mcam_reset_buffers() relies on
 * clear_bit(i) matching CF_BUFn_VALID).
 */
#define CF_BUF0_VALID	 0	/* Buffers valid - first three */
#define CF_BUF1_VALID	 1
#define CF_BUF2_VALID	 2
#define CF_DMA_ACTIVE	 3	/* A frame is incoming */
#define CF_CONFIG_NEEDED 4	/* Must configure hardware */
#define CF_SINGLE_BUFFER 5	/* Running with a single buffer */
#define CF_SG_RESTART	 6	/* SG restart needed */
#define CF_FRAME_SOF0	 7	/* Frame 0 started */
#define CF_FRAME_SOF1	 8
#define CF_FRAME_SOF2	 9

/* Invoke an op on the attached sensor subdevice. */
#define sensor_call(cam, o, f, args...) \
	v4l2_subdev_call(cam->sensor, o, f, ##args)

/* Map an async notifier back to its owning mcam_camera. */
#define notifier_to_mcam(notifier) \
	container_of(notifier, struct mcam_camera, notifier)
100
/*
 * The pixel formats we support, with the media-bus code the sensor
 * must produce for each.  NOTE: mcam_find_format() returns the FIRST
 * entry as the fallback for unknown formats, so keep a sensible
 * default (YUYV) at the head of the table.  All YUV variants use the
 * same YUYV bus code; the controller repacks the data itself (see the
 * C0_DF_* programming in mcam_ctlr_image()).
 */
static struct mcam_format_struct {
	__u32 pixelformat;
	int bpp;   /* Bytes per pixel */
	bool planar;
	u32 mbus_code;
} mcam_formats[] = {
	{
		.pixelformat	= V4L2_PIX_FMT_YUYV,
		.mbus_code	= MEDIA_BUS_FMT_YUYV8_2X8,
		.bpp		= 2,
		.planar		= false,
	},
	{
		.pixelformat	= V4L2_PIX_FMT_YVYU,
		.mbus_code	= MEDIA_BUS_FMT_YUYV8_2X8,
		.bpp		= 2,
		.planar		= false,
	},
	{
		.pixelformat	= V4L2_PIX_FMT_YUV420,
		.mbus_code	= MEDIA_BUS_FMT_YUYV8_2X8,
		.bpp		= 1,
		.planar		= true,
	},
	{
		.pixelformat	= V4L2_PIX_FMT_YVU420,
		.mbus_code	= MEDIA_BUS_FMT_YUYV8_2X8,
		.bpp		= 1,
		.planar		= true,
	},
	{
		.pixelformat	= V4L2_PIX_FMT_XRGB444,
		.mbus_code	= MEDIA_BUS_FMT_RGB444_2X8_PADHI_LE,
		.bpp		= 2,
		.planar		= false,
	},
	{
		.pixelformat	= V4L2_PIX_FMT_RGB565,
		.mbus_code	= MEDIA_BUS_FMT_RGB565_2X8_LE,
		.bpp		= 2,
		.planar		= false,
	},
	{
		.pixelformat	= V4L2_PIX_FMT_SBGGR8,
		.mbus_code	= MEDIA_BUS_FMT_SBGGR8_1X8,
		.bpp		= 1,
		.planar		= false,
	},
};
#define N_MCAM_FMTS ARRAY_SIZE(mcam_formats)
151
/*
 * Look up the descriptor for a pixel format; unknown formats fall
 * back to the first (default) table entry rather than failing.
 */
static struct mcam_format_struct *mcam_find_format(u32 pixelformat)
{
	struct mcam_format_struct *f;

	for (f = mcam_formats; f < mcam_formats + N_MCAM_FMTS; f++)
		if (f->pixelformat == pixelformat)
			return f;
	/* No match: hand back the default format at the head of the table. */
	return &mcam_formats[0];
}
162
/*
 * The default format we use until somebody says otherwise.
 * VGA packed YUYV: 2 bytes/pixel, progressive, sRGB.
 */
static const struct v4l2_pix_format mcam_def_pix_format = {
	.width		= VGA_WIDTH,
	.height		= VGA_HEIGHT,
	.pixelformat	= V4L2_PIX_FMT_YUYV,
	.field		= V4L2_FIELD_NONE,
	.bytesperline	= VGA_WIDTH*2,
	.sizeimage	= VGA_WIDTH*VGA_HEIGHT*2,
	.colorspace	= V4L2_COLORSPACE_SRGB,
};

/* Bus code matching the default YUYV pixel format above. */
static const u32 mcam_def_mbus_code = MEDIA_BUS_FMT_YUYV8_2X8;
177
178
/*
 * The two-word DMA descriptor format used by the Armada 610 and like. There
 * Is a three-word format as well (set C1_DESC_3WORD) where the third
 * word is a pointer to the next descriptor, but we don't use it. Two-word
 * descriptors have to be contiguous in memory.
 */
struct mcam_dma_desc {
	u32 dma_addr;		/* DMA target address of this segment */
	u32 segment_len;	/* Length of the segment in bytes */
};

/*
 * Our buffer type for working with videobuf2. Note that the vb2
 * developers have decreed that struct vb2_v4l2_buffer must be at the
 * beginning of this structure.
 */
struct mcam_vb_buffer {
	struct vb2_v4l2_buffer vb_buf;
	struct list_head queue;		/* Link on cam->buffers */
	struct mcam_dma_desc *dma_desc;	/* Descriptor virtual address */
	dma_addr_t dma_desc_pa;		/* Descriptor physical address */
};

/*
 * Recover our buffer from the embedded vb2 buffer; valid because
 * vb_buf is the first member (see the comment above the struct).
 */
static inline struct mcam_vb_buffer *vb_to_mvb(struct vb2_v4l2_buffer *vb)
{
	return container_of(vb, struct mcam_vb_buffer, vb_buf);
}
206
207 /*
208 * Hand a completed buffer back to user space.
209 */
mcam_buffer_done(struct mcam_camera * cam,int frame,struct vb2_v4l2_buffer * vbuf)210 static void mcam_buffer_done(struct mcam_camera *cam, int frame,
211 struct vb2_v4l2_buffer *vbuf)
212 {
213 vbuf->vb2_buf.planes[0].bytesused = cam->pix_format.sizeimage;
214 vbuf->sequence = cam->buf_seq[frame];
215 vbuf->field = V4L2_FIELD_NONE;
216 vbuf->vb2_buf.timestamp = ktime_get_ns();
217 vb2_set_plane_payload(&vbuf->vb2_buf, 0, cam->pix_format.sizeimage);
218 vb2_buffer_done(&vbuf->vb2_buf, VB2_BUF_STATE_DONE);
219 }
220
221
222
/*
 * Debugging and related.  No trailing semicolons in the expansions:
 * a semicolon baked into the macro yields double statements at normal
 * call sites and breaks un-braced if/else constructs.  Call sites
 * terminate these like any other statement.
 */
#define cam_err(cam, fmt, arg...) \
	dev_err((cam)->dev, fmt, ##arg)
#define cam_warn(cam, fmt, arg...) \
	dev_warn((cam)->dev, fmt, ##arg)
#define cam_dbg(cam, fmt, arg...) \
	dev_dbg((cam)->dev, fmt, ##arg)
232
233
234 /*
235 * Flag manipulation helpers
236 */
mcam_reset_buffers(struct mcam_camera * cam)237 static void mcam_reset_buffers(struct mcam_camera *cam)
238 {
239 int i;
240
241 cam->next_buf = -1;
242 for (i = 0; i < cam->nbufs; i++) {
243 clear_bit(i, &cam->flags);
244 clear_bit(CF_FRAME_SOF0 + i, &cam->flags);
245 }
246 }
247
/* Nonzero if the hardware must be (re)configured before streaming. */
static inline int mcam_needs_config(struct mcam_camera *cam)
{
	return test_bit(CF_CONFIG_NEEDED, &cam->flags);
}
252
mcam_set_config_needed(struct mcam_camera * cam,int needed)253 static void mcam_set_config_needed(struct mcam_camera *cam, int needed)
254 {
255 if (needed)
256 set_bit(CF_CONFIG_NEEDED, &cam->flags);
257 else
258 clear_bit(CF_CONFIG_NEEDED, &cam->flags);
259 }
260
/* ------------------------------------------------------------------- */
/*
 * Make the controller start grabbing images. Everything must
 * be set up before doing this.
 */
static void mcam_ctlr_start(struct mcam_camera *cam)
{
	/* set_bit performs a read, so no other barrier should be
	   needed here */
	mcam_reg_set_bit(cam, REG_CTRL0, C0_ENABLE);
}

/* Stop capture by dropping the controller enable bit. */
static void mcam_ctlr_stop(struct mcam_camera *cam)
{
	mcam_reg_clear_bit(cam, REG_CTRL0, C0_ENABLE);
}
277
/*
 * Put the controller into MIPI (CSI-2) mode: program the DPHY timing
 * registers (recomputed every call) and, on the first call since the
 * last disable, enable the MIPI receiver with the configured lane count.
 */
static void mcam_enable_mipi(struct mcam_camera *mcam)
{
	/* Using MIPI mode and enable MIPI */
	if (mcam->calc_dphy)	/* Optional platform hook to derive DPHY timing */
		mcam->calc_dphy(mcam);
	cam_dbg(mcam, "camera: DPHY3=0x%x, DPHY5=0x%x, DPHY6=0x%x\n",
			mcam->dphy[0], mcam->dphy[1], mcam->dphy[2]);
	mcam_reg_write(mcam, REG_CSI2_DPHY3, mcam->dphy[0]);
	mcam_reg_write(mcam, REG_CSI2_DPHY5, mcam->dphy[1]);
	mcam_reg_write(mcam, REG_CSI2_DPHY6, mcam->dphy[2]);

	if (!mcam->mipi_enabled) {
		/* Clamp a bogus lane count to the safe default of one. */
		if (mcam->lane > 4 || mcam->lane <= 0) {
			cam_warn(mcam, "lane number error\n");
			mcam->lane = 1;	/* set the default value */
		}
		/*
		 * 0x41 actives 1 lane
		 * 0x43 actives 2 lanes
		 * 0x45 actives 3 lanes (never happen)
		 * 0x47 actives 4 lanes
		 */
		mcam_reg_write(mcam, REG_CSI2_CTRL0,
			CSI2_C0_MIPI_EN | CSI2_C0_ACT_LANE(mcam->lane));
		mcam->mipi_enabled = true;
	}
}
305
/*
 * Leave MIPI mode: turn the CSI-2 receiver off and zero the DPHY
 * timing registers, so the next mcam_enable_mipi() starts clean.
 */
static void mcam_disable_mipi(struct mcam_camera *mcam)
{
	/* Using Parallel mode or disable MIPI */
	mcam_reg_write(mcam, REG_CSI2_CTRL0, 0x0);
	mcam_reg_write(mcam, REG_CSI2_DPHY3, 0x0);
	mcam_reg_write(mcam, REG_CSI2_DPHY5, 0x0);
	mcam_reg_write(mcam, REG_CSI2_DPHY6, 0x0);
	mcam->mipi_enabled = false;
}
315
mcam_fmt_is_planar(__u32 pfmt)316 static bool mcam_fmt_is_planar(__u32 pfmt)
317 {
318 struct mcam_format_struct *f;
319
320 f = mcam_find_format(pfmt);
321 return f->planar;
322 }
323
/*
 * Program the base-address registers for one frame slot.  Packed
 * formats only need the Y base; the two planar 4:2:0 formats also get
 * U and V bases laid out immediately after the luma plane (U first
 * for YUV420, V first for YVU420).
 */
static void mcam_write_yuv_bases(struct mcam_camera *cam,
		unsigned frame, dma_addr_t base)
{
	struct v4l2_pix_format *fmt = &cam->pix_format;
	u32 luma_size = fmt->width * fmt->height;
	u32 chroma_size = luma_size / 4;	/* 4:2:0 subsampling */
	dma_addr_t u_base = 0, v_base = 0;

	if (fmt->pixelformat == V4L2_PIX_FMT_YUV420) {
		u_base = base + luma_size;
		v_base = u_base + chroma_size;
	} else if (fmt->pixelformat == V4L2_PIX_FMT_YVU420) {
		v_base = base + luma_size;
		u_base = v_base + chroma_size;
	}

	mcam_reg_write(cam, REG_Y0BAR + frame * 4, base);
	if (mcam_fmt_is_planar(fmt->pixelformat)) {
		mcam_reg_write(cam, REG_U0BAR + frame * 4, u_base);
		mcam_reg_write(cam, REG_V0BAR + frame * 4, v_base);
	}
}
352
353 /* ------------------------------------------------------------------- */
354
355 #ifdef MCAM_MODE_VMALLOC
356 /*
357 * Code specific to the vmalloc buffer mode.
358 */
359
/*
 * Allocate in-kernel DMA buffers for vmalloc mode.
 *
 * @loadtime selects the buffer size: at module load time we use the
 * dma_buf_size parameter (worst case), otherwise the current format's
 * sizeimage.  Allocates up to n_dma_bufs (clamped to 3) coherent
 * buffers; a single buffer is useless (the controller needs at least
 * two), so it is freed and -ENOMEM returned.  Two buffers work, with
 * a warning, when three were requested.
 */
static int mcam_alloc_dma_bufs(struct mcam_camera *cam, int loadtime)
{
	int i;

	mcam_set_config_needed(cam, 1);
	if (loadtime)
		cam->dma_buf_size = dma_buf_size;
	else
		cam->dma_buf_size = cam->pix_format.sizeimage;
	if (n_dma_bufs > 3)
		n_dma_bufs = 3;

	cam->nbufs = 0;
	for (i = 0; i < n_dma_bufs; i++) {
		cam->dma_bufs[i] = dma_alloc_coherent(cam->dev,
				cam->dma_buf_size, cam->dma_handles + i,
				GFP_KERNEL);
		if (cam->dma_bufs[i] == NULL) {
			cam_warn(cam, "Failed to allocate DMA buffer\n");
			break;	/* Keep whatever we did get */
		}
		(cam->nbufs)++;
	}

	switch (cam->nbufs) {
	case 1:
		/* One buffer cannot sustain capture; give it back. */
		dma_free_coherent(cam->dev, cam->dma_buf_size,
				cam->dma_bufs[0], cam->dma_handles[0]);
		cam->nbufs = 0;
		fallthrough;
	case 0:
		cam_err(cam, "Insufficient DMA buffers, cannot operate\n");
		return -ENOMEM;

	case 2:
		if (n_dma_bufs > 2)
			cam_warn(cam, "Will limp along with only 2 buffers\n");
		break;
	}
	return 0;
}
404
mcam_free_dma_bufs(struct mcam_camera * cam)405 static void mcam_free_dma_bufs(struct mcam_camera *cam)
406 {
407 int i;
408
409 for (i = 0; i < cam->nbufs; i++) {
410 dma_free_coherent(cam->dev, cam->dma_buf_size,
411 cam->dma_bufs[i], cam->dma_handles[i]);
412 cam->dma_bufs[i] = NULL;
413 }
414 cam->nbufs = 0;
415 }
416
417
/*
 * Set up DMA buffers when operating in vmalloc mode
 */
static void mcam_ctlr_dma_vmalloc(struct mcam_camera *cam)
{
	/*
	 * Store the first two YUV buffers. Then either
	 * set the third if it exists, or tell the controller
	 * to just use two.
	 */
	mcam_write_yuv_bases(cam, 0, cam->dma_handles[0]);
	mcam_write_yuv_bases(cam, 1, cam->dma_handles[1]);
	if (cam->nbufs > 2) {
		mcam_write_yuv_bases(cam, 2, cam->dma_handles[2]);
		mcam_reg_clear_bit(cam, REG_CTRL1, C1_TWOBUFS);
	} else
		mcam_reg_set_bit(cam, REG_CTRL1, C1_TWOBUFS);
	/* The Cafe variant needs the upper base-address register zeroed. */
	if (cam->chip_id == MCAM_CAFE)
		mcam_reg_write(cam, REG_UBAR, 0); /* 32 bits only */
}
438
/*
 * Copy data out to user space in the vmalloc case.  Runs as BH work:
 * walk the internal DMA buffers starting at next_buf, and for each
 * one marked valid, copy its contents into the first queued vb2
 * buffer and complete it.  The device lock is held except around the
 * (potentially large) memcpy.
 */
static void mcam_frame_work(struct work_struct *t)
{
	struct mcam_camera *cam = from_work(cam, t, s_bh_work);
	int i;
	unsigned long flags;
	struct mcam_vb_buffer *buf;

	spin_lock_irqsave(&cam->dev_lock, flags);
	for (i = 0; i < cam->nbufs; i++) {
		int bufno = cam->next_buf;

		if (cam->state != S_STREAMING || bufno < 0)
			break;	/* I/O got stopped */
		if (++(cam->next_buf) >= cam->nbufs)
			cam->next_buf = 0;
		/* Bit <bufno> set means this DMA buffer holds a frame */
		if (!test_bit(bufno, &cam->flags))
			continue;
		if (list_empty(&cam->buffers)) {
			cam->frame_state.singles++;
			break;	/* Leave it valid, hope for better later */
		}
		cam->frame_state.delivered++;
		clear_bit(bufno, &cam->flags);
		buf = list_first_entry(&cam->buffers, struct mcam_vb_buffer,
				queue);
		list_del_init(&buf->queue);
		/*
		 * Drop the lock during the big copy. This *should* be safe...
		 */
		spin_unlock_irqrestore(&cam->dev_lock, flags);
		memcpy(vb2_plane_vaddr(&buf->vb_buf.vb2_buf, 0),
				cam->dma_bufs[bufno],
				cam->pix_format.sizeimage);
		mcam_buffer_done(cam, bufno, &buf->vb_buf);
		spin_lock_irqsave(&cam->dev_lock, flags);
	}
	spin_unlock_irqrestore(&cam->dev_lock, flags);
}
480
481
482 /*
483 * Make sure our allocated buffers are up to the task.
484 */
mcam_check_dma_buffers(struct mcam_camera * cam)485 static int mcam_check_dma_buffers(struct mcam_camera *cam)
486 {
487 if (cam->nbufs > 0 && cam->dma_buf_size < cam->pix_format.sizeimage)
488 mcam_free_dma_bufs(cam);
489 if (cam->nbufs == 0)
490 return mcam_alloc_dma_bufs(cam, 0);
491 return 0;
492 }
493
/*
 * Frame-complete hook for vmalloc mode: the copy to user buffers is
 * too heavy for interrupt context, so punt it to BH work
 * (mcam_frame_work).
 */
static void mcam_vmalloc_done(struct mcam_camera *cam, int frame)
{
	queue_work(system_bh_wq, &cam->s_bh_work);
}
498
499 #else /* MCAM_MODE_VMALLOC */
500
/* No-op stand-ins used when vmalloc buffer mode is compiled out. */
static inline int mcam_alloc_dma_bufs(struct mcam_camera *cam, int loadtime)
{
	return 0;
}

static inline void mcam_free_dma_bufs(struct mcam_camera *cam)
{
}

static inline int mcam_check_dma_buffers(struct mcam_camera *cam)
{
	return 0;
}
515
516
517
518 #endif /* MCAM_MODE_VMALLOC */
519
520
521 #ifdef MCAM_MODE_DMA_CONTIG
522 /* ---------------------------------------------------------------------- */
523 /*
524 * DMA-contiguous code.
525 */
526
/*
 * Set up a contiguous buffer for the given frame. Here also is where
 * the underrun strategy is set: if there is no buffer available, reuse
 * the buffer from the other BAR and set the CF_SINGLE_BUFFER flag to
 * keep the interrupt handler from giving that buffer back to user
 * space. In this way, we always have a buffer to DMA to and don't
 * have to try to play games stopping and restarting the controller.
 */
static void mcam_set_contig_buffer(struct mcam_camera *cam, int frame)
{
	struct mcam_vb_buffer *buf;
	dma_addr_t dma_handle;
	struct vb2_v4l2_buffer *vb;

	/*
	 * If there are no available buffers, go into single mode
	 */
	if (list_empty(&cam->buffers)) {
		/* frame ^ 0x1 is the other of the two frame slots (0<->1) */
		buf = cam->vb_bufs[frame ^ 0x1];
		set_bit(CF_SINGLE_BUFFER, &cam->flags);
		cam->frame_state.singles++;
	} else {
		/*
		 * OK, we have a buffer we can use.
		 */
		buf = list_first_entry(&cam->buffers, struct mcam_vb_buffer,
					queue);
		list_del_init(&buf->queue);
		clear_bit(CF_SINGLE_BUFFER, &cam->flags);
	}

	cam->vb_bufs[frame] = buf;
	vb = &buf->vb_buf;

	dma_handle = vb2_dma_contig_plane_dma_addr(&vb->vb2_buf, 0);
	mcam_write_yuv_bases(cam, frame, dma_handle);
}
564
565 /*
566 * Initial B_DMA_contig setup.
567 */
mcam_ctlr_dma_contig(struct mcam_camera * cam)568 static void mcam_ctlr_dma_contig(struct mcam_camera *cam)
569 {
570 mcam_reg_set_bit(cam, REG_CTRL1, C1_TWOBUFS);
571 cam->nbufs = 2;
572 mcam_set_contig_buffer(cam, 0);
573 mcam_set_contig_buffer(cam, 1);
574 }
575
/*
 * Frame completion handling.  In single-buffer mode the frame was
 * DMA'd into a buffer user space still owns, so it is NOT delivered;
 * either way the slot is re-armed with a (possibly recycled) buffer.
 */
static void mcam_dma_contig_done(struct mcam_camera *cam, int frame)
{
	struct mcam_vb_buffer *buf = cam->vb_bufs[frame];

	if (!test_bit(CF_SINGLE_BUFFER, &cam->flags)) {
		cam->frame_state.delivered++;
		cam->vb_bufs[frame] = NULL;
		mcam_buffer_done(cam, frame, &buf->vb_buf);
	}
	mcam_set_contig_buffer(cam, frame);
}
590
591 #endif /* MCAM_MODE_DMA_CONTIG */
592
593 #ifdef MCAM_MODE_DMA_SG
594 /* ---------------------------------------------------------------------- */
595 /*
596 * Scatter/gather-specific code.
597 */
598
/*
 * Set up the next buffer for S/G I/O; caller should be sure that
 * the controller is stopped and a buffer is available.
 */
static void mcam_sg_next_buffer(struct mcam_camera *cam)
{
	struct mcam_vb_buffer *buf;
	struct sg_table *sg_table;

	buf = list_first_entry(&cam->buffers, struct mcam_vb_buffer, queue);
	list_del_init(&buf->queue);
	sg_table = vb2_dma_sg_plane_desc(&buf->vb_buf.vb2_buf, 0);
	/*
	 * Very Bad Not Good Things happen if you don't clear
	 * C1_DESC_ENA before making any descriptor changes.
	 */
	mcam_reg_clear_bit(cam, REG_CTRL1, C1_DESC_ENA);
	mcam_reg_write(cam, REG_DMA_DESC_Y, buf->dma_desc_pa);
	mcam_reg_write(cam, REG_DESC_LEN_Y,
			sg_table->nents * sizeof(struct mcam_dma_desc));
	/* Everything goes through the Y chain; no separate U/V descriptors. */
	mcam_reg_write(cam, REG_DESC_LEN_U, 0);
	mcam_reg_write(cam, REG_DESC_LEN_V, 0);
	mcam_reg_set_bit(cam, REG_CTRL1, C1_DESC_ENA);
	/* S/G mode only ever tracks a single active buffer, in slot 0. */
	cam->vb_bufs[0] = buf;
}
624
/*
 * Initial B_DMA_sg setup
 */
static void mcam_ctlr_dma_sg(struct mcam_camera *cam)
{
	/*
	 * The list-empty condition can hit us at resume time
	 * if the buffer list was empty when the system was suspended.
	 */
	if (list_empty(&cam->buffers)) {
		set_bit(CF_SG_RESTART, &cam->flags);
		return;
	}

	/* Two-word descriptors (see struct mcam_dma_desc). */
	mcam_reg_clear_bit(cam, REG_CTRL1, C1_DESC_3WORD);
	mcam_sg_next_buffer(cam);
	cam->nbufs = 3;
}
643
644
645 /*
646 * Frame completion with S/G is trickier. We can't muck with
647 * a descriptor chain on the fly, since the controller buffers it
648 * internally. So we have to actually stop and restart; Marvell
649 * says this is the way to do it.
650 *
651 * Of course, stopping is easier said than done; experience shows
652 * that the controller can start a frame *after* C0_ENABLE has been
653 * cleared. So when running in S/G mode, the controller is "stopped"
654 * on receipt of the start-of-frame interrupt. That means we can
655 * safely change the DMA descriptor array here and restart things
656 * (assuming there's another buffer waiting to go).
657 */
mcam_dma_sg_done(struct mcam_camera * cam,int frame)658 static void mcam_dma_sg_done(struct mcam_camera *cam, int frame)
659 {
660 struct mcam_vb_buffer *buf = cam->vb_bufs[0];
661
662 /*
663 * If we're no longer supposed to be streaming, don't do anything.
664 */
665 if (cam->state != S_STREAMING)
666 return;
667 /*
668 * If we have another buffer available, put it in and
669 * restart the engine.
670 */
671 if (!list_empty(&cam->buffers)) {
672 mcam_sg_next_buffer(cam);
673 mcam_ctlr_start(cam);
674 /*
675 * Otherwise set CF_SG_RESTART and the controller will
676 * be restarted once another buffer shows up.
677 */
678 } else {
679 set_bit(CF_SG_RESTART, &cam->flags);
680 cam->frame_state.singles++;
681 cam->vb_bufs[0] = NULL;
682 }
683 /*
684 * Now we can give the completed frame back to user space.
685 */
686 cam->frame_state.delivered++;
687 mcam_buffer_done(cam, frame, &buf->vb_buf);
688 }
689
690
/*
 * Scatter/gather mode requires stopping the controller between
 * frames so we can put in a new DMA descriptor array. If no new
 * buffer exists at frame completion, the controller is left stopped;
 * this function is charged with getting things going again.
 */
static void mcam_sg_restart(struct mcam_camera *cam)
{
	mcam_ctlr_dma_sg(cam);
	mcam_ctlr_start(cam);
	clear_bit(CF_SG_RESTART, &cam->flags);
}
703
704 #else /* MCAM_MODE_DMA_SG */
705
/* No-op stand-in used when S/G buffer mode is compiled out. */
static inline void mcam_sg_restart(struct mcam_camera *cam)
{
}
710
711 #endif /* MCAM_MODE_DMA_SG */
712
713 /* ---------------------------------------------------------------------- */
714 /*
715 * Buffer-mode-independent controller code.
716 */
717
/*
 * Image format setup: program pitch, size and data-format registers
 * from the current pix_format.  Caller has arranged for register
 * access to be safe (see mcam_ctlr_configure()).
 */
static void mcam_ctlr_image(struct mcam_camera *cam)
{
	struct v4l2_pix_format *fmt = &cam->pix_format;
	u32 widthy = 0, widthuv = 0, imgsz_h, imgsz_w;

	cam_dbg(cam, "camera: bytesperline = %d; height = %d\n",
			fmt->bytesperline, fmt->sizeimage / fmt->bytesperline);
	imgsz_h = (fmt->height << IMGSZ_V_SHIFT) & IMGSZ_V_MASK;
	/*
	 * NOTE(review): horizontal size is always width*2, even for the
	 * 1-byte-per-pixel SBGGR8/planar cases — presumably what the
	 * hardware expects, but worth confirming against the datasheet.
	 */
	imgsz_w = (fmt->width * 2) & IMGSZ_H_MASK;

	/* Per-plane line pitches, in bytes. */
	switch (fmt->pixelformat) {
	case V4L2_PIX_FMT_YUYV:
	case V4L2_PIX_FMT_YVYU:
		widthy = fmt->width * 2;
		widthuv = 0;
		break;
	case V4L2_PIX_FMT_YUV420:
	case V4L2_PIX_FMT_YVU420:
		widthy = fmt->width;
		widthuv = fmt->width / 2;
		break;
	default:
		widthy = fmt->bytesperline;
		widthuv = 0;
		break;
	}

	mcam_reg_write_mask(cam, REG_IMGPITCH, widthuv << 16 | widthy,
			IMGP_YP_MASK | IMGP_UVP_MASK);
	mcam_reg_write(cam, REG_IMGSIZE, imgsz_h | imgsz_w);
	mcam_reg_write(cam, REG_IMGOFFSET, 0x0);

	/*
	 * Tell the controller about the image format we are using.
	 */
	switch (fmt->pixelformat) {
	case V4L2_PIX_FMT_YUV420:
	case V4L2_PIX_FMT_YVU420:
		mcam_reg_write_mask(cam, REG_CTRL0,
			C0_DF_YUV | C0_YUV_420PL | C0_YUVE_VYUY, C0_DF_MASK);
		break;
	case V4L2_PIX_FMT_YUYV:
		mcam_reg_write_mask(cam, REG_CTRL0,
			C0_DF_YUV | C0_YUV_PACKED | C0_YUVE_NOSWAP, C0_DF_MASK);
		break;
	case V4L2_PIX_FMT_YVYU:
		mcam_reg_write_mask(cam, REG_CTRL0,
			C0_DF_YUV | C0_YUV_PACKED | C0_YUVE_SWAP24, C0_DF_MASK);
		break;
	case V4L2_PIX_FMT_XRGB444:
		mcam_reg_write_mask(cam, REG_CTRL0,
			C0_DF_RGB | C0_RGBF_444 | C0_RGB4_XBGR, C0_DF_MASK);
		break;
	case V4L2_PIX_FMT_RGB565:
		mcam_reg_write_mask(cam, REG_CTRL0,
			C0_DF_RGB | C0_RGBF_565 | C0_RGB5_BGGR, C0_DF_MASK);
		break;
	case V4L2_PIX_FMT_SBGGR8:
		mcam_reg_write_mask(cam, REG_CTRL0,
			C0_DF_RGB | C0_RGB5_GRBG, C0_DF_MASK);
		break;
	default:
		cam_err(cam, "camera: unknown format: %#x\n", fmt->pixelformat);
		break;
	}

	/*
	 * Make sure it knows we want to use hsync/vsync.
	 */
	mcam_reg_write_mask(cam, REG_CTRL0, C0_SIF_HVSYNC, C0_SIFM_MASK);
}
792
793
/*
 * Configure the controller for operation; caller holds the
 * device mutex.  Programs the mode-specific DMA setup and the image
 * format under the device spinlock, then clears the "config needed"
 * flag.  Always succeeds (returns 0).
 */
static int mcam_ctlr_configure(struct mcam_camera *cam)
{
	unsigned long flags;

	spin_lock_irqsave(&cam->dev_lock, flags);
	clear_bit(CF_SG_RESTART, &cam->flags);
	cam->dma_setup(cam);	/* Buffer-mode-specific hook */
	mcam_ctlr_image(cam);
	mcam_set_config_needed(cam, 0);
	spin_unlock_irqrestore(&cam->dev_lock, flags);
	return 0;
}
810
/* Enable the frame interrupts, discarding anything stale first. */
static void mcam_ctlr_irq_enable(struct mcam_camera *cam)
{
	/*
	 * Clear any pending interrupts, since we do not
	 * expect to have I/O active prior to enabling.
	 */
	mcam_reg_write(cam, REG_IRQSTAT, FRAMEIRQS);
	mcam_reg_set_bit(cam, REG_IRQMASK, FRAMEIRQS);
}

/* Mask off the frame interrupts. */
static void mcam_ctlr_irq_disable(struct mcam_camera *cam)
{
	mcam_reg_clear_bit(cam, REG_IRQMASK, FRAMEIRQS);
}
825
/*
 * Stop the controller, and don't return until we're really sure that no
 * further DMA is going on.
 */
static void mcam_ctlr_stop_dma(struct mcam_camera *cam)
{
	unsigned long flags;

	/*
	 * Theory: stop the camera controller (whether it is operating
	 * or not). Delay briefly just in case we race with the SOF
	 * interrupt, then wait until no DMA is active.
	 */
	spin_lock_irqsave(&cam->dev_lock, flags);
	clear_bit(CF_SG_RESTART, &cam->flags);
	mcam_ctlr_stop(cam);
	cam->state = S_IDLE;
	spin_unlock_irqrestore(&cam->dev_lock, flags);
	/*
	 * This is a brutally long sleep, but experience shows that
	 * it can take the controller a while to get the message that
	 * it needs to stop grabbing frames. In particular, we can
	 * sometimes (on mmp) get a frame at the end WITHOUT the
	 * start-of-frame indication.
	 */
	msleep(150);
	if (test_bit(CF_DMA_ACTIVE, &cam->flags))
		cam_err(cam, "Timeout waiting for DMA to end\n");
		/* This would be bad news - what now? */
	spin_lock_irqsave(&cam->dev_lock, flags);
	mcam_ctlr_irq_disable(cam);
	spin_unlock_irqrestore(&cam->dev_lock, flags);
}
859
/*
 * Power up and down.
 */
static int mcam_ctlr_power_up(struct mcam_camera *cam)
{
	unsigned long flags;
	int ret;

	spin_lock_irqsave(&cam->dev_lock, flags);
	/* Platform power-up first; its failure aborts the whole operation. */
	if (cam->plat_power_up) {
		ret = cam->plat_power_up(cam);
		if (ret) {
			spin_unlock_irqrestore(&cam->dev_lock, flags);
			return ret;
		}
	}
	mcam_reg_clear_bit(cam, REG_CTRL1, C1_PWRDWN);
	spin_unlock_irqrestore(&cam->dev_lock, flags);
	return 0;
}
880
/* Power the controller down, mirroring mcam_ctlr_power_up(). */
static void mcam_ctlr_power_down(struct mcam_camera *cam)
{
	unsigned long flags;

	spin_lock_irqsave(&cam->dev_lock, flags);
	/*
	 * School of hard knocks department: be sure we do any register
	 * twiddling on the controller *before* calling the platform
	 * power down routine.
	 */
	mcam_reg_set_bit(cam, REG_CTRL1, C1_PWRDWN);
	if (cam->plat_power_down)
		cam->plat_power_down(cam);
	spin_unlock_irqrestore(&cam->dev_lock, flags);
}
896
897 /* ---------------------------------------------------------------------- */
898 /*
899 * Master sensor clock.
900 */
mclk_prepare(struct clk_hw * hw)901 static int mclk_prepare(struct clk_hw *hw)
902 {
903 struct mcam_camera *cam = container_of(hw, struct mcam_camera, mclk_hw);
904
905 clk_prepare(cam->clk[0]);
906 return 0;
907 }
908
/* clk_ops .unprepare: undo mclk_prepare() on the parent clock. */
static void mclk_unprepare(struct clk_hw *hw)
{
	struct mcam_camera *cam = container_of(hw, struct mcam_camera, mclk_hw);

	clk_unprepare(cam->clk[0]);
}
915
mclk_enable(struct clk_hw * hw)916 static int mclk_enable(struct clk_hw *hw)
917 {
918 struct mcam_camera *cam = container_of(hw, struct mcam_camera, mclk_hw);
919 int mclk_src;
920 int mclk_div;
921 int ret;
922
923 /*
924 * Clock the sensor appropriately. Controller clock should
925 * be 48MHz, sensor "typical" value is half that.
926 */
927 if (cam->bus_type == V4L2_MBUS_CSI2_DPHY) {
928 mclk_src = cam->mclk_src;
929 mclk_div = cam->mclk_div;
930 } else {
931 mclk_src = 3;
932 mclk_div = 2;
933 }
934
935 ret = pm_runtime_resume_and_get(cam->dev);
936 if (ret < 0)
937 return ret;
938 ret = clk_enable(cam->clk[0]);
939 if (ret) {
940 pm_runtime_put(cam->dev);
941 return ret;
942 }
943
944 mcam_reg_write(cam, REG_CLKCTRL, (mclk_src << 29) | mclk_div);
945 mcam_ctlr_power_up(cam);
946
947 return 0;
948 }
949
/* clk_ops .disable: reverse mclk_enable() in the opposite order. */
static void mclk_disable(struct clk_hw *hw)
{
	struct mcam_camera *cam = container_of(hw, struct mcam_camera, mclk_hw);

	mcam_ctlr_power_down(cam);
	clk_disable(cam->clk[0]);
	pm_runtime_put(cam->dev);
}
958
/*
 * clk_ops .recalc_rate: the controller clock is a fixed 48MHz
 * regardless of the parent rate (see the comment in mclk_enable()).
 */
static unsigned long mclk_recalc_rate(struct clk_hw *hw,
				unsigned long parent_rate)
{
	return 48000000;
}
964
/* Operations for the sensor master clock we expose via clk-provider. */
static const struct clk_ops mclk_ops = {
	.prepare = mclk_prepare,
	.unprepare = mclk_unprepare,
	.enable = mclk_enable,
	.disable = mclk_disable,
	.recalc_rate = mclk_recalc_rate,
};
972
973 /* -------------------------------------------------------------------- */
974 /*
975 * Communications with the sensor.
976 */
977
/* Ask the sensor subdevice to reset itself (core "reset" op, arg 0). */
static int __mcam_cam_reset(struct mcam_camera *cam)
{
	return sensor_call(cam, core, reset, 0);
}
982
/*
 * We have found the sensor on the i2c. Let's try to have a
 * conversation.  Resets the sensor and moves the device to S_IDLE
 * (even on reset failure — the error is returned to the caller).
 */
static int mcam_cam_init(struct mcam_camera *cam)
{
	int ret;

	if (cam->state != S_NOTREADY)
		cam_warn(cam, "Cam init with device in funky state %d",
				cam->state);
	ret = __mcam_cam_reset(cam);
	/* Get/set parameters? */
	cam->state = S_IDLE;
	return ret;
}
999
1000 /*
1001 * Configure the sensor to match the parameters we have. Caller should
1002 * hold s_mutex
1003 */
mcam_cam_set_flip(struct mcam_camera * cam)1004 static int mcam_cam_set_flip(struct mcam_camera *cam)
1005 {
1006 struct v4l2_control ctrl;
1007
1008 memset(&ctrl, 0, sizeof(ctrl));
1009 ctrl.id = V4L2_CID_VFLIP;
1010 ctrl.value = flip;
1011 return v4l2_s_ctrl(NULL, cam->sensor->ctrl_handler, &ctrl);
1012 }
1013
1014
/*
 * Push the current pixel format to the sensor: init it, set the
 * corresponding media-bus format, then apply the flip setting.
 */
static int mcam_cam_configure(struct mcam_camera *cam)
{
	struct v4l2_subdev_format format = {
		.which = V4L2_SUBDEV_FORMAT_ACTIVE,
	};
	int ret;

	v4l2_fill_mbus_format(&format.format, &cam->pix_format, cam->mbus_code);
	ret = sensor_call(cam, core, init, 0);
	if (ret == 0)
		ret = sensor_call(cam, pad, set_fmt, NULL, &format);
	/*
	 * OV7670 does weird things if flip is set *before* format...
	 *
	 * NOTE(review): error codes are summed here, so a combined
	 * failure value is not a meaningful errno — presumably callers
	 * only test for nonzero; verify before relying on the value.
	 */
	ret += mcam_cam_set_flip(cam);
	return ret;
}
1032
1033 /*
1034 * Get everything ready, and start grabbing frames.
1035 */
static int mcam_read_setup(struct mcam_camera *cam)
{
	int ret;
	unsigned long flags;

	/*
	 * Configuration. If we still don't have DMA buffers,
	 * make one last, desperate attempt.
	 */
	if (cam->buffer_mode == B_vmalloc && cam->nbufs == 0 &&
			mcam_alloc_dma_bufs(cam, 0))
		return -ENOMEM;

	if (mcam_needs_config(cam)) {
		mcam_cam_configure(cam);	/* NOTE(review): result ignored */
		ret = mcam_ctlr_configure(cam);
		if (ret)
			return ret;
	}

	/*
	 * Turn it loose.  Everything below runs under dev_lock so the
	 * IRQ handler sees a consistent state transition.
	 */
	spin_lock_irqsave(&cam->dev_lock, flags);
	clear_bit(CF_DMA_ACTIVE, &cam->flags);
	mcam_reset_buffers(cam);
	/* MIPI lanes only for CSI-2 D-PHY sensors; others get them disabled */
	if (cam->bus_type == V4L2_MBUS_CSI2_DPHY)
		mcam_enable_mipi(cam);
	else
		mcam_disable_mipi(cam);
	mcam_ctlr_irq_enable(cam);
	cam->state = S_STREAMING;
	/* A pending S/G restart will kick the controller itself */
	if (!test_bit(CF_SG_RESTART, &cam->flags))
		mcam_ctlr_start(cam);
	spin_unlock_irqrestore(&cam->dev_lock, flags);
	return 0;
}
1073
1074 /* ----------------------------------------------------------------------- */
1075 /*
1076 * Videobuf2 interface code.
1077 */
1078
mcam_vb_queue_setup(struct vb2_queue * vq,unsigned int * nbufs,unsigned int * num_planes,unsigned int sizes[],struct device * alloc_devs[])1079 static int mcam_vb_queue_setup(struct vb2_queue *vq,
1080 unsigned int *nbufs,
1081 unsigned int *num_planes, unsigned int sizes[],
1082 struct device *alloc_devs[])
1083 {
1084 struct mcam_camera *cam = vb2_get_drv_priv(vq);
1085 int minbufs = (cam->buffer_mode == B_DMA_contig) ? 3 : 2;
1086 unsigned size = cam->pix_format.sizeimage;
1087
1088 if (*nbufs < minbufs)
1089 *nbufs = minbufs;
1090
1091 if (*num_planes)
1092 return sizes[0] < size ? -EINVAL : 0;
1093 sizes[0] = size;
1094 *num_planes = 1; /* Someday we have to support planar formats... */
1095 return 0;
1096 }
1097
1098
static void mcam_vb_buf_queue(struct vb2_buffer *vb)
{
	struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
	struct mcam_vb_buffer *mvb = vb_to_mvb(vbuf);
	struct mcam_camera *cam = vb2_get_drv_priv(vb->vb2_queue);
	unsigned long flags;
	int start;

	spin_lock_irqsave(&cam->dev_lock, flags);
	/* Decide whether this queueing un-blocks S_BUFWAIT, under dev_lock */
	start = (cam->state == S_BUFWAIT) && !list_empty(&cam->buffers);
	list_add(&mvb->queue, &cam->buffers);
	/* In S/G mode the engine may have stalled waiting for a buffer */
	if (cam->state == S_STREAMING && test_bit(CF_SG_RESTART, &cam->flags))
		mcam_sg_restart(cam);
	spin_unlock_irqrestore(&cam->dev_lock, flags);
	/* mcam_read_setup() takes dev_lock itself, so call it after unlock */
	if (start)
		mcam_read_setup(cam);
}
1116
mcam_vb_requeue_bufs(struct vb2_queue * vq,enum vb2_buffer_state state)1117 static void mcam_vb_requeue_bufs(struct vb2_queue *vq,
1118 enum vb2_buffer_state state)
1119 {
1120 struct mcam_camera *cam = vb2_get_drv_priv(vq);
1121 struct mcam_vb_buffer *buf, *node;
1122 unsigned long flags;
1123 unsigned i;
1124
1125 spin_lock_irqsave(&cam->dev_lock, flags);
1126 list_for_each_entry_safe(buf, node, &cam->buffers, queue) {
1127 vb2_buffer_done(&buf->vb_buf.vb2_buf, state);
1128 list_del(&buf->queue);
1129 }
1130 for (i = 0; i < MAX_DMA_BUFS; i++) {
1131 buf = cam->vb_bufs[i];
1132
1133 if (buf) {
1134 vb2_buffer_done(&buf->vb_buf.vb2_buf, state);
1135 cam->vb_bufs[i] = NULL;
1136 }
1137 }
1138 spin_unlock_irqrestore(&cam->dev_lock, flags);
1139 }
1140
1141 /*
1142 * These need to be called with the mutex held from vb2
1143 */
static int mcam_vb_start_streaming(struct vb2_queue *vq, unsigned int count)
{
	struct mcam_camera *cam = vb2_get_drv_priv(vq);
	unsigned int frame;
	int ret;

	/* Streaming can only begin from the idle state */
	if (cam->state != S_IDLE) {
		mcam_vb_requeue_bufs(vq, VB2_BUF_STATE_QUEUED);
		return -EINVAL;
	}
	/* Fresh statistics for this streaming session */
	cam->frame_state.frames = 0;
	cam->frame_state.singles = 0;
	cam->frame_state.delivered = 0;
	cam->sequence = 0;
	/*
	 * Videobuf2 sneakily hoards all the buffers and won't
	 * give them to us until *after* streaming starts. But
	 * we can't actually start streaming until we have a
	 * destination. So go into a wait state and hope they
	 * give us buffers soon.
	 */
	if (cam->buffer_mode != B_vmalloc && list_empty(&cam->buffers)) {
		cam->state = S_BUFWAIT;	/* mcam_vb_buf_queue() finishes up */
		return 0;
	}

	/*
	 * Ensure clear the left over frame flags
	 * before every really start streaming
	 */
	for (frame = 0; frame < cam->nbufs; frame++)
		clear_bit(CF_FRAME_SOF0 + frame, &cam->flags);

	ret = mcam_read_setup(cam);
	if (ret)
		mcam_vb_requeue_bufs(vq, VB2_BUF_STATE_QUEUED);
	return ret;
}
1182
static void mcam_vb_stop_streaming(struct vb2_queue *vq)
{
	struct mcam_camera *cam = vb2_get_drv_priv(vq);

	cam_dbg(cam, "stop_streaming: %d frames, %d singles, %d delivered\n",
			cam->frame_state.frames, cam->frame_state.singles,
			cam->frame_state.delivered);
	if (cam->state == S_BUFWAIT) {
		/* They never gave us buffers */
		cam->state = S_IDLE;
		return;
	}
	/* Nothing to stop if we never made it to streaming */
	if (cam->state != S_STREAMING)
		return;
	mcam_ctlr_stop_dma(cam);
	/*
	 * VB2 reclaims the buffers, so we need to forget
	 * about them.
	 */
	mcam_vb_requeue_bufs(vq, VB2_BUF_STATE_ERROR);
}
1204
1205
/* vb2 callbacks shared by the vmalloc and contiguous-DMA buffer modes */
static const struct vb2_ops mcam_vb2_ops = {
	.queue_setup = mcam_vb_queue_setup,
	.buf_queue = mcam_vb_buf_queue,
	.start_streaming = mcam_vb_start_streaming,
	.stop_streaming = mcam_vb_stop_streaming,
};
1212
1213
1214 #ifdef MCAM_MODE_DMA_SG
1215 /*
1216 * Scatter/gather mode uses all of the above functions plus a
1217 * few extras to deal with DMA mapping.
1218 */
mcam_vb_sg_buf_init(struct vb2_buffer * vb)1219 static int mcam_vb_sg_buf_init(struct vb2_buffer *vb)
1220 {
1221 struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
1222 struct mcam_vb_buffer *mvb = vb_to_mvb(vbuf);
1223 struct mcam_camera *cam = vb2_get_drv_priv(vb->vb2_queue);
1224 int ndesc = cam->pix_format.sizeimage/PAGE_SIZE + 1;
1225
1226 mvb->dma_desc = dma_alloc_coherent(cam->dev,
1227 ndesc * sizeof(struct mcam_dma_desc),
1228 &mvb->dma_desc_pa, GFP_KERNEL);
1229 if (mvb->dma_desc == NULL) {
1230 cam_err(cam, "Unable to get DMA descriptor array\n");
1231 return -ENOMEM;
1232 }
1233 return 0;
1234 }
1235
mcam_vb_sg_buf_prepare(struct vb2_buffer * vb)1236 static int mcam_vb_sg_buf_prepare(struct vb2_buffer *vb)
1237 {
1238 struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
1239 struct mcam_vb_buffer *mvb = vb_to_mvb(vbuf);
1240 struct sg_table *sg_table = vb2_dma_sg_plane_desc(vb, 0);
1241 struct mcam_dma_desc *desc = mvb->dma_desc;
1242 struct scatterlist *sg;
1243 int i;
1244
1245 for_each_sg(sg_table->sgl, sg, sg_table->nents, i) {
1246 desc->dma_addr = sg_dma_address(sg);
1247 desc->segment_len = sg_dma_len(sg);
1248 desc++;
1249 }
1250 return 0;
1251 }
1252
mcam_vb_sg_buf_cleanup(struct vb2_buffer * vb)1253 static void mcam_vb_sg_buf_cleanup(struct vb2_buffer *vb)
1254 {
1255 struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
1256 struct mcam_camera *cam = vb2_get_drv_priv(vb->vb2_queue);
1257 struct mcam_vb_buffer *mvb = vb_to_mvb(vbuf);
1258 int ndesc = cam->pix_format.sizeimage/PAGE_SIZE + 1;
1259
1260 dma_free_coherent(cam->dev, ndesc * sizeof(struct mcam_dma_desc),
1261 mvb->dma_desc, mvb->dma_desc_pa);
1262 }
1263
1264
/* vb2 callbacks for scatter/gather mode, adding DMA descriptor management */
static const struct vb2_ops mcam_vb2_sg_ops = {
	.queue_setup = mcam_vb_queue_setup,
	.buf_init = mcam_vb_sg_buf_init,
	.buf_prepare = mcam_vb_sg_buf_prepare,
	.buf_queue = mcam_vb_buf_queue,
	.buf_cleanup = mcam_vb_sg_buf_cleanup,
	.start_streaming = mcam_vb_start_streaming,
	.stop_streaming = mcam_vb_stop_streaming,
};
1274
1275 #endif /* MCAM_MODE_DMA_SG */
1276
/*
 * Initialize the vb2 queue and pick the buffer-mode-specific vb2 ops,
 * DMA setup and frame-completion handlers.
 */
static int mcam_setup_vb2(struct mcam_camera *cam)
{
	struct vb2_queue *vq = &cam->vb_queue;

	memset(vq, 0, sizeof(*vq));
	vq->type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	vq->drv_priv = cam;
	vq->lock = &cam->s_mutex;
	vq->timestamp_flags = V4L2_BUF_FLAG_TIMESTAMP_MONOTONIC;
	vq->io_modes = VB2_MMAP | VB2_USERPTR | VB2_DMABUF | VB2_READ;
	vq->buf_struct_size = sizeof(struct mcam_vb_buffer);
	vq->dev = cam->dev;
	INIT_LIST_HEAD(&cam->buffers);
	/*
	 * The mode was already validated in mccic_register(), so the
	 * #ifdef'ed bodies below are expected to be compiled in for the
	 * mode actually chosen.
	 */
	switch (cam->buffer_mode) {
	case B_DMA_contig:
#ifdef MCAM_MODE_DMA_CONTIG
		vq->ops = &mcam_vb2_ops;
		vq->mem_ops = &vb2_dma_contig_memops;
		cam->dma_setup = mcam_ctlr_dma_contig;
		cam->frame_complete = mcam_dma_contig_done;
#endif
		break;
	case B_DMA_sg:
#ifdef MCAM_MODE_DMA_SG
		vq->ops = &mcam_vb2_sg_ops;
		vq->mem_ops = &vb2_dma_sg_memops;
		cam->dma_setup = mcam_ctlr_dma_sg;
		cam->frame_complete = mcam_dma_sg_done;
#endif
		break;
	case B_vmalloc:
#ifdef MCAM_MODE_VMALLOC
		/* Deferred frame handling for vmalloc mode runs from s_bh_work */
		INIT_WORK(&cam->s_bh_work, mcam_frame_work);
		vq->ops = &mcam_vb2_ops;
		vq->mem_ops = &vb2_vmalloc_memops;
		cam->dma_setup = mcam_ctlr_dma_vmalloc;
		cam->frame_complete = mcam_vmalloc_done;
#endif
		break;
	}
	return vb2_queue_init(vq);
}
1319
1320
1321 /* ---------------------------------------------------------------------- */
1322 /*
1323 * The long list of V4L2 ioctl() operations.
1324 */
1325
mcam_vidioc_querycap(struct file * file,void * priv,struct v4l2_capability * cap)1326 static int mcam_vidioc_querycap(struct file *file, void *priv,
1327 struct v4l2_capability *cap)
1328 {
1329 struct mcam_camera *cam = video_drvdata(file);
1330
1331 strscpy(cap->driver, "marvell_ccic", sizeof(cap->driver));
1332 strscpy(cap->card, "marvell_ccic", sizeof(cap->card));
1333 strscpy(cap->bus_info, cam->bus_info, sizeof(cap->bus_info));
1334 return 0;
1335 }
1336
1337
mcam_vidioc_enum_fmt_vid_cap(struct file * filp,void * priv,struct v4l2_fmtdesc * fmt)1338 static int mcam_vidioc_enum_fmt_vid_cap(struct file *filp,
1339 void *priv, struct v4l2_fmtdesc *fmt)
1340 {
1341 if (fmt->index >= N_MCAM_FMTS)
1342 return -EINVAL;
1343 fmt->pixelformat = mcam_formats[fmt->index].pixelformat;
1344 return 0;
1345 }
1346
/*
 * VIDIOC_TRY_FMT: negotiate the format with the sensor (TRY state only,
 * nothing is committed) and compute the derived stride/size fields.
 */
static int mcam_vidioc_try_fmt_vid_cap(struct file *filp, void *priv,
		struct v4l2_format *fmt)
{
	struct mcam_camera *cam = video_drvdata(filp);
	struct mcam_format_struct *f;
	struct v4l2_pix_format *pix = &fmt->fmt.pix;
	struct v4l2_subdev_pad_config pad_cfg;
	struct v4l2_subdev_state pad_state = {
		.pads = &pad_cfg,
	};
	struct v4l2_subdev_format format = {
		.which = V4L2_SUBDEV_FORMAT_TRY,
	};
	int ret;

	/* mcam_find_format() may substitute a supported format */
	f = mcam_find_format(pix->pixelformat);
	pix->pixelformat = f->pixelformat;
	v4l2_fill_mbus_format(&format.format, pix, f->mbus_code);
	ret = sensor_call(cam, pad, set_fmt, &pad_state, &format);
	v4l2_fill_pix_format(pix, &format.format);
	pix->bytesperline = pix->width * f->bpp;
	switch (f->pixelformat) {
	case V4L2_PIX_FMT_YUV420:
	case V4L2_PIX_FMT_YVU420:
		/* Planar 4:2:0: two chroma planes add half a luma plane */
		pix->sizeimage = pix->height * pix->bytesperline * 3 / 2;
		break;
	default:
		pix->sizeimage = pix->height * pix->bytesperline;
		break;
	}
	pix->colorspace = V4L2_COLORSPACE_SRGB;
	return ret;
}
1380
/*
 * VIDIOC_S_FMT: validate via try_fmt, then store the new format and
 * flag the hardware for reconfiguration at the next stream start.
 */
static int mcam_vidioc_s_fmt_vid_cap(struct file *filp, void *priv,
		struct v4l2_format *fmt)
{
	struct mcam_camera *cam = video_drvdata(filp);
	struct mcam_format_struct *f;
	int ret;

	/*
	 * Can't do anything if the device is not idle
	 * Also can't if there are streaming buffers in place.
	 */
	if (cam->state != S_IDLE || vb2_is_busy(&cam->vb_queue))
		return -EBUSY;

	f = mcam_find_format(fmt->fmt.pix.pixelformat);

	/*
	 * See if the formatting works in principle.
	 */
	ret = mcam_vidioc_try_fmt_vid_cap(filp, priv, fmt);
	if (ret)
		return ret;
	/*
	 * Now we start to change things for real, so let's do it
	 * under lock.
	 */
	cam->pix_format = fmt->fmt.pix;
	cam->mbus_code = f->mbus_code;

	/*
	 * Make sure we have appropriate DMA buffers.
	 */
	if (cam->buffer_mode == B_vmalloc) {
		ret = mcam_check_dma_buffers(cam);
		if (ret)
			goto out;
	}
	/* Defer hardware reprogramming until streaming (re)starts */
	mcam_set_config_needed(cam, 1);
out:
	return ret;
}
1422
1423 /*
1424 * Return our stored notion of how the camera is/should be configured.
1425 * The V4l2 spec wants us to be smarter, and actually get this from
1426 * the camera (and not mess with it at open time). Someday.
1427 */
mcam_vidioc_g_fmt_vid_cap(struct file * filp,void * priv,struct v4l2_format * f)1428 static int mcam_vidioc_g_fmt_vid_cap(struct file *filp, void *priv,
1429 struct v4l2_format *f)
1430 {
1431 struct mcam_camera *cam = video_drvdata(filp);
1432
1433 f->fmt.pix = cam->pix_format;
1434 return 0;
1435 }
1436
1437 /*
1438 * We only have one input - the sensor - so minimize the nonsense here.
1439 */
mcam_vidioc_enum_input(struct file * filp,void * priv,struct v4l2_input * input)1440 static int mcam_vidioc_enum_input(struct file *filp, void *priv,
1441 struct v4l2_input *input)
1442 {
1443 if (input->index != 0)
1444 return -EINVAL;
1445
1446 input->type = V4L2_INPUT_TYPE_CAMERA;
1447 strscpy(input->name, "Camera", sizeof(input->name));
1448 return 0;
1449 }
1450
/* Report the only input, index 0. */
static int mcam_vidioc_g_input(struct file *filp, void *priv, unsigned int *i)
{
	*i = 0;
	return 0;
}
1456
mcam_vidioc_s_input(struct file * filp,void * priv,unsigned int i)1457 static int mcam_vidioc_s_input(struct file *filp, void *priv, unsigned int i)
1458 {
1459 if (i != 0)
1460 return -EINVAL;
1461 return 0;
1462 }
1463
1464 /*
1465 * G/S_PARM. Most of this is done by the sensor, but we are
1466 * the level which controls the number of read buffers.
1467 */
mcam_vidioc_g_parm(struct file * filp,void * priv,struct v4l2_streamparm * a)1468 static int mcam_vidioc_g_parm(struct file *filp, void *priv,
1469 struct v4l2_streamparm *a)
1470 {
1471 struct mcam_camera *cam = video_drvdata(filp);
1472 int ret;
1473
1474 ret = v4l2_g_parm_cap(video_devdata(filp), cam->sensor, a);
1475 a->parm.capture.readbuffers = n_dma_bufs;
1476 return ret;
1477 }
1478
mcam_vidioc_s_parm(struct file * filp,void * priv,struct v4l2_streamparm * a)1479 static int mcam_vidioc_s_parm(struct file *filp, void *priv,
1480 struct v4l2_streamparm *a)
1481 {
1482 struct mcam_camera *cam = video_drvdata(filp);
1483 int ret;
1484
1485 ret = v4l2_s_parm_cap(video_devdata(filp), cam->sensor, a);
1486 a->parm.capture.readbuffers = n_dma_bufs;
1487 return ret;
1488 }
1489
/*
 * VIDIOC_ENUM_FRAMESIZES: delegate to the sensor's pad op, converting a
 * degenerate (min == max) range into a discrete size.
 */
static int mcam_vidioc_enum_framesizes(struct file *filp, void *priv,
		struct v4l2_frmsizeenum *sizes)
{
	struct mcam_camera *cam = video_drvdata(filp);
	struct mcam_format_struct *f;
	struct v4l2_subdev_frame_size_enum fse = {
		.index = sizes->index,
		.which = V4L2_SUBDEV_FORMAT_ACTIVE,
	};
	int ret;

	/* mcam_find_format() may fall back; reject unknown fourccs here */
	f = mcam_find_format(sizes->pixel_format);
	if (f->pixelformat != sizes->pixel_format)
		return -EINVAL;
	fse.code = f->mbus_code;
	ret = sensor_call(cam, pad, enum_frame_size, NULL, &fse);
	if (ret)
		return ret;
	if (fse.min_width == fse.max_width &&
			fse.min_height == fse.max_height) {
		sizes->type = V4L2_FRMSIZE_TYPE_DISCRETE;
		sizes->discrete.width = fse.min_width;
		sizes->discrete.height = fse.min_height;
		return 0;
	}
	sizes->type = V4L2_FRMSIZE_TYPE_CONTINUOUS;
	sizes->stepwise.min_width = fse.min_width;
	sizes->stepwise.max_width = fse.max_width;
	sizes->stepwise.min_height = fse.min_height;
	sizes->stepwise.max_height = fse.max_height;
	sizes->stepwise.step_width = 1;
	sizes->stepwise.step_height = 1;
	return 0;
}
1524
mcam_vidioc_enum_frameintervals(struct file * filp,void * priv,struct v4l2_frmivalenum * interval)1525 static int mcam_vidioc_enum_frameintervals(struct file *filp, void *priv,
1526 struct v4l2_frmivalenum *interval)
1527 {
1528 struct mcam_camera *cam = video_drvdata(filp);
1529 struct mcam_format_struct *f;
1530 struct v4l2_subdev_frame_interval_enum fie = {
1531 .index = interval->index,
1532 .width = interval->width,
1533 .height = interval->height,
1534 .which = V4L2_SUBDEV_FORMAT_ACTIVE,
1535 };
1536 int ret;
1537
1538 f = mcam_find_format(interval->pixel_format);
1539 if (f->pixelformat != interval->pixel_format)
1540 return -EINVAL;
1541 fie.code = f->mbus_code;
1542 ret = sensor_call(cam, pad, enum_frame_interval, NULL, &fie);
1543 if (ret)
1544 return ret;
1545 interval->type = V4L2_FRMIVAL_TYPE_DISCRETE;
1546 interval->discrete = fie.interval;
1547 return 0;
1548 }
1549
1550 #ifdef CONFIG_VIDEO_ADV_DEBUG
mcam_vidioc_g_register(struct file * file,void * priv,struct v4l2_dbg_register * reg)1551 static int mcam_vidioc_g_register(struct file *file, void *priv,
1552 struct v4l2_dbg_register *reg)
1553 {
1554 struct mcam_camera *cam = video_drvdata(file);
1555
1556 if (reg->reg > cam->regs_size - 4)
1557 return -EINVAL;
1558 reg->val = mcam_reg_read(cam, reg->reg);
1559 reg->size = 4;
1560 return 0;
1561 }
1562
mcam_vidioc_s_register(struct file * file,void * priv,const struct v4l2_dbg_register * reg)1563 static int mcam_vidioc_s_register(struct file *file, void *priv,
1564 const struct v4l2_dbg_register *reg)
1565 {
1566 struct mcam_camera *cam = video_drvdata(file);
1567
1568 if (reg->reg > cam->regs_size - 4)
1569 return -EINVAL;
1570 mcam_reg_write(cam, reg->reg, reg->val);
1571 return 0;
1572 }
1573 #endif
1574
/* ioctl dispatch table; the buffer-handling ioctls go to vb2 helpers */
static const struct v4l2_ioctl_ops mcam_v4l_ioctl_ops = {
	.vidioc_querycap = mcam_vidioc_querycap,
	.vidioc_enum_fmt_vid_cap = mcam_vidioc_enum_fmt_vid_cap,
	.vidioc_try_fmt_vid_cap = mcam_vidioc_try_fmt_vid_cap,
	.vidioc_s_fmt_vid_cap = mcam_vidioc_s_fmt_vid_cap,
	.vidioc_g_fmt_vid_cap = mcam_vidioc_g_fmt_vid_cap,
	.vidioc_enum_input = mcam_vidioc_enum_input,
	.vidioc_g_input = mcam_vidioc_g_input,
	.vidioc_s_input = mcam_vidioc_s_input,
	.vidioc_reqbufs = vb2_ioctl_reqbufs,
	.vidioc_create_bufs = vb2_ioctl_create_bufs,
	.vidioc_querybuf = vb2_ioctl_querybuf,
	.vidioc_qbuf = vb2_ioctl_qbuf,
	.vidioc_dqbuf = vb2_ioctl_dqbuf,
	.vidioc_expbuf = vb2_ioctl_expbuf,
	.vidioc_streamon = vb2_ioctl_streamon,
	.vidioc_streamoff = vb2_ioctl_streamoff,
	.vidioc_g_parm = mcam_vidioc_g_parm,
	.vidioc_s_parm = mcam_vidioc_s_parm,
	.vidioc_enum_framesizes = mcam_vidioc_enum_framesizes,
	.vidioc_enum_frameintervals = mcam_vidioc_enum_frameintervals,
	.vidioc_subscribe_event = v4l2_ctrl_subscribe_event,
	.vidioc_unsubscribe_event = v4l2_event_unsubscribe,
#ifdef CONFIG_VIDEO_ADV_DEBUG
	.vidioc_g_register = mcam_vidioc_g_register,
	.vidioc_s_register = mcam_vidioc_s_register,
#endif
};
1603
1604 /* ---------------------------------------------------------------------- */
1605 /*
1606 * Our various file operations.
1607 */
mcam_v4l_open(struct file * filp)1608 static int mcam_v4l_open(struct file *filp)
1609 {
1610 struct mcam_camera *cam = video_drvdata(filp);
1611 int ret;
1612
1613 mutex_lock(&cam->s_mutex);
1614 ret = v4l2_fh_open(filp);
1615 if (ret)
1616 goto out;
1617 if (v4l2_fh_is_singular_file(filp)) {
1618 ret = sensor_call(cam, core, s_power, 1);
1619 if (ret)
1620 goto out;
1621 ret = pm_runtime_resume_and_get(cam->dev);
1622 if (ret < 0)
1623 goto out;
1624 __mcam_cam_reset(cam);
1625 mcam_set_config_needed(cam, 1);
1626 }
1627 out:
1628 mutex_unlock(&cam->s_mutex);
1629 if (ret)
1630 v4l2_fh_release(filp);
1631 return ret;
1632 }
1633
1634
/* Last closer powers everything back down and releases DMA buffers. */
static int mcam_v4l_release(struct file *filp)
{
	struct mcam_camera *cam = video_drvdata(filp);
	bool last_open;

	mutex_lock(&cam->s_mutex);
	/* Sample before _vb2_fop_release() drops this file handle */
	last_open = v4l2_fh_is_singular_file(filp);
	_vb2_fop_release(filp, NULL);
	if (last_open) {
		mcam_disable_mipi(cam);
		sensor_call(cam, core, s_power, 0);
		pm_runtime_put(cam->dev);
		/* Buffers allocated on demand are given back on last close */
		if (cam->buffer_mode == B_vmalloc && alloc_bufs_at_read)
			mcam_free_dma_bufs(cam);
	}

	mutex_unlock(&cam->s_mutex);
	return 0;
}
1654
/* Device file operations; buffer I/O paths are delegated to vb2 helpers */
static const struct v4l2_file_operations mcam_v4l_fops = {
	.owner = THIS_MODULE,
	.open = mcam_v4l_open,
	.release = mcam_v4l_release,
	.read = vb2_fop_read,
	.poll = vb2_fop_poll,
	.mmap = vb2_fop_mmap,
	.unlocked_ioctl = video_ioctl2,
};
1664
1665
1666 /*
1667 * This template device holds all of those v4l2 methods; we
1668 * clone it for specific real devices.
1669 */
static const struct video_device mcam_v4l_template = {
	.name = "mcam",
	.fops = &mcam_v4l_fops,
	.ioctl_ops = &mcam_v4l_ioctl_ops,
	/* vdev is embedded in mcam_camera, so no separate release needed */
	.release = video_device_release_empty,
	.device_caps = V4L2_CAP_VIDEO_CAPTURE | V4L2_CAP_READWRITE |
		       V4L2_CAP_STREAMING,
};
1678
1679 /* ---------------------------------------------------------------------- */
1680 /*
1681 * Interrupt handler stuff
1682 */
/*
 * A frame has landed in a DMA buffer: do the common bookkeeping, then
 * hand off to the buffer-mode-specific completion handler.  Runs in the
 * IRQ path (called from mccic_irq(), which per its contract is entered
 * with the lock held).
 */
static void mcam_frame_complete(struct mcam_camera *cam, int frame)
{
	/*
	 * Basic frame housekeeping.
	 */
	set_bit(frame, &cam->flags);
	clear_bit(CF_DMA_ACTIVE, &cam->flags);
	cam->next_buf = frame;
	cam->buf_seq[frame] = cam->sequence++;
	cam->frame_state.frames++;
	/*
	 * "This should never happen"
	 */
	if (cam->state != S_STREAMING)
		return;
	/*
	 * Process the frame and set up the next one.
	 */
	cam->frame_complete(cam, frame);	/* mode-specific handler */
}
1703
1704
1705 /*
1706 * The interrupt handler; this needs to be called from the
1707 * platform irq handler with the lock held.
1708 */
mccic_irq(struct mcam_camera * cam,unsigned int irqs)1709 int mccic_irq(struct mcam_camera *cam, unsigned int irqs)
1710 {
1711 unsigned int frame, handled = 0;
1712
1713 mcam_reg_write(cam, REG_IRQSTAT, FRAMEIRQS); /* Clear'em all */
1714 /*
1715 * Handle any frame completions. There really should
1716 * not be more than one of these, or we have fallen
1717 * far behind.
1718 *
1719 * When running in S/G mode, the frame number lacks any
1720 * real meaning - there's only one descriptor array - but
1721 * the controller still picks a different one to signal
1722 * each time.
1723 */
1724 for (frame = 0; frame < cam->nbufs; frame++)
1725 if (irqs & (IRQ_EOF0 << frame) &&
1726 test_bit(CF_FRAME_SOF0 + frame, &cam->flags)) {
1727 mcam_frame_complete(cam, frame);
1728 handled = 1;
1729 clear_bit(CF_FRAME_SOF0 + frame, &cam->flags);
1730 if (cam->buffer_mode == B_DMA_sg)
1731 break;
1732 }
1733 /*
1734 * If a frame starts, note that we have DMA active. This
1735 * code assumes that we won't get multiple frame interrupts
1736 * at once; may want to rethink that.
1737 */
1738 for (frame = 0; frame < cam->nbufs; frame++) {
1739 if (irqs & (IRQ_SOF0 << frame)) {
1740 set_bit(CF_FRAME_SOF0 + frame, &cam->flags);
1741 handled = IRQ_HANDLED;
1742 }
1743 }
1744
1745 if (handled == IRQ_HANDLED) {
1746 set_bit(CF_DMA_ACTIVE, &cam->flags);
1747 if (cam->buffer_mode == B_DMA_sg)
1748 mcam_ctlr_stop(cam);
1749 }
1750 return handled;
1751 }
1752 EXPORT_SYMBOL_GPL(mccic_irq);
1753
1754 /* ---------------------------------------------------------------------- */
1755 /*
1756 * Registration and such.
1757 */
1758
/*
 * Async notifier: the sensor subdev has appeared.  Bind it, initialize
 * the camera and vb2 queue, and register the video device.
 */
static int mccic_notify_bound(struct v4l2_async_notifier *notifier,
	struct v4l2_subdev *subdev, struct v4l2_async_connection *asd)
{
	struct mcam_camera *cam = notifier_to_mcam(notifier);
	int ret;

	mutex_lock(&cam->s_mutex);
	/* Only a single sensor per controller is supported */
	if (cam->sensor) {
		cam_err(cam, "sensor already bound\n");
		ret = -EBUSY;
		goto out;
	}

	v4l2_set_subdev_hostdata(subdev, cam);
	cam->sensor = subdev;

	/* Reset the sensor and move the camera to S_IDLE */
	ret = mcam_cam_init(cam);
	if (ret) {
		cam->sensor = NULL;
		goto out;
	}

	ret = mcam_setup_vb2(cam);
	if (ret) {
		cam->sensor = NULL;
		goto out;
	}

	/* With the sensor in place, the video device can go live */
	cam->vdev = mcam_v4l_template;
	cam->vdev.v4l2_dev = &cam->v4l2_dev;
	cam->vdev.lock = &cam->s_mutex;
	cam->vdev.queue = &cam->vb_queue;
	video_set_drvdata(&cam->vdev, cam);
	ret = video_register_device(&cam->vdev, VFL_TYPE_VIDEO, -1);
	if (ret) {
		cam->sensor = NULL;
		goto out;
	}

	cam_dbg(cam, "sensor %s bound\n", subdev->name);
out:
	mutex_unlock(&cam->s_mutex);
	return ret;
}
1803
mccic_notify_unbind(struct v4l2_async_notifier * notifier,struct v4l2_subdev * subdev,struct v4l2_async_connection * asd)1804 static void mccic_notify_unbind(struct v4l2_async_notifier *notifier,
1805 struct v4l2_subdev *subdev, struct v4l2_async_connection *asd)
1806 {
1807 struct mcam_camera *cam = notifier_to_mcam(notifier);
1808
1809 mutex_lock(&cam->s_mutex);
1810 if (cam->sensor != subdev) {
1811 cam_err(cam, "sensor %s not bound\n", subdev->name);
1812 goto out;
1813 }
1814
1815 video_unregister_device(&cam->vdev);
1816 cam->sensor = NULL;
1817 cam_dbg(cam, "sensor %s unbound\n", subdev->name);
1818
1819 out:
1820 mutex_unlock(&cam->s_mutex);
1821 }
1822
mccic_notify_complete(struct v4l2_async_notifier * notifier)1823 static int mccic_notify_complete(struct v4l2_async_notifier *notifier)
1824 {
1825 struct mcam_camera *cam = notifier_to_mcam(notifier);
1826 int ret;
1827
1828 /*
1829 * Get the v4l2 setup done.
1830 */
1831 ret = v4l2_ctrl_handler_init(&cam->ctrl_handler, 10);
1832 if (!ret)
1833 cam->v4l2_dev.ctrl_handler = &cam->ctrl_handler;
1834
1835 return ret;
1836 }
1837
/* Async notifier callbacks for sensor bind/unbind and setup completion */
static const struct v4l2_async_notifier_operations mccic_notify_ops = {
	.bound = mccic_notify_bound,
	.unbind = mccic_notify_unbind,
	.complete = mccic_notify_complete,
};
1843
/*
 * Register one camera core instance: validate the buffer mode, hook up
 * the async sensor notifier, expose the sensor master clock, and
 * (optionally) pre-allocate vmalloc-mode DMA buffers.
 */
int mccic_register(struct mcam_camera *cam)
{
	struct clk_init_data mclk_init = { };
	int ret;

	/*
	 * Validate the requested buffer mode.
	 */
	if (buffer_mode >= 0)
		cam->buffer_mode = buffer_mode;	/* module parameter override */
	if (cam->buffer_mode == B_DMA_sg &&
			cam->chip_id == MCAM_CAFE) {
		printk(KERN_ERR "marvell-cam: Cafe can't do S/G I/O, attempting vmalloc mode instead\n");
		cam->buffer_mode = B_vmalloc;
	}

	if (!mcam_buffer_mode_supported(cam->buffer_mode)) {
		printk(KERN_ERR "marvell-cam: buffer mode %d unsupported\n",
				cam->buffer_mode);
		ret = -EINVAL;
		goto out;
	}

	mutex_init(&cam->s_mutex);
	cam->state = S_NOTREADY;
	mcam_set_config_needed(cam, 1);
	cam->pix_format = mcam_def_pix_format;
	cam->mbus_code = mcam_def_mbus_code;

	cam->notifier.ops = &mccic_notify_ops;
	ret = v4l2_async_nf_register(&cam->notifier);
	if (ret < 0) {
		cam_warn(cam, "failed to register a sensor notifier");
		goto out;
	}

	/*
	 * Register sensor master clock.
	 */
	mclk_init.parent_names = NULL;
	mclk_init.num_parents = 0;
	mclk_init.ops = &mclk_ops;
	mclk_init.name = "mclk";

	/* DT may override the clock name; ignore the lookup result */
	of_property_read_string(cam->dev->of_node, "clock-output-names",
			&mclk_init.name);

	cam->mclk_hw.init = &mclk_init;

	cam->mclk = devm_clk_register(cam->dev, &cam->mclk_hw);
	if (IS_ERR(cam->mclk)) {
		ret = PTR_ERR(cam->mclk);
		dev_err(cam->dev, "can't register clock\n");
		goto out;
	}

	/*
	 * If so requested, try to get our DMA buffers now.
	 */
	if (cam->buffer_mode == B_vmalloc && !alloc_bufs_at_read) {
		if (mcam_alloc_dma_bufs(cam, 1))
			cam_warn(cam, "Unable to alloc DMA buffers at load will try again later.");
	}

	return 0;

out:
	/*
	 * NOTE(review): this path also runs for failures that occur before
	 * v4l2_async_nf_register() succeeded - confirm unregister/cleanup
	 * are safe on a notifier that was never registered.
	 */
	v4l2_async_nf_unregister(&cam->notifier);
	v4l2_async_nf_cleanup(&cam->notifier);
	return ret;
}
EXPORT_SYMBOL_GPL(mccic_register);
1916
/* Tear down everything mccic_register() set up. */
void mccic_shutdown(struct mcam_camera *cam)
{
	/*
	 * If we have no users (and we really, really should have no
	 * users) the device will already be powered down. Trying to
	 * take it down again will wedge the machine, which is frowned
	 * upon.
	 */
	if (!list_empty(&cam->vdev.fh_list)) {
		cam_warn(cam, "Removing a device with users!\n");
		sensor_call(cam, core, s_power, 0);
	}
	/* vmalloc-mode DMA buffers are ours to free */
	if (cam->buffer_mode == B_vmalloc)
		mcam_free_dma_bufs(cam);
	v4l2_ctrl_handler_free(&cam->ctrl_handler);
	v4l2_async_nf_unregister(&cam->notifier);
	v4l2_async_nf_cleanup(&cam->notifier);
}
EXPORT_SYMBOL_GPL(mccic_shutdown);
1936
1937 /*
1938 * Power management
1939 */
/* Suspend: if the device is open, stop DMA and power down the sensor. */
void mccic_suspend(struct mcam_camera *cam)
{
	mutex_lock(&cam->s_mutex);
	if (!list_empty(&cam->vdev.fh_list)) {
		enum mcam_state cstate = cam->state;

		mcam_ctlr_stop_dma(cam);
		sensor_call(cam, core, s_power, 0);
		/*
		 * Restore the pre-suspend state (mcam_ctlr_stop_dma()
		 * presumably alters it) so resume can restart streaming.
		 */
		cam->state = cstate;
	}
	mutex_unlock(&cam->s_mutex);
}
EXPORT_SYMBOL_GPL(mccic_suspend);
1953
/* Resume: re-power the sensor if open, and restart streaming if needed. */
int mccic_resume(struct mcam_camera *cam)
{
	int ret = 0;

	mutex_lock(&cam->s_mutex);
	if (!list_empty(&cam->vdev.fh_list)) {
		/* Device is open: bring the sensor back up and reset it */
		ret = sensor_call(cam, core, s_power, 1);
		if (ret) {
			mutex_unlock(&cam->s_mutex);
			return ret;
		}
		__mcam_cam_reset(cam);
	} else {
		sensor_call(cam, core, s_power, 0);
	}
	mutex_unlock(&cam->s_mutex);

	set_bit(CF_CONFIG_NEEDED, &cam->flags);
	/* NOTE(review): cam->state is read without s_mutex here - confirm safe */
	if (cam->state == S_STREAMING) {
		/*
		 * If there was a buffer in the DMA engine at suspend
		 * time, put it back on the queue or we'll forget about it.
		 */
		if (cam->buffer_mode == B_DMA_sg && cam->vb_bufs[0])
			list_add(&cam->vb_bufs[0]->queue, &cam->buffers);
		ret = mcam_read_setup(cam);
	}
	return ret;
}
EXPORT_SYMBOL_GPL(mccic_resume);
1984
1985 MODULE_DESCRIPTION("Marvell camera core driver");
1986 MODULE_LICENSE("GPL v2");
1987 MODULE_AUTHOR("Jonathan Corbet <[email protected]>");
1988