1 // SPDX-License-Identifier: GPL-2.0
2 /*
3  * Rockchip Video Decoder VP9 backend
4  *
5  * Copyright (C) 2019 Collabora, Ltd.
6  *	Boris Brezillon <boris.brezillon@collabora.com>
7  * Copyright (C) 2021 Collabora, Ltd.
8  *	Andrzej Pietrasiewicz <andrzej.p@collabora.com>
9  *
10  * Copyright (C) 2016 Rockchip Electronics Co., Ltd.
11  *	Alpha Lin <Alpha.Lin@rock-chips.com>
12  */
13 
14 /*
15  * For following the vp9 spec please start reading this driver
16  * code from rkvdec_vp9_run() followed by rkvdec_vp9_done().
17  */
18 
19 #include <linux/kernel.h>
20 #include <linux/vmalloc.h>
21 #include <media/v4l2-mem2mem.h>
22 #include <media/v4l2-vp9.h>
23 
24 #include "rkvdec.h"
25 #include "rkvdec-regs.h"
26 
27 #define RKVDEC_VP9_PROBE_SIZE		4864
28 #define RKVDEC_VP9_COUNT_SIZE		13232
29 #define RKVDEC_VP9_MAX_SEGMAP_SIZE	73728
30 
/*
 * Per-block-size intra mode probabilities in the hardware's expected
 * layout. Array sizes are fixed by the decoder IP (they include the
 * alignment padding the serialization loops below insert).
 */
struct rkvdec_vp9_intra_mode_probs {
	u8 y_mode[105];
	u8 uv_mode[23];
};
35 
/*
 * Probability tables uploaded to the hardware for key frames and
 * intra-only frames. Filled by init_intra_only_probs().
 */
struct rkvdec_vp9_intra_only_frame_probs {
	u8 coef_intra[4][2][128];
	struct rkvdec_vp9_intra_mode_probs intra_mode[10];
};
40 
/*
 * Probability tables uploaded to the hardware for inter frames.
 * Filled by init_inter_probs(). The padding fields keep the following
 * members at the alignment the hardware expects (see the "x 128 bits"
 * comments in init_inter_probs()).
 */
struct rkvdec_vp9_inter_frame_probs {
	u8 y_mode[4][9];
	u8 comp_mode[5];
	u8 comp_ref[5];
	u8 single_ref[5][2];
	u8 inter_mode[7][3];
	u8 interp_filter[4][2];
	u8 padding0[11];
	u8 coef[2][4][2][128];
	/* uv_mode is split in aligned chunks of three 9-byte entries */
	u8 uv_mode_0_2[3][9];
	u8 padding1[5];
	u8 uv_mode_3_5[3][9];
	u8 padding2[5];
	u8 uv_mode_6_8[3][9];
	u8 padding3[5];
	u8 uv_mode_9[9];
	u8 padding4[7];
	u8 padding5[16];
	/* motion vector probabilities */
	struct {
		u8 joint[3];
		u8 sign[2];
		u8 classes[2][10];
		u8 class0_bit[2];
		u8 bits[2][10];
		u8 class0_fr[2][2][3];
		u8 fr[2][3];
		u8 class0_hp[2];
		u8 hp[2];
	} mv;
};
71 
/*
 * Complete probability table written to the hardware (CABAC table base).
 * The leading members are common to all frame types; the union holds
 * either the intra-only or the inter-frame specific tables depending on
 * the frame being decoded (see init_probs()).
 */
struct rkvdec_vp9_probs {
	u8 partition[16][3];
	u8 pred[3];
	u8 tree[7];
	u8 skip[3];
	u8 tx32[2][3];
	u8 tx16[2][2];
	u8 tx8[2][1];
	u8 is_inter[4];
	/* 128 bit alignment */
	u8 padding0[3];
	union {
		struct rkvdec_vp9_inter_frame_probs inter;
		struct rkvdec_vp9_intra_only_frame_probs intra_only;
	};
};
88 
/*
 * Data structure describing auxiliary buffer format.
 * Shared with the hardware through a DMA buffer (priv_tbl): the probs
 * are consumed by the decoder, and two segmentation maps are kept so the
 * "current" and "last" map can be swapped (see cur.segmapid handling in
 * config_registers()).
 */
struct rkvdec_vp9_priv_tbl {
	struct rkvdec_vp9_probs probs;
	u8 segmap[2][RKVDEC_VP9_MAX_SEGMAP_SIZE];
};
94 
/* End-of-block and coefficient counters produced by the hardware. */
struct rkvdec_vp9_refs_counts {
	u32 eob[2];
	u32 coeff[3];
};
99 
/*
 * Symbol counts written back by the hardware after decoding an inter
 * frame (count_tbl buffer). They feed the probability adaptation done
 * in rkvdec_vp9_done() through the v4l2-vp9 helpers.
 */
struct rkvdec_vp9_inter_frame_symbol_counts {
	u32 partition[16][4];
	u32 skip[3][2];
	u32 inter[4][2];
	u32 tx32p[2][4];
	u32 tx16p[2][4];
	u32 tx8p[2][2];
	u32 y_mode[4][10];
	u32 uv_mode[10][10];
	u32 comp[5][2];
	u32 comp_ref[5][2];
	u32 single_ref[5][2][2];
	u32 mv_mode[7][4];
	u32 filter[4][3];
	u32 mv_joint[4];
	u32 sign[2][2];
	/* add 1 element for align */
	u32 classes[2][11 + 1];
	u32 class0[2][2];
	u32 bits[2][10][2];
	u32 class0_fp[2][2][4];
	u32 fp[2][4];
	u32 class0_hp[2][2];
	u32 hp[2][2];
	struct rkvdec_vp9_refs_counts ref_cnt[2][4][2][6][6];
};
126 
/*
 * Symbol counts written back by the hardware after decoding a key or
 * intra-only frame. Only intra-related syntax counts are produced in
 * this case (see the count-buffer reset comment in config_registers()).
 */
struct rkvdec_vp9_intra_frame_symbol_counts {
	u32 partition[4][4][4];
	u32 skip[3][2];
	u32 intra[4][2];
	u32 tx32p[2][4];
	u32 tx16p[2][4];
	u32 tx8p[2][2];
	struct rkvdec_vp9_refs_counts ref_cnt[2][4][2][6][6];
};
136 
/* Per-decode-job state: the generic run plus the VP9 frame control. */
struct rkvdec_vp9_run {
	struct rkvdec_run base;
	const struct v4l2_ctrl_vp9_frame *decode_params;
};
141 
/*
 * Snapshot of per-frame information, kept for both the frame currently
 * being decoded (cur) and the previously decoded one (last).
 */
struct rkvdec_vp9_frame_info {
	u32 valid : 1;		/* info has been populated for a frame */
	u32 segmapid : 1;	/* which of the two segmaps is "current" */
	u32 frame_context_idx : 2;
	u32 reference_mode : 2;
	u32 tx_mode : 3;
	u32 interpolation_filter : 3;
	u32 flags;		/* V4L2_VP9_FRAME_FLAG_* */
	u64 timestamp;		/* capture buffer timestamp, for ref lookup */
	struct v4l2_vp9_segmentation seg;
	struct v4l2_vp9_loop_filter lf;
};
154 
/* VP9-specific decoder context hanging off rkvdec_ctx::priv. */
struct rkvdec_vp9_ctx {
	struct rkvdec_aux_buf priv_tbl;		/* probs + segmaps (DMA) */
	struct rkvdec_aux_buf count_tbl;	/* HW symbol counts (DMA) */
	struct v4l2_vp9_frame_symbol_counts inter_cnts;
	struct v4l2_vp9_frame_symbol_counts intra_cnts;
	struct v4l2_vp9_frame_context probability_tables;
	struct v4l2_vp9_frame_context frame_context[4];
	struct rkvdec_vp9_frame_info cur;
	struct rkvdec_vp9_frame_info last;
};
165 
166 static void write_coeff_plane(const u8 coef[6][6][3], u8 *coeff_plane)
167 {
168 	unsigned int idx = 0, byte_count = 0;
169 	int k, m, n;
170 	u8 p;
171 
172 	for (k = 0; k < 6; k++) {
173 		for (m = 0; m < 6; m++) {
174 			for (n = 0; n < 3; n++) {
175 				p = coef[k][m][n];
176 				coeff_plane[idx++] = p;
177 				byte_count++;
178 				if (byte_count == 27) {
179 					idx += 5;
180 					byte_count = 0;
181 				}
182 			}
183 		}
184 	}
185 }
186 
/*
 * Fill the intra-only part of the probability table (tbl->probs.intra_only)
 * from the current probability context and the static key-frame mode
 * probability tables provided by the v4l2-vp9 library.
 */
static void init_intra_only_probs(struct rkvdec_ctx *ctx,
				  const struct rkvdec_vp9_run *run)
{
	struct rkvdec_vp9_ctx *vp9_ctx = ctx->priv;
	struct rkvdec_vp9_priv_tbl *tbl = vp9_ctx->priv_tbl.cpu;
	struct rkvdec_vp9_intra_only_frame_probs *rkprobs;
	const struct v4l2_vp9_frame_context *probs;
	unsigned int i, j, k;

	rkprobs = &tbl->probs.intra_only;
	probs = &vp9_ctx->probability_tables;

	/*
	 * intra only 149 x 128 bits ,aligned to 152 x 128 bits coeff related
	 * prob 64 x 128 bits
	 */
	for (i = 0; i < ARRAY_SIZE(probs->coef); i++) {
		for (j = 0; j < ARRAY_SIZE(probs->coef[0]); j++)
			write_coeff_plane(probs->coef[i][j][0],
					  rkprobs->coef_intra[i][j]);
	}

	/* intra mode prob  80 x 128 bits */
	for (i = 0; i < ARRAY_SIZE(v4l2_vp9_kf_y_mode_prob); i++) {
		unsigned int byte_count = 0;
		int idx = 0;

		/* vp9_kf_y_mode_prob */
		for (j = 0; j < ARRAY_SIZE(v4l2_vp9_kf_y_mode_prob[0]); j++) {
			for (k = 0; k < ARRAY_SIZE(v4l2_vp9_kf_y_mode_prob[0][0]);
			     k++) {
				u8 val = v4l2_vp9_kf_y_mode_prob[i][j][k];

				rkprobs->intra_mode[i].y_mode[idx++] = val;
				byte_count++;
				/* same 27 + 5 padding scheme as write_coeff_plane() */
				if (byte_count == 27) {
					byte_count = 0;
					idx += 5;
				}
			}
		}

	}

	/* copy the kf uv mode table as a flat byte stream, 23 bytes per mode */
	for (i = 0; i < sizeof(v4l2_vp9_kf_uv_mode_prob); ++i) {
		const u8 *ptr = (const u8 *)v4l2_vp9_kf_uv_mode_prob;

		rkprobs->intra_mode[i / 23].uv_mode[i % 23] = ptr[i];
	}
}
237 
/*
 * Fill the inter-frame part of the probability table (tbl->probs.inter)
 * from the current probability context.
 */
static void init_inter_probs(struct rkvdec_ctx *ctx,
			     const struct rkvdec_vp9_run *run)
{
	struct rkvdec_vp9_ctx *vp9_ctx = ctx->priv;
	struct rkvdec_vp9_priv_tbl *tbl = vp9_ctx->priv_tbl.cpu;
	struct rkvdec_vp9_inter_frame_probs *rkprobs;
	const struct v4l2_vp9_frame_context *probs;
	unsigned int i, j, k;

	rkprobs = &tbl->probs.inter;
	probs = &vp9_ctx->probability_tables;

	/*
	 * inter probs
	 * 151 x 128 bits, aligned to 152 x 128 bits
	 * inter only
	 * intra_y_mode & inter_block info 6 x 128 bits
	 */

	memcpy(rkprobs->y_mode, probs->y_mode, sizeof(rkprobs->y_mode));
	memcpy(rkprobs->comp_mode, probs->comp_mode,
	       sizeof(rkprobs->comp_mode));
	memcpy(rkprobs->comp_ref, probs->comp_ref,
	       sizeof(rkprobs->comp_ref));
	memcpy(rkprobs->single_ref, probs->single_ref,
	       sizeof(rkprobs->single_ref));
	memcpy(rkprobs->inter_mode, probs->inter_mode,
	       sizeof(rkprobs->inter_mode));
	memcpy(rkprobs->interp_filter, probs->interp_filter,
	       sizeof(rkprobs->interp_filter));

	/* 128 x 128 bits coeff related */
	for (i = 0; i < ARRAY_SIZE(probs->coef); i++) {
		for (j = 0; j < ARRAY_SIZE(probs->coef[0]); j++) {
			for (k = 0; k < ARRAY_SIZE(probs->coef[0][0]); k++)
				/* note the index permutation: dst is [k][i][j] */
				write_coeff_plane(probs->coef[i][j][k],
						  rkprobs->coef[k][i][j]);
		}
	}

	/* intra uv mode 6 x 128 */
	memcpy(rkprobs->uv_mode_0_2, &probs->uv_mode[0],
	       sizeof(rkprobs->uv_mode_0_2));
	memcpy(rkprobs->uv_mode_3_5, &probs->uv_mode[3],
	       sizeof(rkprobs->uv_mode_3_5));
	memcpy(rkprobs->uv_mode_6_8, &probs->uv_mode[6],
	       sizeof(rkprobs->uv_mode_6_8));
	memcpy(rkprobs->uv_mode_9, &probs->uv_mode[9],
	       sizeof(rkprobs->uv_mode_9));

	/* mv related 6 x 128 */
	memcpy(rkprobs->mv.joint, probs->mv.joint,
	       sizeof(rkprobs->mv.joint));
	memcpy(rkprobs->mv.sign, probs->mv.sign,
	       sizeof(rkprobs->mv.sign));
	memcpy(rkprobs->mv.classes, probs->mv.classes,
	       sizeof(rkprobs->mv.classes));
	memcpy(rkprobs->mv.class0_bit, probs->mv.class0_bit,
	       sizeof(rkprobs->mv.class0_bit));
	memcpy(rkprobs->mv.bits, probs->mv.bits,
	       sizeof(rkprobs->mv.bits));
	memcpy(rkprobs->mv.class0_fr, probs->mv.class0_fr,
	       sizeof(rkprobs->mv.class0_fr));
	memcpy(rkprobs->mv.fr, probs->mv.fr,
	       sizeof(rkprobs->mv.fr));
	memcpy(rkprobs->mv.class0_hp, probs->mv.class0_hp,
	       sizeof(rkprobs->mv.class0_hp));
	memcpy(rkprobs->mv.hp, probs->mv.hp,
	       sizeof(rkprobs->mv.hp));
}
308 
/*
 * Build the complete probability table in the DMA-visible priv_tbl:
 * fill the common members, then dispatch to the intra-only or inter
 * specific initializer depending on the frame type.
 */
static void init_probs(struct rkvdec_ctx *ctx,
		       const struct rkvdec_vp9_run *run)
{
	const struct v4l2_ctrl_vp9_frame *dec_params;
	struct rkvdec_vp9_ctx *vp9_ctx = ctx->priv;
	struct rkvdec_vp9_priv_tbl *tbl = vp9_ctx->priv_tbl.cpu;
	struct rkvdec_vp9_probs *rkprobs = &tbl->probs;
	const struct v4l2_vp9_segmentation *seg;
	const struct v4l2_vp9_frame_context *probs;
	bool intra_only;

	dec_params = run->decode_params;
	probs = &vp9_ctx->probability_tables;
	seg = &dec_params->seg;

	memset(rkprobs, 0, sizeof(*rkprobs));

	intra_only = !!(dec_params->flags &
			(V4L2_VP9_FRAME_FLAG_KEY_FRAME |
			 V4L2_VP9_FRAME_FLAG_INTRA_ONLY));

	/* sb info  5 x 128 bit */
	memcpy(rkprobs->partition,
	       intra_only ? v4l2_vp9_kf_partition_probs : probs->partition,
	       sizeof(rkprobs->partition));

	memcpy(rkprobs->pred, seg->pred_probs, sizeof(rkprobs->pred));
	memcpy(rkprobs->tree, seg->tree_probs, sizeof(rkprobs->tree));
	memcpy(rkprobs->skip, probs->skip, sizeof(rkprobs->skip));
	memcpy(rkprobs->tx32, probs->tx32, sizeof(rkprobs->tx32));
	memcpy(rkprobs->tx16, probs->tx16, sizeof(rkprobs->tx16));
	memcpy(rkprobs->tx8, probs->tx8, sizeof(rkprobs->tx8));
	memcpy(rkprobs->is_inter, probs->is_inter, sizeof(rkprobs->is_inter));

	if (intra_only)
		init_intra_only_probs(ctx, run);
	else
		init_inter_probs(ctx, run);
}
348 
/* Register offsets used to program one reference frame slot. */
struct rkvdec_vp9_ref_reg {
	u32 reg_frm_size;	/* frame width/height register */
	u32 reg_hor_stride;	/* horizontal virtual stride register */
	u32 reg_y_stride;	/* luma stride register */
	u32 reg_yuv_stride;	/* luma+chroma stride register, 0 if absent */
	u32 reg_ref_base;	/* reference buffer base address register */
};
356 
/*
 * Register sets for the three VP9 reference slots: LAST, GOLDEN, ALTREF
 * (in that order, matching the ref_bufs[] indices in config_registers()).
 * Only the LAST slot has a YUV stride register; the others leave
 * reg_yuv_stride at 0 and config_ref_registers() skips the write.
 */
static struct rkvdec_vp9_ref_reg ref_regs[] = {
	{
		.reg_frm_size = RKVDEC_REG_VP9_FRAME_SIZE(0),
		.reg_hor_stride = RKVDEC_VP9_HOR_VIRSTRIDE(0),
		.reg_y_stride = RKVDEC_VP9_LAST_FRAME_YSTRIDE,
		.reg_yuv_stride = RKVDEC_VP9_LAST_FRAME_YUVSTRIDE,
		.reg_ref_base = RKVDEC_REG_VP9_LAST_FRAME_BASE,
	},
	{
		.reg_frm_size = RKVDEC_REG_VP9_FRAME_SIZE(1),
		.reg_hor_stride = RKVDEC_VP9_HOR_VIRSTRIDE(1),
		.reg_y_stride = RKVDEC_VP9_GOLDEN_FRAME_YSTRIDE,
		.reg_yuv_stride = 0,
		.reg_ref_base = RKVDEC_REG_VP9_GOLDEN_FRAME_BASE,
	},
	{
		.reg_frm_size = RKVDEC_REG_VP9_FRAME_SIZE(2),
		.reg_hor_stride = RKVDEC_VP9_HOR_VIRSTRIDE(2),
		.reg_y_stride = RKVDEC_VP9_ALTREF_FRAME_YSTRIDE,
		.reg_yuv_stride = 0,
		.reg_ref_base = RKVDEC_REG_VP9_ALTREF_FRAME_BASE,
	}
};
380 
/*
 * Look up a reference buffer on the capture queue by timestamp.
 * Falls back to the current destination buffer when no match is found.
 */
static struct rkvdec_decoded_buffer *
get_ref_buf(struct rkvdec_ctx *ctx, struct vb2_v4l2_buffer *dst, u64 timestamp)
{
	struct v4l2_m2m_ctx *m2m_ctx = ctx->fh.m2m_ctx;
	struct vb2_queue *cap_q = &m2m_ctx->cap_q_ctx.q;
	struct vb2_buffer *buf;

	/*
	 * If a ref is unused or invalid, address of current destination
	 * buffer is returned.
	 */
	buf = vb2_find_buffer(cap_q, timestamp);
	if (!buf)
		buf = &dst->vb2_buf;

	return vb2_to_rkvdec_decoded_buf(buf);
}
398 
/*
 * DMA address of the motion vector data stored right after the YUV
 * payload (4:2:0, so 3/2 of the luma plane) in a decoded buffer.
 */
static dma_addr_t get_mv_base_addr(struct rkvdec_decoded_buffer *buf)
{
	unsigned int aligned_pitch, aligned_height, yuv_len;

	/* height aligned to 64, pitch to 512 bits (bit_depth-aware) */
	aligned_height = round_up(buf->vp9.height, 64);
	aligned_pitch = round_up(buf->vp9.width * buf->vp9.bit_depth, 512) / 8;
	yuv_len = (aligned_height * aligned_pitch * 3) / 2;

	return vb2_dma_contig_plane_dma_addr(&buf->base.vb.vb2_buf, 0) +
	       yuv_len;
}
410 
/*
 * Program one reference frame slot: frame size, base address and, when
 * the reference differs from the current destination buffer, the stride
 * registers as well.
 */
static void config_ref_registers(struct rkvdec_ctx *ctx,
				 const struct rkvdec_vp9_run *run,
				 struct rkvdec_decoded_buffer *ref_buf,
				 struct rkvdec_vp9_ref_reg *ref_reg)
{
	unsigned int aligned_pitch, aligned_height, y_len, yuv_len;
	struct rkvdec_dev *rkvdec = ctx->dev;

	aligned_height = round_up(ref_buf->vp9.height, 64);
	writel_relaxed(RKVDEC_VP9_FRAMEWIDTH(ref_buf->vp9.width) |
		       RKVDEC_VP9_FRAMEHEIGHT(ref_buf->vp9.height),
		       rkvdec->regs + ref_reg->reg_frm_size);

	writel_relaxed(vb2_dma_contig_plane_dma_addr(&ref_buf->base.vb.vb2_buf, 0),
		       rkvdec->regs + ref_reg->reg_ref_base);

	/* ref == dst means the ref was unused/invalid: skip stride setup */
	if (&ref_buf->base.vb == run->base.bufs.dst)
		return;

	aligned_pitch = round_up(ref_buf->vp9.width * ref_buf->vp9.bit_depth, 512) / 8;
	y_len = aligned_height * aligned_pitch;
	yuv_len = (y_len * 3) / 2;

	writel_relaxed(RKVDEC_HOR_Y_VIRSTRIDE(aligned_pitch / 16) |
		       RKVDEC_HOR_UV_VIRSTRIDE(aligned_pitch / 16),
		       rkvdec->regs + ref_reg->reg_hor_stride);
	writel_relaxed(RKVDEC_VP9_REF_YSTRIDE(y_len / 16),
		       rkvdec->regs + ref_reg->reg_y_stride);

	/* GOLDEN/ALTREF slots have no YUV stride register */
	if (!ref_reg->reg_yuv_stride)
		return;

	writel_relaxed(RKVDEC_VP9_REF_YUVSTRIDE(yuv_len / 16),
		       rkvdec->regs + ref_reg->reg_yuv_stride);
}
446 
/*
 * Program the per-segment feature register for segment @segid from the
 * active segmentation parameters (the last frame's when available,
 * otherwise the current frame's).
 */
static void config_seg_registers(struct rkvdec_ctx *ctx, unsigned int segid)
{
	struct rkvdec_vp9_ctx *vp9_ctx = ctx->priv;
	const struct v4l2_vp9_segmentation *seg;
	struct rkvdec_dev *rkvdec = ctx->dev;
	s16 feature_val;
	int feature_id;
	u32 val = 0;

	seg = vp9_ctx->last.valid ? &vp9_ctx->last.seg : &vp9_ctx->cur.seg;
	feature_id = V4L2_VP9_SEG_LVL_ALT_Q;
	if (v4l2_vp9_seg_feat_enabled(seg->feature_enabled, feature_id, segid)) {
		feature_val = seg->feature_data[segid][feature_id];
		val |= RKVDEC_SEGID_FRAME_QP_DELTA_EN(1) |
		       RKVDEC_SEGID_FRAME_QP_DELTA(feature_val);
	}

	feature_id = V4L2_VP9_SEG_LVL_ALT_L;
	if (v4l2_vp9_seg_feat_enabled(seg->feature_enabled, feature_id, segid)) {
		feature_val = seg->feature_data[segid][feature_id];
		val |= RKVDEC_SEGID_FRAME_LOOPFILTER_VALUE_EN(1) |
		       RKVDEC_SEGID_FRAME_LOOPFILTER_VALUE(feature_val);
	}

	feature_id = V4L2_VP9_SEG_LVL_REF_FRAME;
	if (v4l2_vp9_seg_feat_enabled(seg->feature_enabled, feature_id, segid)) {
		feature_val = seg->feature_data[segid][feature_id];
		val |= RKVDEC_SEGID_REFERINFO_EN(1) |
		       RKVDEC_SEGID_REFERINFO(feature_val);
	}

	feature_id = V4L2_VP9_SEG_LVL_SKIP;
	if (v4l2_vp9_seg_feat_enabled(seg->feature_enabled, feature_id, segid))
		val |= RKVDEC_SEGID_FRAME_SKIP_EN(1);

	/* the abs/delta flag is only carried in segment 0's register */
	if (!segid &&
	    (seg->flags & V4L2_VP9_SEGMENTATION_FLAG_ABS_OR_DELTA_UPDATE))
		val |= RKVDEC_SEGID_ABS_DELTA(1);

	writel_relaxed(val, rkvdec->regs + RKVDEC_VP9_SEGID_GRP(segid));
}
488 
/* Record the decoded frame geometry on the destination buffer. */
static void update_dec_buf_info(struct rkvdec_decoded_buffer *buf,
				const struct v4l2_ctrl_vp9_frame *dec_params)
{
	buf->vp9.width = dec_params->frame_width_minus_1 + 1;
	buf->vp9.height = dec_params->frame_height_minus_1 + 1;
	buf->vp9.bit_depth = dec_params->bit_depth;
}
496 
/*
 * Snapshot the current frame's parameters into vp9_ctx->cur, so they can
 * be promoted to "last" once the frame is done (update_ctx_last_info()).
 */
static void update_ctx_cur_info(struct rkvdec_vp9_ctx *vp9_ctx,
				struct rkvdec_decoded_buffer *buf,
				const struct v4l2_ctrl_vp9_frame *dec_params)
{
	vp9_ctx->cur.valid = true;
	vp9_ctx->cur.reference_mode = dec_params->reference_mode;
	vp9_ctx->cur.interpolation_filter = dec_params->interpolation_filter;
	vp9_ctx->cur.flags = dec_params->flags;
	vp9_ctx->cur.timestamp = buf->base.vb.vb2_buf.timestamp;
	vp9_ctx->cur.seg = dec_params->seg;
	vp9_ctx->cur.lf = dec_params->lf;
}
509 
/* Promote the current frame info to "last frame" info. */
static void update_ctx_last_info(struct rkvdec_vp9_ctx *vp9_ctx)
{
	vp9_ctx->last = vp9_ctx->cur;
}
514 
/*
 * Program all hardware registers for one VP9 decode job: output geometry
 * and strides, bitstream length, reference frames, segmentation,
 * loop-filter deltas inherited from the last frame, reference scaling
 * factors and the DMA base addresses (output, bitstream, probability
 * table, count table, segmentation maps, colocated MVs).
 */
static void config_registers(struct rkvdec_ctx *ctx,
			     const struct rkvdec_vp9_run *run)
{
	unsigned int y_len, uv_len, yuv_len, bit_depth, aligned_height, aligned_pitch, stream_len;
	const struct v4l2_ctrl_vp9_frame *dec_params;
	struct rkvdec_decoded_buffer *ref_bufs[3];
	struct rkvdec_decoded_buffer *dst, *last, *mv_ref;
	struct rkvdec_vp9_ctx *vp9_ctx = ctx->priv;
	u32 val, last_frame_info = 0;
	const struct v4l2_vp9_segmentation *seg;
	struct rkvdec_dev *rkvdec = ctx->dev;
	dma_addr_t addr;
	bool intra_only;
	unsigned int i;

	dec_params = run->decode_params;
	dst = vb2_to_rkvdec_decoded_buf(&run->base.bufs.dst->vb2_buf);
	/* LAST/GOLDEN/ALTREF, same order as ref_regs[] */
	ref_bufs[0] = get_ref_buf(ctx, &dst->base.vb, dec_params->last_frame_ts);
	ref_bufs[1] = get_ref_buf(ctx, &dst->base.vb, dec_params->golden_frame_ts);
	ref_bufs[2] = get_ref_buf(ctx, &dst->base.vb, dec_params->alt_frame_ts);

	if (vp9_ctx->last.valid)
		last = get_ref_buf(ctx, &dst->base.vb, vp9_ctx->last.timestamp);
	else
		last = dst;

	update_dec_buf_info(dst, dec_params);
	update_ctx_cur_info(vp9_ctx, dst, dec_params);
	seg = &dec_params->seg;

	intra_only = !!(dec_params->flags &
			(V4L2_VP9_FRAME_FLAG_KEY_FRAME |
			 V4L2_VP9_FRAME_FLAG_INTRA_ONLY));

	writel_relaxed(RKVDEC_MODE(RKVDEC_MODE_VP9),
		       rkvdec->regs + RKVDEC_REG_SYSCTRL);

	/* output geometry: height aligned to 64, pitch to 512 bits */
	bit_depth = dec_params->bit_depth;
	aligned_height = round_up(ctx->decoded_fmt.fmt.pix_mp.height, 64);

	aligned_pitch = round_up(ctx->decoded_fmt.fmt.pix_mp.width *
				 bit_depth,
				 512) / 8;
	y_len = aligned_height * aligned_pitch;
	uv_len = y_len / 2;
	yuv_len = y_len + uv_len;

	writel_relaxed(RKVDEC_Y_HOR_VIRSTRIDE(aligned_pitch / 16) |
		       RKVDEC_UV_HOR_VIRSTRIDE(aligned_pitch / 16),
		       rkvdec->regs + RKVDEC_REG_PICPAR);
	writel_relaxed(RKVDEC_Y_VIRSTRIDE(y_len / 16),
		       rkvdec->regs + RKVDEC_REG_Y_VIRSTRIDE);
	writel_relaxed(RKVDEC_YUV_VIRSTRIDE(yuv_len / 16),
		       rkvdec->regs + RKVDEC_REG_YUV_VIRSTRIDE);

	stream_len = vb2_get_plane_payload(&run->base.bufs.src->vb2_buf, 0);
	writel_relaxed(RKVDEC_STRM_LEN(stream_len),
		       rkvdec->regs + RKVDEC_REG_STRM_LEN);

	/*
	 * Reset count buffer, because decoder only output intra related syntax
	 * counts when decoding intra frame, but update entropy need to update
	 * all the probabilities.
	 */
	if (intra_only)
		memset(vp9_ctx->count_tbl.cpu, 0, vp9_ctx->count_tbl.size);

	/* flip the segmap double-buffer when the map may be rewritten */
	vp9_ctx->cur.segmapid = vp9_ctx->last.segmapid;
	if (!intra_only &&
	    !(dec_params->flags & V4L2_VP9_FRAME_FLAG_ERROR_RESILIENT) &&
	    (!(seg->flags & V4L2_VP9_SEGMENTATION_FLAG_ENABLED) ||
	     (seg->flags & V4L2_VP9_SEGMENTATION_FLAG_UPDATE_MAP)))
		vp9_ctx->cur.segmapid++;

	for (i = 0; i < ARRAY_SIZE(ref_bufs); i++)
		config_ref_registers(ctx, run, ref_bufs[i], &ref_regs[i]);

	for (i = 0; i < 8; i++)
		config_seg_registers(ctx, i);

	writel_relaxed(RKVDEC_VP9_TX_MODE(vp9_ctx->cur.tx_mode) |
		       RKVDEC_VP9_FRAME_REF_MODE(dec_params->reference_mode),
		       rkvdec->regs + RKVDEC_VP9_CPRHEADER_CONFIG);

	if (!intra_only) {
		const struct v4l2_vp9_loop_filter *lf;
		s8 delta;

		/* loop-filter deltas come from the last frame when valid */
		if (vp9_ctx->last.valid)
			lf = &vp9_ctx->last.lf;
		else
			lf = &vp9_ctx->cur.lf;

		val = 0;
		for (i = 0; i < ARRAY_SIZE(lf->ref_deltas); i++) {
			delta = lf->ref_deltas[i];
			val |= RKVDEC_REF_DELTAS_LASTFRAME(i, delta);
		}

		writel_relaxed(val,
			       rkvdec->regs + RKVDEC_VP9_REF_DELTAS_LASTFRAME);

		for (i = 0; i < ARRAY_SIZE(lf->mode_deltas); i++) {
			delta = lf->mode_deltas[i];
			last_frame_info |= RKVDEC_MODE_DELTAS_LASTFRAME(i,
									delta);
		}
	}

	/* describe the last decoded frame to the hardware */
	if (vp9_ctx->last.valid && !intra_only &&
	    vp9_ctx->last.seg.flags & V4L2_VP9_SEGMENTATION_FLAG_ENABLED)
		last_frame_info |= RKVDEC_SEG_EN_LASTFRAME;

	if (vp9_ctx->last.valid &&
	    vp9_ctx->last.flags & V4L2_VP9_FRAME_FLAG_SHOW_FRAME)
		last_frame_info |= RKVDEC_LAST_SHOW_FRAME;

	if (vp9_ctx->last.valid &&
	    vp9_ctx->last.flags &
	    (V4L2_VP9_FRAME_FLAG_KEY_FRAME | V4L2_VP9_FRAME_FLAG_INTRA_ONLY))
		last_frame_info |= RKVDEC_LAST_INTRA_ONLY;

	if (vp9_ctx->last.valid &&
	    last->vp9.width == dst->vp9.width &&
	    last->vp9.height == dst->vp9.height)
		last_frame_info |= RKVDEC_LAST_WIDHHEIGHT_EQCUR;

	writel_relaxed(last_frame_info,
		       rkvdec->regs + RKVDEC_VP9_INFO_LASTFRAME);

	/* remaining tile data size = payload minus both headers */
	writel_relaxed(stream_len - dec_params->compressed_header_size -
		       dec_params->uncompressed_header_size,
		       rkvdec->regs + RKVDEC_VP9_LASTTILE_SIZE);

	/* per-reference scaling factors in 14-bit fixed point */
	for (i = 0; !intra_only && i < ARRAY_SIZE(ref_bufs); i++) {
		unsigned int refw = ref_bufs[i]->vp9.width;
		unsigned int refh = ref_bufs[i]->vp9.height;
		u32 hscale, vscale;

		hscale = (refw << 14) /	dst->vp9.width;
		vscale = (refh << 14) / dst->vp9.height;
		writel_relaxed(RKVDEC_VP9_REF_HOR_SCALE(hscale) |
			       RKVDEC_VP9_REF_VER_SCALE(vscale),
			       rkvdec->regs + RKVDEC_VP9_REF_SCALE(i));
	}

	/* DMA base addresses */
	addr = vb2_dma_contig_plane_dma_addr(&dst->base.vb.vb2_buf, 0);
	writel_relaxed(addr, rkvdec->regs + RKVDEC_REG_DECOUT_BASE);
	addr = vb2_dma_contig_plane_dma_addr(&run->base.bufs.src->vb2_buf, 0);
	writel_relaxed(addr, rkvdec->regs + RKVDEC_REG_STRM_RLC_BASE);
	writel_relaxed(vp9_ctx->priv_tbl.dma +
		       offsetof(struct rkvdec_vp9_priv_tbl, probs),
		       rkvdec->regs + RKVDEC_REG_CABACTBL_PROB_BASE);
	writel_relaxed(vp9_ctx->count_tbl.dma,
		       rkvdec->regs + RKVDEC_REG_VP9COUNT_BASE);

	/* current and last segmentation maps (double-buffered) */
	writel_relaxed(vp9_ctx->priv_tbl.dma +
		       offsetof(struct rkvdec_vp9_priv_tbl, segmap) +
		       (RKVDEC_VP9_MAX_SEGMAP_SIZE * vp9_ctx->cur.segmapid),
		       rkvdec->regs + RKVDEC_REG_VP9_SEGIDCUR_BASE);
	writel_relaxed(vp9_ctx->priv_tbl.dma +
		       offsetof(struct rkvdec_vp9_priv_tbl, segmap) +
		       (RKVDEC_VP9_MAX_SEGMAP_SIZE * (!vp9_ctx->cur.segmapid)),
		       rkvdec->regs + RKVDEC_REG_VP9_SEGIDLAST_BASE);

	/* colocated MVs come from the last frame when it can be used */
	if (!intra_only &&
	    !(dec_params->flags & V4L2_VP9_FRAME_FLAG_ERROR_RESILIENT) &&
	    vp9_ctx->last.valid)
		mv_ref = last;
	else
		mv_ref = dst;

	writel_relaxed(get_mv_base_addr(mv_ref),
		       rkvdec->regs + RKVDEC_VP9_REF_COLMV_BASE);

	writel_relaxed(ctx->decoded_fmt.fmt.pix_mp.width |
		       (ctx->decoded_fmt.fmt.pix_mp.height << 16),
		       rkvdec->regs + RKVDEC_REG_PERFORMANCE_CYCLE);
}
694 
695 static int validate_dec_params(struct rkvdec_ctx *ctx,
696 			       const struct v4l2_ctrl_vp9_frame *dec_params)
697 {
698 	unsigned int aligned_width, aligned_height;
699 
700 	/* We only support profile 0. */
701 	if (dec_params->profile != 0) {
702 		dev_err(ctx->dev->dev, "unsupported profile %d\n",
703 			dec_params->profile);
704 		return -EINVAL;
705 	}
706 
707 	aligned_width = round_up(dec_params->frame_width_minus_1 + 1, 64);
708 	aligned_height = round_up(dec_params->frame_height_minus_1 + 1, 64);
709 
710 	/*
711 	 * Userspace should update the capture/decoded format when the
712 	 * resolution changes.
713 	 */
714 	if (aligned_width != ctx->decoded_fmt.fmt.pix_mp.width ||
715 	    aligned_height != ctx->decoded_fmt.fmt.pix_mp.height) {
716 		dev_err(ctx->dev->dev,
717 			"unexpected bitstream resolution %dx%d\n",
718 			dec_params->frame_width_minus_1 + 1,
719 			dec_params->frame_height_minus_1 + 1);
720 		return -EINVAL;
721 	}
722 
723 	return 0;
724 }
725 
/*
 * Per-job preparation: fetch and validate the VP9 controls, then apply
 * the spec's frame-context reset / load / forward-update steps to the
 * driver-side probability tables.
 *
 * Returns 0 on success or a negative errno on invalid controls.
 */
static int rkvdec_vp9_run_preamble(struct rkvdec_ctx *ctx,
				   struct rkvdec_vp9_run *run)
{
	const struct v4l2_ctrl_vp9_frame *dec_params;
	const struct v4l2_ctrl_vp9_compressed_hdr *prob_updates;
	struct rkvdec_vp9_ctx *vp9_ctx = ctx->priv;
	struct v4l2_ctrl *ctrl;
	unsigned int fctx_idx;
	int ret;

	/* v4l2-specific stuff */
	rkvdec_run_preamble(ctx, &run->base);

	ctrl = v4l2_ctrl_find(&ctx->ctrl_hdl,
			      V4L2_CID_STATELESS_VP9_FRAME);
	if (WARN_ON(!ctrl))
		return -EINVAL;
	dec_params = ctrl->p_cur.p;

	ret = validate_dec_params(ctx, dec_params);
	if (ret)
		return ret;

	run->decode_params = dec_params;

	ctrl = v4l2_ctrl_find(&ctx->ctrl_hdl, V4L2_CID_STATELESS_VP9_COMPRESSED_HDR);
	if (WARN_ON(!ctrl))
		return -EINVAL;
	prob_updates = ctrl->p_cur.p;
	vp9_ctx->cur.tx_mode = prob_updates->tx_mode;

	/*
	 * vp9 stuff
	 *
	 * by this point the userspace has done all parts of 6.2 uncompressed_header()
	 * except this fragment:
	 * if ( FrameIsIntra || error_resilient_mode ) {
	 *	setup_past_independence ( )
	 *	if ( frame_type == KEY_FRAME || error_resilient_mode == 1 ||
	 *	     reset_frame_context == 3 ) {
	 *		for ( i = 0; i < 4; i ++ ) {
	 *			save_probs( i )
	 *		}
	 *	} else if ( reset_frame_context == 2 ) {
	 *		save_probs( frame_context_idx )
	 *	}
	 *	frame_context_idx = 0
	 * }
	 */
	fctx_idx = v4l2_vp9_reset_frame_ctx(dec_params, vp9_ctx->frame_context);
	vp9_ctx->cur.frame_context_idx = fctx_idx;

	/* 6.1 frame(sz): load_probs() and load_probs2() */
	vp9_ctx->probability_tables = vp9_ctx->frame_context[fctx_idx];

	/*
	 * The userspace has also performed 6.3 compressed_header(), but handling the
	 * probs in a special way. All probs which need updating, except MV-related,
	 * have been read from the bitstream and translated through inv_map_table[],
	 * but no 6.3.6 inv_recenter_nonneg(v, m) has been performed. The values passed
	 * by userspace are either translated values (there are no 0 values in
	 * inv_map_table[]), or zero to indicate no update. All MV-related probs which need
	 * updating have been read from the bitstream and (mv_prob << 1) | 1 has been
	 * performed. The values passed by userspace are either new values
	 * to replace old ones (the above mentioned shift and bitwise or never result in
	 * a zero) or zero to indicate no update.
	 * fw_update_probs() performs actual probs updates or leaves probs as-is
	 * for values for which a zero was passed from userspace.
	 */
	v4l2_vp9_fw_update_probs(&vp9_ctx->probability_tables, prob_updates, dec_params);

	return 0;
}
799 
/*
 * Run one VP9 decode job: prepare the controls and probability tables,
 * program the hardware and kick off decoding. The watchdog work guards
 * against the hardware never raising the completion interrupt.
 *
 * Returns 0 on success or a negative errno from the preamble.
 */
static int rkvdec_vp9_run(struct rkvdec_ctx *ctx)
{
	struct rkvdec_dev *rkvdec = ctx->dev;
	struct rkvdec_vp9_run run = { };
	int ret;

	ret = rkvdec_vp9_run_preamble(ctx, &run);
	if (ret) {
		rkvdec_run_postamble(ctx, &run.base);
		return ret;
	}

	/* Prepare probs. */
	init_probs(ctx, &run);

	/* Configure hardware registers. */
	config_registers(ctx, &run);

	rkvdec_run_postamble(ctx, &run.base);

	schedule_delayed_work(&rkvdec->watchdog_work, msecs_to_jiffies(2000));

	writel(1, rkvdec->regs + RKVDEC_REG_PREF_LUMA_CACHE_COMMAND);
	writel(1, rkvdec->regs + RKVDEC_REG_PREF_CHR_CACHE_COMMAND);

	writel(0xe, rkvdec->regs + RKVDEC_REG_STRMD_ERR_EN);
	/* Start decoding! */
	writel(RKVDEC_INTERRUPT_DEC_E | RKVDEC_CONFIG_DEC_CLK_GATE_E |
	       RKVDEC_TIMEOUT_E | RKVDEC_BUF_EMPTY_E,
	       rkvdec->regs + RKVDEC_REG_INTERRUPT);

	return 0;
}
833 
/*
 * Copy the tx8/tx16/tx32/skip probability arrays from @p2 to @p1.
 * Used in rkvdec_vp9_done() to preserve forward-updated TX/skip probs
 * across load_probs() for intra frames.
 */
#define copy_tx_and_skip(p1, p2)				\
do {								\
	memcpy((p1)->tx8, (p2)->tx8, sizeof((p1)->tx8));	\
	memcpy((p1)->tx16, (p2)->tx16, sizeof((p1)->tx16));	\
	memcpy((p1)->tx32, (p2)->tx32, sizeof((p1)->tx32));	\
	memcpy((p1)->skip, (p2)->skip, sizeof((p1)->skip));	\
} while (0)
841 
/*
 * Post-decode hook: perform the spec's 6.1.2 refresh_probs() backward
 * probability adaptation using the symbol counts the hardware wrote to
 * the count buffer, then save the result into the selected frame
 * context and promote "cur" frame info to "last".
 */
static void rkvdec_vp9_done(struct rkvdec_ctx *ctx,
			    struct vb2_v4l2_buffer *src_buf,
			    struct vb2_v4l2_buffer *dst_buf,
			    enum vb2_buffer_state result)
{
	struct rkvdec_vp9_ctx *vp9_ctx = ctx->priv;
	unsigned int fctx_idx;

	/* v4l2-specific stuff */
	if (result == VB2_BUF_STATE_ERROR)
		goto out_update_last;

	/*
	 * vp9 stuff
	 *
	 * 6.1.2 refresh_probs()
	 *
	 * In the spec a complementary condition goes last in 6.1.2 refresh_probs(),
	 * but it makes no sense to perform all the activities from the first "if"
	 * there if we actually are not refreshing the frame context. On top of that,
	 * because of 6.2 uncompressed_header() whenever error_resilient_mode == 1,
	 * refresh_frame_context == 0. Consequently, if we don't jump to out_update_last
	 * it means error_resilient_mode must be 0.
	 */
	if (!(vp9_ctx->cur.flags & V4L2_VP9_FRAME_FLAG_REFRESH_FRAME_CTX))
		goto out_update_last;

	fctx_idx = vp9_ctx->cur.frame_context_idx;

	if (!(vp9_ctx->cur.flags & V4L2_VP9_FRAME_FLAG_PARALLEL_DEC_MODE)) {
		/* error_resilient_mode == 0 && frame_parallel_decoding_mode == 0 */
		struct v4l2_vp9_frame_context *probs = &vp9_ctx->probability_tables;
		bool frame_is_intra = vp9_ctx->cur.flags &
		    (V4L2_VP9_FRAME_FLAG_KEY_FRAME | V4L2_VP9_FRAME_FLAG_INTRA_ONLY);
		struct tx_and_skip {
			u8 tx8[2][1];
			u8 tx16[2][2];
			u8 tx32[2][3];
			u8 skip[3];
		} _tx_skip, *tx_skip = &_tx_skip;
		struct v4l2_vp9_frame_symbol_counts *counts;

		/* buffer the forward-updated TX and skip probs */
		if (frame_is_intra)
			copy_tx_and_skip(tx_skip, probs);

		/* 6.1.2 refresh_probs(): load_probs() and load_probs2() */
		*probs = vp9_ctx->frame_context[fctx_idx];

		/* if FrameIsIntra then undo the effect of load_probs2() */
		if (frame_is_intra)
			copy_tx_and_skip(probs, tx_skip);

		counts = frame_is_intra ? &vp9_ctx->intra_cnts : &vp9_ctx->inter_cnts;
		v4l2_vp9_adapt_coef_probs(probs, counts,
					  !vp9_ctx->last.valid ||
					  vp9_ctx->last.flags & V4L2_VP9_FRAME_FLAG_KEY_FRAME,
					  frame_is_intra);
		if (!frame_is_intra) {
			const struct rkvdec_vp9_inter_frame_symbol_counts *inter_cnts;
			u32 classes[2][11];
			int i;

			/*
			 * Strip the hardware's extra alignment element off
			 * classes[2][11 + 1] before handing the counts to the
			 * v4l2-vp9 library (see the note in
			 * rkvdec_init_v4l2_vp9_count_tbl()).
			 */
			inter_cnts = vp9_ctx->count_tbl.cpu;
			for (i = 0; i < ARRAY_SIZE(classes); ++i)
				memcpy(classes[i], inter_cnts->classes[i], sizeof(classes[0]));
			counts->classes = &classes;

			/* load_probs2() already done */
			v4l2_vp9_adapt_noncoef_probs(&vp9_ctx->probability_tables, counts,
						     vp9_ctx->cur.reference_mode,
						     vp9_ctx->cur.interpolation_filter,
						     vp9_ctx->cur.tx_mode, vp9_ctx->cur.flags);
		}
	}

	/* 6.1.2 refresh_probs(): save_probs(fctx_idx) */
	vp9_ctx->frame_context[fctx_idx] = vp9_ctx->probability_tables;

out_update_last:
	update_ctx_last_info(vp9_ctx);
}
924 
/*
 * Wire up the generic v4l2-vp9 symbol-counter views to the hardware
 * count table.
 *
 * The decoder writes its symbol statistics into the single DMA-coherent
 * buffer at vp9_ctx->count_tbl.cpu.  The hardware uses two different
 * layouts for that buffer — one for intra-only frames, one for inter
 * frames — so both struct views below alias the very same memory, and
 * the per-field pointers in vp9_ctx->intra_cnts / vp9_ctx->inter_cnts
 * are precomputed here once at stream start.  rkvdec_vp9_done() then
 * just picks the view matching the decoded frame type.
 */
static void rkvdec_init_v4l2_vp9_count_tbl(struct rkvdec_ctx *ctx)
{
	struct rkvdec_vp9_ctx *vp9_ctx = ctx->priv;
	/* Both structs overlay the same DMA buffer (layout depends on frame type). */
	struct rkvdec_vp9_intra_frame_symbol_counts *intra_cnts = vp9_ctx->count_tbl.cpu;
	struct rkvdec_vp9_inter_frame_symbol_counts *inter_cnts = vp9_ctx->count_tbl.cpu;
	int i, j, k, l, m;

	/* Counters present in both layouts, inter-frame view. */
	vp9_ctx->inter_cnts.partition = &inter_cnts->partition;
	vp9_ctx->inter_cnts.skip = &inter_cnts->skip;
	vp9_ctx->inter_cnts.intra_inter = &inter_cnts->inter;
	vp9_ctx->inter_cnts.tx32p = &inter_cnts->tx32p;
	vp9_ctx->inter_cnts.tx16p = &inter_cnts->tx16p;
	vp9_ctx->inter_cnts.tx8p = &inter_cnts->tx8p;

	/*
	 * Same counters, intra-frame view.  The cast reshapes the
	 * hardware's intra partition counter array to the [16][4] form
	 * the v4l2-vp9 library expects; the underlying element count
	 * must match — NOTE(review): assumed guaranteed by the struct
	 * definitions above, confirm if those change.
	 */
	vp9_ctx->intra_cnts.partition = (u32 (*)[16][4])(&intra_cnts->partition);
	vp9_ctx->intra_cnts.skip = &intra_cnts->skip;
	vp9_ctx->intra_cnts.intra_inter = &intra_cnts->intra;
	vp9_ctx->intra_cnts.tx32p = &intra_cnts->tx32p;
	vp9_ctx->intra_cnts.tx16p = &intra_cnts->tx16p;
	vp9_ctx->intra_cnts.tx8p = &intra_cnts->tx8p;

	/* Counters that only exist in the inter-frame layout. */
	vp9_ctx->inter_cnts.y_mode = &inter_cnts->y_mode;
	vp9_ctx->inter_cnts.uv_mode = &inter_cnts->uv_mode;
	vp9_ctx->inter_cnts.comp = &inter_cnts->comp;
	vp9_ctx->inter_cnts.comp_ref = &inter_cnts->comp_ref;
	vp9_ctx->inter_cnts.single_ref = &inter_cnts->single_ref;
	vp9_ctx->inter_cnts.mv_mode = &inter_cnts->mv_mode;
	vp9_ctx->inter_cnts.filter = &inter_cnts->filter;
	vp9_ctx->inter_cnts.mv_joint = &inter_cnts->mv_joint;
	vp9_ctx->inter_cnts.sign = &inter_cnts->sign;
	/*
	 * rk hardware actually uses "u32 classes[2][11 + 1];"
	 * instead of "u32 classes[2][11];", so this must be explicitly
	 * copied into vp9_ctx->classes when passing the data to the
	 * vp9 library function
	 */
	vp9_ctx->inter_cnts.class0 = &inter_cnts->class0;
	vp9_ctx->inter_cnts.bits = &inter_cnts->bits;
	vp9_ctx->inter_cnts.class0_fp = &inter_cnts->class0_fp;
	vp9_ctx->inter_cnts.fp = &inter_cnts->fp;
	vp9_ctx->inter_cnts.class0_hp = &inter_cnts->class0_hp;
	vp9_ctx->inter_cnts.hp = &inter_cnts->hp;

/*
 * Coefficient/EOB counters: the library wants them indexed as
 * [i][j][k][l][m] while the hardware stores them as ref_cnt[k][i][j][l][m],
 * so every pointer is mapped individually by the nested loops below.
 */
#define INNERMOST_LOOP \
	do {										\
		for (m = 0; m < ARRAY_SIZE(vp9_ctx->inter_cnts.coeff[0][0][0][0]); ++m) {\
			vp9_ctx->inter_cnts.coeff[i][j][k][l][m] =			\
				&inter_cnts->ref_cnt[k][i][j][l][m].coeff;		\
			vp9_ctx->inter_cnts.eob[i][j][k][l][m][0] =			\
				&inter_cnts->ref_cnt[k][i][j][l][m].eob[0];		\
			vp9_ctx->inter_cnts.eob[i][j][k][l][m][1] =			\
				&inter_cnts->ref_cnt[k][i][j][l][m].eob[1];		\
											\
			vp9_ctx->intra_cnts.coeff[i][j][k][l][m] =			\
				&intra_cnts->ref_cnt[k][i][j][l][m].coeff;		\
			vp9_ctx->intra_cnts.eob[i][j][k][l][m][0] =			\
				&intra_cnts->ref_cnt[k][i][j][l][m].eob[0];		\
			vp9_ctx->intra_cnts.eob[i][j][k][l][m][1] =			\
				&intra_cnts->ref_cnt[k][i][j][l][m].eob[1];		\
		}									\
	} while (0)

	for (i = 0; i < ARRAY_SIZE(vp9_ctx->inter_cnts.coeff); ++i)
		for (j = 0; j < ARRAY_SIZE(vp9_ctx->inter_cnts.coeff[0]); ++j)
			for (k = 0; k < ARRAY_SIZE(vp9_ctx->inter_cnts.coeff[0][0]); ++k)
				for (l = 0; l < ARRAY_SIZE(vp9_ctx->inter_cnts.coeff[0][0][0]); ++l)
					INNERMOST_LOOP;
#undef INNERMOST_LOOP
}
994 
995 static int rkvdec_vp9_start(struct rkvdec_ctx *ctx)
996 {
997 	struct rkvdec_dev *rkvdec = ctx->dev;
998 	struct rkvdec_vp9_priv_tbl *priv_tbl;
999 	struct rkvdec_vp9_ctx *vp9_ctx;
1000 	unsigned char *count_tbl;
1001 	int ret;
1002 
1003 	vp9_ctx = kzalloc(sizeof(*vp9_ctx), GFP_KERNEL);
1004 	if (!vp9_ctx)
1005 		return -ENOMEM;
1006 
1007 	ctx->priv = vp9_ctx;
1008 
1009 	priv_tbl = dma_alloc_coherent(rkvdec->dev, sizeof(*priv_tbl),
1010 				      &vp9_ctx->priv_tbl.dma, GFP_KERNEL);
1011 	if (!priv_tbl) {
1012 		ret = -ENOMEM;
1013 		goto err_free_ctx;
1014 	}
1015 
1016 	vp9_ctx->priv_tbl.size = sizeof(*priv_tbl);
1017 	vp9_ctx->priv_tbl.cpu = priv_tbl;
1018 
1019 	count_tbl = dma_alloc_coherent(rkvdec->dev, RKVDEC_VP9_COUNT_SIZE,
1020 				       &vp9_ctx->count_tbl.dma, GFP_KERNEL);
1021 	if (!count_tbl) {
1022 		ret = -ENOMEM;
1023 		goto err_free_priv_tbl;
1024 	}
1025 
1026 	vp9_ctx->count_tbl.size = RKVDEC_VP9_COUNT_SIZE;
1027 	vp9_ctx->count_tbl.cpu = count_tbl;
1028 	rkvdec_init_v4l2_vp9_count_tbl(ctx);
1029 
1030 	return 0;
1031 
1032 err_free_priv_tbl:
1033 	dma_free_coherent(rkvdec->dev, vp9_ctx->priv_tbl.size,
1034 			  vp9_ctx->priv_tbl.cpu, vp9_ctx->priv_tbl.dma);
1035 
1036 err_free_ctx:
1037 	kfree(vp9_ctx);
1038 	return ret;
1039 }
1040 
1041 static void rkvdec_vp9_stop(struct rkvdec_ctx *ctx)
1042 {
1043 	struct rkvdec_vp9_ctx *vp9_ctx = ctx->priv;
1044 	struct rkvdec_dev *rkvdec = ctx->dev;
1045 
1046 	dma_free_coherent(rkvdec->dev, vp9_ctx->count_tbl.size,
1047 			  vp9_ctx->count_tbl.cpu, vp9_ctx->count_tbl.dma);
1048 	dma_free_coherent(rkvdec->dev, vp9_ctx->priv_tbl.size,
1049 			  vp9_ctx->priv_tbl.cpu, vp9_ctx->priv_tbl.dma);
1050 	kfree(vp9_ctx);
1051 }
1052 
1053 static int rkvdec_vp9_adjust_fmt(struct rkvdec_ctx *ctx,
1054 				 struct v4l2_format *f)
1055 {
1056 	struct v4l2_pix_format_mplane *fmt = &f->fmt.pix_mp;
1057 
1058 	fmt->num_planes = 1;
1059 	if (!fmt->plane_fmt[0].sizeimage)
1060 		fmt->plane_fmt[0].sizeimage = fmt->width * fmt->height * 2;
1061 	return 0;
1062 }
1063 
/* Coded-format hooks exposed by the VP9 backend to the rkvdec core. */
const struct rkvdec_coded_fmt_ops rkvdec_vp9_fmt_ops = {
	.adjust_fmt = rkvdec_vp9_adjust_fmt,
	.start = rkvdec_vp9_start,
	.stop = rkvdec_vp9_stop,
	.run = rkvdec_vp9_run,
	.done = rkvdec_vp9_done,
};
1071