// SPDX-License-Identifier: GPL-2.0
/*
 * Hantro VP9 codec driver
 *
 * Copyright (C) 2021 Collabora Ltd.
 */
#include <linux/kernel.h>
#include <linux/vmalloc.h>

#include <media/v4l2-mem2mem.h>
#include <media/v4l2-vp9.h>
#include <media/videobuf2-core.h>
#include <media/videobuf2-dma-contig.h>
#include <media/videobuf2-v4l2.h>

#include "hantro.h"
#include "hantro_vp9.h"
#include "hantro_g2_regs.h"

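/* Alignment, in bytes, of the motion vector buffer offset within a decoded buffer. */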
#define G2_ALIGN 16

enum hantro_ref_frames {
	INTRA_FRAME = 0,
	LAST_FRAME = 1,
	GOLDEN_FRAME = 2,
	ALTREF_FRAME = 3,
	MAX_REF_FRAMES = 4
};

static int start_prepare_run(struct hantro_ctx *ctx, const struct v4l2_ctrl_vp9_frame **dec_params)
{
	const struct v4l2_ctrl_vp9_compressed_hdr *prob_updates;
	struct hantro_vp9_dec_hw_ctx *vp9_ctx = &ctx->vp9_dec;
	struct v4l2_ctrl *ctrl;
	unsigned int fctx_idx;

	/* v4l2-specific stuff */
	hantro_start_prepare_run(ctx);

	ctrl = v4l2_ctrl_find(&ctx->ctrl_handler, V4L2_CID_STATELESS_VP9_FRAME);
	if (WARN_ON(!ctrl))
		return -EINVAL;
	*dec_params = ctrl->p_cur.p;

	ctrl = v4l2_ctrl_find(&ctx->ctrl_handler, V4L2_CID_STATELESS_VP9_COMPRESSED_HDR);
	if (WARN_ON(!ctrl))
		return -EINVAL;
	prob_updates = ctrl->p_cur.p;
	vp9_ctx->cur.tx_mode = prob_updates->tx_mode;

	/*
	 * vp9 stuff
	 *
	 * By this point userspace has done all parts of 6.2 uncompressed_header()
	 * except this fragment:
	 * if ( FrameIsIntra || error_resilient_mode ) {
	 *	setup_past_independence ( )
	 *	if ( frame_type == KEY_FRAME || error_resilient_mode == 1 ||
	 *	     reset_frame_context == 3 ) {
	 *		for ( i = 0; i < 4; i ++ ) {
	 *			save_probs( i )
	 *		}
	 *	} else if ( reset_frame_context == 2 ) {
	 *		save_probs( frame_context_idx )
	 *	}
	 *	frame_context_idx = 0
	 * }
	 */
	fctx_idx = v4l2_vp9_reset_frame_ctx(*dec_params, vp9_ctx->frame_context);
	vp9_ctx->cur.frame_context_idx = fctx_idx;

	/* 6.1 frame(sz): load_probs() and load_probs2() */
	vp9_ctx->probability_tables = vp9_ctx->frame_context[fctx_idx];

	/*
	 * Userspace has also performed 6.3 compressed_header(), but handled the
	 * probs in a special way. All probs which need updating, except
	 * MV-related ones, have been read from the bitstream and translated
	 * through inv_map_table[], but no 6.3.6 inv_recenter_nonneg(v, m) has
	 * been performed. The values passed by userspace are either translated
	 * values (there are no 0 values in inv_map_table[]) or zero to indicate
	 * no update. All MV-related probs which need updating have been read
	 * from the bitstream and (mv_prob << 1) | 1 has been applied. The values
	 * passed by userspace are either new values to replace old ones (the
	 * shift and bitwise OR above never produce a zero) or zero to indicate
	 * no update. fw_update_probs() performs the actual probs updates,
	 * leaving a prob as-is wherever userspace passed a zero.
	 */
	v4l2_vp9_fw_update_probs(&vp9_ctx->probability_tables, prob_updates, *dec_params);

	return 0;
}

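/*
 * Decoded buffer layout, as assumed by the offset helpers below: the luma
 * plane starts at offset 0, the chroma plane directly follows it, and the
 * per-frame motion vector buffer follows the chroma plane at the next
 * G2_ALIGN-ed offset. For an 8-bit 1920x1088 frame, for example, chroma
 * starts at 1920 * 1088 = 2088960 bytes and the MV buffer at
 * ALIGN(2088960 * 3 / 2, 16) = 3133440 bytes.
 */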
static size_t chroma_offset(const struct hantro_ctx *ctx,
			    const struct v4l2_ctrl_vp9_frame *dec_params)
{
	int bytes_per_pixel = dec_params->bit_depth == 8 ? 1 : 2;

	return ctx->src_fmt.width * ctx->src_fmt.height * bytes_per_pixel;
}

static size_t mv_offset(const struct hantro_ctx *ctx,
			const struct v4l2_ctrl_vp9_frame *dec_params)
{
	size_t cr_offset = chroma_offset(ctx, dec_params);

	return ALIGN((cr_offset * 3) / 2, G2_ALIGN);
}

static struct hantro_decoded_buffer *
get_ref_buf(struct hantro_ctx *ctx, struct vb2_v4l2_buffer *dst, u64 timestamp)
{
	struct v4l2_m2m_ctx *m2m_ctx = ctx->fh.m2m_ctx;
	struct vb2_queue *cap_q = &m2m_ctx->cap_q_ctx.q;
	int buf_idx;

	/*
	 * If a reference is unused or invalid, the address of the current
	 * destination buffer is returned instead.
	 */
	buf_idx = vb2_find_timestamp(cap_q, timestamp, 0);
	if (buf_idx < 0)
		return vb2_to_hantro_decoded_buf(&dst->vb2_buf);

	return vb2_to_hantro_decoded_buf(vb2_get_buffer(cap_q, buf_idx));
}

static void update_dec_buf_info(struct hantro_decoded_buffer *buf,
				const struct v4l2_ctrl_vp9_frame *dec_params)
{
	buf->vp9.width = dec_params->frame_width_minus_1 + 1;
	buf->vp9.height = dec_params->frame_height_minus_1 + 1;
	buf->vp9.bit_depth = dec_params->bit_depth;
}

static void update_ctx_cur_info(struct hantro_vp9_dec_hw_ctx *vp9_ctx,
				struct hantro_decoded_buffer *buf,
				const struct v4l2_ctrl_vp9_frame *dec_params)
{
	vp9_ctx->cur.valid = true;
	vp9_ctx->cur.reference_mode = dec_params->reference_mode;
	vp9_ctx->cur.interpolation_filter = dec_params->interpolation_filter;
	vp9_ctx->cur.flags = dec_params->flags;
	vp9_ctx->cur.timestamp = buf->base.vb.vb2_buf.timestamp;
}

static void config_output(struct hantro_ctx *ctx,
			  struct hantro_decoded_buffer *dst,
			  const struct v4l2_ctrl_vp9_frame *dec_params)
{
	dma_addr_t luma_addr, chroma_addr, mv_addr;

	hantro_reg_write(ctx->dev, &g2_out_dis, 0);
	if (!ctx->dev->variant->legacy_regs)
		hantro_reg_write(ctx->dev, &g2_output_format, 0);

	luma_addr = hantro_get_dec_buf_addr(ctx, &dst->base.vb.vb2_buf);
	hantro_write_addr(ctx->dev, G2_OUT_LUMA_ADDR, luma_addr);

	chroma_addr = luma_addr + chroma_offset(ctx, dec_params);
	hantro_write_addr(ctx->dev, G2_OUT_CHROMA_ADDR, chroma_addr);

	mv_addr = luma_addr + mv_offset(ctx, dec_params);
	hantro_write_addr(ctx->dev, G2_OUT_MV_ADDR, mv_addr);
}

struct hantro_vp9_ref_reg {
	const struct hantro_reg width;
	const struct hantro_reg height;
	const struct hantro_reg hor_scale;
	const struct hantro_reg ver_scale;
	u32 y_base;
	u32 c_base;
};

static void config_ref(struct hantro_ctx *ctx,
		       struct hantro_decoded_buffer *dst,
		       const struct hantro_vp9_ref_reg *ref_reg,
		       const struct v4l2_ctrl_vp9_frame *dec_params,
		       u64 ref_ts)
{
	struct hantro_decoded_buffer *buf;
	dma_addr_t luma_addr, chroma_addr;
	u32 refw, refh;

	buf = get_ref_buf(ctx, &dst->base.vb, ref_ts);
	refw = buf->vp9.width;
	refh = buf->vp9.height;

	hantro_reg_write(ctx->dev, &ref_reg->width, refw);
	hantro_reg_write(ctx->dev, &ref_reg->height, refh);

	hantro_reg_write(ctx->dev, &ref_reg->hor_scale, (refw << 14) / dst->vp9.width);
	hantro_reg_write(ctx->dev, &ref_reg->ver_scale, (refh << 14) / dst->vp9.height);

	luma_addr = hantro_get_dec_buf_addr(ctx, &buf->base.vb.vb2_buf);
	hantro_write_addr(ctx->dev, ref_reg->y_base, luma_addr);

	chroma_addr = luma_addr + chroma_offset(ctx, dec_params);
	hantro_write_addr(ctx->dev, ref_reg->c_base, chroma_addr);
}

static void config_ref_registers(struct hantro_ctx *ctx,
				 const struct v4l2_ctrl_vp9_frame *dec_params,
				 struct hantro_decoded_buffer *dst,
				 struct hantro_decoded_buffer *mv_ref)
{
	static const struct hantro_vp9_ref_reg ref_regs[] = {
		{
			/* Last */
			.width = vp9_lref_width,
			.height = vp9_lref_height,
			.hor_scale = vp9_lref_hor_scale,
			.ver_scale = vp9_lref_ver_scale,
			.y_base = G2_REF_LUMA_ADDR(0),
			.c_base = G2_REF_CHROMA_ADDR(0),
		}, {
			/* Golden */
			.width = vp9_gref_width,
			.height = vp9_gref_height,
			.hor_scale = vp9_gref_hor_scale,
			.ver_scale = vp9_gref_ver_scale,
			.y_base = G2_REF_LUMA_ADDR(4),
			.c_base = G2_REF_CHROMA_ADDR(4),
		}, {
			/* Altref */
			.width = vp9_aref_width,
			.height = vp9_aref_height,
			.hor_scale = vp9_aref_hor_scale,
			.ver_scale = vp9_aref_ver_scale,
			.y_base = G2_REF_LUMA_ADDR(5),
			.c_base = G2_REF_CHROMA_ADDR(5),
		},
	};
	dma_addr_t mv_addr;

	config_ref(ctx, dst, &ref_regs[0], dec_params, dec_params->last_frame_ts);
	config_ref(ctx, dst, &ref_regs[1], dec_params, dec_params->golden_frame_ts);
	config_ref(ctx, dst, &ref_regs[2], dec_params, dec_params->alt_frame_ts);

	mv_addr = hantro_get_dec_buf_addr(ctx, &mv_ref->base.vb.vb2_buf) +
		  mv_offset(ctx, dec_params);
	hantro_write_addr(ctx->dev, G2_REF_MV_ADDR(0), mv_addr);

	hantro_reg_write(ctx->dev, &vp9_last_sign_bias,
			 dec_params->ref_frame_sign_bias & V4L2_VP9_SIGN_BIAS_LAST ? 1 : 0);

	hantro_reg_write(ctx->dev, &vp9_gref_sign_bias,
			 dec_params->ref_frame_sign_bias & V4L2_VP9_SIGN_BIAS_GOLDEN ? 1 : 0);

	hantro_reg_write(ctx->dev, &vp9_aref_sign_bias,
			 dec_params->ref_frame_sign_bias & V4L2_VP9_SIGN_BIAS_ALT ? 1 : 0);
}

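/*
 * Split @sbs superblocks evenly into @tiles tiles: tile i spans superblocks
 * [i * sbs / tiles, (i + 1) * sbs / tiles), matching the uniform tile sizing
 * of the VP9 spec. E.g. 10 superblocks split into 4 tiles yields widths
 * 2, 3, 2, 3.
 */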
static void recompute_tile_info(unsigned short *tile_info, unsigned int tiles, unsigned int sbs)
{
	int i;
	unsigned int accumulated = 0;
	unsigned int next_accumulated;

	for (i = 1; i <= tiles; ++i) {
		next_accumulated = i * sbs / tiles;
		*tile_info++ = next_accumulated - accumulated;
		accumulated = next_accumulated;
	}
}

static void
recompute_tile_rc_info(struct hantro_ctx *ctx,
		       unsigned int tile_r, unsigned int tile_c,
		       unsigned int sbs_r, unsigned int sbs_c)
{
	struct hantro_vp9_dec_hw_ctx *vp9_ctx = &ctx->vp9_dec;

	recompute_tile_info(vp9_ctx->tile_r_info, tile_r, sbs_r);
	recompute_tile_info(vp9_ctx->tile_c_info, tile_c, sbs_c);

	vp9_ctx->last_tile_r = tile_r;
	vp9_ctx->last_tile_c = tile_c;
	vp9_ctx->last_sbs_r = sbs_r;
	vp9_ctx->last_sbs_c = sbs_c;
}

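/*
 * When there are more tile rows than superblock rows (possible for very
 * small frames), the topmost one or two tile rows cover at most one
 * superblock row between them. Return the index of the first tile row to
 * emit; fill_tile_info() below folds the heights of the skipped rows into it.
 */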
static inline unsigned int first_tile_row(unsigned int tile_r, unsigned int sbs_r)
{
	if (tile_r == sbs_r + 1)
		return 1;

	if (tile_r == sbs_r + 2)
		return 2;

	return 0;
}

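/*
 * Fill the hardware tile-size buffer with one (width, height) pair of u16
 * superblock counts per tile, in raster order.
 */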
static void
fill_tile_info(struct hantro_ctx *ctx,
	       unsigned int tile_r, unsigned int tile_c,
	       unsigned int sbs_r, unsigned int sbs_c,
	       unsigned short *tile_mem)
{
	struct hantro_vp9_dec_hw_ctx *vp9_ctx = &ctx->vp9_dec;
	unsigned int i, j;
	bool first = true;

	for (i = first_tile_row(tile_r, sbs_r); i < tile_r; ++i) {
		unsigned short r_info = vp9_ctx->tile_r_info[i];

		if (first) {
			if (i > 0)
				r_info += vp9_ctx->tile_r_info[0];
			if (i == 2)
				r_info += vp9_ctx->tile_r_info[1];
			first = false;
		}
		for (j = 0; j < tile_c; ++j) {
			*tile_mem++ = vp9_ctx->tile_c_info[j];
			*tile_mem++ = r_info;
		}
	}
}

static void
config_tiles(struct hantro_ctx *ctx,
	     const struct v4l2_ctrl_vp9_frame *dec_params,
	     struct hantro_decoded_buffer *dst)
{
	struct hantro_vp9_dec_hw_ctx *vp9_ctx = &ctx->vp9_dec;
	struct hantro_aux_buf *misc = &vp9_ctx->misc;
	struct hantro_aux_buf *tile_edge = &vp9_ctx->tile_edge;
	dma_addr_t addr;
	unsigned short *tile_mem;
	unsigned int rows, cols;

	addr = misc->dma + vp9_ctx->tile_info_offset;
	hantro_write_addr(ctx->dev, G2_TILE_SIZES_ADDR, addr);

	tile_mem = misc->cpu + vp9_ctx->tile_info_offset;
	if (dec_params->tile_cols_log2 || dec_params->tile_rows_log2) {
		unsigned int tile_r = (1 << dec_params->tile_rows_log2);
		unsigned int tile_c = (1 << dec_params->tile_cols_log2);
		unsigned int sbs_r = hantro_vp9_num_sbs(dst->vp9.height);
		unsigned int sbs_c = hantro_vp9_num_sbs(dst->vp9.width);

		if (tile_r != vp9_ctx->last_tile_r || tile_c != vp9_ctx->last_tile_c ||
		    sbs_r != vp9_ctx->last_sbs_r || sbs_c != vp9_ctx->last_sbs_c)
			recompute_tile_rc_info(ctx, tile_r, tile_c, sbs_r, sbs_c);

		fill_tile_info(ctx, tile_r, tile_c, sbs_r, sbs_c, tile_mem);

		cols = tile_c;
		rows = tile_r;
		hantro_reg_write(ctx->dev, &g2_tile_e, 1);
	} else {
		tile_mem[0] = hantro_vp9_num_sbs(dst->vp9.width);
		tile_mem[1] = hantro_vp9_num_sbs(dst->vp9.height);

		cols = 1;
		rows = 1;
		hantro_reg_write(ctx->dev, &g2_tile_e, 0);
	}

	if (ctx->dev->variant->legacy_regs) {
		hantro_reg_write(ctx->dev, &g2_num_tile_cols_old, cols);
		hantro_reg_write(ctx->dev, &g2_num_tile_rows_old, rows);
	} else {
		hantro_reg_write(ctx->dev, &g2_num_tile_cols, cols);
		hantro_reg_write(ctx->dev, &g2_num_tile_rows, rows);
	}

	/* provide aux buffers even if no tiles are used */
	addr = tile_edge->dma;
	hantro_write_addr(ctx->dev, G2_TILE_FILTER_ADDR, addr);

	addr = tile_edge->dma + vp9_ctx->bsd_ctrl_offset;
	hantro_write_addr(ctx->dev, G2_TILE_BSD_ADDR, addr);
}

static void
update_feat_and_flag(struct hantro_vp9_dec_hw_ctx *vp9_ctx,
		     const struct v4l2_vp9_segmentation *seg,
		     unsigned int feature,
		     unsigned int segid)
{
	u8 mask = V4L2_VP9_SEGMENT_FEATURE_ENABLED(feature);

	vp9_ctx->feature_data[segid][feature] = seg->feature_data[segid][feature];
	vp9_ctx->feature_enabled[segid] &= ~mask;
	vp9_ctx->feature_enabled[segid] |= (seg->feature_enabled[segid] & mask);
}

static inline s16 clip3(s16 x, s16 y, s16 z)
{
	return (z < x) ? x : (z > y) ? y : z;
}

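/*
 * The VP9 spec clips a delta-adjusted quantizer index to [0, 255] and a
 * delta-adjusted loop filter level to [0, 63]; callers pass the applicable
 * upper bound via @clip.
 */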
static s16 feat_val_clip3(s16 feat_val, s16 feature_data, bool absolute, u8 clip)
{
	if (absolute)
		return feature_data;

	return clip3(0, clip, feat_val + feature_data);
}

static void config_segment(struct hantro_ctx *ctx, const struct v4l2_ctrl_vp9_frame *dec_params)
{
	struct hantro_vp9_dec_hw_ctx *vp9_ctx = &ctx->vp9_dec;
	const struct v4l2_vp9_segmentation *seg;
	s16 feat_val;
	unsigned char feat_id;
	unsigned int segid;
	bool segment_enabled, absolute, update_data;

	static const struct hantro_reg seg_regs[8][V4L2_VP9_SEG_LVL_MAX] = {
		{ vp9_quant_seg0, vp9_filt_level_seg0, vp9_refpic_seg0, vp9_skip_seg0 },
		{ vp9_quant_seg1, vp9_filt_level_seg1, vp9_refpic_seg1, vp9_skip_seg1 },
		{ vp9_quant_seg2, vp9_filt_level_seg2, vp9_refpic_seg2, vp9_skip_seg2 },
		{ vp9_quant_seg3, vp9_filt_level_seg3, vp9_refpic_seg3, vp9_skip_seg3 },
		{ vp9_quant_seg4, vp9_filt_level_seg4, vp9_refpic_seg4, vp9_skip_seg4 },
		{ vp9_quant_seg5, vp9_filt_level_seg5, vp9_refpic_seg5, vp9_skip_seg5 },
		{ vp9_quant_seg6, vp9_filt_level_seg6, vp9_refpic_seg6, vp9_skip_seg6 },
		{ vp9_quant_seg7, vp9_filt_level_seg7, vp9_refpic_seg7, vp9_skip_seg7 },
	};

	segment_enabled = !!(dec_params->seg.flags & V4L2_VP9_SEGMENTATION_FLAG_ENABLED);
	hantro_reg_write(ctx->dev, &vp9_segment_e, segment_enabled);
	hantro_reg_write(ctx->dev, &vp9_segment_upd_e,
			 !!(dec_params->seg.flags & V4L2_VP9_SEGMENTATION_FLAG_UPDATE_MAP));
	hantro_reg_write(ctx->dev, &vp9_segment_temp_upd_e,
			 !!(dec_params->seg.flags & V4L2_VP9_SEGMENTATION_FLAG_TEMPORAL_UPDATE));

	seg = &dec_params->seg;
	absolute = !!(seg->flags & V4L2_VP9_SEGMENTATION_FLAG_ABS_OR_DELTA_UPDATE);
	update_data = !!(seg->flags & V4L2_VP9_SEGMENTATION_FLAG_UPDATE_DATA);

	for (segid = 0; segid < 8; ++segid) {
		/* Quantizer segment feature */
		feat_id = V4L2_VP9_SEG_LVL_ALT_Q;
		feat_val = dec_params->quant.base_q_idx;
		if (segment_enabled) {
			if (update_data)
				update_feat_and_flag(vp9_ctx, seg, feat_id, segid);
			if (v4l2_vp9_seg_feat_enabled(vp9_ctx->feature_enabled, feat_id, segid))
				feat_val = feat_val_clip3(feat_val,
							  vp9_ctx->feature_data[segid][feat_id],
							  absolute, 255);
		}
		hantro_reg_write(ctx->dev, &seg_regs[segid][feat_id], feat_val);

		/* Loop filter segment feature */
		feat_id = V4L2_VP9_SEG_LVL_ALT_L;
		feat_val = dec_params->lf.level;
		if (segment_enabled) {
			if (update_data)
				update_feat_and_flag(vp9_ctx, seg, feat_id, segid);
			if (v4l2_vp9_seg_feat_enabled(vp9_ctx->feature_enabled, feat_id, segid))
				feat_val = feat_val_clip3(feat_val,
							  vp9_ctx->feature_data[segid][feat_id],
							  absolute, 63);
		}
		hantro_reg_write(ctx->dev, &seg_regs[segid][feat_id], feat_val);

		/* Reference frame segment feature */
		feat_id = V4L2_VP9_SEG_LVL_REF_FRAME;
		feat_val = 0;
		if (segment_enabled) {
			if (update_data)
				update_feat_and_flag(vp9_ctx, seg, feat_id, segid);
			if (!(dec_params->flags & V4L2_VP9_FRAME_FLAG_KEY_FRAME) &&
			    v4l2_vp9_seg_feat_enabled(vp9_ctx->feature_enabled, feat_id, segid))
				feat_val = vp9_ctx->feature_data[segid][feat_id] + 1;
		}
		hantro_reg_write(ctx->dev, &seg_regs[segid][feat_id], feat_val);

		/* Skip segment feature */
		feat_id = V4L2_VP9_SEG_LVL_SKIP;
		feat_val = 0;
		if (segment_enabled) {
			if (update_data)
				update_feat_and_flag(vp9_ctx, seg, feat_id, segid);
			feat_val = v4l2_vp9_seg_feat_enabled(vp9_ctx->feature_enabled,
							     feat_id, segid) ? 1 : 0;
		}
		hantro_reg_write(ctx->dev, &seg_regs[segid][feat_id], feat_val);
	}
}

static void config_loop_filter(struct hantro_ctx *ctx, const struct v4l2_ctrl_vp9_frame *dec_params)
{
	bool d = dec_params->lf.flags & V4L2_VP9_LOOP_FILTER_FLAG_DELTA_ENABLED;

	hantro_reg_write(ctx->dev, &vp9_filt_level, dec_params->lf.level);
	hantro_reg_write(ctx->dev, &g2_out_filtering_dis, dec_params->lf.level == 0);
	hantro_reg_write(ctx->dev, &vp9_filt_sharpness, dec_params->lf.sharpness);

	hantro_reg_write(ctx->dev, &vp9_filt_ref_adj_0, d ? dec_params->lf.ref_deltas[0] : 0);
	hantro_reg_write(ctx->dev, &vp9_filt_ref_adj_1, d ? dec_params->lf.ref_deltas[1] : 0);
	hantro_reg_write(ctx->dev, &vp9_filt_ref_adj_2, d ? dec_params->lf.ref_deltas[2] : 0);
	hantro_reg_write(ctx->dev, &vp9_filt_ref_adj_3, d ? dec_params->lf.ref_deltas[3] : 0);
	hantro_reg_write(ctx->dev, &vp9_filt_mb_adj_0, d ? dec_params->lf.mode_deltas[0] : 0);
	hantro_reg_write(ctx->dev, &vp9_filt_mb_adj_1, d ? dec_params->lf.mode_deltas[1] : 0);
}

static void config_picture_dimensions(struct hantro_ctx *ctx, struct hantro_decoded_buffer *dst)
{
	u32 pic_w_4x4, pic_h_4x4;

	hantro_reg_write(ctx->dev, &g2_pic_width_in_cbs, (dst->vp9.width + 7) / 8);
	hantro_reg_write(ctx->dev, &g2_pic_height_in_cbs, (dst->vp9.height + 7) / 8);
	pic_w_4x4 = roundup(dst->vp9.width, 8) >> 2;
	pic_h_4x4 = roundup(dst->vp9.height, 8) >> 2;
	hantro_reg_write(ctx->dev, &g2_pic_width_4x4, pic_w_4x4);
	hantro_reg_write(ctx->dev, &g2_pic_height_4x4, pic_h_4x4);
}

static void
config_bit_depth(struct hantro_ctx *ctx, const struct v4l2_ctrl_vp9_frame *dec_params)
{
	if (ctx->dev->variant->legacy_regs) {
		u8 pp_shift = 0;

		hantro_reg_write(ctx->dev, &g2_bit_depth_y, dec_params->bit_depth);
		hantro_reg_write(ctx->dev, &g2_bit_depth_c, dec_params->bit_depth);
		hantro_reg_write(ctx->dev, &g2_rs_out_bit_depth, dec_params->bit_depth);

		if (dec_params->bit_depth > 8)
			pp_shift = 16 - dec_params->bit_depth;

		hantro_reg_write(ctx->dev, &g2_pp_pix_shift, pp_shift);
		hantro_reg_write(ctx->dev, &g2_pix_shift, 0);
	} else {
		hantro_reg_write(ctx->dev, &g2_bit_depth_y_minus8, dec_params->bit_depth - 8);
		hantro_reg_write(ctx->dev, &g2_bit_depth_c_minus8, dec_params->bit_depth - 8);
	}
}

static inline bool is_lossless(const struct v4l2_vp9_quantization *quant)
{
	return quant->base_q_idx == 0 && quant->delta_q_uv_ac == 0 &&
	       quant->delta_q_uv_dc == 0 && quant->delta_q_y_dc == 0;
}

static void
config_quant(struct hantro_ctx *ctx, const struct v4l2_ctrl_vp9_frame *dec_params)
{
	hantro_reg_write(ctx->dev, &vp9_qp_delta_y_dc, dec_params->quant.delta_q_y_dc);
	hantro_reg_write(ctx->dev, &vp9_qp_delta_ch_dc, dec_params->quant.delta_q_uv_dc);
	hantro_reg_write(ctx->dev, &vp9_qp_delta_ch_ac, dec_params->quant.delta_q_uv_ac);
	hantro_reg_write(ctx->dev, &vp9_lossless_e, is_lossless(&dec_params->quant));
}

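/*
 * Map the V4L2 interpolation filter to the value the mcomp_filt_type field
 * expects; judging by the switch below, the hardware encoding is
 * 0 = eighttap smooth, 1 = eighttap, 2 = eighttap sharp, 3 = bilinear,
 * 4 = switchable.
 */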
static u32
hantro_interp_filter_from_v4l2(unsigned int interpolation_filter)
{
	switch (interpolation_filter) {
	case V4L2_VP9_INTERP_FILTER_EIGHTTAP:
		return 0x1;
	case V4L2_VP9_INTERP_FILTER_EIGHTTAP_SMOOTH:
		return 0;
	case V4L2_VP9_INTERP_FILTER_EIGHTTAP_SHARP:
		return 0x2;
	case V4L2_VP9_INTERP_FILTER_BILINEAR:
		return 0x3;
	case V4L2_VP9_INTERP_FILTER_SWITCHABLE:
		return 0x4;
	}

	return 0;
}

static void
config_others(struct hantro_ctx *ctx, const struct v4l2_ctrl_vp9_frame *dec_params,
	      bool intra_only, bool resolution_change)
{
	struct hantro_vp9_dec_hw_ctx *vp9_ctx = &ctx->vp9_dec;

	hantro_reg_write(ctx->dev, &g2_idr_pic_e, intra_only);

	hantro_reg_write(ctx->dev, &vp9_transform_mode, vp9_ctx->cur.tx_mode);

	hantro_reg_write(ctx->dev, &vp9_mcomp_filt_type, intra_only ?
		0 : hantro_interp_filter_from_v4l2(dec_params->interpolation_filter));

	hantro_reg_write(ctx->dev, &vp9_high_prec_mv_e,
			 !!(dec_params->flags & V4L2_VP9_FRAME_FLAG_ALLOW_HIGH_PREC_MV));

	hantro_reg_write(ctx->dev, &vp9_comp_pred_mode, dec_params->reference_mode);

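	/*
	 * Temporal MV prediction (use_prev_frame_mvs in the spec) is only
	 * valid when the previous frame was shown, had the same resolution,
	 * and neither frame is intra-coded or error-resilient.
	 */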
	hantro_reg_write(ctx->dev, &g2_tempor_mvp_e,
			 !(dec_params->flags & V4L2_VP9_FRAME_FLAG_ERROR_RESILIENT) &&
			 !(dec_params->flags & V4L2_VP9_FRAME_FLAG_KEY_FRAME) &&
			 !(vp9_ctx->last.flags & V4L2_VP9_FRAME_FLAG_KEY_FRAME) &&
			 !(dec_params->flags & V4L2_VP9_FRAME_FLAG_INTRA_ONLY) &&
			 !resolution_change &&
			 !!(vp9_ctx->last.flags & V4L2_VP9_FRAME_FLAG_SHOW_FRAME));

	hantro_reg_write(ctx->dev, &g2_write_mvs_e,
			 !(dec_params->flags & V4L2_VP9_FRAME_FLAG_KEY_FRAME));
}

static void
config_compound_reference(struct hantro_ctx *ctx,
			  const struct v4l2_ctrl_vp9_frame *dec_params)
{
	u32 comp_fixed_ref, comp_var_ref[2];
	bool last_ref_frame_sign_bias;
	bool golden_ref_frame_sign_bias;
	bool alt_ref_frame_sign_bias;
	bool comp_ref_allowed = false;

	comp_fixed_ref = 0;
	comp_var_ref[0] = 0;
	comp_var_ref[1] = 0;

	last_ref_frame_sign_bias = dec_params->ref_frame_sign_bias & V4L2_VP9_SIGN_BIAS_LAST;
	golden_ref_frame_sign_bias = dec_params->ref_frame_sign_bias & V4L2_VP9_SIGN_BIAS_GOLDEN;
	alt_ref_frame_sign_bias = dec_params->ref_frame_sign_bias & V4L2_VP9_SIGN_BIAS_ALT;

	/* 6.3.12 Frame reference mode syntax */
	comp_ref_allowed |= golden_ref_frame_sign_bias != last_ref_frame_sign_bias;
	comp_ref_allowed |= alt_ref_frame_sign_bias != last_ref_frame_sign_bias;

	if (comp_ref_allowed) {
		if (last_ref_frame_sign_bias == golden_ref_frame_sign_bias) {
			comp_fixed_ref = ALTREF_FRAME;
			comp_var_ref[0] = LAST_FRAME;
			comp_var_ref[1] = GOLDEN_FRAME;
		} else if (last_ref_frame_sign_bias == alt_ref_frame_sign_bias) {
			comp_fixed_ref = GOLDEN_FRAME;
			comp_var_ref[0] = LAST_FRAME;
			comp_var_ref[1] = ALTREF_FRAME;
		} else {
			comp_fixed_ref = LAST_FRAME;
			comp_var_ref[0] = GOLDEN_FRAME;
			comp_var_ref[1] = ALTREF_FRAME;
		}
	}

	hantro_reg_write(ctx->dev, &vp9_comp_pred_fixed_ref, comp_fixed_ref);
	hantro_reg_write(ctx->dev, &vp9_comp_pred_var_ref0, comp_var_ref[0]);
	hantro_reg_write(ctx->dev, &vp9_comp_pred_var_ref1, comp_var_ref[1]);
}

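/*
 * The coefficient probability rows in the hardware table are padded to four
 * bytes while the v4l2-vp9 tables use three, so copy the three meaningful
 * bytes per row and zero the padding byte.
 */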
#define INNER_LOOP \
do {									\
	for (m = 0; m < ARRAY_SIZE(adaptive->coef[0][0][0][0]); ++m) {	\
		memcpy(adaptive->coef[i][j][k][l][m],			\
		       probs->coef[i][j][k][l][m],			\
		       sizeof(probs->coef[i][j][k][l][m]));		\
									\
		adaptive->coef[i][j][k][l][m][3] = 0;			\
	}								\
} while (0)

static void config_probs(struct hantro_ctx *ctx, const struct v4l2_ctrl_vp9_frame *dec_params)
{
	struct hantro_vp9_dec_hw_ctx *vp9_ctx = &ctx->vp9_dec;
	struct hantro_aux_buf *misc = &vp9_ctx->misc;
	struct hantro_g2_all_probs *all_probs = misc->cpu;
	struct hantro_g2_probs *adaptive;
	struct hantro_g2_mv_probs *mv;
	const struct v4l2_vp9_segmentation *seg = &dec_params->seg;
	const struct v4l2_vp9_frame_context *probs = &vp9_ctx->probability_tables;
	int i, j, k, l, m;

	for (i = 0; i < ARRAY_SIZE(all_probs->kf_y_mode_prob); ++i)
		for (j = 0; j < ARRAY_SIZE(all_probs->kf_y_mode_prob[0]); ++j) {
			memcpy(all_probs->kf_y_mode_prob[i][j],
			       v4l2_vp9_kf_y_mode_prob[i][j],
			       ARRAY_SIZE(all_probs->kf_y_mode_prob[i][j]));

			all_probs->kf_y_mode_prob_tail[i][j][0] =
				v4l2_vp9_kf_y_mode_prob[i][j][8];
		}

	memcpy(all_probs->mb_segment_tree_probs, seg->tree_probs,
	       sizeof(all_probs->mb_segment_tree_probs));

	memcpy(all_probs->segment_pred_probs, seg->pred_probs,
	       sizeof(all_probs->segment_pred_probs));

	for (i = 0; i < ARRAY_SIZE(all_probs->kf_uv_mode_prob); ++i) {
		memcpy(all_probs->kf_uv_mode_prob[i], v4l2_vp9_kf_uv_mode_prob[i],
		       ARRAY_SIZE(all_probs->kf_uv_mode_prob[i]));

		all_probs->kf_uv_mode_prob_tail[i][0] = v4l2_vp9_kf_uv_mode_prob[i][8];
	}

	adaptive = &all_probs->probs;

	for (i = 0; i < ARRAY_SIZE(adaptive->inter_mode); ++i) {
		memcpy(adaptive->inter_mode[i], probs->inter_mode[i],
		       ARRAY_SIZE(probs->inter_mode[i]));

		adaptive->inter_mode[i][3] = 0;
	}

	memcpy(adaptive->is_inter, probs->is_inter, sizeof(adaptive->is_inter));

	for (i = 0; i < ARRAY_SIZE(adaptive->uv_mode); ++i) {
		memcpy(adaptive->uv_mode[i], probs->uv_mode[i],
		       sizeof(adaptive->uv_mode[i]));
		adaptive->uv_mode_tail[i][0] = probs->uv_mode[i][8];
	}

	memcpy(adaptive->tx8, probs->tx8, sizeof(adaptive->tx8));
	memcpy(adaptive->tx16, probs->tx16, sizeof(adaptive->tx16));
	memcpy(adaptive->tx32, probs->tx32, sizeof(adaptive->tx32));

	for (i = 0; i < ARRAY_SIZE(adaptive->y_mode); ++i) {
		memcpy(adaptive->y_mode[i], probs->y_mode[i],
		       ARRAY_SIZE(adaptive->y_mode[i]));

		adaptive->y_mode_tail[i][0] = probs->y_mode[i][8];
	}

	for (i = 0; i < ARRAY_SIZE(adaptive->partition[0]); ++i) {
		memcpy(adaptive->partition[0][i], v4l2_vp9_kf_partition_probs[i],
		       sizeof(v4l2_vp9_kf_partition_probs[i]));

		adaptive->partition[0][i][3] = 0;
	}

	for (i = 0; i < ARRAY_SIZE(adaptive->partition[1]); ++i) {
		memcpy(adaptive->partition[1][i], probs->partition[i],
		       sizeof(probs->partition[i]));

		adaptive->partition[1][i][3] = 0;
	}

	memcpy(adaptive->interp_filter, probs->interp_filter,
	       sizeof(adaptive->interp_filter));

	memcpy(adaptive->comp_mode, probs->comp_mode, sizeof(adaptive->comp_mode));

	memcpy(adaptive->skip, probs->skip, sizeof(adaptive->skip));

	mv = &adaptive->mv;

	memcpy(mv->joint, probs->mv.joint, sizeof(mv->joint));
	memcpy(mv->sign, probs->mv.sign, sizeof(mv->sign));
	memcpy(mv->class0_bit, probs->mv.class0_bit, sizeof(mv->class0_bit));
	memcpy(mv->fr, probs->mv.fr, sizeof(mv->fr));
	memcpy(mv->class0_hp, probs->mv.class0_hp, sizeof(mv->class0_hp));
	memcpy(mv->hp, probs->mv.hp, sizeof(mv->hp));
	memcpy(mv->classes, probs->mv.classes, sizeof(mv->classes));
	memcpy(mv->class0_fr, probs->mv.class0_fr, sizeof(mv->class0_fr));
	memcpy(mv->bits, probs->mv.bits, sizeof(mv->bits));

	memcpy(adaptive->single_ref, probs->single_ref, sizeof(adaptive->single_ref));

	memcpy(adaptive->comp_ref, probs->comp_ref, sizeof(adaptive->comp_ref));

	for (i = 0; i < ARRAY_SIZE(adaptive->coef); ++i)
		for (j = 0; j < ARRAY_SIZE(adaptive->coef[0]); ++j)
			for (k = 0; k < ARRAY_SIZE(adaptive->coef[0][0]); ++k)
				for (l = 0; l < ARRAY_SIZE(adaptive->coef[0][0][0]); ++l)
					INNER_LOOP;

	hantro_write_addr(ctx->dev, G2_VP9_PROBS_ADDR, misc->dma);
}

static void config_counts(struct hantro_ctx *ctx)
{
	struct hantro_vp9_dec_hw_ctx *vp9_dec = &ctx->vp9_dec;
	struct hantro_aux_buf *misc = &vp9_dec->misc;
	dma_addr_t addr = misc->dma + vp9_dec->ctx_counters_offset;

	hantro_write_addr(ctx->dev, G2_VP9_CTX_COUNT_ADDR, addr);
}

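/*
 * The segmentation map is double-buffered: the core reads the previous
 * frame's map from one half of the aux buffer and writes the updated map to
 * the other half, and the halves are swapped only when the map is actually
 * updated.
 */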
static void config_seg_map(struct hantro_ctx *ctx,
			   const struct v4l2_ctrl_vp9_frame *dec_params,
			   bool intra_only, bool update_map)
{
	struct hantro_vp9_dec_hw_ctx *vp9_ctx = &ctx->vp9_dec;
	struct hantro_aux_buf *segment_map = &vp9_ctx->segment_map;
	dma_addr_t addr;

	if (intra_only ||
	    (dec_params->flags & V4L2_VP9_FRAME_FLAG_ERROR_RESILIENT)) {
		memset(segment_map->cpu, 0, segment_map->size);
		memset(vp9_ctx->feature_data, 0, sizeof(vp9_ctx->feature_data));
		memset(vp9_ctx->feature_enabled, 0, sizeof(vp9_ctx->feature_enabled));
	}

	addr = segment_map->dma + vp9_ctx->active_segment * vp9_ctx->segment_map_size;
	hantro_write_addr(ctx->dev, G2_VP9_SEGMENT_READ_ADDR, addr);

	addr = segment_map->dma + (1 - vp9_ctx->active_segment) * vp9_ctx->segment_map_size;
	hantro_write_addr(ctx->dev, G2_VP9_SEGMENT_WRITE_ADDR, addr);

	if (update_map)
		vp9_ctx->active_segment = 1 - vp9_ctx->active_segment;
}

static void
config_source(struct hantro_ctx *ctx, const struct v4l2_ctrl_vp9_frame *dec_params,
	      struct vb2_v4l2_buffer *vb2_src)
{
	dma_addr_t stream_base, tmp_addr;
	unsigned int headres_size;
	u32 src_len, start_bit, src_buf_len;

	headres_size = dec_params->uncompressed_header_size
		     + dec_params->compressed_header_size;

	stream_base = vb2_dma_contig_plane_dma_addr(&vb2_src->vb2_buf, 0);

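	/*
	 * The core starts parsing right after the frame headers. Legacy
	 * variants take a 16-byte-aligned stream address plus a bit offset,
	 * so round the post-header address down and express the remainder in
	 * start_bit; e.g. with 35 bytes of headers (and a 16-byte-aligned
	 * buffer start) the core is pointed at stream_base + 32 and told to
	 * skip 3 * 8 = 24 bits.
	 */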
	tmp_addr = stream_base + headres_size;
	if (ctx->dev->variant->legacy_regs)
		hantro_write_addr(ctx->dev, G2_STREAM_ADDR, (tmp_addr & ~0xf));
	else
		hantro_write_addr(ctx->dev, G2_STREAM_ADDR, stream_base);

	start_bit = (tmp_addr & 0xf) * 8;
	hantro_reg_write(ctx->dev, &g2_start_bit, start_bit);

	src_len = vb2_get_plane_payload(&vb2_src->vb2_buf, 0);
	src_len += start_bit / 8 - headres_size;
	hantro_reg_write(ctx->dev, &g2_stream_len, src_len);

	if (!ctx->dev->variant->legacy_regs) {
		tmp_addr &= ~0xf;
		hantro_reg_write(ctx->dev, &g2_strm_start_offset, tmp_addr - stream_base);
		src_buf_len = vb2_plane_size(&vb2_src->vb2_buf, 0);
		hantro_reg_write(ctx->dev, &g2_strm_buffer_len, src_buf_len);
	}
}

static void
config_registers(struct hantro_ctx *ctx, const struct v4l2_ctrl_vp9_frame *dec_params,
		 struct vb2_v4l2_buffer *vb2_src, struct vb2_v4l2_buffer *vb2_dst)
{
	struct hantro_decoded_buffer *dst, *last, *mv_ref;
	struct hantro_vp9_dec_hw_ctx *vp9_ctx = &ctx->vp9_dec;
	const struct v4l2_vp9_segmentation *seg;
	bool intra_only, resolution_change;

	/* vp9 stuff */
	dst = vb2_to_hantro_decoded_buf(&vb2_dst->vb2_buf);

	if (vp9_ctx->last.valid)
		last = get_ref_buf(ctx, &dst->base.vb, vp9_ctx->last.timestamp);
	else
		last = dst;

	update_dec_buf_info(dst, dec_params);
	update_ctx_cur_info(vp9_ctx, dst, dec_params);
	seg = &dec_params->seg;

	intra_only = !!(dec_params->flags &
			(V4L2_VP9_FRAME_FLAG_KEY_FRAME |
			V4L2_VP9_FRAME_FLAG_INTRA_ONLY));

	if (!intra_only &&
	    !(dec_params->flags & V4L2_VP9_FRAME_FLAG_ERROR_RESILIENT) &&
	    vp9_ctx->last.valid)
		mv_ref = last;
	else
		mv_ref = dst;

	resolution_change = dst->vp9.width != last->vp9.width ||
			    dst->vp9.height != last->vp9.height;

	/* configure basic registers */
	hantro_reg_write(ctx->dev, &g2_mode, VP9_DEC_MODE);
	if (!ctx->dev->variant->legacy_regs) {
		hantro_reg_write(ctx->dev, &g2_strm_swap, 0xf);
		hantro_reg_write(ctx->dev, &g2_dirmv_swap, 0xf);
		hantro_reg_write(ctx->dev, &g2_compress_swap, 0xf);
		hantro_reg_write(ctx->dev, &g2_ref_compress_bypass, 1);
	} else {
		hantro_reg_write(ctx->dev, &g2_strm_swap_old, 0x1f);
		hantro_reg_write(ctx->dev, &g2_pic_swap, 0x10);
		hantro_reg_write(ctx->dev, &g2_dirmv_swap_old, 0x10);
		hantro_reg_write(ctx->dev, &g2_tab0_swap_old, 0x10);
		hantro_reg_write(ctx->dev, &g2_tab1_swap_old, 0x10);
		hantro_reg_write(ctx->dev, &g2_tab2_swap_old, 0x10);
		hantro_reg_write(ctx->dev, &g2_tab3_swap_old, 0x10);
		hantro_reg_write(ctx->dev, &g2_rscan_swap, 0x10);
	}
	hantro_reg_write(ctx->dev, &g2_buswidth, BUS_WIDTH_128);
	hantro_reg_write(ctx->dev, &g2_max_burst, 16);
	hantro_reg_write(ctx->dev, &g2_apf_threshold, 8);
	hantro_reg_write(ctx->dev, &g2_clk_gate_e, 1);
	hantro_reg_write(ctx->dev, &g2_max_cb_size, 6);
	hantro_reg_write(ctx->dev, &g2_min_cb_size, 3);
	if (ctx->dev->variant->double_buffer)
		hantro_reg_write(ctx->dev, &g2_double_buffer_e, 1);

	config_output(ctx, dst, dec_params);

	if (!intra_only)
		config_ref_registers(ctx, dec_params, dst, mv_ref);

	config_tiles(ctx, dec_params, dst);
	config_segment(ctx, dec_params);
	config_loop_filter(ctx, dec_params);
	config_picture_dimensions(ctx, dst);
	config_bit_depth(ctx, dec_params);
	config_quant(ctx, dec_params);
	config_others(ctx, dec_params, intra_only, resolution_change);
	config_compound_reference(ctx, dec_params);
	config_probs(ctx, dec_params);
	config_counts(ctx);
	config_seg_map(ctx, dec_params, intra_only,
		       seg->flags & V4L2_VP9_SEGMENTATION_FLAG_UPDATE_MAP);
	config_source(ctx, dec_params, vb2_src);
}

int hantro_g2_vp9_dec_run(struct hantro_ctx *ctx)
{
	const struct v4l2_ctrl_vp9_frame *decode_params;
	struct vb2_v4l2_buffer *src;
	struct vb2_v4l2_buffer *dst;
	int ret;

	hantro_g2_check_idle(ctx->dev);

	ret = start_prepare_run(ctx, &decode_params);
	if (ret) {
		hantro_end_prepare_run(ctx);
		return ret;
	}

	src = hantro_get_src_buf(ctx);
	dst = hantro_get_dst_buf(ctx);

	config_registers(ctx, decode_params, src, dst);

	hantro_end_prepare_run(ctx);

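	/* Kick off decoding; the job is completed from the G2 interrupt handler. */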
	vdpu_write(ctx->dev, G2_REG_INTERRUPT_DEC_E, G2_REG_INTERRUPT);

	return 0;
}

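/*
 * Snapshot/restore the tx size and skip probabilities around the load_probs()
 * step of 6.1.2 refresh_probs(); for intra frames their forward updates must
 * survive the reload.
 */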
#define copy_tx_and_skip(p1, p2)				\
do {								\
	memcpy((p1)->tx8, (p2)->tx8, sizeof((p1)->tx8));	\
	memcpy((p1)->tx16, (p2)->tx16, sizeof((p1)->tx16));	\
	memcpy((p1)->tx32, (p2)->tx32, sizeof((p1)->tx32));	\
	memcpy((p1)->skip, (p2)->skip, sizeof((p1)->skip));	\
} while (0)

void hantro_g2_vp9_dec_done(struct hantro_ctx *ctx)
{
	struct hantro_vp9_dec_hw_ctx *vp9_ctx = &ctx->vp9_dec;
	unsigned int fctx_idx;

	if (!(vp9_ctx->cur.flags & V4L2_VP9_FRAME_FLAG_REFRESH_FRAME_CTX))
		goto out_update_last;

	fctx_idx = vp9_ctx->cur.frame_context_idx;

	if (!(vp9_ctx->cur.flags & V4L2_VP9_FRAME_FLAG_PARALLEL_DEC_MODE)) {
		/* error_resilient_mode == 0 && frame_parallel_decoding_mode == 0 */
		struct v4l2_vp9_frame_context *probs = &vp9_ctx->probability_tables;
		bool frame_is_intra = vp9_ctx->cur.flags &
		    (V4L2_VP9_FRAME_FLAG_KEY_FRAME | V4L2_VP9_FRAME_FLAG_INTRA_ONLY);
		struct tx_and_skip {
			u8 tx8[2][1];
			u8 tx16[2][2];
			u8 tx32[2][3];
			u8 skip[3];
		} _tx_skip, *tx_skip = &_tx_skip;
		struct v4l2_vp9_frame_symbol_counts *counts;
		struct symbol_counts *hantro_cnts;
		u32 tx16p[2][4];
		int i;

		/* buffer the forward-updated TX and skip probs */
		if (frame_is_intra)
			copy_tx_and_skip(tx_skip, probs);

		/* 6.1.2 refresh_probs(): load_probs() and load_probs2() */
		*probs = vp9_ctx->frame_context[fctx_idx];

		/* if FrameIsIntra then undo the effect of load_probs2() */
		if (frame_is_intra)
			copy_tx_and_skip(probs, tx_skip);

		counts = &vp9_ctx->cnts;
		hantro_cnts = vp9_ctx->misc.cpu + vp9_ctx->ctx_counters_offset;
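		/*
		 * The hardware appears to keep three tx16x16 counters per
		 * class while v4l2-vp9 expects four, so copy them over and
		 * zero-pad the last entry.
		 */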
		for (i = 0; i < ARRAY_SIZE(tx16p); ++i) {
			memcpy(tx16p[i],
			       hantro_cnts->tx16x16_count[i],
			       sizeof(hantro_cnts->tx16x16_count[0]));
			tx16p[i][3] = 0;
		}
		counts->tx16p = &tx16p;

		v4l2_vp9_adapt_coef_probs(probs, counts,
					  !vp9_ctx->last.valid ||
					  vp9_ctx->last.flags & V4L2_VP9_FRAME_FLAG_KEY_FRAME,
					  frame_is_intra);

		if (!frame_is_intra) {
			/* load_probs2() already done */
			u32 mv_mode[7][4];

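			/*
			 * The hardware counts inter modes in a different
			 * order than v4l2-vp9 expects; remap the counters
			 * accordingly.
			 */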
			for (i = 0; i < ARRAY_SIZE(mv_mode); ++i) {
				mv_mode[i][0] = hantro_cnts->inter_mode_counts[i][1][0];
				mv_mode[i][1] = hantro_cnts->inter_mode_counts[i][2][0];
				mv_mode[i][2] = hantro_cnts->inter_mode_counts[i][0][0];
				mv_mode[i][3] = hantro_cnts->inter_mode_counts[i][2][1];
			}
			counts->mv_mode = &mv_mode;
			v4l2_vp9_adapt_noncoef_probs(&vp9_ctx->probability_tables, counts,
						     vp9_ctx->cur.reference_mode,
						     vp9_ctx->cur.interpolation_filter,
						     vp9_ctx->cur.tx_mode, vp9_ctx->cur.flags);
		}
	}

	vp9_ctx->frame_context[fctx_idx] = vp9_ctx->probability_tables;

out_update_last:
	vp9_ctx->last = vp9_ctx->cur;
}