2 * Copyright 2012-15 Advanced Micro Devices, Inc.
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
11 * The above copyright notice and this permission notice shall be included in
12 * all copies or substantial portions of the Software.
14 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
17 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20 * OTHER DEALINGS IN THE SOFTWARE.
25 #include "dm_services.h"
28 #include "include/irq_service_interface.h"
29 #include "link_encoder.h"
30 #include "stream_encoder.h"
32 #include "timing_generator.h"
33 #include "transform.h"
35 #include "core_types.h"
36 #include "set_mode_types.h"
37 #include "virtual/virtual_stream_encoder.h"
38 #include "dpcd_defs.h"
40 #include "dce80/dce80_resource.h"
41 #include "dce100/dce100_resource.h"
42 #include "dce110/dce110_resource.h"
43 #include "dce112/dce112_resource.h"
44 #if defined(CONFIG_DRM_AMD_DC_DCN1_0)
45 #include "dcn10/dcn10_resource.h"
47 #include "dce120/dce120_resource.h"
49 #define DC_LOGGER_INIT(logger)
/*
 * resource_parse_asic_id() - map a hardware ASIC id (chip family + internal
 * revision) to the DCE/DCN display-engine version that drives resource-pool
 * selection in dc_create_resource_pool().
 *
 * NOTE(review): this extraction dropped structural lines (the `case FAMILY_*`
 * labels, `break`s, `else`, closing braces, `return`); the leading integers on
 * each line look like residual source line numbers. Left byte-identical.
 */
51 enum dce_version resource_parse_asic_id(struct hw_asic_id asic_id)
/* Default to UNKNOWN; each recognized family below overrides it. */
53 enum dce_version dc_version = DCE_VERSION_UNKNOWN;
54 switch (asic_id.chip_family) {
/* presumably the Sea Islands (CI) family — case label missing; TODO confirm */
57 dc_version = DCE_VERSION_8_0;
/* Kaveri-family: Kalindi/Bhavani/Godavari revisions are DCE 8.3, rest 8.1 */
60 if (ASIC_REV_IS_KALINDI(asic_id.hw_internal_rev) ||
61 ASIC_REV_IS_BHAVANI(asic_id.hw_internal_rev) ||
62 ASIC_REV_IS_GODAVARI(asic_id.hw_internal_rev))
63 dc_version = DCE_VERSION_8_3;
/* (else branch — `else` line missing in this extraction) */
65 dc_version = DCE_VERSION_8_1;
/* presumably Carrizo family; TODO confirm against full source */
68 dc_version = DCE_VERSION_11_0;
/* Volcanic Islands family: Tonga/Fiji are DCE 10.0 ... */
72 if (ASIC_REV_IS_TONGA_P(asic_id.hw_internal_rev) ||
73 ASIC_REV_IS_FIJI_P(asic_id.hw_internal_rev)) {
74 dc_version = DCE_VERSION_10_0;
/* ... Polaris 10/11/12 are DCE 11.2 ... */
77 if (ASIC_REV_IS_POLARIS10_P(asic_id.hw_internal_rev) ||
78 ASIC_REV_IS_POLARIS11_M(asic_id.hw_internal_rev) ||
79 ASIC_REV_IS_POLARIS12_V(asic_id.hw_internal_rev)) {
80 dc_version = DCE_VERSION_11_2;
/* ... and VegaM is DCE 11.22 */
82 if (ASIC_REV_IS_VEGAM(asic_id.hw_internal_rev))
83 dc_version = DCE_VERSION_11_22;
/* presumably Vega (AI) family */
86 dc_version = DCE_VERSION_12_0;
/* Raven / DCN 1.0 only when the DCN1.0 config option is enabled */
88 #if defined(CONFIG_DRM_AMD_DC_DCN1_0)
90 dc_version = DCN_VERSION_1_0;
/* default: unrecognized chip family stays UNKNOWN */
94 dc_version = DCE_VERSION_UNKNOWN;
/*
 * dc_create_resource_pool() - construct the per-ASIC resource pool for the
 * given display-engine version, then read the crystal (reference clock)
 * frequency from the VBIOS firmware info into the pool.
 *
 * Returns the created pool, or NULL (initial value) for an unhandled version.
 *
 * NOTE(review): this extraction dropped `break`s, the `default:` label, the
 * closing braces and the final `return res_pool;`. Left byte-identical.
 */
100 struct resource_pool *dc_create_resource_pool(
102 int num_virtual_links,
103 enum dce_version dc_version,
104 struct hw_asic_id asic_id)
106 struct resource_pool *res_pool = NULL;
/* Dispatch to the per-generation pool constructor. */
108 switch (dc_version) {
109 case DCE_VERSION_8_0:
110 res_pool = dce80_create_resource_pool(
111 num_virtual_links, dc);
113 case DCE_VERSION_8_1:
114 res_pool = dce81_create_resource_pool(
115 num_virtual_links, dc);
117 case DCE_VERSION_8_3:
118 res_pool = dce83_create_resource_pool(
119 num_virtual_links, dc);
121 case DCE_VERSION_10_0:
122 res_pool = dce100_create_resource_pool(
123 num_virtual_links, dc);
125 case DCE_VERSION_11_0:
/* DCE 11.0 is the only constructor that also needs the asic_id */
126 res_pool = dce110_create_resource_pool(
127 num_virtual_links, dc, asic_id);
129 case DCE_VERSION_11_2:
130 case DCE_VERSION_11_22:
131 res_pool = dce112_create_resource_pool(
132 num_virtual_links, dc);
134 case DCE_VERSION_12_0:
135 res_pool = dce120_create_resource_pool(
136 num_virtual_links, dc);
139 #if defined(CONFIG_DRM_AMD_DC_DCN1_0)
140 case DCN_VERSION_1_0:
141 res_pool = dcn10_create_resource_pool(
142 num_virtual_links, dc);
/* On success, fetch the crystal frequency from VBIOS as the ref clock. */
150 if (res_pool != NULL) {
151 struct dc_firmware_info fw_info = { { 0 } };
153 if (dc->ctx->dc_bios->funcs->get_firmware_info(
154 dc->ctx->dc_bios, &fw_info) == BP_RESULT_OK) {
155 res_pool->ref_clock_inKhz = fw_info.pll_info.crystal_frequency;
/* VBIOS firmware-info read failed — treated as a critical assertion */
157 ASSERT_CRITICAL(false);
/*
 * dc_destroy_resource_pool() - tear down dc->res_pool via its own destroy
 * vfunc (which takes &dc->res_pool, so it can NULL the pointer).
 * NOTE(review): braces (and any NULL guards) are missing from this extraction.
 */
163 void dc_destroy_resource_pool(struct dc *dc)
167 dc->res_pool->funcs->destroy(&dc->res_pool);
/*
 * update_num_audio() - derive audio capabilities from the DCE hardware straps.
 * DP audio is always supported; HDMI audio (native and on-dongle) requires
 * the HDMI-disable strap to be clear and bit 1 of the pinstraps-audio field.
 *
 * NOTE(review): the switch bodies (the `*num_audio = ...` assignments and
 * `break`s) are missing from this extraction. Left byte-identical.
 */
173 static void update_num_audio(
174 const struct resource_straps *straps,
175 unsigned int *num_audio,
176 struct audio_support *aud_support)
/* DP audio is unconditionally available. */
178 aud_support->dp_audio = true;
179 aud_support->hdmi_audio_native = false;
180 aud_support->hdmi_audio_on_dongle = false;
182 if (straps->hdmi_disable == 0) {
/* bit 1 of the audio pinstraps gates HDMI audio support */
183 if (straps->dc_pinstraps_audio & 0x2) {
184 aud_support->hdmi_audio_on_dongle = true;
185 aud_support->hdmi_audio_native = true;
/* Strap-encoded stream count selects how *num_audio is set (bodies missing). */
189 switch (straps->audio_stream_number) {
190 case 0: /* multi streams supported */
192 case 1: /* multi streams not supported */
196 DC_ERR("DC: unexpected audio fuse!\n");
/*
 * resource_construct() - common second-stage pool construction: reads the DCE
 * straps, creates audio endpoints, stream encoders (hardware + one virtual
 * encoder per virtual link), and the HW sequencer, using the per-generation
 * create_funcs vtable.
 *
 * NOTE(review): loop braces, `continue`/`break`/`return` lines and the
 * declaration of `i` are missing from this extraction. Left byte-identical.
 */
200 bool resource_construct(
201 unsigned int num_virtual_links,
203 struct resource_pool *pool,
204 const struct resource_create_funcs *create_funcs)
206 struct dc_context *ctx = dc->ctx;
207 const struct resource_caps *caps = pool->res_cap;
209 unsigned int num_audio = caps->num_audio;
210 struct resource_straps straps = {0};
/* Straps read is optional per generation. */
212 if (create_funcs->read_dce_straps)
213 create_funcs->read_dce_straps(dc->ctx, &straps);
215 pool->audio_count = 0;
216 if (create_funcs->create_audio) {
217 /* find the total number of streams available via the
218 * AZALIA_F0_CODEC_PIN_CONTROL_RESPONSE_CONFIGURATION_DEFAULT
219 * registers (one for each pin) starting from pin 1
220 * up to the max number of audio pins.
221 * We stop on the first pin where
222 * PORT_CONNECTIVITY == 1 (as instructed by HW team).
224 update_num_audio(&straps, &num_audio, &pool->audio_support);
225 for (i = 0; i < caps->num_audio; i++) {
226 struct audio *aud = create_funcs->create_audio(ctx, i);
/* (NULL check missing in this extraction) */
229 DC_ERR("DC: failed to create audio!\n");
/* Skip pins whose endpoint is not wired up. */
232 if (!aud->funcs->endpoint_valid(aud)) {
233 aud->funcs->destroy(&aud);
236 pool->audios[i] = aud;
241 pool->stream_enc_count = 0;
242 if (create_funcs->create_stream_encoder) {
243 for (i = 0; i < caps->num_stream_encoder; i++) {
244 pool->stream_enc[i] = create_funcs->create_stream_encoder(i, ctx);
245 if (pool->stream_enc[i] == NULL)
246 DC_ERR("DC: failed to create stream_encoder!\n");
247 pool->stream_enc_count++;
/* Fewer audio endpoints than encoders => audio must be assigned dynamically. */
250 dc->caps.dynamic_audio = false;
251 if (pool->audio_count < pool->stream_enc_count) {
252 dc->caps.dynamic_audio = true;
/* One virtual stream encoder per virtual link, appended after the real ones. */
254 for (i = 0; i < num_virtual_links; i++) {
255 pool->stream_enc[pool->stream_enc_count] =
256 virtual_stream_encoder_create(
258 if (pool->stream_enc[pool->stream_enc_count] == NULL) {
259 DC_ERR("DC: failed to create stream_encoder!\n");
262 pool->stream_enc_count++;
265 dc->hwseq = create_funcs->create_hwseq(ctx);
/*
 * find_matching_clock_source() - linear search of the pool's clock sources
 * for the given pointer; presumably returns its index, or a sentinel (-1)
 * when absent — the `return` lines are missing from this extraction.
 */
269 static int find_matching_clock_source(
270 const struct resource_pool *pool,
271 struct clock_source *clock_source)
276 for (i = 0; i < pool->clk_src_count; i++) {
277 if (pool->clock_sources[i] == clock_source)
/*
 * resource_unreference_clock_source() - drop one reference on a clock source:
 * decrement its per-index refcount in the resource context, and additionally
 * the dedicated DP clock-source refcount when it is the pool's DP source.
 * NOTE(review): the `i != -1` guard/braces are missing from this extraction.
 */
283 void resource_unreference_clock_source(
284 struct resource_context *res_ctx,
285 const struct resource_pool *pool,
286 struct clock_source *clock_source)
288 int i = find_matching_clock_source(pool, clock_source);
291 res_ctx->clock_source_ref_count[i]--;
293 if (pool->dp_clock_source == clock_source)
294 res_ctx->dp_clock_source_ref_count--;
/*
 * resource_reference_clock_source() - mirror of
 * resource_unreference_clock_source(): take one reference on a clock source
 * (per-index count, plus the DP count when it is the pool's DP source).
 * NOTE(review): the `i != -1` guard/braces are missing from this extraction.
 */
297 void resource_reference_clock_source(
298 struct resource_context *res_ctx,
299 const struct resource_pool *pool,
300 struct clock_source *clock_source)
302 int i = find_matching_clock_source(pool, clock_source);
305 res_ctx->clock_source_ref_count[i]++;
307 if (pool->dp_clock_source == clock_source)
308 res_ctx->dp_clock_source_ref_count++;
/*
 * resource_get_clock_source_reference() - read the current refcount for a
 * clock source: the per-index count when it is in the pool's array, or the
 * dedicated DP count when it is the pool's DP clock source.
 * NOTE(review): guard lines and the final fallback return are missing here.
 */
311 int resource_get_clock_source_reference(
312 struct resource_context *res_ctx,
313 const struct resource_pool *pool,
314 struct clock_source *clock_source)
316 int i = find_matching_clock_source(pool, clock_source);
319 return res_ctx->clock_source_ref_count[i];
321 if (pool->dp_clock_source == clock_source)
322 return res_ctx->dp_clock_source_ref_count;
/*
 * resource_are_streams_timing_synchronizable() - two streams can be timing-
 * synchronized only if their h/v totals, addressable sizes, pixel clocks and
 * clamping color depth all match; phy pixel clocks may differ only when both
 * streams are DP. Each mismatch rejects the pair.
 * NOTE(review): `return false;` lines and final `return true;` are missing
 * from this extraction. Left byte-identical.
 */
327 bool resource_are_streams_timing_synchronizable(
328 struct dc_stream_state *stream1,
329 struct dc_stream_state *stream2)
331 if (stream1->timing.h_total != stream2->timing.h_total)
334 if (stream1->timing.v_total != stream2->timing.v_total)
337 if (stream1->timing.h_addressable
338 != stream2->timing.h_addressable)
341 if (stream1->timing.v_addressable
342 != stream2->timing.v_addressable)
345 if (stream1->timing.pix_clk_khz
346 != stream2->timing.pix_clk_khz)
349 if (stream1->clamping.c_depth != stream2->clamping.c_depth)
/* phy pixel clock mismatch is tolerated only when both streams are DP */
352 if (stream1->phy_pix_clk != stream2->phy_pix_clk
353 && (!dc_is_dp_signal(stream1->signal)
354 || !dc_is_dp_signal(stream2->signal)))
/*
 * is_dp_and_hdmi_sharable() - whether a DP and an HDMI stream may share one
 * clock source: disallowed when the ASIC disables DP clock sharing, or when
 * either stream's clamping depth is not 8-bit (888).
 * NOTE(review): `return` lines are missing from this extraction.
 */
359 static bool is_dp_and_hdmi_sharable(
360 struct dc_stream_state *stream1,
361 struct dc_stream_state *stream2)
363 if (stream1->ctx->dc->caps.disable_dp_clk_share)
366 if (stream1->clamping.c_depth != COLOR_DEPTH_888 ||
367 stream2->clamping.c_depth != COLOR_DEPTH_888)
/*
 * is_sharable_clk_src() - whether `pipe` may reuse the clock source already
 * held by `pipe_with_clk_src`. Rejects: no clock source, virtual signal,
 * DP/HDMI mixes that fail is_dp_and_hdmi_sharable(), HDMI paired with a
 * dual-link signal (either direction), and timing-unsynchronizable streams.
 * NOTE(review): `return false;` lines and the final `return true;` are
 * missing from this extraction. Left byte-identical.
 */
374 static bool is_sharable_clk_src(
375 const struct pipe_ctx *pipe_with_clk_src,
376 const struct pipe_ctx *pipe)
378 if (pipe_with_clk_src->clock_source == NULL)
381 if (pipe_with_clk_src->stream->signal == SIGNAL_TYPE_VIRTUAL)
384 if (dc_is_dp_signal(pipe_with_clk_src->stream->signal) ||
385 (dc_is_dp_signal(pipe->stream->signal) &&
386 !is_dp_and_hdmi_sharable(pipe_with_clk_src->stream,
/* HDMI cannot share with dual-link, in either pairing order */
390 if (dc_is_hdmi_signal(pipe_with_clk_src->stream->signal)
391 && dc_is_dual_link_signal(pipe->stream->signal))
394 if (dc_is_hdmi_signal(pipe->stream->signal)
395 && dc_is_dual_link_signal(pipe_with_clk_src->stream->signal))
398 if (!resource_are_streams_timing_synchronizable(
399 pipe_with_clk_src->stream, pipe->stream))
/*
 * resource_find_used_clk_src_for_sharing() - scan every pipe context for a
 * clock source this pipe may share (per is_sharable_clk_src()); returns the
 * first sharable one. The fallback `return NULL;` is missing from this
 * extraction.
 */
405 struct clock_source *resource_find_used_clk_src_for_sharing(
406 struct resource_context *res_ctx,
407 struct pipe_ctx *pipe_ctx)
411 for (i = 0; i < MAX_PIPES; i++) {
412 if (is_sharable_clk_src(&res_ctx->pipe_ctx[i], pipe_ctx))
413 return res_ctx->pipe_ctx[i].clock_source;
/*
 * convert_pixel_format_to_dalsurface() - collapse the public surface pixel
 * format enum into the internal DAL pixel format used by the scaler. Note
 * ARGB/ABGR channel orderings map to the same DAL value, and ARGB1555 maps
 * to RGB565. Unhandled formats fall through to PIXEL_FORMAT_UNKNOWN.
 * NOTE(review): `break;` lines and closing braces are missing from this
 * extraction. Left byte-identical.
 */
419 static enum pixel_format convert_pixel_format_to_dalsurface(
420 enum surface_pixel_format surface_pixel_format)
422 enum pixel_format dal_pixel_format = PIXEL_FORMAT_UNKNOWN;
424 switch (surface_pixel_format) {
425 case SURFACE_PIXEL_FORMAT_GRPH_PALETA_256_COLORS:
426 dal_pixel_format = PIXEL_FORMAT_INDEX8;
/* ARGB1555 is deliberately treated as RGB565 internally */
428 case SURFACE_PIXEL_FORMAT_GRPH_ARGB1555:
429 dal_pixel_format = PIXEL_FORMAT_RGB565;
431 case SURFACE_PIXEL_FORMAT_GRPH_RGB565:
432 dal_pixel_format = PIXEL_FORMAT_RGB565;
434 case SURFACE_PIXEL_FORMAT_GRPH_ARGB8888:
435 dal_pixel_format = PIXEL_FORMAT_ARGB8888;
437 case SURFACE_PIXEL_FORMAT_GRPH_ABGR8888:
438 dal_pixel_format = PIXEL_FORMAT_ARGB8888;
440 case SURFACE_PIXEL_FORMAT_GRPH_ARGB2101010:
441 dal_pixel_format = PIXEL_FORMAT_ARGB2101010;
443 case SURFACE_PIXEL_FORMAT_GRPH_ABGR2101010:
444 dal_pixel_format = PIXEL_FORMAT_ARGB2101010;
446 case SURFACE_PIXEL_FORMAT_GRPH_ABGR2101010_XR_BIAS:
447 dal_pixel_format = PIXEL_FORMAT_ARGB2101010_XRBIAS;
449 case SURFACE_PIXEL_FORMAT_GRPH_ABGR16161616F:
450 case SURFACE_PIXEL_FORMAT_GRPH_ARGB16161616F:
451 dal_pixel_format = PIXEL_FORMAT_FP16;
/* video 4:2:0 formats, 8bpc and 10bpc variants */
453 case SURFACE_PIXEL_FORMAT_VIDEO_420_YCbCr:
454 case SURFACE_PIXEL_FORMAT_VIDEO_420_YCrCb:
455 dal_pixel_format = PIXEL_FORMAT_420BPP8;
457 case SURFACE_PIXEL_FORMAT_VIDEO_420_10bpc_YCbCr:
458 case SURFACE_PIXEL_FORMAT_VIDEO_420_10bpc_YCrCb:
459 dal_pixel_format = PIXEL_FORMAT_420BPP10;
461 case SURFACE_PIXEL_FORMAT_GRPH_ARGB16161616:
463 dal_pixel_format = PIXEL_FORMAT_UNKNOWN;
466 return dal_pixel_format;
/*
 * rect_swap_helper() - transpose a rect in place (width<->height, x<->y),
 * used when a plane is rotated 90/270 degrees. (Braces missing here.)
 */
469 static void rect_swap_helper(struct rect *rect)
471 swap(rect->height, rect->width);
472 swap(rect->x, rect->y);
/*
 * calculate_viewport() - compute the plane's surface-space viewport (and the
 * half-resolution chroma viewport for 4:2:0 formats) from the intersection
 * of the stream source rect and the plane clip rect, scaled by the
 * dst_rect -> src_rect ratio, then adjusted for pipe splitting and rotation.
 *
 * NOTE(review): interior lines (3D-format handling under the view_format
 * check, split braces, closing braces) are missing from this extraction;
 * leading integers are residual line numbers. Left byte-identical.
 */
475 static void calculate_viewport(struct pipe_ctx *pipe_ctx)
477 const struct dc_plane_state *plane_state = pipe_ctx->plane_state;
478 const struct dc_stream_state *stream = pipe_ctx->stream;
479 struct scaler_data *data = &pipe_ctx->plane_res.scl_data;
480 struct rect surf_src = plane_state->src_rect;
481 struct rect clip = { 0 };
/* 4:2:0 formats have chroma at half resolution in both axes */
482 int vpc_div = (data->format == PIXEL_FORMAT_420BPP8
483 || data->format == PIXEL_FORMAT_420BPP10) ? 2 : 1;
/* pri = this pipe feeds a bottom pipe with the same plane (left/top half);
 * sec = this pipe sits under a top pipe with the same plane (right/bottom) */
484 bool pri_split = pipe_ctx->bottom_pipe &&
485 pipe_ctx->bottom_pipe->plane_state == pipe_ctx->plane_state;
486 bool sec_split = pipe_ctx->top_pipe &&
487 pipe_ctx->top_pipe->plane_state == pipe_ctx->plane_state;
/* stereo 3D side-by-side / top-and-bottom handling (body missing here) */
489 if (stream->view_format == VIEW_3D_FORMAT_SIDE_BY_SIDE ||
490 stream->view_format == VIEW_3D_FORMAT_TOP_AND_BOTTOM) {
/* Work in post-rotation coordinates for 90/270. */
495 if (pipe_ctx->plane_state->rotation == ROTATION_ANGLE_90 ||
496 pipe_ctx->plane_state->rotation == ROTATION_ANGLE_270)
497 rect_swap_helper(&surf_src);
499 /* The actual clip is an intersection between stream
500 * source and surface clip
502 clip.x = stream->src.x > plane_state->clip_rect.x ?
503 stream->src.x : plane_state->clip_rect.x;
505 clip.width = stream->src.x + stream->src.width <
506 plane_state->clip_rect.x + plane_state->clip_rect.width ?
507 stream->src.x + stream->src.width - clip.x :
508 plane_state->clip_rect.x + plane_state->clip_rect.width - clip.x ;
510 clip.y = stream->src.y > plane_state->clip_rect.y ?
511 stream->src.y : plane_state->clip_rect.y;
513 clip.height = stream->src.y + stream->src.height <
514 plane_state->clip_rect.y + plane_state->clip_rect.height ?
515 stream->src.y + stream->src.height - clip.y :
516 plane_state->clip_rect.y + plane_state->clip_rect.height - clip.y ;
518 /* offset = surf_src.ofs + (clip.ofs - surface->dst_rect.ofs) * scl_ratio
519 * num_pixels = clip.num_pix * scl_ratio
521 data->viewport.x = surf_src.x + (clip.x - plane_state->dst_rect.x) *
522 surf_src.width / plane_state->dst_rect.width;
523 data->viewport.width = clip.width *
524 surf_src.width / plane_state->dst_rect.width;
526 data->viewport.y = surf_src.y + (clip.y - plane_state->dst_rect.y) *
527 surf_src.height / plane_state->dst_rect.height;
528 data->viewport.height = clip.height *
529 surf_src.height / plane_state->dst_rect.height;
531 /* Round down, compensate in init */
532 data->viewport_c.x = data->viewport.x / vpc_div;
533 data->viewport_c.y = data->viewport.y / vpc_div;
/* odd luma offset => half-pixel chroma init compensation */
534 data->inits.h_c = (data->viewport.x % vpc_div) != 0 ?
535 dc_fixpt_half : dc_fixpt_zero;
536 data->inits.v_c = (data->viewport.y % vpc_div) != 0 ?
537 dc_fixpt_half : dc_fixpt_zero;
538 /* Round up, assume original video size always even dimensions */
539 data->viewport_c.width = (data->viewport.width + vpc_div - 1) / vpc_div;
540 data->viewport_c.height = (data->viewport.height + vpc_div - 1) / vpc_div;
/* Split handling: secondary pipe takes the right half (guard line missing) */
544 data->viewport.x += data->viewport.width / 2;
545 data->viewport_c.x += data->viewport_c.width / 2;
546 /* Ceil offset pipe */
547 data->viewport.width = (data->viewport.width + 1) / 2;
548 data->viewport_c.width = (data->viewport_c.width + 1) / 2;
549 } else if (pri_split) {
550 data->viewport.width /= 2;
551 data->viewport_c.width /= 2;
/* Hand back rotated viewports for 90/270 planes. */
554 if (plane_state->rotation == ROTATION_ANGLE_90 ||
555 plane_state->rotation == ROTATION_ANGLE_270) {
556 rect_swap_helper(&data->viewport_c);
557 rect_swap_helper(&data->viewport);
/*
 * calculate_recout() - compute the rectangle on the stream destination
 * (recout) that this plane occupies: clip rect scaled by the stream
 * src -> dst ratio, clamped to the stream destination, then halved for
 * h/v pipe splits. Also emits recout_full, the unclipped recout that the
 * init/viewport adjustment pass uses as its reference origin.
 *
 * NOTE(review): some interior lines (a division continuation, split braces,
 * closing braces) are missing from this extraction. Left byte-identical.
 */
561 static void calculate_recout(struct pipe_ctx *pipe_ctx, struct rect *recout_full)
563 const struct dc_plane_state *plane_state = pipe_ctx->plane_state;
564 const struct dc_stream_state *stream = pipe_ctx->stream;
565 struct rect surf_src = plane_state->src_rect;
566 struct rect surf_clip = plane_state->clip_rect;
567 bool pri_split = pipe_ctx->bottom_pipe &&
568 pipe_ctx->bottom_pipe->plane_state == pipe_ctx->plane_state;
569 bool sec_split = pipe_ctx->top_pipe &&
570 pipe_ctx->top_pipe->plane_state == pipe_ctx->plane_state;
571 bool top_bottom_split = stream->view_format == VIEW_3D_FORMAT_TOP_AND_BOTTOM;
/* Work in post-rotation surface coordinates for 90/270. */
573 if (pipe_ctx->plane_state->rotation == ROTATION_ANGLE_90 ||
574 pipe_ctx->plane_state->rotation == ROTATION_ANGLE_270)
575 rect_swap_helper(&surf_src);
/* recout.x: stream dst origin, shifted right if the clip starts inside src */
577 pipe_ctx->plane_res.scl_data.recout.x = stream->dst.x;
578 if (stream->src.x < surf_clip.x)
579 pipe_ctx->plane_res.scl_data.recout.x += (surf_clip.x
580 - stream->src.x) * stream->dst.width
/* (the `/ stream->src.width;` continuation line is missing here) */
583 pipe_ctx->plane_res.scl_data.recout.width = surf_clip.width *
584 stream->dst.width / stream->src.width;
/* clamp recout to the right edge of the stream destination */
585 if (pipe_ctx->plane_res.scl_data.recout.width + pipe_ctx->plane_res.scl_data.recout.x >
586 stream->dst.x + stream->dst.width)
587 pipe_ctx->plane_res.scl_data.recout.width =
588 stream->dst.x + stream->dst.width
589 - pipe_ctx->plane_res.scl_data.recout.x;
591 pipe_ctx->plane_res.scl_data.recout.y = stream->dst.y;
592 if (stream->src.y < surf_clip.y)
593 pipe_ctx->plane_res.scl_data.recout.y += (surf_clip.y
594 - stream->src.y) * stream->dst.height
595 / stream->src.height;
597 pipe_ctx->plane_res.scl_data.recout.height = surf_clip.height *
598 stream->dst.height / stream->src.height;
/* clamp recout to the bottom edge of the stream destination */
599 if (pipe_ctx->plane_res.scl_data.recout.height + pipe_ctx->plane_res.scl_data.recout.y >
600 stream->dst.y + stream->dst.height)
601 pipe_ctx->plane_res.scl_data.recout.height =
602 stream->dst.y + stream->dst.height
603 - pipe_ctx->plane_res.scl_data.recout.y;
605 /* Handle h & vsplit */
606 if (sec_split && top_bottom_split) {
607 pipe_ctx->plane_res.scl_data.recout.y +=
608 pipe_ctx->plane_res.scl_data.recout.height / 2;
609 /* Floor primary pipe, ceil 2ndary pipe */
610 pipe_ctx->plane_res.scl_data.recout.height =
611 (pipe_ctx->plane_res.scl_data.recout.height + 1) / 2;
612 } else if (pri_split && top_bottom_split)
613 pipe_ctx->plane_res.scl_data.recout.height /= 2;
614 else if (pri_split || sec_split) {
615 /* HMirror XOR Secondary_pipe XOR Rotation_180 */
616 bool right_view = (sec_split != plane_state->horizontal_mirror) !=
617 (plane_state->rotation == ROTATION_ANGLE_180);
619 if (plane_state->rotation == ROTATION_ANGLE_90
620 || plane_state->rotation == ROTATION_ANGLE_270)
621 /* Secondary_pipe XOR Rotation_270 */
622 right_view = (plane_state->rotation == ROTATION_ANGLE_270) != sec_split;
/* right half: shift x and ceil width (the `if (right_view)` line is missing) */
625 pipe_ctx->plane_res.scl_data.recout.x +=
626 pipe_ctx->plane_res.scl_data.recout.width / 2;
627 /* Ceil offset pipe */
628 pipe_ctx->plane_res.scl_data.recout.width =
629 (pipe_ctx->plane_res.scl_data.recout.width + 1) / 2;
631 pipe_ctx->plane_res.scl_data.recout.width /= 2;
634 /* Unclipped recout offset = stream dst offset + ((surf dst offset - stream surf_src offset)
635 * * 1/ stream scaling ratio) - (surf surf_src offset * 1/ full scl
638 recout_full->x = stream->dst.x + (plane_state->dst_rect.x - stream->src.x)
639 * stream->dst.width / stream->src.width -
640 surf_src.x * plane_state->dst_rect.width / surf_src.width
641 * stream->dst.width / stream->src.width;
642 recout_full->y = stream->dst.y + (plane_state->dst_rect.y - stream->src.y)
643 * stream->dst.height / stream->src.height -
644 surf_src.y * plane_state->dst_rect.height / surf_src.height
645 * stream->dst.height / stream->src.height;
647 recout_full->width = plane_state->dst_rect.width
648 * stream->dst.width / stream->src.width;
649 recout_full->height = plane_state->dst_rect.height
650 * stream->dst.height / stream->src.height;
/*
 * calculate_scaling_ratios() - compute the fixed-point horizontal/vertical
 * scaling ratios as (surface src / surface dst) further scaled by the
 * stream src/dst ratio, doubled on one axis for stereo 3D formats, halved
 * for chroma on 4:2:0 formats, and truncated to 19 fractional bits.
 *
 * NOTE(review): the numerator lines of the two dc_fixpt_from_fraction()
 * calls (surf_src.width / surf_src.height) are missing from this
 * extraction, as are closing braces. Left byte-identical.
 */
653 static void calculate_scaling_ratios(struct pipe_ctx *pipe_ctx)
655 const struct dc_plane_state *plane_state = pipe_ctx->plane_state;
656 const struct dc_stream_state *stream = pipe_ctx->stream;
657 struct rect surf_src = plane_state->src_rect;
658 const int in_w = stream->src.width;
659 const int in_h = stream->src.height;
660 const int out_w = stream->dst.width;
661 const int out_h = stream->dst.height;
/* Work in post-rotation surface coordinates for 90/270. */
663 if (pipe_ctx->plane_state->rotation == ROTATION_ANGLE_90 ||
664 pipe_ctx->plane_state->rotation == ROTATION_ANGLE_270)
665 rect_swap_helper(&surf_src);
/* base ratios: surface src over surface dst (numerator lines missing) */
667 pipe_ctx->plane_res.scl_data.ratios.horz = dc_fixpt_from_fraction(
669 plane_state->dst_rect.width);
670 pipe_ctx->plane_res.scl_data.ratios.vert = dc_fixpt_from_fraction(
672 plane_state->dst_rect.height);
/* stereo 3D packs two views into one frame => double one axis */
674 if (stream->view_format == VIEW_3D_FORMAT_SIDE_BY_SIDE)
675 pipe_ctx->plane_res.scl_data.ratios.horz.value *= 2;
676 else if (stream->view_format == VIEW_3D_FORMAT_TOP_AND_BOTTOM)
677 pipe_ctx->plane_res.scl_data.ratios.vert.value *= 2;
/* fold in the stream src -> dst scaling on the raw fixed-point value */
679 pipe_ctx->plane_res.scl_data.ratios.vert.value = div64_s64(
680 pipe_ctx->plane_res.scl_data.ratios.vert.value * in_h, out_h);
681 pipe_ctx->plane_res.scl_data.ratios.horz.value = div64_s64(
682 pipe_ctx->plane_res.scl_data.ratios.horz.value * in_w, out_w);
684 pipe_ctx->plane_res.scl_data.ratios.horz_c = pipe_ctx->plane_res.scl_data.ratios.horz;
685 pipe_ctx->plane_res.scl_data.ratios.vert_c = pipe_ctx->plane_res.scl_data.ratios.vert;
/* 4:2:0 chroma is half resolution in both axes */
687 if (pipe_ctx->plane_res.scl_data.format == PIXEL_FORMAT_420BPP8
688 || pipe_ctx->plane_res.scl_data.format == PIXEL_FORMAT_420BPP10) {
689 pipe_ctx->plane_res.scl_data.ratios.horz_c.value /= 2;
690 pipe_ctx->plane_res.scl_data.ratios.vert_c.value /= 2;
/* limit ratio precision to 19 fractional bits for the hardware */
692 pipe_ctx->plane_res.scl_data.ratios.horz = dc_fixpt_truncate(
693 pipe_ctx->plane_res.scl_data.ratios.horz, 19);
694 pipe_ctx->plane_res.scl_data.ratios.vert = dc_fixpt_truncate(
695 pipe_ctx->plane_res.scl_data.ratios.vert, 19);
696 pipe_ctx->plane_res.scl_data.ratios.horz_c = dc_fixpt_truncate(
697 pipe_ctx->plane_res.scl_data.ratios.horz_c, 19);
698 pipe_ctx->plane_res.scl_data.ratios.vert_c = dc_fixpt_truncate(
699 pipe_ctx->plane_res.scl_data.ratios.vert_c, 19);
702 static void calculate_inits_and_adj_vp(struct pipe_ctx *pipe_ctx, struct rect *recout_full)
704 struct scaler_data *data = &pipe_ctx->plane_res.scl_data;
705 struct rect src = pipe_ctx->plane_state->src_rect;
706 int vpc_div = (data->format == PIXEL_FORMAT_420BPP8
707 || data->format == PIXEL_FORMAT_420BPP10) ? 2 : 1;
708 bool flip_vert_scan_dir = false, flip_horz_scan_dir = false;
711 * Need to calculate the scan direction for viewport to make adjustments
713 if (pipe_ctx->plane_state->rotation == ROTATION_ANGLE_180) {
714 flip_vert_scan_dir = true;
715 flip_horz_scan_dir = true;
716 } else if (pipe_ctx->plane_state->rotation == ROTATION_ANGLE_90)
717 flip_vert_scan_dir = true;
718 else if (pipe_ctx->plane_state->rotation == ROTATION_ANGLE_270)
719 flip_horz_scan_dir = true;
721 if (pipe_ctx->plane_state->rotation == ROTATION_ANGLE_90 ||
722 pipe_ctx->plane_state->rotation == ROTATION_ANGLE_270) {
723 rect_swap_helper(&src);
724 rect_swap_helper(&data->viewport_c);
725 rect_swap_helper(&data->viewport);
726 } else if (pipe_ctx->plane_state->horizontal_mirror)
727 flip_horz_scan_dir = !flip_horz_scan_dir;
730 * Init calculated according to formula:
731 * init = (scaling_ratio + number_of_taps + 1) / 2
732 * init_bot = init + scaling_ratio
733 * init_c = init + truncated_vp_c_offset(from calculate viewport)
735 data->inits.h = dc_fixpt_truncate(dc_fixpt_div_int(
736 dc_fixpt_add_int(data->ratios.horz, data->taps.h_taps + 1), 2), 19);
738 data->inits.h_c = dc_fixpt_truncate(dc_fixpt_add(data->inits.h_c, dc_fixpt_div_int(
739 dc_fixpt_add_int(data->ratios.horz_c, data->taps.h_taps_c + 1), 2)), 19);
741 data->inits.v = dc_fixpt_truncate(dc_fixpt_div_int(
742 dc_fixpt_add_int(data->ratios.vert, data->taps.v_taps + 1), 2), 19);
744 data->inits.v_c = dc_fixpt_truncate(dc_fixpt_add(data->inits.v_c, dc_fixpt_div_int(
745 dc_fixpt_add_int(data->ratios.vert_c, data->taps.v_taps_c + 1), 2)), 19);
747 if (!flip_horz_scan_dir) {
748 /* Adjust for viewport end clip-off */
749 if ((data->viewport.x + data->viewport.width) < (src.x + src.width)) {
750 int vp_clip = src.x + src.width - data->viewport.width - data->viewport.x;
751 int int_part = dc_fixpt_floor(
752 dc_fixpt_sub(data->inits.h, data->ratios.horz));
754 int_part = int_part > 0 ? int_part : 0;
755 data->viewport.width += int_part < vp_clip ? int_part : vp_clip;
757 if ((data->viewport_c.x + data->viewport_c.width) < (src.x + src.width) / vpc_div) {
758 int vp_clip = (src.x + src.width) / vpc_div -
759 data->viewport_c.width - data->viewport_c.x;
760 int int_part = dc_fixpt_floor(
761 dc_fixpt_sub(data->inits.h_c, data->ratios.horz_c));
763 int_part = int_part > 0 ? int_part : 0;
764 data->viewport_c.width += int_part < vp_clip ? int_part : vp_clip;
767 /* Adjust for non-0 viewport offset */
768 if (data->viewport.x) {
771 data->inits.h = dc_fixpt_add(data->inits.h, dc_fixpt_mul_int(
772 data->ratios.horz, data->recout.x - recout_full->x));
773 int_part = dc_fixpt_floor(data->inits.h) - data->viewport.x;
774 if (int_part < data->taps.h_taps) {
775 int int_adj = data->viewport.x >= (data->taps.h_taps - int_part) ?
776 (data->taps.h_taps - int_part) : data->viewport.x;
777 data->viewport.x -= int_adj;
778 data->viewport.width += int_adj;
780 } else if (int_part > data->taps.h_taps) {
781 data->viewport.x += int_part - data->taps.h_taps;
782 data->viewport.width -= int_part - data->taps.h_taps;
783 int_part = data->taps.h_taps;
785 data->inits.h.value &= 0xffffffff;
786 data->inits.h = dc_fixpt_add_int(data->inits.h, int_part);
789 if (data->viewport_c.x) {
792 data->inits.h_c = dc_fixpt_add(data->inits.h_c, dc_fixpt_mul_int(
793 data->ratios.horz_c, data->recout.x - recout_full->x));
794 int_part = dc_fixpt_floor(data->inits.h_c) - data->viewport_c.x;
795 if (int_part < data->taps.h_taps_c) {
796 int int_adj = data->viewport_c.x >= (data->taps.h_taps_c - int_part) ?
797 (data->taps.h_taps_c - int_part) : data->viewport_c.x;
798 data->viewport_c.x -= int_adj;
799 data->viewport_c.width += int_adj;
801 } else if (int_part > data->taps.h_taps_c) {
802 data->viewport_c.x += int_part - data->taps.h_taps_c;
803 data->viewport_c.width -= int_part - data->taps.h_taps_c;
804 int_part = data->taps.h_taps_c;
806 data->inits.h_c.value &= 0xffffffff;
807 data->inits.h_c = dc_fixpt_add_int(data->inits.h_c, int_part);
810 /* Adjust for non-0 viewport offset */
811 if (data->viewport.x) {
812 int int_part = dc_fixpt_floor(
813 dc_fixpt_sub(data->inits.h, data->ratios.horz));
815 int_part = int_part > 0 ? int_part : 0;
816 data->viewport.width += int_part < data->viewport.x ? int_part : data->viewport.x;
817 data->viewport.x -= int_part < data->viewport.x ? int_part : data->viewport.x;
819 if (data->viewport_c.x) {
820 int int_part = dc_fixpt_floor(
821 dc_fixpt_sub(data->inits.h_c, data->ratios.horz_c));
823 int_part = int_part > 0 ? int_part : 0;
824 data->viewport_c.width += int_part < data->viewport_c.x ? int_part : data->viewport_c.x;
825 data->viewport_c.x -= int_part < data->viewport_c.x ? int_part : data->viewport_c.x;
828 /* Adjust for viewport end clip-off */
829 if ((data->viewport.x + data->viewport.width) < (src.x + src.width)) {
831 int end_offset = src.x + src.width
832 - data->viewport.x - data->viewport.width;
835 * this is init if vp had no offset, keep in mind this is from the
836 * right side of vp due to scan direction
838 data->inits.h = dc_fixpt_add(data->inits.h, dc_fixpt_mul_int(
839 data->ratios.horz, data->recout.x - recout_full->x));
841 * this is the difference between first pixel of viewport available to read
842 * and init position, takning into account scan direction
844 int_part = dc_fixpt_floor(data->inits.h) - end_offset;
845 if (int_part < data->taps.h_taps) {
846 int int_adj = end_offset >= (data->taps.h_taps - int_part) ?
847 (data->taps.h_taps - int_part) : end_offset;
848 data->viewport.width += int_adj;
850 } else if (int_part > data->taps.h_taps) {
851 data->viewport.width += int_part - data->taps.h_taps;
852 int_part = data->taps.h_taps;
854 data->inits.h.value &= 0xffffffff;
855 data->inits.h = dc_fixpt_add_int(data->inits.h, int_part);
858 if ((data->viewport_c.x + data->viewport_c.width) < (src.x + src.width) / vpc_div) {
860 int end_offset = (src.x + src.width) / vpc_div
861 - data->viewport_c.x - data->viewport_c.width;
864 * this is init if vp had no offset, keep in mind this is from the
865 * right side of vp due to scan direction
867 data->inits.h_c = dc_fixpt_add(data->inits.h_c, dc_fixpt_mul_int(
868 data->ratios.horz_c, data->recout.x - recout_full->x));
870 * this is the difference between first pixel of viewport available to read
871 * and init position, takning into account scan direction
873 int_part = dc_fixpt_floor(data->inits.h_c) - end_offset;
874 if (int_part < data->taps.h_taps_c) {
875 int int_adj = end_offset >= (data->taps.h_taps_c - int_part) ?
876 (data->taps.h_taps_c - int_part) : end_offset;
877 data->viewport_c.width += int_adj;
879 } else if (int_part > data->taps.h_taps_c) {
880 data->viewport_c.width += int_part - data->taps.h_taps_c;
881 int_part = data->taps.h_taps_c;
883 data->inits.h_c.value &= 0xffffffff;
884 data->inits.h_c = dc_fixpt_add_int(data->inits.h_c, int_part);
888 if (!flip_vert_scan_dir) {
889 /* Adjust for viewport end clip-off */
890 if ((data->viewport.y + data->viewport.height) < (src.y + src.height)) {
891 int vp_clip = src.y + src.height - data->viewport.height - data->viewport.y;
892 int int_part = dc_fixpt_floor(
893 dc_fixpt_sub(data->inits.v, data->ratios.vert));
895 int_part = int_part > 0 ? int_part : 0;
896 data->viewport.height += int_part < vp_clip ? int_part : vp_clip;
898 if ((data->viewport_c.y + data->viewport_c.height) < (src.y + src.height) / vpc_div) {
899 int vp_clip = (src.y + src.height) / vpc_div -
900 data->viewport_c.height - data->viewport_c.y;
901 int int_part = dc_fixpt_floor(
902 dc_fixpt_sub(data->inits.v_c, data->ratios.vert_c));
904 int_part = int_part > 0 ? int_part : 0;
905 data->viewport_c.height += int_part < vp_clip ? int_part : vp_clip;
908 /* Adjust for non-0 viewport offset */
909 if (data->viewport.y) {
912 data->inits.v = dc_fixpt_add(data->inits.v, dc_fixpt_mul_int(
913 data->ratios.vert, data->recout.y - recout_full->y));
914 int_part = dc_fixpt_floor(data->inits.v) - data->viewport.y;
915 if (int_part < data->taps.v_taps) {
916 int int_adj = data->viewport.y >= (data->taps.v_taps - int_part) ?
917 (data->taps.v_taps - int_part) : data->viewport.y;
918 data->viewport.y -= int_adj;
919 data->viewport.height += int_adj;
921 } else if (int_part > data->taps.v_taps) {
922 data->viewport.y += int_part - data->taps.v_taps;
923 data->viewport.height -= int_part - data->taps.v_taps;
924 int_part = data->taps.v_taps;
926 data->inits.v.value &= 0xffffffff;
927 data->inits.v = dc_fixpt_add_int(data->inits.v, int_part);
930 if (data->viewport_c.y) {
933 data->inits.v_c = dc_fixpt_add(data->inits.v_c, dc_fixpt_mul_int(
934 data->ratios.vert_c, data->recout.y - recout_full->y));
935 int_part = dc_fixpt_floor(data->inits.v_c) - data->viewport_c.y;
936 if (int_part < data->taps.v_taps_c) {
937 int int_adj = data->viewport_c.y >= (data->taps.v_taps_c - int_part) ?
938 (data->taps.v_taps_c - int_part) : data->viewport_c.y;
939 data->viewport_c.y -= int_adj;
940 data->viewport_c.height += int_adj;
942 } else if (int_part > data->taps.v_taps_c) {
943 data->viewport_c.y += int_part - data->taps.v_taps_c;
944 data->viewport_c.height -= int_part - data->taps.v_taps_c;
945 int_part = data->taps.v_taps_c;
947 data->inits.v_c.value &= 0xffffffff;
948 data->inits.v_c = dc_fixpt_add_int(data->inits.v_c, int_part);
951 /* Adjust for non-0 viewport offset */
952 if (data->viewport.y) {
953 int int_part = dc_fixpt_floor(
954 dc_fixpt_sub(data->inits.v, data->ratios.vert));
956 int_part = int_part > 0 ? int_part : 0;
957 data->viewport.height += int_part < data->viewport.y ? int_part : data->viewport.y;
958 data->viewport.y -= int_part < data->viewport.y ? int_part : data->viewport.y;
960 if (data->viewport_c.y) {
961 int int_part = dc_fixpt_floor(
962 dc_fixpt_sub(data->inits.v_c, data->ratios.vert_c));
964 int_part = int_part > 0 ? int_part : 0;
965 data->viewport_c.height += int_part < data->viewport_c.y ? int_part : data->viewport_c.y;
966 data->viewport_c.y -= int_part < data->viewport_c.y ? int_part : data->viewport_c.y;
969 /* Adjust for viewport end clip-off */
970 if ((data->viewport.y + data->viewport.height) < (src.y + src.height)) {
972 int end_offset = src.y + src.height
973 - data->viewport.y - data->viewport.height;
976 * this is init if vp had no offset, keep in mind this is from the
977 * right side of vp due to scan direction
979 data->inits.v = dc_fixpt_add(data->inits.v, dc_fixpt_mul_int(
980 data->ratios.vert, data->recout.y - recout_full->y));
982 * this is the difference between first pixel of viewport available to read
983 * and init position, taking into account scan direction
985 int_part = dc_fixpt_floor(data->inits.v) - end_offset;
986 if (int_part < data->taps.v_taps) {
987 int int_adj = end_offset >= (data->taps.v_taps - int_part) ?
988 (data->taps.v_taps - int_part) : end_offset;
989 data->viewport.height += int_adj;
991 } else if (int_part > data->taps.v_taps) {
992 data->viewport.height += int_part - data->taps.v_taps;
993 int_part = data->taps.v_taps;
995 data->inits.v.value &= 0xffffffff;
996 data->inits.v = dc_fixpt_add_int(data->inits.v, int_part);
999 if ((data->viewport_c.y + data->viewport_c.height) < (src.y + src.height) / vpc_div) {
1001 int end_offset = (src.y + src.height) / vpc_div
1002 - data->viewport_c.y - data->viewport_c.height;
1005 * this is init if vp had no offset, keep in mind this is from the
1006 * right side of vp due to scan direction
1008 data->inits.v_c = dc_fixpt_add(data->inits.v_c, dc_fixpt_mul_int(
1009 data->ratios.vert_c, data->recout.y - recout_full->y));
1011 * this is the difference between first pixel of viewport available to read
1012 * and init position, taking into account scan direction
1014 int_part = dc_fixpt_floor(data->inits.v_c) - end_offset;
1015 if (int_part < data->taps.v_taps_c) {
1016 int int_adj = end_offset >= (data->taps.v_taps_c - int_part) ?
1017 (data->taps.v_taps_c - int_part) : end_offset;
1018 data->viewport_c.height += int_adj;
1019 int_part += int_adj;
1020 } else if (int_part > data->taps.v_taps_c) {
1021 data->viewport_c.height += int_part - data->taps.v_taps_c;
1022 int_part = data->taps.v_taps_c;
1024 data->inits.v_c.value &= 0xffffffff;
1025 data->inits.v_c = dc_fixpt_add_int(data->inits.v_c, int_part);
1029 /* Interlaced inits based on final vert inits */
1030 data->inits.v_bot = dc_fixpt_add(data->inits.v, data->ratios.vert);
1031 data->inits.v_c_bot = dc_fixpt_add(data->inits.v_c, data->ratios.vert_c);
1033 if (pipe_ctx->plane_state->rotation == ROTATION_ANGLE_90 ||
1034 pipe_ctx->plane_state->rotation == ROTATION_ANGLE_270) {
1035 rect_swap_helper(&data->viewport_c);
1036 rect_swap_helper(&data->viewport);
1040 bool resource_build_scaling_params(struct pipe_ctx *pipe_ctx)
1042 const struct dc_plane_state *plane_state = pipe_ctx->plane_state;
1043 struct dc_crtc_timing *timing = &pipe_ctx->stream->timing;
1044 struct rect recout_full = { 0 };
1046 DC_LOGGER_INIT(pipe_ctx->stream->ctx->logger);
1047 /* Important: scaling ratio calculation requires pixel format,
1048 * lb depth calculation requires recout and taps require scaling ratios.
1049 * Inits require viewport, taps, ratios and recout of split pipe
1051 pipe_ctx->plane_res.scl_data.format = convert_pixel_format_to_dalsurface(
1052 pipe_ctx->plane_state->format);
1054 if (pipe_ctx->stream->timing.flags.INTERLACE)
1055 pipe_ctx->stream->dst.height *= 2;
1057 calculate_scaling_ratios(pipe_ctx);
1059 calculate_viewport(pipe_ctx);
1061 if (pipe_ctx->plane_res.scl_data.viewport.height < 16 || pipe_ctx->plane_res.scl_data.viewport.width < 16)
1064 calculate_recout(pipe_ctx, &recout_full);
1067 * Setting line buffer pixel depth to 24bpp yields banding
1068 * on certain displays, such as the Sharp 4k
1070 pipe_ctx->plane_res.scl_data.lb_params.depth = LB_PIXEL_DEPTH_30BPP;
1072 pipe_ctx->plane_res.scl_data.recout.x += timing->h_border_left;
1073 pipe_ctx->plane_res.scl_data.recout.y += timing->v_border_top;
1075 pipe_ctx->plane_res.scl_data.h_active = timing->h_addressable + timing->h_border_left + timing->h_border_right;
1076 pipe_ctx->plane_res.scl_data.v_active = timing->v_addressable + timing->v_border_top + timing->v_border_bottom;
1077 if (pipe_ctx->stream->timing.flags.INTERLACE)
1078 pipe_ctx->plane_res.scl_data.v_active *= 2;
1081 /* Taps calculations */
1082 if (pipe_ctx->plane_res.xfm != NULL)
1083 res = pipe_ctx->plane_res.xfm->funcs->transform_get_optimal_number_of_taps(
1084 pipe_ctx->plane_res.xfm, &pipe_ctx->plane_res.scl_data, &plane_state->scaling_quality);
1086 if (pipe_ctx->plane_res.dpp != NULL)
1087 res = pipe_ctx->plane_res.dpp->funcs->dpp_get_optimal_number_of_taps(
1088 pipe_ctx->plane_res.dpp, &pipe_ctx->plane_res.scl_data, &plane_state->scaling_quality);
1090 /* Try 24 bpp linebuffer */
1091 pipe_ctx->plane_res.scl_data.lb_params.depth = LB_PIXEL_DEPTH_24BPP;
1093 if (pipe_ctx->plane_res.xfm != NULL)
1094 res = pipe_ctx->plane_res.xfm->funcs->transform_get_optimal_number_of_taps(
1095 pipe_ctx->plane_res.xfm,
1096 &pipe_ctx->plane_res.scl_data,
1097 &plane_state->scaling_quality);
1099 if (pipe_ctx->plane_res.dpp != NULL)
1100 res = pipe_ctx->plane_res.dpp->funcs->dpp_get_optimal_number_of_taps(
1101 pipe_ctx->plane_res.dpp,
1102 &pipe_ctx->plane_res.scl_data,
1103 &plane_state->scaling_quality);
1107 /* May need to re-check lb size after this in some obscure scenario */
1108 calculate_inits_and_adj_vp(pipe_ctx, &recout_full);
1111 "%s: Viewport:\nheight:%d width:%d x:%d "
1112 "y:%d\n dst_rect:\nheight:%d width:%d x:%d "
1115 pipe_ctx->plane_res.scl_data.viewport.height,
1116 pipe_ctx->plane_res.scl_data.viewport.width,
1117 pipe_ctx->plane_res.scl_data.viewport.x,
1118 pipe_ctx->plane_res.scl_data.viewport.y,
1119 plane_state->dst_rect.height,
1120 plane_state->dst_rect.width,
1121 plane_state->dst_rect.x,
1122 plane_state->dst_rect.y);
1124 if (pipe_ctx->stream->timing.flags.INTERLACE)
1125 pipe_ctx->stream->dst.height /= 2;
/*
 * Run resource_build_scaling_params() on every pipe in @context that has
 * both a plane and a stream attached.  Returns DC_FAIL_SCALING on the
 * first pipe whose scaling parameters cannot be built (success return is
 * on an elided line — presumably DC_OK).
 */
1131 enum dc_status resource_build_scaling_params_for_context(
1132 const struct dc *dc,
1133 struct dc_state *context)
1137 for (i = 0; i < MAX_PIPES; i++) {
1138 if (context->res_ctx.pipe_ctx[i].plane_state != NULL &&
1139 context->res_ctx.pipe_ctx[i].stream != NULL)
1140 if (!resource_build_scaling_params(&context->res_ctx.pipe_ctx[i]))
1141 return DC_FAIL_SCALING;
/*
 * Find a pipe with no stream attached to serve as a secondary (bottom)
 * pipe.  Returns NULL when every pipe is in use.
 *
 * @res_ctx: resource context whose pipe array is scanned
 * @pool:    pool providing the pipe count
 */
1147 struct pipe_ctx *find_idle_secondary_pipe(
1148 struct resource_context *res_ctx,
1149 const struct resource_pool *pool)
1152 struct pipe_ctx *secondary_pipe = NULL;
/* Scan from the highest index down so secondary-pipe assignment stays
 * stable across calls (primaries are allocated from the front). */
1155 * search backwards for the second pipe to keep pipe
1156 * assignment more consistent
1159 for (i = pool->pipe_count - 1; i >= 0; i--) {
1160 if (res_ctx->pipe_ctx[i].stream == NULL) {
1161 secondary_pipe = &res_ctx->pipe_ctx[i];
1162 secondary_pipe->pipe_idx = i;
1168 return secondary_pipe;
/*
 * Return the head pipe for @stream: the pipe bound to the stream that has
 * no top_pipe above it (i.e. the one owning the timing generator/back end).
 * Returns NULL when the stream has no pipe (on an elided line).
 */
1171 struct pipe_ctx *resource_get_head_pipe_for_stream(
1172 struct resource_context *res_ctx,
1173 struct dc_stream_state *stream)
1176 for (i = 0; i < MAX_PIPES; i++) {
1177 if (res_ctx->pipe_ctx[i].stream == stream &&
1178 !res_ctx->pipe_ctx[i].top_pipe) {
1179 return &res_ctx->pipe_ctx[i];
/*
 * Walk the bottom_pipe chain from the stream's head pipe and return the
 * last (tail) pipe.  The NULL-head early-out and the loop condition sit on
 * elided lines; the walk itself is visible below.
 */
1186 static struct pipe_ctx *resource_get_tail_pipe_for_stream(
1187 struct resource_context *res_ctx,
1188 struct dc_stream_state *stream)
1190 struct pipe_ctx *head_pipe, *tail_pipe;
1191 head_pipe = resource_get_head_pipe_for_stream(res_ctx, stream);
1196 tail_pipe = head_pipe->bottom_pipe;
/* Advance until bottom_pipe is NULL; head_pipe ends up pointing at the tail. */
1199 head_pipe = tail_pipe;
1200 tail_pipe = tail_pipe->bottom_pipe;
1207 * A free_pipe for a stream is defined here as a pipe
1208 * that has no surface attached yet
/*
 * Acquire a pipe for attaching another plane to @stream:
 *   1) if the head pipe itself has no plane yet, use it (return is on an
 *      elided line after the !head_pipe->plane_state check);
 *   2) otherwise reuse any pipe already bound to the stream but planeless;
 *   3) otherwise delegate to the pool's acquire_idle_pipe_for_layer hook.
 * Returns NULL when no hook exists (elided return after the hook check).
 */
1210 static struct pipe_ctx *acquire_free_pipe_for_stream(
1211 struct dc_state *context,
1212 const struct resource_pool *pool,
1213 struct dc_stream_state *stream)
1216 struct resource_context *res_ctx = &context->res_ctx;
1218 struct pipe_ctx *head_pipe = NULL;
1220 /* Find head pipe, which has the back end set up*/
1222 head_pipe = resource_get_head_pipe_for_stream(res_ctx, stream);
1229 if (!head_pipe->plane_state)
1232 /* Re-use pipe already acquired for this stream if available*/
1233 for (i = pool->pipe_count - 1; i >= 0; i--) {
1234 if (res_ctx->pipe_ctx[i].stream == stream &&
1235 !res_ctx->pipe_ctx[i].plane_state) {
1236 return &res_ctx->pipe_ctx[i];
1241 * At this point we have no re-useable pipe for this stream and we need
1242 * to acquire an idle one to satisfy the request
1245 if (!pool->funcs->acquire_idle_pipe_for_layer)
1248 return pool->funcs->acquire_idle_pipe_for_layer(context, pool, stream);
1252 #if defined(CONFIG_DRM_AMD_DC_DCN1_0)
/*
 * DCN1 only: steal a pipe that is currently acting as the bottom half of a
 * split (same plane as its top pipe), unsplice it from the top/bottom
 * chain, reset it, rebind its fixed per-index hardware resources and give
 * it to @stream.  Returns the pipe index on success; the not-found return
 * value is on an elided line (presumably a negative error code).
 */
1253 static int acquire_first_split_pipe(
1254 struct resource_context *res_ctx,
1255 const struct resource_pool *pool,
1256 struct dc_stream_state *stream)
1260 for (i = 0; i < pool->pipe_count; i++) {
1261 struct pipe_ctx *pipe_ctx = &res_ctx->pipe_ctx[i];
/* A "split" pipe shares its plane with the pipe above it. */
1263 if (pipe_ctx->top_pipe &&
1264 pipe_ctx->top_pipe->plane_state == pipe_ctx->plane_state) {
/* Unlink pipe_ctx from the vertical chain before wiping it. */
1265 pipe_ctx->top_pipe->bottom_pipe = pipe_ctx->bottom_pipe;
1266 if (pipe_ctx->bottom_pipe)
1267 pipe_ctx->bottom_pipe->top_pipe = pipe_ctx->top_pipe;
1269 memset(pipe_ctx, 0, sizeof(*pipe_ctx));
/* Hardware blocks are bound 1:1 by pipe index on DCN1. */
1270 pipe_ctx->stream_res.tg = pool->timing_generators[i];
1271 pipe_ctx->plane_res.hubp = pool->hubps[i];
1272 pipe_ctx->plane_res.ipp = pool->ipps[i];
1273 pipe_ctx->plane_res.dpp = pool->dpps[i];
1274 pipe_ctx->stream_res.opp = pool->opps[i];
1275 pipe_ctx->plane_res.mpcc_inst = pool->dpps[i]->inst;
1276 pipe_ctx->pipe_idx = i;
1278 pipe_ctx->stream = stream;
/*
 * Attach @plane_state to @stream inside validation context @context.
 * Looks up the stream's status, enforces the MAX_SURFACE_NUM limit,
 * acquires a pipe (falling back to splitting an existing DCN pipe), links
 * the new pipe under the current tail pipe, and records the plane in the
 * stream status.  Takes a reference on @plane_state
 * (dc_plane_state_retain); the matching release happens in
 * dc_remove_plane_from_context().  Error paths return false via elided
 * lines after each dm_error().
 */
1286 bool dc_add_plane_to_context(
1287 const struct dc *dc,
1288 struct dc_stream_state *stream,
1289 struct dc_plane_state *plane_state,
1290 struct dc_state *context)
1293 struct resource_pool *pool = dc->res_pool;
1294 struct pipe_ctx *head_pipe, *tail_pipe, *free_pipe;
1295 struct dc_stream_status *stream_status = NULL;
1297 for (i = 0; i < context->stream_count; i++)
1298 if (context->streams[i] == stream) {
1299 stream_status = &context->stream_status[i];
1302 if (stream_status == NULL) {
1303 dm_error("Existing stream not found; failed to attach surface!\n");
1308 if (stream_status->plane_count == MAX_SURFACE_NUM) {
1309 dm_error("Surface: can not attach plane_state %p! Maximum is: %d\n",
1310 plane_state, MAX_SURFACE_NUM);
1314 head_pipe = resource_get_head_pipe_for_stream(&context->res_ctx, stream);
1317 dm_error("Head pipe not found for stream_state %p !\n", stream);
1321 free_pipe = acquire_free_pipe_for_stream(context, pool, stream);
/* DCN1 fallback: if no free pipe, break up an existing split pipe. */
1323 #if defined(CONFIG_DRM_AMD_DC_DCN1_0)
1325 int pipe_idx = acquire_first_split_pipe(&context->res_ctx, pool, stream);
1327 free_pipe = &context->res_ctx.pipe_ctx[pipe_idx];
1333 /* retain new surfaces */
1334 dc_plane_state_retain(plane_state);
1335 free_pipe->plane_state = plane_state;
/* A secondary pipe shares all stream-level resources with the tail of
 * the existing chain and is appended below it. */
1337 if (head_pipe != free_pipe) {
1339 tail_pipe = resource_get_tail_pipe_for_stream(&context->res_ctx, stream);
1342 free_pipe->stream_res.tg = tail_pipe->stream_res.tg;
1343 free_pipe->stream_res.abm = tail_pipe->stream_res.abm;
1344 free_pipe->stream_res.opp = tail_pipe->stream_res.opp;
1345 free_pipe->stream_res.stream_enc = tail_pipe->stream_res.stream_enc;
1346 free_pipe->stream_res.audio = tail_pipe->stream_res.audio;
1347 free_pipe->clock_source = tail_pipe->clock_source;
1348 free_pipe->top_pipe = tail_pipe;
1349 tail_pipe->bottom_pipe = free_pipe;
1352 /* assign new surfaces*/
1353 stream_status->plane_states[stream_status->plane_count] = plane_state;
1355 stream_status->plane_count++;
/*
 * Detach @plane_state from @stream in @context: release every pipe the
 * plane occupies (relinking or zeroing the top/bottom chain as needed),
 * drop the plane reference taken by dc_add_plane_to_context(), and compact
 * the stream status plane array.  Returns false (via elided lines) when
 * the stream or the plane is not found in the context.
 */
1360 bool dc_remove_plane_from_context(
1361 const struct dc *dc,
1362 struct dc_stream_state *stream,
1363 struct dc_plane_state *plane_state,
1364 struct dc_state *context)
1367 struct dc_stream_status *stream_status = NULL;
1368 struct resource_pool *pool = dc->res_pool;
1370 for (i = 0; i < context->stream_count; i++)
1371 if (context->streams[i] == stream) {
1372 stream_status = &context->stream_status[i];
1376 if (stream_status == NULL) {
1377 dm_error("Existing stream not found; failed to remove plane.\n");
1381 /* release pipe for plane*/
1382 for (i = pool->pipe_count - 1; i >= 0; i--) {
1383 struct pipe_ctx *pipe_ctx;
1385 if (context->res_ctx.pipe_ctx[i].plane_state == plane_state) {
1386 pipe_ctx = &context->res_ctx.pipe_ctx[i];
/* Unlink this pipe from the vertical split chain. */
1388 if (pipe_ctx->top_pipe)
1389 pipe_ctx->top_pipe->bottom_pipe = pipe_ctx->bottom_pipe;
1391 /* Second condition is to avoid setting NULL to top pipe
1392 * of tail pipe making it look like head pipe in subsequent
1395 if (pipe_ctx->bottom_pipe && pipe_ctx->top_pipe)
1396 pipe_ctx->bottom_pipe->top_pipe = pipe_ctx->top_pipe;
/* Head pipes keep their back-end resources and only shed the plane;
 * tail/secondary pipes are wiped entirely. */
1399 * For head pipe detach surfaces from pipe for tail
1400 * pipe just zero it out
1402 if (!pipe_ctx->top_pipe || (!pipe_ctx->top_pipe->top_pipe &&
1403 pipe_ctx->top_pipe->stream_res.opp != pipe_ctx->stream_res.opp)) {
1404 pipe_ctx->top_pipe = NULL;
1405 pipe_ctx->plane_state = NULL;
1406 pipe_ctx->bottom_pipe = NULL;
1408 memset(pipe_ctx, 0, sizeof(*pipe_ctx));
/* Find the plane in the stream status and drop our reference. */
1414 for (i = 0; i < stream_status->plane_count; i++) {
1415 if (stream_status->plane_states[i] == plane_state) {
1417 dc_plane_state_release(stream_status->plane_states[i]);
1422 if (i == stream_status->plane_count) {
1423 dm_error("Existing plane_state not found; failed to detach it!\n");
1427 stream_status->plane_count--;
1429 /* Start at the plane we've just released, and move all the planes one index forward to "trim" the array */
1430 for (; i < stream_status->plane_count; i++)
1431 stream_status->plane_states[i] = stream_status->plane_states[i + 1];
1433 stream_status->plane_states[stream_status->plane_count] = NULL;
/*
 * Remove every plane attached to @stream in @context.  The plane pointers
 * are snapshotted into a local array first because
 * dc_remove_plane_from_context() compacts stream_status->plane_states
 * while we iterate.  Failure return on removal error is on an elided line.
 */
1438 bool dc_rem_all_planes_for_stream(
1439 const struct dc *dc,
1440 struct dc_stream_state *stream,
1441 struct dc_state *context)
1443 int i, old_plane_count;
1444 struct dc_stream_status *stream_status = NULL;
1445 struct dc_plane_state *del_planes[MAX_SURFACE_NUM] = { 0 };
1447 for (i = 0; i < context->stream_count; i++)
1448 if (context->streams[i] == stream) {
1449 stream_status = &context->stream_status[i];
1453 if (stream_status == NULL) {
1454 dm_error("Existing stream %p not found!\n", stream);
1458 old_plane_count = stream_status->plane_count;
1460 for (i = 0; i < old_plane_count; i++)
1461 del_planes[i] = stream_status->plane_states[i];
1463 for (i = 0; i < old_plane_count; i++)
1464 if (!dc_remove_plane_from_context(dc, stream, del_planes[i], context))
/*
 * Find @stream in the validation @set and add all of its planes to
 * @context via dc_add_plane_to_context().  Returns false (elided lines)
 * when the stream is not in the set or any plane fails to attach.
 */
1470 static bool add_all_planes_for_stream(
1471 const struct dc *dc,
1472 struct dc_stream_state *stream,
1473 const struct dc_validation_set set[],
1475 struct dc_state *context)
1479 for (i = 0; i < set_count; i++)
1480 if (set[i].stream == stream)
1483 if (i == set_count) {
1484 dm_error("Stream %p not found in set!\n", stream);
1488 for (j = 0; j < set[i].plane_count; j++)
1489 if (!dc_add_plane_to_context(dc, stream, set[i].plane_states[j], context))
/*
 * Public wrapper: package @plane_states into a single-entry
 * dc_validation_set and forward to add_all_planes_for_stream().
 */
1495 bool dc_add_all_planes_for_stream(
1496 const struct dc *dc,
1497 struct dc_stream_state *stream,
1498 struct dc_plane_state * const *plane_states,
1500 struct dc_state *context)
1502 struct dc_validation_set set;
1505 set.stream = stream;
1506 set.plane_count = plane_count;
1508 for (i = 0; i < plane_count; i++)
1509 set.plane_states[i] = plane_states[i];
1511 return add_all_planes_for_stream(dc, stream, &set, 1, context);
/*
 * Compare the HDR static metadata info packets of two streams byte-wise.
 * A NULL current stream is treated as "changed" (return on elided line —
 * presumably true, forcing reprogramming on first commit).
 */
1515 static bool is_hdr_static_meta_changed(struct dc_stream_state *cur_stream,
1516 struct dc_stream_state *new_stream)
1518 if (cur_stream == NULL)
1521 if (memcmp(&cur_stream->hdr_static_metadata,
1522 &new_stream->hdr_static_metadata,
1523 sizeof(struct dc_info_packet)) != 0)
/*
 * Decide whether the new stream needs full timing reprogramming: NULL
 * current stream, a different sink (hotplug), a changed output color
 * space, or any byte difference in the CRTC timing all count as changed.
 */
1529 static bool is_timing_changed(struct dc_stream_state *cur_stream,
1530 struct dc_stream_state *new_stream)
1532 if (cur_stream == NULL)
1535 /* If sink pointer changed, it means this is a hotplug, we should do
1538 if (cur_stream->sink != new_stream->sink)
1541 /* If output color space is changed, need to reprogram info frames */
1542 if (cur_stream->output_color_space != new_stream->output_color_space)
1546 &cur_stream->timing,
1547 &new_stream->timing,
1548 sizeof(struct dc_crtc_timing)) != 0;
/*
 * Two streams share the same back-end programming when they are the same
 * object, or when neither their timing nor their HDR static metadata
 * differs.  NULL on either side means "not same" (elided return).
 */
1551 static bool are_stream_backends_same(
1552 struct dc_stream_state *stream_a, struct dc_stream_state *stream_b)
1554 if (stream_a == stream_b)
1557 if (stream_a == NULL || stream_b == NULL)
1560 if (is_timing_changed(stream_a, stream_b))
1563 if (is_hdr_static_meta_changed(stream_a, stream_b))
/*
 * Public check: a stream is "unchanged" iff its back end matches the old
 * stream's (see are_stream_backends_same()).
 */
1569 bool dc_is_stream_unchanged(
1570 struct dc_stream_state *old_stream, struct dc_stream_state *stream)
1573 if (!are_stream_backends_same(old_stream, stream))
/*
 * True when the stream's scaling setup is identical: same object, or both
 * non-NULL with byte-identical src and dst rects.  The early returns for
 * identity/NULL cases sit on elided lines.
 */
1579 bool dc_is_stream_scaling_unchanged(
1580 struct dc_stream_state *old_stream, struct dc_stream_state *stream)
1582 if (old_stream == stream)
1585 if (old_stream == NULL || stream == NULL)
1588 if (memcmp(&old_stream->src,
1590 sizeof(struct rect)) != 0)
1593 if (memcmp(&old_stream->dst,
1595 sizeof(struct rect)) != 0)
/*
 * Mark @stream_enc as acquired/free in the resource context bookkeeping.
 * The boolean parameter name is on an elided line (presumably "acquired",
 * matching the assignment below).
 */
1601 static void update_stream_engine_usage(
1602 struct resource_context *res_ctx,
1603 const struct resource_pool *pool,
1604 struct stream_encoder *stream_enc,
1609 for (i = 0; i < pool->stream_enc_count; i++) {
1610 if (pool->stream_enc[i] == stream_enc)
1611 res_ctx->is_stream_enc_acquired[i] = acquired;
1615 /* TODO: release audio object */
/*
 * Mark @audio as acquired/free in the resource context bookkeeping,
 * mirroring update_stream_engine_usage() for audio endpoints.
 */
1616 void update_audio_usage(
1617 struct resource_context *res_ctx,
1618 const struct resource_pool *pool,
1619 struct audio *audio,
1623 for (i = 0; i < pool->audio_count; i++) {
1624 if (pool->audios[i] == audio)
1625 res_ctx->is_audio_acquired[i] = acquired;
/*
 * Take the lowest-index pipe with no stream, bind the per-index hardware
 * blocks (TG, MI/HUBP, IPP, XFM/DPP, OPP) to it and attach @stream.
 * Returns the pipe index; the no-pipe-available return value is on an
 * elided line (presumably -1).
 */
1629 static int acquire_first_free_pipe(
1630 struct resource_context *res_ctx,
1631 const struct resource_pool *pool,
1632 struct dc_stream_state *stream)
1636 for (i = 0; i < pool->pipe_count; i++) {
1637 if (!res_ctx->pipe_ctx[i].stream) {
1638 struct pipe_ctx *pipe_ctx = &res_ctx->pipe_ctx[i];
/* Both DCE (mi/xfm) and DCN (hubp/dpp) resources are assigned; only the
 * ones the ASIC actually populates in the pool are non-NULL. */
1640 pipe_ctx->stream_res.tg = pool->timing_generators[i];
1641 pipe_ctx->plane_res.mi = pool->mis[i];
1642 pipe_ctx->plane_res.hubp = pool->hubps[i];
1643 pipe_ctx->plane_res.ipp = pool->ipps[i];
1644 pipe_ctx->plane_res.xfm = pool->transforms[i];
1645 pipe_ctx->plane_res.dpp = pool->dpps[i];
1646 pipe_ctx->stream_res.opp = pool->opps[i];
1648 pipe_ctx->plane_res.mpcc_inst = pool->dpps[i]->inst;
1649 pipe_ctx->pipe_idx = i;
1652 pipe_ctx->stream = stream;
/*
 * Pick a stream encoder for @stream's link.  Prefers the link encoder's
 * preferred engine; while scanning, the index of the last free encoder is
 * remembered (in "j", assigned on an elided line) as a DP-only fallback
 * for MST daisy chains where the preferred engine is already taken.
 * Returns NULL (elided) when nothing suitable is free.
 */
1659 static struct stream_encoder *find_first_free_match_stream_enc_for_link(
1660 struct resource_context *res_ctx,
1661 const struct resource_pool *pool,
1662 struct dc_stream_state *stream)
1666 struct dc_link *link = stream->sink->link;
1668 for (i = 0; i < pool->stream_enc_count; i++) {
1669 if (!res_ctx->is_stream_enc_acquired[i] &&
1670 pool->stream_enc[i]) {
1671 /* Store first available for MST second display
1672 * in daisy chain use case */
1674 if (pool->stream_enc[i]->id ==
1675 link->link_enc->preferred_engine)
1676 return pool->stream_enc[i];
1681 * below can happen in cases when stream encoder is acquired:
1682 * 1) for second MST display in chain, so preferred engine already
1684 * 2) for another link, which preferred engine already acquired by any
1685 * MST configuration.
1687 * If signal is of DP type and preferred engine not found, return last available
1689 * TODO - This is just a patch up and a generic solution is
1690 * required for non DP connectors.
1693 if (j >= 0 && dc_is_dp_signal(stream->signal))
1694 return pool->stream_enc[j];
/*
 * Pick an audio endpoint for a stream.  Preference order:
 *   1) a free endpoint at the same index as an already-acquired stream
 *      encoder (the matching-inst case guarded below);
 *   2) the endpoint whose index equals the stream encoder engine @id
 *      (parameter declared on an elided line);
 *   3) first-come-first-served: any free endpoint.
 * Returns NULL (elided) when all endpoints are taken.
 */
1699 static struct audio *find_first_free_audio(
1700 struct resource_context *res_ctx,
1701 const struct resource_pool *pool,
1704 int i, available_audio_count;
1706 available_audio_count = pool->audio_count;
1708 for (i = 0; i < available_audio_count; i++) {
1709 if ((res_ctx->is_audio_acquired[i] == false) && (res_ctx->is_stream_enc_acquired[i] == true)) {
1710 /*we have enough audio endpoint, find the matching inst*/
1713 return pool->audios[i];
1717 /* use engine id to find free audio */
1718 if ((id < available_audio_count) && (res_ctx->is_audio_acquired[id] == false)) {
1719 return pool->audios[id];
1721 /*not found the matching one, first come first serve*/
1722 for (i = 0; i < available_audio_count; i++) {
1723 if (res_ctx->is_audio_acquired[i] == false) {
1724 return pool->audios[i];
/*
 * True when @stream's back end matches any stream already present in
 * @old_context (the true/false returns sit on elided lines).
 */
1730 bool resource_is_stream_unchanged(
1731 struct dc_state *old_context, struct dc_stream_state *stream)
1735 for (i = 0; i < old_context->stream_count; i++) {
1736 struct dc_stream_state *old_stream = old_context->streams[i];
1738 if (are_stream_backends_same(old_stream, stream))
/*
 * Append @stream to validation context @new_ctx (taking a stream
 * reference) and let the ASIC-specific resource hook finish per-stream
 * setup.  Fails with DC_ERROR_UNEXPECTED once every timing generator is
 * already claimed.  The "res" declaration and final return are on elided
 * lines.
 */
1745 enum dc_status dc_add_stream_to_ctx(
1747 struct dc_state *new_ctx,
1748 struct dc_stream_state *stream)
1750 struct dc_context *dc_ctx = dc->ctx;
1753 if (new_ctx->stream_count >= dc->res_pool->timing_generator_count) {
1754 DC_ERROR("Max streams reached, can't add stream %p !\n", stream);
1755 return DC_ERROR_UNEXPECTED;
1758 new_ctx->streams[new_ctx->stream_count] = stream;
1759 dc_stream_retain(stream);
1760 new_ctx->stream_count++;
1762 res = dc->res_pool->funcs->add_stream_to_ctx(dc, new_ctx, stream);
1764 DC_ERROR("Adding stream %p to context failed with err %d!\n", stream, res);
/*
 * Remove @stream from @new_ctx: free its primary (head) pipe and the
 * stream encoder / audio / clock source bookkeeping, invoke the optional
 * ASIC remove hook, drop the stream reference taken by
 * dc_add_stream_to_ctx(), and compact the streams/stream_status arrays.
 * Returns DC_ERROR_UNEXPECTED when no pipe or no context slot holds the
 * stream; the success return is on an elided line.
 */
1769 enum dc_status dc_remove_stream_from_ctx(
1771 struct dc_state *new_ctx,
1772 struct dc_stream_state *stream)
1775 struct dc_context *dc_ctx = dc->ctx;
1776 struct pipe_ctx *del_pipe = NULL;
1778 /* Release primary pipe */
1779 for (i = 0; i < MAX_PIPES; i++) {
1780 if (new_ctx->res_ctx.pipe_ctx[i].stream == stream &&
1781 !new_ctx->res_ctx.pipe_ctx[i].top_pipe) {
1782 del_pipe = &new_ctx->res_ctx.pipe_ctx[i];
1784 ASSERT(del_pipe->stream_res.stream_enc);
/* Release the stream encoder back to the pool bookkeeping. */
1785 update_stream_engine_usage(
1788 del_pipe->stream_res.stream_enc,
1791 if (del_pipe->stream_res.audio)
1795 del_pipe->stream_res.audio,
1798 resource_unreference_clock_source(&new_ctx->res_ctx,
1800 del_pipe->clock_source);
1802 if (dc->res_pool->funcs->remove_stream_from_ctx)
1803 dc->res_pool->funcs->remove_stream_from_ctx(dc, new_ctx, stream);
1805 memset(del_pipe, 0, sizeof(*del_pipe));
1810 DC_ERROR("Pipe not found for stream %p !\n", stream);
1811 return DC_ERROR_UNEXPECTED;
1814 for (i = 0; i < new_ctx->stream_count; i++)
1815 if (new_ctx->streams[i] == stream)
1818 if (new_ctx->streams[i] != stream) {
1819 DC_ERROR("Context doesn't have stream %p !\n", stream);
1820 return DC_ERROR_UNEXPECTED;
1823 dc_stream_release(new_ctx->streams[i]);
1824 new_ctx->stream_count--;
1826 /* Trim back arrays */
1827 for (; i < new_ctx->stream_count; i++) {
1828 new_ctx->streams[i] = new_ctx->streams[i + 1];
1829 new_ctx->stream_status[i] = new_ctx->stream_status[i + 1];
1832 new_ctx->streams[new_ctx->stream_count] = NULL;
/* Zero the now-unused trailing stream_status slot. */
1834 &new_ctx->stream_status[new_ctx->stream_count],
1836 sizeof(new_ctx->stream_status[0]));
/*
 * Search @context for a stream whose PLL @stream_needs_pll could share:
 * it must have synchronizable timing and be neither DP nor virtual (those
 * do not use a shareable PLL here).  Returns NULL (elided) when none
 * qualifies.
 */
1841 static struct dc_stream_state *find_pll_sharable_stream(
1842 struct dc_stream_state *stream_needs_pll,
1843 struct dc_state *context)
1847 for (i = 0; i < context->stream_count; i++) {
1848 struct dc_stream_state *stream_has_pll = context->streams[i];
1850 /* We are looking for non dp, non virtual stream */
1851 if (resource_are_streams_timing_synchronizable(
1852 stream_needs_pll, stream_has_pll)
1853 && !dc_is_dp_signal(stream_has_pll->signal)
1854 && stream_has_pll->sink->link->connector_signal
1855 != SIGNAL_TYPE_VIRTUAL)
1856 return stream_has_pll;
/*
 * Normalize the timing's pixel clock (kHz) to the effective link rate:
 * for RGB/YCbCr444 the clock scales with color depth (30/24, 36/24,
 * 48/24), YCbCr422 packs deep color without a clock increase, and the
 * YCbCr420 adjustment sits on an elided line (presumably halving —
 * TODO confirm).  6/8 bpc need no scaling.
 */
1863 static int get_norm_pix_clk(const struct dc_crtc_timing *timing)
1865 uint32_t pix_clk = timing->pix_clk_khz;
1866 uint32_t normalized_pix_clk = pix_clk;
1868 if (timing->pixel_encoding == PIXEL_ENCODING_YCBCR420)
1870 if (timing->pixel_encoding != PIXEL_ENCODING_YCBCR422) {
1871 switch (timing->display_color_depth) {
1872 case COLOR_DEPTH_666:
1873 case COLOR_DEPTH_888:
1874 normalized_pix_clk = pix_clk;
1876 case COLOR_DEPTH_101010:
1877 normalized_pix_clk = (pix_clk * 30) / 24;
1879 case COLOR_DEPTH_121212:
1880 normalized_pix_clk = (pix_clk * 36) / 24;
1882 case COLOR_DEPTH_161616:
1883 normalized_pix_clk = (pix_clk * 48) / 24;
1890 return normalized_pix_clk;
/*
 * Set the stream's PHY pixel clock: HDMI uses the deep-color-normalized
 * clock from get_norm_pix_clk(); every other signal uses the raw timing
 * pixel clock.
 */
1893 static void calculate_phy_pix_clks(struct dc_stream_state *stream)
1895 /* update actual pixel clock on all streams */
1896 if (dc_is_hdmi_signal(stream->signal))
1897 stream->phy_pix_clk = get_norm_pix_clk(
1900 stream->phy_pix_clk =
1901 stream->timing.pix_clk_khz;
/*
 * Bind pool hardware to @stream inside @context: acquire a pipe (DCN may
 * fall back to splitting one), pick a stream encoder for the link, claim
 * an audio endpoint when the sink and stream support audio, attach ABM on
 * embedded (eDP) panels, and record the OTG / stream-encoder instances in
 * the matching stream_status entry.  Error statuses are returned at each
 * step; the DC_OK return after the status update is on an elided line.
 */
1904 enum dc_status resource_map_pool_resources(
1905 const struct dc *dc,
1906 struct dc_state *context,
1907 struct dc_stream_state *stream)
1909 const struct resource_pool *pool = dc->res_pool;
1911 struct dc_context *dc_ctx = dc->ctx;
1912 struct pipe_ctx *pipe_ctx = NULL;
1915 /* TODO Check if this is needed */
1916 /*if (!resource_is_stream_unchanged(old_context, stream)) {
1917 if (stream != NULL && old_context->streams[i] != NULL) {
1918 stream->bit_depth_params =
1919 old_context->streams[i]->bit_depth_params;
1920 stream->clamping = old_context->streams[i]->clamping;
1926 calculate_phy_pix_clks(stream);
1928 /* acquire new resources */
1929 pipe_idx = acquire_first_free_pipe(&context->res_ctx, pool, stream);
/* DCN1: when no free pipe exists, try to unsplit an existing pipe. */
1931 #ifdef CONFIG_DRM_AMD_DC_DCN1_0
1933 pipe_idx = acquire_first_split_pipe(&context->res_ctx, pool, stream);
1936 if (pipe_idx < 0 || context->res_ctx.pipe_ctx[pipe_idx].stream_res.tg == NULL)
1937 return DC_NO_CONTROLLER_RESOURCE;
1939 pipe_ctx = &context->res_ctx.pipe_ctx[pipe_idx];
1941 pipe_ctx->stream_res.stream_enc =
1942 find_first_free_match_stream_enc_for_link(
1943 &context->res_ctx, pool, stream);
1945 if (!pipe_ctx->stream_res.stream_enc)
1946 return DC_NO_STREAM_ENG_RESOURCE;
1948 update_stream_engine_usage(
1949 &context->res_ctx, pool,
1950 pipe_ctx->stream_res.stream_enc,
1953 /* TODO: Add check if ASIC support and EDID audio */
1954 if (!stream->sink->converter_disable_audio &&
1955 dc_is_audio_capable_signal(pipe_ctx->stream->signal) &&
1956 stream->audio_info.mode_count && stream->audio_info.flags.all) {
1957 pipe_ctx->stream_res.audio = find_first_free_audio(
1958 &context->res_ctx, pool, pipe_ctx->stream_res.stream_enc->id);
/* A NULL audio here is tolerated: some ASICs have fewer audio
 * endpoints than pipes, so audio is strictly first come first get. */
1961 * Audio assigned in order first come first get.
1962 * There are asics which has number of audio
1963 * resources less then number of pipes
1965 if (pipe_ctx->stream_res.audio)
1966 update_audio_usage(&context->res_ctx, pool,
1967 pipe_ctx->stream_res.audio, true);
1970 /* Add ABM to the resource if on EDP */
1971 if (pipe_ctx->stream && dc_is_embedded_signal(pipe_ctx->stream->signal))
1972 pipe_ctx->stream_res.abm = pool->abm;
1974 for (i = 0; i < context->stream_count; i++)
1975 if (context->streams[i] == stream) {
1976 context->stream_status[i].primary_otg_inst = pipe_ctx->stream_res.tg->inst;
1977 context->stream_status[i].stream_enc_inst = pipe_ctx->stream_res.stream_enc->id;
1981 DC_ERROR("Stream %p not found in new ctx!\n", stream);
1982 return DC_ERROR_UNEXPECTED;
/*
 * Initialize @dst_ctx as a copy of the DC's current (committed) state.
 */
1985 void dc_resource_state_copy_construct_current(
1986 const struct dc *dc,
1987 struct dc_state *dst_ctx)
1989 dc_resource_state_copy_construct(dc->current_state, dst_ctx);
/*
 * Construct a fresh dc_state; the only visible setup is wiring the
 * display clock object from the resource pool into the new context.
 */
1993 void dc_resource_state_construct(
1994 const struct dc *dc,
1995 struct dc_state *dst_ctx)
1997 dst_ctx->dis_clk = dc->res_pool->dccg;
/*
 * Full validation of @new_ctx: run the ASIC's optional validate_global
 * hook, re-home clock sources (DP streams that cannot share a PLL with a
 * non-DP stream are moved to the dedicated DP clock source), rebuild all
 * scaling parameters, and finally check bandwidth.  Returns the first
 * failing status, DC_FAIL_BANDWIDTH_VALIDATE if bandwidth validation
 * fails, or the result of the scaling-params build.
 */
2000 enum dc_status dc_validate_global_state(
2002 struct dc_state *new_ctx)
2004 enum dc_status result = DC_ERROR_UNEXPECTED;
2008 return DC_ERROR_UNEXPECTED;
2010 if (dc->res_pool->funcs->validate_global) {
2011 result = dc->res_pool->funcs->validate_global(dc, new_ctx);
2012 if (result != DC_OK)
2016 for (i = 0; i < new_ctx->stream_count; i++) {
2017 struct dc_stream_state *stream = new_ctx->streams[i];
2019 for (j = 0; j < dc->res_pool->pipe_count; j++) {
2020 struct pipe_ctx *pipe_ctx = &new_ctx->res_ctx.pipe_ctx[j];
2022 if (pipe_ctx->stream != stream)
2025 /* Switch to dp clock source only if there is
2026 * no non dp stream that shares the same timing
2027 * with the dp stream.
2029 if (dc_is_dp_signal(pipe_ctx->stream->signal) &&
2030 !find_pll_sharable_stream(stream, new_ctx)) {
/* Drop the old clock source ref before taking the DP one. */
2032 resource_unreference_clock_source(
2035 pipe_ctx->clock_source);
2037 pipe_ctx->clock_source = dc->res_pool->dp_clock_source;
2038 resource_reference_clock_source(
2041 pipe_ctx->clock_source);
2046 result = resource_build_scaling_params_for_context(dc, new_ctx);
2048 if (result == DC_OK)
2049 if (!dc->res_pool->funcs->validate_bandwidth(dc, new_ctx))
2050 result = DC_FAIL_BANDWIDTH_VALIDATE;
/*
 * Recompute the checksum byte (sb[2]) of a valid gamut metadata packet.
 * Payload starts at sb[3]; sb[1] holds the payload length, and the
 * accumulation into chk_sum happens on an elided line inside the loop.
 * The stored value is the two's-complement byte (0x100 - sum).
 */
2055 static void patch_gamut_packet_checksum(
2056 struct dc_info_packet *gamut_packet)
2058 /* For gamut we recalc checksum */
2059 if (gamut_packet->valid) {
2060 uint8_t chk_sum = 0;
2064 /*start of the Gamut data. */
2065 ptr = &gamut_packet->sb[3];
2067 for (i = 0; i <= gamut_packet->sb[1]; i++)
2070 gamut_packet->sb[2] = (uint8_t) (0x100 - chk_sum);
2074 static void set_avi_info_frame(
2075 struct dc_info_packet *info_packet,
2076 struct pipe_ctx *pipe_ctx)
2078 struct dc_stream_state *stream = pipe_ctx->stream;
2079 enum dc_color_space color_space = COLOR_SPACE_UNKNOWN;
2080 uint32_t pixel_encoding = 0;
2081 enum scanning_type scan_type = SCANNING_TYPE_NODATA;
2082 enum dc_aspect_ratio aspect = ASPECT_RATIO_NO_DATA;
2084 uint8_t itc_value = 0;
2085 uint8_t cn0_cn1 = 0;
2086 unsigned int cn0_cn1_value = 0;
2087 uint8_t *check_sum = NULL;
2088 uint8_t byte_index = 0;
2089 union hdmi_info_packet hdmi_info;
2090 union display_content_support support = {0};
2091 unsigned int vic = pipe_ctx->stream->timing.vic;
2092 enum dc_timing_3d_format format;
2094 memset(&hdmi_info, 0, sizeof(union hdmi_info_packet));
2096 color_space = pipe_ctx->stream->output_color_space;
2097 if (color_space == COLOR_SPACE_UNKNOWN)
2098 color_space = (stream->timing.pixel_encoding == PIXEL_ENCODING_RGB) ?
2099 COLOR_SPACE_SRGB:COLOR_SPACE_YCBCR709;
2101 /* Initialize header */
2102 hdmi_info.bits.header.info_frame_type = HDMI_INFOFRAME_TYPE_AVI;
2103 /* InfoFrameVersion_3 is defined by CEA861F (Section 6.4), but shall
2104 * not be used in HDMI 2.0 (Section 10.1) */
2105 hdmi_info.bits.header.version = 2;
2106 hdmi_info.bits.header.length = HDMI_AVI_INFOFRAME_SIZE;
2109 * IDO-defined (Y2,Y1,Y0 = 1,1,1) shall not be used by devices built
2110 * according to HDMI 2.0 spec (Section 10.1)
2113 switch (stream->timing.pixel_encoding) {
2114 case PIXEL_ENCODING_YCBCR422:
2118 case PIXEL_ENCODING_YCBCR444:
2121 case PIXEL_ENCODING_YCBCR420:
2125 case PIXEL_ENCODING_RGB:
2130 /* Y0_Y1_Y2 : The pixel encoding */
2131 /* H14b AVI InfoFrame has extension on Y-field from 2 bits to 3 bits */
2132 hdmi_info.bits.Y0_Y1_Y2 = pixel_encoding;
2134 /* A0 = 1 Active Format Information valid */
2135 hdmi_info.bits.A0 = ACTIVE_FORMAT_VALID;
2137 /* B0, B1 = 3; Bar info data is valid */
2138 hdmi_info.bits.B0_B1 = BAR_INFO_BOTH_VALID;
2140 hdmi_info.bits.SC0_SC1 = PICTURE_SCALING_UNIFORM;
2142 /* S0, S1 : Underscan / Overscan */
2143 /* TODO: un-hardcode scan type */
2144 scan_type = SCANNING_TYPE_UNDERSCAN;
2145 hdmi_info.bits.S0_S1 = scan_type;
2147 /* C0, C1 : Colorimetry */
2148 if (color_space == COLOR_SPACE_YCBCR709 ||
2149 color_space == COLOR_SPACE_YCBCR709_LIMITED)
2150 hdmi_info.bits.C0_C1 = COLORIMETRY_ITU709;
2151 else if (color_space == COLOR_SPACE_YCBCR601 ||
2152 color_space == COLOR_SPACE_YCBCR601_LIMITED)
2153 hdmi_info.bits.C0_C1 = COLORIMETRY_ITU601;
2155 hdmi_info.bits.C0_C1 = COLORIMETRY_NO_DATA;
2157 if (color_space == COLOR_SPACE_2020_RGB_FULLRANGE ||
2158 color_space == COLOR_SPACE_2020_RGB_LIMITEDRANGE ||
2159 color_space == COLOR_SPACE_2020_YCBCR) {
2160 hdmi_info.bits.EC0_EC2 = COLORIMETRYEX_BT2020RGBYCBCR;
2161 hdmi_info.bits.C0_C1 = COLORIMETRY_EXTENDED;
2162 } else if (color_space == COLOR_SPACE_ADOBERGB) {
2163 hdmi_info.bits.EC0_EC2 = COLORIMETRYEX_ADOBERGB;
2164 hdmi_info.bits.C0_C1 = COLORIMETRY_EXTENDED;
2167 /* TODO: un-hardcode aspect ratio */
2168 aspect = stream->timing.aspect_ratio;
2171 case ASPECT_RATIO_4_3:
2172 case ASPECT_RATIO_16_9:
2173 hdmi_info.bits.M0_M1 = aspect;
2176 case ASPECT_RATIO_NO_DATA:
2177 case ASPECT_RATIO_64_27:
2178 case ASPECT_RATIO_256_135:
2180 hdmi_info.bits.M0_M1 = 0;
2183 /* Active Format Aspect ratio - same as Picture Aspect Ratio. */
2184 hdmi_info.bits.R0_R3 = ACTIVE_FORMAT_ASPECT_RATIO_SAME_AS_PICTURE;
2186 /* TODO: un-hardcode cn0_cn1 and itc */
2194 support = stream->sink->edid_caps.content_support;
2197 if (!support.bits.valid_content_type) {
2200 if (cn0_cn1 == DISPLAY_CONTENT_TYPE_GRAPHICS) {
2201 if (support.bits.graphics_content == 1) {
2204 } else if (cn0_cn1 == DISPLAY_CONTENT_TYPE_PHOTO) {
2205 if (support.bits.photo_content == 1) {
2211 } else if (cn0_cn1 == DISPLAY_CONTENT_TYPE_CINEMA) {
2212 if (support.bits.cinema_content == 1) {
2218 } else if (cn0_cn1 == DISPLAY_CONTENT_TYPE_GAME) {
2219 if (support.bits.game_content == 1) {
2227 hdmi_info.bits.CN0_CN1 = cn0_cn1_value;
2228 hdmi_info.bits.ITC = itc_value;
2231 /* TODO : We should handle YCC quantization */
2232 /* but we do not have matrix calculation */
2233 if (stream->sink->edid_caps.qs_bit == 1 &&
2234 stream->sink->edid_caps.qy_bit == 1) {
2235 if (color_space == COLOR_SPACE_SRGB ||
2236 color_space == COLOR_SPACE_2020_RGB_FULLRANGE) {
2237 hdmi_info.bits.Q0_Q1 = RGB_QUANTIZATION_FULL_RANGE;
2238 hdmi_info.bits.YQ0_YQ1 = YYC_QUANTIZATION_FULL_RANGE;
2239 } else if (color_space == COLOR_SPACE_SRGB_LIMITED ||
2240 color_space == COLOR_SPACE_2020_RGB_LIMITEDRANGE) {
2241 hdmi_info.bits.Q0_Q1 = RGB_QUANTIZATION_LIMITED_RANGE;
2242 hdmi_info.bits.YQ0_YQ1 = YYC_QUANTIZATION_LIMITED_RANGE;
2244 hdmi_info.bits.Q0_Q1 = RGB_QUANTIZATION_DEFAULT_RANGE;
2245 hdmi_info.bits.YQ0_YQ1 = YYC_QUANTIZATION_LIMITED_RANGE;
2248 hdmi_info.bits.Q0_Q1 = RGB_QUANTIZATION_DEFAULT_RANGE;
2249 hdmi_info.bits.YQ0_YQ1 = YYC_QUANTIZATION_LIMITED_RANGE;
2253 format = stream->timing.timing_3d_format;
2254 /*todo, add 3DStereo support*/
2255 if (format != TIMING_3D_FORMAT_NONE) {
2256 // Based on HDMI specs hdmi vic needs to be converted to cea vic when 3D is enabled
2257 switch (pipe_ctx->stream->timing.hdmi_vic) {
2274 hdmi_info.bits.VIC0_VIC7 = vic;
2277 * PR0 - PR3 start from 0 whereas pHwPathMode->mode.timing.flags.pixel
2278 * repetition start from 1 */
2279 hdmi_info.bits.PR0_PR3 = 0;
2282 * barTop: Line Number of End of Top Bar.
2283 * barBottom: Line Number of Start of Bottom Bar.
2284 * barLeft: Pixel Number of End of Left Bar.
2285 * barRight: Pixel Number of Start of Right Bar. */
2286 hdmi_info.bits.bar_top = stream->timing.v_border_top;
2287 hdmi_info.bits.bar_bottom = (stream->timing.v_total
2288 - stream->timing.v_border_bottom + 1);
2289 hdmi_info.bits.bar_left = stream->timing.h_border_left;
2290 hdmi_info.bits.bar_right = (stream->timing.h_total
2291 - stream->timing.h_border_right + 1);
2293 /* check_sum - Calculate AFMT_AVI_INFO0 ~ AFMT_AVI_INFO3 */
2294 check_sum = &hdmi_info.packet_raw_data.sb[0];
2296 *check_sum = HDMI_INFOFRAME_TYPE_AVI + HDMI_AVI_INFOFRAME_SIZE + 2;
2298 for (byte_index = 1; byte_index <= HDMI_AVI_INFOFRAME_SIZE; byte_index++)
2299 *check_sum += hdmi_info.packet_raw_data.sb[byte_index];
2301 /* one byte complement */
2302 *check_sum = (uint8_t) (0x100 - *check_sum);
2304 /* Store in hw_path_mode */
2305 info_packet->hb0 = hdmi_info.packet_raw_data.hb0;
2306 info_packet->hb1 = hdmi_info.packet_raw_data.hb1;
2307 info_packet->hb2 = hdmi_info.packet_raw_data.hb2;
2309 for (byte_index = 0; byte_index < sizeof(hdmi_info.packet_raw_data.sb); byte_index++)
2310 info_packet->sb[byte_index] = hdmi_info.packet_raw_data.sb[byte_index];
2312 info_packet->valid = true;
/*
 * set_vendor_info_packet() - Build the HDMI Vendor-Specific InfoFrame (VSIF).
 *
 * Per HDMI 1.4a CTS the VSIF is emitted only for 3D stereo timings or for
 * HDMI-VIC (4K) extended-resolution modes; in every other case the packet is
 * left with valid == false so nothing is sent.
 *
 * NOTE(review): this extract is missing original source lines (the embedded
 * line numbers jump, e.g. 2317 -> 2319): the opening brace, the early
 * return, the `length` assignments and the 3D `switch` header are among the
 * elided lines. Verify the control flow against the complete file.
 */
2315 static void set_vendor_info_packet(
2316 struct dc_info_packet *info_packet,
2317 struct dc_stream_state *stream)
2319 uint32_t length = 0;
2320 bool hdmi_vic_mode = false;
2321 uint8_t checksum = 0;
2323 enum dc_timing_3d_format format;
2324 // Can be different depending on packet content /*todo*/
2325 // unsigned int length = pPathMode->dolbyVision ? 24 : 5;
/* Pessimistic default: only flips to true once the packet is fully built. */
2327 info_packet->valid = false;
2329 format = stream->timing.timing_3d_format;
/* A 3D timing paired with a non-3D view format is treated as 2D. */
2330 if (stream->view_format == VIEW_3D_FORMAT_NONE)
2331 format = TIMING_3D_FORMAT_NONE;
2333 /* Can be different depending on packet content */
/* HDMI-VIC applies only to the 4K extended-resolution modes. */
2336 if (stream->timing.hdmi_vic != 0
2337 && stream->timing.h_total >= 3840
2338 && stream->timing.v_total >= 2160)
2339 hdmi_vic_mode = true;
2341 /* According to HDMI 1.4a CTS, VSIF should be sent
2342 * for both 3D stereo and HDMI VIC modes.
2343 * For all other modes, there is no VSIF sent. */
2345 if (format == TIMING_3D_FORMAT_NONE && !hdmi_vic_mode)
2348 /* 24bit IEEE Registration identifier (0x000c03). LSB first. */
2349 info_packet->sb[1] = 0x03;
2350 info_packet->sb[2] = 0x0C;
2351 info_packet->sb[3] = 0x00;
2353 /*PB4: 5 lower bytes = 0 (reserved). 3 higher bits = HDMI_Video_Format.
2354 * The value for HDMI_Video_Format are:
2355 * 0x0 (0b000) - No additional HDMI video format is presented in this
2357 * 0x1 (0b001) - Extended resolution format present. 1 byte of HDMI_VIC
2359 * 0x2 (0b010) - 3D format indication present. 3D_Structure and
2360 * potentially 3D_Ext_Data follows
2361 * 0x3..0x7 (0b011..0b111) - reserved for future use */
/* 3D takes precedence over HDMI-VIC when both apply. */
2362 if (format != TIMING_3D_FORMAT_NONE)
2363 info_packet->sb[4] = (2 << 5);
2364 else if (hdmi_vic_mode)
2365 info_packet->sb[4] = (1 << 5);
2367 /* PB5: If PB4 claims 3D timing (HDMI_Video_Format = 0x2):
2368 * 4 lower bites = 0 (reserved). 4 higher bits = 3D_Structure.
2369 * The value for 3D_Structure are:
2370 * 0x0 - Frame Packing
2371 * 0x1 - Field Alternative
2372 * 0x2 - Line Alternative
2373 * 0x3 - Side-by-Side (full)
2375 * 0x5 - L + depth + graphics + graphics-depth
2376 * 0x6 - Top-and-Bottom
2377 * 0x7 - Reserved for future use
2378 * 0x8 - Side-by-Side (Half)
2379 * 0x9..0xE - Reserved for future use
/* NOTE(review): switch header elided by extraction; cases map the DC 3D
 * format enum onto the VSIF 3D_Structure codes listed above. */
2382 case TIMING_3D_FORMAT_HW_FRAME_PACKING:
2383 case TIMING_3D_FORMAT_SW_FRAME_PACKING:
2384 info_packet->sb[5] = (0x0 << 4);
2387 case TIMING_3D_FORMAT_SIDE_BY_SIDE:
2388 case TIMING_3D_FORMAT_SBS_SW_PACKED:
2389 info_packet->sb[5] = (0x8 << 4);
2393 case TIMING_3D_FORMAT_TOP_AND_BOTTOM:
2394 case TIMING_3D_FORMAT_TB_SW_PACKED:
2395 info_packet->sb[5] = (0x6 << 4);
2402 /*PB5: If PB4 is set to 0x1 (extended resolution format)
2403 * fill PB5 with the correct HDMI VIC code */
2405 info_packet->sb[5] = stream->timing.hdmi_vic;
2408 info_packet->hb0 = HDMI_INFOFRAME_TYPE_VENDOR; /* VSIF packet type. */
2409 info_packet->hb1 = 0x01; /* Version */
2411 /* 4 lower bits = Length, 4 higher bits = 0 (reserved) */
2412 info_packet->hb2 = (uint8_t) (length);
2414 /* Calculate checksum */
/* Header bytes plus payload bytes sb[1..length] feed the checksum. */
2416 checksum += info_packet->hb0;
2417 checksum += info_packet->hb1;
2418 checksum += info_packet->hb2;
2420 for (i = 1; i <= length; i++)
2421 checksum += info_packet->sb[i];
/* PB0 = two's-complement checksum so all bytes sum to 0 (mod 256). */
2423 info_packet->sb[0] = (uint8_t) (0x100 - checksum);
2425 info_packet->valid = true;
/*
 * set_spd_info_packet() - Build the FreeSync SPD (Source Product Descriptor)
 * info packet for either an HDMI or a DP stream.
 *
 * Only the header framing differs between HDMI and DP; the payload (AMD IEEE
 * OUI, FreeSync capability bits in PB6, min/max refresh in PB7/PB8) is shared.
 * Nothing is emitted when the sink does not support FreeSync.
 *
 * NOTE(review): extraction gaps here (embedded numbering jumps, e.g.
 * 2440 -> 2443) hide the opening brace, the early `return` and the closing
 * braces of the HDMI/DP branches — confirm against the full file.
 */
2428 static void set_spd_info_packet(
2429 struct dc_info_packet *info_packet,
2430 struct dc_stream_state *stream)
2432 /* SPD info packet for FreeSync */
2434 unsigned char checksum = 0;
2435 unsigned int idx, payload_size = 0;
2437 /* Check if Freesync is supported. Return if false. If true,
2438 * set the corresponding bit in the info packet
2440 if (stream->freesync_ctx.supported == false)
/* HDMI framing: standard CTA-861 InfoFrame header. */
2443 if (dc_is_hdmi_signal(stream->signal)) {
2447 /* HB0 = Packet Type = 0x83 (Source Product
2448 * Descriptor InfoFrame)
2450 info_packet->hb0 = HDMI_INFOFRAME_TYPE_SPD;
2452 /* HB1 = Version = 0x01 */
2453 info_packet->hb1 = 0x01;
2455 /* HB2 = [Bits 7:5 = 0] [Bits 4:0 = Length = 0x08] */
2456 info_packet->hb2 = 0x08;
2458 payload_size = 0x08;
/* DP framing: the InfoFrame is carried in a Secondary-Data Packet (SDP). */
2460 } else if (dc_is_dp_signal(stream->signal)) {
2464 /* HB0 = Secondary-data Packet ID = 0 - Only non-zero
2465 * when used to associate audio related info packets
2467 info_packet->hb0 = 0x00;
2469 /* HB1 = Packet Type = 0x83 (Source Product
2470 * Descriptor InfoFrame)
2472 info_packet->hb1 = HDMI_INFOFRAME_TYPE_SPD;
2474 /* HB2 = [Bits 7:0 = Least significant eight bits -
2475 * For INFOFRAME, the value must be 1Bh]
2477 info_packet->hb2 = 0x1B;
2479 /* HB3 = [Bits 7:2 = INFOFRAME SDP Version Number = 0x1]
2480 * [Bits 1:0 = Most significant two bits = 0x00]
2482 info_packet->hb3 = 0x04;
2484 payload_size = 0x1B;
/* Payload common to both signal types. */
2487 /* PB1 = 0x1A (24bit AMD IEEE OUI (0x00001A) - Byte 0) */
2488 info_packet->sb[1] = 0x1A;
2490 /* PB2 = 0x00 (24bit AMD IEEE OUI (0x00001A) - Byte 1) */
2491 info_packet->sb[2] = 0x00;
2493 /* PB3 = 0x00 (24bit AMD IEEE OUI (0x00001A) - Byte 2) */
2494 info_packet->sb[3] = 0x00;
2496 /* PB4 = Reserved */
2497 info_packet->sb[4] = 0x00;
2499 /* PB5 = Reserved */
2500 info_packet->sb[5] = 0x00;
2502 /* PB6 = [Bits 7:3 = Reserved] */
2503 info_packet->sb[6] = 0x00;
/* `supported` is already known true here (checked above), so bit 0 is
 * always set on this path. */
2505 if (stream->freesync_ctx.supported == true)
2506 /* PB6 = [Bit 0 = FreeSync Supported] */
2507 info_packet->sb[6] |= 0x01;
2509 if (stream->freesync_ctx.enabled == true)
2510 /* PB6 = [Bit 1 = FreeSync Enabled] */
2511 info_packet->sb[6] |= 0x02;
2513 if (stream->freesync_ctx.active == true)
2514 /* PB6 = [Bit 2 = FreeSync Active] */
2515 info_packet->sb[6] |= 0x04;
2517 /* PB7 = FreeSync Minimum refresh rate (Hz) */
/* Context stores refresh in micro-Hz; truncating division converts to Hz. */
2518 info_packet->sb[7] = (unsigned char) (stream->freesync_ctx.
2519 min_refresh_in_micro_hz / 1000000);
2521 /* PB8 = FreeSync Maximum refresh rate (Hz)
2523 * Note: We do not use the maximum capable refresh rate
2524 * of the panel, because we should never go above the field
2525 * rate of the mode timing set.
2527 info_packet->sb[8] = (unsigned char) (stream->freesync_ctx.
2528 nominal_refresh_in_micro_hz / 1000000);
2530 /* PB9 - PB27 = Reserved */
2531 for (idx = 9; idx <= 27; idx++)
2532 info_packet->sb[idx] = 0x00;
2534 /* Calculate checksum */
/* hb3 is only written on the DP path; on HDMI it contributes whatever
 * value it already held — presumably zeroed by the caller (TODO confirm). */
2535 checksum += info_packet->hb0;
2536 checksum += info_packet->hb1;
2537 checksum += info_packet->hb2;
2538 checksum += info_packet->hb3;
2540 for (idx = 1; idx <= payload_size; idx++)
2541 checksum += info_packet->sb[idx];
2543 /* PB0 = Checksum (one byte complement) */
2544 info_packet->sb[0] = (unsigned char) (0x100 - checksum);
2546 info_packet->valid = true;
/*
 * set_hdr_static_info_packet() - Publish the stream's precomputed HDR10
 * static-metadata packet.
 *
 * The packet is copied verbatim from the stream; it is skipped when the
 * stored metadata is invalid or when the stream uses dynamic metadata
 * instead. NOTE(review): the opening brace and the early `return` between
 * the condition and the copy are elided by the extraction (numbering jumps
 * 2556 -> 2559).
 */
2549 static void set_hdr_static_info_packet(
2550 struct dc_info_packet *info_packet,
2551 struct dc_stream_state *stream)
2553 /* HDR Static Metadata info packet for HDR10 */
2555 if (!stream->hdr_static_metadata.valid ||
2556 stream->use_dynamic_meta)
/* Struct copy — includes the packet's own `valid` flag. */
2559 *info_packet = stream->hdr_static_metadata;
/*
 * set_vsc_info_packet() - Build the DP VSC SDP (Video Stream Configuration
 * secondary-data packet), currently only for PSR-capable sinks.
 *
 * Revision 2 (3D stereo + PSR, eDP 1.3+) is selected when the stream reports
 * a non-zero PSR version; otherwise no VSC packet is needed and the function
 * returns with the packet left invalid. NOTE(review): extraction gaps hide
 * the opening brace, the declaration of `i`, and the early `return`
 * (numbering jumps 2566 -> 2569, 2577 -> 2580).
 */
2562 static void set_vsc_info_packet(
2563 struct dc_info_packet *info_packet,
2564 struct dc_stream_state *stream)
2566 unsigned int vscPacketRevision = 0;
2569 /*VSC packet set to 2 when DP revision >= 1.2*/
2570 if (stream->psr_version != 0) {
2571 vscPacketRevision = 2;
2574 /* VSC packet not needed based on the features
2575 * supported by this DP display
2577 if (vscPacketRevision == 0)
2580 if (vscPacketRevision == 0x2) {
2581 /* Secondary-data Packet ID = 0*/
2582 info_packet->hb0 = 0x00;
2583 /* 07h - Packet Type Value indicating Video
2584 * Stream Configuration packet
2586 info_packet->hb1 = 0x07;
2587 /* 02h = VSC SDP supporting 3D stereo and PSR
2588 * (applies to eDP v1.3 or higher).
2590 info_packet->hb2 = 0x02;
2591 /* 08h = VSC packet supporting 3D stereo + PSR
2594 info_packet->hb3 = 0x08;
/* Payload is all zeros for this revision. */
2596 for (i = 0; i < 28; i++)
2597 info_packet->sb[i] = 0;
2599 info_packet->valid = true;
2602 /*TODO: stereo 3D support and extend pixel encoding colorimetry*/
/*
 * dc_resource_state_destruct() - Drop every stream/plane reference held by
 * a dc_state.
 *
 * For each stream: release all of its plane states, zero the plane count,
 * release the stream itself and NULL the slot. NOTE(review): the opening
 * brace, the declarations of i/j, the inner closing brace and the trailing
 * reset of `context->stream_count` (if any) are elided by the extraction.
 */
2605 void dc_resource_state_destruct(struct dc_state *context)
2609 for (i = 0; i < context->stream_count; i++) {
2610 for (j = 0; j < context->stream_status[i].plane_count; j++)
2611 dc_plane_state_release(
2612 context->stream_status[i].plane_states[j]);
2614 context->stream_status[i].plane_count = 0;
2615 dc_stream_release(context->streams[i]);
/* Clear the slot so a stale pointer can't be released twice. */
2616 context->streams[i] = NULL;
2621 * Copy src_ctx into dst_ctx and retain all surfaces and streams referenced
/*
 * dc_resource_state_copy_construct() - Shallow-copy a dc_state and fix up
 * internal pointers and reference counts.
 *
 * After the struct assignment, pipe top/bottom pointers still point into
 * src_ctx's pipe array; they are re-based into dst_ctx's own array via
 * pipe_idx. Every referenced stream and plane state gains a reference, and
 * dst_ctx keeps its own kref rather than inheriting src_ctx's.
 */
2624 void dc_resource_state_copy_construct(
2625 const struct dc_state *src_ctx,
2626 struct dc_state *dst_ctx)
/* Save dst's refcount before it is clobbered by the struct copy below. */
2629 struct kref refcount = dst_ctx->refcount;
2631 *dst_ctx = *src_ctx;
2633 for (i = 0; i < MAX_PIPES; i++) {
2634 struct pipe_ctx *cur_pipe = &dst_ctx->res_ctx.pipe_ctx[i];
/* Re-point linked pipes into dst's pipe array (same index, new base). */
2636 if (cur_pipe->top_pipe)
2637 cur_pipe->top_pipe = &dst_ctx->res_ctx.pipe_ctx[cur_pipe->top_pipe->pipe_idx];
2639 if (cur_pipe->bottom_pipe)
2640 cur_pipe->bottom_pipe = &dst_ctx->res_ctx.pipe_ctx[cur_pipe->bottom_pipe->pipe_idx];
/* The shallow copy duplicated the pointers; take a reference for each. */
2644 for (i = 0; i < dst_ctx->stream_count; i++) {
2645 dc_stream_retain(dst_ctx->streams[i]);
2646 for (j = 0; j < dst_ctx->stream_status[i].plane_count; j++)
2647 dc_plane_state_retain(
2648 dst_ctx->stream_status[i].plane_states[j]);
2651 /* context refcount should not be overridden */
2652 dst_ctx->refcount = refcount;
/*
 * dc_resource_find_first_free_pll() - Return the first clock source in the
 * pool whose reference count is zero.
 *
 * NOTE(review): the fall-through `return NULL;` for the no-free-PLL case is
 * elided by the extraction (numbering jumps 2664 -> 2670) — callers treat
 * NULL as "none available" (see resource_map_clock_resources).
 */
2656 struct clock_source *dc_resource_find_first_free_pll(
2657 struct resource_context *res_ctx,
2658 const struct resource_pool *pool)
2662 for (i = 0; i < pool->clk_src_count; ++i) {
2663 if (res_ctx->clock_source_ref_count[i] == 0)
2664 return pool->clock_sources[i];
/*
 * resource_build_info_frame() - Populate the encoder info-frame set for a
 * pipe based on its signal type.
 *
 * All packets start invalid; HDMI streams get AVI + vendor (VSIF) + SPD +
 * HDR static metadata, DP streams get VSC + SPD + HDR static metadata.
 * The gamut packet checksum is (re)patched unconditionally at the end.
 */
2670 void resource_build_info_frame(struct pipe_ctx *pipe_ctx)
2672 enum signal_type signal = SIGNAL_TYPE_NONE;
2673 struct encoder_info_frame *info = &pipe_ctx->stream_res.encoder_info_frame;
2675 /* default all packets to invalid */
2676 info->avi.valid = false;
2677 info->gamut.valid = false;
2678 info->vendor.valid = false;
2679 info->spd.valid = false;
2680 info->hdrsmd.valid = false;
2681 info->vsc.valid = false;
2683 signal = pipe_ctx->stream->signal;
2685 /* HDMi and DP have different info packets*/
2686 if (dc_is_hdmi_signal(signal)) {
2687 set_avi_info_frame(&info->avi, pipe_ctx);
2689 set_vendor_info_packet(&info->vendor, pipe_ctx->stream);
2691 set_spd_info_packet(&info->spd, pipe_ctx->stream);
2693 set_hdr_static_info_packet(&info->hdrsmd, pipe_ctx->stream);
2695 } else if (dc_is_dp_signal(signal)) {
2696 set_vsc_info_packet(&info->vsc, pipe_ctx->stream);
2698 set_spd_info_packet(&info->spd, pipe_ctx->stream);
2700 set_hdr_static_info_packet(&info->hdrsmd, pipe_ctx->stream);
2703 patch_gamut_packet_checksum(&info->gamut);
/*
 * resource_map_clock_resources() - Assign a clock source (PLL) to the head
 * pipe of @stream within @context.
 *
 * DP and virtual signals use the dedicated DP clock source; otherwise the
 * code tries, in order, a shareable in-use PLL (unless sharing is disabled)
 * and then the first free PLL, failing with DC_NO_CLOCK_SOURCE_RESOURCE if
 * none is available. On success the chosen source's refcount is bumped.
 *
 * NOTE(review): extraction gaps hide the NULL-check `if` guarding the
 * DC_ERROR_UNEXPECTED return, the `else` before line 2723, the argument
 * lists of the two find calls, and the final `return DC_OK;` — confirm
 * against the full file.
 */
2706 enum dc_status resource_map_clock_resources(
2707 const struct dc *dc,
2708 struct dc_state *context,
2709 struct dc_stream_state *stream)
2711 /* acquire new resources */
2712 const struct resource_pool *pool = dc->res_pool;
2713 struct pipe_ctx *pipe_ctx = resource_get_head_pipe_for_stream(
2714 &context->res_ctx, stream);
/* Reached when no head pipe exists for the stream (elided NULL check). */
2717 return DC_ERROR_UNEXPECTED;
2719 if (dc_is_dp_signal(pipe_ctx->stream->signal)
2720 || pipe_ctx->stream->signal == SIGNAL_TYPE_VIRTUAL)
2721 pipe_ctx->clock_source = pool->dp_clock_source;
2723 pipe_ctx->clock_source = NULL;
/* Prefer sharing an already-programmed PLL with a matching timing. */
2725 if (!dc->config.disable_disp_pll_sharing)
2726 pipe_ctx->clock_source = resource_find_used_clk_src_for_sharing(
2730 if (pipe_ctx->clock_source == NULL)
2731 pipe_ctx->clock_source =
2732 dc_resource_find_first_free_pll(
2737 if (pipe_ctx->clock_source == NULL)
2738 return DC_NO_CLOCK_SOURCE_RESOURCE;
2740 resource_reference_clock_source(
2741 &context->res_ctx, pool,
2742 pipe_ctx->clock_source);
2748 * Note: We need to disable output if clock sources change,
2749 * since bios does optimization and doesn't apply if changing
2750 * PHY when not already disabled.
/*
 * pipe_need_reprogram() - Decide whether a pipe must be fully reprogrammed
 * when transitioning from its old context to the new one.
 *
 * Any change of sink, signal type, audio resource, clock source (across
 * different streams), stream encoder, timing, or HDR static metadata forces
 * reprogramming. NOTE(review): the `return true;` after each check, the
 * final `return false;`, and the body braces are elided by the extraction —
 * each `if` shown here is a "needs reprogram" trigger.
 */
2752 bool pipe_need_reprogram(
2753 struct pipe_ctx *pipe_ctx_old,
2754 struct pipe_ctx *pipe_ctx)
/* A pipe with no old stream was not programmed; nothing to redo. */
2756 if (!pipe_ctx_old->stream)
2759 if (pipe_ctx_old->stream->sink != pipe_ctx->stream->sink)
2762 if (pipe_ctx_old->stream->signal != pipe_ctx->stream->signal)
2765 if (pipe_ctx_old->stream_res.audio != pipe_ctx->stream_res.audio)
/* Clock-source swap only matters when the stream itself changed too. */
2768 if (pipe_ctx_old->clock_source != pipe_ctx->clock_source
2769 && pipe_ctx_old->stream != pipe_ctx->stream)
2772 if (pipe_ctx_old->stream_res.stream_enc != pipe_ctx->stream_res.stream_enc)
2775 if (is_timing_changed(pipe_ctx_old->stream, pipe_ctx->stream))
2778 if (is_hdr_static_meta_changed(pipe_ctx_old->stream, pipe_ctx->stream))
/*
 * resource_build_bit_depth_reduction_params() - Translate a stream's dither
 * option into formatter (FMT) bit-depth-reduction flags.
 *
 * DITHER_OPTION_DEFAULT first resolves to a spatial-dither depth matching
 * the display color depth. The remaining logic is a series of option-class
 * checks that program three independent mechanisms: truncation, spatial
 * dithering (with frame-random / RGB-random variants), and temporal (frame
 * modulation) dithering. Depth encoding throughout: 0 = to 6bpc,
 * 1 = to 8bpc, 2 = to 10bpc.
 *
 * NOTE(review): extraction gaps hide most `break;` lines in the first
 * switch and some closing braces — structure shown here is partial.
 */
2784 void resource_build_bit_depth_reduction_params(struct dc_stream_state *stream,
2785 struct bit_depth_reduction_params *fmt_bit_depth)
2787 enum dc_dither_option option = stream->dither_option;
2788 enum dc_pixel_encoding pixel_encoding =
2789 stream->timing.pixel_encoding;
2791 memset(fmt_bit_depth, 0, sizeof(*fmt_bit_depth));
/* Resolve DEFAULT to the spatial option matching the display depth. */
2793 if (option == DITHER_OPTION_DEFAULT) {
2794 switch (stream->timing.display_color_depth) {
2795 case COLOR_DEPTH_666:
2796 option = DITHER_OPTION_SPATIAL6;
2798 case COLOR_DEPTH_888:
2799 option = DITHER_OPTION_SPATIAL8;
2801 case COLOR_DEPTH_101010:
2802 option = DITHER_OPTION_SPATIAL10;
2805 option = DITHER_OPTION_DISABLE;
/* Disabled: leave the zeroed params as-is (elided early return follows). */
2809 if (option == DITHER_OPTION_DISABLE)
/* --- Truncation: TRUNCATE_DEPTH 0/1/2 = truncate to 6/8/10 bpc. --- */
2812 if (option == DITHER_OPTION_TRUN6) {
2813 fmt_bit_depth->flags.TRUNCATE_ENABLED = 1;
2814 fmt_bit_depth->flags.TRUNCATE_DEPTH = 0;
2815 } else if (option == DITHER_OPTION_TRUN8 ||
2816 option == DITHER_OPTION_TRUN8_SPATIAL6 ||
2817 option == DITHER_OPTION_TRUN8_FM6) {
2818 fmt_bit_depth->flags.TRUNCATE_ENABLED = 1;
2819 fmt_bit_depth->flags.TRUNCATE_DEPTH = 1;
2820 } else if (option == DITHER_OPTION_TRUN10 ||
2821 option == DITHER_OPTION_TRUN10_SPATIAL6 ||
2822 option == DITHER_OPTION_TRUN10_SPATIAL8 ||
2823 option == DITHER_OPTION_TRUN10_FM8 ||
2824 option == DITHER_OPTION_TRUN10_FM6 ||
2825 option == DITHER_OPTION_TRUN10_SPATIAL8_FM6) {
2826 fmt_bit_depth->flags.TRUNCATE_ENABLED = 1;
2827 fmt_bit_depth->flags.TRUNCATE_DEPTH = 2;
2830 /* special case - Formatter can only reduce by 4 bits at most.
2831 * When reducing from 12 to 6 bits,
2832 * HW recommends we use trunc with round mode
2833 * (if we did nothing, trunc to 10 bits would be used)
2834 * note that any 12->10 bit reduction is ignored prior to DCE8,
2835 * as the input was 10 bits.
2837 if (option == DITHER_OPTION_SPATIAL6_FRAME_RANDOM ||
2838 option == DITHER_OPTION_SPATIAL6 ||
2839 option == DITHER_OPTION_FM6) {
2840 fmt_bit_depth->flags.TRUNCATE_ENABLED = 1;
2841 fmt_bit_depth->flags.TRUNCATE_DEPTH = 2;
2842 fmt_bit_depth->flags.TRUNCATE_MODE = 1;
/* --- Spatial dithering: DEPTH 0/1/2 = dither to 6/8/10 bpc. --- */
2846 * note that spatial modes 1-3 are never used
2848 if (option == DITHER_OPTION_SPATIAL6_FRAME_RANDOM ||
2849 option == DITHER_OPTION_SPATIAL6 ||
2850 option == DITHER_OPTION_TRUN10_SPATIAL6 ||
2851 option == DITHER_OPTION_TRUN8_SPATIAL6) {
2852 fmt_bit_depth->flags.SPATIAL_DITHER_ENABLED = 1;
2853 fmt_bit_depth->flags.SPATIAL_DITHER_DEPTH = 0;
2854 fmt_bit_depth->flags.HIGHPASS_RANDOM = 1;
/* Per-channel random only makes sense for RGB encodings. */
2855 fmt_bit_depth->flags.RGB_RANDOM =
2856 (pixel_encoding == PIXEL_ENCODING_RGB) ? 1 : 0;
2857 } else if (option == DITHER_OPTION_SPATIAL8_FRAME_RANDOM ||
2858 option == DITHER_OPTION_SPATIAL8 ||
2859 option == DITHER_OPTION_SPATIAL8_FM6 ||
2860 option == DITHER_OPTION_TRUN10_SPATIAL8 ||
2861 option == DITHER_OPTION_TRUN10_SPATIAL8_FM6) {
2862 fmt_bit_depth->flags.SPATIAL_DITHER_ENABLED = 1;
2863 fmt_bit_depth->flags.SPATIAL_DITHER_DEPTH = 1;
2864 fmt_bit_depth->flags.HIGHPASS_RANDOM = 1;
2865 fmt_bit_depth->flags.RGB_RANDOM =
2866 (pixel_encoding == PIXEL_ENCODING_RGB) ? 1 : 0;
2867 } else if (option == DITHER_OPTION_SPATIAL10_FRAME_RANDOM ||
2868 option == DITHER_OPTION_SPATIAL10 ||
2869 option == DITHER_OPTION_SPATIAL10_FM8 ||
2870 option == DITHER_OPTION_SPATIAL10_FM6) {
2871 fmt_bit_depth->flags.SPATIAL_DITHER_ENABLED = 1;
2872 fmt_bit_depth->flags.SPATIAL_DITHER_DEPTH = 2;
2873 fmt_bit_depth->flags.HIGHPASS_RANDOM = 1;
2874 fmt_bit_depth->flags.RGB_RANDOM =
2875 (pixel_encoding == PIXEL_ENCODING_RGB) ? 1 : 0;
/* Plain SPATIALn options use a static pattern; all others frame-random. */
2878 if (option == DITHER_OPTION_SPATIAL6 ||
2879 option == DITHER_OPTION_SPATIAL8 ||
2880 option == DITHER_OPTION_SPATIAL10) {
2881 fmt_bit_depth->flags.FRAME_RANDOM = 0;
2883 fmt_bit_depth->flags.FRAME_RANDOM = 1;
2886 //////////////////////
2887 //// temporal dither
2888 //////////////////////
/* FRAME_MODULATION_DEPTH 0/1/2 = temporal dither to 6/8/10 bpc. */
2889 if (option == DITHER_OPTION_FM6 ||
2890 option == DITHER_OPTION_SPATIAL8_FM6 ||
2891 option == DITHER_OPTION_SPATIAL10_FM6 ||
2892 option == DITHER_OPTION_TRUN10_FM6 ||
2893 option == DITHER_OPTION_TRUN8_FM6 ||
2894 option == DITHER_OPTION_TRUN10_SPATIAL8_FM6) {
2895 fmt_bit_depth->flags.FRAME_MODULATION_ENABLED = 1;
2896 fmt_bit_depth->flags.FRAME_MODULATION_DEPTH = 0;
2897 } else if (option == DITHER_OPTION_FM8 ||
2898 option == DITHER_OPTION_SPATIAL10_FM8 ||
2899 option == DITHER_OPTION_TRUN10_FM8) {
2900 fmt_bit_depth->flags.FRAME_MODULATION_ENABLED = 1;
2901 fmt_bit_depth->flags.FRAME_MODULATION_DEPTH = 1;
2902 } else if (option == DITHER_OPTION_FM10) {
2903 fmt_bit_depth->flags.FRAME_MODULATION_ENABLED = 1;
2904 fmt_bit_depth->flags.FRAME_MODULATION_DEPTH = 2;
2907 fmt_bit_depth->pixel_encoding = pixel_encoding;
/*
 * dc_validate_stream() - Validate a stream's timing against the hardware.
 *
 * Checks the timing against timing generator 0, then against the link
 * encoder, then against the link's mode-timing limits, returning the first
 * failure code encountered (DC_OK otherwise). NOTE(review): the elided
 * lines include the guards between the checks (later checks presumably run
 * only while res == DC_OK — confirm), the remaining arguments of
 * dc_link_validate_mode_timing(), and the final `return res;`.
 */
2910 enum dc_status dc_validate_stream(struct dc *dc, struct dc_stream_state *stream)
2912 struct dc *core_dc = dc;
/* Uses TG 0 as representative of all timing generators in the pool. */
2913 struct dc_link *link = stream->sink->link;
2914 struct timing_generator *tg = core_dc->res_pool->timing_generators[0];
2915 enum dc_status res = DC_OK;
/* Derive PHY pixel clocks before any validation that depends on them. */
2917 calculate_phy_pix_clks(stream);
2919 if (!tg->funcs->validate_timing(tg, &stream->timing))
2920 res = DC_FAIL_CONTROLLER_VALIDATE;
2923 if (!link->link_enc->funcs->validate_output_with_stream(
2924 link->link_enc, stream))
2925 res = DC_FAIL_ENC_VALIDATE;
2927 /* TODO: validate audio ASIC caps, encoder */
2930 res = dc_link_validate_mode_timing(stream,
2937 enum dc_status dc_validate_plane(struct dc *dc, const struct dc_plane_state *plane_state)
2939 enum dc_status res = DC_OK;
2941 /* TODO For now validates pixel format only */
2942 if (dc->res_pool->funcs->validate_plane)
2943 return dc->res_pool->funcs->validate_plane(plane_state, &dc->caps);