Lines Matching +full:a +full:- +full:display

4  * Permission is hereby granted, free of charge, to any person obtaining a
17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
52 * Since Haswell Display controller supports Panel Self-Refresh on display
53 * panels witch have a remote frame buffer (RFB) implemented according to PSR
54 * spec in eDP1.3. PSR feature allows the display to go to lower standby states
55 * when system is idle but display is on as it eliminates display refresh
57 * display is unchanged.
66 * The implementation uses the hardware-based PSR support which automatically
67 * enters/exits self-refresh mode. The hardware takes care of sending the
70 * changes to know when to exit self-refresh mode again. Unfortunately that
72 * software frontbuffer tracking to make sure it doesn't miss a screen
75 * issues the self-refresh re-enable code is done from a work queue, which
80 * On top of PSR2, GEN12 adds a intermediate power savings state that turns
83 * entry/exit allows the HW to enter a low-power state even when page flipping
84 * periodically (for instance a 30fps video playback scenario).
86 * Every time a flips occurs PSR2 will get out of deep sleep state(if it was),
92 * would bring a lot of complexity and most of the moderns systems will only
99 * EDP_PSR_DEBUG[16]/EDP_PSR_DEBUG_MASK_DISP_REG_WRITE (hsw-skl):
101 * When unmasked (nearly) all display register writes (eg. even
102 * SWF) trigger a PSR exit. Some registers are excluded from this
103 * and they have a more specific mask (described below). On icl+
109 * trigger a PSR exit. Some plane registers are excluded from this
110 * and they have a more specific mask (described below).
116 * When unmasked PRI_SURF/PLANE_SURF writes trigger a PSR exit.
126 * longer have a mask bit like this, and no plane being
132 * When umasked CURPOS writes trigger a PSR exit. On skl+
152 * No idea as of now why there is a difference. HSW/BDW (which don't
161 * one a chicken/egg bit instead on skl+.
163 * In standby mode (as opposed to link-off) this makes no difference
177 * The rest of the bits are more self-explanatory and/or
197 #define CAN_PSR(intel_dp) ((intel_dp)->psr.sink_support && \
198 (intel_dp)->psr.source_support)
202 if (intel_encoder_is_dp(encoder) || encoder->type == INTEL_OUTPUT_DP_MST) in intel_encoder_can_psr()
214 * the output is enabled. For non-eDP outputs the main link is always in intel_psr_needs_aux_io_power()
215 * on, hence it doesn't require the HW initiated AUX wake-up signaling used in intel_psr_needs_aux_io_power()
219 * - Consider leaving AUX IO disabled for eDP / PR as well, in case in intel_psr_needs_aux_io_power()
220 * the ALPM with main-link off mode is not enabled. in intel_psr_needs_aux_io_power()
221 * - Leave AUX IO enabled for DP / PR, once support for ALPM with in intel_psr_needs_aux_io_power()
222 * main-link off mode is added for it and this mode gets enabled. in intel_psr_needs_aux_io_power()
230 struct intel_display *display = to_intel_display(intel_dp); in psr_global_enabled() local
231 struct intel_connector *connector = intel_dp->attached_connector; in psr_global_enabled()
233 switch (intel_dp->psr.debug & I915_PSR_DEBUG_MODE_MASK) { in psr_global_enabled()
235 if (display->params.enable_psr == -1) in psr_global_enabled()
237 connector->panel.vbt.psr.enable : in psr_global_enabled()
239 return display->params.enable_psr; in psr_global_enabled()
249 struct intel_display *display = to_intel_display(intel_dp); in psr2_global_enabled() local
251 switch (intel_dp->psr.debug & I915_PSR_DEBUG_MODE_MASK) { in psr2_global_enabled()
256 if (display->params.enable_psr == 1) in psr2_global_enabled()
264 struct intel_display *display = to_intel_display(intel_dp); in psr2_su_region_et_global_enabled() local
266 if (display->params.enable_psr != -1) in psr2_su_region_et_global_enabled()
274 struct intel_display *display = to_intel_display(intel_dp); in panel_replay_global_enabled() local
276 if ((display->params.enable_psr != -1) || in panel_replay_global_enabled()
277 (intel_dp->psr.debug & I915_PSR_DEBUG_PANEL_REPLAY_DISABLE)) in panel_replay_global_enabled()
284 struct intel_display *display = to_intel_display(intel_dp); in psr_irq_psr_error_bit_get() local
286 return DISPLAY_VER(display) >= 12 ? TGL_PSR_ERROR : in psr_irq_psr_error_bit_get()
287 EDP_PSR_ERROR(intel_dp->psr.transcoder); in psr_irq_psr_error_bit_get()
292 struct intel_display *display = to_intel_display(intel_dp); in psr_irq_post_exit_bit_get() local
294 return DISPLAY_VER(display) >= 12 ? TGL_PSR_POST_EXIT : in psr_irq_post_exit_bit_get()
295 EDP_PSR_POST_EXIT(intel_dp->psr.transcoder); in psr_irq_post_exit_bit_get()
300 struct intel_display *display = to_intel_display(intel_dp); in psr_irq_pre_entry_bit_get() local
302 return DISPLAY_VER(display) >= 12 ? TGL_PSR_PRE_ENTRY : in psr_irq_pre_entry_bit_get()
303 EDP_PSR_PRE_ENTRY(intel_dp->psr.transcoder); in psr_irq_pre_entry_bit_get()
308 struct intel_display *display = to_intel_display(intel_dp); in psr_irq_mask_get() local
310 return DISPLAY_VER(display) >= 12 ? TGL_PSR_MASK : in psr_irq_mask_get()
311 EDP_PSR_MASK(intel_dp->psr.transcoder); in psr_irq_mask_get()
314 static i915_reg_t psr_ctl_reg(struct intel_display *display, in psr_ctl_reg() argument
317 if (DISPLAY_VER(display) >= 8) in psr_ctl_reg()
318 return EDP_PSR_CTL(display, cpu_transcoder); in psr_ctl_reg()
323 static i915_reg_t psr_debug_reg(struct intel_display *display, in psr_debug_reg() argument
326 if (DISPLAY_VER(display) >= 8) in psr_debug_reg()
327 return EDP_PSR_DEBUG(display, cpu_transcoder); in psr_debug_reg()
332 static i915_reg_t psr_perf_cnt_reg(struct intel_display *display, in psr_perf_cnt_reg() argument
335 if (DISPLAY_VER(display) >= 8) in psr_perf_cnt_reg()
336 return EDP_PSR_PERF_CNT(display, cpu_transcoder); in psr_perf_cnt_reg()
341 static i915_reg_t psr_status_reg(struct intel_display *display, in psr_status_reg() argument
344 if (DISPLAY_VER(display) >= 8) in psr_status_reg()
345 return EDP_PSR_STATUS(display, cpu_transcoder); in psr_status_reg()
350 static i915_reg_t psr_imr_reg(struct intel_display *display, in psr_imr_reg() argument
353 if (DISPLAY_VER(display) >= 12) in psr_imr_reg()
354 return TRANS_PSR_IMR(display, cpu_transcoder); in psr_imr_reg()
359 static i915_reg_t psr_iir_reg(struct intel_display *display, in psr_iir_reg() argument
362 if (DISPLAY_VER(display) >= 12) in psr_iir_reg()
363 return TRANS_PSR_IIR(display, cpu_transcoder); in psr_iir_reg()
368 static i915_reg_t psr_aux_ctl_reg(struct intel_display *display, in psr_aux_ctl_reg() argument
371 if (DISPLAY_VER(display) >= 8) in psr_aux_ctl_reg()
372 return EDP_PSR_AUX_CTL(display, cpu_transcoder); in psr_aux_ctl_reg()
377 static i915_reg_t psr_aux_data_reg(struct intel_display *display, in psr_aux_data_reg() argument
380 if (DISPLAY_VER(display) >= 8) in psr_aux_data_reg()
381 return EDP_PSR_AUX_DATA(display, cpu_transcoder, i); in psr_aux_data_reg()
388 struct intel_display *display = to_intel_display(intel_dp); in psr_irq_control() local
389 enum transcoder cpu_transcoder = intel_dp->psr.transcoder; in psr_irq_control()
392 if (intel_dp->psr.panel_replay_enabled) in psr_irq_control()
396 if (intel_dp->psr.debug & I915_PSR_DEBUG_IRQ) in psr_irq_control()
400 intel_de_rmw(display, psr_imr_reg(display, cpu_transcoder), in psr_irq_control()
404 static void psr_event_print(struct intel_display *display, in psr_event_print() argument
407 drm_dbg_kms(display->drm, "PSR exit events: 0x%x\n", val); in psr_event_print()
409 drm_dbg_kms(display->drm, "\tPSR2 watchdog timer expired\n"); in psr_event_print()
411 drm_dbg_kms(display->drm, "\tPSR2 disabled\n"); in psr_event_print()
413 drm_dbg_kms(display->drm, "\tSU dirty FIFO underrun\n"); in psr_event_print()
415 drm_dbg_kms(display->drm, "\tSU CRC FIFO underrun\n"); in psr_event_print()
417 drm_dbg_kms(display->drm, "\tGraphics reset\n"); in psr_event_print()
419 drm_dbg_kms(display->drm, "\tPCH interrupt\n"); in psr_event_print()
421 drm_dbg_kms(display->drm, "\tMemory up\n"); in psr_event_print()
423 drm_dbg_kms(display->drm, "\tFront buffer modification\n"); in psr_event_print()
425 drm_dbg_kms(display->drm, "\tPSR watchdog timer expired\n"); in psr_event_print()
427 drm_dbg_kms(display->drm, "\tPIPE registers updated\n"); in psr_event_print()
429 drm_dbg_kms(display->drm, "\tRegister updated\n"); in psr_event_print()
431 drm_dbg_kms(display->drm, "\tHDCP enabled\n"); in psr_event_print()
433 drm_dbg_kms(display->drm, "\tKVMR session enabled\n"); in psr_event_print()
435 drm_dbg_kms(display->drm, "\tVBI enabled\n"); in psr_event_print()
437 drm_dbg_kms(display->drm, "\tLPSP mode exited\n"); in psr_event_print()
439 drm_dbg_kms(display->drm, "\tPSR disabled\n"); in psr_event_print()
444 struct intel_display *display = to_intel_display(intel_dp); in intel_psr_irq_handler() local
445 struct drm_i915_private *dev_priv = to_i915(display->drm); in intel_psr_irq_handler()
446 enum transcoder cpu_transcoder = intel_dp->psr.transcoder; in intel_psr_irq_handler()
450 intel_dp->psr.last_entry_attempt = time_ns; in intel_psr_irq_handler()
451 drm_dbg_kms(display->drm, in intel_psr_irq_handler()
457 intel_dp->psr.last_exit = time_ns; in intel_psr_irq_handler()
458 drm_dbg_kms(display->drm, in intel_psr_irq_handler()
462 if (DISPLAY_VER(display) >= 9) { in intel_psr_irq_handler()
469 psr_event_print(display, val, intel_dp->psr.sel_update_enabled); in intel_psr_irq_handler()
474 drm_warn(display->drm, "[transcoder %s] PSR aux error\n", in intel_psr_irq_handler()
477 intel_dp->psr.irq_aux_error = true; in intel_psr_irq_handler()
483 * Also after a PSR error, we don't want to arm PSR in intel_psr_irq_handler()
487 intel_de_rmw(display, psr_imr_reg(display, cpu_transcoder), in intel_psr_irq_handler()
490 queue_work(dev_priv->unordered_wq, &intel_dp->psr.work); in intel_psr_irq_handler()
496 struct intel_display *display = to_intel_display(intel_dp); in intel_dp_get_sink_sync_latency() local
499 if (drm_dp_dpcd_readb(&intel_dp->aux, in intel_dp_get_sink_sync_latency()
503 drm_dbg_kms(display->drm, in intel_dp_get_sink_sync_latency()
512 if (intel_dp->psr.sink_panel_replay_su_support) in intel_dp_get_su_capability()
513 drm_dp_dpcd_readb(&intel_dp->aux, in intel_dp_get_su_capability()
517 su_capability = intel_dp->psr_dpcd[1]; in intel_dp_get_su_capability()
525 return intel_dp->psr.sink_panel_replay_su_support ? in intel_dp_get_su_x_granularity_offset()
533 return intel_dp->psr.sink_panel_replay_su_support ? in intel_dp_get_su_y_granularity_offset()
544 struct intel_display *display = to_intel_display(intel_dp); in intel_dp_get_su_granularity() local
566 r = drm_dp_dpcd_read(&intel_dp->aux, in intel_dp_get_su_granularity()
570 drm_dbg_kms(display->drm, in intel_dp_get_su_granularity()
579 r = drm_dp_dpcd_read(&intel_dp->aux, in intel_dp_get_su_granularity()
583 drm_dbg_kms(display->drm, in intel_dp_get_su_granularity()
591 intel_dp->psr.su_w_granularity = w; in intel_dp_get_su_granularity()
592 intel_dp->psr.su_y_granularity = y; in intel_dp_get_su_granularity()
597 struct intel_display *display = to_intel_display(intel_dp); in _panel_replay_init_dpcd() local
601 drm_dbg_kms(display->drm, in _panel_replay_init_dpcd()
602 "Panel doesn't support AUX-less ALPM, eDP Panel Replay not possible\n"); in _panel_replay_init_dpcd()
606 if (!(intel_dp->pr_dpcd & DP_PANEL_REPLAY_EARLY_TRANSPORT_SUPPORT)) { in _panel_replay_init_dpcd()
607 drm_dbg_kms(display->drm, in _panel_replay_init_dpcd()
613 intel_dp->psr.sink_panel_replay_support = true; in _panel_replay_init_dpcd()
615 if (intel_dp->pr_dpcd & DP_PANEL_REPLAY_SU_SUPPORT) in _panel_replay_init_dpcd()
616 intel_dp->psr.sink_panel_replay_su_support = true; in _panel_replay_init_dpcd()
618 drm_dbg_kms(display->drm, in _panel_replay_init_dpcd()
620 intel_dp->psr.sink_panel_replay_su_support ? in _panel_replay_init_dpcd()
626 struct intel_display *display = to_intel_display(intel_dp); in _psr_init_dpcd() local
628 drm_dbg_kms(display->drm, "eDP panel supports PSR version %x\n", in _psr_init_dpcd()
629 intel_dp->psr_dpcd[0]); in _psr_init_dpcd()
631 if (drm_dp_has_quirk(&intel_dp->desc, DP_DPCD_QUIRK_NO_PSR)) { in _psr_init_dpcd()
632 drm_dbg_kms(display->drm, in _psr_init_dpcd()
637 if (!(intel_dp->edp_dpcd[1] & DP_EDP_SET_POWER_CAP)) { in _psr_init_dpcd()
638 drm_dbg_kms(display->drm, in _psr_init_dpcd()
643 intel_dp->psr.sink_support = true; in _psr_init_dpcd()
644 intel_dp->psr.sink_sync_latency = in _psr_init_dpcd()
647 if (DISPLAY_VER(display) >= 9 && in _psr_init_dpcd()
648 intel_dp->psr_dpcd[0] >= DP_PSR2_WITH_Y_COORD_IS_SUPPORTED) { in _psr_init_dpcd()
649 bool y_req = intel_dp->psr_dpcd[1] & in _psr_init_dpcd()
654 * Y-coordinate) can handle Y-coordinates in VSC but we are in _psr_init_dpcd()
657 * without a aux frame sync. in _psr_init_dpcd()
660 * Y-coordinate requirement panels we would need to enable in _psr_init_dpcd()
663 intel_dp->psr.sink_psr2_support = y_req && in _psr_init_dpcd()
665 drm_dbg_kms(display->drm, "PSR2 %ssupported\n", in _psr_init_dpcd()
666 intel_dp->psr.sink_psr2_support ? "" : "not "); in _psr_init_dpcd()
672 drm_dp_dpcd_read(&intel_dp->aux, DP_PSR_SUPPORT, intel_dp->psr_dpcd, in intel_psr_init_dpcd()
673 sizeof(intel_dp->psr_dpcd)); in intel_psr_init_dpcd()
674 drm_dp_dpcd_readb(&intel_dp->aux, DP_PANEL_REPLAY_CAP, in intel_psr_init_dpcd()
675 &intel_dp->pr_dpcd); in intel_psr_init_dpcd()
677 if (intel_dp->pr_dpcd & DP_PANEL_REPLAY_SUPPORT) in intel_psr_init_dpcd()
680 if (intel_dp->psr_dpcd[0]) in intel_psr_init_dpcd()
683 if (intel_dp->psr.sink_psr2_support || in intel_psr_init_dpcd()
684 intel_dp->psr.sink_panel_replay_su_support) in intel_psr_init_dpcd()
690 struct intel_display *display = to_intel_display(intel_dp); in hsw_psr_setup_aux() local
691 struct drm_i915_private *dev_priv = to_i915(display->drm); in hsw_psr_setup_aux()
692 enum transcoder cpu_transcoder = intel_dp->psr.transcoder; in hsw_psr_setup_aux()
699 [3] = 1 - 1, in hsw_psr_setup_aux()
707 psr_aux_data_reg(display, cpu_transcoder, i >> 2), in hsw_psr_setup_aux()
708 intel_dp_aux_pack(&aux_msg[i], sizeof(aux_msg) - i)); in hsw_psr_setup_aux()
710 aux_clock_divider = intel_dp->get_aux_clock_divider(intel_dp, 0); in hsw_psr_setup_aux()
713 aux_ctl = intel_dp->get_aux_send_ctl(intel_dp, sizeof(aux_msg), in hsw_psr_setup_aux()
722 intel_de_write(display, psr_aux_ctl_reg(display, cpu_transcoder), in hsw_psr_setup_aux()
728 struct intel_display *display = to_intel_display(intel_dp); in psr2_su_region_et_valid() local
730 if (DISPLAY_VER(display) < 20 || !intel_dp_is_edp(intel_dp) || in psr2_su_region_et_valid()
731 intel_dp->psr.debug & I915_PSR_DEBUG_SU_REGION_ET_DISABLE) in psr2_su_region_et_valid()
735 intel_dp->pr_dpcd & DP_PANEL_REPLAY_EARLY_TRANSPORT_SUPPORT : in psr2_su_region_et_valid()
736 intel_dp->psr_dpcd[0] == DP_PSR2_WITH_Y_COORD_ET_SUPPORTED && in psr2_su_region_et_valid()
750 if (crtc_state->has_sel_update) in _panel_replay_enable_sink()
753 if (crtc_state->enable_psr2_su_region_et) in _panel_replay_enable_sink()
756 if (crtc_state->req_psr2_sdp_prior_scanline) in _panel_replay_enable_sink()
760 drm_dp_dpcd_writeb(&intel_dp->aux, PANEL_REPLAY_CONFIG, val); in _panel_replay_enable_sink()
762 drm_dp_dpcd_writeb(&intel_dp->aux, PANEL_REPLAY_CONFIG2, in _panel_replay_enable_sink()
769 struct intel_display *display = to_intel_display(intel_dp); in _psr_enable_sink() local
772 if (crtc_state->has_sel_update) { in _psr_enable_sink()
775 if (intel_dp->psr.link_standby) in _psr_enable_sink()
778 if (DISPLAY_VER(display) >= 8) in _psr_enable_sink()
782 if (crtc_state->req_psr2_sdp_prior_scanline) in _psr_enable_sink()
785 if (crtc_state->enable_psr2_su_region_et) in _psr_enable_sink()
788 if (intel_dp->psr.entry_setup_frames > 0) in _psr_enable_sink()
790 drm_dp_dpcd_writeb(&intel_dp->aux, DP_PSR_EN_CFG, val); in _psr_enable_sink()
793 drm_dp_dpcd_writeb(&intel_dp->aux, DP_PSR_EN_CFG, val); in _psr_enable_sink()
805 if (!intel_dp_is_edp(intel_dp) || (!crtc_state->has_panel_replay && in intel_psr_enable_sink_alpm()
806 !crtc_state->has_sel_update)) in intel_psr_enable_sink_alpm()
811 if (crtc_state->has_panel_replay) in intel_psr_enable_sink_alpm()
814 drm_dp_dpcd_writeb(&intel_dp->aux, DP_RECEIVER_ALPM_CONFIG, val); in intel_psr_enable_sink_alpm()
822 crtc_state->has_panel_replay ? in intel_psr_enable_sink()
827 drm_dp_dpcd_writeb(&intel_dp->aux, DP_SET_POWER, DP_SET_POWER_D0); in intel_psr_enable_sink()
832 struct intel_display *display = to_intel_display(intel_dp); in intel_psr1_get_tp_time() local
833 struct intel_connector *connector = intel_dp->attached_connector; in intel_psr1_get_tp_time()
834 struct drm_i915_private *dev_priv = to_i915(display->drm); in intel_psr1_get_tp_time()
837 if (DISPLAY_VER(display) >= 11) in intel_psr1_get_tp_time()
840 if (display->params.psr_safest_params) { in intel_psr1_get_tp_time()
846 if (connector->panel.vbt.psr.tp1_wakeup_time_us == 0) in intel_psr1_get_tp_time()
848 else if (connector->panel.vbt.psr.tp1_wakeup_time_us <= 100) in intel_psr1_get_tp_time()
850 else if (connector->panel.vbt.psr.tp1_wakeup_time_us <= 500) in intel_psr1_get_tp_time()
855 if (connector->panel.vbt.psr.tp2_tp3_wakeup_time_us == 0) in intel_psr1_get_tp_time()
857 else if (connector->panel.vbt.psr.tp2_tp3_wakeup_time_us <= 100) in intel_psr1_get_tp_time()
859 else if (connector->panel.vbt.psr.tp2_tp3_wakeup_time_us <= 500) in intel_psr1_get_tp_time()
869 connector->panel.vbt.psr.tp1_wakeup_time_us == 0 && in intel_psr1_get_tp_time()
870 connector->panel.vbt.psr.tp2_tp3_wakeup_time_us == 0) in intel_psr1_get_tp_time()
874 if (intel_dp_source_supports_tps3(display) && in intel_psr1_get_tp_time()
875 drm_dp_tps3_supported(intel_dp->dpcd)) in intel_psr1_get_tp_time()
885 struct intel_display *display = to_intel_display(intel_dp); in psr_compute_idle_frames() local
886 struct intel_connector *connector = intel_dp->attached_connector; in psr_compute_idle_frames()
890 * off-by-one issue that HW has in some cases. in psr_compute_idle_frames()
892 idle_frames = max(6, connector->panel.vbt.psr.idle_frames); in psr_compute_idle_frames()
893 idle_frames = max(idle_frames, intel_dp->psr.sink_sync_latency + 1); in psr_compute_idle_frames()
895 if (drm_WARN_ON(display->drm, idle_frames > 0xf)) in psr_compute_idle_frames()
903 struct intel_display *display = to_intel_display(intel_dp); in hsw_activate_psr1() local
904 struct drm_i915_private *dev_priv = to_i915(display->drm); in hsw_activate_psr1()
905 enum transcoder cpu_transcoder = intel_dp->psr.transcoder; in hsw_activate_psr1()
911 if (DISPLAY_VER(display) < 20) in hsw_activate_psr1()
917 if (intel_dp->psr.link_standby) in hsw_activate_psr1()
922 if (DISPLAY_VER(display) >= 8) in hsw_activate_psr1()
925 if (DISPLAY_VER(display) >= 20) in hsw_activate_psr1()
926 val |= LNL_EDP_PSR_ENTRY_SETUP_FRAMES(intel_dp->psr.entry_setup_frames); in hsw_activate_psr1()
928 intel_de_rmw(display, psr_ctl_reg(display, cpu_transcoder), in hsw_activate_psr1()
934 struct intel_display *display = to_intel_display(intel_dp); in intel_psr2_get_tp_time() local
935 struct intel_connector *connector = intel_dp->attached_connector; in intel_psr2_get_tp_time()
938 if (display->params.psr_safest_params) in intel_psr2_get_tp_time()
941 if (connector->panel.vbt.psr.psr2_tp2_tp3_wakeup_time_us >= 0 && in intel_psr2_get_tp_time()
942 connector->panel.vbt.psr.psr2_tp2_tp3_wakeup_time_us <= 50) in intel_psr2_get_tp_time()
944 else if (connector->panel.vbt.psr.psr2_tp2_tp3_wakeup_time_us <= 100) in intel_psr2_get_tp_time()
946 else if (connector->panel.vbt.psr.psr2_tp2_tp3_wakeup_time_us <= 500) in intel_psr2_get_tp_time()
956 return intel_dp->alpm_parameters.io_wake_lines < 9 && in psr2_block_count_lines()
957 intel_dp->alpm_parameters.fast_wake_lines < 9 ? 8 : 12; in psr2_block_count_lines()
970 intel_dp->psr.sink_sync_latency + 1, in frames_before_su_entry()
974 if (intel_dp->psr.entry_setup_frames >= frames_before_su_entry) in frames_before_su_entry()
975 frames_before_su_entry = intel_dp->psr.entry_setup_frames + 1; in frames_before_su_entry()
982 struct intel_display *display = to_intel_display(intel_dp); in dg2_activate_panel_replay() local
983 struct intel_psr *psr = &intel_dp->psr; in dg2_activate_panel_replay()
984 enum transcoder cpu_transcoder = intel_dp->psr.transcoder; in dg2_activate_panel_replay()
986 if (intel_dp_is_edp(intel_dp) && psr->sel_update_enabled) { in dg2_activate_panel_replay()
987 u32 val = psr->su_region_et_enabled ? in dg2_activate_panel_replay()
990 if (intel_dp->psr.req_psr2_sdp_prior_scanline) in dg2_activate_panel_replay()
993 intel_de_write(display, EDP_PSR2_CTL(display, cpu_transcoder), in dg2_activate_panel_replay()
997 intel_de_rmw(display, in dg2_activate_panel_replay()
998 PSR2_MAN_TRK_CTL(display, intel_dp->psr.transcoder), in dg2_activate_panel_replay()
1001 intel_de_rmw(display, TRANS_DP2_CTL(intel_dp->psr.transcoder), 0, in dg2_activate_panel_replay()
1007 struct intel_display *display = to_intel_display(intel_dp); in hsw_activate_psr2() local
1008 struct drm_i915_private *dev_priv = to_i915(display->drm); in hsw_activate_psr2()
1009 enum transcoder cpu_transcoder = intel_dp->psr.transcoder; in hsw_activate_psr2()
1015 if (DISPLAY_VER(display) < 14 && !IS_ALDERLAKE_P(dev_priv)) in hsw_activate_psr2()
1018 if (DISPLAY_VER(display) >= 10 && DISPLAY_VER(display) < 13) in hsw_activate_psr2()
1025 if (DISPLAY_VER(display) >= 12 && DISPLAY_VER(display) < 20) { in hsw_activate_psr2()
1032 /* Wa_22012278275:adl-p */ in hsw_activate_psr2()
1033 if (IS_ALDERLAKE_P(dev_priv) && IS_DISPLAY_STEP(display, STEP_A0, STEP_E0)) { in hsw_activate_psr2()
1050 tmp = map[intel_dp->alpm_parameters.io_wake_lines - in hsw_activate_psr2()
1054 tmp = map[intel_dp->alpm_parameters.fast_wake_lines - TGL_EDP_PSR2_FAST_WAKE_MIN_LINES]; in hsw_activate_psr2()
1056 } else if (DISPLAY_VER(display) >= 20) { in hsw_activate_psr2()
1057 val |= LNL_EDP_PSR2_IO_BUFFER_WAKE(intel_dp->alpm_parameters.io_wake_lines); in hsw_activate_psr2()
1058 } else if (DISPLAY_VER(display) >= 12) { in hsw_activate_psr2()
1059 val |= TGL_EDP_PSR2_IO_BUFFER_WAKE(intel_dp->alpm_parameters.io_wake_lines); in hsw_activate_psr2()
1060 val |= TGL_EDP_PSR2_FAST_WAKE(intel_dp->alpm_parameters.fast_wake_lines); in hsw_activate_psr2()
1061 } else if (DISPLAY_VER(display) >= 9) { in hsw_activate_psr2()
1062 val |= EDP_PSR2_IO_BUFFER_WAKE(intel_dp->alpm_parameters.io_wake_lines); in hsw_activate_psr2()
1063 val |= EDP_PSR2_FAST_WAKE(intel_dp->alpm_parameters.fast_wake_lines); in hsw_activate_psr2()
1066 if (intel_dp->psr.req_psr2_sdp_prior_scanline) in hsw_activate_psr2()
1069 if (DISPLAY_VER(display) >= 20) in hsw_activate_psr2()
1070 psr_val |= LNL_EDP_PSR_ENTRY_SETUP_FRAMES(intel_dp->psr.entry_setup_frames); in hsw_activate_psr2()
1072 if (intel_dp->psr.psr2_sel_fetch_enabled) { in hsw_activate_psr2()
1075 tmp = intel_de_read(display, in hsw_activate_psr2()
1076 PSR2_MAN_TRK_CTL(display, cpu_transcoder)); in hsw_activate_psr2()
1077 drm_WARN_ON(display->drm, !(tmp & PSR2_MAN_TRK_CTL_ENABLE)); in hsw_activate_psr2()
1078 } else if (HAS_PSR2_SEL_FETCH(display)) { in hsw_activate_psr2()
1079 intel_de_write(display, in hsw_activate_psr2()
1080 PSR2_MAN_TRK_CTL(display, cpu_transcoder), 0); in hsw_activate_psr2()
1083 if (intel_dp->psr.su_region_et_enabled) in hsw_activate_psr2()
1090 intel_de_write(display, psr_ctl_reg(display, cpu_transcoder), psr_val); in hsw_activate_psr2()
1092 intel_de_write(display, EDP_PSR2_CTL(display, cpu_transcoder), val); in hsw_activate_psr2()
1096 transcoder_has_psr2(struct intel_display *display, enum transcoder cpu_transcoder) in transcoder_has_psr2() argument
1098 struct drm_i915_private *dev_priv = to_i915(display->drm); in transcoder_has_psr2()
1100 if (IS_ALDERLAKE_P(dev_priv) || DISPLAY_VER(display) >= 14) in transcoder_has_psr2()
1102 else if (DISPLAY_VER(display) >= 12) in transcoder_has_psr2()
1104 else if (DISPLAY_VER(display) >= 9) in transcoder_has_psr2()
1112 if (!crtc_state->hw.active) in intel_get_frame_time_us()
1116 drm_mode_vrefresh(&crtc_state->hw.adjusted_mode)); in intel_get_frame_time_us()
1122 struct intel_display *display = to_intel_display(intel_dp); in psr2_program_idle_frames() local
1123 enum transcoder cpu_transcoder = intel_dp->psr.transcoder; in psr2_program_idle_frames()
1125 intel_de_rmw(display, EDP_PSR2_CTL(display, cpu_transcoder), in psr2_program_idle_frames()
1132 struct intel_display *display = to_intel_display(intel_dp); in tgl_psr2_enable_dc3co() local
1135 intel_display_power_set_target_dc_state(display, DC_STATE_EN_DC3CO); in tgl_psr2_enable_dc3co()
1140 struct intel_display *display = to_intel_display(intel_dp); in tgl_psr2_disable_dc3co() local
1142 intel_display_power_set_target_dc_state(display, DC_STATE_EN_UPTO_DC6); in tgl_psr2_disable_dc3co()
1151 mutex_lock(&intel_dp->psr.lock); in tgl_dc3co_disable_work()
1153 if (delayed_work_pending(&intel_dp->psr.dc3co_work)) in tgl_dc3co_disable_work()
1158 mutex_unlock(&intel_dp->psr.lock); in tgl_dc3co_disable_work()
1163 if (!intel_dp->psr.dc3co_exitline) in tgl_disallow_dc3co_on_psr2_exit()
1166 cancel_delayed_work(&intel_dp->psr.dc3co_work); in tgl_disallow_dc3co_on_psr2_exit()
1175 struct intel_display *display = to_intel_display(intel_dp); in dc3co_is_pipe_port_compatible() local
1177 enum pipe pipe = to_intel_crtc(crtc_state->uapi.crtc)->pipe; in dc3co_is_pipe_port_compatible()
1178 struct drm_i915_private *dev_priv = to_i915(display->drm); in dc3co_is_pipe_port_compatible()
1179 enum port port = dig_port->base.port; in dc3co_is_pipe_port_compatible()
1181 if (IS_ALDERLAKE_P(dev_priv) || DISPLAY_VER(display) >= 14) in dc3co_is_pipe_port_compatible()
1191 struct intel_display *display = to_intel_display(intel_dp); in tgl_dc3co_exitline_compute_config() local
1192 struct drm_i915_private *dev_priv = to_i915(display->drm); in tgl_dc3co_exitline_compute_config()
1193 const u32 crtc_vdisplay = crtc_state->uapi.adjusted_mode.crtc_vdisplay; in tgl_dc3co_exitline_compute_config()
1194 struct i915_power_domains *power_domains = &display->power.domains; in tgl_dc3co_exitline_compute_config()
1208 if (crtc_state->enable_psr2_sel_fetch) in tgl_dc3co_exitline_compute_config()
1211 if (!(power_domains->allowed_dc_mask & DC_STATE_EN_DC3CO)) in tgl_dc3co_exitline_compute_config()
1217 /* Wa_16011303918:adl-p */ in tgl_dc3co_exitline_compute_config()
1218 if (IS_ALDERLAKE_P(dev_priv) && IS_DISPLAY_STEP(display, STEP_A0, STEP_B0)) in tgl_dc3co_exitline_compute_config()
1226 intel_usecs_to_scanlines(&crtc_state->uapi.adjusted_mode, 200) + 1; in tgl_dc3co_exitline_compute_config()
1228 if (drm_WARN_ON(display->drm, exit_scanlines > crtc_vdisplay)) in tgl_dc3co_exitline_compute_config()
1231 crtc_state->dc3co_exitline = crtc_vdisplay - exit_scanlines; in tgl_dc3co_exitline_compute_config()
1237 struct intel_display *display = to_intel_display(intel_dp); in intel_psr2_sel_fetch_config_valid() local
1239 if (!display->params.enable_psr2_sel_fetch && in intel_psr2_sel_fetch_config_valid()
1240 intel_dp->psr.debug != I915_PSR_DEBUG_ENABLE_SEL_FETCH) { in intel_psr2_sel_fetch_config_valid()
1241 drm_dbg_kms(display->drm, in intel_psr2_sel_fetch_config_valid()
1246 if (crtc_state->uapi.async_flip) { in intel_psr2_sel_fetch_config_valid()
1247 drm_dbg_kms(display->drm, in intel_psr2_sel_fetch_config_valid()
1252 return crtc_state->enable_psr2_sel_fetch = true; in intel_psr2_sel_fetch_config_valid()
1258 struct intel_display *display = to_intel_display(intel_dp); in psr2_granularity_check() local
1259 struct drm_i915_private *dev_priv = to_i915(display->drm); in psr2_granularity_check()
1260 const struct drm_dsc_config *vdsc_cfg = &crtc_state->dsc.config; in psr2_granularity_check()
1261 const int crtc_hdisplay = crtc_state->hw.adjusted_mode.crtc_hdisplay; in psr2_granularity_check()
1262 const int crtc_vdisplay = crtc_state->hw.adjusted_mode.crtc_vdisplay; in psr2_granularity_check()
1266 if (crtc_hdisplay % intel_dp->psr.su_w_granularity) in psr2_granularity_check()
1269 if (crtc_vdisplay % intel_dp->psr.su_y_granularity) in psr2_granularity_check()
1273 if (!crtc_state->enable_psr2_sel_fetch) in psr2_granularity_check()
1274 return intel_dp->psr.su_y_granularity == 4; in psr2_granularity_check()
1281 if (IS_ALDERLAKE_P(dev_priv) || DISPLAY_VER(display) >= 14) in psr2_granularity_check()
1282 y_granularity = intel_dp->psr.su_y_granularity; in psr2_granularity_check()
1283 else if (intel_dp->psr.su_y_granularity <= 2) in psr2_granularity_check()
1285 else if ((intel_dp->psr.su_y_granularity % 4) == 0) in psr2_granularity_check()
1286 y_granularity = intel_dp->psr.su_y_granularity; in psr2_granularity_check()
1291 if (crtc_state->dsc.compression_enable && in psr2_granularity_check()
1292 vdsc_cfg->slice_height % y_granularity) in psr2_granularity_check()
1295 crtc_state->su_y_granularity = y_granularity; in psr2_granularity_check()
1302 struct intel_display *display = to_intel_display(intel_dp); in _compute_psr2_sdp_prior_scanline_indication() local
1303 const struct drm_display_mode *adjusted_mode = &crtc_state->uapi.adjusted_mode; in _compute_psr2_sdp_prior_scanline_indication()
1306 hblank_total = adjusted_mode->crtc_hblank_end - adjusted_mode->crtc_hblank_start; in _compute_psr2_sdp_prior_scanline_indication()
1307 hblank_ns = div_u64(1000000ULL * hblank_total, adjusted_mode->crtc_clock); in _compute_psr2_sdp_prior_scanline_indication()
1310 req_ns = ((60 / crtc_state->lane_count) + 11) * 1000 / (crtc_state->port_clock / 1000); in _compute_psr2_sdp_prior_scanline_indication()
1312 if ((hblank_ns - req_ns) > 100) in _compute_psr2_sdp_prior_scanline_indication()
1315 /* Not supported <13 / Wa_22012279113:adl-p */ in _compute_psr2_sdp_prior_scanline_indication()
1316 if (DISPLAY_VER(display) < 14 || intel_dp->edp_dpcd[0] < DP_EDP_14b) in _compute_psr2_sdp_prior_scanline_indication()
1319 crtc_state->req_psr2_sdp_prior_scanline = true; in _compute_psr2_sdp_prior_scanline_indication()
1326 struct intel_display *display = to_intel_display(intel_dp); in intel_psr_entry_setup_frames() local
1327 int psr_setup_time = drm_dp_psr_setup_time(intel_dp->psr_dpcd); in intel_psr_entry_setup_frames()
1331 drm_dbg_kms(display->drm, in intel_psr_entry_setup_frames()
1333 intel_dp->psr_dpcd[1]); in intel_psr_entry_setup_frames()
1334 return -ETIME; in intel_psr_entry_setup_frames()
1338 adjusted_mode->crtc_vtotal - adjusted_mode->crtc_vdisplay - 1) { in intel_psr_entry_setup_frames()
1339 if (DISPLAY_VER(display) >= 20) { in intel_psr_entry_setup_frames()
1342 drm_dbg_kms(display->drm, in intel_psr_entry_setup_frames()
1346 drm_dbg_kms(display->drm, in intel_psr_entry_setup_frames()
1349 return -ETIME; in intel_psr_entry_setup_frames()
1360 struct intel_display *display = to_intel_display(intel_dp); in wake_lines_fit_into_vblank() local
1361 int vblank = crtc_state->hw.adjusted_mode.crtc_vblank_end - in wake_lines_fit_into_vblank()
1362 crtc_state->hw.adjusted_mode.crtc_vblank_start; in wake_lines_fit_into_vblank()
1366 wake_lines = intel_dp->alpm_parameters.aux_less_wake_lines; in wake_lines_fit_into_vblank()
1368 wake_lines = DISPLAY_VER(display) < 20 ? in wake_lines_fit_into_vblank()
1370 intel_dp->alpm_parameters.io_wake_lines; in wake_lines_fit_into_vblank()
1372 if (crtc_state->req_psr2_sdp_prior_scanline) in wake_lines_fit_into_vblank()
1373 vblank -= 1; in wake_lines_fit_into_vblank()
1386 struct intel_display *display = to_intel_display(intel_dp); in alpm_config_valid() local
1389 drm_dbg_kms(display->drm, in alpm_config_valid()
1395 drm_dbg_kms(display->drm, in alpm_config_valid()
1406 struct intel_display *display = to_intel_display(intel_dp); in intel_psr2_config_valid() local
1407 struct drm_i915_private *dev_priv = to_i915(display->drm); in intel_psr2_config_valid()
1408 int crtc_hdisplay = crtc_state->hw.adjusted_mode.crtc_hdisplay; in intel_psr2_config_valid()
1409 int crtc_vdisplay = crtc_state->hw.adjusted_mode.crtc_vdisplay; in intel_psr2_config_valid()
1412 if (!intel_dp->psr.sink_psr2_support) in intel_psr2_config_valid()
1417 drm_dbg_kms(display->drm, "PSR2 not supported by phy\n"); in intel_psr2_config_valid()
1424 drm_dbg_kms(display->drm, in intel_psr2_config_valid()
1429 if (IS_ALDERLAKE_P(dev_priv) && IS_DISPLAY_STEP(display, STEP_A0, STEP_B0)) { in intel_psr2_config_valid()
1430 drm_dbg_kms(display->drm, in intel_psr2_config_valid()
1435 if (!transcoder_has_psr2(display, crtc_state->cpu_transcoder)) { in intel_psr2_config_valid()
1436 drm_dbg_kms(display->drm, in intel_psr2_config_valid()
1438 transcoder_name(crtc_state->cpu_transcoder)); in intel_psr2_config_valid()
1443 * DSC and PSR2 cannot be enabled simultaneously. If a requested in intel_psr2_config_valid()
1447 if (crtc_state->dsc.compression_enable && in intel_psr2_config_valid()
1448 (DISPLAY_VER(display) < 14 && !IS_ALDERLAKE_P(dev_priv))) { in intel_psr2_config_valid()
1449 drm_dbg_kms(display->drm, in intel_psr2_config_valid()
1454 if (DISPLAY_VER(display) >= 20) { in intel_psr2_config_valid()
1457 max_bpp = crtc_state->pipe_bpp; in intel_psr2_config_valid()
1458 } else if (IS_DISPLAY_VER(display, 12, 14)) { in intel_psr2_config_valid()
1462 } else if (IS_DISPLAY_VER(display, 10, 11)) { in intel_psr2_config_valid()
1466 } else if (DISPLAY_VER(display) == 9) { in intel_psr2_config_valid()
1472 if (crtc_state->pipe_bpp > max_bpp) { in intel_psr2_config_valid()
1473 drm_dbg_kms(display->drm, in intel_psr2_config_valid()
1475 crtc_state->pipe_bpp, max_bpp); in intel_psr2_config_valid()
1479 /* Wa_16011303918:adl-p */ in intel_psr2_config_valid()
1480 if (crtc_state->vrr.enable && in intel_psr2_config_valid()
1481 IS_ALDERLAKE_P(dev_priv) && IS_DISPLAY_STEP(display, STEP_A0, STEP_B0)) { in intel_psr2_config_valid()
1482 drm_dbg_kms(display->drm, in intel_psr2_config_valid()
1490 if (!crtc_state->enable_psr2_sel_fetch && in intel_psr2_config_valid()
1492 drm_dbg_kms(display->drm, in intel_psr2_config_valid()
1507 struct intel_display *display = to_intel_display(intel_dp); in intel_sel_update_config_valid() local
1509 if (HAS_PSR2_SEL_FETCH(display) && in intel_sel_update_config_valid()
1511 !HAS_PSR_HW_TRACKING(display)) { in intel_sel_update_config_valid()
1512 drm_dbg_kms(display->drm, in intel_sel_update_config_valid()
1518 drm_dbg_kms(display->drm, in intel_sel_update_config_valid()
1523 if (!crtc_state->has_panel_replay && !intel_psr2_config_valid(intel_dp, crtc_state)) in intel_sel_update_config_valid()
1527 drm_dbg_kms(display->drm, in intel_sel_update_config_valid()
1532 if (crtc_state->has_panel_replay && (DISPLAY_VER(display) < 14 || in intel_sel_update_config_valid()
1533 !intel_dp->psr.sink_panel_replay_su_support)) in intel_sel_update_config_valid()
1536 if (crtc_state->crc_enabled) { in intel_sel_update_config_valid()
1537 drm_dbg_kms(display->drm, in intel_sel_update_config_valid()
1543 drm_dbg_kms(display->drm, in intel_sel_update_config_valid()
1548 crtc_state->enable_psr2_su_region_et = in intel_sel_update_config_valid()
1549 psr2_su_region_et_valid(intel_dp, crtc_state->has_panel_replay); in intel_sel_update_config_valid()
1554 crtc_state->enable_psr2_sel_fetch = false; in intel_sel_update_config_valid()
1561 struct intel_display *display = to_intel_display(intel_dp); in _psr_compute_config() local
1562 const struct drm_display_mode *adjusted_mode = &crtc_state->hw.adjusted_mode; in _psr_compute_config()
1571 intel_dp->psr.entry_setup_frames = entry_setup_frames; in _psr_compute_config()
1573 drm_dbg_kms(display->drm, in _psr_compute_config()
1586 struct intel_display *display = to_intel_display(intel_dp); in _panel_replay_compute_config() local
1588 to_intel_connector(conn_state->connector); in _panel_replay_compute_config()
1589 struct intel_hdcp *hdcp = &connector->hdcp; in _panel_replay_compute_config()
1595 drm_dbg_kms(display->drm, "Panel Replay disabled by flag\n"); in _panel_replay_compute_config()
1604 if (to_intel_crtc(crtc_state->uapi.crtc)->pipe != PIPE_A && in _panel_replay_compute_config()
1605 to_intel_crtc(crtc_state->uapi.crtc)->pipe != PIPE_B) in _panel_replay_compute_config()
1610 drm_dbg_kms(display->drm, in _panel_replay_compute_config()
1616 if (conn_state->content_protection == in _panel_replay_compute_config()
1618 (conn_state->content_protection == in _panel_replay_compute_config()
1619 DRM_MODE_CONTENT_PROTECTION_ENABLED && hdcp->value == in _panel_replay_compute_config()
1621 drm_dbg_kms(display->drm, in _panel_replay_compute_config()
1629 if (crtc_state->crc_enabled) { in _panel_replay_compute_config()
1630 drm_dbg_kms(display->drm, in _panel_replay_compute_config()
1641 struct intel_display *display = to_intel_display(intel_dp); in intel_psr_needs_wa_18037818876() local
1643 return (DISPLAY_VER(display) == 20 && intel_dp->psr.entry_setup_frames > 0 && in intel_psr_needs_wa_18037818876()
1644 !crtc_state->has_sel_update); in intel_psr_needs_wa_18037818876()
1651 struct intel_display *display = to_intel_display(intel_dp); in intel_psr_compute_config() local
1652 const struct drm_display_mode *adjusted_mode = &crtc_state->hw.adjusted_mode; in intel_psr_compute_config()
1655 drm_dbg_kms(display->drm, "PSR disabled by flag\n"); in intel_psr_compute_config()
1659 if (intel_dp->psr.sink_not_reliable) { in intel_psr_compute_config()
1660 drm_dbg_kms(display->drm, in intel_psr_compute_config()
1665 if (adjusted_mode->flags & DRM_MODE_FLAG_INTERLACE) { in intel_psr_compute_config()
1666 drm_dbg_kms(display->drm, in intel_psr_compute_config()
1674 * PSR is a transcoder level feature. in intel_psr_compute_config()
1676 if (crtc_state->joiner_pipes) { in intel_psr_compute_config()
1677 drm_dbg_kms(display->drm, in intel_psr_compute_config()
1685 if (crtc_state->vrr.enable) in intel_psr_compute_config()
1688 crtc_state->has_panel_replay = _panel_replay_compute_config(intel_dp, in intel_psr_compute_config()
1692 crtc_state->has_psr = crtc_state->has_panel_replay ? true : in intel_psr_compute_config()
1695 if (!crtc_state->has_psr) in intel_psr_compute_config()
1698 crtc_state->has_sel_update = intel_sel_update_config_valid(intel_dp, crtc_state); in intel_psr_compute_config()
1702 crtc_state->has_psr = false; in intel_psr_compute_config()
1703 drm_dbg_kms(display->drm, in intel_psr_compute_config()
1711 struct intel_display *display = to_intel_display(encoder); in intel_psr_get_config() local
1713 enum transcoder cpu_transcoder = pipe_config->cpu_transcoder; in intel_psr_get_config()
1720 intel_dp = &dig_port->dp; in intel_psr_get_config()
1724 mutex_lock(&intel_dp->psr.lock); in intel_psr_get_config()
1725 if (!intel_dp->psr.enabled) in intel_psr_get_config()
1728 if (intel_dp->psr.panel_replay_enabled) { in intel_psr_get_config()
1729 pipe_config->has_psr = pipe_config->has_panel_replay = true; in intel_psr_get_config()
1735 pipe_config->has_psr = true; in intel_psr_get_config()
1738 pipe_config->has_sel_update = intel_dp->psr.sel_update_enabled; in intel_psr_get_config()
1739 pipe_config->infoframes.enable |= intel_hdmi_infoframe_enable(DP_SDP_VSC); in intel_psr_get_config()
1741 if (!intel_dp->psr.sel_update_enabled) in intel_psr_get_config()
1744 if (HAS_PSR2_SEL_FETCH(display)) { in intel_psr_get_config()
1745 val = intel_de_read(display, in intel_psr_get_config()
1746 PSR2_MAN_TRK_CTL(display, cpu_transcoder)); in intel_psr_get_config()
1748 pipe_config->enable_psr2_sel_fetch = true; in intel_psr_get_config()
1751 pipe_config->enable_psr2_su_region_et = intel_dp->psr.su_region_et_enabled; in intel_psr_get_config()
1753 if (DISPLAY_VER(display) >= 12) { in intel_psr_get_config()
1754 val = intel_de_read(display, in intel_psr_get_config()
1755 TRANS_EXITLINE(display, cpu_transcoder)); in intel_psr_get_config()
1756 pipe_config->dc3co_exitline = REG_FIELD_GET(EXITLINE_MASK, val); in intel_psr_get_config()
1759 mutex_unlock(&intel_dp->psr.lock); in intel_psr_get_config()
1764 struct intel_display *display = to_intel_display(intel_dp); in intel_psr_activate() local
1765 enum transcoder cpu_transcoder = intel_dp->psr.transcoder; in intel_psr_activate()
1767 drm_WARN_ON(display->drm, in intel_psr_activate()
1768 transcoder_has_psr2(display, cpu_transcoder) && in intel_psr_activate()
1769 intel_de_read(display, EDP_PSR2_CTL(display, cpu_transcoder)) & EDP_PSR2_ENABLE); in intel_psr_activate()
1771 drm_WARN_ON(display->drm, in intel_psr_activate()
1772 intel_de_read(display, psr_ctl_reg(display, cpu_transcoder)) & EDP_PSR_ENABLE); in intel_psr_activate()
1774 drm_WARN_ON(display->drm, intel_dp->psr.active); in intel_psr_activate()
1776 lockdep_assert_held(&intel_dp->psr.lock); in intel_psr_activate()
1778 /* psr1, psr2 and panel-replay are mutually exclusive.*/ in intel_psr_activate()
1779 if (intel_dp->psr.panel_replay_enabled) in intel_psr_activate()
1781 else if (intel_dp->psr.sel_update_enabled) in intel_psr_activate()
1786 intel_dp->psr.active = true; in intel_psr_activate()
1796 struct intel_display *display = to_intel_display(intel_dp); in wm_optimization_wa() local
1797 enum pipe pipe = intel_dp->psr.pipe; in wm_optimization_wa()
1801 if (IS_DISPLAY_VER(display, 11, 14) && crtc_state->wm_level_disabled) in wm_optimization_wa()
1805 if (DISPLAY_VER(display) == 12 && in wm_optimization_wa()
1806 crtc_state->hw.adjusted_mode.crtc_vblank_start != in wm_optimization_wa()
1807 crtc_state->hw.adjusted_mode.crtc_vdisplay) in wm_optimization_wa()
1811 intel_de_rmw(display, GEN8_CHICKEN_DCPR_1, in wm_optimization_wa()
1814 intel_de_rmw(display, GEN8_CHICKEN_DCPR_1, in wm_optimization_wa()
1821 struct intel_display *display = to_intel_display(intel_dp); in intel_psr_enable_source() local
1822 struct drm_i915_private *dev_priv = to_i915(display->drm); in intel_psr_enable_source()
1823 enum transcoder cpu_transcoder = intel_dp->psr.transcoder; in intel_psr_enable_source()
1830 if (DISPLAY_VER(display) < 9) in intel_psr_enable_source()
1847 if (DISPLAY_VER(display) < 20 || intel_dp_is_edp(intel_dp)) in intel_psr_enable_source()
1854 * For some unknown reason on HSW non-ULT (or at least on in intel_psr_enable_source()
1857 * higher than should be possible with an external display. in intel_psr_enable_source()
1858 * As a workaround leave LPSP unmasked to prevent PSR entry in intel_psr_enable_source()
1861 if (DISPLAY_VER(display) >= 8 || IS_HASWELL_ULT(dev_priv)) in intel_psr_enable_source()
1864 if (DISPLAY_VER(display) < 20) in intel_psr_enable_source()
1871 if (IS_DISPLAY_VER(display, 9, 10)) in intel_psr_enable_source()
1879 intel_de_write(display, psr_debug_reg(display, cpu_transcoder), mask); in intel_psr_enable_source()
1887 if (intel_dp->psr.dc3co_exitline) in intel_psr_enable_source()
1888 intel_de_rmw(display, in intel_psr_enable_source()
1889 TRANS_EXITLINE(display, cpu_transcoder), in intel_psr_enable_source()
1891 intel_dp->psr.dc3co_exitline << EXITLINE_SHIFT | EXITLINE_ENABLE); in intel_psr_enable_source()
1893 if (HAS_PSR_HW_TRACKING(display) && HAS_PSR2_SEL_FETCH(display)) in intel_psr_enable_source()
1894 intel_de_rmw(display, CHICKEN_PAR1_1, IGNORE_PSR2_HW_TRACKING, in intel_psr_enable_source()
1895 intel_dp->psr.psr2_sel_fetch_enabled ? in intel_psr_enable_source()
1907 if (intel_dp->psr.sel_update_enabled) { in intel_psr_enable_source()
1908 if (DISPLAY_VER(display) == 9) in intel_psr_enable_source()
1909 intel_de_rmw(display, CHICKEN_TRANS(display, cpu_transcoder), 0, in intel_psr_enable_source()
1915 * All supported adlp panels have 1-based X granularity, this may in intel_psr_enable_source()
1916 * cause issues if non-supported panels are used. in intel_psr_enable_source()
1918 if (!intel_dp->psr.panel_replay_enabled && in intel_psr_enable_source()
1919 (IS_DISPLAY_VERx100_STEP(display, 1400, STEP_A0, STEP_B0) || in intel_psr_enable_source()
1921 intel_de_rmw(display, CHICKEN_TRANS(display, cpu_transcoder), in intel_psr_enable_source()
1925 if (!intel_dp->psr.panel_replay_enabled && in intel_psr_enable_source()
1926 IS_DISPLAY_VERx100_STEP(display, 1400, STEP_A0, STEP_B0)) in intel_psr_enable_source()
1927 intel_de_rmw(display, in intel_psr_enable_source()
1928 MTL_CLKGATE_DIS_TRANS(display, cpu_transcoder), in intel_psr_enable_source()
1932 intel_de_rmw(display, CLKGATE_DIS_MISC, 0, in intel_psr_enable_source()
1939 struct intel_display *display = to_intel_display(intel_dp); in psr_interrupt_error_check() local
1940 enum transcoder cpu_transcoder = intel_dp->psr.transcoder; in psr_interrupt_error_check()
1943 if (intel_dp->psr.panel_replay_enabled) in psr_interrupt_error_check()
1947 * If a PSR error happened and the driver is reloaded, the EDP_PSR_IIR in psr_interrupt_error_check()
1954 val = intel_de_read(display, psr_iir_reg(display, cpu_transcoder)); in psr_interrupt_error_check()
1957 intel_dp->psr.sink_not_reliable = true; in psr_interrupt_error_check()
1958 drm_dbg_kms(display->drm, in psr_interrupt_error_check()
1970 struct intel_display *display = to_intel_display(intel_dp); in intel_psr_enable_locked() local
1974 drm_WARN_ON(display->drm, intel_dp->psr.enabled); in intel_psr_enable_locked()
1976 intel_dp->psr.sel_update_enabled = crtc_state->has_sel_update; in intel_psr_enable_locked()
1977 intel_dp->psr.panel_replay_enabled = crtc_state->has_panel_replay; in intel_psr_enable_locked()
1978 intel_dp->psr.busy_frontbuffer_bits = 0; in intel_psr_enable_locked()
1979 intel_dp->psr.pipe = to_intel_crtc(crtc_state->uapi.crtc)->pipe; in intel_psr_enable_locked()
1980 intel_dp->psr.transcoder = crtc_state->cpu_transcoder; in intel_psr_enable_locked()
1983 intel_dp->psr.dc3co_exit_delay = val; in intel_psr_enable_locked()
1984 intel_dp->psr.dc3co_exitline = crtc_state->dc3co_exitline; in intel_psr_enable_locked()
1985 intel_dp->psr.psr2_sel_fetch_enabled = crtc_state->enable_psr2_sel_fetch; in intel_psr_enable_locked()
1986 intel_dp->psr.su_region_et_enabled = crtc_state->enable_psr2_su_region_et; in intel_psr_enable_locked()
1987 intel_dp->psr.psr2_sel_fetch_cff_enabled = false; in intel_psr_enable_locked()
1988 intel_dp->psr.req_psr2_sdp_prior_scanline = in intel_psr_enable_locked()
1989 crtc_state->req_psr2_sdp_prior_scanline; in intel_psr_enable_locked()
1994 if (intel_dp->psr.panel_replay_enabled) { in intel_psr_enable_locked()
1995 drm_dbg_kms(display->drm, "Enabling Panel Replay\n"); in intel_psr_enable_locked()
1997 drm_dbg_kms(display->drm, "Enabling PSR%s\n", in intel_psr_enable_locked()
1998 intel_dp->psr.sel_update_enabled ? "2" : "1"); in intel_psr_enable_locked()
2008 intel_snps_phy_update_psr_power_state(&dig_port->base, true); in intel_psr_enable_locked()
2011 intel_dp->psr.enabled = true; in intel_psr_enable_locked()
2012 intel_dp->psr.paused = false; in intel_psr_enable_locked()
2021 intel_dp->psr.link_ok = true; in intel_psr_enable_locked()
2028 struct intel_display *display = to_intel_display(intel_dp); in intel_psr_exit() local
2029 enum transcoder cpu_transcoder = intel_dp->psr.transcoder; in intel_psr_exit()
2032 if (!intel_dp->psr.active) { in intel_psr_exit()
2033 if (transcoder_has_psr2(display, cpu_transcoder)) { in intel_psr_exit()
2034 val = intel_de_read(display, in intel_psr_exit()
2035 EDP_PSR2_CTL(display, cpu_transcoder)); in intel_psr_exit()
2036 drm_WARN_ON(display->drm, val & EDP_PSR2_ENABLE); in intel_psr_exit()
2039 val = intel_de_read(display, in intel_psr_exit()
2040 psr_ctl_reg(display, cpu_transcoder)); in intel_psr_exit()
2041 drm_WARN_ON(display->drm, val & EDP_PSR_ENABLE); in intel_psr_exit()
2046 if (intel_dp->psr.panel_replay_enabled) { in intel_psr_exit()
2047 intel_de_rmw(display, TRANS_DP2_CTL(intel_dp->psr.transcoder), in intel_psr_exit()
2049 } else if (intel_dp->psr.sel_update_enabled) { in intel_psr_exit()
2052 val = intel_de_rmw(display, in intel_psr_exit()
2053 EDP_PSR2_CTL(display, cpu_transcoder), in intel_psr_exit()
2056 drm_WARN_ON(display->drm, !(val & EDP_PSR2_ENABLE)); in intel_psr_exit()
2058 val = intel_de_rmw(display, in intel_psr_exit()
2059 psr_ctl_reg(display, cpu_transcoder), in intel_psr_exit()
2062 drm_WARN_ON(display->drm, !(val & EDP_PSR_ENABLE)); in intel_psr_exit()
2064 intel_dp->psr.active = false; in intel_psr_exit()
2069 struct intel_display *display = to_intel_display(intel_dp); in intel_psr_wait_exit_locked() local
2070 enum transcoder cpu_transcoder = intel_dp->psr.transcoder; in intel_psr_wait_exit_locked()
2074 if (intel_dp_is_edp(intel_dp) && (intel_dp->psr.sel_update_enabled || in intel_psr_wait_exit_locked()
2075 intel_dp->psr.panel_replay_enabled)) { in intel_psr_wait_exit_locked()
2076 psr_status = EDP_PSR2_STATUS(display, cpu_transcoder); in intel_psr_wait_exit_locked()
2079 psr_status = psr_status_reg(display, cpu_transcoder); in intel_psr_wait_exit_locked()
2084 if (intel_de_wait_for_clear(display, psr_status, in intel_psr_wait_exit_locked()
2086 drm_err(display->drm, "Timed out waiting PSR idle state\n"); in intel_psr_wait_exit_locked()
2091 struct intel_display *display = to_intel_display(intel_dp); in intel_psr_disable_locked() local
2092 struct drm_i915_private *dev_priv = to_i915(display->drm); in intel_psr_disable_locked()
2093 enum transcoder cpu_transcoder = intel_dp->psr.transcoder; in intel_psr_disable_locked()
2095 lockdep_assert_held(&intel_dp->psr.lock); in intel_psr_disable_locked()
2097 if (!intel_dp->psr.enabled) in intel_psr_disable_locked()
2100 if (intel_dp->psr.panel_replay_enabled) in intel_psr_disable_locked()
2101 drm_dbg_kms(display->drm, "Disabling Panel Replay\n"); in intel_psr_disable_locked()
2103 drm_dbg_kms(display->drm, "Disabling PSR%s\n", in intel_psr_disable_locked()
2104 intel_dp->psr.sel_update_enabled ? "2" : "1"); in intel_psr_disable_locked()
2113 if (DISPLAY_VER(display) >= 11) in intel_psr_disable_locked()
2114 intel_de_rmw(display, GEN8_CHICKEN_DCPR_1, in intel_psr_disable_locked()
2115 LATENCY_REPORTING_REMOVED(intel_dp->psr.pipe), 0); in intel_psr_disable_locked()
2117 if (intel_dp->psr.sel_update_enabled) { in intel_psr_disable_locked()
2119 if (!intel_dp->psr.panel_replay_enabled && in intel_psr_disable_locked()
2120 IS_DISPLAY_VERx100_STEP(display, 1400, STEP_A0, STEP_B0)) in intel_psr_disable_locked()
2121 intel_de_rmw(display, in intel_psr_disable_locked()
2122 MTL_CLKGATE_DIS_TRANS(display, cpu_transcoder), in intel_psr_disable_locked()
2125 intel_de_rmw(display, CLKGATE_DIS_MISC, in intel_psr_disable_locked()
2130 intel_snps_phy_update_psr_power_state(&dp_to_dig_port(intel_dp)->base, false); in intel_psr_disable_locked()
2133 if (intel_dp->psr.panel_replay_enabled && intel_dp_is_edp(intel_dp)) { in intel_psr_disable_locked()
2134 intel_de_rmw(display, ALPM_CTL(display, cpu_transcoder), in intel_psr_disable_locked()
2138 intel_de_rmw(display, in intel_psr_disable_locked()
2144 if (!intel_dp->psr.panel_replay_enabled) { in intel_psr_disable_locked()
2145 drm_dp_dpcd_writeb(&intel_dp->aux, DP_PSR_EN_CFG, 0); in intel_psr_disable_locked()
2147 if (intel_dp->psr.sel_update_enabled) in intel_psr_disable_locked()
2148 drm_dp_dpcd_writeb(&intel_dp->aux, in intel_psr_disable_locked()
2152 intel_dp->psr.enabled = false; in intel_psr_disable_locked()
2153 intel_dp->psr.panel_replay_enabled = false; in intel_psr_disable_locked()
2154 intel_dp->psr.sel_update_enabled = false; in intel_psr_disable_locked()
2155 intel_dp->psr.psr2_sel_fetch_enabled = false; in intel_psr_disable_locked()
2156 intel_dp->psr.su_region_et_enabled = false; in intel_psr_disable_locked()
2157 intel_dp->psr.psr2_sel_fetch_cff_enabled = false; in intel_psr_disable_locked()
2161 * intel_psr_disable - Disable PSR
2170 struct intel_display *display = to_intel_display(intel_dp); in intel_psr_disable() local
2172 if (!old_crtc_state->has_psr) in intel_psr_disable()
2175 if (drm_WARN_ON(display->drm, !CAN_PSR(intel_dp))) in intel_psr_disable()
2178 mutex_lock(&intel_dp->psr.lock); in intel_psr_disable()
2182 intel_dp->psr.link_ok = false; in intel_psr_disable()
2184 mutex_unlock(&intel_dp->psr.lock); in intel_psr_disable()
2185 cancel_work_sync(&intel_dp->psr.work); in intel_psr_disable()
2186 cancel_delayed_work_sync(&intel_dp->psr.dc3co_work); in intel_psr_disable()
2190 * intel_psr_pause - Pause PSR
2197 struct intel_display *display = to_intel_display(intel_dp); in intel_psr_pause() local
2198 struct intel_psr *psr = &intel_dp->psr; in intel_psr_pause()
2203 mutex_lock(&psr->lock); in intel_psr_pause()
2205 if (!psr->enabled) { in intel_psr_pause()
2206 mutex_unlock(&psr->lock); in intel_psr_pause()
2211 drm_WARN_ON(display->drm, psr->paused); in intel_psr_pause()
2215 psr->paused = true; in intel_psr_pause()
2217 mutex_unlock(&psr->lock); in intel_psr_pause()
2219 cancel_work_sync(&psr->work); in intel_psr_pause()
2220 cancel_delayed_work_sync(&psr->dc3co_work); in intel_psr_pause()
2224 * intel_psr_resume - Resume PSR
2231 struct intel_psr *psr = &intel_dp->psr; in intel_psr_resume()
2236 mutex_lock(&psr->lock); in intel_psr_resume()
2238 if (!psr->paused) in intel_psr_resume()
2241 psr->paused = false; in intel_psr_resume()
2245 mutex_unlock(&psr->lock); in intel_psr_resume()
2249 * intel_psr_needs_block_dc_vblank - Check if block dc entry is needed
2254 * DC entry. This means vblank interrupts are not fired and is a problem if
2255 * user-space is polling for vblank events.
2259 struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc); in intel_psr_needs_block_dc_vblank()
2262 for_each_encoder_on_crtc(crtc->base.dev, &crtc->base, encoder) { in intel_psr_needs_block_dc_vblank()
2278 static u32 man_trk_ctl_enable_bit_get(struct intel_display *display) in man_trk_ctl_enable_bit_get() argument
2280 struct drm_i915_private *dev_priv = to_i915(display->drm); in man_trk_ctl_enable_bit_get()
2282 return IS_ALDERLAKE_P(dev_priv) || DISPLAY_VER(display) >= 14 ? 0 : in man_trk_ctl_enable_bit_get()
2286 static u32 man_trk_ctl_single_full_frame_bit_get(struct intel_display *display) in man_trk_ctl_single_full_frame_bit_get() argument
2288 struct drm_i915_private *dev_priv = to_i915(display->drm); in man_trk_ctl_single_full_frame_bit_get()
2290 return IS_ALDERLAKE_P(dev_priv) || DISPLAY_VER(display) >= 14 ? in man_trk_ctl_single_full_frame_bit_get()
2295 static u32 man_trk_ctl_partial_frame_bit_get(struct intel_display *display) in man_trk_ctl_partial_frame_bit_get() argument
2297 struct drm_i915_private *dev_priv = to_i915(display->drm); in man_trk_ctl_partial_frame_bit_get()
2299 return IS_ALDERLAKE_P(dev_priv) || DISPLAY_VER(display) >= 14 ? in man_trk_ctl_partial_frame_bit_get()
2304 static u32 man_trk_ctl_continuos_full_frame(struct intel_display *display) in man_trk_ctl_continuos_full_frame() argument
2306 struct drm_i915_private *dev_priv = to_i915(display->drm); in man_trk_ctl_continuos_full_frame()
2308 return IS_ALDERLAKE_P(dev_priv) || DISPLAY_VER(display) >= 14 ? in man_trk_ctl_continuos_full_frame()
2315 struct intel_display *display = to_intel_display(intel_dp); in psr_force_hw_tracking_exit() local
2316 enum transcoder cpu_transcoder = intel_dp->psr.transcoder; in psr_force_hw_tracking_exit()
2318 if (intel_dp->psr.psr2_sel_fetch_enabled) in psr_force_hw_tracking_exit()
2319 intel_de_write(display, in psr_force_hw_tracking_exit()
2320 PSR2_MAN_TRK_CTL(display, cpu_transcoder), in psr_force_hw_tracking_exit()
2321 man_trk_ctl_enable_bit_get(display) | in psr_force_hw_tracking_exit()
2322 man_trk_ctl_partial_frame_bit_get(display) | in psr_force_hw_tracking_exit()
2323 man_trk_ctl_single_full_frame_bit_get(display) | in psr_force_hw_tracking_exit()
2324 man_trk_ctl_continuos_full_frame(display)); in psr_force_hw_tracking_exit()
2327 * Display WA #0884: skl+ in psr_force_hw_tracking_exit()
2330 * instead of disabling and re-enabling. in psr_force_hw_tracking_exit()
2335 * This workaround do not exist for platforms with display 10 or newer in psr_force_hw_tracking_exit()
2336 * but testing proved that it works for up display 13, for newer in psr_force_hw_tracking_exit()
2339 intel_de_write(display, CURSURFLIVE(display, intel_dp->psr.pipe), 0); in psr_force_hw_tracking_exit()
2344 struct intel_display *display = to_intel_display(crtc_state); in intel_psr2_program_trans_man_trk_ctl() local
2345 struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc); in intel_psr2_program_trans_man_trk_ctl()
2346 enum transcoder cpu_transcoder = crtc_state->cpu_transcoder; in intel_psr2_program_trans_man_trk_ctl()
2349 if (!crtc_state->enable_psr2_sel_fetch) in intel_psr2_program_trans_man_trk_ctl()
2352 for_each_intel_encoder_mask_with_psr(display->drm, encoder, in intel_psr2_program_trans_man_trk_ctl()
2353 crtc_state->uapi.encoder_mask) { in intel_psr2_program_trans_man_trk_ctl()
2356 lockdep_assert_held(&intel_dp->psr.lock); in intel_psr2_program_trans_man_trk_ctl()
2357 if (intel_dp->psr.psr2_sel_fetch_cff_enabled) in intel_psr2_program_trans_man_trk_ctl()
2362 intel_de_write(display, PSR2_MAN_TRK_CTL(display, cpu_transcoder), in intel_psr2_program_trans_man_trk_ctl()
2363 crtc_state->psr2_man_track_ctl); in intel_psr2_program_trans_man_trk_ctl()
2365 if (!crtc_state->enable_psr2_su_region_et) in intel_psr2_program_trans_man_trk_ctl()
2368 intel_de_write(display, PIPE_SRCSZ_ERLY_TPT(crtc->pipe), in intel_psr2_program_trans_man_trk_ctl()
2369 crtc_state->pipe_srcsz_early_tpt); in intel_psr2_program_trans_man_trk_ctl()
2375 struct intel_display *display = to_intel_display(crtc_state); in psr2_man_trk_ctl_calc() local
2376 struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc); in psr2_man_trk_ctl_calc()
2377 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev); in psr2_man_trk_ctl_calc()
2378 u32 val = man_trk_ctl_enable_bit_get(display); in psr2_man_trk_ctl_calc()
2381 val |= man_trk_ctl_partial_frame_bit_get(display); in psr2_man_trk_ctl_calc()
2384 val |= man_trk_ctl_single_full_frame_bit_get(display); in psr2_man_trk_ctl_calc()
2385 val |= man_trk_ctl_continuos_full_frame(display); in psr2_man_trk_ctl_calc()
2389 if (crtc_state->psr2_su_area.y1 == -1) in psr2_man_trk_ctl_calc()
2392 if (IS_ALDERLAKE_P(dev_priv) || DISPLAY_VER(display) >= 14) { in psr2_man_trk_ctl_calc()
2393 val |= ADLP_PSR2_MAN_TRK_CTL_SU_REGION_START_ADDR(crtc_state->psr2_su_area.y1); in psr2_man_trk_ctl_calc()
2394 val |= ADLP_PSR2_MAN_TRK_CTL_SU_REGION_END_ADDR(crtc_state->psr2_su_area.y2 - 1); in psr2_man_trk_ctl_calc()
2396 drm_WARN_ON(crtc_state->uapi.crtc->dev, in psr2_man_trk_ctl_calc()
2397 crtc_state->psr2_su_area.y1 % 4 || in psr2_man_trk_ctl_calc()
2398 crtc_state->psr2_su_area.y2 % 4); in psr2_man_trk_ctl_calc()
2401 crtc_state->psr2_su_area.y1 / 4 + 1); in psr2_man_trk_ctl_calc()
2403 crtc_state->psr2_su_area.y2 / 4 + 1); in psr2_man_trk_ctl_calc()
2406 crtc_state->psr2_man_track_ctl = val; in psr2_man_trk_ctl_calc()
2414 if (!crtc_state->enable_psr2_su_region_et || full_update) in psr2_pipe_srcsz_early_tpt_calc()
2417 width = drm_rect_width(&crtc_state->psr2_su_area); in psr2_pipe_srcsz_early_tpt_calc()
2418 height = drm_rect_height(&crtc_state->psr2_su_area); in psr2_pipe_srcsz_early_tpt_calc()
2420 return PIPESRC_WIDTH(width - 1) | PIPESRC_HEIGHT(height - 1); in psr2_pipe_srcsz_early_tpt_calc()
2430 if (overlap_damage_area->y1 == -1) { in clip_area_update()
2431 overlap_damage_area->y1 = damage_area->y1; in clip_area_update()
2432 overlap_damage_area->y2 = damage_area->y2; in clip_area_update()
2436 if (damage_area->y1 < overlap_damage_area->y1) in clip_area_update()
2437 overlap_damage_area->y1 = damage_area->y1; in clip_area_update()
2439 if (damage_area->y2 > overlap_damage_area->y2) in clip_area_update()
2440 overlap_damage_area->y2 = damage_area->y2; in clip_area_update()
2445 struct intel_display *display = to_intel_display(crtc_state); in intel_psr2_sel_fetch_pipe_alignment() local
2446 struct drm_i915_private *dev_priv = to_i915(crtc_state->uapi.crtc->dev); in intel_psr2_sel_fetch_pipe_alignment()
2447 const struct drm_dsc_config *vdsc_cfg = &crtc_state->dsc.config; in intel_psr2_sel_fetch_pipe_alignment()
2451 if (crtc_state->dsc.compression_enable && in intel_psr2_sel_fetch_pipe_alignment()
2452 (IS_ALDERLAKE_P(dev_priv) || DISPLAY_VER(display) >= 14)) in intel_psr2_sel_fetch_pipe_alignment()
2453 y_alignment = vdsc_cfg->slice_height; in intel_psr2_sel_fetch_pipe_alignment()
2455 y_alignment = crtc_state->su_y_granularity; in intel_psr2_sel_fetch_pipe_alignment()
2457 crtc_state->psr2_su_area.y1 -= crtc_state->psr2_su_area.y1 % y_alignment; in intel_psr2_sel_fetch_pipe_alignment()
2458 if (crtc_state->psr2_su_area.y2 % y_alignment) in intel_psr2_sel_fetch_pipe_alignment()
2459 crtc_state->psr2_su_area.y2 = ((crtc_state->psr2_su_area.y2 / in intel_psr2_sel_fetch_pipe_alignment()
2477 if (!crtc_state->enable_psr2_su_region_et) in intel_psr2_sel_fetch_et_alignment()
2483 if (new_plane_state->uapi.crtc != crtc_state->uapi.crtc) in intel_psr2_sel_fetch_et_alignment()
2486 if (plane->id != PLANE_CURSOR) in intel_psr2_sel_fetch_et_alignment()
2489 if (!new_plane_state->uapi.visible) in intel_psr2_sel_fetch_et_alignment()
2492 inter = crtc_state->psr2_su_area; in intel_psr2_sel_fetch_et_alignment()
2493 if (!drm_rect_intersect(&inter, &new_plane_state->uapi.dst)) in intel_psr2_sel_fetch_et_alignment()
2496 clip_area_update(&crtc_state->psr2_su_area, &new_plane_state->uapi.dst, in intel_psr2_sel_fetch_et_alignment()
2497 &crtc_state->pipe_src); in intel_psr2_sel_fetch_et_alignment()
2504 * also planes are not updated if they have a negative X
2505 * position so for now doing a full update in this cases
2508 * properties can change without a modeset, so need to be check at every
2513 if (plane_state->uapi.dst.y1 < 0 || in psr2_sel_fetch_plane_state_supported()
2514 plane_state->uapi.dst.x1 < 0 || in psr2_sel_fetch_plane_state_supported()
2515 plane_state->scaler_id >= 0 || in psr2_sel_fetch_plane_state_supported()
2516 plane_state->uapi.rotation != DRM_MODE_ROTATE_0) in psr2_sel_fetch_plane_state_supported()
2525 * TODO: pipe scaling causes a modeset but skl_update_scaler_crtc() is executed
2531 if (crtc_state->scaler_state.scaler_id >= 0) in psr2_sel_fetch_pipe_state_supported()
2540 struct intel_display *display = to_intel_display(crtc_state); in intel_psr_apply_pr_link_on_su_wa() local
2544 if (crtc_state->psr2_su_area.y1 != 0 || in intel_psr_apply_pr_link_on_su_wa()
2545 crtc_state->psr2_su_area.y2 != 0) in intel_psr_apply_pr_link_on_su_wa()
2548 if (crtc_state->output_format == INTEL_OUTPUT_FORMAT_YCBCR420) in intel_psr_apply_pr_link_on_su_wa()
2553 if (crtc_state->hw.adjusted_mode.hdisplay < hactive_limit) in intel_psr_apply_pr_link_on_su_wa()
2556 for_each_intel_encoder_mask_with_psr(display->drm, encoder, in intel_psr_apply_pr_link_on_su_wa()
2557 crtc_state->uapi.encoder_mask) { in intel_psr_apply_pr_link_on_su_wa()
2561 intel_dp->psr.panel_replay_enabled && in intel_psr_apply_pr_link_on_su_wa()
2562 intel_dp->psr.sel_update_enabled) { in intel_psr_apply_pr_link_on_su_wa()
2563 crtc_state->psr2_su_area.y2++; in intel_psr_apply_pr_link_on_su_wa()
2572 struct intel_display *display = to_intel_display(crtc_state); in intel_psr_apply_su_area_workarounds() local
2573 struct drm_i915_private *i915 = to_i915(crtc_state->uapi.crtc->dev); in intel_psr_apply_su_area_workarounds()
2576 if (!crtc_state->has_panel_replay && in intel_psr_apply_su_area_workarounds()
2577 ((IS_DISPLAY_VERx100_STEP(display, 1400, STEP_A0, STEP_B0) || in intel_psr_apply_su_area_workarounds()
2579 crtc_state->splitter.enable) in intel_psr_apply_su_area_workarounds()
2580 crtc_state->psr2_su_area.y1 = 0; in intel_psr_apply_su_area_workarounds()
2583 if (DISPLAY_VER(display) == 30) in intel_psr_apply_su_area_workarounds()
2590 struct intel_display *display = to_intel_display(state); in intel_psr2_sel_fetch_update() local
2597 if (!crtc_state->enable_psr2_sel_fetch) in intel_psr2_sel_fetch_update()
2605 crtc_state->psr2_su_area.x1 = 0; in intel_psr2_sel_fetch_update()
2606 crtc_state->psr2_su_area.y1 = -1; in intel_psr2_sel_fetch_update()
2607 crtc_state->psr2_su_area.x2 = drm_rect_width(&crtc_state->pipe_src); in intel_psr2_sel_fetch_update()
2608 crtc_state->psr2_su_area.y2 = -1; in intel_psr2_sel_fetch_update()
2618 struct drm_rect src, damaged_area = { .x1 = 0, .y1 = -1, in intel_psr2_sel_fetch_update()
2621 if (new_plane_state->uapi.crtc != crtc_state->uapi.crtc) in intel_psr2_sel_fetch_update()
2624 if (!new_plane_state->uapi.visible && in intel_psr2_sel_fetch_update()
2625 !old_plane_state->uapi.visible) in intel_psr2_sel_fetch_update()
2638 if (new_plane_state->uapi.visible != old_plane_state->uapi.visible || in intel_psr2_sel_fetch_update()
2639 !drm_rect_equals(&new_plane_state->uapi.dst, in intel_psr2_sel_fetch_update()
2640 &old_plane_state->uapi.dst)) { in intel_psr2_sel_fetch_update()
2641 if (old_plane_state->uapi.visible) { in intel_psr2_sel_fetch_update()
2642 damaged_area.y1 = old_plane_state->uapi.dst.y1; in intel_psr2_sel_fetch_update()
2643 damaged_area.y2 = old_plane_state->uapi.dst.y2; in intel_psr2_sel_fetch_update()
2644 clip_area_update(&crtc_state->psr2_su_area, &damaged_area, in intel_psr2_sel_fetch_update()
2645 &crtc_state->pipe_src); in intel_psr2_sel_fetch_update()
2648 if (new_plane_state->uapi.visible) { in intel_psr2_sel_fetch_update()
2649 damaged_area.y1 = new_plane_state->uapi.dst.y1; in intel_psr2_sel_fetch_update()
2650 damaged_area.y2 = new_plane_state->uapi.dst.y2; in intel_psr2_sel_fetch_update()
2651 clip_area_update(&crtc_state->psr2_su_area, &damaged_area, in intel_psr2_sel_fetch_update()
2652 &crtc_state->pipe_src); in intel_psr2_sel_fetch_update()
2655 } else if (new_plane_state->uapi.alpha != old_plane_state->uapi.alpha) { in intel_psr2_sel_fetch_update()
2657 damaged_area.y1 = new_plane_state->uapi.dst.y1; in intel_psr2_sel_fetch_update()
2658 damaged_area.y2 = new_plane_state->uapi.dst.y2; in intel_psr2_sel_fetch_update()
2659 clip_area_update(&crtc_state->psr2_su_area, &damaged_area, in intel_psr2_sel_fetch_update()
2660 &crtc_state->pipe_src); in intel_psr2_sel_fetch_update()
2664 src = drm_plane_state_src(&new_plane_state->uapi); in intel_psr2_sel_fetch_update()
2667 if (!drm_atomic_helper_damage_merged(&old_plane_state->uapi, in intel_psr2_sel_fetch_update()
2668 &new_plane_state->uapi, &damaged_area)) in intel_psr2_sel_fetch_update()
2671 damaged_area.y1 += new_plane_state->uapi.dst.y1 - src.y1; in intel_psr2_sel_fetch_update()
2672 damaged_area.y2 += new_plane_state->uapi.dst.y1 - src.y1; in intel_psr2_sel_fetch_update()
2673 damaged_area.x1 += new_plane_state->uapi.dst.x1 - src.x1; in intel_psr2_sel_fetch_update()
2674 damaged_area.x2 += new_plane_state->uapi.dst.x1 - src.x1; in intel_psr2_sel_fetch_update()
2676 clip_area_update(&crtc_state->psr2_su_area, &damaged_area, &crtc_state->pipe_src); in intel_psr2_sel_fetch_update()
2685 if (crtc_state->psr2_su_area.y1 == -1) { in intel_psr2_sel_fetch_update()
2686 drm_info_once(display->drm, in intel_psr2_sel_fetch_update()
2688 pipe_name(crtc->pipe)); in intel_psr2_sel_fetch_update()
2697 ret = drm_atomic_add_affected_planes(&state->base, &crtc->base); in intel_psr2_sel_fetch_update()
2718 struct intel_plane *linked = new_plane_state->planar_linked_plane; in intel_psr2_sel_fetch_update()
2720 if (new_plane_state->uapi.crtc != crtc_state->uapi.crtc || in intel_psr2_sel_fetch_update()
2721 !new_plane_state->uapi.visible) in intel_psr2_sel_fetch_update()
2724 inter = crtc_state->psr2_su_area; in intel_psr2_sel_fetch_update()
2725 sel_fetch_area = &new_plane_state->psr2_sel_fetch_area; in intel_psr2_sel_fetch_update()
2726 if (!drm_rect_intersect(&inter, &new_plane_state->uapi.dst)) { in intel_psr2_sel_fetch_update()
2727 sel_fetch_area->y1 = -1; in intel_psr2_sel_fetch_update()
2728 sel_fetch_area->y2 = -1; in intel_psr2_sel_fetch_update()
2730 * if plane sel fetch was previously enabled -> in intel_psr2_sel_fetch_update()
2733 if (drm_rect_height(&old_plane_state->psr2_sel_fetch_area) > 0) in intel_psr2_sel_fetch_update()
2734 crtc_state->update_planes |= BIT(plane->id); in intel_psr2_sel_fetch_update()
2744 sel_fetch_area = &new_plane_state->psr2_sel_fetch_area; in intel_psr2_sel_fetch_update()
2745 sel_fetch_area->y1 = inter.y1 - new_plane_state->uapi.dst.y1; in intel_psr2_sel_fetch_update()
2746 sel_fetch_area->y2 = inter.y2 - new_plane_state->uapi.dst.y1; in intel_psr2_sel_fetch_update()
2747 crtc_state->update_planes |= BIT(plane->id); in intel_psr2_sel_fetch_update()
2761 linked_sel_fetch_area = &linked_new_plane_state->psr2_sel_fetch_area; in intel_psr2_sel_fetch_update()
2762 linked_sel_fetch_area->y1 = sel_fetch_area->y1; in intel_psr2_sel_fetch_update()
2763 linked_sel_fetch_area->y2 = sel_fetch_area->y2; in intel_psr2_sel_fetch_update()
2764 crtc_state->update_planes |= BIT(linked->id); in intel_psr2_sel_fetch_update()
2770 crtc_state->pipe_srcsz_early_tpt = in intel_psr2_sel_fetch_update()
2778 struct intel_display *display = to_intel_display(state); in intel_psr_pre_plane_update() local
2779 struct drm_i915_private *i915 = to_i915(state->base.dev); in intel_psr_pre_plane_update()
2786 if (!HAS_PSR(display)) in intel_psr_pre_plane_update()
2789 for_each_intel_encoder_mask_with_psr(state->base.dev, encoder, in intel_psr_pre_plane_update()
2790 old_crtc_state->uapi.encoder_mask) { in intel_psr_pre_plane_update()
2792 struct intel_psr *psr = &intel_dp->psr; in intel_psr_pre_plane_update()
2795 mutex_lock(&psr->lock); in intel_psr_pre_plane_update()
2799 * - PSR disabled in new state in intel_psr_pre_plane_update()
2800 * - All planes will go inactive in intel_psr_pre_plane_update()
2801 * - Changing between PSR versions in intel_psr_pre_plane_update()
2802 * - Region Early Transport changing in intel_psr_pre_plane_update()
2803 * - Display WA #1136: skl, bxt in intel_psr_pre_plane_update()
2806 needs_to_disable |= !new_crtc_state->has_psr; in intel_psr_pre_plane_update()
2807 needs_to_disable |= !new_crtc_state->active_planes; in intel_psr_pre_plane_update()
2808 needs_to_disable |= new_crtc_state->has_sel_update != psr->sel_update_enabled; in intel_psr_pre_plane_update()
2809 needs_to_disable |= new_crtc_state->enable_psr2_su_region_et != in intel_psr_pre_plane_update()
2810 psr->su_region_et_enabled; in intel_psr_pre_plane_update()
2812 new_crtc_state->wm_level_disabled; in intel_psr_pre_plane_update()
2814 if (psr->enabled && needs_to_disable) in intel_psr_pre_plane_update()
2816 else if (psr->enabled && new_crtc_state->wm_level_disabled) in intel_psr_pre_plane_update()
2820 mutex_unlock(&psr->lock); in intel_psr_pre_plane_update()
2827 struct intel_display *display = to_intel_display(state); in intel_psr_post_plane_update() local
2832 if (!crtc_state->has_psr) in intel_psr_post_plane_update()
2835 for_each_intel_encoder_mask_with_psr(state->base.dev, encoder, in intel_psr_post_plane_update()
2836 crtc_state->uapi.encoder_mask) { in intel_psr_post_plane_update()
2838 struct intel_psr *psr = &intel_dp->psr; in intel_psr_post_plane_update()
2841 mutex_lock(&psr->lock); in intel_psr_post_plane_update()
2843 drm_WARN_ON(display->drm, in intel_psr_post_plane_update()
2844 psr->enabled && !crtc_state->active_planes); in intel_psr_post_plane_update()
2846 keep_disabled |= psr->sink_not_reliable; in intel_psr_post_plane_update()
2847 keep_disabled |= !crtc_state->active_planes; in intel_psr_post_plane_update()
2849 /* Display WA #1136: skl, bxt */ in intel_psr_post_plane_update()
2850 keep_disabled |= DISPLAY_VER(display) < 11 && in intel_psr_post_plane_update()
2851 crtc_state->wm_level_disabled; in intel_psr_post_plane_update()
2853 if (!psr->enabled && !keep_disabled) in intel_psr_post_plane_update()
2855 else if (psr->enabled && !crtc_state->wm_level_disabled) in intel_psr_post_plane_update()
2859 /* Force a PSR exit when enabling CRC to avoid CRC timeouts */ in intel_psr_post_plane_update()
2860 if (crtc_state->crc_enabled && psr->enabled) in intel_psr_post_plane_update()
2865 * invalidate -> flip -> flush sequence. in intel_psr_post_plane_update()
2867 intel_dp->psr.busy_frontbuffer_bits = 0; in intel_psr_post_plane_update()
2869 mutex_unlock(&psr->lock); in intel_psr_post_plane_update()
2875 struct intel_display *display = to_intel_display(intel_dp); in _psr2_ready_for_pipe_update_locked() local
2876 enum transcoder cpu_transcoder = intel_dp->psr.transcoder; in _psr2_ready_for_pipe_update_locked()
2883 return intel_de_wait_for_clear(display, in _psr2_ready_for_pipe_update_locked()
2884 EDP_PSR2_STATUS(display, cpu_transcoder), in _psr2_ready_for_pipe_update_locked()
2890 struct intel_display *display = to_intel_display(intel_dp); in _psr1_ready_for_pipe_update_locked() local
2891 enum transcoder cpu_transcoder = intel_dp->psr.transcoder; in _psr1_ready_for_pipe_update_locked()
2899 return intel_de_wait_for_clear(display, in _psr1_ready_for_pipe_update_locked()
2900 psr_status_reg(display, cpu_transcoder), in _psr1_ready_for_pipe_update_locked()
2905 * intel_psr_wait_for_idle_locked - wait for PSR be ready for a pipe update
2913 struct intel_display *display = to_intel_display(new_crtc_state); in intel_psr_wait_for_idle_locked() local
2916 if (!new_crtc_state->has_psr) in intel_psr_wait_for_idle_locked()
2919 for_each_intel_encoder_mask_with_psr(display->drm, encoder, in intel_psr_wait_for_idle_locked()
2920 new_crtc_state->uapi.encoder_mask) { in intel_psr_wait_for_idle_locked()
2924 lockdep_assert_held(&intel_dp->psr.lock); in intel_psr_wait_for_idle_locked()
2926 if (!intel_dp->psr.enabled || intel_dp->psr.panel_replay_enabled) in intel_psr_wait_for_idle_locked()
2929 if (intel_dp->psr.sel_update_enabled) in intel_psr_wait_for_idle_locked()
2935 drm_err(display->drm, in intel_psr_wait_for_idle_locked()
2942 struct intel_display *display = to_intel_display(intel_dp); in __psr_wait_for_idle_locked() local
2943 enum transcoder cpu_transcoder = intel_dp->psr.transcoder; in __psr_wait_for_idle_locked()
2948 if (!intel_dp->psr.enabled) in __psr_wait_for_idle_locked()
2951 if (intel_dp_is_edp(intel_dp) && (intel_dp->psr.sel_update_enabled || in __psr_wait_for_idle_locked()
2952 intel_dp->psr.panel_replay_enabled)) { in __psr_wait_for_idle_locked()
2953 reg = EDP_PSR2_STATUS(display, cpu_transcoder); in __psr_wait_for_idle_locked()
2956 reg = psr_status_reg(display, cpu_transcoder); in __psr_wait_for_idle_locked()
2960 mutex_unlock(&intel_dp->psr.lock); in __psr_wait_for_idle_locked()
2962 err = intel_de_wait_for_clear(display, reg, mask, 50); in __psr_wait_for_idle_locked()
2964 drm_err(display->drm, in __psr_wait_for_idle_locked()
2965 "Timed out waiting for PSR Idle for re-enable\n"); in __psr_wait_for_idle_locked()
2968 mutex_lock(&intel_dp->psr.lock); in __psr_wait_for_idle_locked()
2969 return err == 0 && intel_dp->psr.enabled; in __psr_wait_for_idle_locked()
2972 static int intel_psr_fastset_force(struct intel_display *display) in intel_psr_fastset_force() argument
2980 state = drm_atomic_state_alloc(display->drm); in intel_psr_fastset_force()
2982 return -ENOMEM; in intel_psr_fastset_force()
2986 state->acquire_ctx = &ctx; in intel_psr_fastset_force()
2987 to_intel_atomic_state(state)->internal = true; in intel_psr_fastset_force()
2990 drm_connector_list_iter_begin(display->drm, &conn_iter); in intel_psr_fastset_force()
2995 if (conn->connector_type != DRM_MODE_CONNECTOR_eDP) in intel_psr_fastset_force()
3004 if (!conn_state->crtc) in intel_psr_fastset_force()
3007 crtc_state = drm_atomic_get_crtc_state(state, conn_state->crtc); in intel_psr_fastset_force()
3013 /* Mark mode as changed to trigger a pipe->update() */ in intel_psr_fastset_force()
3014 crtc_state->mode_changed = true; in intel_psr_fastset_force()
3021 if (err == -EDEADLK) { in intel_psr_fastset_force()
3037 struct intel_display *display = to_intel_display(intel_dp); in intel_psr_debug_set() local
3048 drm_dbg_kms(display->drm, "Invalid debug mask %llx\n", val); in intel_psr_debug_set()
3049 return -EINVAL; in intel_psr_debug_set()
3052 ret = mutex_lock_interruptible(&intel_dp->psr.lock); in intel_psr_debug_set()
3056 old_mode = intel_dp->psr.debug & I915_PSR_DEBUG_MODE_MASK; in intel_psr_debug_set()
3057 old_disable_bits = intel_dp->psr.debug & in intel_psr_debug_set()
3061 intel_dp->psr.debug = val; in intel_psr_debug_set()
3067 if (intel_dp->psr.enabled) in intel_psr_debug_set()
3070 mutex_unlock(&intel_dp->psr.lock); in intel_psr_debug_set()
3073 ret = intel_psr_fastset_force(display); in intel_psr_debug_set()
3080 struct intel_psr *psr = &intel_dp->psr; in intel_psr_handle_irq()
3083 psr->sink_not_reliable = true; in intel_psr_handle_irq()
3085 drm_dp_dpcd_writeb(&intel_dp->aux, DP_SET_POWER, DP_SET_POWER_D0); in intel_psr_handle_irq()
3093 mutex_lock(&intel_dp->psr.lock); in intel_psr_work()
3095 if (!intel_dp->psr.enabled) in intel_psr_work()
3098 if (READ_ONCE(intel_dp->psr.irq_aux_error)) in intel_psr_work()
3102 * We have to make sure PSR is ready for re-enable in intel_psr_work()
3105 * and be ready for re-enable. in intel_psr_work()
3113 * won't ever miss a flush when bailing out here. in intel_psr_work()
3115 if (intel_dp->psr.busy_frontbuffer_bits || intel_dp->psr.active) in intel_psr_work()
3120 mutex_unlock(&intel_dp->psr.lock); in intel_psr_work()
3125 struct intel_display *display = to_intel_display(intel_dp); in _psr_invalidate_handle() local
3126 enum transcoder cpu_transcoder = intel_dp->psr.transcoder; in _psr_invalidate_handle()
3128 if (intel_dp->psr.psr2_sel_fetch_enabled) { in _psr_invalidate_handle()
3131 if (intel_dp->psr.psr2_sel_fetch_cff_enabled) { in _psr_invalidate_handle()
3133 intel_de_write(display, in _psr_invalidate_handle()
3134 CURSURFLIVE(display, intel_dp->psr.pipe), in _psr_invalidate_handle()
3139 val = man_trk_ctl_enable_bit_get(display) | in _psr_invalidate_handle()
3140 man_trk_ctl_partial_frame_bit_get(display) | in _psr_invalidate_handle()
3141 man_trk_ctl_continuos_full_frame(display); in _psr_invalidate_handle()
3142 intel_de_write(display, in _psr_invalidate_handle()
3143 PSR2_MAN_TRK_CTL(display, cpu_transcoder), in _psr_invalidate_handle()
3145 intel_de_write(display, in _psr_invalidate_handle()
3146 CURSURFLIVE(display, intel_dp->psr.pipe), 0); in _psr_invalidate_handle()
3147 intel_dp->psr.psr2_sel_fetch_cff_enabled = true; in _psr_invalidate_handle()
3154 * intel_psr_invalidate - Invalidate PSR
3155 * @display: display device
3161 * time frontbuffer rendering starts and a buffer gets dirtied. PSR must be
3162 * disabled if the frontbuffer mask contains a buffer relevant to PSR.
3166 void intel_psr_invalidate(struct intel_display *display, in intel_psr_invalidate() argument
3174 for_each_intel_encoder_with_psr(display->drm, encoder) { in intel_psr_invalidate()
3178 mutex_lock(&intel_dp->psr.lock); in intel_psr_invalidate()
3179 if (!intel_dp->psr.enabled) { in intel_psr_invalidate()
3180 mutex_unlock(&intel_dp->psr.lock); in intel_psr_invalidate()
3185 INTEL_FRONTBUFFER_ALL_MASK(intel_dp->psr.pipe); in intel_psr_invalidate()
3186 intel_dp->psr.busy_frontbuffer_bits |= pipe_frontbuffer_bits; in intel_psr_invalidate()
3191 mutex_unlock(&intel_dp->psr.lock); in intel_psr_invalidate()
3204 struct intel_display *display = to_intel_display(intel_dp); in tgl_dc3co_flush_locked() local
3205 struct drm_i915_private *i915 = to_i915(display->drm); in tgl_dc3co_flush_locked()
3207 if (!intel_dp->psr.dc3co_exitline || !intel_dp->psr.sel_update_enabled || in tgl_dc3co_flush_locked()
3208 !intel_dp->psr.active) in tgl_dc3co_flush_locked()
3213 * when delayed work schedules that means display has been idle. in tgl_dc3co_flush_locked()
3216 INTEL_FRONTBUFFER_ALL_MASK(intel_dp->psr.pipe))) in tgl_dc3co_flush_locked()
3220 mod_delayed_work(i915->unordered_wq, &intel_dp->psr.dc3co_work, in tgl_dc3co_flush_locked()
3221 intel_dp->psr.dc3co_exit_delay); in tgl_dc3co_flush_locked()
3226 struct intel_display *display = to_intel_display(intel_dp); in _psr_flush_handle() local
3227 struct drm_i915_private *dev_priv = to_i915(display->drm); in _psr_flush_handle()
3228 enum transcoder cpu_transcoder = intel_dp->psr.transcoder; in _psr_flush_handle()
3230 if (intel_dp->psr.psr2_sel_fetch_enabled) { in _psr_flush_handle()
3231 if (intel_dp->psr.psr2_sel_fetch_cff_enabled) { in _psr_flush_handle()
3233 if (intel_dp->psr.busy_frontbuffer_bits == 0) { in _psr_flush_handle()
3234 u32 val = man_trk_ctl_enable_bit_get(display) | in _psr_flush_handle()
3235 man_trk_ctl_partial_frame_bit_get(display) | in _psr_flush_handle()
3236 man_trk_ctl_single_full_frame_bit_get(display) | in _psr_flush_handle()
3237 man_trk_ctl_continuos_full_frame(display); in _psr_flush_handle()
3245 intel_de_write(display, in _psr_flush_handle()
3246 PSR2_MAN_TRK_CTL(display, cpu_transcoder), in _psr_flush_handle()
3248 intel_de_write(display, in _psr_flush_handle()
3249 CURSURFLIVE(display, intel_dp->psr.pipe), in _psr_flush_handle()
3251 intel_dp->psr.psr2_sel_fetch_cff_enabled = false; in _psr_flush_handle()
3255 * continuous full frame is disabled, only a single full in _psr_flush_handle()
3263 if (!intel_dp->psr.active && !intel_dp->psr.busy_frontbuffer_bits) in _psr_flush_handle()
3264 queue_work(dev_priv->unordered_wq, &intel_dp->psr.work); in _psr_flush_handle()
3269 * intel_psr_flush - Flush PSR
3270 * @display: display device
3281 void intel_psr_flush(struct intel_display *display, in intel_psr_flush() argument
3286 for_each_intel_encoder_with_psr(display->drm, encoder) { in intel_psr_flush()
3290 mutex_lock(&intel_dp->psr.lock); in intel_psr_flush()
3291 if (!intel_dp->psr.enabled) { in intel_psr_flush()
3292 mutex_unlock(&intel_dp->psr.lock); in intel_psr_flush()
3297 INTEL_FRONTBUFFER_ALL_MASK(intel_dp->psr.pipe); in intel_psr_flush()
3298 intel_dp->psr.busy_frontbuffer_bits &= ~pipe_frontbuffer_bits; in intel_psr_flush()
3305 if (intel_dp->psr.paused) in intel_psr_flush()
3310 !intel_dp->psr.psr2_sel_fetch_enabled)) { in intel_psr_flush()
3321 mutex_unlock(&intel_dp->psr.lock); in intel_psr_flush()
3326 * intel_psr_init - Init basic PSR work and mutex.
3335 struct intel_display *display = to_intel_display(intel_dp); in intel_psr_init() local
3336 struct intel_connector *connector = intel_dp->attached_connector; in intel_psr_init()
3339 if (!(HAS_PSR(display) || HAS_DP20(display))) in intel_psr_init()
3343 * HSW spec explicitly says PSR is tied to port A. in intel_psr_init()
3346 * BDW+ platforms have an instance of PSR registers per transcoder but in intel_psr_init()
3349 * But GEN12 supports an instance of PSR registers per transcoder. in intel_psr_init()
3351 if (DISPLAY_VER(display) < 12 && dig_port->base.port != PORT_A) { in intel_psr_init()
3352 drm_dbg_kms(display->drm, in intel_psr_init()
3357 if ((HAS_DP20(display) && !intel_dp_is_edp(intel_dp)) || in intel_psr_init()
3358 DISPLAY_VER(display) >= 20) in intel_psr_init()
3359 intel_dp->psr.source_panel_replay_support = true; in intel_psr_init()
3361 if (HAS_PSR(display) && intel_dp_is_edp(intel_dp)) in intel_psr_init()
3362 intel_dp->psr.source_support = true; in intel_psr_init()
3365 if (DISPLAY_VER(display) < 12) in intel_psr_init()
3367 intel_dp->psr.link_standby = connector->panel.vbt.psr.full_link; in intel_psr_init()
3369 INIT_WORK(&intel_dp->psr.work, intel_psr_work); in intel_psr_init()
3370 INIT_DELAYED_WORK(&intel_dp->psr.dc3co_work, tgl_dc3co_disable_work); in intel_psr_init()
3371 mutex_init(&intel_dp->psr.lock); in intel_psr_init()
3377 struct drm_dp_aux *aux = &intel_dp->aux; in psr_get_status_and_error_status()
3381 offset = intel_dp->psr.panel_replay_enabled ? in psr_get_status_and_error_status()
3388 offset = intel_dp->psr.panel_replay_enabled ? in psr_get_status_and_error_status()
3402 struct intel_display *display = to_intel_display(intel_dp); in psr_alpm_check() local
3403 struct drm_dp_aux *aux = &intel_dp->aux; in psr_alpm_check()
3404 struct intel_psr *psr = &intel_dp->psr; in psr_alpm_check()
3408 if (!psr->sel_update_enabled) in psr_alpm_check()
3413 drm_err(display->drm, "Error reading ALPM status\n"); in psr_alpm_check()
3419 psr->sink_not_reliable = true; in psr_alpm_check()
3420 drm_dbg_kms(display->drm, in psr_alpm_check()
3430 struct intel_display *display = to_intel_display(intel_dp); in psr_capability_changed_check() local
3431 struct intel_psr *psr = &intel_dp->psr; in psr_capability_changed_check()
3435 r = drm_dp_dpcd_readb(&intel_dp->aux, DP_PSR_ESI, &val); in psr_capability_changed_check()
3437 drm_err(display->drm, "Error reading DP_PSR_ESI\n"); in psr_capability_changed_check()
3443 psr->sink_not_reliable = true; in psr_capability_changed_check()
3444 drm_dbg_kms(display->drm, in psr_capability_changed_check()
3448 drm_dp_dpcd_writeb(&intel_dp->aux, DP_PSR_ESI, val); in psr_capability_changed_check()
3461 struct intel_display *display = to_intel_display(intel_dp); in intel_psr_short_pulse() local
3462 struct intel_psr *psr = &intel_dp->psr; in intel_psr_short_pulse()
3471 mutex_lock(&psr->lock); in intel_psr_short_pulse()
3473 psr->link_ok = false; in intel_psr_short_pulse()
3475 if (!psr->enabled) in intel_psr_short_pulse()
3479 drm_err(display->drm, in intel_psr_short_pulse()
3484 if ((!psr->panel_replay_enabled && status == DP_PSR_SINK_INTERNAL_ERROR) || in intel_psr_short_pulse()
3487 psr->sink_not_reliable = true; in intel_psr_short_pulse()
3490 if (!psr->panel_replay_enabled && status == DP_PSR_SINK_INTERNAL_ERROR && in intel_psr_short_pulse()
3492 drm_dbg_kms(display->drm, in intel_psr_short_pulse()
3495 drm_dbg_kms(display->drm, in intel_psr_short_pulse()
3498 drm_dbg_kms(display->drm, in intel_psr_short_pulse()
3501 drm_dbg_kms(display->drm, in intel_psr_short_pulse()
3505 drm_err(display->drm, in intel_psr_short_pulse()
3509 drm_dp_dpcd_writeb(&intel_dp->aux, DP_PSR_ERROR_STATUS, error_status); in intel_psr_short_pulse()
3511 if (!psr->panel_replay_enabled) { in intel_psr_short_pulse()
3517 mutex_unlock(&psr->lock); in intel_psr_short_pulse()
3527 mutex_lock(&intel_dp->psr.lock); in intel_psr_enabled()
3528 ret = intel_dp->psr.enabled; in intel_psr_enabled()
3529 mutex_unlock(&intel_dp->psr.lock); in intel_psr_enabled()
3535 * intel_psr_link_ok - return psr->link_ok
3538 * We are seeing unexpected link re-trainings with some panels. This is caused
3554 mutex_lock(&intel_dp->psr.lock); in intel_psr_link_ok()
3555 ret = intel_dp->psr.link_ok; in intel_psr_link_ok()
3556 mutex_unlock(&intel_dp->psr.lock); in intel_psr_link_ok()
3562 * intel_psr_lock - grab PSR lock
3571 struct intel_display *display = to_intel_display(crtc_state); in intel_psr_lock() local
3574 if (!crtc_state->has_psr) in intel_psr_lock()
3577 for_each_intel_encoder_mask_with_psr(display->drm, encoder, in intel_psr_lock()
3578 crtc_state->uapi.encoder_mask) { in intel_psr_lock()
3581 mutex_lock(&intel_dp->psr.lock); in intel_psr_lock()
3587 * intel_psr_unlock - release PSR lock
3594 struct intel_display *display = to_intel_display(crtc_state); in intel_psr_unlock() local
3597 if (!crtc_state->has_psr) in intel_psr_unlock()
3600 for_each_intel_encoder_mask_with_psr(display->drm, encoder, in intel_psr_unlock()
3601 crtc_state->uapi.encoder_mask) { in intel_psr_unlock()
3604 mutex_unlock(&intel_dp->psr.lock); in intel_psr_unlock()
3612 struct intel_display *display = to_intel_display(intel_dp); in psr_source_status() local
3613 enum transcoder cpu_transcoder = intel_dp->psr.transcoder; in psr_source_status()
3617 if (intel_dp_is_edp(intel_dp) && (intel_dp->psr.sel_update_enabled || in psr_source_status()
3618 intel_dp->psr.panel_replay_enabled)) { in psr_source_status()
3632 val = intel_de_read(display, in psr_source_status()
3633 EDP_PSR2_STATUS(display, cpu_transcoder)); in psr_source_status()
3648 val = intel_de_read(display, in psr_source_status()
3649 psr_status_reg(display, cpu_transcoder)); in psr_source_status()
3661 struct intel_psr *psr = &intel_dp->psr; in intel_psr_sink_capability()
3664 str_yes_no(psr->sink_support)); in intel_psr_sink_capability()
3666 if (psr->sink_support) in intel_psr_sink_capability()
3667 seq_printf(m, " [0x%02x]", intel_dp->psr_dpcd[0]); in intel_psr_sink_capability()
3668 if (intel_dp->psr_dpcd[0] == DP_PSR2_WITH_Y_COORD_ET_SUPPORTED) in intel_psr_sink_capability()
3670 seq_printf(m, ", Panel Replay = %s", str_yes_no(psr->sink_panel_replay_support)); in intel_psr_sink_capability()
3672 str_yes_no(psr->sink_panel_replay_su_support)); in intel_psr_sink_capability()
3673 if (intel_dp->pr_dpcd & DP_PANEL_REPLAY_EARLY_TRANSPORT_SUPPORT) in intel_psr_sink_capability()
3681 struct intel_psr *psr = &intel_dp->psr; in intel_psr_print_mode()
3684 if (psr->enabled) in intel_psr_print_mode()
3689 if (psr->panel_replay_enabled && psr->sel_update_enabled) in intel_psr_print_mode()
3691 else if (psr->panel_replay_enabled) in intel_psr_print_mode()
3693 else if (psr->sel_update_enabled) in intel_psr_print_mode()
3695 else if (psr->enabled) in intel_psr_print_mode()
3700 if (psr->su_region_et_enabled) in intel_psr_print_mode()
3710 struct intel_display *display = to_intel_display(intel_dp); in intel_psr_status() local
3711 struct drm_i915_private *dev_priv = to_i915(display->drm); in intel_psr_status()
3712 enum transcoder cpu_transcoder = intel_dp->psr.transcoder; in intel_psr_status()
3713 struct intel_psr *psr = &intel_dp->psr; in intel_psr_status()
3720 if (!(psr->sink_support || psr->sink_panel_replay_support)) in intel_psr_status()
3723 wakeref = intel_runtime_pm_get(&dev_priv->runtime_pm); in intel_psr_status()
3724 mutex_lock(&psr->lock); in intel_psr_status()
3728 if (!psr->enabled) { in intel_psr_status()
3730 str_yes_no(psr->sink_not_reliable)); in intel_psr_status()
3735 if (psr->panel_replay_enabled) { in intel_psr_status()
3736 val = intel_de_read(display, TRANS_DP2_CTL(cpu_transcoder)); in intel_psr_status()
3739 psr2_ctl = intel_de_read(display, in intel_psr_status()
3740 EDP_PSR2_CTL(display, in intel_psr_status()
3744 } else if (psr->sel_update_enabled) { in intel_psr_status()
3745 val = intel_de_read(display, in intel_psr_status()
3746 EDP_PSR2_CTL(display, cpu_transcoder)); in intel_psr_status()
3749 val = intel_de_read(display, psr_ctl_reg(display, cpu_transcoder)); in intel_psr_status()
3754 if (psr->panel_replay_enabled && intel_dp_is_edp(intel_dp)) in intel_psr_status()
3759 psr->busy_frontbuffer_bits); in intel_psr_status()
3764 val = intel_de_read(display, psr_perf_cnt_reg(display, cpu_transcoder)); in intel_psr_status()
3768 if (psr->debug & I915_PSR_DEBUG_IRQ) { in intel_psr_status()
3770 psr->last_entry_attempt); in intel_psr_status()
3771 seq_printf(m, "Last exit at: %lld\n", psr->last_exit); in intel_psr_status()
3774 if (psr->sel_update_enabled) { in intel_psr_status()
3779 * Reading all 3 registers before hand to minimize crossing a in intel_psr_status()
3783 val = intel_de_read(display, in intel_psr_status()
3784 PSR2_SU_STATUS(display, cpu_transcoder, frame)); in intel_psr_status()
3800 str_enabled_disabled(psr->psr2_sel_fetch_enabled)); in intel_psr_status()
3804 mutex_unlock(&psr->lock); in intel_psr_status()
3805 intel_runtime_pm_put(&dev_priv->runtime_pm, wakeref); in intel_psr_status()
3812 struct intel_display *display = m->private; in i915_edp_psr_status_show() local
3816 if (!HAS_PSR(display)) in i915_edp_psr_status_show()
3817 return -ENODEV; in i915_edp_psr_status_show()
3820 for_each_intel_encoder_with_psr(display->drm, encoder) { in i915_edp_psr_status_show()
3826 return -ENODEV; in i915_edp_psr_status_show()
3835 struct intel_display *display = data; in i915_edp_psr_debug_set() local
3836 struct drm_i915_private *dev_priv = to_i915(display->drm); in i915_edp_psr_debug_set()
3839 int ret = -ENODEV; in i915_edp_psr_debug_set()
3841 if (!HAS_PSR(display)) in i915_edp_psr_debug_set()
3844 for_each_intel_encoder_with_psr(display->drm, encoder) { in i915_edp_psr_debug_set()
3847 drm_dbg_kms(display->drm, "Setting PSR debug to %llx\n", val); in i915_edp_psr_debug_set()
3849 wakeref = intel_runtime_pm_get(&dev_priv->runtime_pm); in i915_edp_psr_debug_set()
3854 intel_runtime_pm_put(&dev_priv->runtime_pm, wakeref); in i915_edp_psr_debug_set()
3863 struct intel_display *display = data; in i915_edp_psr_debug_get() local
3866 if (!HAS_PSR(display)) in i915_edp_psr_debug_get()
3867 return -ENODEV; in i915_edp_psr_debug_get()
3869 for_each_intel_encoder_with_psr(display->drm, encoder) { in i915_edp_psr_debug_get()
3873 *val = READ_ONCE(intel_dp->psr.debug); in i915_edp_psr_debug_get()
3877 return -ENODEV; in i915_edp_psr_debug_get()
3884 void intel_psr_debugfs_register(struct intel_display *display) in intel_psr_debugfs_register() argument
3886 struct drm_minor *minor = display->drm->primary; in intel_psr_debugfs_register()
3888 debugfs_create_file("i915_edp_psr_debug", 0644, minor->debugfs_root, in intel_psr_debugfs_register()
3889 display, &i915_edp_psr_debug_fops); in intel_psr_debugfs_register()
3891 debugfs_create_file("i915_edp_psr_status", 0444, minor->debugfs_root, in intel_psr_debugfs_register()
3892 display, &i915_edp_psr_status_fops); in intel_psr_debugfs_register()
3897 if (intel_dp->psr.panel_replay_enabled) in psr_mode_str()
3898 return "PANEL-REPLAY"; in psr_mode_str()
3899 else if (intel_dp->psr.enabled) in psr_mode_str()
3907 struct intel_connector *connector = m->private; in i915_psr_sink_status_show()
3911 "transition to active, capture and display", in i915_psr_sink_status_show()
3912 "active, display from RFB", in i915_psr_sink_status_show()
3913 "active, capture and display on sink device timings", in i915_psr_sink_status_show()
3914 "transition to inactive, capture and display, timing re-sync", in i915_psr_sink_status_show()
3924 seq_puts(m, "PSR/Panel-Replay Unsupported\n"); in i915_psr_sink_status_show()
3925 return -ENODEV; in i915_psr_sink_status_show()
3928 if (connector->base.status != connector_status_connected) in i915_psr_sink_status_show()
3929 return -ENODEV; in i915_psr_sink_status_show()
3964 struct intel_connector *connector = m->private; in i915_psr_status_show()
3973 struct intel_display *display = to_intel_display(connector); in intel_psr_connector_debugfs_add() local
3974 struct dentry *root = connector->base.debugfs_entry; in intel_psr_connector_debugfs_add()
3976 if (connector->base.connector_type != DRM_MODE_CONNECTOR_eDP && in intel_psr_connector_debugfs_add()
3977 connector->base.connector_type != DRM_MODE_CONNECTOR_DisplayPort) in intel_psr_connector_debugfs_add()
3983 if (HAS_PSR(display) || HAS_DP20(display)) in intel_psr_connector_debugfs_add()