/* SPDX-License-Identifier: MIT */
/*
 * Copyright © 2019 Intel Corporation
 */

#ifndef __INTEL_DISPLAY_POWER_H__
#define __INTEL_DISPLAY_POWER_H__

#include "intel_runtime_pm.h"

enum dpio_channel;
enum dpio_phy;
struct drm_i915_private;
struct i915_power_well;
struct intel_encoder;

enum intel_display_power_domain {
	POWER_DOMAIN_DISPLAY_CORE,
	POWER_DOMAIN_PIPE_A,
	POWER_DOMAIN_PIPE_B,
	POWER_DOMAIN_PIPE_C,
	POWER_DOMAIN_PIPE_D,
	POWER_DOMAIN_PIPE_A_PANEL_FITTER,
	POWER_DOMAIN_PIPE_B_PANEL_FITTER,
	POWER_DOMAIN_PIPE_C_PANEL_FITTER,
	POWER_DOMAIN_PIPE_D_PANEL_FITTER,
	POWER_DOMAIN_TRANSCODER_A,
	POWER_DOMAIN_TRANSCODER_B,
	POWER_DOMAIN_TRANSCODER_C,
	POWER_DOMAIN_TRANSCODER_D,
	POWER_DOMAIN_TRANSCODER_EDP,
	/* VDSC/joining for eDP/DSI transcoder (ICL) or pipe A (TGL) */
	POWER_DOMAIN_TRANSCODER_VDSC_PW2,
	POWER_DOMAIN_TRANSCODER_DSI_A,
	POWER_DOMAIN_TRANSCODER_DSI_C,
	POWER_DOMAIN_PORT_DDI_A_LANES,
	POWER_DOMAIN_PORT_DDI_B_LANES,
	POWER_DOMAIN_PORT_DDI_C_LANES,
	POWER_DOMAIN_PORT_DDI_D_LANES,
	POWER_DOMAIN_PORT_DDI_E_LANES,
	POWER_DOMAIN_PORT_DDI_F_LANES,
	POWER_DOMAIN_PORT_DDI_G_LANES,
	POWER_DOMAIN_PORT_DDI_H_LANES,
	POWER_DOMAIN_PORT_DDI_I_LANES,

	POWER_DOMAIN_PORT_DDI_LANES_TC1 = POWER_DOMAIN_PORT_DDI_D_LANES, /* tgl+ */
	POWER_DOMAIN_PORT_DDI_LANES_TC2,
	POWER_DOMAIN_PORT_DDI_LANES_TC3,
	POWER_DOMAIN_PORT_DDI_LANES_TC4,
	POWER_DOMAIN_PORT_DDI_LANES_TC5,
	POWER_DOMAIN_PORT_DDI_LANES_TC6,

	POWER_DOMAIN_PORT_DDI_LANES_D_XELPD = POWER_DOMAIN_PORT_DDI_LANES_TC5, /* XELPD */
	POWER_DOMAIN_PORT_DDI_LANES_E_XELPD,

	POWER_DOMAIN_PORT_DDI_A_IO,
	POWER_DOMAIN_PORT_DDI_B_IO,
	POWER_DOMAIN_PORT_DDI_C_IO,
	POWER_DOMAIN_PORT_DDI_D_IO,
	POWER_DOMAIN_PORT_DDI_E_IO,
	POWER_DOMAIN_PORT_DDI_F_IO,
	POWER_DOMAIN_PORT_DDI_G_IO,
	POWER_DOMAIN_PORT_DDI_H_IO,
	POWER_DOMAIN_PORT_DDI_I_IO,

	POWER_DOMAIN_PORT_DDI_IO_TC1 = POWER_DOMAIN_PORT_DDI_D_IO, /* tgl+ */
	POWER_DOMAIN_PORT_DDI_IO_TC2,
	POWER_DOMAIN_PORT_DDI_IO_TC3,
	POWER_DOMAIN_PORT_DDI_IO_TC4,
	POWER_DOMAIN_PORT_DDI_IO_TC5,
	POWER_DOMAIN_PORT_DDI_IO_TC6,

	POWER_DOMAIN_PORT_DDI_IO_D_XELPD = POWER_DOMAIN_PORT_DDI_IO_TC5, /* XELPD */
	POWER_DOMAIN_PORT_DDI_IO_E_XELPD,

	POWER_DOMAIN_PORT_DSI,
	POWER_DOMAIN_PORT_CRT,
	POWER_DOMAIN_PORT_OTHER,
	POWER_DOMAIN_VGA,
	POWER_DOMAIN_AUDIO_MMIO,
	POWER_DOMAIN_AUDIO_PLAYBACK,
	POWER_DOMAIN_AUX_A,
	POWER_DOMAIN_AUX_B,
	POWER_DOMAIN_AUX_C,
	POWER_DOMAIN_AUX_D,
	POWER_DOMAIN_AUX_E,
	POWER_DOMAIN_AUX_F,
	POWER_DOMAIN_AUX_G,
	POWER_DOMAIN_AUX_H,
	POWER_DOMAIN_AUX_I,

	POWER_DOMAIN_AUX_USBC1 = POWER_DOMAIN_AUX_D, /* tgl+ */
	POWER_DOMAIN_AUX_USBC2,
	POWER_DOMAIN_AUX_USBC3,
	POWER_DOMAIN_AUX_USBC4,
	POWER_DOMAIN_AUX_USBC5,
	POWER_DOMAIN_AUX_USBC6,

	POWER_DOMAIN_AUX_D_XELPD = POWER_DOMAIN_AUX_USBC5, /* XELPD */
	POWER_DOMAIN_AUX_E_XELPD,

	POWER_DOMAIN_AUX_IO_A,
	POWER_DOMAIN_AUX_C_TBT,
	POWER_DOMAIN_AUX_D_TBT,
	POWER_DOMAIN_AUX_E_TBT,
	POWER_DOMAIN_AUX_F_TBT,
	POWER_DOMAIN_AUX_G_TBT,
	POWER_DOMAIN_AUX_H_TBT,
	POWER_DOMAIN_AUX_I_TBT,

	POWER_DOMAIN_AUX_TBT1 = POWER_DOMAIN_AUX_D_TBT, /* tgl+ */
	POWER_DOMAIN_AUX_TBT2,
	POWER_DOMAIN_AUX_TBT3,
	POWER_DOMAIN_AUX_TBT4,
	POWER_DOMAIN_AUX_TBT5,
	POWER_DOMAIN_AUX_TBT6,

	POWER_DOMAIN_GMBUS,
	POWER_DOMAIN_MODESET,
	POWER_DOMAIN_GT_IRQ,
	POWER_DOMAIN_DC_OFF,
	POWER_DOMAIN_TC_COLD_OFF,
	POWER_DOMAIN_INIT,

	POWER_DOMAIN_NUM,
};

/*
 * i915_power_well_id:
 *
 * IDs used to look up power wells. Power wells accessed directly bypassing
 * the power domains framework must be assigned a unique ID. The rest of power
 * wells must be assigned DISP_PW_ID_NONE.
 */
enum i915_power_well_id {
	DISP_PW_ID_NONE,

	VLV_DISP_PW_DISP2D,
	BXT_DISP_PW_DPIO_CMN_A,
	VLV_DISP_PW_DPIO_CMN_BC,
	GLK_DISP_PW_DPIO_CMN_C,
	CHV_DISP_PW_DPIO_CMN_D,
	HSW_DISP_PW_GLOBAL,
	SKL_DISP_PW_MISC_IO,
	SKL_DISP_PW_1,
	SKL_DISP_PW_2,
	ICL_DISP_PW_3,
	SKL_DISP_DC_OFF,
	TGL_DISP_PW_TC_COLD_OFF,
};
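
/*
 * Example (illustrative sketch): power wells that have a unique ID above can
 * be queried directly via intel_display_power_well_is_enabled(), declared
 * further down in this header:
 *
 *	if (intel_display_power_well_is_enabled(dev_priv, SKL_DISP_PW_2))
 *		... power well 2 is currently powered up ...
 */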

#define POWER_DOMAIN_PIPE(pipe) ((pipe) + POWER_DOMAIN_PIPE_A)
#define POWER_DOMAIN_PIPE_PANEL_FITTER(pipe) \
		((pipe) + POWER_DOMAIN_PIPE_A_PANEL_FITTER)
#define POWER_DOMAIN_TRANSCODER(tran) \
	((tran) == TRANSCODER_EDP ? POWER_DOMAIN_TRANSCODER_EDP : \
	 (tran) + POWER_DOMAIN_TRANSCODER_A)
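
/*
 * Example (illustrative sketch, assuming the enum pipe / enum transcoder
 * values from the display core headers): POWER_DOMAIN_PIPE(PIPE_B) evaluates
 * to POWER_DOMAIN_PIPE_B, while POWER_DOMAIN_TRANSCODER(TRANSCODER_EDP) maps
 * to the dedicated POWER_DOMAIN_TRANSCODER_EDP domain.
 */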

struct i915_power_domains {
	/*
	 * Power wells needed for initialization at driver init and suspend
	 * time are on. They are kept on until after the first modeset.
	 */
	bool initializing;
	bool display_core_suspended;
	int power_well_count;

	intel_wakeref_t init_wakeref;
	intel_wakeref_t disable_wakeref;

	struct mutex lock;
	int domain_use_count[POWER_DOMAIN_NUM];

	struct delayed_work async_put_work;
	intel_wakeref_t async_put_wakeref;
	u64 async_put_domains[2];

	struct i915_power_well *power_wells;
};

struct intel_display_power_domain_set {
	u64 mask;
#ifdef CONFIG_DRM_I915_DEBUG_RUNTIME_PM
	intel_wakeref_t wakerefs[POWER_DOMAIN_NUM];
#endif
};

#define for_each_power_domain(domain, mask)				\
	for ((domain) = 0; (domain) < POWER_DOMAIN_NUM; (domain)++)	\
		for_each_if(BIT_ULL(domain) & (mask))
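
/*
 * Example usage of for_each_power_domain() (a minimal sketch; the mask built
 * here is arbitrary):
 *
 *	enum intel_display_power_domain domain;
 *	u64 mask = BIT_ULL(POWER_DOMAIN_PIPE_A) | BIT_ULL(POWER_DOMAIN_AUX_A);
 *
 *	for_each_power_domain(domain, mask)
 *		pr_info("%s\n", intel_display_power_domain_str(domain));
 */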

#define for_each_power_well(__dev_priv, __power_well)				\
	for ((__power_well) = (__dev_priv)->power_domains.power_wells;	\
	     (__power_well) - (__dev_priv)->power_domains.power_wells <	\
		(__dev_priv)->power_domains.power_well_count;		\
	     (__power_well)++)

#define for_each_power_well_reverse(__dev_priv, __power_well)			\
	for ((__power_well) = (__dev_priv)->power_domains.power_wells +		\
			      (__dev_priv)->power_domains.power_well_count - 1;	\
	     (__power_well) - (__dev_priv)->power_domains.power_wells >= 0;	\
	     (__power_well)--)

#define for_each_power_domain_well(__dev_priv, __power_well, __domain_mask)	\
	for_each_power_well(__dev_priv, __power_well)				\
		for_each_if((__power_well)->desc->domains & (__domain_mask))

#define for_each_power_domain_well_reverse(__dev_priv, __power_well, __domain_mask) \
	for_each_power_well_reverse(__dev_priv, __power_well)		        \
		for_each_if((__power_well)->desc->domains & (__domain_mask))

int intel_power_domains_init(struct drm_i915_private *dev_priv);
void intel_power_domains_cleanup(struct drm_i915_private *dev_priv);
void intel_power_domains_init_hw(struct drm_i915_private *dev_priv, bool resume);
void intel_power_domains_driver_remove(struct drm_i915_private *dev_priv);
void intel_power_domains_enable(struct drm_i915_private *dev_priv);
void intel_power_domains_disable(struct drm_i915_private *dev_priv);
void intel_power_domains_suspend(struct drm_i915_private *dev_priv,
				 enum i915_drm_suspend_mode);
void intel_power_domains_resume(struct drm_i915_private *dev_priv);
void intel_power_domains_sanitize_state(struct drm_i915_private *dev_priv);

void intel_display_power_suspend_late(struct drm_i915_private *i915);
void intel_display_power_resume_early(struct drm_i915_private *i915);
void intel_display_power_suspend(struct drm_i915_private *i915);
void intel_display_power_resume(struct drm_i915_private *i915);
void intel_display_power_set_target_dc_state(struct drm_i915_private *dev_priv,
					     u32 state);

const char *
intel_display_power_domain_str(enum intel_display_power_domain domain);

bool intel_display_power_is_enabled(struct drm_i915_private *dev_priv,
				    enum intel_display_power_domain domain);
bool intel_display_power_well_is_enabled(struct drm_i915_private *dev_priv,
					 enum i915_power_well_id power_well_id);
bool __intel_display_power_is_enabled(struct drm_i915_private *dev_priv,
				      enum intel_display_power_domain domain);
intel_wakeref_t intel_display_power_get(struct drm_i915_private *dev_priv,
					enum intel_display_power_domain domain);
intel_wakeref_t
intel_display_power_get_if_enabled(struct drm_i915_private *dev_priv,
				   enum intel_display_power_domain domain);
void __intel_display_power_put_async(struct drm_i915_private *i915,
				     enum intel_display_power_domain domain,
				     intel_wakeref_t wakeref);
void intel_display_power_flush_work(struct drm_i915_private *i915);
#if IS_ENABLED(CONFIG_DRM_I915_DEBUG_RUNTIME_PM)
void intel_display_power_put(struct drm_i915_private *dev_priv,
			     enum intel_display_power_domain domain,
			     intel_wakeref_t wakeref);
static inline void
intel_display_power_put_async(struct drm_i915_private *i915,
			      enum intel_display_power_domain domain,
			      intel_wakeref_t wakeref)
{
	__intel_display_power_put_async(i915, domain, wakeref);
}
#else
void intel_display_power_put_unchecked(struct drm_i915_private *dev_priv,
				       enum intel_display_power_domain domain);

static inline void
intel_display_power_put(struct drm_i915_private *i915,
			enum intel_display_power_domain domain,
			intel_wakeref_t wakeref)
{
	intel_display_power_put_unchecked(i915, domain);
}

static inline void
intel_display_power_put_async(struct drm_i915_private *i915,
			      enum intel_display_power_domain domain,
			      intel_wakeref_t wakeref)
{
	__intel_display_power_put_async(i915, domain, -1);
}
#endif
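
/*
 * Typical reference-counted usage (illustrative sketch; the AUX A domain is
 * picked arbitrarily). A get must be balanced by a put with the returned
 * wakeref; intel_display_power_get_if_enabled() returns a wakeref that
 * evaluates false when the reference could not be taken, so its result must
 * be checked before use:
 *
 *	intel_wakeref_t wakeref;
 *
 *	wakeref = intel_display_power_get(i915, POWER_DOMAIN_AUX_A);
 *	... access hardware that requires the AUX A domain ...
 *	intel_display_power_put(i915, POWER_DOMAIN_AUX_A, wakeref);
 */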

void
intel_display_power_get_in_set(struct drm_i915_private *i915,
			       struct intel_display_power_domain_set *power_domain_set,
			       enum intel_display_power_domain domain);

bool
intel_display_power_get_in_set_if_enabled(struct drm_i915_private *i915,
					  struct intel_display_power_domain_set *power_domain_set,
					  enum intel_display_power_domain domain);

void
intel_display_power_put_mask_in_set(struct drm_i915_private *i915,
				    struct intel_display_power_domain_set *power_domain_set,
				    u64 mask);

static inline void
intel_display_power_put_all_in_set(struct drm_i915_private *i915,
				   struct intel_display_power_domain_set *power_domain_set)
{
	intel_display_power_put_mask_in_set(i915, power_domain_set, power_domain_set->mask);
}
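
/*
 * Example usage of the domain-set helpers above (a minimal sketch; the
 * domains are arbitrary). References acquired into the set are tracked in
 * its mask and can be dropped together:
 *
 *	struct intel_display_power_domain_set set = {};
 *
 *	intel_display_power_get_in_set(i915, &set, POWER_DOMAIN_PIPE_A);
 *	intel_display_power_get_in_set(i915, &set, POWER_DOMAIN_TRANSCODER_A);
 *	...
 *	intel_display_power_put_all_in_set(i915, &set);
 */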

void intel_display_power_debug(struct drm_i915_private *i915, struct seq_file *m);

/*
 * FIXME: We should probably switch this to a 0-based scheme to be consistent
 * with how we now name/number DBUF_CTL instances.
 */
enum dbuf_slice {
	DBUF_S1,
	DBUF_S2,
	DBUF_S3,
	DBUF_S4,
	I915_MAX_DBUF_SLICES
};

void gen9_dbuf_slices_update(struct drm_i915_private *dev_priv,
			     u8 req_slices);

#define with_intel_display_power(i915, domain, wf) \
	for ((wf) = intel_display_power_get((i915), (domain)); (wf); \
	     intel_display_power_put_async((i915), (domain), (wf)), (wf) = 0)

#define with_intel_display_power_if_enabled(i915, domain, wf) \
	for ((wf) = intel_display_power_get_if_enabled((i915), (domain)); (wf); \
	     intel_display_power_put_async((i915), (domain), (wf)), (wf) = 0)
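
/*
 * Example usage of the scoped helpers above (a minimal sketch; the domain is
 * arbitrary). The reference is held for the body of the block and released
 * asynchronously when the block is left:
 *
 *	intel_wakeref_t wakeref;
 *
 *	with_intel_display_power(i915, POWER_DOMAIN_VGA, wakeref) {
 *		... touch hardware guarded by the VGA power domain ...
 *	}
 */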

void chv_phy_powergate_lanes(struct intel_encoder *encoder,
			     bool override, unsigned int mask);
bool chv_phy_powergate_ch(struct drm_i915_private *dev_priv, enum dpio_phy phy,
			  enum dpio_channel ch, bool override);

#endif /* __INTEL_DISPLAY_POWER_H__ */