xref: /linux/drivers/gpu/drm/i915/i915_gpu_error.h (revision 84b9b44b)
/*
 * SPDX-License-Identifier: MIT
 *
 * Copyright © 2008-2018 Intel Corporation
 */

#ifndef _I915_GPU_ERROR_H_
#define _I915_GPU_ERROR_H_

#include <linux/atomic.h>
#include <linux/kref.h>
#include <linux/ktime.h>
#include <linux/sched.h>

#include <drm/drm_mm.h>

#include "gt/intel_engine.h"
#include "gt/intel_gt_types.h"
#include "gt/uc/intel_uc_fw.h"

#include "intel_device_info.h"

#include "i915_gem.h"
#include "i915_gem_gtt.h"
#include "i915_params.h"
#include "i915_scheduler.h"

struct drm_i915_private;
struct i915_vma_compress;
struct intel_engine_capture_vma;
struct intel_overlay_error_state;

/* Snapshot of a single GPU buffer (vma) taken at error-capture time */
struct i915_vma_coredump {
	struct i915_vma_coredump *next;

	char name[20];

	u64 gtt_offset;
	u64 gtt_size;
	u32 gtt_page_sizes;

	int unused;
	struct list_head page_list;
};

struct i915_request_coredump {
	unsigned long flags;
	pid_t pid;
	u32 context;
	u32 seqno;
	u32 head;
	u32 tail;
	struct i915_sched_attr sched_attr;
};

struct __guc_capture_parsed_output;

/* Per-engine state captured at the time of an error/hang */
struct intel_engine_coredump {
	const struct intel_engine_cs *engine;

	bool hung;
	bool simulated;
	u32 reset_count;

	/* position of active request inside the ring */
	u32 rq_head, rq_post, rq_tail;

	/* Register state */
	u32 ccid;
	u32 start;
	u32 tail;
	u32 head;
	u32 ctl;
	u32 mode;
	u32 hws;
	u32 ipeir;
	u32 ipehr;
	u32 esr;
	u32 bbstate;
	u32 instpm;
	u32 instps;
	u64 bbaddr;
	u64 acthd;
	u32 fault_reg;
	u64 faddr;
	u32 rc_psmi; /* sleep state */
	u32 nopid;
	u32 excc;
	u32 cmd_cctl;
	u32 cscmdop;
	u32 ctx_sr_ctl;
	u32 dma_faddr_hi;
	u32 dma_faddr_lo;
	struct intel_instdone instdone;

	/* GuC matched capture-lists info */
	struct intel_guc_state_capture *guc_capture;
	struct __guc_capture_parsed_output *guc_capture_node;

	struct i915_gem_context_coredump {
		char comm[TASK_COMM_LEN];

		u64 total_runtime;
		u64 avg_runtime;

		pid_t pid;
		int active;
		int guilty;
		struct i915_sched_attr sched_attr;
		u32 hwsp_seqno;
	} context;

	struct i915_vma_coredump *vma;

	struct i915_request_coredump execlist[EXECLIST_MAX_PORTS];
	unsigned int num_ports;

	struct {
		u32 gfx_mode;
		union {
			u64 pdp[4];
			u32 pp_dir_base;
		};
	} vm_info;

	struct intel_engine_coredump *next;
};

/* Snapshot of a GuC CT (command transport) buffer descriptor */
struct intel_ctb_coredump {
	u32 raw_head, head;
	u32 raw_tail, tail;
	u32 raw_status;
	u32 desc_offset;
	u32 cmds_offset;
	u32 size;
};

/* Per-GT state (registers, fences, engines, GuC/HuC) captured at error time */
struct intel_gt_coredump {
	const struct intel_gt *_gt;
	bool awake;
	bool simulated;

	struct intel_gt_info info;

	/* Generic register state */
	u32 eir;
	u32 pgtbl_er;
	u32 ier;
	u32 gtier[6], ngtier;
	u32 forcewake;
	u32 error; /* gen6+ */
	u32 err_int; /* gen7 */
	u32 fault_data0; /* gen8, gen9 */
	u32 fault_data1; /* gen8, gen9 */
	u32 done_reg;
	u32 gac_eco;
	u32 gam_ecochk;
	u32 gab_ctl;
	u32 gfx_mode;
	u32 gtt_cache;
	u32 aux_err; /* gen12 */
	u32 gam_done; /* gen12 */
	u32 clock_frequency;
	u32 clock_period_ns;

	/* Display related */
	u32 derrmr;
	u32 sfc_done[I915_MAX_SFC]; /* gen12 */

	u32 nfence;
	u64 fence[I915_MAX_NUM_FENCES];

	struct intel_engine_coredump *engine;

	struct intel_uc_coredump {
		struct intel_uc_fw guc_fw;
		struct intel_uc_fw huc_fw;
		struct guc_info {
			struct intel_ctb_coredump ctb[2];
			struct i915_vma_coredump *vma_ctb;
			struct i915_vma_coredump *vma_log;
			u32 timestamp;
			u16 last_fence;
			bool is_guc_capture;
		} guc;
	} *uc;

	struct intel_gt_coredump *next;
};

/* Top-level container for a captured GPU error state */
struct i915_gpu_coredump {
	struct kref ref;
	ktime_t time;
	ktime_t boottime;
	ktime_t uptime;
	unsigned long capture;

	struct drm_i915_private *i915;

	struct intel_gt_coredump *gt;

	char error_msg[128];
	bool simulated;
	bool wakelock;
	bool suspended;
	int iommu;
	u32 reset_count;
	u32 suspend_count;

	struct intel_device_info device_info;
	struct intel_runtime_info runtime_info;
	struct intel_driver_caps driver_caps;
	struct i915_params params;

	struct intel_overlay_error_state *overlay;

	struct scatterlist *sgl, *fit;
};

/* Per-device bookkeeping for error capture and reset counters */
struct i915_gpu_error {
	/* For reset and error_state handling. */
	spinlock_t lock;
	/* Protected by the above dev->gpu_error.lock. */
	struct i915_gpu_coredump *first_error;

	atomic_t pending_fb_pin;

	/** Number of times the device has been reset (global) */
	atomic_t reset_count;

	/** Number of times an engine has been reset */
	atomic_t reset_engine_count[I915_NUM_ENGINES];
};

/* Streaming buffer used when formatting the captured state as text */
struct drm_i915_error_state_buf {
	struct drm_i915_private *i915;
	struct scatterlist *sgl, *cur, *end;

	char *buf;
	size_t bytes;
	size_t size;
	loff_t iter;

	int err;
};

static inline u32 i915_reset_count(struct i915_gpu_error *error)
{
	return atomic_read(&error->reset_count);
}

static inline u32 i915_reset_engine_count(struct i915_gpu_error *error,
					  const struct intel_engine_cs *engine)
{
	return atomic_read(&error->reset_engine_count[engine->uabi_class]);
}
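
/*
 * Both counters above only ever increase, so a caller can detect that a
 * reset happened between two points by sampling them before and after.
 * A minimal sketch (assuming @i915 is the usual struct drm_i915_private
 * with its embedded gpu_error, as referenced elsewhere in this file):
 *
 *	u32 before = i915_reset_engine_count(&i915->gpu_error, engine);
 *
 *	... submit work and wait for it ...
 *
 *	if (i915_reset_engine_count(&i915->gpu_error, engine) != before)
 *		pr_info("engine was reset while we waited\n");
 */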

#define CORE_DUMP_FLAG_NONE           0x0
#define CORE_DUMP_FLAG_IS_GUC_CAPTURE BIT(0)

#if IS_ENABLED(CONFIG_DRM_I915_CAPTURE_ERROR)

__printf(2, 3)
void i915_error_printf(struct drm_i915_error_state_buf *e, const char *f, ...);
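
/*
 * i915_error_printf() is the printf-style primitive the dump formatters are
 * built on. A minimal sketch of how a per-engine printer might use it (the
 * field names come from struct intel_engine_coredump above):
 *
 *	i915_error_printf(m, "  ACTHD: 0x%016llx\n", ee->acthd);
 *	i915_error_printf(m, "  IPEIR: 0x%08x\n", ee->ipeir);
 *	i915_error_printf(m, "  IPEHR: 0x%08x\n", ee->ipehr);
 */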
void intel_gpu_error_print_vma(struct drm_i915_error_state_buf *m,
			       const struct intel_engine_cs *engine,
			       const struct i915_vma_coredump *vma);
struct i915_vma_coredump *
intel_gpu_error_find_batch(const struct intel_engine_coredump *ee);

struct i915_gpu_coredump *i915_gpu_coredump(struct intel_gt *gt,
					    intel_engine_mask_t engine_mask, u32 dump_flags);
void i915_capture_error_state(struct intel_gt *gt,
			      intel_engine_mask_t engine_mask, u32 dump_flags);
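
/*
 * i915_capture_error_state() is the one-shot entry point: it snapshots the
 * GPU state and stores it for later retrieval, while i915_gpu_coredump()
 * hands the snapshot back to the caller instead. A minimal sketch of how a
 * hang/reset path might use the former (the call site is an assumption,
 * only the helper itself is declared here):
 *
 *	i915_capture_error_state(gt, engine->mask, CORE_DUMP_FLAG_NONE);
 */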

struct i915_gpu_coredump *
i915_gpu_coredump_alloc(struct drm_i915_private *i915, gfp_t gfp);

struct intel_gt_coredump *
intel_gt_coredump_alloc(struct intel_gt *gt, gfp_t gfp, u32 dump_flags);

struct intel_engine_coredump *
intel_engine_coredump_alloc(struct intel_engine_cs *engine, gfp_t gfp, u32 dump_flags);

struct intel_engine_capture_vma *
intel_engine_coredump_add_request(struct intel_engine_coredump *ee,
				  struct i915_request *rq,
				  gfp_t gfp);

void intel_engine_coredump_add_vma(struct intel_engine_coredump *ee,
				   struct intel_engine_capture_vma *capture,
				   struct i915_vma_compress *compress);

struct i915_vma_compress *
i915_vma_capture_prepare(struct intel_gt_coredump *gt);

void i915_vma_capture_finish(struct intel_gt_coredump *gt,
			     struct i915_vma_compress *compress);

void i915_error_state_store(struct i915_gpu_coredump *error);
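
/*
 * The allocators above are building blocks for assembling a coredump by
 * hand. A rough sketch of how they might compose, based purely on the
 * declarations in this header (error handling and multi-engine/multi-GT
 * chaining omitted):
 *
 *	struct i915_gpu_coredump *error;
 *	struct intel_gt_coredump *gt_dump;
 *	struct intel_engine_coredump *ee;
 *	struct intel_engine_capture_vma *capture;
 *	struct i915_vma_compress *compress;
 *
 *	error = i915_gpu_coredump_alloc(i915, GFP_KERNEL);
 *	gt_dump = intel_gt_coredump_alloc(gt, GFP_KERNEL, CORE_DUMP_FLAG_NONE);
 *	error->gt = gt_dump;
 *
 *	compress = i915_vma_capture_prepare(gt_dump);
 *	ee = intel_engine_coredump_alloc(engine, GFP_KERNEL, CORE_DUMP_FLAG_NONE);
 *	capture = intel_engine_coredump_add_request(ee, rq, GFP_KERNEL);
 *	intel_engine_coredump_add_vma(ee, capture, compress);
 *	gt_dump->engine = ee;
 *	i915_vma_capture_finish(gt_dump, compress);
 *
 *	i915_error_state_store(error);
 *	i915_gpu_coredump_put(error);
 */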

static inline struct i915_gpu_coredump *
i915_gpu_coredump_get(struct i915_gpu_coredump *gpu)
{
	kref_get(&gpu->ref);
	return gpu;
}

ssize_t
i915_gpu_coredump_copy_to_buffer(struct i915_gpu_coredump *error,
				 char *buf, loff_t offset, size_t count);

void __i915_gpu_coredump_free(struct kref *kref);
static inline void i915_gpu_coredump_put(struct i915_gpu_coredump *gpu)
{
	if (gpu)
		kref_put(&gpu->ref, __i915_gpu_coredump_free);
}

struct i915_gpu_coredump *i915_first_error_state(struct drm_i915_private *i915);
void i915_reset_error_state(struct drm_i915_private *i915);
void i915_disable_error_state(struct drm_i915_private *i915, int err);
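
/*
 * Reading a stored error state back out is a matter of taking a reference
 * to it and draining it chunk by chunk. A minimal sketch of a reader,
 * assuming i915_first_error_state() returns a referenced snapshot (or an
 * ERR_PTR/NULL) that the caller must release with i915_gpu_coredump_put():
 *
 *	struct i915_gpu_coredump *error;
 *	char chunk[512];
 *	loff_t pos = 0;
 *	ssize_t ret;
 *
 *	error = i915_first_error_state(i915);
 *	if (IS_ERR_OR_NULL(error))
 *		return;
 *
 *	do {
 *		ret = i915_gpu_coredump_copy_to_buffer(error, chunk,
 *						       pos, sizeof(chunk));
 *		if (ret <= 0)
 *			break;
 *
 *		... hand 'chunk' to the consumer (seq_file, sysfs buffer, ...) ...
 *
 *		pos += ret;
 *	} while (ret == sizeof(chunk));
 *
 *	i915_gpu_coredump_put(error);
 */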

#else

__printf(2, 3)
static inline void
i915_error_printf(struct drm_i915_error_state_buf *e, const char *f, ...)
{
}

static inline void
i915_capture_error_state(struct intel_gt *gt, intel_engine_mask_t engine_mask, u32 dump_flags)
{
}

static inline struct i915_gpu_coredump *
i915_gpu_coredump_alloc(struct drm_i915_private *i915, gfp_t gfp)
{
	return NULL;
}

static inline struct intel_gt_coredump *
intel_gt_coredump_alloc(struct intel_gt *gt, gfp_t gfp, u32 dump_flags)
{
	return NULL;
}

static inline struct intel_engine_coredump *
intel_engine_coredump_alloc(struct intel_engine_cs *engine, gfp_t gfp, u32 dump_flags)
{
	return NULL;
}

static inline struct intel_engine_capture_vma *
intel_engine_coredump_add_request(struct intel_engine_coredump *ee,
				  struct i915_request *rq,
				  gfp_t gfp)
{
	return NULL;
}

static inline void
intel_engine_coredump_add_vma(struct intel_engine_coredump *ee,
			      struct intel_engine_capture_vma *capture,
			      struct i915_vma_compress *compress)
{
}

static inline struct i915_vma_compress *
i915_vma_capture_prepare(struct intel_gt_coredump *gt)
{
	return NULL;
}

static inline void
i915_vma_capture_finish(struct intel_gt_coredump *gt,
			struct i915_vma_compress *compress)
{
}

static inline void
i915_error_state_store(struct i915_gpu_coredump *error)
{
}

static inline void i915_gpu_coredump_put(struct i915_gpu_coredump *gpu)
{
}

static inline struct i915_gpu_coredump *
i915_first_error_state(struct drm_i915_private *i915)
{
	return ERR_PTR(-ENODEV);
}

static inline void i915_reset_error_state(struct drm_i915_private *i915)
{
}

static inline void i915_disable_error_state(struct drm_i915_private *i915,
					    int err)
{
}

#endif /* IS_ENABLED(CONFIG_DRM_I915_CAPTURE_ERROR) */

#endif /* _I915_GPU_ERROR_H_ */