/*
 * Copyright 2010 Advanced Micro Devices, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Alex Deucher
 */
#include <linux/firmware.h>
#include <drm/drmP.h>
#include "radeon.h"
#include "radeon_asic.h"
#include "radeon_audio.h"
#include <drm/radeon_drm.h>
#include "evergreend.h"
#include "atom.h"
#include "avivod.h"
#include "evergreen_reg.h"
#include "evergreen_blit_shaders.h"
#include "radeon_ucode.h"

/*
 * Indirect register accessors
 */
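/*
 * Each accessor below uses an index/data register pair: the target offset
 * is written to the *_ADDR (or *_INDEX) register and the payload is then
 * read from or written to the matching *_DATA register.  The lock
 * serializes the two-step sequence against concurrent accessors.
 */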
u32 eg_cg_rreg(struct radeon_device *rdev, u32 reg)
{
	u32 r;

	lockmgr(&rdev->cg_idx_lock, LK_EXCLUSIVE);
	WREG32(EVERGREEN_CG_IND_ADDR, ((reg) & 0xffff));
	r = RREG32(EVERGREEN_CG_IND_DATA);
	lockmgr(&rdev->cg_idx_lock, LK_RELEASE);
	return r;
}

void eg_cg_wreg(struct radeon_device *rdev, u32 reg, u32 v)
{
	lockmgr(&rdev->cg_idx_lock, LK_EXCLUSIVE);
	WREG32(EVERGREEN_CG_IND_ADDR, ((reg) & 0xffff));
	WREG32(EVERGREEN_CG_IND_DATA, (v));
	lockmgr(&rdev->cg_idx_lock, LK_RELEASE);
}

u32 eg_pif_phy0_rreg(struct radeon_device *rdev, u32 reg)
{
	u32 r;

	lockmgr(&rdev->pif_idx_lock, LK_EXCLUSIVE);
	WREG32(EVERGREEN_PIF_PHY0_INDEX, ((reg) & 0xffff));
	r = RREG32(EVERGREEN_PIF_PHY0_DATA);
	lockmgr(&rdev->pif_idx_lock, LK_RELEASE);
	return r;
}

void eg_pif_phy0_wreg(struct radeon_device *rdev, u32 reg, u32 v)
{
	lockmgr(&rdev->pif_idx_lock, LK_EXCLUSIVE);
	WREG32(EVERGREEN_PIF_PHY0_INDEX, ((reg) & 0xffff));
	WREG32(EVERGREEN_PIF_PHY0_DATA, (v));
	lockmgr(&rdev->pif_idx_lock, LK_RELEASE);
}

u32 eg_pif_phy1_rreg(struct radeon_device *rdev, u32 reg)
{
	u32 r;

	lockmgr(&rdev->pif_idx_lock, LK_EXCLUSIVE);
	WREG32(EVERGREEN_PIF_PHY1_INDEX, ((reg) & 0xffff));
	r = RREG32(EVERGREEN_PIF_PHY1_DATA);
	lockmgr(&rdev->pif_idx_lock, LK_RELEASE);
	return r;
}

void eg_pif_phy1_wreg(struct radeon_device *rdev, u32 reg, u32 v)
{
	lockmgr(&rdev->pif_idx_lock, LK_EXCLUSIVE);
	WREG32(EVERGREEN_PIF_PHY1_INDEX, ((reg) & 0xffff));
	WREG32(EVERGREEN_PIF_PHY1_DATA, (v));
	lockmgr(&rdev->pif_idx_lock, LK_RELEASE);
}

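/*
 * DCE4 provides up to six CRTCs, each with an identical register bank at
 * a fixed offset, so a single register define plus crtc_offsets[i]
 * addresses the registers of any CRTC.
 */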
static const u32 crtc_offsets[6] =
{
	EVERGREEN_CRTC0_REGISTER_OFFSET,
	EVERGREEN_CRTC1_REGISTER_OFFSET,
	EVERGREEN_CRTC2_REGISTER_OFFSET,
	EVERGREEN_CRTC3_REGISTER_OFFSET,
	EVERGREEN_CRTC4_REGISTER_OFFSET,
	EVERGREEN_CRTC5_REGISTER_OFFSET
};

#include "clearstate_evergreen.h"

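/*
 * Register offsets whose contents the RLC saves and restores across
 * power-gating transitions; the list itself is consumed by the RLC
 * setup code elsewhere in the driver.
 */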
static const u32 sumo_rlc_save_restore_register_list[] =
{
	0x98fc,
	0x9830,
	0x9834,
	0x9838,
	0x9870,
	0x9874,
	0x8a14,
	0x8b24,
	0x8bcc,
	0x8b10,
	0x8d00,
	0x8d04,
	0x8c00,
	0x8c04,
	0x8c08,
	0x8c0c,
	0x8d8c,
	0x8c20,
	0x8c24,
	0x8c28,
	0x8c18,
	0x8c1c,
	0x8cf0,
	0x8e2c,
	0x8e38,
	0x8c30,
	0x9508,
	0x9688,
	0x9608,
	0x960c,
	0x9610,
	0x9614,
	0x88c4,
	0x88d4,
	0xa008,
	0x900c,
	0x9100,
	0x913c,
	0x98f8,
	0x98f4,
	0x9b7c,
	0x3f8c,
	0x8950,
	0x8954,
	0x8a18,
	0x8b28,
	0x9144,
	0x9148,
	0x914c,
	0x3f90,
	0x3f94,
	0x915c,
	0x9160,
	0x9178,
	0x917c,
	0x9180,
	0x918c,
	0x9190,
	0x9194,
	0x9198,
	0x919c,
	0x91a8,
	0x91ac,
	0x91b0,
	0x91b4,
	0x91b8,
	0x91c4,
	0x91c8,
	0x91cc,
	0x91d0,
	0x91d4,
	0x91e0,
	0x91e4,
	0x91ec,
	0x91f0,
	0x91f4,
	0x9200,
	0x9204,
	0x929c,
	0x9150,
	0x802c,
};

static void evergreen_gpu_init(struct radeon_device *rdev);

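/*
 * "Golden" register tables are triples of { offset, mask, value }.
 * radeon_program_register_sequence() read-modify-writes each entry,
 * clearing the masked bits and ORing in the value (a 0xffffffff mask
 * amounts to a straight write).
 */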
static const u32 evergreen_golden_registers[] =
{
	0x3f90, 0xffff0000, 0xff000000,
	0x9148, 0xffff0000, 0xff000000,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0x9b7c, 0xffffffff, 0x00000000,
	0x8a14, 0xffffffff, 0x00000007,
	0x8b10, 0xffffffff, 0x00000000,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0xffffffff, 0x000000c2,
	0x88d4, 0xffffffff, 0x00000010,
	0x8974, 0xffffffff, 0x00000000,
	0xc78, 0x00000080, 0x00000080,
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0xffffffff, 0x001000f0,
	0x6104, 0x01000300, 0x00000000,
	0x5bc0, 0x00300000, 0x00000000,
	0x7030, 0xffffffff, 0x00000011,
	0x7c30, 0xffffffff, 0x00000011,
	0x10830, 0xffffffff, 0x00000011,
	0x11430, 0xffffffff, 0x00000011,
	0x12030, 0xffffffff, 0x00000011,
	0x12c30, 0xffffffff, 0x00000011,
	0xd02c, 0xffffffff, 0x08421000,
	0x240c, 0xffffffff, 0x00000380,
	0x8b24, 0xffffffff, 0x00ff0fff,
	0x28a4c, 0x06000000, 0x06000000,
	0x10c, 0x00000001, 0x00000001,
	0x8d00, 0xffffffff, 0x100e4848,
	0x8d04, 0xffffffff, 0x00164745,
	0x8c00, 0xffffffff, 0xe4000003,
	0x8c04, 0xffffffff, 0x40600060,
	0x8c08, 0xffffffff, 0x001c001c,
	0x8cf0, 0xffffffff, 0x08e00620,
	0x8c20, 0xffffffff, 0x00800080,
	0x8c24, 0xffffffff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0xffffffff, 0x00001010,
	0x28350, 0xffffffff, 0x00000000,
	0xa008, 0xffffffff, 0x00010000,
	0x5c4, 0xffffffff, 0x00000001,
	0x9508, 0xffffffff, 0x00000002,
	0x913c, 0x0000000f, 0x0000000a
};

static const u32 evergreen_golden_registers2[] =
{
	0x2f4c, 0xffffffff, 0x00000000,
	0x54f4, 0xffffffff, 0x00000000,
	0x54f0, 0xffffffff, 0x00000000,
	0x5498, 0xffffffff, 0x00000000,
	0x549c, 0xffffffff, 0x00000000,
	0x5494, 0xffffffff, 0x00000000,
	0x53cc, 0xffffffff, 0x00000000,
	0x53c8, 0xffffffff, 0x00000000,
	0x53c4, 0xffffffff, 0x00000000,
	0x53c0, 0xffffffff, 0x00000000,
	0x53bc, 0xffffffff, 0x00000000,
	0x53b8, 0xffffffff, 0x00000000,
	0x53b4, 0xffffffff, 0x00000000,
	0x53b0, 0xffffffff, 0x00000000
};

static const u32 cypress_mgcg_init[] =
{
	0x802c, 0xffffffff, 0xc0000000,
	0x5448, 0xffffffff, 0x00000100,
	0x55e4, 0xffffffff, 0x00000100,
	0x160c, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0xc164, 0xffffffff, 0x00000100,
	0x8a18, 0xffffffff, 0x00000100,
	0x897c, 0xffffffff, 0x06000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9a60, 0xffffffff, 0x00000100,
	0x9868, 0xffffffff, 0x00000100,
	0x8d58, 0xffffffff, 0x00000100,
	0x9510, 0xffffffff, 0x00000100,
	0x949c, 0xffffffff, 0x00000100,
	0x9654, 0xffffffff, 0x00000100,
	0x9030, 0xffffffff, 0x00000100,
	0x9034, 0xffffffff, 0x00000100,
	0x9038, 0xffffffff, 0x00000100,
	0x903c, 0xffffffff, 0x00000100,
	0x9040, 0xffffffff, 0x00000100,
	0xa200, 0xffffffff, 0x00000100,
	0xa204, 0xffffffff, 0x00000100,
	0xa208, 0xffffffff, 0x00000100,
	0xa20c, 0xffffffff, 0x00000100,
	0x971c, 0xffffffff, 0x00000100,
	0x977c, 0xffffffff, 0x00000100,
	0x3f80, 0xffffffff, 0x00000100,
	0xa210, 0xffffffff, 0x00000100,
	0xa214, 0xffffffff, 0x00000100,
	0x4d8, 0xffffffff, 0x00000100,
	0x9784, 0xffffffff, 0x00000100,
	0x9698, 0xffffffff, 0x00000100,
	0x4d4, 0xffffffff, 0x00000200,
	0x30cc, 0xffffffff, 0x00000100,
	0xd0c0, 0xffffffff, 0xff000100,
	0x802c, 0xffffffff, 0x40000000,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x9178, 0xffffffff, 0x00070000,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x918c, 0xffffffff, 0x00010006,
	0x9190, 0xffffffff, 0x00090008,
	0x9194, 0xffffffff, 0x00070000,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91a8, 0xffffffff, 0x00010006,
	0x91ac, 0xffffffff, 0x00090008,
	0x91b0, 0xffffffff, 0x00070000,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91c4, 0xffffffff, 0x00010006,
	0x91c8, 0xffffffff, 0x00090008,
	0x91cc, 0xffffffff, 0x00070000,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91e0, 0xffffffff, 0x00010006,
	0x91e4, 0xffffffff, 0x00090008,
	0x91e8, 0xffffffff, 0x00000000,
	0x91ec, 0xffffffff, 0x00070000,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x9200, 0xffffffff, 0x00010006,
	0x9204, 0xffffffff, 0x00090008,
	0x9208, 0xffffffff, 0x00070000,
	0x920c, 0xffffffff, 0x00030002,
	0x9210, 0xffffffff, 0x00050004,
	0x921c, 0xffffffff, 0x00010006,
	0x9220, 0xffffffff, 0x00090008,
	0x9224, 0xffffffff, 0x00070000,
	0x9228, 0xffffffff, 0x00030002,
	0x922c, 0xffffffff, 0x00050004,
	0x9238, 0xffffffff, 0x00010006,
	0x923c, 0xffffffff, 0x00090008,
	0x9240, 0xffffffff, 0x00070000,
	0x9244, 0xffffffff, 0x00030002,
	0x9248, 0xffffffff, 0x00050004,
	0x9254, 0xffffffff, 0x00010006,
	0x9258, 0xffffffff, 0x00090008,
	0x925c, 0xffffffff, 0x00070000,
	0x9260, 0xffffffff, 0x00030002,
	0x9264, 0xffffffff, 0x00050004,
	0x9270, 0xffffffff, 0x00010006,
	0x9274, 0xffffffff, 0x00090008,
	0x9278, 0xffffffff, 0x00070000,
	0x927c, 0xffffffff, 0x00030002,
	0x9280, 0xffffffff, 0x00050004,
	0x928c, 0xffffffff, 0x00010006,
	0x9290, 0xffffffff, 0x00090008,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0x40010000,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x9178, 0xffffffff, 0x00070000,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x918c, 0xffffffff, 0x00010006,
	0x9190, 0xffffffff, 0x00090008,
	0x9194, 0xffffffff, 0x00070000,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91a8, 0xffffffff, 0x00010006,
	0x91ac, 0xffffffff, 0x00090008,
	0x91b0, 0xffffffff, 0x00070000,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91c4, 0xffffffff, 0x00010006,
	0x91c8, 0xffffffff, 0x00090008,
	0x91cc, 0xffffffff, 0x00070000,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91e0, 0xffffffff, 0x00010006,
	0x91e4, 0xffffffff, 0x00090008,
	0x91e8, 0xffffffff, 0x00000000,
	0x91ec, 0xffffffff, 0x00070000,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x9200, 0xffffffff, 0x00010006,
	0x9204, 0xffffffff, 0x00090008,
	0x9208, 0xffffffff, 0x00070000,
	0x920c, 0xffffffff, 0x00030002,
	0x9210, 0xffffffff, 0x00050004,
	0x921c, 0xffffffff, 0x00010006,
	0x9220, 0xffffffff, 0x00090008,
	0x9224, 0xffffffff, 0x00070000,
	0x9228, 0xffffffff, 0x00030002,
	0x922c, 0xffffffff, 0x00050004,
	0x9238, 0xffffffff, 0x00010006,
	0x923c, 0xffffffff, 0x00090008,
	0x9240, 0xffffffff, 0x00070000,
	0x9244, 0xffffffff, 0x00030002,
	0x9248, 0xffffffff, 0x00050004,
	0x9254, 0xffffffff, 0x00010006,
	0x9258, 0xffffffff, 0x00090008,
	0x925c, 0xffffffff, 0x00070000,
	0x9260, 0xffffffff, 0x00030002,
	0x9264, 0xffffffff, 0x00050004,
	0x9270, 0xffffffff, 0x00010006,
	0x9274, 0xffffffff, 0x00090008,
	0x9278, 0xffffffff, 0x00070000,
	0x927c, 0xffffffff, 0x00030002,
	0x9280, 0xffffffff, 0x00050004,
	0x928c, 0xffffffff, 0x00010006,
	0x9290, 0xffffffff, 0x00090008,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0xc0000000
};

static const u32 redwood_mgcg_init[] =
{
	0x802c, 0xffffffff, 0xc0000000,
	0x5448, 0xffffffff, 0x00000100,
	0x55e4, 0xffffffff, 0x00000100,
	0x160c, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0xc164, 0xffffffff, 0x00000100,
	0x8a18, 0xffffffff, 0x00000100,
	0x897c, 0xffffffff, 0x06000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9a60, 0xffffffff, 0x00000100,
	0x9868, 0xffffffff, 0x00000100,
	0x8d58, 0xffffffff, 0x00000100,
	0x9510, 0xffffffff, 0x00000100,
	0x949c, 0xffffffff, 0x00000100,
	0x9654, 0xffffffff, 0x00000100,
	0x9030, 0xffffffff, 0x00000100,
	0x9034, 0xffffffff, 0x00000100,
	0x9038, 0xffffffff, 0x00000100,
	0x903c, 0xffffffff, 0x00000100,
	0x9040, 0xffffffff, 0x00000100,
	0xa200, 0xffffffff, 0x00000100,
	0xa204, 0xffffffff, 0x00000100,
	0xa208, 0xffffffff, 0x00000100,
	0xa20c, 0xffffffff, 0x00000100,
	0x971c, 0xffffffff, 0x00000100,
	0x977c, 0xffffffff, 0x00000100,
	0x3f80, 0xffffffff, 0x00000100,
	0xa210, 0xffffffff, 0x00000100,
	0xa214, 0xffffffff, 0x00000100,
	0x4d8, 0xffffffff, 0x00000100,
	0x9784, 0xffffffff, 0x00000100,
	0x9698, 0xffffffff, 0x00000100,
	0x4d4, 0xffffffff, 0x00000200,
	0x30cc, 0xffffffff, 0x00000100,
	0xd0c0, 0xffffffff, 0xff000100,
	0x802c, 0xffffffff, 0x40000000,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x9178, 0xffffffff, 0x00070000,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x918c, 0xffffffff, 0x00010006,
	0x9190, 0xffffffff, 0x00090008,
	0x9194, 0xffffffff, 0x00070000,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91a8, 0xffffffff, 0x00010006,
	0x91ac, 0xffffffff, 0x00090008,
	0x91b0, 0xffffffff, 0x00070000,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91c4, 0xffffffff, 0x00010006,
	0x91c8, 0xffffffff, 0x00090008,
	0x91cc, 0xffffffff, 0x00070000,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91e0, 0xffffffff, 0x00010006,
	0x91e4, 0xffffffff, 0x00090008,
	0x91e8, 0xffffffff, 0x00000000,
	0x91ec, 0xffffffff, 0x00070000,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x9200, 0xffffffff, 0x00010006,
	0x9204, 0xffffffff, 0x00090008,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0xc0000000
};

static const u32 cedar_golden_registers[] =
{
	0x3f90, 0xffff0000, 0xff000000,
	0x9148, 0xffff0000, 0xff000000,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0x9b7c, 0xffffffff, 0x00000000,
	0x8a14, 0xffffffff, 0x00000007,
	0x8b10, 0xffffffff, 0x00000000,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0xffffffff, 0x000000c2,
	0x88d4, 0xffffffff, 0x00000000,
	0x8974, 0xffffffff, 0x00000000,
	0xc78, 0x00000080, 0x00000080,
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0xffffffff, 0x001000f0,
	0x6104, 0x01000300, 0x00000000,
	0x5bc0, 0x00300000, 0x00000000,
	0x7030, 0xffffffff, 0x00000011,
	0x7c30, 0xffffffff, 0x00000011,
	0x10830, 0xffffffff, 0x00000011,
	0x11430, 0xffffffff, 0x00000011,
	0xd02c, 0xffffffff, 0x08421000,
	0x240c, 0xffffffff, 0x00000380,
	0x8b24, 0xffffffff, 0x00ff0fff,
	0x28a4c, 0x06000000, 0x06000000,
	0x10c, 0x00000001, 0x00000001,
	0x8d00, 0xffffffff, 0x100e4848,
	0x8d04, 0xffffffff, 0x00164745,
	0x8c00, 0xffffffff, 0xe4000003,
	0x8c04, 0xffffffff, 0x40600060,
	0x8c08, 0xffffffff, 0x001c001c,
	0x8cf0, 0xffffffff, 0x08e00410,
	0x8c20, 0xffffffff, 0x00800080,
	0x8c24, 0xffffffff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0xffffffff, 0x00001010,
	0x28350, 0xffffffff, 0x00000000,
	0xa008, 0xffffffff, 0x00010000,
	0x5c4, 0xffffffff, 0x00000001,
	0x9508, 0xffffffff, 0x00000002
};

static const u32 cedar_mgcg_init[] =
{
	0x802c, 0xffffffff, 0xc0000000,
	0x5448, 0xffffffff, 0x00000100,
	0x55e4, 0xffffffff, 0x00000100,
	0x160c, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0xc164, 0xffffffff, 0x00000100,
	0x8a18, 0xffffffff, 0x00000100,
	0x897c, 0xffffffff, 0x06000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9a60, 0xffffffff, 0x00000100,
	0x9868, 0xffffffff, 0x00000100,
	0x8d58, 0xffffffff, 0x00000100,
	0x9510, 0xffffffff, 0x00000100,
	0x949c, 0xffffffff, 0x00000100,
	0x9654, 0xffffffff, 0x00000100,
	0x9030, 0xffffffff, 0x00000100,
	0x9034, 0xffffffff, 0x00000100,
	0x9038, 0xffffffff, 0x00000100,
	0x903c, 0xffffffff, 0x00000100,
	0x9040, 0xffffffff, 0x00000100,
	0xa200, 0xffffffff, 0x00000100,
	0xa204, 0xffffffff, 0x00000100,
	0xa208, 0xffffffff, 0x00000100,
	0xa20c, 0xffffffff, 0x00000100,
	0x971c, 0xffffffff, 0x00000100,
	0x977c, 0xffffffff, 0x00000100,
	0x3f80, 0xffffffff, 0x00000100,
	0xa210, 0xffffffff, 0x00000100,
	0xa214, 0xffffffff, 0x00000100,
	0x4d8, 0xffffffff, 0x00000100,
	0x9784, 0xffffffff, 0x00000100,
	0x9698, 0xffffffff, 0x00000100,
	0x4d4, 0xffffffff, 0x00000200,
	0x30cc, 0xffffffff, 0x00000100,
	0xd0c0, 0xffffffff, 0xff000100,
	0x802c, 0xffffffff, 0x40000000,
	0x915c, 0xffffffff, 0x00010000,
	0x9178, 0xffffffff, 0x00050000,
	0x917c, 0xffffffff, 0x00030002,
	0x918c, 0xffffffff, 0x00010004,
	0x9190, 0xffffffff, 0x00070006,
	0x9194, 0xffffffff, 0x00050000,
	0x9198, 0xffffffff, 0x00030002,
	0x91a8, 0xffffffff, 0x00010004,
	0x91ac, 0xffffffff, 0x00070006,
	0x91e8, 0xffffffff, 0x00000000,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0xc0000000
};

static const u32 juniper_mgcg_init[] =
{
	0x802c, 0xffffffff, 0xc0000000,
	0x5448, 0xffffffff, 0x00000100,
	0x55e4, 0xffffffff, 0x00000100,
	0x160c, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0xc164, 0xffffffff, 0x00000100,
	0x8a18, 0xffffffff, 0x00000100,
	0x897c, 0xffffffff, 0x06000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9a60, 0xffffffff, 0x00000100,
	0x9868, 0xffffffff, 0x00000100,
	0x8d58, 0xffffffff, 0x00000100,
	0x9510, 0xffffffff, 0x00000100,
	0x949c, 0xffffffff, 0x00000100,
	0x9654, 0xffffffff, 0x00000100,
	0x9030, 0xffffffff, 0x00000100,
	0x9034, 0xffffffff, 0x00000100,
	0x9038, 0xffffffff, 0x00000100,
	0x903c, 0xffffffff, 0x00000100,
	0x9040, 0xffffffff, 0x00000100,
	0xa200, 0xffffffff, 0x00000100,
	0xa204, 0xffffffff, 0x00000100,
	0xa208, 0xffffffff, 0x00000100,
	0xa20c, 0xffffffff, 0x00000100,
	0x971c, 0xffffffff, 0x00000100,
	0xd0c0, 0xffffffff, 0xff000100,
	0x802c, 0xffffffff, 0x40000000,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x9178, 0xffffffff, 0x00070000,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x918c, 0xffffffff, 0x00010006,
	0x9190, 0xffffffff, 0x00090008,
	0x9194, 0xffffffff, 0x00070000,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91a8, 0xffffffff, 0x00010006,
	0x91ac, 0xffffffff, 0x00090008,
	0x91b0, 0xffffffff, 0x00070000,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91c4, 0xffffffff, 0x00010006,
	0x91c8, 0xffffffff, 0x00090008,
	0x91cc, 0xffffffff, 0x00070000,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91e0, 0xffffffff, 0x00010006,
	0x91e4, 0xffffffff, 0x00090008,
	0x91e8, 0xffffffff, 0x00000000,
	0x91ec, 0xffffffff, 0x00070000,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x9200, 0xffffffff, 0x00010006,
	0x9204, 0xffffffff, 0x00090008,
	0x9208, 0xffffffff, 0x00070000,
	0x920c, 0xffffffff, 0x00030002,
	0x9210, 0xffffffff, 0x00050004,
	0x921c, 0xffffffff, 0x00010006,
	0x9220, 0xffffffff, 0x00090008,
	0x9224, 0xffffffff, 0x00070000,
	0x9228, 0xffffffff, 0x00030002,
	0x922c, 0xffffffff, 0x00050004,
	0x9238, 0xffffffff, 0x00010006,
	0x923c, 0xffffffff, 0x00090008,
	0x9240, 0xffffffff, 0x00070000,
	0x9244, 0xffffffff, 0x00030002,
	0x9248, 0xffffffff, 0x00050004,
	0x9254, 0xffffffff, 0x00010006,
	0x9258, 0xffffffff, 0x00090008,
	0x925c, 0xffffffff, 0x00070000,
	0x9260, 0xffffffff, 0x00030002,
	0x9264, 0xffffffff, 0x00050004,
	0x9270, 0xffffffff, 0x00010006,
	0x9274, 0xffffffff, 0x00090008,
	0x9278, 0xffffffff, 0x00070000,
	0x927c, 0xffffffff, 0x00030002,
	0x9280, 0xffffffff, 0x00050004,
	0x928c, 0xffffffff, 0x00010006,
	0x9290, 0xffffffff, 0x00090008,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0xc0000000,
	0x977c, 0xffffffff, 0x00000100,
	0x3f80, 0xffffffff, 0x00000100,
	0xa210, 0xffffffff, 0x00000100,
	0xa214, 0xffffffff, 0x00000100,
	0x4d8, 0xffffffff, 0x00000100,
	0x9784, 0xffffffff, 0x00000100,
	0x9698, 0xffffffff, 0x00000100,
	0x4d4, 0xffffffff, 0x00000200,
	0x30cc, 0xffffffff, 0x00000100,
	0x802c, 0xffffffff, 0xc0000000
};

static const u32 supersumo_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5c4, 0xffffffff, 0x00000001,
	0x7030, 0xffffffff, 0x00000011,
	0x7c30, 0xffffffff, 0x00000011,
	0x6104, 0x01000300, 0x00000000,
	0x5bc0, 0x00300000, 0x00000000,
	0x8c04, 0xffffffff, 0x40600060,
	0x8c08, 0xffffffff, 0x001c001c,
	0x8c20, 0xffffffff, 0x00800080,
	0x8c24, 0xffffffff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0xffffffff, 0x00001010,
	0x918c, 0xffffffff, 0x00010006,
	0x91a8, 0xffffffff, 0x00010006,
	0x91c4, 0xffffffff, 0x00010006,
	0x91e0, 0xffffffff, 0x00010006,
	0x9200, 0xffffffff, 0x00010006,
	0x9150, 0xffffffff, 0x6e944040,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x3f90, 0xffff0000, 0xff000000,
	0x9178, 0xffffffff, 0x00070000,
	0x9194, 0xffffffff, 0x00070000,
	0x91b0, 0xffffffff, 0x00070000,
	0x91cc, 0xffffffff, 0x00070000,
	0x91ec, 0xffffffff, 0x00070000,
	0x9148, 0xffff0000, 0xff000000,
	0x9190, 0xffffffff, 0x00090008,
	0x91ac, 0xffffffff, 0x00090008,
	0x91c8, 0xffffffff, 0x00090008,
	0x91e4, 0xffffffff, 0x00090008,
	0x9204, 0xffffffff, 0x00090008,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0x929c, 0xffffffff, 0x00000001,
	0x8a18, 0xffffffff, 0x00000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0x9b7c, 0xffffffff, 0x00000000,
	0x8030, 0xffffffff, 0x0000100a,
	0x8a14, 0xffffffff, 0x00000007,
	0x8b24, 0xffffffff, 0x00ff0fff,
	0x8b10, 0xffffffff, 0x00000000,
	0x28a4c, 0x06000000, 0x06000000,
	0x4d8, 0xffffffff, 0x00000100,
	0x913c, 0xffff000f, 0x0100000a,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0xffffffff, 0x000000c2,
	0x88d4, 0xffffffff, 0x00000010,
	0x8974, 0xffffffff, 0x00000000,
	0xc78, 0x00000080, 0x00000080,
	0x5e78, 0xffffffff, 0x001000f0,
	0xd02c, 0xffffffff, 0x08421000,
	0xa008, 0xffffffff, 0x00010000,
	0x8d00, 0xffffffff, 0x100e4848,
	0x8d04, 0xffffffff, 0x00164745,
	0x8c00, 0xffffffff, 0xe4000003,
	0x8cf0, 0x1fffffff, 0x08e00620,
	0x28350, 0xffffffff, 0x00000000,
	0x9508, 0xffffffff, 0x00000002
};

static const u32 sumo_golden_registers[] =
{
	0x900c, 0x00ffffff, 0x0017071f,
	0x8c18, 0xffffffff, 0x10101060,
	0x8c1c, 0xffffffff, 0x00001010,
	0x8c30, 0x0000000f, 0x00000005,
	0x9688, 0x0000000f, 0x00000007
};

static const u32 wrestler_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5c4, 0xffffffff, 0x00000001,
	0x7030, 0xffffffff, 0x00000011,
	0x7c30, 0xffffffff, 0x00000011,
	0x6104, 0x01000300, 0x00000000,
	0x5bc0, 0x00300000, 0x00000000,
	0x918c, 0xffffffff, 0x00010006,
	0x91a8, 0xffffffff, 0x00010006,
	0x9150, 0xffffffff, 0x6e944040,
	0x917c, 0xffffffff, 0x00030002,
	0x9198, 0xffffffff, 0x00030002,
	0x915c, 0xffffffff, 0x00010000,
	0x3f90, 0xffff0000, 0xff000000,
	0x9178, 0xffffffff, 0x00070000,
	0x9194, 0xffffffff, 0x00070000,
	0x9148, 0xffff0000, 0xff000000,
	0x9190, 0xffffffff, 0x00090008,
	0x91ac, 0xffffffff, 0x00090008,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0x929c, 0xffffffff, 0x00000001,
	0x8a18, 0xffffffff, 0x00000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9b7c, 0xffffffff, 0x00000000,
	0x8030, 0xffffffff, 0x0000100a,
	0x8a14, 0xffffffff, 0x00000001,
	0x8b24, 0xffffffff, 0x00ff0fff,
	0x8b10, 0xffffffff, 0x00000000,
	0x28a4c, 0x06000000, 0x06000000,
	0x4d8, 0xffffffff, 0x00000100,
	0x913c, 0xffff000f, 0x0100000a,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0xffffffff, 0x000000c2,
	0x88d4, 0xffffffff, 0x00000010,
	0x8974, 0xffffffff, 0x00000000,
	0xc78, 0x00000080, 0x00000080,
	0x5e78, 0xffffffff, 0x001000f0,
	0xd02c, 0xffffffff, 0x08421000,
	0xa008, 0xffffffff, 0x00010000,
	0x8d00, 0xffffffff, 0x100e4848,
	0x8d04, 0xffffffff, 0x00164745,
	0x8c00, 0xffffffff, 0xe4000003,
	0x8cf0, 0x1fffffff, 0x08e00410,
	0x28350, 0xffffffff, 0x00000000,
	0x9508, 0xffffffff, 0x00000002,
	0x900c, 0xffffffff, 0x0017071f,
	0x8c18, 0xffffffff, 0x10101060,
	0x8c1c, 0xffffffff, 0x00001010
};

static const u32 barts_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0x8f311ff1, 0x001000f0,
	0x3f90, 0xffff0000, 0xff000000,
	0x9148, 0xffff0000, 0xff000000,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0xc78, 0x00000080, 0x00000080,
	0xbd4, 0x70073777, 0x00010001,
	0xd02c, 0xbfffff1f, 0x08421000,
	0xd0b8, 0x03773777, 0x02011003,
	0x5bc0, 0x00200000, 0x50100000,
	0x98f8, 0x33773777, 0x02011003,
	0x98fc, 0xffffffff, 0x76543210,
	0x7030, 0x31000311, 0x00000011,
	0x2f48, 0x00000007, 0x02011003,
	0x6b28, 0x00000010, 0x00000012,
	0x7728, 0x00000010, 0x00000012,
	0x10328, 0x00000010, 0x00000012,
	0x10f28, 0x00000010, 0x00000012,
	0x11b28, 0x00000010, 0x00000012,
	0x12728, 0x00000010, 0x00000012,
	0x240c, 0x000007ff, 0x00000380,
	0x8a14, 0xf000001f, 0x00000007,
	0x8b24, 0x3fff3fff, 0x00ff0fff,
	0x8b10, 0x0000ff0f, 0x00000000,
	0x28a4c, 0x07ffffff, 0x06000000,
	0x10c, 0x00000001, 0x00010003,
	0xa02c, 0xffffffff, 0x0000009b,
	0x913c, 0x0000000f, 0x0100000a,
	0x8d00, 0xffff7f7f, 0x100e4848,
	0x8d04, 0x00ffffff, 0x00164745,
	0x8c00, 0xfffc0003, 0xe4000003,
	0x8c04, 0xf8ff00ff, 0x40600060,
	0x8c08, 0x00ff00ff, 0x001c001c,
	0x8cf0, 0x1fff1fff, 0x08e00620,
	0x8c20, 0x0fff0fff, 0x00800080,
	0x8c24, 0x0fff0fff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0x0000ffff, 0x00001010,
	0x28350, 0x00000f01, 0x00000000,
	0x9508, 0x3700001f, 0x00000002,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0x001f3ae3, 0x000000c2,
	0x88d4, 0x0000001f, 0x00000010,
	0x8974, 0xffffffff, 0x00000000
};

static const u32 turks_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0x8f311ff1, 0x001000f0,
	0x8c8, 0x00003000, 0x00001070,
	0x8cc, 0x000fffff, 0x00040035,
	0x3f90, 0xffff0000, 0xfff00000,
	0x9148, 0xffff0000, 0xfff00000,
	0x3f94, 0xffff0000, 0xfff00000,
	0x914c, 0xffff0000, 0xfff00000,
	0xc78, 0x00000080, 0x00000080,
	0xbd4, 0x00073007, 0x00010002,
	0xd02c, 0xbfffff1f, 0x08421000,
	0xd0b8, 0x03773777, 0x02010002,
	0x5bc0, 0x00200000, 0x50100000,
	0x98f8, 0x33773777, 0x00010002,
	0x98fc, 0xffffffff, 0x33221100,
	0x7030, 0x31000311, 0x00000011,
	0x2f48, 0x33773777, 0x00010002,
	0x6b28, 0x00000010, 0x00000012,
	0x7728, 0x00000010, 0x00000012,
	0x10328, 0x00000010, 0x00000012,
	0x10f28, 0x00000010, 0x00000012,
	0x11b28, 0x00000010, 0x00000012,
	0x12728, 0x00000010, 0x00000012,
	0x240c, 0x000007ff, 0x00000380,
	0x8a14, 0xf000001f, 0x00000007,
	0x8b24, 0x3fff3fff, 0x00ff0fff,
	0x8b10, 0x0000ff0f, 0x00000000,
	0x28a4c, 0x07ffffff, 0x06000000,
	0x10c, 0x00000001, 0x00010003,
	0xa02c, 0xffffffff, 0x0000009b,
	0x913c, 0x0000000f, 0x0100000a,
	0x8d00, 0xffff7f7f, 0x100e4848,
	0x8d04, 0x00ffffff, 0x00164745,
	0x8c00, 0xfffc0003, 0xe4000003,
	0x8c04, 0xf8ff00ff, 0x40600060,
	0x8c08, 0x00ff00ff, 0x001c001c,
	0x8cf0, 0x1fff1fff, 0x08e00410,
	0x8c20, 0x0fff0fff, 0x00800080,
	0x8c24, 0x0fff0fff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0x0000ffff, 0x00001010,
	0x28350, 0x00000f01, 0x00000000,
	0x9508, 0x3700001f, 0x00000002,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0x001f3ae3, 0x000000c2,
	0x88d4, 0x0000001f, 0x00000010,
	0x8974, 0xffffffff, 0x00000000
};

static const u32 caicos_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0x8f311ff1, 0x001000f0,
	0x8c8, 0x00003420, 0x00001450,
	0x8cc, 0x000fffff, 0x00040035,
	0x3f90, 0xffff0000, 0xfffc0000,
	0x9148, 0xffff0000, 0xfffc0000,
	0x3f94, 0xffff0000, 0xfffc0000,
	0x914c, 0xffff0000, 0xfffc0000,
	0xc78, 0x00000080, 0x00000080,
	0xbd4, 0x00073007, 0x00010001,
	0xd02c, 0xbfffff1f, 0x08421000,
	0xd0b8, 0x03773777, 0x02010001,
	0x5bc0, 0x00200000, 0x50100000,
	0x98f8, 0x33773777, 0x02010001,
	0x98fc, 0xffffffff, 0x33221100,
	0x7030, 0x31000311, 0x00000011,
	0x2f48, 0x33773777, 0x02010001,
	0x6b28, 0x00000010, 0x00000012,
	0x7728, 0x00000010, 0x00000012,
	0x10328, 0x00000010, 0x00000012,
	0x10f28, 0x00000010, 0x00000012,
	0x11b28, 0x00000010, 0x00000012,
	0x12728, 0x00000010, 0x00000012,
	0x240c, 0x000007ff, 0x00000380,
	0x8a14, 0xf000001f, 0x00000001,
	0x8b24, 0x3fff3fff, 0x00ff0fff,
	0x8b10, 0x0000ff0f, 0x00000000,
	0x28a4c, 0x07ffffff, 0x06000000,
	0x10c, 0x00000001, 0x00010003,
	0xa02c, 0xffffffff, 0x0000009b,
	0x913c, 0x0000000f, 0x0100000a,
	0x8d00, 0xffff7f7f, 0x100e4848,
	0x8d04, 0x00ffffff, 0x00164745,
	0x8c00, 0xfffc0003, 0xe4000003,
	0x8c04, 0xf8ff00ff, 0x40600060,
	0x8c08, 0x00ff00ff, 0x001c001c,
	0x8cf0, 0x1fff1fff, 0x08e00410,
	0x8c20, 0x0fff0fff, 0x00800080,
	0x8c24, 0x0fff0fff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0x0000ffff, 0x00001010,
	0x28350, 0x00000f01, 0x00000000,
	0x9508, 0x3700001f, 0x00000002,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0x001f3ae3, 0x000000c2,
	0x88d4, 0x0000001f, 0x00000010,
	0x8974, 0xffffffff, 0x00000000
};

static void evergreen_init_golden_registers(struct radeon_device *rdev)
{
	switch (rdev->family) {
	case CHIP_CYPRESS:
	case CHIP_HEMLOCK:
		radeon_program_register_sequence(rdev,
						 evergreen_golden_registers,
						 (const u32)ARRAY_SIZE(evergreen_golden_registers));
		radeon_program_register_sequence(rdev,
						 evergreen_golden_registers2,
						 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
		radeon_program_register_sequence(rdev,
						 cypress_mgcg_init,
						 (const u32)ARRAY_SIZE(cypress_mgcg_init));
		break;
	case CHIP_JUNIPER:
		radeon_program_register_sequence(rdev,
						 evergreen_golden_registers,
						 (const u32)ARRAY_SIZE(evergreen_golden_registers));
		radeon_program_register_sequence(rdev,
						 evergreen_golden_registers2,
						 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
		radeon_program_register_sequence(rdev,
						 juniper_mgcg_init,
						 (const u32)ARRAY_SIZE(juniper_mgcg_init));
		break;
	case CHIP_REDWOOD:
		radeon_program_register_sequence(rdev,
						 evergreen_golden_registers,
						 (const u32)ARRAY_SIZE(evergreen_golden_registers));
		radeon_program_register_sequence(rdev,
						 evergreen_golden_registers2,
						 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
		radeon_program_register_sequence(rdev,
						 redwood_mgcg_init,
						 (const u32)ARRAY_SIZE(redwood_mgcg_init));
		break;
	case CHIP_CEDAR:
		radeon_program_register_sequence(rdev,
						 cedar_golden_registers,
						 (const u32)ARRAY_SIZE(cedar_golden_registers));
		radeon_program_register_sequence(rdev,
						 evergreen_golden_registers2,
						 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
		radeon_program_register_sequence(rdev,
						 cedar_mgcg_init,
						 (const u32)ARRAY_SIZE(cedar_mgcg_init));
		break;
	case CHIP_PALM:
		radeon_program_register_sequence(rdev,
						 wrestler_golden_registers,
						 (const u32)ARRAY_SIZE(wrestler_golden_registers));
		break;
	case CHIP_SUMO:
		radeon_program_register_sequence(rdev,
						 supersumo_golden_registers,
						 (const u32)ARRAY_SIZE(supersumo_golden_registers));
		break;
	case CHIP_SUMO2:
		radeon_program_register_sequence(rdev,
						 supersumo_golden_registers,
						 (const u32)ARRAY_SIZE(supersumo_golden_registers));
		radeon_program_register_sequence(rdev,
						 sumo_golden_registers,
						 (const u32)ARRAY_SIZE(sumo_golden_registers));
		break;
	case CHIP_BARTS:
		radeon_program_register_sequence(rdev,
						 barts_golden_registers,
						 (const u32)ARRAY_SIZE(barts_golden_registers));
		break;
	case CHIP_TURKS:
		radeon_program_register_sequence(rdev,
						 turks_golden_registers,
						 (const u32)ARRAY_SIZE(turks_golden_registers));
		break;
	case CHIP_CAICOS:
		radeon_program_register_sequence(rdev,
						 caicos_golden_registers,
						 (const u32)ARRAY_SIZE(caicos_golden_registers));
		break;
	default:
		break;
	}
}

/**
 * evergreen_get_allowed_info_register - fetch the register for the info ioctl
 *
 * @rdev: radeon_device pointer
 * @reg: register offset in bytes
 * @val: register value
 *
 * Returns 0 for success or -EINVAL for an invalid register
 *
 */
int evergreen_get_allowed_info_register(struct radeon_device *rdev,
					u32 reg, u32 *val)
{
	switch (reg) {
	case GRBM_STATUS:
	case GRBM_STATUS_SE0:
	case GRBM_STATUS_SE1:
	case SRBM_STATUS:
	case SRBM_STATUS2:
	case DMA_STATUS_REG:
	case UVD_STATUS:
		*val = RREG32(reg);
		return 0;
	default:
		return -EINVAL;
	}
}

void evergreen_tiling_fields(unsigned tiling_flags, unsigned *bankw,
			     unsigned *bankh, unsigned *mtaspect,
			     unsigned *tile_split)
{
	*bankw = (tiling_flags >> RADEON_TILING_EG_BANKW_SHIFT) & RADEON_TILING_EG_BANKW_MASK;
	*bankh = (tiling_flags >> RADEON_TILING_EG_BANKH_SHIFT) & RADEON_TILING_EG_BANKH_MASK;
	*mtaspect = (tiling_flags >> RADEON_TILING_EG_MACRO_TILE_ASPECT_SHIFT) & RADEON_TILING_EG_MACRO_TILE_ASPECT_MASK;
	*tile_split = (tiling_flags >> RADEON_TILING_EG_TILE_SPLIT_SHIFT) & RADEON_TILING_EG_TILE_SPLIT_MASK;
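	/* convert the raw 1/2/4/8 counts into the ADDR_SURF_* enum encodings */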
	switch (*bankw) {
	default:
	case 1: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_1; break;
	case 2: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_2; break;
	case 4: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_4; break;
	case 8: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_8; break;
	}
	switch (*bankh) {
	default:
	case 1: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_1; break;
	case 2: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_2; break;
	case 4: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_4; break;
	case 8: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_8; break;
	}
	switch (*mtaspect) {
	default:
	case 1: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_1; break;
	case 2: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_2; break;
	case 4: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_4; break;
	case 8: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_8; break;
	}
}

static int sumo_set_uvd_clock(struct radeon_device *rdev, u32 clock,
			      u32 cntl_reg, u32 status_reg)
{
	int r, i;
	struct atom_clock_dividers dividers;

	r = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
					   clock, false, &dividers);
	if (r)
		return r;

	WREG32_P(cntl_reg, dividers.post_div, ~(DCLK_DIR_CNTL_EN|DCLK_DIVIDER_MASK));

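	/* wait up to ~1s (100 * 10ms) for the new divider to take effect */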
	for (i = 0; i < 100; i++) {
		if (RREG32(status_reg) & DCLK_STATUS)
			break;
		mdelay(10);
	}
	if (i == 100)
		return -ETIMEDOUT;

	return 0;
}

int sumo_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
{
	int r = 0;
	u32 cg_scratch = RREG32(CG_SCRATCH1);

	r = sumo_set_uvd_clock(rdev, vclk, CG_VCLK_CNTL, CG_VCLK_STATUS);
	if (r)
		goto done;
	cg_scratch &= 0xffff0000;
	cg_scratch |= vclk / 100; /* MHz */

	r = sumo_set_uvd_clock(rdev, dclk, CG_DCLK_CNTL, CG_DCLK_STATUS);
	if (r)
		goto done;
	cg_scratch &= 0x0000ffff;
	cg_scratch |= (dclk / 100) << 16; /* MHz */

done:
	WREG32(CG_SCRATCH1, cg_scratch);

	return r;
}

int evergreen_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
{
	/* the dividers are computed by radeon_uvd_calc_upll_dividers() below */
	unsigned fb_div = 0, vclk_div = 0, dclk_div = 0;
	int r;

	/* bypass vclk and dclk with bclk */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		VCLK_SRC_SEL(1) | DCLK_SRC_SEL(1),
		~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));

	/* put PLL in bypass mode */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_BYPASS_EN_MASK, ~UPLL_BYPASS_EN_MASK);

	if (!vclk || !dclk) {
		/* keep the Bypass mode, put PLL to sleep */
		WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
		return 0;
	}

	r = radeon_uvd_calc_upll_dividers(rdev, vclk, dclk, 125000, 250000,
					  16384, 0x03FFFFFF, 0, 128, 5,
					  &fb_div, &vclk_div, &dclk_div);
	if (r)
		return r;

	/* set VCO_MODE to 1 */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_VCO_MODE_MASK, ~UPLL_VCO_MODE_MASK);

	/* toggle UPLL_SLEEP to 1 then back to 0 */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_SLEEP_MASK);

	/* deassert UPLL_RESET */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);

	mdelay(1);

	r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
	if (r)
		return r;

	/* assert UPLL_RESET again */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_RESET_MASK, ~UPLL_RESET_MASK);

	/* disable spread spectrum. */
	WREG32_P(CG_UPLL_SPREAD_SPECTRUM, 0, ~SSEN_MASK);

	/* set feedback divider */
	WREG32_P(CG_UPLL_FUNC_CNTL_3, UPLL_FB_DIV(fb_div), ~UPLL_FB_DIV_MASK);

	/* set ref divider to 0 */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_REF_DIV_MASK);

	if (fb_div < 307200)
		WREG32_P(CG_UPLL_FUNC_CNTL_4, 0, ~UPLL_SPARE_ISPARE9);
	else
		WREG32_P(CG_UPLL_FUNC_CNTL_4, UPLL_SPARE_ISPARE9, ~UPLL_SPARE_ISPARE9);

	/* set PDIV_A and PDIV_B */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		UPLL_PDIV_A(vclk_div) | UPLL_PDIV_B(dclk_div),
		~(UPLL_PDIV_A_MASK | UPLL_PDIV_B_MASK));

	/* give the PLL some time to settle */
	mdelay(15);

	/* deassert PLL_RESET */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);

	mdelay(15);

	/* switch from bypass mode to normal mode */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_BYPASS_EN_MASK);

	r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
	if (r)
		return r;

	/* switch VCLK and DCLK selection */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		VCLK_SRC_SEL(2) | DCLK_SRC_SEL(2),
		~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));

	mdelay(100);

	return 0;
}

void evergreen_fix_pci_max_read_req_size(struct radeon_device *rdev)
{
	int readrq;
	u16 v;

	readrq = pcie_get_readrq(rdev->pdev);
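	/* readrq is in bytes; ffs(readrq) - 8 converts it to the PCIe
	 * MAX_READ_REQUEST_SIZE encoding (256B -> 1, ..., 4096B -> 5)
	 */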
	v = ffs(readrq) - 8;
	/* if the BIOS or OS sets MAX_READ_REQUEST_SIZE to an invalid value, fix it
	 * to avoid hangs or performance issues
	 */
	if ((v == 0) || (v == 6) || (v == 7))
		pcie_set_readrq(rdev->pdev, 512);
}

void dce4_program_fmt(struct drm_encoder *encoder)
{
	struct drm_device *dev = encoder->dev;
	struct radeon_device *rdev = dev->dev_private;
	struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(encoder->crtc);
	struct drm_connector *connector = radeon_get_connector_for_encoder(encoder);
	int bpc = 0;
	u32 tmp = 0;
	enum radeon_connector_dither dither = RADEON_FMT_DITHER_DISABLE;

	if (connector) {
		struct radeon_connector *radeon_connector = to_radeon_connector(connector);
		bpc = radeon_get_monitor_bpc(connector);
		dither = radeon_connector->dither;
	}

	/* LVDS/eDP FMT is set up by atom */
	if (radeon_encoder->devices & ATOM_DEVICE_LCD_SUPPORT)
		return;

	/* not needed for analog */
	if ((radeon_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1) ||
	    (radeon_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2))
		return;

	if (bpc == 0)
		return;

	switch (bpc) {
	case 6:
		if (dither == RADEON_FMT_DITHER_ENABLE)
			/* XXX sort out optimal dither settings */
			tmp |= (FMT_FRAME_RANDOM_ENABLE | FMT_HIGHPASS_RANDOM_ENABLE |
				FMT_SPATIAL_DITHER_EN);
		else
			tmp |= FMT_TRUNCATE_EN;
		break;
	case 8:
		if (dither == RADEON_FMT_DITHER_ENABLE)
			/* XXX sort out optimal dither settings */
			tmp |= (FMT_FRAME_RANDOM_ENABLE | FMT_HIGHPASS_RANDOM_ENABLE |
				FMT_RGB_RANDOM_ENABLE |
				FMT_SPATIAL_DITHER_EN | FMT_SPATIAL_DITHER_DEPTH);
		else
			tmp |= (FMT_TRUNCATE_EN | FMT_TRUNCATE_DEPTH);
		break;
	case 10:
	default:
		/* not needed */
		break;
	}

	WREG32(FMT_BIT_DEPTH_CONTROL + radeon_crtc->crtc_offset, tmp);
}

static bool dce4_is_in_vblank(struct radeon_device *rdev, int crtc)
{
	if (RREG32(EVERGREEN_CRTC_STATUS + crtc_offsets[crtc]) & EVERGREEN_CRTC_V_BLANK)
		return true;
	else
		return false;
}

static bool dce4_is_counter_moving(struct radeon_device *rdev, int crtc)
{
	u32 pos1, pos2;

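	/* sample the scanout position twice; if it changed, the timing
	 * generator is still running
	 */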
	pos1 = RREG32(EVERGREEN_CRTC_STATUS_POSITION + crtc_offsets[crtc]);
	pos2 = RREG32(EVERGREEN_CRTC_STATUS_POSITION + crtc_offsets[crtc]);

	if (pos1 != pos2)
		return true;
	else
		return false;
}

/**
 * dce4_wait_for_vblank - vblank wait asic callback.
 *
 * @rdev: radeon_device pointer
 * @crtc: crtc to wait for vblank on
 *
 * Wait for vblank on the requested crtc (evergreen+).
 */
void dce4_wait_for_vblank(struct radeon_device *rdev, int crtc)
{
	unsigned i = 0;

	if (crtc >= rdev->num_crtc)
		return;

	if (!(RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[crtc]) & EVERGREEN_CRTC_MASTER_EN))
		return;

	/* depending on when we hit vblank, we may be close to active; if so,
	 * wait for another frame.
	 */
	while (dce4_is_in_vblank(rdev, crtc)) {
		if (i++ % 100 == 0) {
			if (!dce4_is_counter_moving(rdev, crtc))
				break;
		}
	}

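	/* now wait for the start of the next vblank; bail out if the
	 * position counter stops moving (e.g. the CRTC was shut off)
	 */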
	while (!dce4_is_in_vblank(rdev, crtc)) {
		if (i++ % 100 == 0) {
			if (!dce4_is_counter_moving(rdev, crtc))
				break;
		}
	}
}

/**
 * evergreen_page_flip - pageflip callback.
 *
 * @rdev: radeon_device pointer
 * @crtc_id: crtc to pageflip on
 * @crtc_base: new address of the crtc (GPU MC address)
 * @async: asynchronous flip
 *
 * Triggers the actual pageflip by updating the primary
 * surface base address (evergreen+).
 */
void evergreen_page_flip(struct radeon_device *rdev, int crtc_id, u64 crtc_base,
			 bool async)
{
	struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id];

	/* update the scanout addresses */
	WREG32(EVERGREEN_GRPH_FLIP_CONTROL + radeon_crtc->crtc_offset,
	       async ? EVERGREEN_GRPH_SURFACE_UPDATE_H_RETRACE_EN : 0);
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
	       upper_32_bits(crtc_base));
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
	       (u32)crtc_base);
	/* post the write */
	RREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset);
}

/**
 * evergreen_page_flip_pending - check if page flip is still pending
 *
 * @rdev: radeon_device pointer
 * @crtc_id: crtc to check
 *
 * Returns the current update pending status.
 */
bool evergreen_page_flip_pending(struct radeon_device *rdev, int crtc_id)
{
	struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id];

	/* Return current update_pending status: */
	return !!(RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset) &
		EVERGREEN_GRPH_SURFACE_UPDATE_PENDING);
}

/* get temperature in millidegrees */
int evergreen_get_temp(struct radeon_device *rdev)
{
	u32 temp, toffset;
	int actual_temp = 0;

	if (rdev->family == CHIP_JUNIPER) {
		toffset = (RREG32(CG_THERMAL_CTRL) & TOFFSET_MASK) >>
			TOFFSET_SHIFT;
		temp = (RREG32(CG_TS0_STATUS) & TS0_ADC_DOUT_MASK) >>
			TS0_ADC_DOUT_SHIFT;

		if (toffset & 0x100)
			actual_temp = temp / 2 - (0x200 - toffset);
		else
			actual_temp = temp / 2 + toffset;

		actual_temp = actual_temp * 1000;

	} else {
		temp = (RREG32(CG_MULT_THERMAL_STATUS) & ASIC_T_MASK) >>
			ASIC_T_SHIFT;

		if (temp & 0x400)
			actual_temp = -256;
		else if (temp & 0x200)
			actual_temp = 255;
		else if (temp & 0x100) {
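			/* negative reading: sign-extend the 9-bit two's complement value */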
			actual_temp = temp & 0x1ff;
			actual_temp |= ~0x1ff;
		} else
			actual_temp = temp & 0xff;

		actual_temp = (actual_temp * 1000) / 2;
	}

	return actual_temp;
}

int sumo_get_temp(struct radeon_device *rdev)
{
	u32 temp = RREG32(CG_THERMAL_STATUS) & 0xff;
	int actual_temp = temp - 49; /* the raw reading is offset by 49 degrees C */

	return actual_temp * 1000;
}

/**
 * sumo_pm_init_profile - Initialize power profiles callback.
 *
 * @rdev: radeon_device pointer
 *
 * Initialize the power states used in profile mode
 * (sumo, trinity, SI).
 * Used for profile mode only.
 */
void sumo_pm_init_profile(struct radeon_device *rdev)
{
	int idx;

	/* default */
	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 0;

	/* low,mid sh/mh */
	if (rdev->flags & RADEON_IS_MOBILITY)
		idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
	else
		idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);

	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;

	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;

	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 0;

	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 0;

	/* high sh/mh */
	idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx =
		rdev->pm.power_state[idx].num_clock_modes - 1;

	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx =
		rdev->pm.power_state[idx].num_clock_modes - 1;
}

/**
 * btc_pm_init_profile - Initialize power profiles callback.
 *
 * @rdev: radeon_device pointer
 *
 * Initialize the power states used in profile mode
 * (BTC, cayman).
 * Used for profile mode only.
 */
void btc_pm_init_profile(struct radeon_device *rdev)
{
	int idx;

	/* default */
	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 2;
	/* starting with BTC, there is one state that is used for both
	 * MH and SH.  Difference is that we always use the high clock index for
	 * mclk.
	 */
	if (rdev->flags & RADEON_IS_MOBILITY)
		idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
	else
		idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
	/* low sh */
	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
	/* mid sh */
	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 1;
	/* high sh */
	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx = 2;
	/* low mh */
	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
	/* mid mh */
	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 1;
	/* high mh */
	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx = 2;
}

/**
 * evergreen_pm_misc - set additional pm hw parameters callback.
 *
 * @rdev: radeon_device pointer
 *
 * Set non-clock parameters associated with a power state
 * (voltage, etc.) (evergreen+).
 */
void evergreen_pm_misc(struct radeon_device *rdev)
{
	int req_ps_idx = rdev->pm.requested_power_state_index;
	int req_cm_idx = rdev->pm.requested_clock_mode_index;
	struct radeon_power_state *ps = &rdev->pm.power_state[req_ps_idx];
	struct radeon_voltage *voltage = &ps->clock_info[req_cm_idx].voltage;

	if (voltage->type == VOLTAGE_SW) {
		/* 0xff0x are flags rather than an actual voltage */
		if ((voltage->voltage & 0xff00) == 0xff00)
			return;
		if (voltage->voltage && (voltage->voltage != rdev->pm.current_vddc)) {
			radeon_atom_set_voltage(rdev, voltage->voltage, SET_VOLTAGE_TYPE_ASIC_VDDC);
			rdev->pm.current_vddc = voltage->voltage;
			DRM_DEBUG("Setting: vddc: %d\n", voltage->voltage);
		}

		/* starting with BTC, there is one state that is used for both
		 * MH and SH.  Difference is that we always use the high clock index for
		 * mclk and vddci.
		 */
		if ((rdev->pm.pm_method == PM_METHOD_PROFILE) &&
		    (rdev->family >= CHIP_BARTS) &&
		    rdev->pm.active_crtc_count &&
		    ((rdev->pm.profile_index == PM_PROFILE_MID_MH_IDX) ||
		     (rdev->pm.profile_index == PM_PROFILE_LOW_MH_IDX)))
			voltage = &rdev->pm.power_state[req_ps_idx].
				clock_info[rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx].voltage;

		/* 0xff0x are flags rather than an actual voltage */
		if ((voltage->vddci & 0xff00) == 0xff00)
			return;
		if (voltage->vddci && (voltage->vddci != rdev->pm.current_vddci)) {
			radeon_atom_set_voltage(rdev, voltage->vddci, SET_VOLTAGE_TYPE_ASIC_VDDCI);
			rdev->pm.current_vddci = voltage->vddci;
			DRM_DEBUG("Setting: vddci: %d\n", voltage->vddci);
		}
	}
}

/**
 * evergreen_pm_prepare - pre-power state change callback.
 *
 * @rdev: radeon_device pointer
 *
 * Prepare for a power state change (evergreen+).
 */
void evergreen_pm_prepare(struct radeon_device *rdev)
{
	struct drm_device *ddev = rdev->ddev;
	struct drm_crtc *crtc;
	struct radeon_crtc *radeon_crtc;
	u32 tmp;

	/* disable any active CRTCs */
	list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
		radeon_crtc = to_radeon_crtc(crtc);
		if (radeon_crtc->enabled) {
			tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
			tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
			WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
		}
	}
}
1656 
1657 /**
1658  * evergreen_pm_finish - post-power state change callback.
1659  *
1660  * @rdev: radeon_device pointer
1661  *
1662  * Clean up after a power state change (evergreen+).
1663  */
1664 void evergreen_pm_finish(struct radeon_device *rdev)
1665 {
1666 	struct drm_device *ddev = rdev->ddev;
1667 	struct drm_crtc *crtc;
1668 	struct radeon_crtc *radeon_crtc;
1669 	u32 tmp;
1670 
1671 	/* enable any active CRTCs */
1672 	list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
1673 		radeon_crtc = to_radeon_crtc(crtc);
1674 		if (radeon_crtc->enabled) {
1675 			tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
1676 			tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
1677 			WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
1678 		}
1679 	}
1680 }
1681 
1682 /**
1683  * evergreen_hpd_sense - hpd sense callback.
1684  *
1685  * @rdev: radeon_device pointer
1686  * @hpd: hpd (hotplug detect) pin
1687  *
1688  * Checks if a digital monitor is connected (evergreen+).
1689  * Returns true if connected, false if not connected.
1690  */
1691 bool evergreen_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd)
1692 {
1693 	bool connected = false;
1694 
1695 	switch (hpd) {
1696 	case RADEON_HPD_1:
1697 		if (RREG32(DC_HPD1_INT_STATUS) & DC_HPDx_SENSE)
1698 			connected = true;
1699 		break;
1700 	case RADEON_HPD_2:
1701 		if (RREG32(DC_HPD2_INT_STATUS) & DC_HPDx_SENSE)
1702 			connected = true;
1703 		break;
1704 	case RADEON_HPD_3:
1705 		if (RREG32(DC_HPD3_INT_STATUS) & DC_HPDx_SENSE)
1706 			connected = true;
1707 		break;
1708 	case RADEON_HPD_4:
1709 		if (RREG32(DC_HPD4_INT_STATUS) & DC_HPDx_SENSE)
1710 			connected = true;
1711 		break;
1712 	case RADEON_HPD_5:
1713 		if (RREG32(DC_HPD5_INT_STATUS) & DC_HPDx_SENSE)
1714 			connected = true;
1715 		break;
1716 	case RADEON_HPD_6:
1717 		if (RREG32(DC_HPD6_INT_STATUS) & DC_HPDx_SENSE)
1718 			connected = true;
1719 		break;
1720 	default:
1721 		break;
1722 	}
1723 
1724 	return connected;
1725 }
1726 
1727 /**
1728  * evergreen_hpd_set_polarity - hpd set polarity callback.
1729  *
1730  * @rdev: radeon_device pointer
1731  * @hpd: hpd (hotplug detect) pin
1732  *
1733  * Set the polarity of the hpd pin (evergreen+).
1734  */
1735 void evergreen_hpd_set_polarity(struct radeon_device *rdev,
1736 				enum radeon_hpd_id hpd)
1737 {
1738 	u32 tmp;
1739 	bool connected = evergreen_hpd_sense(rdev, hpd);
1740 
1741 	switch (hpd) {
1742 	case RADEON_HPD_1:
1743 		tmp = RREG32(DC_HPD1_INT_CONTROL);
1744 		if (connected)
1745 			tmp &= ~DC_HPDx_INT_POLARITY;
1746 		else
1747 			tmp |= DC_HPDx_INT_POLARITY;
1748 		WREG32(DC_HPD1_INT_CONTROL, tmp);
1749 		break;
1750 	case RADEON_HPD_2:
1751 		tmp = RREG32(DC_HPD2_INT_CONTROL);
1752 		if (connected)
1753 			tmp &= ~DC_HPDx_INT_POLARITY;
1754 		else
1755 			tmp |= DC_HPDx_INT_POLARITY;
1756 		WREG32(DC_HPD2_INT_CONTROL, tmp);
1757 		break;
1758 	case RADEON_HPD_3:
1759 		tmp = RREG32(DC_HPD3_INT_CONTROL);
1760 		if (connected)
1761 			tmp &= ~DC_HPDx_INT_POLARITY;
1762 		else
1763 			tmp |= DC_HPDx_INT_POLARITY;
1764 		WREG32(DC_HPD3_INT_CONTROL, tmp);
1765 		break;
1766 	case RADEON_HPD_4:
1767 		tmp = RREG32(DC_HPD4_INT_CONTROL);
1768 		if (connected)
1769 			tmp &= ~DC_HPDx_INT_POLARITY;
1770 		else
1771 			tmp |= DC_HPDx_INT_POLARITY;
1772 		WREG32(DC_HPD4_INT_CONTROL, tmp);
1773 		break;
1774 	case RADEON_HPD_5:
1775 		tmp = RREG32(DC_HPD5_INT_CONTROL);
1776 		if (connected)
1777 			tmp &= ~DC_HPDx_INT_POLARITY;
1778 		else
1779 			tmp |= DC_HPDx_INT_POLARITY;
1780 		WREG32(DC_HPD5_INT_CONTROL, tmp);
1781 		break;
1782 	case RADEON_HPD_6:
1783 		tmp = RREG32(DC_HPD6_INT_CONTROL);
1784 		if (connected)
1785 			tmp &= ~DC_HPDx_INT_POLARITY;
1786 		else
1787 			tmp |= DC_HPDx_INT_POLARITY;
1788 		WREG32(DC_HPD6_INT_CONTROL, tmp);
1789 		break;
1790 	default:
1791 		break;
1792 	}
1793 }
1794 
1795 /**
1796  * evergreen_hpd_init - hpd setup callback.
1797  *
1798  * @rdev: radeon_device pointer
1799  *
1800  * Setup the hpd pins used by the card (evergreen+).
1801  * Enable the pin, set the polarity, and enable the hpd interrupts.
1802  */
1803 void evergreen_hpd_init(struct radeon_device *rdev)
1804 {
1805 	struct drm_device *dev = rdev->ddev;
1806 	struct drm_connector *connector;
1807 	unsigned enabled = 0;
1808 	u32 tmp = DC_HPDx_CONNECTION_TIMER(0x9c4) |
1809 		DC_HPDx_RX_INT_TIMER(0xfa) | DC_HPDx_EN;
1810 
1811 	list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
1812 		struct radeon_connector *radeon_connector = to_radeon_connector(connector);
1813 
1814 		if (connector->connector_type == DRM_MODE_CONNECTOR_eDP ||
1815 		    connector->connector_type == DRM_MODE_CONNECTOR_LVDS) {
1816 			/* don't try to enable hpd on eDP or LVDS to avoid breaking
1817 			 * the aux dp channel on iMacs; this helps (but does not completely fix)
1818 			 * https://bugzilla.redhat.com/show_bug.cgi?id=726143
1819 			 * and also avoids interrupt storms during dpms.
1820 			 */
1821 			continue;
1822 		}
1823 		switch (radeon_connector->hpd.hpd) {
1824 		case RADEON_HPD_1:
1825 			WREG32(DC_HPD1_CONTROL, tmp);
1826 			break;
1827 		case RADEON_HPD_2:
1828 			WREG32(DC_HPD2_CONTROL, tmp);
1829 			break;
1830 		case RADEON_HPD_3:
1831 			WREG32(DC_HPD3_CONTROL, tmp);
1832 			break;
1833 		case RADEON_HPD_4:
1834 			WREG32(DC_HPD4_CONTROL, tmp);
1835 			break;
1836 		case RADEON_HPD_5:
1837 			WREG32(DC_HPD5_CONTROL, tmp);
1838 			break;
1839 		case RADEON_HPD_6:
1840 			WREG32(DC_HPD6_CONTROL, tmp);
1841 			break;
1842 		default:
1843 			break;
1844 		}
1845 		radeon_hpd_set_polarity(rdev, radeon_connector->hpd.hpd);
1846 		if (radeon_connector->hpd.hpd != RADEON_HPD_NONE)
1847 			enabled |= 1 << radeon_connector->hpd.hpd;
1848 	}
1849 	radeon_irq_kms_enable_hpd(rdev, enabled);
1850 }
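/* Example of the bitmask handed to radeon_irq_kms_enable_hpd() above
 * (hypothetical topology, not tied to any particular board): with one
 * connector on RADEON_HPD_1 and one on RADEON_HPD_3, the loop accumulates
 * enabled = (1 << 0) | (1 << 2) = 0x5, so only those two hpd interrupt
 * sources get unmasked.
 */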
1851 
1852 /**
1853  * evergreen_hpd_fini - hpd tear down callback.
1854  *
1855  * @rdev: radeon_device pointer
1856  *
1857  * Tear down the hpd pins used by the card (evergreen+).
1858  * Disable the hpd interrupts.
1859  */
1860 void evergreen_hpd_fini(struct radeon_device *rdev)
1861 {
1862 	struct drm_device *dev = rdev->ddev;
1863 	struct drm_connector *connector;
1864 	unsigned disabled = 0;
1865 
1866 	list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
1867 		struct radeon_connector *radeon_connector = to_radeon_connector(connector);
1868 		switch (radeon_connector->hpd.hpd) {
1869 		case RADEON_HPD_1:
1870 			WREG32(DC_HPD1_CONTROL, 0);
1871 			break;
1872 		case RADEON_HPD_2:
1873 			WREG32(DC_HPD2_CONTROL, 0);
1874 			break;
1875 		case RADEON_HPD_3:
1876 			WREG32(DC_HPD3_CONTROL, 0);
1877 			break;
1878 		case RADEON_HPD_4:
1879 			WREG32(DC_HPD4_CONTROL, 0);
1880 			break;
1881 		case RADEON_HPD_5:
1882 			WREG32(DC_HPD5_CONTROL, 0);
1883 			break;
1884 		case RADEON_HPD_6:
1885 			WREG32(DC_HPD6_CONTROL, 0);
1886 			break;
1887 		default:
1888 			break;
1889 		}
1890 		if (radeon_connector->hpd.hpd != RADEON_HPD_NONE)
1891 			disabled |= 1 << radeon_connector->hpd.hpd;
1892 	}
1893 	radeon_irq_kms_disable_hpd(rdev, disabled);
1894 }
1895 
1896 /* watermark setup */
1897 
1898 static u32 evergreen_line_buffer_adjust(struct radeon_device *rdev,
1899 					struct radeon_crtc *radeon_crtc,
1900 					struct drm_display_mode *mode,
1901 					struct drm_display_mode *other_mode)
1902 {
1903 	u32 tmp, buffer_alloc, i;
1904 	u32 pipe_offset = radeon_crtc->crtc_id * 0x20;
1905 	/*
1906 	 * Line Buffer Setup
1907 	 * There are 3 line buffers, each one shared by 2 display controllers.
1908 	 * DC_LB_MEMORY_SPLIT controls how that line buffer is shared between
1909 	 * the display controllers.  The partitioning is done via one of four
1910 	 * preset allocations specified in bits 2:0:
1911 	 * first display controller
1912 	 *  0 - first half of lb (3840 * 2)
1913 	 *  1 - first 3/4 of lb (5760 * 2)
1914 	 *  2 - whole lb (7680 * 2), other crtc must be disabled
1915 	 *  3 - first 1/4 of lb (1920 * 2)
1916 	 * second display controller
1917 	 *  4 - second half of lb (3840 * 2)
1918 	 *  5 - second 3/4 of lb (5760 * 2)
1919 	 *  6 - whole lb (7680 * 2), other crtc must be disabled
1920 	 *  7 - last 1/4 of lb (1920 * 2)
1921 	 */
1922 	/* this can get tricky if we have two large displays on a paired group
1923 	 * of crtcs.  Ideally for multiple large displays we'd assign them to
1924 	 * non-linked crtcs for maximum line buffer allocation.
1925 	 */
1926 	if (radeon_crtc->base.enabled && mode) {
1927 		if (other_mode) {
1928 			tmp = 0; /* 1/2 */
1929 			buffer_alloc = 1;
1930 		} else {
1931 			tmp = 2; /* whole */
1932 			buffer_alloc = 2;
1933 		}
1934 	} else {
1935 		tmp = 0;
1936 		buffer_alloc = 0;
1937 	}
1938 
1939 	/* second controller of the pair uses second half of the lb */
1940 	if (radeon_crtc->crtc_id % 2)
1941 		tmp += 4;
1942 	WREG32(DC_LB_MEMORY_SPLIT + radeon_crtc->crtc_offset, tmp);
1943 
1944 	if (ASIC_IS_DCE41(rdev) || ASIC_IS_DCE5(rdev)) {
1945 		WREG32(PIPE0_DMIF_BUFFER_CONTROL + pipe_offset,
1946 		       DMIF_BUFFERS_ALLOCATED(buffer_alloc));
1947 		for (i = 0; i < rdev->usec_timeout; i++) {
1948 			if (RREG32(PIPE0_DMIF_BUFFER_CONTROL + pipe_offset) &
1949 			    DMIF_BUFFERS_ALLOCATED_COMPLETED)
1950 				break;
1951 			udelay(1);
1952 		}
1953 	}
1954 
1955 	if (radeon_crtc->base.enabled && mode) {
1956 		switch (tmp) {
1957 		case 0:
1958 		case 4:
1959 		default:
1960 			if (ASIC_IS_DCE5(rdev))
1961 				return 4096 * 2;
1962 			else
1963 				return 3840 * 2;
1964 		case 1:
1965 		case 5:
1966 			if (ASIC_IS_DCE5(rdev))
1967 				return 6144 * 2;
1968 			else
1969 				return 5760 * 2;
1970 		case 2:
1971 		case 6:
1972 			if (ASIC_IS_DCE5(rdev))
1973 				return 8192 * 2;
1974 			else
1975 				return 7680 * 2;
1976 		case 3:
1977 		case 7:
1978 			if (ASIC_IS_DCE5(rdev))
1979 				return 2048 * 2;
1980 			else
1981 				return 1920 * 2;
1982 		}
1983 	}
1984 
1985 	/* controller not enabled, so no lb used */
1986 	return 0;
1987 }
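/* A minimal sketch of the allocation picked above (hypothetical setup):
 * crtc 0 active while crtc 1 also drives a mode -> tmp = 0 (first half),
 * buffer_alloc = 1, and on DCE5 the function returns 4096 * 2 of lb.
 * If crtc 1 were disabled, crtc 0 would get the whole lb: tmp = 2,
 * buffer_alloc = 2, returning 8192 * 2 on DCE5 (7680 * 2 otherwise).
 */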
1988 
1989 u32 evergreen_get_number_of_dram_channels(struct radeon_device *rdev)
1990 {
1991 	u32 tmp = RREG32(MC_SHARED_CHMAP);
1992 
1993 	switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
1994 	case 0:
1995 	default:
1996 		return 1;
1997 	case 1:
1998 		return 2;
1999 	case 2:
2000 		return 4;
2001 	case 3:
2002 		return 8;
2003 	}
2004 }
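/* Worked example for the decode above (hypothetical register value):
 * if MC_SHARED_CHMAP reads back with NOOFCHAN == 2, the function reports
 * 4 dram channels; evergreen_dram_bandwidth() below then models the bus
 * as 4 channels * 4 bytes = 16 bytes wide.
 */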
2005 
2006 struct evergreen_wm_params {
2007 	u32 dram_channels; /* number of dram channels */
2008 	u32 yclk;          /* bandwidth per dram data pin in kHz */
2009 	u32 sclk;          /* engine clock in kHz */
2010 	u32 disp_clk;      /* display clock in kHz */
2011 	u32 src_width;     /* viewport width */
2012 	u32 active_time;   /* active display time in ns */
2013 	u32 blank_time;    /* blank time in ns */
2014 	bool interlaced;    /* mode is interlaced */
2015 	fixed20_12 vsc;    /* vertical scale ratio */
2016 	u32 num_heads;     /* number of active crtcs */
2017 	u32 bytes_per_pixel; /* bytes per pixel display + overlay */
2018 	u32 lb_size;       /* line buffer allocated to pipe */
2019 	u32 vtaps;         /* vertical scaler taps */
2020 };
2021 
2022 static u32 evergreen_dram_bandwidth(struct evergreen_wm_params *wm)
2023 {
2024 	/* Calculate DRAM Bandwidth and the part allocated to display. */
2025 	fixed20_12 dram_efficiency; /* 0.7 */
2026 	fixed20_12 yclk, dram_channels, bandwidth;
2027 	fixed20_12 a;
2028 
2029 	a.full = dfixed_const(1000);
2030 	yclk.full = dfixed_const(wm->yclk);
2031 	yclk.full = dfixed_div(yclk, a);
2032 	dram_channels.full = dfixed_const(wm->dram_channels * 4);
2033 	a.full = dfixed_const(10);
2034 	dram_efficiency.full = dfixed_const(7);
2035 	dram_efficiency.full = dfixed_div(dram_efficiency, a);
2036 	bandwidth.full = dfixed_mul(dram_channels, yclk);
2037 	bandwidth.full = dfixed_mul(bandwidth, dram_efficiency);
2038 
2039 	return dfixed_trunc(bandwidth);
2040 }
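/* Illustrative numbers for the fixed-point math above (not taken from any
 * particular board): with wm->yclk = 800000 (kHz, so 800 after the /1000)
 * and wm->dram_channels = 2, bandwidth = (2 * 4) * 800 * 0.7 = 4480,
 * i.e. roughly 4.5 GB/s of dram bandwidth usable at 70% efficiency.
 */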
2041 
2042 static u32 evergreen_dram_bandwidth_for_display(struct evergreen_wm_params *wm)
2043 {
2044 	/* Calculate DRAM Bandwidth and the part allocated to display. */
2045 	fixed20_12 disp_dram_allocation; /* 0.3 to 0.7 */
2046 	fixed20_12 yclk, dram_channels, bandwidth;
2047 	fixed20_12 a;
2048 
2049 	a.full = dfixed_const(1000);
2050 	yclk.full = dfixed_const(wm->yclk);
2051 	yclk.full = dfixed_div(yclk, a);
2052 	dram_channels.full = dfixed_const(wm->dram_channels * 4);
2053 	a.full = dfixed_const(10);
2054 	disp_dram_allocation.full = dfixed_const(3); /* XXX worst case value 0.3 */
2055 	disp_dram_allocation.full = dfixed_div(disp_dram_allocation, a);
2056 	bandwidth.full = dfixed_mul(dram_channels, yclk);
2057 	bandwidth.full = dfixed_mul(bandwidth, disp_dram_allocation);
2058 
2059 	return dfixed_trunc(bandwidth);
2060 }
2061 
2062 static u32 evergreen_data_return_bandwidth(struct evergreen_wm_params *wm)
2063 {
2064 	/* Calculate the display Data return Bandwidth */
2065 	fixed20_12 return_efficiency; /* 0.8 */
2066 	fixed20_12 sclk, bandwidth;
2067 	fixed20_12 a;
2068 
2069 	a.full = dfixed_const(1000);
2070 	sclk.full = dfixed_const(wm->sclk);
2071 	sclk.full = dfixed_div(sclk, a);
2072 	a.full = dfixed_const(10);
2073 	return_efficiency.full = dfixed_const(8);
2074 	return_efficiency.full = dfixed_div(return_efficiency, a);
2075 	a.full = dfixed_const(32);
2076 	bandwidth.full = dfixed_mul(a, sclk);
2077 	bandwidth.full = dfixed_mul(bandwidth, return_efficiency);
2078 
2079 	return dfixed_trunc(bandwidth);
2080 }
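/* Same style of example (hypothetical sclk): with wm->sclk = 600000 kHz,
 * the return path is modeled as 32 bytes per engine clock at 80%
 * efficiency: 32 * 600 * 0.8 = 15360 (MB/s).
 */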
2081 
2082 static u32 evergreen_dmif_request_bandwidth(struct evergreen_wm_params *wm)
2083 {
2084 	/* Calculate the DMIF Request Bandwidth */
2085 	fixed20_12 disp_clk_request_efficiency; /* 0.8 */
2086 	fixed20_12 disp_clk, bandwidth;
2087 	fixed20_12 a;
2088 
2089 	a.full = dfixed_const(1000);
2090 	disp_clk.full = dfixed_const(wm->disp_clk);
2091 	disp_clk.full = dfixed_div(disp_clk, a);
2092 	a.full = dfixed_const(10);
2093 	disp_clk_request_efficiency.full = dfixed_const(8);
2094 	disp_clk_request_efficiency.full = dfixed_div(disp_clk_request_efficiency, a);
2095 	a.full = dfixed_const(32);
2096 	bandwidth.full = dfixed_mul(a, disp_clk);
2097 	bandwidth.full = dfixed_mul(bandwidth, disp_clk_request_efficiency);
2098 
2099 	return dfixed_trunc(bandwidth);
2100 }
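/* Analogous example (hypothetical disp_clk): with wm->disp_clk = 400000
 * kHz, the DMIF request path is 32 bytes per display clock at 80%
 * efficiency: 32 * 400 * 0.8 = 10240 (MB/s).
 */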
2101 
2102 static u32 evergreen_available_bandwidth(struct evergreen_wm_params *wm)
2103 {
2104 	/* Calculate the Available bandwidth. Display can use this temporarily but not on average. */
2105 	u32 dram_bandwidth = evergreen_dram_bandwidth(wm);
2106 	u32 data_return_bandwidth = evergreen_data_return_bandwidth(wm);
2107 	u32 dmif_req_bandwidth = evergreen_dmif_request_bandwidth(wm);
2108 
2109 	return min(dram_bandwidth, min(data_return_bandwidth, dmif_req_bandwidth));
2110 }
2111 
2112 static u32 evergreen_average_bandwidth(struct evergreen_wm_params *wm)
2113 {
2114 	/* Calculate the display mode Average Bandwidth
2115 	 * DisplayMode should contain the source and destination dimensions,
2116 	 * timing, etc.
2117 	 */
2118 	fixed20_12 bpp;
2119 	fixed20_12 line_time;
2120 	fixed20_12 src_width;
2121 	fixed20_12 bandwidth;
2122 	fixed20_12 a;
2123 
2124 	a.full = dfixed_const(1000);
2125 	line_time.full = dfixed_const(wm->active_time + wm->blank_time);
2126 	line_time.full = dfixed_div(line_time, a);
2127 	bpp.full = dfixed_const(wm->bytes_per_pixel);
2128 	src_width.full = dfixed_const(wm->src_width);
2129 	bandwidth.full = dfixed_mul(src_width, bpp);
2130 	bandwidth.full = dfixed_mul(bandwidth, wm->vsc);
2131 	bandwidth.full = dfixed_div(bandwidth, line_time);
2132 
2133 	return dfixed_trunc(bandwidth);
2134 }
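/* Sketch with assumed mode numbers: for a 1920-wide source at 4 bytes per
 * pixel, vsc = 1, and a line time of active + blank = 16000 ns (16 us per
 * line after the /1000), average bandwidth = 1920 * 4 * 1 / 16 = 480
 * (MB/s per head).
 */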
2135 
2136 static u32 evergreen_latency_watermark(struct evergreen_wm_params *wm)
2137 {
2138 	/* First calculate the latency in ns */
2139 	u32 mc_latency = 2000; /* 2000 ns. */
2140 	u32 available_bandwidth = evergreen_available_bandwidth(wm);
2141 	u32 worst_chunk_return_time = (512 * 8 * 1000) / available_bandwidth;
2142 	u32 cursor_line_pair_return_time = (128 * 4 * 1000) / available_bandwidth;
2143 	u32 dc_latency = 40000000 / wm->disp_clk; /* dc pipe latency */
2144 	u32 other_heads_data_return_time = ((wm->num_heads + 1) * worst_chunk_return_time) +
2145 		(wm->num_heads * cursor_line_pair_return_time);
2146 	u32 latency = mc_latency + other_heads_data_return_time + dc_latency;
2147 	u32 max_src_lines_per_dst_line, lb_fill_bw, line_fill_time;
2148 	fixed20_12 a, b, c;
2149 
2150 	if (wm->num_heads == 0)
2151 		return 0;
2152 
2153 	a.full = dfixed_const(2);
2154 	b.full = dfixed_const(1);
2155 	if ((wm->vsc.full > a.full) ||
2156 	    ((wm->vsc.full > b.full) && (wm->vtaps >= 3)) ||
2157 	    (wm->vtaps >= 5) ||
2158 	    ((wm->vsc.full >= a.full) && wm->interlaced))
2159 		max_src_lines_per_dst_line = 4;
2160 	else
2161 		max_src_lines_per_dst_line = 2;
2162 
2163 	a.full = dfixed_const(available_bandwidth);
2164 	b.full = dfixed_const(wm->num_heads);
2165 	a.full = dfixed_div(a, b);
2166 
2167 	b.full = dfixed_const(1000);
2168 	c.full = dfixed_const(wm->disp_clk);
2169 	b.full = dfixed_div(c, b);
2170 	c.full = dfixed_const(wm->bytes_per_pixel);
2171 	b.full = dfixed_mul(b, c);
2172 
2173 	lb_fill_bw = min(dfixed_trunc(a), dfixed_trunc(b));
2174 
2175 	a.full = dfixed_const(max_src_lines_per_dst_line * wm->src_width * wm->bytes_per_pixel);
2176 	b.full = dfixed_const(1000);
2177 	c.full = dfixed_const(lb_fill_bw);
2178 	b.full = dfixed_div(c, b);
2179 	a.full = dfixed_div(a, b);
2180 	line_fill_time = dfixed_trunc(a);
2181 
2182 	if (line_fill_time < wm->active_time)
2183 		return latency;
2184 	else
2185 		return latency + (line_fill_time - wm->active_time);
2186 
2187 }
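/* Rough walk-through of the latency above (all numbers hypothetical):
 * with available_bandwidth = 10240, worst_chunk_return_time =
 * (512 * 8 * 1000) / 10240 = 400 ns, and with disp_clk = 400000 kHz,
 * dc_latency = 40000000 / 400000 = 100 ns; the 2000 ns mc_latency plus
 * the other heads' chunk return time then dominates the returned
 * watermark, unless line_fill_time exceeds the mode's active_time.
 */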
2188 
2189 static bool evergreen_average_bandwidth_vs_dram_bandwidth_for_display(struct evergreen_wm_params *wm)
2190 {
2191 	if (evergreen_average_bandwidth(wm) <=
2192 	    (evergreen_dram_bandwidth_for_display(wm) / wm->num_heads))
2193 		return true;
2194 	else
2195 		return false;
2196 }
2197 
2198 static bool evergreen_average_bandwidth_vs_available_bandwidth(struct evergreen_wm_params *wm)
2199 {
2200 	if (evergreen_average_bandwidth(wm) <=
2201 	    (evergreen_available_bandwidth(wm) / wm->num_heads))
2202 		return true;
2203 	else
2204 		return false;
2205 }
2206 
2207 static bool evergreen_check_latency_hiding(struct evergreen_wm_params *wm)
2208 {
2209 	u32 lb_partitions = wm->lb_size / wm->src_width;
2210 	u32 line_time = wm->active_time + wm->blank_time;
2211 	u32 latency_tolerant_lines;
2212 	u32 latency_hiding;
2213 	fixed20_12 a;
2214 
2215 	a.full = dfixed_const(1);
2216 	if (wm->vsc.full > a.full)
2217 		latency_tolerant_lines = 1;
2218 	else {
2219 		if (lb_partitions <= (wm->vtaps + 1))
2220 			latency_tolerant_lines = 1;
2221 		else
2222 			latency_tolerant_lines = 2;
2223 	}
2224 
2225 	latency_hiding = (latency_tolerant_lines * line_time + wm->blank_time);
2226 
2227 	if (evergreen_latency_watermark(wm) <= latency_hiding)
2228 		return true;
2229 	else
2230 		return false;
2231 }
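/* Example of the hiding check above (assumed numbers): lb_size = 8192 and
 * src_width = 1920 -> lb_partitions = 4; with vsc <= 1 and vtaps = 2,
 * 4 > (2 + 1) so latency_tolerant_lines = 2.  With line_time = 16000 ns
 * and blank_time = 1200 ns, latency_hiding = 2 * 16000 + 1200 = 33200 ns,
 * which the computed watermark must not exceed.
 */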
2232 
2233 static void evergreen_program_watermarks(struct radeon_device *rdev,
2234 					 struct radeon_crtc *radeon_crtc,
2235 					 u32 lb_size, u32 num_heads)
2236 {
2237 	struct drm_display_mode *mode = &radeon_crtc->base.mode;
2238 	struct evergreen_wm_params wm_low, wm_high;
2239 	u32 dram_channels;
2240 	u32 pixel_period;
2241 	u32 line_time = 0;
2242 	u32 latency_watermark_a = 0, latency_watermark_b = 0;
2243 	u32 priority_a_mark = 0, priority_b_mark = 0;
2244 	u32 priority_a_cnt = PRIORITY_OFF;
2245 	u32 priority_b_cnt = PRIORITY_OFF;
2246 	u32 pipe_offset = radeon_crtc->crtc_id * 16;
2247 	u32 tmp, arb_control3;
2248 	fixed20_12 a, b, c;
2249 
2250 	if (radeon_crtc->base.enabled && num_heads && mode) {
2251 		pixel_period = 1000000 / (u32)mode->clock;
2252 		line_time = min((u32)mode->crtc_htotal * pixel_period, (u32)65535);
2253 		priority_a_cnt = 0;
2254 		priority_b_cnt = 0;
2255 		dram_channels = evergreen_get_number_of_dram_channels(rdev);
2256 
2257 		/* watermark for high clocks */
2258 		if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) {
2259 			wm_high.yclk =
2260 				radeon_dpm_get_mclk(rdev, false) * 10;
2261 			wm_high.sclk =
2262 				radeon_dpm_get_sclk(rdev, false) * 10;
2263 		} else {
2264 			wm_high.yclk = rdev->pm.current_mclk * 10;
2265 			wm_high.sclk = rdev->pm.current_sclk * 10;
2266 		}
2267 
2268 		wm_high.disp_clk = mode->clock;
2269 		wm_high.src_width = mode->crtc_hdisplay;
2270 		wm_high.active_time = mode->crtc_hdisplay * pixel_period;
2271 		wm_high.blank_time = line_time - wm_high.active_time;
2272 		wm_high.interlaced = false;
2273 		if (mode->flags & DRM_MODE_FLAG_INTERLACE)
2274 			wm_high.interlaced = true;
2275 		wm_high.vsc = radeon_crtc->vsc;
2276 		wm_high.vtaps = 1;
2277 		if (radeon_crtc->rmx_type != RMX_OFF)
2278 			wm_high.vtaps = 2;
2279 		wm_high.bytes_per_pixel = 4; /* XXX: get this from fb config */
2280 		wm_high.lb_size = lb_size;
2281 		wm_high.dram_channels = dram_channels;
2282 		wm_high.num_heads = num_heads;
2283 
2284 		/* watermark for low clocks */
2285 		if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) {
2286 			wm_low.yclk =
2287 				radeon_dpm_get_mclk(rdev, true) * 10;
2288 			wm_low.sclk =
2289 				radeon_dpm_get_sclk(rdev, true) * 10;
2290 		} else {
2291 			wm_low.yclk = rdev->pm.current_mclk * 10;
2292 			wm_low.sclk = rdev->pm.current_sclk * 10;
2293 		}
2294 
2295 		wm_low.disp_clk = mode->clock;
2296 		wm_low.src_width = mode->crtc_hdisplay;
2297 		wm_low.active_time = mode->crtc_hdisplay * pixel_period;
2298 		wm_low.blank_time = line_time - wm_low.active_time;
2299 		wm_low.interlaced = false;
2300 		if (mode->flags & DRM_MODE_FLAG_INTERLACE)
2301 			wm_low.interlaced = true;
2302 		wm_low.vsc = radeon_crtc->vsc;
2303 		wm_low.vtaps = 1;
2304 		if (radeon_crtc->rmx_type != RMX_OFF)
2305 			wm_low.vtaps = 2;
2306 		wm_low.bytes_per_pixel = 4; /* XXX: get this from fb config */
2307 		wm_low.lb_size = lb_size;
2308 		wm_low.dram_channels = dram_channels;
2309 		wm_low.num_heads = num_heads;
2310 
2311 		/* set for high clocks */
2312 		latency_watermark_a = min(evergreen_latency_watermark(&wm_high), (u32)65535);
2313 		/* set for low clocks */
2314 		latency_watermark_b = min(evergreen_latency_watermark(&wm_low), (u32)65535);
2315 
2316 		/* possibly force display priority to high */
2317 		/* should really do this at mode validation time... */
2318 		if (!evergreen_average_bandwidth_vs_dram_bandwidth_for_display(&wm_high) ||
2319 		    !evergreen_average_bandwidth_vs_available_bandwidth(&wm_high) ||
2320 		    !evergreen_check_latency_hiding(&wm_high) ||
2321 		    (rdev->disp_priority == 2)) {
2322 			DRM_DEBUG_KMS("force priority a to high\n");
2323 			priority_a_cnt |= PRIORITY_ALWAYS_ON;
2324 		}
2325 		if (!evergreen_average_bandwidth_vs_dram_bandwidth_for_display(&wm_low) ||
2326 		    !evergreen_average_bandwidth_vs_available_bandwidth(&wm_low) ||
2327 		    !evergreen_check_latency_hiding(&wm_low) ||
2328 		    (rdev->disp_priority == 2)) {
2329 			DRM_DEBUG_KMS("force priority b to high\n");
2330 			priority_b_cnt |= PRIORITY_ALWAYS_ON;
2331 		}
2332 
2333 		a.full = dfixed_const(1000);
2334 		b.full = dfixed_const(mode->clock);
2335 		b.full = dfixed_div(b, a);
2336 		c.full = dfixed_const(latency_watermark_a);
2337 		c.full = dfixed_mul(c, b);
2338 		c.full = dfixed_mul(c, radeon_crtc->hsc);
2339 		c.full = dfixed_div(c, a);
2340 		a.full = dfixed_const(16);
2341 		c.full = dfixed_div(c, a);
2342 		priority_a_mark = dfixed_trunc(c);
2343 		priority_a_cnt |= priority_a_mark & PRIORITY_MARK_MASK;
2344 
2345 		a.full = dfixed_const(1000);
2346 		b.full = dfixed_const(mode->clock);
2347 		b.full = dfixed_div(b, a);
2348 		c.full = dfixed_const(latency_watermark_b);
2349 		c.full = dfixed_mul(c, b);
2350 		c.full = dfixed_mul(c, radeon_crtc->hsc);
2351 		c.full = dfixed_div(c, a);
2352 		a.full = dfixed_const(16);
2353 		c.full = dfixed_div(c, a);
2354 		priority_b_mark = dfixed_trunc(c);
2355 		priority_b_cnt |= priority_b_mark & PRIORITY_MARK_MASK;
2356 
2357 		/* Save number of lines the linebuffer leads before the scanout */
2358 		radeon_crtc->lb_vblank_lead_lines = DIV_ROUND_UP(lb_size, mode->crtc_hdisplay);
2359 	}
2360 
2361 	/* select wm A */
2362 	arb_control3 = RREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset);
2363 	tmp = arb_control3;
2364 	tmp &= ~LATENCY_WATERMARK_MASK(3);
2365 	tmp |= LATENCY_WATERMARK_MASK(1);
2366 	WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, tmp);
2367 	WREG32(PIPE0_LATENCY_CONTROL + pipe_offset,
2368 	       (LATENCY_LOW_WATERMARK(latency_watermark_a) |
2369 		LATENCY_HIGH_WATERMARK(line_time)));
2370 	/* select wm B */
2371 	tmp = RREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset);
2372 	tmp &= ~LATENCY_WATERMARK_MASK(3);
2373 	tmp |= LATENCY_WATERMARK_MASK(2);
2374 	WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, tmp);
2375 	WREG32(PIPE0_LATENCY_CONTROL + pipe_offset,
2376 	       (LATENCY_LOW_WATERMARK(latency_watermark_b) |
2377 		LATENCY_HIGH_WATERMARK(line_time)));
2378 	/* restore original selection */
2379 	WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, arb_control3);
2380 
2381 	/* write the priority marks */
2382 	WREG32(PRIORITY_A_CNT + radeon_crtc->crtc_offset, priority_a_cnt);
2383 	WREG32(PRIORITY_B_CNT + radeon_crtc->crtc_offset, priority_b_cnt);
2384 
2385 	/* save values for DPM */
2386 	radeon_crtc->line_time = line_time;
2387 	radeon_crtc->wm_high = latency_watermark_a;
2388 	radeon_crtc->wm_low = latency_watermark_b;
2389 }
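/* The priority mark fixed-point math above reduces, in plain units, to:
 *   mark = (latency_ns / 1000) * pixel_clock_MHz * hsc / 16
 * i.e. the number of 16-pixel groups scanned out while a request is in
 * flight.  Assumed example: a 6000 ns watermark at a 148500 kHz pixel
 * clock with hsc = 1 gives 6 * 148.5 / 16 ~= 55 for the PRIORITY_x_CNT
 * mark field.
 */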
2390 
2391 /**
2392  * evergreen_bandwidth_update - update display watermarks callback.
2393  *
2394  * @rdev: radeon_device pointer
2395  *
2396  * Update the display watermarks based on the requested mode(s)
2397  * (evergreen+).
2398  */
2399 void evergreen_bandwidth_update(struct radeon_device *rdev)
2400 {
2401 	struct drm_display_mode *mode0 = NULL;
2402 	struct drm_display_mode *mode1 = NULL;
2403 	u32 num_heads = 0, lb_size;
2404 	int i;
2405 
2406 	if (!rdev->mode_info.mode_config_initialized)
2407 		return;
2408 
2409 	radeon_update_display_priority(rdev);
2410 
2411 	for (i = 0; i < rdev->num_crtc; i++) {
2412 		if (rdev->mode_info.crtcs[i]->base.enabled)
2413 			num_heads++;
2414 	}
2415 	for (i = 0; i < rdev->num_crtc; i += 2) {
2416 		mode0 = &rdev->mode_info.crtcs[i]->base.mode;
2417 		mode1 = &rdev->mode_info.crtcs[i+1]->base.mode;
2418 		lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i], mode0, mode1);
2419 		evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i], lb_size, num_heads);
2420 		lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i+1], mode1, mode0);
2421 		evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i+1], lb_size, num_heads);
2422 	}
2423 }
2424 
2425 /**
2426  * evergreen_mc_wait_for_idle - wait for MC idle callback.
2427  *
2428  * @rdev: radeon_device pointer
2429  *
2430  * Wait for the MC (memory controller) to be idle.
2431  * (evergreen+).
2432  * Returns 0 if the MC is idle, -1 if not.
2433  */
2434 int evergreen_mc_wait_for_idle(struct radeon_device *rdev)
2435 {
2436 	unsigned i;
2437 	u32 tmp;
2438 
2439 	for (i = 0; i < rdev->usec_timeout; i++) {
2440 		/* read MC_STATUS */
2441 		tmp = RREG32(SRBM_STATUS) & 0x1F00;
2442 		if (!tmp)
2443 			return 0;
2444 		udelay(1);
2445 	}
2446 	return -1;
2447 }
2448 
2449 /*
2450  * GART
2451  */
2452 void evergreen_pcie_gart_tlb_flush(struct radeon_device *rdev)
2453 {
2454 	unsigned i;
2455 	u32 tmp;
2456 
2457 	WREG32(HDP_MEM_COHERENCY_FLUSH_CNTL, 0x1);
2458 
2459 	WREG32(VM_CONTEXT0_REQUEST_RESPONSE, REQUEST_TYPE(1));
2460 	for (i = 0; i < rdev->usec_timeout; i++) {
2461 		/* read the VM context0 response */
2462 		tmp = RREG32(VM_CONTEXT0_REQUEST_RESPONSE);
2463 		tmp = (tmp & RESPONSE_TYPE_MASK) >> RESPONSE_TYPE_SHIFT;
2464 		if (tmp == 2) {
2465 			printk(KERN_WARNING "[drm] evergreen flush TLB failed\n");
2466 			return;
2467 		}
2468 		if (tmp) {
2469 			return;
2470 		}
2471 		udelay(1);
2472 	}
2473 }
2474 
2475 static int evergreen_pcie_gart_enable(struct radeon_device *rdev)
2476 {
2477 	u32 tmp;
2478 	int r;
2479 
2480 	if (rdev->gart.robj == NULL) {
2481 		dev_err(rdev->dev, "No VRAM object for PCIE GART.\n");
2482 		return -EINVAL;
2483 	}
2484 	r = radeon_gart_table_vram_pin(rdev);
2485 	if (r)
2486 		return r;
2487 	/* Setup L2 cache */
2488 	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
2489 				ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
2490 				EFFECTIVE_L2_QUEUE_SIZE(7));
2491 	WREG32(VM_L2_CNTL2, 0);
2492 	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
2493 	/* Setup TLB control */
2494 	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
2495 		SYSTEM_ACCESS_MODE_NOT_IN_SYS |
2496 		SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
2497 		EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
2498 	if (rdev->flags & RADEON_IS_IGP) {
2499 		WREG32(FUS_MC_VM_MD_L1_TLB0_CNTL, tmp);
2500 		WREG32(FUS_MC_VM_MD_L1_TLB1_CNTL, tmp);
2501 		WREG32(FUS_MC_VM_MD_L1_TLB2_CNTL, tmp);
2502 	} else {
2503 		WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
2504 		WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
2505 		WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
2506 		if ((rdev->family == CHIP_JUNIPER) ||
2507 		    (rdev->family == CHIP_CYPRESS) ||
2508 		    (rdev->family == CHIP_HEMLOCK) ||
2509 		    (rdev->family == CHIP_BARTS))
2510 			WREG32(MC_VM_MD_L1_TLB3_CNTL, tmp);
2511 	}
2512 	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
2513 	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
2514 	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
2515 	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
2516 	WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12);
2517 	WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12);
2518 	WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12);
2519 	WREG32(VM_CONTEXT0_CNTL, ENABLE_CONTEXT | PAGE_TABLE_DEPTH(0) |
2520 				RANGE_PROTECTION_FAULT_ENABLE_DEFAULT);
2521 	WREG32(VM_CONTEXT0_PROTECTION_FAULT_DEFAULT_ADDR,
2522 			(u32)(rdev->dummy_page.addr >> 12));
2523 	WREG32(VM_CONTEXT1_CNTL, 0);
2524 
2525 	evergreen_pcie_gart_tlb_flush(rdev);
2526 	DRM_INFO("PCIE GART of %uM enabled (table at 0x%016llX).\n",
2527 		 (unsigned)(rdev->mc.gtt_size >> 20),
2528 		 (unsigned long long)rdev->gart.table_addr);
2529 	rdev->gart.ready = true;
2530 	return 0;
2531 }
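/* Assumed example for the context0 page-table range programmed above:
 * with gtt_start = 0 and a 256 MiB gtt, gtt_end = 0x0FFFFFFF, so the
 * start and end registers get 0x0 and 0xFFFF -- everything is expressed
 * in 4 KiB pages via the >> 12 shifts.
 */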
2532 
2533 static void evergreen_pcie_gart_disable(struct radeon_device *rdev)
2534 {
2535 	u32 tmp;
2536 
2537 	/* Disable all tables */
2538 	WREG32(VM_CONTEXT0_CNTL, 0);
2539 	WREG32(VM_CONTEXT1_CNTL, 0);
2540 
2541 	/* Setup L2 cache */
2542 	WREG32(VM_L2_CNTL, ENABLE_L2_FRAGMENT_PROCESSING |
2543 				EFFECTIVE_L2_QUEUE_SIZE(7));
2544 	WREG32(VM_L2_CNTL2, 0);
2545 	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
2546 	/* Setup TLB control */
2547 	tmp = EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
2548 	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
2549 	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
2550 	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
2551 	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
2552 	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
2553 	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
2554 	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
2555 	radeon_gart_table_vram_unpin(rdev);
2556 }
2557 
2558 static void evergreen_pcie_gart_fini(struct radeon_device *rdev)
2559 {
2560 	evergreen_pcie_gart_disable(rdev);
2561 	radeon_gart_table_vram_free(rdev);
2562 	radeon_gart_fini(rdev);
2563 }
2564 
2565 
2566 static void evergreen_agp_enable(struct radeon_device *rdev)
2567 {
2568 	u32 tmp;
2569 
2570 	/* Setup L2 cache */
2571 	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
2572 				ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
2573 				EFFECTIVE_L2_QUEUE_SIZE(7));
2574 	WREG32(VM_L2_CNTL2, 0);
2575 	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
2576 	/* Setup TLB control */
2577 	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
2578 		SYSTEM_ACCESS_MODE_NOT_IN_SYS |
2579 		SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
2580 		EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
2581 	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
2582 	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
2583 	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
2584 	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
2585 	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
2586 	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
2587 	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
2588 	WREG32(VM_CONTEXT0_CNTL, 0);
2589 	WREG32(VM_CONTEXT1_CNTL, 0);
2590 }
2591 
2592 static const unsigned ni_dig_offsets[] =
2593 {
2594 	NI_DIG0_REGISTER_OFFSET,
2595 	NI_DIG1_REGISTER_OFFSET,
2596 	NI_DIG2_REGISTER_OFFSET,
2597 	NI_DIG3_REGISTER_OFFSET,
2598 	NI_DIG4_REGISTER_OFFSET,
2599 	NI_DIG5_REGISTER_OFFSET
2600 };
2601 
2602 static const unsigned ni_tx_offsets[] =
2603 {
2604 	NI_DCIO_UNIPHY0_UNIPHY_TX_CONTROL1,
2605 	NI_DCIO_UNIPHY1_UNIPHY_TX_CONTROL1,
2606 	NI_DCIO_UNIPHY2_UNIPHY_TX_CONTROL1,
2607 	NI_DCIO_UNIPHY3_UNIPHY_TX_CONTROL1,
2608 	NI_DCIO_UNIPHY4_UNIPHY_TX_CONTROL1,
2609 	NI_DCIO_UNIPHY5_UNIPHY_TX_CONTROL1
2610 };
2611 
2612 static const unsigned evergreen_dp_offsets[] =
2613 {
2614 	EVERGREEN_DP0_REGISTER_OFFSET,
2615 	EVERGREEN_DP1_REGISTER_OFFSET,
2616 	EVERGREEN_DP2_REGISTER_OFFSET,
2617 	EVERGREEN_DP3_REGISTER_OFFSET,
2618 	EVERGREEN_DP4_REGISTER_OFFSET,
2619 	EVERGREEN_DP5_REGISTER_OFFSET
2620 };
2621 
2622 
2623 /*
2624  * The assumption is that EVERGREEN_CRTC_MASTER_EN is enabled for the
2625  * requested crtc.  We go from crtc to connector, which is not reliable
2626  * since it should really be the opposite direction.  If the crtc is
2627  * enabled, find the dig_fe which selects this crtc and ensure that it
2628  * is enabled.  If such a dig_fe is found, find the dig_be which selects
2629  * the found dig_fe and ensure that it is enabled and in DP_SST mode.
2630  * If UNIPHY_PLL_CONTROL1 is enabled, we should disconnect the timing
2631  * from the dp symbol clocks.
2632  */
2633 static bool evergreen_is_dp_sst_stream_enabled(struct radeon_device *rdev,
2634 					       unsigned crtc_id, unsigned *ret_dig_fe)
2635 {
2636 	unsigned i;
2637 	unsigned dig_fe;
2638 	unsigned dig_be;
2639 	unsigned dig_en_be;
2640 	unsigned uniphy_pll;
2641 	unsigned digs_fe_selected;
2642 	unsigned dig_be_mode;
2643 	unsigned dig_fe_mask;
2644 	bool is_enabled = false;
2645 	bool found_crtc = false;
2646 
2647 	/* loop through all running dig_fe to find selected crtc */
2648 	for (i = 0; i < ARRAY_SIZE(ni_dig_offsets); i++) {
2649 		dig_fe = RREG32(NI_DIG_FE_CNTL + ni_dig_offsets[i]);
2650 		if (dig_fe & NI_DIG_FE_CNTL_SYMCLK_FE_ON &&
2651 		    crtc_id == NI_DIG_FE_CNTL_SOURCE_SELECT(dig_fe)) {
2652 			/* found running pipe */
2653 			found_crtc = true;
2654 			dig_fe_mask = 1 << i;
2655 			dig_fe = i;
2656 			break;
2657 		}
2658 	}
2659 
2660 	if (found_crtc) {
2661 		/* loop through all running dig_be to find selected dig_fe */
2662 		for (i = 0; i < ARRAY_SIZE(ni_dig_offsets); i++) {
2663 			dig_be = RREG32(NI_DIG_BE_CNTL + ni_dig_offsets[i]);
2664 			/* is our dig_fe selected by this dig_be? */
2665 			digs_fe_selected = NI_DIG_BE_CNTL_FE_SOURCE_SELECT(dig_be);
2666 			dig_be_mode = NI_DIG_FE_CNTL_MODE(dig_be);
2667 			if (dig_fe_mask & digs_fe_selected &&
2668 			    /* is the dig_be in sst mode? */
2669 			    dig_be_mode == NI_DIG_BE_DPSST) {
2670 				dig_en_be = RREG32(NI_DIG_BE_EN_CNTL +
2671 						   ni_dig_offsets[i]);
2672 				uniphy_pll = RREG32(NI_DCIO_UNIPHY0_PLL_CONTROL1 +
2673 						    ni_tx_offsets[i]);
2674 				/* dig_be enable and tx is running */
2675 				if (dig_en_be & NI_DIG_BE_EN_CNTL_ENABLE &&
2676 				    dig_en_be & NI_DIG_BE_EN_CNTL_SYMBCLK_ON &&
2677 				    uniphy_pll & NI_DCIO_UNIPHY0_PLL_CONTROL1_ENABLE) {
2678 					is_enabled = true;
2679 					*ret_dig_fe = dig_fe;
2680 					break;
2681 				}
2682 			}
2683 		}
2684 	}
2685 
2686 	return is_enabled;
2687 }
2688 
2689 /*
2690  * Blank dig when in dp sst mode
2691  * Dig ignores crtc timing
2692  */
2693 static void evergreen_blank_dp_output(struct radeon_device *rdev,
2694 				      unsigned dig_fe)
2695 {
2696 	unsigned stream_ctrl;
2697 	unsigned fifo_ctrl;
2698 	unsigned counter = 0;
2699 
2700 	if (dig_fe >= ARRAY_SIZE(evergreen_dp_offsets)) {
2701 		DRM_ERROR("invalid dig_fe %d\n", dig_fe);
2702 		return;
2703 	}
2704 
2705 	stream_ctrl = RREG32(EVERGREEN_DP_VID_STREAM_CNTL +
2706 			     evergreen_dp_offsets[dig_fe]);
2707 	if (!(stream_ctrl & EVERGREEN_DP_VID_STREAM_CNTL_ENABLE)) {
2708 		DRM_ERROR("dig %d should be enabled\n", dig_fe);
2709 		return;
2710 	}
2711 
2712 	stream_ctrl &= ~EVERGREEN_DP_VID_STREAM_CNTL_ENABLE;
2713 	WREG32(EVERGREEN_DP_VID_STREAM_CNTL +
2714 	       evergreen_dp_offsets[dig_fe], stream_ctrl);
2715 
2716 	stream_ctrl = RREG32(EVERGREEN_DP_VID_STREAM_CNTL +
2717 			     evergreen_dp_offsets[dig_fe]);
2718 	while (counter < 32 && stream_ctrl & EVERGREEN_DP_VID_STREAM_STATUS) {
2719 		msleep(1);
2720 		counter++;
2721 		stream_ctrl = RREG32(EVERGREEN_DP_VID_STREAM_CNTL +
2722 				     evergreen_dp_offsets[dig_fe]);
2723 	}
2724 	if (counter >= 32)
2725 		DRM_ERROR("timed out waiting for video stream to disable (%d)\n", counter);
2726 
2727 	fifo_ctrl = RREG32(EVERGREEN_DP_STEER_FIFO + evergreen_dp_offsets[dig_fe]);
2728 	fifo_ctrl |= EVERGREEN_DP_STEER_FIFO_RESET;
2729 	WREG32(EVERGREEN_DP_STEER_FIFO + evergreen_dp_offsets[dig_fe], fifo_ctrl);
2730 
2731 }
2732 
2733 void evergreen_mc_stop(struct radeon_device *rdev, struct evergreen_mc_save *save)
2734 {
2735 	u32 crtc_enabled, tmp, frame_count, blackout;
2736 	int i, j;
2737 	unsigned dig_fe;
2738 
2739 	if (!ASIC_IS_NODCE(rdev)) {
2740 		save->vga_render_control = RREG32(VGA_RENDER_CONTROL);
2741 		save->vga_hdp_control = RREG32(VGA_HDP_CONTROL);
2742 
2743 		/* disable VGA render */
2744 		WREG32(VGA_RENDER_CONTROL, 0);
2745 	}
2746 	/* blank the display controllers */
2747 	for (i = 0; i < rdev->num_crtc; i++) {
2748 		crtc_enabled = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]) & EVERGREEN_CRTC_MASTER_EN;
2749 		if (crtc_enabled) {
2750 			save->crtc_enabled[i] = true;
2751 			if (ASIC_IS_DCE6(rdev)) {
2752 				tmp = RREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i]);
2753 				if (!(tmp & EVERGREEN_CRTC_BLANK_DATA_EN)) {
2754 					radeon_wait_for_vblank(rdev, i);
2755 					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
2756 					tmp |= EVERGREEN_CRTC_BLANK_DATA_EN;
2757 					WREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i], tmp);
2758 					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
2759 				}
2760 			} else {
2761 				tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
2762 				if (!(tmp & EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE)) {
2763 					radeon_wait_for_vblank(rdev, i);
2764 					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
2765 					tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
2766 					WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
2767 					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
2768 				}
2769 			}
2770 			/* wait for the next frame */
2771 			frame_count = radeon_get_vblank_counter(rdev, i);
2772 			for (j = 0; j < rdev->usec_timeout; j++) {
2773 				if (radeon_get_vblank_counter(rdev, i) != frame_count)
2774 					break;
2775 				udelay(1);
2776 			}
2777 			/* We should disable the dig if it drives a dp sst
2778 			 * stream, but we are in radeon_device_init and the
2779 			 * topology is unknown; it only becomes available
2780 			 * after radeon_modeset_init.
2781 			 * radeon_atom_encoder_dpms_dig would do the job if we
2782 			 * initialized it properly; for now we do it manually.
2783 			 */
2784 			if (ASIC_IS_DCE5(rdev) &&
2785 			    evergreen_is_dp_sst_stream_enabled(rdev, i, &dig_fe))
2786 				evergreen_blank_dp_output(rdev, dig_fe);
2787 			/* we could remove the 6 lines below */
2788 			/* XXX this is a hack to avoid strange behavior with EFI on certain systems */
2789 			WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
2790 			tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
2791 			tmp &= ~EVERGREEN_CRTC_MASTER_EN;
2792 			WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
2793 			WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
2794 			save->crtc_enabled[i] = false;
2795 			/* ***** */
2796 		} else {
2797 			save->crtc_enabled[i] = false;
2798 		}
2799 	}
2800 
2801 	radeon_mc_wait_for_idle(rdev);
2802 
2803 	blackout = RREG32(MC_SHARED_BLACKOUT_CNTL);
2804 	if ((blackout & BLACKOUT_MODE_MASK) != 1) {
2805 		/* Block CPU access */
2806 		WREG32(BIF_FB_EN, 0);
2807 		/* blackout the MC */
2808 		blackout &= ~BLACKOUT_MODE_MASK;
2809 		WREG32(MC_SHARED_BLACKOUT_CNTL, blackout | 1);
2810 	}
2811 	/* wait for the MC to settle */
2812 	udelay(100);
2813 
2814 	/* lock double buffered regs */
2815 	for (i = 0; i < rdev->num_crtc; i++) {
2816 		if (save->crtc_enabled[i]) {
2817 			tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
2818 			if (!(tmp & EVERGREEN_GRPH_UPDATE_LOCK)) {
2819 				tmp |= EVERGREEN_GRPH_UPDATE_LOCK;
2820 				WREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i], tmp);
2821 			}
2822 			tmp = RREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i]);
2823 			if (!(tmp & 1)) {
2824 				tmp |= 1;
2825 				WREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i], tmp);
2826 			}
2827 		}
2828 	}
2829 }
2830 
2831 void evergreen_mc_resume(struct radeon_device *rdev, struct evergreen_mc_save *save)
2832 {
2833 	u32 tmp, frame_count;
2834 	int i, j;
2835 
2836 	/* update crtc base addresses */
2837 	for (i = 0; i < rdev->num_crtc; i++) {
2838 		WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + crtc_offsets[i],
2839 		       upper_32_bits(rdev->mc.vram_start));
2840 		WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + crtc_offsets[i],
2841 		       upper_32_bits(rdev->mc.vram_start));
2842 		WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + crtc_offsets[i],
2843 		       (u32)rdev->mc.vram_start);
2844 		WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + crtc_offsets[i],
2845 		       (u32)rdev->mc.vram_start);
2846 	}
2847 
2848 	if (!ASIC_IS_NODCE(rdev)) {
2849 		WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS_HIGH, upper_32_bits(rdev->mc.vram_start));
2850 		WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS, (u32)rdev->mc.vram_start);
2851 	}
2852 
2853 	/* unlock regs and wait for update */
2854 	for (i = 0; i < rdev->num_crtc; i++) {
2855 		if (save->crtc_enabled[i]) {
2856 			tmp = RREG32(EVERGREEN_MASTER_UPDATE_MODE + crtc_offsets[i]);
2857 			if ((tmp & 0x7) != 0) {
2858 				tmp &= ~0x7;
2859 				WREG32(EVERGREEN_MASTER_UPDATE_MODE + crtc_offsets[i], tmp);
2860 			}
2861 			tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
2862 			if (tmp & EVERGREEN_GRPH_UPDATE_LOCK) {
2863 				tmp &= ~EVERGREEN_GRPH_UPDATE_LOCK;
2864 				WREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i], tmp);
2865 			}
2866 			tmp = RREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i]);
2867 			if (tmp & 1) {
2868 				tmp &= ~1;
2869 				WREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i], tmp);
2870 			}
2871 			for (j = 0; j < rdev->usec_timeout; j++) {
2872 				tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
2873 				if ((tmp & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING) == 0)
2874 					break;
2875 				udelay(1);
2876 			}
2877 		}
2878 	}
2879 
2880 	/* unblackout the MC */
2881 	tmp = RREG32(MC_SHARED_BLACKOUT_CNTL);
2882 	tmp &= ~BLACKOUT_MODE_MASK;
2883 	WREG32(MC_SHARED_BLACKOUT_CNTL, tmp);
2884 	/* allow CPU access */
2885 	WREG32(BIF_FB_EN, FB_READ_EN | FB_WRITE_EN);
2886 
2887 	for (i = 0; i < rdev->num_crtc; i++) {
2888 		if (save->crtc_enabled[i]) {
2889 			if (ASIC_IS_DCE6(rdev)) {
2890 				tmp = RREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i]);
2891 				tmp &= ~EVERGREEN_CRTC_BLANK_DATA_EN;
2892 				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
2893 				WREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i], tmp);
2894 				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
2895 			} else {
2896 				tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
2897 				tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
2898 				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
2899 				WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
2900 				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
2901 			}
2902 			/* wait for the next frame */
2903 			frame_count = radeon_get_vblank_counter(rdev, i);
2904 			for (j = 0; j < rdev->usec_timeout; j++) {
2905 				if (radeon_get_vblank_counter(rdev, i) != frame_count)
2906 					break;
2907 				udelay(1);
2908 			}
2909 		}
2910 	}
2911 	if (!ASIC_IS_NODCE(rdev)) {
2912 		/* Unlock vga access */
2913 		WREG32(VGA_HDP_CONTROL, save->vga_hdp_control);
2914 		mdelay(1);
2915 		WREG32(VGA_RENDER_CONTROL, save->vga_render_control);
2916 	}
2917 }
2918 
2919 void evergreen_mc_program(struct radeon_device *rdev)
2920 {
2921 	struct evergreen_mc_save save;
2922 	u32 tmp;
2923 	int i, j;
2924 
2925 	/* Initialize HDP */
2926 	for (i = 0, j = 0; i < 32; i++, j += 0x18) {
2927 		WREG32((0x2c14 + j), 0x00000000);
2928 		WREG32((0x2c18 + j), 0x00000000);
2929 		WREG32((0x2c1c + j), 0x00000000);
2930 		WREG32((0x2c20 + j), 0x00000000);
2931 		WREG32((0x2c24 + j), 0x00000000);
2932 	}
2933 	WREG32(HDP_REG_COHERENCY_FLUSH_CNTL, 0);
2934 
2935 	evergreen_mc_stop(rdev, &save);
2936 	if (evergreen_mc_wait_for_idle(rdev)) {
2937 		dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
2938 	}
2939 	/* Lock out access through VGA aperture */
2940 	WREG32(VGA_HDP_CONTROL, VGA_MEMORY_DISABLE);
2941 	/* Update configuration */
2942 	if (rdev->flags & RADEON_IS_AGP) {
2943 		if (rdev->mc.vram_start < rdev->mc.gtt_start) {
2944 			/* VRAM before AGP */
2945 			WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
2946 				rdev->mc.vram_start >> 12);
2947 			WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
2948 				rdev->mc.gtt_end >> 12);
2949 		} else {
2950 			/* VRAM after AGP */
2951 			WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
2952 				rdev->mc.gtt_start >> 12);
2953 			WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
2954 				rdev->mc.vram_end >> 12);
2955 		}
2956 	} else {
2957 		WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
2958 			rdev->mc.vram_start >> 12);
2959 		WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
2960 			rdev->mc.vram_end >> 12);
2961 	}
2962 	WREG32(MC_VM_SYSTEM_APERTURE_DEFAULT_ADDR, rdev->vram_scratch.gpu_addr >> 12);
2963 	/* llano/ontario only */
2964 	if ((rdev->family == CHIP_PALM) ||
2965 	    (rdev->family == CHIP_SUMO) ||
2966 	    (rdev->family == CHIP_SUMO2)) {
2967 		tmp = RREG32(MC_FUS_VM_FB_OFFSET) & 0x000FFFFF;
2968 		tmp |= ((rdev->mc.vram_end >> 20) & 0xF) << 24;
2969 		tmp |= ((rdev->mc.vram_start >> 20) & 0xF) << 20;
2970 		WREG32(MC_FUS_VM_FB_OFFSET, tmp);
2971 	}
2972 	tmp = ((rdev->mc.vram_end >> 24) & 0xFFFF) << 16;
2973 	tmp |= ((rdev->mc.vram_start >> 24) & 0xFFFF);
2974 	WREG32(MC_VM_FB_LOCATION, tmp);
2975 	WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8));
2976 	WREG32(HDP_NONSURFACE_INFO, (2 << 7) | (1 << 30));
2977 	WREG32(HDP_NONSURFACE_SIZE, 0x3FFFFFFF);
2978 	if (rdev->flags & RADEON_IS_AGP) {
2979 		WREG32(MC_VM_AGP_TOP, rdev->mc.gtt_end >> 16);
2980 		WREG32(MC_VM_AGP_BOT, rdev->mc.gtt_start >> 16);
2981 		WREG32(MC_VM_AGP_BASE, rdev->mc.agp_base >> 22);
2982 	} else {
2983 		WREG32(MC_VM_AGP_BASE, 0);
2984 		WREG32(MC_VM_AGP_TOP, 0x0FFFFFFF);
2985 		WREG32(MC_VM_AGP_BOT, 0x0FFFFFFF);
2986 	}
2987 	if (evergreen_mc_wait_for_idle(rdev)) {
2988 		dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
2989 	}
2990 	evergreen_mc_resume(rdev, &save);
2991 	/* we need to own VRAM, so turn off the VGA renderer here
2992 	 * to stop it overwriting our objects */
2993 	rv515_vga_render_disable(rdev);
2994 }
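/* Sketch of the MC_VM_FB_LOCATION packing above (hypothetical 1 GiB
 * aperture): vram_start = 0x0, vram_end = 0x3FFFFFFF ->
 * tmp = (((0x3FFFFFFF >> 24) & 0xFFFF) << 16) | (0x0 >> 24) = 0x003F0000,
 * i.e. the fb top and base are programmed in 16 MiB granularity.
 */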
2995 
2996 /*
2997  * CP.
2998  */
2999 void evergreen_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib)
3000 {
3001 	struct radeon_ring *ring = &rdev->ring[ib->ring];
3002 	u32 next_rptr;
3003 
3004 	/* set to DX10/11 mode */
3005 	radeon_ring_write(ring, PACKET3(PACKET3_MODE_CONTROL, 0));
3006 	radeon_ring_write(ring, 1);
3007 
3008 	if (ring->rptr_save_reg) {
3009 		next_rptr = ring->wptr + 3 + 4;
3010 		radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
3011 		radeon_ring_write(ring, ((ring->rptr_save_reg -
3012 					  PACKET3_SET_CONFIG_REG_START) >> 2));
3013 		radeon_ring_write(ring, next_rptr);
3014 	} else if (rdev->wb.enabled) {
3015 		next_rptr = ring->wptr + 5 + 4;
3016 		radeon_ring_write(ring, PACKET3(PACKET3_MEM_WRITE, 3));
3017 		radeon_ring_write(ring, ring->next_rptr_gpu_addr & 0xfffffffc);
3018 		radeon_ring_write(ring, (upper_32_bits(ring->next_rptr_gpu_addr) & 0xff) | (1 << 18));
3019 		radeon_ring_write(ring, next_rptr);
3020 		radeon_ring_write(ring, 0);
3021 	}
3022 
3023 	radeon_ring_write(ring, PACKET3(PACKET3_INDIRECT_BUFFER, 2));
3024 	radeon_ring_write(ring,
3025 #ifdef __BIG_ENDIAN
3026 			  (2 << 0) |
3027 #endif
3028 			  (ib->gpu_addr & 0xFFFFFFFC));
3029 	radeon_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFF);
3030 	radeon_ring_write(ring, ib->length_dw);
3031 }
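/* Dword accounting for the next_rptr values above: the SET_CONFIG_REG
 * write is 3 dwords (header, reg offset, value) and the trailing
 * INDIRECT_BUFFER packet is 4 dwords, hence wptr + 3 + 4; the MEM_WRITE
 * variant is 5 dwords (header plus 4 payload dwords), hence wptr + 5 + 4.
 */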
3032 
3033 
3034 static int evergreen_cp_load_microcode(struct radeon_device *rdev)
3035 {
3036 	const __be32 *fw_data;
3037 	int i;
3038 
3039 	if (!rdev->me_fw || !rdev->pfp_fw)
3040 		return -EINVAL;
3041 
3042 	r700_cp_stop(rdev);
3043 	WREG32(CP_RB_CNTL,
3044 #ifdef __BIG_ENDIAN
3045 	       BUF_SWAP_32BIT |
3046 #endif
3047 	       RB_NO_UPDATE | RB_BLKSZ(15) | RB_BUFSZ(3));
3048 
3049 	fw_data = (const __be32 *)rdev->pfp_fw->data;
3050 	WREG32(CP_PFP_UCODE_ADDR, 0);
3051 	for (i = 0; i < EVERGREEN_PFP_UCODE_SIZE; i++)
3052 		WREG32(CP_PFP_UCODE_DATA, be32_to_cpup(fw_data++));
3053 	WREG32(CP_PFP_UCODE_ADDR, 0);
3054 
3055 	fw_data = (const __be32 *)rdev->me_fw->data;
3056 	WREG32(CP_ME_RAM_WADDR, 0);
3057 	for (i = 0; i < EVERGREEN_PM4_UCODE_SIZE; i++)
3058 		WREG32(CP_ME_RAM_DATA, be32_to_cpup(fw_data++));
3059 
3060 	WREG32(CP_PFP_UCODE_ADDR, 0);
3061 	WREG32(CP_ME_RAM_WADDR, 0);
3062 	WREG32(CP_ME_RAM_RADDR, 0);
3063 	return 0;
3064 }
3065 
3066 static int evergreen_cp_start(struct radeon_device *rdev)
3067 {
3068 	struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
3069 	int r, i;
3070 	uint32_t cp_me;
3071 
3072 	r = radeon_ring_lock(rdev, ring, 7);
3073 	if (r) {
3074 		DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
3075 		return r;
3076 	}
3077 	radeon_ring_write(ring, PACKET3(PACKET3_ME_INITIALIZE, 5));
3078 	radeon_ring_write(ring, 0x1);
3079 	radeon_ring_write(ring, 0x0);
3080 	radeon_ring_write(ring, rdev->config.evergreen.max_hw_contexts - 1);
3081 	radeon_ring_write(ring, PACKET3_ME_INITIALIZE_DEVICE_ID(1));
3082 	radeon_ring_write(ring, 0);
3083 	radeon_ring_write(ring, 0);
3084 	radeon_ring_unlock_commit(rdev, ring, false);
3085 
3086 	cp_me = 0xff;
3087 	WREG32(CP_ME_CNTL, cp_me);
3088 
3089 	r = radeon_ring_lock(rdev, ring, evergreen_default_size + 19);
3090 	if (r) {
3091 		DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
3092 		return r;
3093 	}
3094 
3095 	/* setup clear context state */
3096 	radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
3097 	radeon_ring_write(ring, PACKET3_PREAMBLE_BEGIN_CLEAR_STATE);
3098 
3099 	for (i = 0; i < evergreen_default_size; i++)
3100 		radeon_ring_write(ring, evergreen_default_state[i]);
3101 
3102 	radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
3103 	radeon_ring_write(ring, PACKET3_PREAMBLE_END_CLEAR_STATE);
3104 
3105 	/* set clear context state */
3106 	radeon_ring_write(ring, PACKET3(PACKET3_CLEAR_STATE, 0));
3107 	radeon_ring_write(ring, 0);
3108 
3109 	/* SQ_VTX_BASE_VTX_LOC */
3110 	radeon_ring_write(ring, 0xc0026f00);
3111 	radeon_ring_write(ring, 0x00000000);
3112 	radeon_ring_write(ring, 0x00000000);
3113 	radeon_ring_write(ring, 0x00000000);
3114 
3115 	/* Clear consts */
3116 	radeon_ring_write(ring, 0xc0036f00);
3117 	radeon_ring_write(ring, 0x00000bc4);
3118 	radeon_ring_write(ring, 0xffffffff);
3119 	radeon_ring_write(ring, 0xffffffff);
3120 	radeon_ring_write(ring, 0xffffffff);
3121 
3122 	radeon_ring_write(ring, 0xc0026900);
3123 	radeon_ring_write(ring, 0x00000316);
3124 	radeon_ring_write(ring, 0x0000000e); /* VGT_VERTEX_REUSE_BLOCK_CNTL */
3125 	radeon_ring_write(ring, 0x00000010); /* VGT_OUT_DEALLOC_CNTL */
3126 
3127 	radeon_ring_unlock_commit(rdev, ring, false);
3128 
3129 	return 0;
3130 }
3131 
3132 static int evergreen_cp_resume(struct radeon_device *rdev)
3133 {
3134 	struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
3135 	u32 tmp;
3136 	u32 rb_bufsz;
3137 	int r;
3138 
3139 	/* Reset cp; if cp is reset, then PA, SH, VGT also need to be reset */
3140 	WREG32(GRBM_SOFT_RESET, (SOFT_RESET_CP |
3141 				 SOFT_RESET_PA |
3142 				 SOFT_RESET_SH |
3143 				 SOFT_RESET_VGT |
3144 				 SOFT_RESET_SPI |
3145 				 SOFT_RESET_SX));
3146 	RREG32(GRBM_SOFT_RESET);
3147 	mdelay(15);
3148 	WREG32(GRBM_SOFT_RESET, 0);
3149 	RREG32(GRBM_SOFT_RESET);
3150 
3151 	/* Set ring buffer size */
3152 	rb_bufsz = order_base_2(ring->ring_size / 8);
3153 	tmp = (order_base_2(RADEON_GPU_PAGE_SIZE/8) << 8) | rb_bufsz;
3154 #ifdef __BIG_ENDIAN
3155 	tmp |= BUF_SWAP_32BIT;
3156 #endif
3157 	WREG32(CP_RB_CNTL, tmp);
3158 	WREG32(CP_SEM_WAIT_TIMER, 0x0);
3159 	WREG32(CP_SEM_INCOMPLETE_TIMER_CNTL, 0x0);
3160 
3161 	/* Set the write pointer delay */
3162 	WREG32(CP_RB_WPTR_DELAY, 0);
3163 
3164 	/* Initialize the ring buffer's read and write pointers */
3165 	WREG32(CP_RB_CNTL, tmp | RB_RPTR_WR_ENA);
3166 	WREG32(CP_RB_RPTR_WR, 0);
3167 	ring->wptr = 0;
3168 	WREG32(CP_RB_WPTR, ring->wptr);
3169 
3170 	/* set the wb address whether it's enabled or not */
3171 	WREG32(CP_RB_RPTR_ADDR,
3172 	       ((rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFFFFFFFC));
3173 	WREG32(CP_RB_RPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFF);
3174 	WREG32(SCRATCH_ADDR, ((rdev->wb.gpu_addr + RADEON_WB_SCRATCH_OFFSET) >> 8) & 0xFFFFFFFF);
3175 
3176 	if (rdev->wb.enabled)
3177 		WREG32(SCRATCH_UMSK, 0xff);
3178 	else {
3179 		tmp |= RB_NO_UPDATE;
3180 		WREG32(SCRATCH_UMSK, 0);
3181 	}
3182 
3183 	mdelay(1);
3184 	WREG32(CP_RB_CNTL, tmp);
3185 
3186 	WREG32(CP_RB_BASE, ring->gpu_addr >> 8);
3187 	WREG32(CP_DEBUG, (1 << 27) | (1 << 28));
3188 
3189 	evergreen_cp_start(rdev);
3190 	ring->ready = true;
3191 	r = radeon_ring_test(rdev, RADEON_RING_TYPE_GFX_INDEX, ring);
3192 	if (r) {
3193 		ring->ready = false;
3194 		return r;
3195 	}
3196 	return 0;
3197 }
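/* Example of the ring-size encoding above (assumed 1 MiB ring):
 * rb_bufsz = order_base_2(1048576 / 8) = 17, while
 * order_base_2(RADEON_GPU_PAGE_SIZE / 8) = order_base_2(512) = 9 is
 * shifted left by 8 into the RB_BLKSZ field of CP_RB_CNTL.
 */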
3198 
3199 /*
3200  * Core functions
3201  */
3202 static void evergreen_gpu_init(struct radeon_device *rdev)
3203 {
3204 	u32 gb_addr_config;
3205 	u32 mc_shared_chmap, mc_arb_ramcfg;
3206 	u32 sx_debug_1;
3207 	u32 smx_dc_ctl0;
3208 	u32 sq_config;
3209 	u32 sq_lds_resource_mgmt;
3210 	u32 sq_gpr_resource_mgmt_1;
3211 	u32 sq_gpr_resource_mgmt_2;
3212 	u32 sq_gpr_resource_mgmt_3;
3213 	u32 sq_thread_resource_mgmt;
3214 	u32 sq_thread_resource_mgmt_2;
3215 	u32 sq_stack_resource_mgmt_1;
3216 	u32 sq_stack_resource_mgmt_2;
3217 	u32 sq_stack_resource_mgmt_3;
3218 	u32 vgt_cache_invalidation;
3219 	u32 hdp_host_path_cntl, tmp;
3220 	u32 disabled_rb_mask;
3221 	int i, j, ps_thread_count;
3222 
3223 	switch (rdev->family) {
3224 	case CHIP_CYPRESS:
3225 	case CHIP_HEMLOCK:
3226 		rdev->config.evergreen.num_ses = 2;
3227 		rdev->config.evergreen.max_pipes = 4;
3228 		rdev->config.evergreen.max_tile_pipes = 8;
3229 		rdev->config.evergreen.max_simds = 10;
3230 		rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
3231 		rdev->config.evergreen.max_gprs = 256;
3232 		rdev->config.evergreen.max_threads = 248;
3233 		rdev->config.evergreen.max_gs_threads = 32;
3234 		rdev->config.evergreen.max_stack_entries = 512;
3235 		rdev->config.evergreen.sx_num_of_sets = 4;
3236 		rdev->config.evergreen.sx_max_export_size = 256;
3237 		rdev->config.evergreen.sx_max_export_pos_size = 64;
3238 		rdev->config.evergreen.sx_max_export_smx_size = 192;
3239 		rdev->config.evergreen.max_hw_contexts = 8;
3240 		rdev->config.evergreen.sq_num_cf_insts = 2;
3241 
3242 		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3243 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3244 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3245 		gb_addr_config = CYPRESS_GB_ADDR_CONFIG_GOLDEN;
3246 		break;
3247 	case CHIP_JUNIPER:
3248 		rdev->config.evergreen.num_ses = 1;
3249 		rdev->config.evergreen.max_pipes = 4;
3250 		rdev->config.evergreen.max_tile_pipes = 4;
3251 		rdev->config.evergreen.max_simds = 10;
3252 		rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
3253 		rdev->config.evergreen.max_gprs = 256;
3254 		rdev->config.evergreen.max_threads = 248;
3255 		rdev->config.evergreen.max_gs_threads = 32;
3256 		rdev->config.evergreen.max_stack_entries = 512;
3257 		rdev->config.evergreen.sx_num_of_sets = 4;
3258 		rdev->config.evergreen.sx_max_export_size = 256;
3259 		rdev->config.evergreen.sx_max_export_pos_size = 64;
3260 		rdev->config.evergreen.sx_max_export_smx_size = 192;
3261 		rdev->config.evergreen.max_hw_contexts = 8;
3262 		rdev->config.evergreen.sq_num_cf_insts = 2;
3263 
3264 		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3265 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3266 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3267 		gb_addr_config = JUNIPER_GB_ADDR_CONFIG_GOLDEN;
3268 		break;
3269 	case CHIP_REDWOOD:
3270 		rdev->config.evergreen.num_ses = 1;
3271 		rdev->config.evergreen.max_pipes = 4;
3272 		rdev->config.evergreen.max_tile_pipes = 4;
3273 		rdev->config.evergreen.max_simds = 5;
3274 		rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3275 		rdev->config.evergreen.max_gprs = 256;
3276 		rdev->config.evergreen.max_threads = 248;
3277 		rdev->config.evergreen.max_gs_threads = 32;
3278 		rdev->config.evergreen.max_stack_entries = 256;
3279 		rdev->config.evergreen.sx_num_of_sets = 4;
3280 		rdev->config.evergreen.sx_max_export_size = 256;
3281 		rdev->config.evergreen.sx_max_export_pos_size = 64;
3282 		rdev->config.evergreen.sx_max_export_smx_size = 192;
3283 		rdev->config.evergreen.max_hw_contexts = 8;
3284 		rdev->config.evergreen.sq_num_cf_insts = 2;
3285 
3286 		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3287 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3288 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3289 		gb_addr_config = REDWOOD_GB_ADDR_CONFIG_GOLDEN;
3290 		break;
3291 	case CHIP_CEDAR:
3292 	default:
3293 		rdev->config.evergreen.num_ses = 1;
3294 		rdev->config.evergreen.max_pipes = 2;
3295 		rdev->config.evergreen.max_tile_pipes = 2;
3296 		rdev->config.evergreen.max_simds = 2;
3297 		rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3298 		rdev->config.evergreen.max_gprs = 256;
3299 		rdev->config.evergreen.max_threads = 192;
3300 		rdev->config.evergreen.max_gs_threads = 16;
3301 		rdev->config.evergreen.max_stack_entries = 256;
3302 		rdev->config.evergreen.sx_num_of_sets = 4;
3303 		rdev->config.evergreen.sx_max_export_size = 128;
3304 		rdev->config.evergreen.sx_max_export_pos_size = 32;
3305 		rdev->config.evergreen.sx_max_export_smx_size = 96;
3306 		rdev->config.evergreen.max_hw_contexts = 4;
3307 		rdev->config.evergreen.sq_num_cf_insts = 1;
3308 
3309 		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3310 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3311 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3312 		gb_addr_config = CEDAR_GB_ADDR_CONFIG_GOLDEN;
3313 		break;
3314 	case CHIP_PALM:
3315 		rdev->config.evergreen.num_ses = 1;
3316 		rdev->config.evergreen.max_pipes = 2;
3317 		rdev->config.evergreen.max_tile_pipes = 2;
3318 		rdev->config.evergreen.max_simds = 2;
3319 		rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3320 		rdev->config.evergreen.max_gprs = 256;
3321 		rdev->config.evergreen.max_threads = 192;
3322 		rdev->config.evergreen.max_gs_threads = 16;
3323 		rdev->config.evergreen.max_stack_entries = 256;
3324 		rdev->config.evergreen.sx_num_of_sets = 4;
3325 		rdev->config.evergreen.sx_max_export_size = 128;
3326 		rdev->config.evergreen.sx_max_export_pos_size = 32;
3327 		rdev->config.evergreen.sx_max_export_smx_size = 96;
3328 		rdev->config.evergreen.max_hw_contexts = 4;
3329 		rdev->config.evergreen.sq_num_cf_insts = 1;
3330 
3331 		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3332 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3333 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3334 		gb_addr_config = CEDAR_GB_ADDR_CONFIG_GOLDEN;
3335 		break;
3336 	case CHIP_SUMO:
3337 		rdev->config.evergreen.num_ses = 1;
3338 		rdev->config.evergreen.max_pipes = 4;
3339 		rdev->config.evergreen.max_tile_pipes = 4;
3340 		if (rdev->pdev->device == 0x9648)
3341 			rdev->config.evergreen.max_simds = 3;
3342 		else if ((rdev->pdev->device == 0x9647) ||
3343 			 (rdev->pdev->device == 0x964a))
3344 			rdev->config.evergreen.max_simds = 4;
3345 		else
3346 			rdev->config.evergreen.max_simds = 5;
3347 		rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3348 		rdev->config.evergreen.max_gprs = 256;
3349 		rdev->config.evergreen.max_threads = 248;
3350 		rdev->config.evergreen.max_gs_threads = 32;
3351 		rdev->config.evergreen.max_stack_entries = 256;
3352 		rdev->config.evergreen.sx_num_of_sets = 4;
3353 		rdev->config.evergreen.sx_max_export_size = 256;
3354 		rdev->config.evergreen.sx_max_export_pos_size = 64;
3355 		rdev->config.evergreen.sx_max_export_smx_size = 192;
3356 		rdev->config.evergreen.max_hw_contexts = 8;
3357 		rdev->config.evergreen.sq_num_cf_insts = 2;
3358 
3359 		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3360 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3361 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3362 		gb_addr_config = SUMO_GB_ADDR_CONFIG_GOLDEN;
3363 		break;
3364 	case CHIP_SUMO2:
3365 		rdev->config.evergreen.num_ses = 1;
3366 		rdev->config.evergreen.max_pipes = 4;
3367 		rdev->config.evergreen.max_tile_pipes = 4;
3368 		rdev->config.evergreen.max_simds = 2;
3369 		rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3370 		rdev->config.evergreen.max_gprs = 256;
3371 		rdev->config.evergreen.max_threads = 248;
3372 		rdev->config.evergreen.max_gs_threads = 32;
3373 		rdev->config.evergreen.max_stack_entries = 512;
3374 		rdev->config.evergreen.sx_num_of_sets = 4;
3375 		rdev->config.evergreen.sx_max_export_size = 256;
3376 		rdev->config.evergreen.sx_max_export_pos_size = 64;
3377 		rdev->config.evergreen.sx_max_export_smx_size = 192;
3378 		rdev->config.evergreen.max_hw_contexts = 4;
3379 		rdev->config.evergreen.sq_num_cf_insts = 2;
3380 
3381 		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3382 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3383 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3384 		gb_addr_config = SUMO2_GB_ADDR_CONFIG_GOLDEN;
3385 		break;
3386 	case CHIP_BARTS:
3387 		rdev->config.evergreen.num_ses = 2;
3388 		rdev->config.evergreen.max_pipes = 4;
3389 		rdev->config.evergreen.max_tile_pipes = 8;
3390 		rdev->config.evergreen.max_simds = 7;
3391 		rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
3392 		rdev->config.evergreen.max_gprs = 256;
3393 		rdev->config.evergreen.max_threads = 248;
3394 		rdev->config.evergreen.max_gs_threads = 32;
3395 		rdev->config.evergreen.max_stack_entries = 512;
3396 		rdev->config.evergreen.sx_num_of_sets = 4;
3397 		rdev->config.evergreen.sx_max_export_size = 256;
3398 		rdev->config.evergreen.sx_max_export_pos_size = 64;
3399 		rdev->config.evergreen.sx_max_export_smx_size = 192;
3400 		rdev->config.evergreen.max_hw_contexts = 8;
3401 		rdev->config.evergreen.sq_num_cf_insts = 2;
3402 
3403 		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3404 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3405 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3406 		gb_addr_config = BARTS_GB_ADDR_CONFIG_GOLDEN;
3407 		break;
3408 	case CHIP_TURKS:
3409 		rdev->config.evergreen.num_ses = 1;
3410 		rdev->config.evergreen.max_pipes = 4;
3411 		rdev->config.evergreen.max_tile_pipes = 4;
3412 		rdev->config.evergreen.max_simds = 6;
3413 		rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3414 		rdev->config.evergreen.max_gprs = 256;
3415 		rdev->config.evergreen.max_threads = 248;
3416 		rdev->config.evergreen.max_gs_threads = 32;
3417 		rdev->config.evergreen.max_stack_entries = 256;
3418 		rdev->config.evergreen.sx_num_of_sets = 4;
3419 		rdev->config.evergreen.sx_max_export_size = 256;
3420 		rdev->config.evergreen.sx_max_export_pos_size = 64;
3421 		rdev->config.evergreen.sx_max_export_smx_size = 192;
3422 		rdev->config.evergreen.max_hw_contexts = 8;
3423 		rdev->config.evergreen.sq_num_cf_insts = 2;
3424 
3425 		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3426 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3427 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3428 		gb_addr_config = TURKS_GB_ADDR_CONFIG_GOLDEN;
3429 		break;
3430 	case CHIP_CAICOS:
3431 		rdev->config.evergreen.num_ses = 1;
3432 		rdev->config.evergreen.max_pipes = 2;
3433 		rdev->config.evergreen.max_tile_pipes = 2;
3434 		rdev->config.evergreen.max_simds = 2;
3435 		rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3436 		rdev->config.evergreen.max_gprs = 256;
3437 		rdev->config.evergreen.max_threads = 192;
3438 		rdev->config.evergreen.max_gs_threads = 16;
3439 		rdev->config.evergreen.max_stack_entries = 256;
3440 		rdev->config.evergreen.sx_num_of_sets = 4;
3441 		rdev->config.evergreen.sx_max_export_size = 128;
3442 		rdev->config.evergreen.sx_max_export_pos_size = 32;
3443 		rdev->config.evergreen.sx_max_export_smx_size = 96;
3444 		rdev->config.evergreen.max_hw_contexts = 4;
3445 		rdev->config.evergreen.sq_num_cf_insts = 1;
3446 
3447 		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3448 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3449 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3450 		gb_addr_config = CAICOS_GB_ADDR_CONFIG_GOLDEN;
3451 		break;
3452 	}
3453 
3454 	/* Initialize HDP */
3455 	for (i = 0, j = 0; i < 32; i++, j += 0x18) {
3456 		WREG32((0x2c14 + j), 0x00000000);
3457 		WREG32((0x2c18 + j), 0x00000000);
3458 		WREG32((0x2c1c + j), 0x00000000);
3459 		WREG32((0x2c20 + j), 0x00000000);
3460 		WREG32((0x2c24 + j), 0x00000000);
3461 	}
3462 
3463 	WREG32(GRBM_CNTL, GRBM_READ_TIMEOUT(0xff));
3464 	WREG32(SRBM_INT_CNTL, 0x1);
3465 	WREG32(SRBM_INT_ACK, 0x1);
3466 
3467 	evergreen_fix_pci_max_read_req_size(rdev);
3468 
3469 	mc_shared_chmap = RREG32(MC_SHARED_CHMAP);
3470 	if ((rdev->family == CHIP_PALM) ||
3471 	    (rdev->family == CHIP_SUMO) ||
3472 	    (rdev->family == CHIP_SUMO2))
3473 		mc_arb_ramcfg = RREG32(FUS_MC_ARB_RAMCFG);
3474 	else
3475 		mc_arb_ramcfg = RREG32(MC_ARB_RAMCFG);
3476 
3477 	/* setup tiling info dword.  gb_addr_config is not adequate since it does
3478 	 * not have bank info, so create a custom tiling dword.
3479 	 * bits 3:0   num_pipes
3480 	 * bits 7:4   num_banks
3481 	 * bits 11:8  group_size
3482 	 * bits 15:12 row_size
3483 	 */
3484 	rdev->config.evergreen.tile_config = 0;
3485 	switch (rdev->config.evergreen.max_tile_pipes) {
3486 	case 1:
3487 	default:
3488 		rdev->config.evergreen.tile_config |= (0 << 0);
3489 		break;
3490 	case 2:
3491 		rdev->config.evergreen.tile_config |= (1 << 0);
3492 		break;
3493 	case 4:
3494 		rdev->config.evergreen.tile_config |= (2 << 0);
3495 		break;
3496 	case 8:
3497 		rdev->config.evergreen.tile_config |= (3 << 0);
3498 		break;
3499 	}
3500 	/* num banks is 8 on all fusion asics. 0 = 4, 1 = 8, 2 = 16 */
3501 	if (rdev->flags & RADEON_IS_IGP)
3502 		rdev->config.evergreen.tile_config |= 1 << 4;
3503 	else {
3504 		switch ((mc_arb_ramcfg & NOOFBANK_MASK) >> NOOFBANK_SHIFT) {
3505 		case 0: /* four banks */
3506 			rdev->config.evergreen.tile_config |= 0 << 4;
3507 			break;
3508 		case 1: /* eight banks */
3509 			rdev->config.evergreen.tile_config |= 1 << 4;
3510 			break;
3511 		case 2: /* sixteen banks */
3512 		default:
3513 			rdev->config.evergreen.tile_config |= 2 << 4;
3514 			break;
3515 		}
3516 	}
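	/* group size keeps the 256 byte encoding (0); row size is taken
	 * from the ROW_SIZE field of gb_addr_config */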
3517 	rdev->config.evergreen.tile_config |= 0 << 8;
3518 	rdev->config.evergreen.tile_config |=
3519 		((gb_addr_config & 0x30000000) >> 28) << 12;
3520 
3521 	if ((rdev->family >= CHIP_CEDAR) && (rdev->family <= CHIP_HEMLOCK)) {
3522 		u32 efuse_straps_4;
3523 		u32 efuse_straps_3;
3524 
3525 		efuse_straps_4 = RREG32_RCU(0x204);
3526 		efuse_straps_3 = RREG32_RCU(0x203);
3527 		tmp = (((efuse_straps_4 & 0xf) << 4) |
3528 		      ((efuse_straps_3 & 0xf0000000) >> 28));
3529 	} else {
3530 		tmp = 0;
3531 		for (i = (rdev->config.evergreen.num_ses - 1); i >= 0; i--) {
3532 			u32 rb_disable_bitmap;
3533 
3534 			WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3535 			WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3536 			rb_disable_bitmap = (RREG32(CC_RB_BACKEND_DISABLE) & 0x00ff0000) >> 16;
3537 			tmp <<= 4;
3538 			tmp |= rb_disable_bitmap;
3539 		}
3540 	}
3541 	/* the enabled RBs are simply the ones that are not disabled */
3542 	disabled_rb_mask = tmp;
3543 	tmp = 0;
3544 	for (i = 0; i < rdev->config.evergreen.max_backends; i++)
3545 		tmp |= (1 << i);
3546 	/* if all the backends are disabled, fix it up here */
3547 	if ((disabled_rb_mask & tmp) == tmp) {
3548 		for (i = 0; i < rdev->config.evergreen.max_backends; i++)
3549 			disabled_rb_mask &= ~(1 << i);
3550 	}
3551 
3552 	for (i = 0; i < rdev->config.evergreen.num_ses; i++) {
3553 		u32 simd_disable_bitmap;
3554 
3555 		WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3556 		WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3557 		simd_disable_bitmap = (RREG32(CC_GC_SHADER_PIPE_CONFIG) & 0xffff0000) >> 16;
3558 		simd_disable_bitmap |= 0xffffffff << rdev->config.evergreen.max_simds;
3559 		tmp <<= 16;
3560 		tmp |= simd_disable_bitmap;
3561 	}
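	/* tmp now holds one disable bit per SIMD across all SEs; the number
	 * of active SIMDs is the popcount of its complement */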
3562 	rdev->config.evergreen.active_simds = hweight32(~tmp);
3563 
3564 	WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_BROADCAST_WRITES);
3565 	WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_BROADCAST_WRITES);
3566 
3567 	WREG32(GB_ADDR_CONFIG, gb_addr_config);
3568 	WREG32(DMIF_ADDR_CONFIG, gb_addr_config);
3569 	WREG32(HDP_ADDR_CONFIG, gb_addr_config);
3570 	WREG32(DMA_TILING_CONFIG, gb_addr_config);
3571 	WREG32(UVD_UDEC_ADDR_CONFIG, gb_addr_config);
3572 	WREG32(UVD_UDEC_DB_ADDR_CONFIG, gb_addr_config);
3573 	WREG32(UVD_UDEC_DBW_ADDR_CONFIG, gb_addr_config);
3574 
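	/* program the render backend map: single-RB IGPs only need to pick
	 * between RB0 and RB1, everything else goes through the generic
	 * r6xx remapper */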
3575 	if ((rdev->config.evergreen.max_backends == 1) &&
3576 	    (rdev->flags & RADEON_IS_IGP)) {
3577 		if ((disabled_rb_mask & 3) == 1) {
3578 			/* RB0 disabled, RB1 enabled */
3579 			tmp = 0x11111111;
3580 		} else {
3581 			/* RB1 disabled, RB0 enabled */
3582 			tmp = 0x00000000;
3583 		}
3584 	} else {
3585 		tmp = gb_addr_config & NUM_PIPES_MASK;
3586 		tmp = r6xx_remap_render_backend(rdev, tmp, rdev->config.evergreen.max_backends,
3587 						EVERGREEN_MAX_BACKENDS, disabled_rb_mask);
3588 	}
3589 	WREG32(GB_BACKEND_MAP, tmp);
3590 
3591 	WREG32(CGTS_SYS_TCC_DISABLE, 0);
3592 	WREG32(CGTS_TCC_DISABLE, 0);
3593 	WREG32(CGTS_USER_SYS_TCC_DISABLE, 0);
3594 	WREG32(CGTS_USER_TCC_DISABLE, 0);
3595 
3596 	/* set HW defaults for 3D engine */
3597 	WREG32(CP_QUEUE_THRESHOLDS, (ROQ_IB1_START(0x16) |
3598 				     ROQ_IB2_START(0x2b)));
3599 
3600 	WREG32(CP_MEQ_THRESHOLDS, STQ_SPLIT(0x30));
3601 
3602 	WREG32(TA_CNTL_AUX, (DISABLE_CUBE_ANISO |
3603 			     SYNC_GRADIENT |
3604 			     SYNC_WALKER |
3605 			     SYNC_ALIGNER));
3606 
3607 	sx_debug_1 = RREG32(SX_DEBUG_1);
3608 	sx_debug_1 |= ENABLE_NEW_SMX_ADDRESS;
3609 	WREG32(SX_DEBUG_1, sx_debug_1);
3610 
3611 
3612 	smx_dc_ctl0 = RREG32(SMX_DC_CTL0);
3613 	smx_dc_ctl0 &= ~NUMBER_OF_SETS(0x1ff);
3614 	smx_dc_ctl0 |= NUMBER_OF_SETS(rdev->config.evergreen.sx_num_of_sets);
3615 	WREG32(SMX_DC_CTL0, smx_dc_ctl0);
3616 
3617 	if (rdev->family <= CHIP_SUMO2)
3618 		WREG32(SMX_SAR_CTL0, 0x00010000);
3619 
3620 	WREG32(SX_EXPORT_BUFFER_SIZES, (COLOR_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_size / 4) - 1) |
3621 					POSITION_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_pos_size / 4) - 1) |
3622 					SMX_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_smx_size / 4) - 1)));
3623 
3624 	WREG32(PA_SC_FIFO_SIZE, (SC_PRIM_FIFO_SIZE(rdev->config.evergreen.sc_prim_fifo_size) |
3625 				 SC_HIZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_hiz_tile_fifo_size) |
3626 				 SC_EARLYZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_earlyz_tile_fifo_size)));
3627 
3628 	WREG32(VGT_NUM_INSTANCES, 1);
3629 	WREG32(SPI_CONFIG_CNTL, 0);
3630 	WREG32(SPI_CONFIG_CNTL_1, VTX_DONE_DELAY(4));
3631 	WREG32(CP_PERFMON_CNTL, 0);
3632 
3633 	WREG32(SQ_MS_FIFO_SIZES, (CACHE_FIFO_SIZE(16 * rdev->config.evergreen.sq_num_cf_insts) |
3634 				  FETCH_FIFO_HIWATER(0x4) |
3635 				  DONE_FIFO_HIWATER(0xe0) |
3636 				  ALU_UPDATE_FIFO_HIWATER(0x8)));
3637 
3638 	sq_config = RREG32(SQ_CONFIG);
3639 	sq_config &= ~(PS_PRIO(3) |
3640 		       VS_PRIO(3) |
3641 		       GS_PRIO(3) |
3642 		       ES_PRIO(3));
3643 	sq_config |= (VC_ENABLE |
3644 		      EXPORT_SRC_C |
3645 		      PS_PRIO(0) |
3646 		      VS_PRIO(1) |
3647 		      GS_PRIO(2) |
3648 		      ES_PRIO(3));
3649 
3650 	switch (rdev->family) {
3651 	case CHIP_CEDAR:
3652 	case CHIP_PALM:
3653 	case CHIP_SUMO:
3654 	case CHIP_SUMO2:
3655 	case CHIP_CAICOS:
3656 		/* no vertex cache */
3657 		sq_config &= ~VC_ENABLE;
3658 		break;
3659 	default:
3660 		break;
3661 	}
3662 
3663 	sq_lds_resource_mgmt = RREG32(SQ_LDS_RESOURCE_MGMT);
3664 
3665 	sq_gpr_resource_mgmt_1 = NUM_PS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2))* 12 / 32);
3666 	sq_gpr_resource_mgmt_1 |= NUM_VS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 6 / 32);
3667 	sq_gpr_resource_mgmt_1 |= NUM_CLAUSE_TEMP_GPRS(4);
3668 	sq_gpr_resource_mgmt_2 = NUM_GS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
3669 	sq_gpr_resource_mgmt_2 |= NUM_ES_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
3670 	sq_gpr_resource_mgmt_3 = NUM_HS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
3671 	sq_gpr_resource_mgmt_3 |= NUM_LS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
3672 
3673 	switch (rdev->family) {
3674 	case CHIP_CEDAR:
3675 	case CHIP_PALM:
3676 	case CHIP_SUMO:
3677 	case CHIP_SUMO2:
3678 		ps_thread_count = 96;
3679 		break;
3680 	default:
3681 		ps_thread_count = 128;
3682 		break;
3683 	}
3684 
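	/* the PS pool gets ps_thread_count threads; the rest are split
	 * evenly across VS/GS/ES/HS/LS, rounded down to a multiple of 8 */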
3685 	sq_thread_resource_mgmt = NUM_PS_THREADS(ps_thread_count);
3686 	sq_thread_resource_mgmt |= NUM_VS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3687 	sq_thread_resource_mgmt |= NUM_GS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3688 	sq_thread_resource_mgmt |= NUM_ES_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3689 	sq_thread_resource_mgmt_2 = NUM_HS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3690 	sq_thread_resource_mgmt_2 |= NUM_LS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3691 
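	/* stack entries are likewise split evenly, 1/6 per stage */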
3692 	sq_stack_resource_mgmt_1 = NUM_PS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3693 	sq_stack_resource_mgmt_1 |= NUM_VS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3694 	sq_stack_resource_mgmt_2 = NUM_GS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3695 	sq_stack_resource_mgmt_2 |= NUM_ES_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3696 	sq_stack_resource_mgmt_3 = NUM_HS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3697 	sq_stack_resource_mgmt_3 |= NUM_LS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3698 
3699 	WREG32(SQ_CONFIG, sq_config);
3700 	WREG32(SQ_GPR_RESOURCE_MGMT_1, sq_gpr_resource_mgmt_1);
3701 	WREG32(SQ_GPR_RESOURCE_MGMT_2, sq_gpr_resource_mgmt_2);
3702 	WREG32(SQ_GPR_RESOURCE_MGMT_3, sq_gpr_resource_mgmt_3);
3703 	WREG32(SQ_THREAD_RESOURCE_MGMT, sq_thread_resource_mgmt);
3704 	WREG32(SQ_THREAD_RESOURCE_MGMT_2, sq_thread_resource_mgmt_2);
3705 	WREG32(SQ_STACK_RESOURCE_MGMT_1, sq_stack_resource_mgmt_1);
3706 	WREG32(SQ_STACK_RESOURCE_MGMT_2, sq_stack_resource_mgmt_2);
3707 	WREG32(SQ_STACK_RESOURCE_MGMT_3, sq_stack_resource_mgmt_3);
3708 	WREG32(SQ_DYN_GPR_CNTL_PS_FLUSH_REQ, 0);
3709 	WREG32(SQ_LDS_RESOURCE_MGMT, sq_lds_resource_mgmt);
3710 
3711 	WREG32(PA_SC_FORCE_EOV_MAX_CNTS, (FORCE_EOV_MAX_CLK_CNT(4095) |
3712 					  FORCE_EOV_MAX_REZ_CNT(255)));
3713 
3714 	switch (rdev->family) {
3715 	case CHIP_CEDAR:
3716 	case CHIP_PALM:
3717 	case CHIP_SUMO:
3718 	case CHIP_SUMO2:
3719 	case CHIP_CAICOS:
3720 		vgt_cache_invalidation = CACHE_INVALIDATION(TC_ONLY);
3721 		break;
3722 	default:
3723 		vgt_cache_invalidation = CACHE_INVALIDATION(VC_AND_TC);
3724 		break;
3725 	}
3726 	vgt_cache_invalidation |= AUTO_INVLD_EN(ES_AND_GS_AUTO);
3727 	WREG32(VGT_CACHE_INVALIDATION, vgt_cache_invalidation);
3728 
3729 	WREG32(VGT_GS_VERTEX_REUSE, 16);
3730 	WREG32(PA_SU_LINE_STIPPLE_VALUE, 0);
3731 	WREG32(PA_SC_LINE_STIPPLE_STATE, 0);
3732 
3733 	WREG32(VGT_VERTEX_REUSE_BLOCK_CNTL, 14);
3734 	WREG32(VGT_OUT_DEALLOC_CNTL, 16);
3735 
3736 	WREG32(CB_PERF_CTR0_SEL_0, 0);
3737 	WREG32(CB_PERF_CTR0_SEL_1, 0);
3738 	WREG32(CB_PERF_CTR1_SEL_0, 0);
3739 	WREG32(CB_PERF_CTR1_SEL_1, 0);
3740 	WREG32(CB_PERF_CTR2_SEL_0, 0);
3741 	WREG32(CB_PERF_CTR2_SEL_1, 0);
3742 	WREG32(CB_PERF_CTR3_SEL_0, 0);
3743 	WREG32(CB_PERF_CTR3_SEL_1, 0);
3744 
3745 	/* clear render buffer base addresses */
3746 	WREG32(CB_COLOR0_BASE, 0);
3747 	WREG32(CB_COLOR1_BASE, 0);
3748 	WREG32(CB_COLOR2_BASE, 0);
3749 	WREG32(CB_COLOR3_BASE, 0);
3750 	WREG32(CB_COLOR4_BASE, 0);
3751 	WREG32(CB_COLOR5_BASE, 0);
3752 	WREG32(CB_COLOR6_BASE, 0);
3753 	WREG32(CB_COLOR7_BASE, 0);
3754 	WREG32(CB_COLOR8_BASE, 0);
3755 	WREG32(CB_COLOR9_BASE, 0);
3756 	WREG32(CB_COLOR10_BASE, 0);
3757 	WREG32(CB_COLOR11_BASE, 0);
3758 
3759 	/* set the shader const cache sizes to 0 */
3760 	for (i = SQ_ALU_CONST_BUFFER_SIZE_PS_0; i < 0x28200; i += 4)
3761 		WREG32(i, 0);
3762 	for (i = SQ_ALU_CONST_BUFFER_SIZE_HS_0; i < 0x29000; i += 4)
3763 		WREG32(i, 0);
3764 
3765 	tmp = RREG32(HDP_MISC_CNTL);
3766 	tmp |= HDP_FLUSH_INVALIDATE_CACHE;
3767 	WREG32(HDP_MISC_CNTL, tmp);
3768 
3769 	hdp_host_path_cntl = RREG32(HDP_HOST_PATH_CNTL);
3770 	WREG32(HDP_HOST_PATH_CNTL, hdp_host_path_cntl);
3771 
3772 	WREG32(PA_CL_ENHANCE, CLIP_VTX_REORDER_ENA | NUM_CLIP_SEQ(3));
3773 
3774 	udelay(50);
3775 
3776 }
3777 
3778 int evergreen_mc_init(struct radeon_device *rdev)
3779 {
3780 	u32 tmp;
3781 	int chansize, numchan;
3782 
3783 	/* Get VRAM information */
3784 	rdev->mc.vram_is_ddr = true;
3785 	if ((rdev->family == CHIP_PALM) ||
3786 	    (rdev->family == CHIP_SUMO) ||
3787 	    (rdev->family == CHIP_SUMO2))
3788 		tmp = RREG32(FUS_MC_ARB_RAMCFG);
3789 	else
3790 		tmp = RREG32(MC_ARB_RAMCFG);
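	/* channel width in bits: the override strap forces 16, otherwise the
	 * CHANSIZE bit selects between 64 and 32 */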
3791 	if (tmp & CHANSIZE_OVERRIDE) {
3792 		chansize = 16;
3793 	} else if (tmp & CHANSIZE_MASK) {
3794 		chansize = 64;
3795 	} else {
3796 		chansize = 32;
3797 	}
3798 	tmp = RREG32(MC_SHARED_CHMAP);
3799 	switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
3800 	case 0:
3801 	default:
3802 		numchan = 1;
3803 		break;
3804 	case 1:
3805 		numchan = 2;
3806 		break;
3807 	case 2:
3808 		numchan = 4;
3809 		break;
3810 	case 3:
3811 		numchan = 8;
3812 		break;
3813 	}
3814 	rdev->mc.vram_width = numchan * chansize;
3815 	/* Could the aperture size report 0? */
3816 	rdev->mc.aper_base = pci_resource_start(rdev->pdev, 0);
3817 	rdev->mc.aper_size = pci_resource_len(rdev->pdev, 0);
3818 	/* Setup GPU memory space */
3819 	if ((rdev->family == CHIP_PALM) ||
3820 	    (rdev->family == CHIP_SUMO) ||
3821 	    (rdev->family == CHIP_SUMO2)) {
3822 		/* size in bytes on fusion */
3823 		rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE);
3824 		rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE);
3825 	} else {
3826 		/* size in MB on evergreen/cayman/tn */
3827 		rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL;
3828 		rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL;
3829 	}
3830 	rdev->mc.visible_vram_size = rdev->mc.aper_size;
3831 	r700_vram_gtt_location(rdev, &rdev->mc);
3832 	radeon_update_bandwidth_info(rdev);
3833 
3834 	return 0;
3835 }
3836 
3837 void evergreen_print_gpu_status_regs(struct radeon_device *rdev)
3838 {
3839 	dev_info(rdev->dev, "  GRBM_STATUS               = 0x%08X\n",
3840 		RREG32(GRBM_STATUS));
3841 	dev_info(rdev->dev, "  GRBM_STATUS_SE0           = 0x%08X\n",
3842 		RREG32(GRBM_STATUS_SE0));
3843 	dev_info(rdev->dev, "  GRBM_STATUS_SE1           = 0x%08X\n",
3844 		RREG32(GRBM_STATUS_SE1));
3845 	dev_info(rdev->dev, "  SRBM_STATUS               = 0x%08X\n",
3846 		RREG32(SRBM_STATUS));
3847 	dev_info(rdev->dev, "  SRBM_STATUS2              = 0x%08X\n",
3848 		RREG32(SRBM_STATUS2));
3849 	dev_info(rdev->dev, "  R_008674_CP_STALLED_STAT1 = 0x%08X\n",
3850 		RREG32(CP_STALLED_STAT1));
3851 	dev_info(rdev->dev, "  R_008678_CP_STALLED_STAT2 = 0x%08X\n",
3852 		RREG32(CP_STALLED_STAT2));
3853 	dev_info(rdev->dev, "  R_00867C_CP_BUSY_STAT     = 0x%08X\n",
3854 		RREG32(CP_BUSY_STAT));
3855 	dev_info(rdev->dev, "  R_008680_CP_STAT          = 0x%08X\n",
3856 		RREG32(CP_STAT));
3857 	dev_info(rdev->dev, "  R_00D034_DMA_STATUS_REG   = 0x%08X\n",
3858 		RREG32(DMA_STATUS_REG));
3859 	if (rdev->family >= CHIP_CAYMAN) {
3860 		dev_info(rdev->dev, "  R_00D834_DMA_STATUS_REG   = 0x%08X\n",
3861 			 RREG32(DMA_STATUS_REG + 0x800));
3862 	}
3863 }
3864 
3865 bool evergreen_is_display_hung(struct radeon_device *rdev)
3866 {
3867 	u32 crtc_hung = 0;
3868 	u32 crtc_status[6];
3869 	u32 i, j, tmp;
3870 
3871 	for (i = 0; i < rdev->num_crtc; i++) {
3872 		if (RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]) & EVERGREEN_CRTC_MASTER_EN) {
3873 			crtc_status[i] = RREG32(EVERGREEN_CRTC_STATUS_HV_COUNT + crtc_offsets[i]);
3874 			crtc_hung |= (1 << i);
3875 		}
3876 	}
3877 
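	/* poll each suspect CRTC's HV counter up to 10 times, 100us apart;
	 * a counter that never advances indicates a hung display engine */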
3878 	for (j = 0; j < 10; j++) {
3879 		for (i = 0; i < rdev->num_crtc; i++) {
3880 			if (crtc_hung & (1 << i)) {
3881 				tmp = RREG32(EVERGREEN_CRTC_STATUS_HV_COUNT + crtc_offsets[i]);
3882 				if (tmp != crtc_status[i])
3883 					crtc_hung &= ~(1 << i);
3884 			}
3885 		}
3886 		if (crtc_hung == 0)
3887 			return false;
3888 		udelay(100);
3889 	}
3890 
3891 	return true;
3892 }
3893 
3894 u32 evergreen_gpu_check_soft_reset(struct radeon_device *rdev)
3895 {
3896 	u32 reset_mask = 0;
3897 	u32 tmp;
3898 
3899 	/* GRBM_STATUS */
3900 	tmp = RREG32(GRBM_STATUS);
3901 	if (tmp & (PA_BUSY | SC_BUSY |
3902 		   SH_BUSY | SX_BUSY |
3903 		   TA_BUSY | VGT_BUSY |
3904 		   DB_BUSY | CB_BUSY |
3905 		   SPI_BUSY | VGT_BUSY_NO_DMA))
3906 		reset_mask |= RADEON_RESET_GFX;
3907 
3908 	if (tmp & (CF_RQ_PENDING | PF_RQ_PENDING |
3909 		   CP_BUSY | CP_COHERENCY_BUSY))
3910 		reset_mask |= RADEON_RESET_CP;
3911 
3912 	if (tmp & GRBM_EE_BUSY)
3913 		reset_mask |= RADEON_RESET_GRBM | RADEON_RESET_GFX | RADEON_RESET_CP;
3914 
3915 	/* DMA_STATUS_REG */
3916 	tmp = RREG32(DMA_STATUS_REG);
3917 	if (!(tmp & DMA_IDLE))
3918 		reset_mask |= RADEON_RESET_DMA;
3919 
3920 	/* SRBM_STATUS2 */
3921 	tmp = RREG32(SRBM_STATUS2);
3922 	if (tmp & DMA_BUSY)
3923 		reset_mask |= RADEON_RESET_DMA;
3924 
3925 	/* SRBM_STATUS */
3926 	tmp = RREG32(SRBM_STATUS);
3927 	if (tmp & (RLC_RQ_PENDING | RLC_BUSY))
3928 		reset_mask |= RADEON_RESET_RLC;
3929 
3930 	if (tmp & IH_BUSY)
3931 		reset_mask |= RADEON_RESET_IH;
3932 
3933 	if (tmp & SEM_BUSY)
3934 		reset_mask |= RADEON_RESET_SEM;
3935 
3936 	if (tmp & GRBM_RQ_PENDING)
3937 		reset_mask |= RADEON_RESET_GRBM;
3938 
3939 	if (tmp & VMC_BUSY)
3940 		reset_mask |= RADEON_RESET_VMC;
3941 
3942 	if (tmp & (MCB_BUSY | MCB_NON_DISPLAY_BUSY |
3943 		   MCC_BUSY | MCD_BUSY))
3944 		reset_mask |= RADEON_RESET_MC;
3945 
3946 	if (evergreen_is_display_hung(rdev))
3947 		reset_mask |= RADEON_RESET_DISPLAY;
3948 
3949 	/* VM_L2_STATUS */
3950 	tmp = RREG32(VM_L2_STATUS);
3951 	if (tmp & L2_BUSY)
3952 		reset_mask |= RADEON_RESET_VMC;
3953 
3954 	/* Skip the MC reset as it is most likely not hung, just busy */
3955 	if (reset_mask & RADEON_RESET_MC) {
3956 		DRM_DEBUG("MC busy: 0x%08X, clearing.\n", reset_mask);
3957 		reset_mask &= ~RADEON_RESET_MC;
3958 	}
3959 
3960 	return reset_mask;
3961 }
3962 
3963 static void evergreen_gpu_soft_reset(struct radeon_device *rdev, u32 reset_mask)
3964 {
3965 	struct evergreen_mc_save save;
3966 	u32 grbm_soft_reset = 0, srbm_soft_reset = 0;
3967 	u32 tmp;
3968 
3969 	if (reset_mask == 0)
3970 		return;
3971 
3972 	dev_info(rdev->dev, "GPU softreset: 0x%08X\n", reset_mask);
3973 
3974 	evergreen_print_gpu_status_regs(rdev);
3975 
3976 	/* Disable CP parsing/prefetching */
3977 	WREG32(CP_ME_CNTL, CP_ME_HALT | CP_PFP_HALT);
3978 
3979 	if (reset_mask & RADEON_RESET_DMA) {
3980 		/* Disable DMA */
3981 		tmp = RREG32(DMA_RB_CNTL);
3982 		tmp &= ~DMA_RB_ENABLE;
3983 		WREG32(DMA_RB_CNTL, tmp);
3984 	}
3985 
3986 	udelay(50);
3987 
3988 	evergreen_mc_stop(rdev, &save);
3989 	if (evergreen_mc_wait_for_idle(rdev)) {
3990 		dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
3991 	}
3992 
3993 	if (reset_mask & (RADEON_RESET_GFX | RADEON_RESET_COMPUTE)) {
3994 		grbm_soft_reset |= SOFT_RESET_DB |
3995 			SOFT_RESET_CB |
3996 			SOFT_RESET_PA |
3997 			SOFT_RESET_SC |
3998 			SOFT_RESET_SPI |
3999 			SOFT_RESET_SX |
4000 			SOFT_RESET_SH |
4001 			SOFT_RESET_TC |
4002 			SOFT_RESET_TA |
4003 			SOFT_RESET_VC |
4004 			SOFT_RESET_VGT;
4005 	}
4006 
4007 	if (reset_mask & RADEON_RESET_CP) {
4008 		grbm_soft_reset |= SOFT_RESET_CP |
4009 			SOFT_RESET_VGT;
4010 
4011 		srbm_soft_reset |= SOFT_RESET_GRBM;
4012 	}
4013 
4014 	if (reset_mask & RADEON_RESET_DMA)
4015 		srbm_soft_reset |= SOFT_RESET_DMA;
4016 
4017 	if (reset_mask & RADEON_RESET_DISPLAY)
4018 		srbm_soft_reset |= SOFT_RESET_DC;
4019 
4020 	if (reset_mask & RADEON_RESET_RLC)
4021 		srbm_soft_reset |= SOFT_RESET_RLC;
4022 
4023 	if (reset_mask & RADEON_RESET_SEM)
4024 		srbm_soft_reset |= SOFT_RESET_SEM;
4025 
4026 	if (reset_mask & RADEON_RESET_IH)
4027 		srbm_soft_reset |= SOFT_RESET_IH;
4028 
4029 	if (reset_mask & RADEON_RESET_GRBM)
4030 		srbm_soft_reset |= SOFT_RESET_GRBM;
4031 
4032 	if (reset_mask & RADEON_RESET_VMC)
4033 		srbm_soft_reset |= SOFT_RESET_VMC;
4034 
4035 	if (!(rdev->flags & RADEON_IS_IGP)) {
4036 		if (reset_mask & RADEON_RESET_MC)
4037 			srbm_soft_reset |= SOFT_RESET_MC;
4038 	}
4039 
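	/* assert the selected reset bits, read back to post the write, hold
	 * for 50us, then deassert */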
4040 	if (grbm_soft_reset) {
4041 		tmp = RREG32(GRBM_SOFT_RESET);
4042 		tmp |= grbm_soft_reset;
4043 		dev_info(rdev->dev, "GRBM_SOFT_RESET=0x%08X\n", tmp);
4044 		WREG32(GRBM_SOFT_RESET, tmp);
4045 		tmp = RREG32(GRBM_SOFT_RESET);
4046 
4047 		udelay(50);
4048 
4049 		tmp &= ~grbm_soft_reset;
4050 		WREG32(GRBM_SOFT_RESET, tmp);
4051 		tmp = RREG32(GRBM_SOFT_RESET);
4052 	}
4053 
4054 	if (srbm_soft_reset) {
4055 		tmp = RREG32(SRBM_SOFT_RESET);
4056 		tmp |= srbm_soft_reset;
4057 		dev_info(rdev->dev, "SRBM_SOFT_RESET=0x%08X\n", tmp);
4058 		WREG32(SRBM_SOFT_RESET, tmp);
4059 		tmp = RREG32(SRBM_SOFT_RESET);
4060 
4061 		udelay(50);
4062 
4063 		tmp &= ~srbm_soft_reset;
4064 		WREG32(SRBM_SOFT_RESET, tmp);
4065 		tmp = RREG32(SRBM_SOFT_RESET);
4066 	}
4067 
4068 	/* Wait a little for things to settle down */
4069 	udelay(50);
4070 
4071 	evergreen_mc_resume(rdev, &save);
4072 	udelay(50);
4073 
4074 	evergreen_print_gpu_status_regs(rdev);
4075 }
4076 
4077 void evergreen_gpu_pci_config_reset(struct radeon_device *rdev)
4078 {
4079 	struct evergreen_mc_save save;
4080 	u32 tmp, i;
4081 
4082 	dev_info(rdev->dev, "GPU pci config reset\n");
4083 
4084 	/* disable dpm? */
4085 
4086 	/* Disable CP parsing/prefetching */
4087 	WREG32(CP_ME_CNTL, CP_ME_HALT | CP_PFP_HALT);
4088 	udelay(50);
4089 	/* Disable DMA */
4090 	tmp = RREG32(DMA_RB_CNTL);
4091 	tmp &= ~DMA_RB_ENABLE;
4092 	WREG32(DMA_RB_CNTL, tmp);
4093 	/* XXX other engines? */
4094 
4095 	/* halt the rlc */
4096 	r600_rlc_stop(rdev);
4097 
4098 	udelay(50);
4099 
4100 	/* set mclk/sclk to bypass */
4101 	rv770_set_clk_bypass_mode(rdev);
4102 	/* disable BM */
4103 	pci_clear_master(rdev->pdev);
4104 	/* disable mem access */
4105 	evergreen_mc_stop(rdev, &save);
4106 	if (evergreen_mc_wait_for_idle(rdev)) {
4107 		dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
4108 	}
4109 	/* reset */
4110 	radeon_pci_config_reset(rdev);
4111 	/* wait for asic to come out of reset */
4112 	for (i = 0; i < rdev->usec_timeout; i++) {
4113 		if (RREG32(CONFIG_MEMSIZE) != 0xffffffff)
4114 			break;
4115 		udelay(1);
4116 	}
4117 }
4118 
4119 int evergreen_asic_reset(struct radeon_device *rdev, bool hard)
4120 {
4121 	u32 reset_mask;
4122 
4123 	if (hard) {
4124 		evergreen_gpu_pci_config_reset(rdev);
4125 		return 0;
4126 	}
4127 
4128 	reset_mask = evergreen_gpu_check_soft_reset(rdev);
4129 
4130 	if (reset_mask)
4131 		r600_set_bios_scratch_engine_hung(rdev, true);
4132 
4133 	/* try soft reset */
4134 	evergreen_gpu_soft_reset(rdev, reset_mask);
4135 
4136 	reset_mask = evergreen_gpu_check_soft_reset(rdev);
4137 
4138 	/* try pci config reset */
4139 	if (reset_mask && radeon_hard_reset)
4140 		evergreen_gpu_pci_config_reset(rdev);
4141 
4142 	reset_mask = evergreen_gpu_check_soft_reset(rdev);
4143 
4144 	if (!reset_mask)
4145 		r600_set_bios_scratch_engine_hung(rdev, false);
4146 
4147 	return 0;
4148 }
4149 
4150 /**
4151  * evergreen_gfx_is_lockup - Check if the GFX engine is locked up
4152  *
4153  * @rdev: radeon_device pointer
4154  * @ring: radeon_ring structure holding ring information
4155  *
4156  * Check if the GFX engine is locked up.
4157  * Returns true if the engine appears to be locked up, false if not.
4158  */
4159 bool evergreen_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
4160 {
4161 	u32 reset_mask = evergreen_gpu_check_soft_reset(rdev);
4162 
4163 	if (!(reset_mask & (RADEON_RESET_GFX |
4164 			    RADEON_RESET_COMPUTE |
4165 			    RADEON_RESET_CP))) {
4166 		radeon_ring_lockup_update(rdev, ring);
4167 		return false;
4168 	}
4169 	return radeon_ring_test_lockup(rdev, ring);
4170 }
4171 
4172 /*
4173  * RLC
4174  */
4175 #define RLC_SAVE_RESTORE_LIST_END_MARKER    0x00000000
4176 #define RLC_CLEAR_STATE_END_MARKER          0x00000001
4177 
4178 void sumo_rlc_fini(struct radeon_device *rdev)
4179 {
4180 	int r;
4181 
4182 	/* save restore block */
4183 	if (rdev->rlc.save_restore_obj) {
4184 		r = radeon_bo_reserve(rdev->rlc.save_restore_obj, false);
4185 		if (unlikely(r != 0))
4186 			dev_warn(rdev->dev, "(%d) reserve RLC sr bo failed\n", r);
4187 		radeon_bo_unpin(rdev->rlc.save_restore_obj);
4188 		radeon_bo_unreserve(rdev->rlc.save_restore_obj);
4189 
4190 		radeon_bo_unref(&rdev->rlc.save_restore_obj);
4191 		rdev->rlc.save_restore_obj = NULL;
4192 	}
4193 
4194 	/* clear state block */
4195 	if (rdev->rlc.clear_state_obj) {
4196 		r = radeon_bo_reserve(rdev->rlc.clear_state_obj, false);
4197 		if (unlikely(r != 0))
4198 			dev_warn(rdev->dev, "(%d) reserve RLC c bo failed\n", r);
4199 		radeon_bo_unpin(rdev->rlc.clear_state_obj);
4200 		radeon_bo_unreserve(rdev->rlc.clear_state_obj);
4201 
4202 		radeon_bo_unref(&rdev->rlc.clear_state_obj);
4203 		rdev->rlc.clear_state_obj = NULL;
4204 	}
4205 
4206 	/* CP table block */
4207 	if (rdev->rlc.cp_table_obj) {
4208 		r = radeon_bo_reserve(rdev->rlc.cp_table_obj, false);
4209 		if (unlikely(r != 0))
4210 			dev_warn(rdev->dev, "(%d) reserve RLC cp table bo failed\n", r);
4211 		radeon_bo_unpin(rdev->rlc.cp_table_obj);
4212 		radeon_bo_unreserve(rdev->rlc.cp_table_obj);
4213 
4214 		radeon_bo_unref(&rdev->rlc.cp_table_obj);
4215 		rdev->rlc.cp_table_obj = NULL;
4216 	}
4217 }
4218 
4219 #define CP_ME_TABLE_SIZE    96
4220 
4221 #pragma GCC diagnostic push
4222 #pragma GCC diagnostic ignored "-Wcast-qual"
4223 int sumo_rlc_init(struct radeon_device *rdev)
4224 {
4225 	const u32 *src_ptr;
4226 	volatile u32 *dst_ptr;
4227 	u32 dws, data, i, j, k, reg_num;
4228 	u32 reg_list_num, reg_list_hdr_blk_index, reg_list_blk_index = 0;
4229 	u64 reg_list_mc_addr;
4230 	const struct cs_section_def *cs_data;
4231 	int r;
4232 
4233 	src_ptr = rdev->rlc.reg_list;
4234 	dws = rdev->rlc.reg_list_size;
4235 	if (rdev->family >= CHIP_BONAIRE) {
4236 		dws += (5 * 16) + 48 + 48 + 64;
4237 	}
4238 	cs_data = rdev->rlc.cs_data;
4239 
4240 	if (src_ptr) {
4241 		/* save restore block */
4242 		if (rdev->rlc.save_restore_obj == NULL) {
4243 			r = radeon_bo_create(rdev, dws * 4, PAGE_SIZE, true,
4244 					     RADEON_GEM_DOMAIN_VRAM, 0, NULL,
4245 					     NULL, &rdev->rlc.save_restore_obj);
4246 			if (r) {
4247 				dev_warn(rdev->dev, "(%d) create RLC sr bo failed\n", r);
4248 				return r;
4249 			}
4250 		}
4251 
4252 		r = radeon_bo_reserve(rdev->rlc.save_restore_obj, false);
4253 		if (unlikely(r != 0)) {
4254 			sumo_rlc_fini(rdev);
4255 			return r;
4256 		}
4257 		r = radeon_bo_pin(rdev->rlc.save_restore_obj, RADEON_GEM_DOMAIN_VRAM,
4258 				  &rdev->rlc.save_restore_gpu_addr);
4259 		if (r) {
4260 			radeon_bo_unreserve(rdev->rlc.save_restore_obj);
4261 			dev_warn(rdev->dev, "(%d) pin RLC sr bo failed\n", r);
4262 			sumo_rlc_fini(rdev);
4263 			return r;
4264 		}
4265 
4266 		r = radeon_bo_kmap(rdev->rlc.save_restore_obj, (void **)&rdev->rlc.sr_ptr);
4267 		if (r) {
4268 			dev_warn(rdev->dev, "(%d) map RLC sr bo failed\n", r);
4269 			sumo_rlc_fini(rdev);
4270 			return r;
4271 		}
4272 		/* write the sr buffer */
4273 		dst_ptr = rdev->rlc.sr_ptr;
4274 		if (rdev->family >= CHIP_TAHITI) {
4275 			/* SI */
4276 			for (i = 0; i < rdev->rlc.reg_list_size; i++)
4277 				dst_ptr[i] = cpu_to_le32(src_ptr[i]);
4278 		} else {
4279 			/* ON/LN/TN */
4280 			/* format:
4281 			 * dw0: (reg2 << 16) | reg1
4282 			 * dw1: reg1 save space
4283 			 * dw2: reg2 save space
4284 			 */
4285 			for (i = 0; i < dws; i++) {
4286 				data = src_ptr[i] >> 2;
4287 				i++;
4288 				if (i < dws)
4289 					data |= (src_ptr[i] >> 2) << 16;
4290 				j = (((i - 1) * 3) / 2);
4291 				dst_ptr[j] = cpu_to_le32(data);
4292 			}
4293 			j = ((i * 3) / 2);
4294 			dst_ptr[j] = cpu_to_le32(RLC_SAVE_RESTORE_LIST_END_MARKER);
4295 		}
4296 		radeon_bo_kunmap(rdev->rlc.save_restore_obj);
4297 		radeon_bo_unreserve(rdev->rlc.save_restore_obj);
4298 	}
4299 
4300 	if (cs_data) {
4301 		/* clear state block */
4302 		if (rdev->family >= CHIP_BONAIRE) {
4303 			rdev->rlc.clear_state_size = dws = cik_get_csb_size(rdev);
4304 		} else if (rdev->family >= CHIP_TAHITI) {
4305 			rdev->rlc.clear_state_size = si_get_csb_size(rdev);
4306 			dws = rdev->rlc.clear_state_size + (256 / 4);
4307 		} else {
4308 			reg_list_num = 0;
4309 			dws = 0;
4310 			for (i = 0; cs_data[i].section != NULL; i++) {
4311 				for (j = 0; cs_data[i].section[j].extent != NULL; j++) {
4312 					reg_list_num++;
4313 					dws += cs_data[i].section[j].reg_count;
4314 				}
4315 			}
4316 			reg_list_blk_index = (3 * reg_list_num + 2);
4317 			dws += reg_list_blk_index;
4318 			rdev->rlc.clear_state_size = dws;
4319 		}
4320 
4321 		if (rdev->rlc.clear_state_obj == NULL) {
4322 			r = radeon_bo_create(rdev, dws * 4, PAGE_SIZE, true,
4323 					     RADEON_GEM_DOMAIN_VRAM, 0, NULL,
4324 					     NULL, &rdev->rlc.clear_state_obj);
4325 			if (r) {
4326 				dev_warn(rdev->dev, "(%d) create RLC c bo failed\n", r);
4327 				sumo_rlc_fini(rdev);
4328 				return r;
4329 			}
4330 		}
4331 		r = radeon_bo_reserve(rdev->rlc.clear_state_obj, false);
4332 		if (unlikely(r != 0)) {
4333 			sumo_rlc_fini(rdev);
4334 			return r;
4335 		}
4336 		r = radeon_bo_pin(rdev->rlc.clear_state_obj, RADEON_GEM_DOMAIN_VRAM,
4337 				  &rdev->rlc.clear_state_gpu_addr);
4338 		if (r) {
4339 			radeon_bo_unreserve(rdev->rlc.clear_state_obj);
4340 			dev_warn(rdev->dev, "(%d) pin RLC c bo failed\n", r);
4341 			sumo_rlc_fini(rdev);
4342 			return r;
4343 		}
4344 
4345 		r = radeon_bo_kmap(rdev->rlc.clear_state_obj, (void **)&rdev->rlc.cs_ptr);
4346 		if (r) {
4347 			dev_warn(rdev->dev, "(%d) map RLC c bo failed\n", r);
4348 			sumo_rlc_fini(rdev);
4349 			return r;
4350 		}
4351 		/* set up the cs buffer */
4352 		dst_ptr = rdev->rlc.cs_ptr;
4353 		if (rdev->family >= CHIP_BONAIRE) {
4354 			cik_get_csb_buffer(rdev, dst_ptr);
4355 		} else if (rdev->family >= CHIP_TAHITI) {
4356 			reg_list_mc_addr = rdev->rlc.clear_state_gpu_addr + 256;
4357 			dst_ptr[0] = cpu_to_le32(upper_32_bits(reg_list_mc_addr));
4358 			dst_ptr[1] = cpu_to_le32(lower_32_bits(reg_list_mc_addr));
4359 			dst_ptr[2] = cpu_to_le32(rdev->rlc.clear_state_size);
4360 			si_get_csb_buffer(rdev, &dst_ptr[(256/4)]);
4361 		} else {
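			/* ON/LN/TN layout: the upper 32 bits of the data
			 * block address, then one {addr lo, reg offset,
			 * 0x08000000 | byte count} triplet per extent with
			 * an end marker after the last one; the register
			 * values themselves are packed after the headers */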
4362 			reg_list_hdr_blk_index = 0;
4363 			reg_list_mc_addr = rdev->rlc.clear_state_gpu_addr + (reg_list_blk_index * 4);
4364 			data = upper_32_bits(reg_list_mc_addr);
4365 			dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
4366 			reg_list_hdr_blk_index++;
4367 			for (i = 0; cs_data[i].section != NULL; i++) {
4368 				for (j = 0; cs_data[i].section[j].extent != NULL; j++) {
4369 					reg_num = cs_data[i].section[j].reg_count;
4370 					data = reg_list_mc_addr & 0xffffffff;
4371 					dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
4372 					reg_list_hdr_blk_index++;
4373 
4374 					data = (cs_data[i].section[j].reg_index * 4) & 0xffffffff;
4375 					dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
4376 					reg_list_hdr_blk_index++;
4377 
4378 					data = 0x08000000 | (reg_num * 4);
4379 					dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
4380 					reg_list_hdr_blk_index++;
4381 
4382 					for (k = 0; k < reg_num; k++) {
4383 						data = cs_data[i].section[j].extent[k];
4384 						dst_ptr[reg_list_blk_index + k] = cpu_to_le32(data);
4385 					}
4386 					reg_list_mc_addr += reg_num * 4;
4387 					reg_list_blk_index += reg_num;
4388 				}
4389 			}
4390 			dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(RLC_CLEAR_STATE_END_MARKER);
4391 		}
4392 		radeon_bo_kunmap(rdev->rlc.clear_state_obj);
4393 		radeon_bo_unreserve(rdev->rlc.clear_state_obj);
4394 	}
4395 
4396 	if (rdev->rlc.cp_table_size) {
4397 		if (rdev->rlc.cp_table_obj == NULL) {
4398 			r = radeon_bo_create(rdev, rdev->rlc.cp_table_size,
4399 					     PAGE_SIZE, true,
4400 					     RADEON_GEM_DOMAIN_VRAM, 0, NULL,
4401 					     NULL, &rdev->rlc.cp_table_obj);
4402 			if (r) {
4403 				dev_warn(rdev->dev, "(%d) create RLC cp table bo failed\n", r);
4404 				sumo_rlc_fini(rdev);
4405 				return r;
4406 			}
4407 		}
4408 
4409 		r = radeon_bo_reserve(rdev->rlc.cp_table_obj, false);
4410 		if (unlikely(r != 0)) {
4411 			dev_warn(rdev->dev, "(%d) reserve RLC cp table bo failed\n", r);
4412 			sumo_rlc_fini(rdev);
4413 			return r;
4414 		}
4415 		r = radeon_bo_pin(rdev->rlc.cp_table_obj, RADEON_GEM_DOMAIN_VRAM,
4416 				  &rdev->rlc.cp_table_gpu_addr);
4417 		if (r) {
4418 			radeon_bo_unreserve(rdev->rlc.cp_table_obj);
4419 			dev_warn(rdev->dev, "(%d) pin RLC cp_table bo failed\n", r);
4420 			sumo_rlc_fini(rdev);
4421 			return r;
4422 		}
4423 		r = radeon_bo_kmap(rdev->rlc.cp_table_obj, (void **)&rdev->rlc.cp_table_ptr);
4424 		if (r) {
4425 			dev_warn(rdev->dev, "(%d) map RLC cp table bo failed\n", r);
4426 			sumo_rlc_fini(rdev);
4427 			return r;
4428 		}
4429 
4430 		cik_init_cp_pg_table(rdev);
4431 
4432 		radeon_bo_kunmap(rdev->rlc.cp_table_obj);
4433 		radeon_bo_unreserve(rdev->rlc.cp_table_obj);
4434 
4435 	}
4436 
4437 	return 0;
4438 }
4439 #pragma GCC diagnostic pop
4440 
4441 static void evergreen_rlc_start(struct radeon_device *rdev)
4442 {
4443 	u32 mask = RLC_ENABLE;
4444 
4445 	if (rdev->flags & RADEON_IS_IGP) {
4446 		mask |= GFX_POWER_GATING_ENABLE | GFX_POWER_GATING_SRC;
4447 	}
4448 
4449 	WREG32(RLC_CNTL, mask);
4450 }
4451 
4452 int evergreen_rlc_resume(struct radeon_device *rdev)
4453 {
4454 	u32 i;
4455 	const __be32 *fw_data;
4456 
4457 	if (!rdev->rlc_fw)
4458 		return -EINVAL;
4459 
4460 	r600_rlc_stop(rdev);
4461 
4462 	WREG32(RLC_HB_CNTL, 0);
4463 
4464 	if (rdev->flags & RADEON_IS_IGP) {
4465 		if (rdev->family == CHIP_ARUBA) {
4466 			u32 always_on_bitmap =
4467 				3 | (3 << (16 * rdev->config.cayman.max_shader_engines));
4468 			/* find out the number of active simds */
4469 			u32 tmp = (RREG32(CC_GC_SHADER_PIPE_CONFIG) & 0xffff0000) >> 16;
4470 			tmp |= 0xffffffff << rdev->config.cayman.max_simds_per_se;
4471 			tmp = hweight32(~tmp);
4472 			if (tmp == rdev->config.cayman.max_simds_per_se) {
4473 				WREG32(TN_RLC_LB_ALWAYS_ACTIVE_SIMD_MASK, always_on_bitmap);
4474 				WREG32(TN_RLC_LB_PARAMS, 0x00601004);
4475 				WREG32(TN_RLC_LB_INIT_SIMD_MASK, 0xffffffff);
4476 				WREG32(TN_RLC_LB_CNTR_INIT, 0x00000000);
4477 				WREG32(TN_RLC_LB_CNTR_MAX, 0x00002000);
4478 			}
4479 		} else {
4480 			WREG32(RLC_HB_WPTR_LSB_ADDR, 0);
4481 			WREG32(RLC_HB_WPTR_MSB_ADDR, 0);
4482 		}
4483 		WREG32(TN_RLC_SAVE_AND_RESTORE_BASE, rdev->rlc.save_restore_gpu_addr >> 8);
4484 		WREG32(TN_RLC_CLEAR_STATE_RESTORE_BASE, rdev->rlc.clear_state_gpu_addr >> 8);
4485 	} else {
4486 		WREG32(RLC_HB_BASE, 0);
4487 		WREG32(RLC_HB_RPTR, 0);
4488 		WREG32(RLC_HB_WPTR, 0);
4489 		WREG32(RLC_HB_WPTR_LSB_ADDR, 0);
4490 		WREG32(RLC_HB_WPTR_MSB_ADDR, 0);
4491 	}
4492 	WREG32(RLC_MC_CNTL, 0);
4493 	WREG32(RLC_UCODE_CNTL, 0);
4494 
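	/* load the RLC microcode one big-endian word at a time; the image
	 * size depends on the ASIC family */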
4495 	fw_data = (const __be32 *)rdev->rlc_fw->data;
4496 	if (rdev->family >= CHIP_ARUBA) {
4497 		for (i = 0; i < ARUBA_RLC_UCODE_SIZE; i++) {
4498 			WREG32(RLC_UCODE_ADDR, i);
4499 			WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
4500 		}
4501 	} else if (rdev->family >= CHIP_CAYMAN) {
4502 		for (i = 0; i < CAYMAN_RLC_UCODE_SIZE; i++) {
4503 			WREG32(RLC_UCODE_ADDR, i);
4504 			WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
4505 		}
4506 	} else {
4507 		for (i = 0; i < EVERGREEN_RLC_UCODE_SIZE; i++) {
4508 			WREG32(RLC_UCODE_ADDR, i);
4509 			WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
4510 		}
4511 	}
4512 	WREG32(RLC_UCODE_ADDR, 0);
4513 
4514 	evergreen_rlc_start(rdev);
4515 
4516 	return 0;
4517 }
4518 
4519 /* Interrupts */
4520 
4521 u32 evergreen_get_vblank_counter(struct radeon_device *rdev, int crtc)
4522 {
4523 	if (crtc >= rdev->num_crtc)
4524 		return 0;
4525 	else
4526 		return RREG32(CRTC_STATUS_FRAME_COUNT + crtc_offsets[crtc]);
4527 }
4528 
4529 void evergreen_disable_interrupt_state(struct radeon_device *rdev)
4530 {
4531 	u32 tmp;
4532 
4533 	if (rdev->family >= CHIP_CAYMAN) {
4534 		cayman_cp_int_cntl_setup(rdev, 0,
4535 					 CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
4536 		cayman_cp_int_cntl_setup(rdev, 1, 0);
4537 		cayman_cp_int_cntl_setup(rdev, 2, 0);
4538 		tmp = RREG32(CAYMAN_DMA1_CNTL) & ~TRAP_ENABLE;
4539 		WREG32(CAYMAN_DMA1_CNTL, tmp);
4540 	} else
4541 		WREG32(CP_INT_CNTL, CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
4542 	tmp = RREG32(DMA_CNTL) & ~TRAP_ENABLE;
4543 	WREG32(DMA_CNTL, tmp);
4544 	WREG32(GRBM_INT_CNTL, 0);
4545 	WREG32(SRBM_INT_CNTL, 0);
4546 	WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
4547 	WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
4548 	if (rdev->num_crtc >= 4) {
4549 		WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
4550 		WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
4551 	}
4552 	if (rdev->num_crtc >= 6) {
4553 		WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
4554 		WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
4555 	}
4556 
4557 	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
4558 	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
4559 	if (rdev->num_crtc >= 4) {
4560 		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
4561 		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
4562 	}
4563 	if (rdev->num_crtc >= 6) {
4564 		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
4565 		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
4566 	}
4567 
4568 	/* only one DAC on DCE5 */
4569 	if (!ASIC_IS_DCE5(rdev))
4570 		WREG32(DACA_AUTODETECT_INT_CONTROL, 0);
4571 	WREG32(DACB_AUTODETECT_INT_CONTROL, 0);
4572 
4573 	tmp = RREG32(DC_HPD1_INT_CONTROL) & DC_HPDx_INT_POLARITY;
4574 	WREG32(DC_HPD1_INT_CONTROL, tmp);
4575 	tmp = RREG32(DC_HPD2_INT_CONTROL) & DC_HPDx_INT_POLARITY;
4576 	WREG32(DC_HPD2_INT_CONTROL, tmp);
4577 	tmp = RREG32(DC_HPD3_INT_CONTROL) & DC_HPDx_INT_POLARITY;
4578 	WREG32(DC_HPD3_INT_CONTROL, tmp);
4579 	tmp = RREG32(DC_HPD4_INT_CONTROL) & DC_HPDx_INT_POLARITY;
4580 	WREG32(DC_HPD4_INT_CONTROL, tmp);
4581 	tmp = RREG32(DC_HPD5_INT_CONTROL) & DC_HPDx_INT_POLARITY;
4582 	WREG32(DC_HPD5_INT_CONTROL, tmp);
4583 	tmp = RREG32(DC_HPD6_INT_CONTROL) & DC_HPDx_INT_POLARITY;
4584 	WREG32(DC_HPD6_INT_CONTROL, tmp);
4585 
4586 }
4587 
4588 int evergreen_irq_set(struct radeon_device *rdev)
4589 {
4590 	u32 cp_int_cntl = CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE;
4591 	u32 cp_int_cntl1 = 0, cp_int_cntl2 = 0;
4592 	u32 crtc1 = 0, crtc2 = 0, crtc3 = 0, crtc4 = 0, crtc5 = 0, crtc6 = 0;
4593 	u32 hpd1, hpd2, hpd3, hpd4, hpd5, hpd6;
4594 	u32 grbm_int_cntl = 0;
4595 	u32 afmt1 = 0, afmt2 = 0, afmt3 = 0, afmt4 = 0, afmt5 = 0, afmt6 = 0;
4596 	u32 dma_cntl, dma_cntl1 = 0;
4597 	u32 thermal_int = 0;
4598 
4599 	if (!rdev->irq.installed) {
4600 		WARN(1, "Can't enable IRQ/MSI because no handler is installed\n");
4601 		return -EINVAL;
4602 	}
4603 	/* don't enable anything if the ih is disabled */
4604 	if (!rdev->ih.enabled) {
4605 		r600_disable_interrupts(rdev);
4606 		/* force the active interrupt state to all disabled */
4607 		evergreen_disable_interrupt_state(rdev);
4608 		return 0;
4609 	}
4610 
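	/* latch the current control values with the enable bits masked off;
	 * they are re-enabled below only for the sources actually in use */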
4611 	hpd1 = RREG32(DC_HPD1_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
4612 	hpd2 = RREG32(DC_HPD2_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
4613 	hpd3 = RREG32(DC_HPD3_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
4614 	hpd4 = RREG32(DC_HPD4_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
4615 	hpd5 = RREG32(DC_HPD5_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
4616 	hpd6 = RREG32(DC_HPD6_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
4617 	if (rdev->family == CHIP_ARUBA)
4618 		thermal_int = RREG32(TN_CG_THERMAL_INT_CTRL) &
4619 			~(THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW);
4620 	else
4621 		thermal_int = RREG32(CG_THERMAL_INT) &
4622 			~(THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW);
4623 
4624 	afmt1 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4625 	afmt2 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4626 	afmt3 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4627 	afmt4 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4628 	afmt5 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4629 	afmt6 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4630 
4631 	dma_cntl = RREG32(DMA_CNTL) & ~TRAP_ENABLE;
4632 
4633 	if (rdev->family >= CHIP_CAYMAN) {
4634 		/* enable CP interrupts on all rings */
4635 		if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
4636 			DRM_DEBUG("evergreen_irq_set: sw int gfx\n");
4637 			cp_int_cntl |= TIME_STAMP_INT_ENABLE;
4638 		}
4639 		if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP1_INDEX])) {
4640 			DRM_DEBUG("evergreen_irq_set: sw int cp1\n");
4641 			cp_int_cntl1 |= TIME_STAMP_INT_ENABLE;
4642 		}
4643 		if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP2_INDEX])) {
4644 			DRM_DEBUG("evergreen_irq_set: sw int cp2\n");
4645 			cp_int_cntl2 |= TIME_STAMP_INT_ENABLE;
4646 		}
4647 	} else {
4648 		if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
4649 			DRM_DEBUG("evergreen_irq_set: sw int gfx\n");
4650 			cp_int_cntl |= RB_INT_ENABLE;
4651 			cp_int_cntl |= TIME_STAMP_INT_ENABLE;
4652 		}
4653 	}
4654 
4655 	if (atomic_read(&rdev->irq.ring_int[R600_RING_TYPE_DMA_INDEX])) {
4656 		DRM_DEBUG("r600_irq_set: sw int dma\n");
4657 		dma_cntl |= TRAP_ENABLE;
4658 	}
4659 
4660 	if (rdev->family >= CHIP_CAYMAN) {
4661 		dma_cntl1 = RREG32(CAYMAN_DMA1_CNTL) & ~TRAP_ENABLE;
4662 		if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_DMA1_INDEX])) {
4663 			DRM_DEBUG("r600_irq_set: sw int dma1\n");
4664 			dma_cntl1 |= TRAP_ENABLE;
4665 		}
4666 	}
4667 
4668 	if (rdev->irq.dpm_thermal) {
4669 		DRM_DEBUG("dpm thermal\n");
4670 		thermal_int |= THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW;
4671 	}
4672 
4673 	if (rdev->irq.crtc_vblank_int[0] ||
4674 	    atomic_read(&rdev->irq.pflip[0])) {
4675 		DRM_DEBUG("evergreen_irq_set: vblank 0\n");
4676 		crtc1 |= VBLANK_INT_MASK;
4677 	}
4678 	if (rdev->irq.crtc_vblank_int[1] ||
4679 	    atomic_read(&rdev->irq.pflip[1])) {
4680 		DRM_DEBUG("evergreen_irq_set: vblank 1\n");
4681 		crtc2 |= VBLANK_INT_MASK;
4682 	}
4683 	if (rdev->irq.crtc_vblank_int[2] ||
4684 	    atomic_read(&rdev->irq.pflip[2])) {
4685 		DRM_DEBUG("evergreen_irq_set: vblank 2\n");
4686 		crtc3 |= VBLANK_INT_MASK;
4687 	}
4688 	if (rdev->irq.crtc_vblank_int[3] ||
4689 	    atomic_read(&rdev->irq.pflip[3])) {
4690 		DRM_DEBUG("evergreen_irq_set: vblank 3\n");
4691 		crtc4 |= VBLANK_INT_MASK;
4692 	}
4693 	if (rdev->irq.crtc_vblank_int[4] ||
4694 	    atomic_read(&rdev->irq.pflip[4])) {
4695 		DRM_DEBUG("evergreen_irq_set: vblank 4\n");
4696 		crtc5 |= VBLANK_INT_MASK;
4697 	}
4698 	if (rdev->irq.crtc_vblank_int[5] ||
4699 	    atomic_read(&rdev->irq.pflip[5])) {
4700 		DRM_DEBUG("evergreen_irq_set: vblank 5\n");
4701 		crtc6 |= VBLANK_INT_MASK;
4702 	}
4703 	if (rdev->irq.hpd[0]) {
4704 		DRM_DEBUG("evergreen_irq_set: hpd 1\n");
4705 		hpd1 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
4706 	}
4707 	if (rdev->irq.hpd[1]) {
4708 		DRM_DEBUG("evergreen_irq_set: hpd 2\n");
4709 		hpd2 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
4710 	}
4711 	if (rdev->irq.hpd[2]) {
4712 		DRM_DEBUG("evergreen_irq_set: hpd 3\n");
4713 		hpd3 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
4714 	}
4715 	if (rdev->irq.hpd[3]) {
4716 		DRM_DEBUG("evergreen_irq_set: hpd 4\n");
4717 		hpd4 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
4718 	}
4719 	if (rdev->irq.hpd[4]) {
4720 		DRM_DEBUG("evergreen_irq_set: hpd 5\n");
4721 		hpd5 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
4722 	}
4723 	if (rdev->irq.hpd[5]) {
4724 		DRM_DEBUG("evergreen_irq_set: hpd 6\n");
4725 		hpd6 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
4726 	}
4727 	if (rdev->irq.afmt[0]) {
4728 		DRM_DEBUG("evergreen_irq_set: hdmi 0\n");
4729 		afmt1 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4730 	}
4731 	if (rdev->irq.afmt[1]) {
4732 		DRM_DEBUG("evergreen_irq_set: hdmi 1\n");
4733 		afmt2 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4734 	}
4735 	if (rdev->irq.afmt[2]) {
4736 		DRM_DEBUG("evergreen_irq_set: hdmi 2\n");
4737 		afmt3 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4738 	}
4739 	if (rdev->irq.afmt[3]) {
4740 		DRM_DEBUG("evergreen_irq_set: hdmi 3\n");
4741 		afmt4 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4742 	}
4743 	if (rdev->irq.afmt[4]) {
4744 		DRM_DEBUG("evergreen_irq_set: hdmi 4\n");
4745 		afmt5 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4746 	}
4747 	if (rdev->irq.afmt[5]) {
4748 		DRM_DEBUG("evergreen_irq_set: hdmi 5\n");
4749 		afmt6 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4750 	}
4751 
4752 	if (rdev->family >= CHIP_CAYMAN) {
4753 		cayman_cp_int_cntl_setup(rdev, 0, cp_int_cntl);
4754 		cayman_cp_int_cntl_setup(rdev, 1, cp_int_cntl1);
4755 		cayman_cp_int_cntl_setup(rdev, 2, cp_int_cntl2);
4756 	} else
4757 		WREG32(CP_INT_CNTL, cp_int_cntl);
4758 
4759 	WREG32(DMA_CNTL, dma_cntl);
4760 
4761 	if (rdev->family >= CHIP_CAYMAN)
4762 		WREG32(CAYMAN_DMA1_CNTL, dma_cntl1);
4763 
4764 	WREG32(GRBM_INT_CNTL, grbm_int_cntl);
4765 
4766 	WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, crtc1);
4767 	WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, crtc2);
4768 	if (rdev->num_crtc >= 4) {
4769 		WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, crtc3);
4770 		WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, crtc4);
4771 	}
4772 	if (rdev->num_crtc >= 6) {
4773 		WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, crtc5);
4774 		WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, crtc6);
4775 	}
4776 
4777 	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET,
4778 	       GRPH_PFLIP_INT_MASK);
4779 	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET,
4780 	       GRPH_PFLIP_INT_MASK);
4781 	if (rdev->num_crtc >= 4) {
4782 		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET,
4783 		       GRPH_PFLIP_INT_MASK);
4784 		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET,
4785 		       GRPH_PFLIP_INT_MASK);
4786 	}
4787 	if (rdev->num_crtc >= 6) {
4788 		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET,
4789 		       GRPH_PFLIP_INT_MASK);
4790 		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET,
4791 		       GRPH_PFLIP_INT_MASK);
4792 	}
4793 
4794 	WREG32(DC_HPD1_INT_CONTROL, hpd1);
4795 	WREG32(DC_HPD2_INT_CONTROL, hpd2);
4796 	WREG32(DC_HPD3_INT_CONTROL, hpd3);
4797 	WREG32(DC_HPD4_INT_CONTROL, hpd4);
4798 	WREG32(DC_HPD5_INT_CONTROL, hpd5);
4799 	WREG32(DC_HPD6_INT_CONTROL, hpd6);
4800 	if (rdev->family == CHIP_ARUBA)
4801 		WREG32(TN_CG_THERMAL_INT_CTRL, thermal_int);
4802 	else
4803 		WREG32(CG_THERMAL_INT, thermal_int);
4804 
4805 	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, afmt1);
4806 	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, afmt2);
4807 	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, afmt3);
4808 	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, afmt4);
4809 	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, afmt5);
4810 	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, afmt6);
4811 
4812 	/* posting read */
4813 	RREG32(SRBM_STATUS);
4814 
4815 	return 0;
4816 }
4817 
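/*
 * evergreen_irq_ack - latch and acknowledge display interrupt status
 *
 * Snapshots the DISP_INTERRUPT_STATUS* and per-CRTC GRPH/AFMT status
 * registers into rdev->irq.stat_regs.evergreen, then writes the
 * matching ACK/CLEAR bits back so the hardware can post new events.
 * The cached copies are later consumed by evergreen_irq_process().
 */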
4818 static void evergreen_irq_ack(struct radeon_device *rdev)
4819 {
4820 	u32 tmp;
4821 
4822 	rdev->irq.stat_regs.evergreen.disp_int = RREG32(DISP_INTERRUPT_STATUS);
4823 	rdev->irq.stat_regs.evergreen.disp_int_cont = RREG32(DISP_INTERRUPT_STATUS_CONTINUE);
4824 	rdev->irq.stat_regs.evergreen.disp_int_cont2 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE2);
4825 	rdev->irq.stat_regs.evergreen.disp_int_cont3 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE3);
4826 	rdev->irq.stat_regs.evergreen.disp_int_cont4 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE4);
4827 	rdev->irq.stat_regs.evergreen.disp_int_cont5 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE5);
4828 	rdev->irq.stat_regs.evergreen.d1grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
4829 	rdev->irq.stat_regs.evergreen.d2grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
4830 	if (rdev->num_crtc >= 4) {
4831 		rdev->irq.stat_regs.evergreen.d3grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
4832 		rdev->irq.stat_regs.evergreen.d4grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
4833 	}
4834 	if (rdev->num_crtc >= 6) {
4835 		rdev->irq.stat_regs.evergreen.d5grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
4836 		rdev->irq.stat_regs.evergreen.d6grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);
4837 	}
4838 
4839 	rdev->irq.stat_regs.evergreen.afmt_status1 = RREG32(AFMT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
4840 	rdev->irq.stat_regs.evergreen.afmt_status2 = RREG32(AFMT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
4841 	rdev->irq.stat_regs.evergreen.afmt_status3 = RREG32(AFMT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
4842 	rdev->irq.stat_regs.evergreen.afmt_status4 = RREG32(AFMT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
4843 	rdev->irq.stat_regs.evergreen.afmt_status5 = RREG32(AFMT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
4844 	rdev->irq.stat_regs.evergreen.afmt_status6 = RREG32(AFMT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);
4845 
4846 	if (rdev->irq.stat_regs.evergreen.d1grph_int & GRPH_PFLIP_INT_OCCURRED)
4847 		WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4848 	if (rdev->irq.stat_regs.evergreen.d2grph_int & GRPH_PFLIP_INT_OCCURRED)
4849 		WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4850 	if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT)
4851 		WREG32(VBLANK_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VBLANK_ACK);
4852 	if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT)
4853 		WREG32(VLINE_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VLINE_ACK);
4854 	if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT)
4855 		WREG32(VBLANK_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VBLANK_ACK);
4856 	if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT)
4857 		WREG32(VLINE_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VLINE_ACK);
4858 
4859 	if (rdev->num_crtc >= 4) {
4860 		if (rdev->irq.stat_regs.evergreen.d3grph_int & GRPH_PFLIP_INT_OCCURRED)
4861 			WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4862 		if (rdev->irq.stat_regs.evergreen.d4grph_int & GRPH_PFLIP_INT_OCCURRED)
4863 			WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4864 		if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT)
4865 			WREG32(VBLANK_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VBLANK_ACK);
4866 		if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT)
4867 			WREG32(VLINE_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VLINE_ACK);
4868 		if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT)
4869 			WREG32(VBLANK_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VBLANK_ACK);
4870 		if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT)
4871 			WREG32(VLINE_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VLINE_ACK);
4872 	}
4873 
4874 	if (rdev->num_crtc >= 6) {
4875 		if (rdev->irq.stat_regs.evergreen.d5grph_int & GRPH_PFLIP_INT_OCCURRED)
4876 			WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4877 		if (rdev->irq.stat_regs.evergreen.d6grph_int & GRPH_PFLIP_INT_OCCURRED)
4878 			WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4879 		if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT)
4880 			WREG32(VBLANK_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VBLANK_ACK);
4881 		if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT)
4882 			WREG32(VLINE_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VLINE_ACK);
4883 		if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT)
4884 			WREG32(VBLANK_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VBLANK_ACK);
4885 		if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT)
4886 			WREG32(VLINE_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VLINE_ACK);
4887 	}
4888 
4889 	if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT) {
4890 		tmp = RREG32(DC_HPD1_INT_CONTROL);
4891 		tmp |= DC_HPDx_INT_ACK;
4892 		WREG32(DC_HPD1_INT_CONTROL, tmp);
4893 	}
4894 	if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT) {
4895 		tmp = RREG32(DC_HPD2_INT_CONTROL);
4896 		tmp |= DC_HPDx_INT_ACK;
4897 		WREG32(DC_HPD2_INT_CONTROL, tmp);
4898 	}
4899 	if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT) {
4900 		tmp = RREG32(DC_HPD3_INT_CONTROL);
4901 		tmp |= DC_HPDx_INT_ACK;
4902 		WREG32(DC_HPD3_INT_CONTROL, tmp);
4903 	}
4904 	if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT) {
4905 		tmp = RREG32(DC_HPD4_INT_CONTROL);
4906 		tmp |= DC_HPDx_INT_ACK;
4907 		WREG32(DC_HPD4_INT_CONTROL, tmp);
4908 	}
4909 	if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT) {
4910 		tmp = RREG32(DC_HPD5_INT_CONTROL);
4911 		tmp |= DC_HPDx_INT_ACK;
4912 		WREG32(DC_HPD5_INT_CONTROL, tmp);
4913 	}
4914 	if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT) {
4915 		tmp = RREG32(DC_HPD6_INT_CONTROL);
4916 		tmp |= DC_HPDx_INT_ACK;
4917 		WREG32(DC_HPD6_INT_CONTROL, tmp);
4918 	}
4919 
4920 	if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_RX_INTERRUPT) {
4921 		tmp = RREG32(DC_HPD1_INT_CONTROL);
4922 		tmp |= DC_HPDx_RX_INT_ACK;
4923 		WREG32(DC_HPD1_INT_CONTROL, tmp);
4924 	}
4925 	if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_RX_INTERRUPT) {
4926 		tmp = RREG32(DC_HPD2_INT_CONTROL);
4927 		tmp |= DC_HPDx_RX_INT_ACK;
4928 		WREG32(DC_HPD2_INT_CONTROL, tmp);
4929 	}
4930 	if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_RX_INTERRUPT) {
4931 		tmp = RREG32(DC_HPD3_INT_CONTROL);
4932 		tmp |= DC_HPDx_RX_INT_ACK;
4933 		WREG32(DC_HPD3_INT_CONTROL, tmp);
4934 	}
4935 	if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_RX_INTERRUPT) {
4936 		tmp = RREG32(DC_HPD4_INT_CONTROL);
4937 		tmp |= DC_HPDx_RX_INT_ACK;
4938 		WREG32(DC_HPD4_INT_CONTROL, tmp);
4939 	}
4940 	if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_RX_INTERRUPT) {
4941 		tmp = RREG32(DC_HPD5_INT_CONTROL);
4942 		tmp |= DC_HPDx_RX_INT_ACK;
4943 		WREG32(DC_HPD5_INT_CONTROL, tmp);
4944 	}
4945 	if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_RX_INTERRUPT) {
4946 		tmp = RREG32(DC_HPD6_INT_CONTROL);
4947 		tmp |= DC_HPDx_RX_INT_ACK;
4948 		WREG32(DC_HPD6_INT_CONTROL, tmp);
4949 	}
4950 
4951 	if (rdev->irq.stat_regs.evergreen.afmt_status1 & AFMT_AZ_FORMAT_WTRIG) {
4952 		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET);
4953 		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4954 		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, tmp);
4955 	}
4956 	if (rdev->irq.stat_regs.evergreen.afmt_status2 & AFMT_AZ_FORMAT_WTRIG) {
4957 		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET);
4958 		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4959 		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, tmp);
4960 	}
4961 	if (rdev->irq.stat_regs.evergreen.afmt_status3 & AFMT_AZ_FORMAT_WTRIG) {
4962 		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET);
4963 		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4964 		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, tmp);
4965 	}
4966 	if (rdev->irq.stat_regs.evergreen.afmt_status4 & AFMT_AZ_FORMAT_WTRIG) {
4967 		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET);
4968 		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4969 		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, tmp);
4970 	}
4971 	if (rdev->irq.stat_regs.evergreen.afmt_status5 & AFMT_AZ_FORMAT_WTRIG) {
4972 		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET);
4973 		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4974 		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, tmp);
4975 	}
4976 	if (rdev->irq.stat_regs.evergreen.afmt_status6 & AFMT_AZ_FORMAT_WTRIG) {
4977 		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET);
4978 		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4979 		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, tmp);
4980 	}
4981 }
4982 
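/*
 * evergreen_irq_disable - mask and flush pending interrupts
 *
 * Disables interrupt generation, gives in-flight interrupts ~1ms to
 * land, acks anything still pending and then clears the per-source
 * interrupt state.
 */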
4983 static void evergreen_irq_disable(struct radeon_device *rdev)
4984 {
4985 	r600_disable_interrupts(rdev);
4986 	/* Wait and acknowledge irq */
4987 	mdelay(1);
4988 	evergreen_irq_ack(rdev);
4989 	evergreen_disable_interrupt_state(rdev);
4990 }
4991 
4992 void evergreen_irq_suspend(struct radeon_device *rdev)
4993 {
4994 	evergreen_irq_disable(rdev);
4995 	r600_rlc_stop(rdev);
4996 }
4997 
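/*
 * evergreen_get_ih_wptr - fetch the current IH ring write pointer
 *
 * Uses the write-back buffer copy when write-back is enabled (saving
 * an MMIO read), otherwise reads IH_RB_WPTR directly.  On overflow,
 * the read pointer is resynchronized past the oldest entry that was
 * not overwritten and the overflow flag is cleared.
 */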
4998 static u32 evergreen_get_ih_wptr(struct radeon_device *rdev)
4999 {
5000 	u32 wptr, tmp;
5001 
5002 	if (rdev->wb.enabled)
5003 		wptr = le32_to_cpu(rdev->wb.wb[R600_WB_IH_WPTR_OFFSET/4]);
5004 	else
5005 		wptr = RREG32(IH_RB_WPTR);
5006 
5007 	if (wptr & RB_OVERFLOW) {
5008 		wptr &= ~RB_OVERFLOW;
5009 		/* When a ring buffer overflow happens, start parsing interrupts
5010 		 * from the last vector that was not overwritten (wptr + 16).
5011 		 * Hopefully this allows us to catch up.
5012 		 */
5013 		dev_warn(rdev->dev, "IH ring buffer overflow (0x%08X, 0x%08X, 0x%08X)\n",
5014 			 wptr, rdev->ih.rptr, (wptr + 16) & rdev->ih.ptr_mask);
5015 		rdev->ih.rptr = (wptr + 16) & rdev->ih.ptr_mask;
5016 		tmp = RREG32(IH_RB_CNTL);
5017 		tmp |= IH_WPTR_OVERFLOW_CLEAR;
5018 		WREG32(IH_RB_CNTL, tmp);
5019 	}
5020 	return (wptr & rdev->ih.ptr_mask);
5021 }
5022 
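/*
 * evergreen_irq_process - drain the IH ring and dispatch each event
 *
 * Walks every vector between rptr and wptr, fanning events out to the
 * vblank/vline, page-flip, hotplug, HDMI audio, fence, VM-fault and
 * thermal handlers.  Deferred work (DP, hotplug, audio, thermal) is
 * scheduled once the ring has been drained, and the ring is re-checked
 * before returning in case the wptr moved during processing.
 */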
5023 irqreturn_t evergreen_irq_process(struct radeon_device *rdev)
5024 {
5025 	u32 wptr;
5026 	u32 rptr;
5027 	u32 src_id, src_data;
5028 	u32 ring_index;
5029 	bool queue_hotplug = false;
5030 	bool queue_hdmi = false;
5031 	bool queue_dp = false;
5032 	bool queue_thermal = false;
5033 	u32 status, addr;
5034 
5035 	if (!rdev->ih.enabled || rdev->shutdown)
5036 		return IRQ_NONE;
5037 
5038 	wptr = evergreen_get_ih_wptr(rdev);
5039 
5040 restart_ih:
5041 	/* is somebody else already processing irqs? */
5042 	if (atomic_xchg(&rdev->ih.lock, 1))
5043 		return IRQ_NONE;
5044 
5045 	rptr = rdev->ih.rptr;
5046 	DRM_DEBUG_VBLANK("evergreen_irq_process start: rptr %d, wptr %d\n", rptr, wptr);
5047 
5048 	/* Order reading of wptr vs. reading of IH ring data */
5049 	rmb();
5050 
5051 	/* display interrupts */
5052 	evergreen_irq_ack(rdev);
5053 
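	/* Each IH ring entry is one 16-byte vector; the low byte of
	 * dword 0 holds the source id and dword 1 the source data.
	 */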
5054 	while (rptr != wptr) {
5055 		/* wptr/rptr are in bytes! */
5056 		ring_index = rptr / 4;
5057 		src_id =  le32_to_cpu(rdev->ih.ring[ring_index]) & 0xff;
5058 		src_data = le32_to_cpu(rdev->ih.ring[ring_index + 1]) & 0xfffffff;
5059 
5060 		switch (src_id) {
5061 		case 1: /* D1 vblank/vline */
5062 			switch (src_data) {
5063 			case 0: /* D1 vblank */
5064 				if (!(rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT))
5065 					DRM_DEBUG("IH: D1 vblank - IH event w/o asserted irq bit?\n");
5066 
5067 				if (rdev->irq.crtc_vblank_int[0]) {
5068 					drm_handle_vblank(rdev->ddev, 0);
5069 					rdev->pm.vblank_sync = true;
5070 					wake_up(&rdev->irq.vblank_queue);
5071 				}
5072 				if (atomic_read(&rdev->irq.pflip[0]))
5073 					radeon_crtc_handle_vblank(rdev, 0);
5074 				rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VBLANK_INTERRUPT;
5075 				DRM_DEBUG_VBLANK("IH: D1 vblank\n");
5076 
5077 				break;
5078 			case 1: /* D1 vline */
5079 				if (!(rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT))
5080 					DRM_DEBUG("IH: D1 vline - IH event w/o asserted irq bit?\n");
5081 
5082 				rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VLINE_INTERRUPT;
5083 				DRM_DEBUG_VBLANK("IH: D1 vline\n");
5084 
5085 				break;
5086 			default:
5087 				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5088 				break;
5089 			}
5090 			break;
5091 		case 2: /* D2 vblank/vline */
5092 			switch (src_data) {
5093 			case 0: /* D2 vblank */
5094 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT))
5095 					DRM_DEBUG("IH: D2 vblank - IH event w/o asserted irq bit?\n");
5096 
5097 				if (rdev->irq.crtc_vblank_int[1]) {
5098 					drm_handle_vblank(rdev->ddev, 1);
5099 					rdev->pm.vblank_sync = true;
5100 					wake_up(&rdev->irq.vblank_queue);
5101 				}
5102 				if (atomic_read(&rdev->irq.pflip[1]))
5103 					radeon_crtc_handle_vblank(rdev, 1);
5104 				rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VBLANK_INTERRUPT;
5105 				DRM_DEBUG_VBLANK("IH: D2 vblank\n");
5106 
5107 				break;
5108 			case 1: /* D2 vline */
5109 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT))
5110 					DRM_DEBUG("IH: D2 vline - IH event w/o asserted irq bit?\n");
5111 
5112 				rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VLINE_INTERRUPT;
5113 				DRM_DEBUG_VBLANK("IH: D2 vline\n");
5114 
5115 				break;
5116 			default:
5117 				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5118 				break;
5119 			}
5120 			break;
5121 		case 3: /* D3 vblank/vline */
5122 			switch (src_data) {
5123 			case 0: /* D3 vblank */
5124 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT))
5125 					DRM_DEBUG("IH: D3 vblank - IH event w/o asserted irq bit?\n");
5126 
5127 				if (rdev->irq.crtc_vblank_int[2]) {
5128 					drm_handle_vblank(rdev->ddev, 2);
5129 					rdev->pm.vblank_sync = true;
5130 					wake_up(&rdev->irq.vblank_queue);
5131 				}
5132 				if (atomic_read(&rdev->irq.pflip[2]))
5133 					radeon_crtc_handle_vblank(rdev, 2);
5134 				rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VBLANK_INTERRUPT;
5135 				DRM_DEBUG_VBLANK("IH: D3 vblank\n");
5136 
5137 				break;
5138 			case 1: /* D3 vline */
5139 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT))
5140 					DRM_DEBUG("IH: D3 vline - IH event w/o asserted irq bit?\n");
5141 
5142 				rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VLINE_INTERRUPT;
5143 				DRM_DEBUG_VBLANK("IH: D3 vline\n");
5144 
5145 				break;
5146 			default:
5147 				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5148 				break;
5149 			}
5150 			break;
5151 		case 4: /* D4 vblank/vline */
5152 			switch (src_data) {
5153 			case 0: /* D4 vblank */
5154 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT))
5155 					DRM_DEBUG("IH: D4 vblank - IH event w/o asserted irq bit?\n");
5156 
5157 				if (rdev->irq.crtc_vblank_int[3]) {
5158 					drm_handle_vblank(rdev->ddev, 3);
5159 					rdev->pm.vblank_sync = true;
5160 					wake_up(&rdev->irq.vblank_queue);
5161 				}
5162 				if (atomic_read(&rdev->irq.pflip[3]))
5163 					radeon_crtc_handle_vblank(rdev, 3);
5164 				rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VBLANK_INTERRUPT;
5165 				DRM_DEBUG_VBLANK("IH: D4 vblank\n");
5166 
5167 				break;
5168 			case 1: /* D4 vline */
5169 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT))
5170 					DRM_DEBUG("IH: D4 vline - IH event w/o asserted irq bit?\n");
5171 
5172 				rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VLINE_INTERRUPT;
5173 				DRM_DEBUG_VBLANK("IH: D4 vline\n");
5174 
5175 				break;
5176 			default:
5177 				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5178 				break;
5179 			}
5180 			break;
5181 		case 5: /* D5 vblank/vline */
5182 			switch (src_data) {
5183 			case 0: /* D5 vblank */
5184 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT))
5185 					DRM_DEBUG("IH: D5 vblank - IH event w/o asserted irq bit?\n");
5186 
5187 				if (rdev->irq.crtc_vblank_int[4]) {
5188 					drm_handle_vblank(rdev->ddev, 4);
5189 					rdev->pm.vblank_sync = true;
5190 					wake_up(&rdev->irq.vblank_queue);
5191 				}
5192 				if (atomic_read(&rdev->irq.pflip[4]))
5193 					radeon_crtc_handle_vblank(rdev, 4);
5194 				rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VBLANK_INTERRUPT;
5195 				DRM_DEBUG_VBLANK("IH: D5 vblank\n");
5196 
5197 				break;
5198 			case 1: /* D5 vline */
5199 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT))
5200 					DRM_DEBUG("IH: D5 vline - IH event w/o asserted irq bit?\n");
5201 
5202 				rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VLINE_INTERRUPT;
5203 				DRM_DEBUG_VBLANK("IH: D5 vline\n");
5204 
5205 				break;
5206 			default:
5207 				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5208 				break;
5209 			}
5210 			break;
5211 		case 6: /* D6 vblank/vline */
5212 			switch (src_data) {
5213 			case 0: /* D6 vblank */
5214 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT))
5215 					DRM_DEBUG("IH: D6 vblank - IH event w/o asserted irq bit?\n");
5216 
5217 				if (rdev->irq.crtc_vblank_int[5]) {
5218 					drm_handle_vblank(rdev->ddev, 5);
5219 					rdev->pm.vblank_sync = true;
5220 					wake_up(&rdev->irq.vblank_queue);
5221 				}
5222 				if (atomic_read(&rdev->irq.pflip[5]))
5223 					radeon_crtc_handle_vblank(rdev, 5);
5224 				rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VBLANK_INTERRUPT;
5225 				DRM_DEBUG_VBLANK("IH: D6 vblank\n");
5226 
5227 				break;
5228 			case 1: /* D6 vline */
5229 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT))
5230 					DRM_DEBUG("IH: D6 vline - IH event w/o asserted irq bit?\n");
5231 
5232 				rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VLINE_INTERRUPT;
5233 				DRM_DEBUG_VBLANK("IH: D6 vline\n");
5234 
5235 				break;
5236 			default:
5237 				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5238 				break;
5239 			}
5240 			break;
5241 		case 8: /* D1 page flip */
5242 		case 10: /* D2 page flip */
5243 		case 12: /* D3 page flip */
5244 		case 14: /* D4 page flip */
5245 		case 16: /* D5 page flip */
5246 		case 18: /* D6 page flip */
5247 			DRM_DEBUG_VBLANK("IH: D%d flip\n", ((src_id - 8) >> 1) + 1);
5248 			if (radeon_use_pflipirq > 0)
5249 				radeon_crtc_handle_flip(rdev, (src_id - 8) >> 1);
5250 			break;
5251 		case 42: /* HPD hotplug */
5252 			switch (src_data) {
5253 			case 0:
5254 				if (!(rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT))
5255 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5256 
5257 				rdev->irq.stat_regs.evergreen.disp_int &= ~DC_HPD1_INTERRUPT;
5258 				queue_hotplug = true;
5259 				DRM_DEBUG("IH: HPD1\n");
5260 				break;
5261 			case 1:
5262 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT))
5263 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5264 
5265 				rdev->irq.stat_regs.evergreen.disp_int_cont &= ~DC_HPD2_INTERRUPT;
5266 				queue_hotplug = true;
5267 				DRM_DEBUG("IH: HPD2\n");
5268 				break;
5269 			case 2:
5270 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT))
5271 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5272 
5273 				rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~DC_HPD3_INTERRUPT;
5274 				queue_hotplug = true;
5275 				DRM_DEBUG("IH: HPD3\n");
5276 				break;
5277 			case 3:
5278 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT))
5279 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5280 
5281 				rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~DC_HPD4_INTERRUPT;
5282 				queue_hotplug = true;
5283 				DRM_DEBUG("IH: HPD4\n");
5284 				break;
5285 			case 4:
5286 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT))
5287 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5288 
5289 				rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~DC_HPD5_INTERRUPT;
5290 				queue_hotplug = true;
5291 				DRM_DEBUG("IH: HPD5\n");
5292 				break;
5293 			case 5:
5294 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT))
5295 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5296 
5297 				rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~DC_HPD6_INTERRUPT;
5298 				queue_hotplug = true;
5299 				DRM_DEBUG("IH: HPD6\n");
5300 				break;
5301 			case 6:
5302 				if (!(rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_RX_INTERRUPT))
5303 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5304 
5305 				rdev->irq.stat_regs.evergreen.disp_int &= ~DC_HPD1_RX_INTERRUPT;
5306 				queue_dp = true;
5307 				DRM_DEBUG("IH: HPD_RX 1\n");
5308 				break;
5309 			case 7:
5310 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_RX_INTERRUPT))
5311 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5312 
5313 				rdev->irq.stat_regs.evergreen.disp_int_cont &= ~DC_HPD2_RX_INTERRUPT;
5314 				queue_dp = true;
5315 				DRM_DEBUG("IH: HPD_RX 2\n");
5316 				break;
5317 			case 8:
5318 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_RX_INTERRUPT))
5319 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5320 
5321 				rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~DC_HPD3_RX_INTERRUPT;
5322 				queue_dp = true;
5323 				DRM_DEBUG("IH: HPD_RX 3\n");
5324 				break;
5325 			case 9:
5326 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_RX_INTERRUPT))
5327 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5328 
5329 				rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~DC_HPD4_RX_INTERRUPT;
5330 				queue_dp = true;
5331 				DRM_DEBUG("IH: HPD_RX 4\n");
5332 				break;
5333 			case 10:
5334 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_RX_INTERRUPT))
5335 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5336 
5337 				rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~DC_HPD5_RX_INTERRUPT;
5338 				queue_dp = true;
5339 				DRM_DEBUG("IH: HPD_RX 5\n");
5340 				break;
5341 			case 11:
5342 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_RX_INTERRUPT))
5343 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5344 
5345 				rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~DC_HPD6_RX_INTERRUPT;
5346 				queue_dp = true;
5347 				DRM_DEBUG("IH: HPD_RX 6\n");
5348 				break;
5349 			default:
5350 				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5351 				break;
5352 			}
5353 			break;
5354 		case 44: /* hdmi */
5355 			switch (src_data) {
5356 			case 0:
5357 				if (!(rdev->irq.stat_regs.evergreen.afmt_status1 & AFMT_AZ_FORMAT_WTRIG))
5358 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5359 
5360 				rdev->irq.stat_regs.evergreen.afmt_status1 &= ~AFMT_AZ_FORMAT_WTRIG;
5361 				queue_hdmi = true;
5362 				DRM_DEBUG("IH: HDMI0\n");
5363 				break;
5364 			case 1:
5365 				if (!(rdev->irq.stat_regs.evergreen.afmt_status2 & AFMT_AZ_FORMAT_WTRIG))
5366 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5367 
5368 				rdev->irq.stat_regs.evergreen.afmt_status2 &= ~AFMT_AZ_FORMAT_WTRIG;
5369 				queue_hdmi = true;
5370 				DRM_DEBUG("IH: HDMI1\n");
5371 				break;
5372 			case 2:
5373 				if (!(rdev->irq.stat_regs.evergreen.afmt_status3 & AFMT_AZ_FORMAT_WTRIG))
5374 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5375 
5376 				rdev->irq.stat_regs.evergreen.afmt_status3 &= ~AFMT_AZ_FORMAT_WTRIG;
5377 				queue_hdmi = true;
5378 				DRM_DEBUG("IH: HDMI2\n");
5379 				break;
5380 			case 3:
5381 				if (!(rdev->irq.stat_regs.evergreen.afmt_status4 & AFMT_AZ_FORMAT_WTRIG))
5382 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5383 
5384 				rdev->irq.stat_regs.evergreen.afmt_status4 &= ~AFMT_AZ_FORMAT_WTRIG;
5385 				queue_hdmi = true;
5386 				DRM_DEBUG("IH: HDMI3\n");
5387 				break;
5388 			case 4:
5389 				if (!(rdev->irq.stat_regs.evergreen.afmt_status5 & AFMT_AZ_FORMAT_WTRIG))
5390 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5391 
5392 				rdev->irq.stat_regs.evergreen.afmt_status5 &= ~AFMT_AZ_FORMAT_WTRIG;
5393 				queue_hdmi = true;
5394 				DRM_DEBUG("IH: HDMI4\n");
5395 				break;
5396 			case 5:
5397 				if (!(rdev->irq.stat_regs.evergreen.afmt_status6 & AFMT_AZ_FORMAT_WTRIG))
5398 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5399 
5400 				rdev->irq.stat_regs.evergreen.afmt_status6 &= ~AFMT_AZ_FORMAT_WTRIG;
5401 				queue_hdmi = true;
5402 				DRM_DEBUG("IH: HDMI5\n");
5403 				break;
5404 			default:
5405 				DRM_ERROR("Unhandled interrupt: %d %d\n", src_id, src_data);
5406 				break;
5407 			}
			break;
5408 		case 96:	/* SRBM read error */
5409 			DRM_ERROR("SRBM_READ_ERROR: 0x%x\n", RREG32(SRBM_READ_ERROR));
5410 			WREG32(SRBM_INT_ACK, 0x1);
5411 			break;
5412 		case 124: /* UVD */
5413 			DRM_DEBUG("IH: UVD int: 0x%08x\n", src_data);
5414 			radeon_fence_process(rdev, R600_RING_TYPE_UVD_INDEX);
5415 			break;
5416 		case 146:
5417 		case 147:
5418 			addr = RREG32(VM_CONTEXT1_PROTECTION_FAULT_ADDR);
5419 			status = RREG32(VM_CONTEXT1_PROTECTION_FAULT_STATUS);
5420 			/* reset addr and status */
5421 			WREG32_P(VM_CONTEXT1_CNTL2, 1, ~1);
5422 			if (addr == 0x0 && status == 0x0)
5423 				break;
5424 			dev_err(rdev->dev, "GPU fault detected: %d 0x%08x\n", src_id, src_data);
5425 			dev_err(rdev->dev, "  VM_CONTEXT1_PROTECTION_FAULT_ADDR   0x%08X\n",
5426 				addr);
5427 			dev_err(rdev->dev, "  VM_CONTEXT1_PROTECTION_FAULT_STATUS 0x%08X\n",
5428 				status);
5429 			cayman_vm_decode_fault(rdev, status, addr);
5430 			break;
5431 		case 176: /* CP_INT in ring buffer */
5432 		case 177: /* CP_INT in IB1 */
5433 		case 178: /* CP_INT in IB2 */
5434 			DRM_DEBUG("IH: CP int: 0x%08x\n", src_data);
5435 			radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
5436 			break;
5437 		case 181: /* CP EOP event */
5438 			DRM_DEBUG("IH: CP EOP\n");
5439 			if (rdev->family >= CHIP_CAYMAN) {
5440 				switch (src_data) {
5441 				case 0:
5442 					radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
5443 					break;
5444 				case 1:
5445 					radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP1_INDEX);
5446 					break;
5447 				case 2:
5448 					radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP2_INDEX);
5449 					break;
5450 				}
5451 			} else
5452 				radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
5453 			break;
5454 		case 224: /* DMA trap event */
5455 			DRM_DEBUG("IH: DMA trap\n");
5456 			radeon_fence_process(rdev, R600_RING_TYPE_DMA_INDEX);
5457 			break;
5458 		case 230: /* thermal low to high */
5459 			DRM_DEBUG("IH: thermal low to high\n");
5460 			rdev->pm.dpm.thermal.high_to_low = false;
5461 			queue_thermal = true;
5462 			break;
5463 		case 231: /* thermal high to low */
5464 			DRM_DEBUG("IH: thermal high to low\n");
5465 			rdev->pm.dpm.thermal.high_to_low = true;
5466 			queue_thermal = true;
5467 			break;
5468 		case 233: /* GUI IDLE */
5469 			DRM_DEBUG("IH: GUI idle\n");
5470 			break;
5471 		case 244: /* DMA1 trap event (cayman and newer) */
5472 			if (rdev->family >= CHIP_CAYMAN) {
5473 				DRM_DEBUG("IH: DMA1 trap\n");
5474 				radeon_fence_process(rdev, CAYMAN_RING_TYPE_DMA1_INDEX);
5475 			}
5476 			break;
5477 		default:
5478 			DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5479 			break;
5480 		}
5481 
5482 		/* wptr/rptr are in bytes! */
5483 		rptr += 16;
5484 		rptr &= rdev->ih.ptr_mask;
5485 		WREG32(IH_RB_RPTR, rptr);
5486 	}
5487 	if (queue_dp)
5488 		schedule_work(&rdev->dp_work);
5489 	if (queue_hotplug)
5490 		schedule_delayed_work(&rdev->hotplug_work, 0);
5491 	if (queue_hdmi)
5492 		schedule_work(&rdev->audio_work);
5493 	if (queue_thermal && rdev->pm.dpm_enabled)
5494 		schedule_work(&rdev->pm.dpm.thermal.work);
5495 	rdev->ih.rptr = rptr;
5496 	atomic_set(&rdev->ih.lock, 0);
5497 
5498 	/* make sure wptr hasn't changed while processing */
5499 	wptr = evergreen_get_ih_wptr(rdev);
5500 	if (wptr != rptr)
5501 		goto restart_ih;
5502 
5503 	return IRQ_HANDLED;
5504 }
5505 
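/*
 * evergreen_uvd_init - one-time UVD setup
 *
 * Initializes the UVD block and its ring descriptor.  On failure the
 * UVD capability is cleared so the rest of the driver skips it.
 */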
5506 static void evergreen_uvd_init(struct radeon_device *rdev)
5507 {
5508 	int r;
5509 
5510 	if (!rdev->has_uvd)
5511 		return;
5512 
5513 	r = radeon_uvd_init(rdev);
5514 	if (r) {
5515 		dev_err(rdev->dev, "failed UVD init (%d).\n", r);
5516 		/*
5517 		 * At this point rdev->uvd.vcpu_bo is NULL, which makes
5518 		 * uvd_v2_2_resume() fail early, so nothing happens there.
5519 		 * It is therefore pointless to go through that code path,
5520 		 * hence we disable UVD here.
5521 		 */
5522 		rdev->has_uvd = 0;
5523 		return;
5524 	}
5525 	rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_obj = NULL;
5526 	r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_UVD_INDEX], 4096);
5527 }
5528 
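/*
 * evergreen_uvd_start - resume the UVD block and start its fence ring
 *
 * On failure the UVD ring size is zeroed so that evergreen_uvd_resume()
 * becomes a no-op.
 */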
5529 static void evergreen_uvd_start(struct radeon_device *rdev)
5530 {
5531 	int r;
5532 
5533 	if (!rdev->has_uvd)
5534 		return;
5535 
5536 	r = uvd_v2_2_resume(rdev);
5537 	if (r) {
5538 		dev_err(rdev->dev, "failed UVD resume (%d).\n", r);
5539 		goto error;
5540 	}
5541 	r = radeon_fence_driver_start_ring(rdev, R600_RING_TYPE_UVD_INDEX);
5542 	if (r) {
5543 		dev_err(rdev->dev, "failed initializing UVD fences (%d).\n", r);
5544 		goto error;
5545 	}
5546 	return;
5547 
5548 error:
5549 	rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size = 0;
5550 }
5551 
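/*
 * evergreen_uvd_resume - bring the UVD ring back up
 *
 * Only runs when UVD is present and evergreen_uvd_start() succeeded,
 * i.e. the ring still has a non-zero size.
 */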
5552 static void evergreen_uvd_resume(struct radeon_device *rdev)
5553 {
5554 	struct radeon_ring *ring;
5555 	int r;
5556 
5557 	if (!rdev->has_uvd || !rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size)
5558 		return;
5559 
5560 	ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX];
5561 	r = radeon_ring_init(rdev, ring, ring->ring_size, 0, PACKET0(UVD_NO_OP, 0));
5562 	if (r) {
5563 		dev_err(rdev->dev, "failed initializing UVD ring (%d).\n", r);
5564 		return;
5565 	}
5566 	r = uvd_v1_0_init(rdev);
5567 	if (r) {
5568 		dev_err(rdev->dev, "failed initializing UVD (%d).\n", r);
5569 		return;
5570 	}
5571 }
5572 
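/*
 * evergreen_startup - bring the GPU into a working state
 *
 * Ordering matters here: VRAM scratch and the memory controller are
 * programmed first, then GART/AGP and core GPU state, followed by the
 * RLC and write-back buffers, the fence rings, the IH/IRQ setup and
 * finally the CP, DMA and UVD rings plus the IB pool and audio.
 * Shared by the init and resume paths.
 */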
5573 static int evergreen_startup(struct radeon_device *rdev)
5574 {
5575 	struct radeon_ring *ring;
5576 	int r;
5577 
5578 	/* enable pcie gen2 link */
5579 	evergreen_pcie_gen2_enable(rdev);
5580 	/* enable aspm */
5581 	evergreen_program_aspm(rdev);
5582 
5583 	/* scratch needs to be initialized before MC */
5584 	r = r600_vram_scratch_init(rdev);
5585 	if (r)
5586 		return r;
5587 
5588 	evergreen_mc_program(rdev);
5589 
5590 	if (ASIC_IS_DCE5(rdev) && !rdev->pm.dpm_enabled) {
5591 		r = ni_mc_load_microcode(rdev);
5592 		if (r) {
5593 			DRM_ERROR("Failed to load MC firmware!\n");
5594 			return r;
5595 		}
5596 	}
5597 
5598 	if (rdev->flags & RADEON_IS_AGP) {
5599 		evergreen_agp_enable(rdev);
5600 	} else {
5601 		r = evergreen_pcie_gart_enable(rdev);
5602 		if (r)
5603 			return r;
5604 	}
5605 	evergreen_gpu_init(rdev);
5606 
5607 	/* allocate rlc buffers */
5608 	if (rdev->flags & RADEON_IS_IGP) {
5609 		rdev->rlc.reg_list = sumo_rlc_save_restore_register_list;
5610 		rdev->rlc.reg_list_size =
5611 			(u32)ARRAY_SIZE(sumo_rlc_save_restore_register_list);
5612 		rdev->rlc.cs_data = evergreen_cs_data;
5613 		r = sumo_rlc_init(rdev);
5614 		if (r) {
5615 			DRM_ERROR("Failed to init rlc BOs!\n");
5616 			return r;
5617 		}
5618 	}
5619 
5620 	/* allocate wb buffer */
5621 	r = radeon_wb_init(rdev);
5622 	if (r)
5623 		return r;
5624 
5625 	r = radeon_fence_driver_start_ring(rdev, RADEON_RING_TYPE_GFX_INDEX);
5626 	if (r) {
5627 		dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r);
5628 		return r;
5629 	}
5630 
5631 	r = radeon_fence_driver_start_ring(rdev, R600_RING_TYPE_DMA_INDEX);
5632 	if (r) {
5633 		dev_err(rdev->dev, "failed initializing DMA fences (%d).\n", r);
5634 		return r;
5635 	}
5636 
5637 	evergreen_uvd_start(rdev);
5638 
5639 	/* Enable IRQ */
5640 	if (!rdev->irq.installed) {
5641 		r = radeon_irq_kms_init(rdev);
5642 		if (r)
5643 			return r;
5644 	}
5645 
5646 	r = r600_irq_init(rdev);
5647 	if (r) {
5648 		DRM_ERROR("radeon: IH init failed (%d).\n", r);
5649 		radeon_irq_kms_fini(rdev);
5650 		return r;
5651 	}
5652 	evergreen_irq_set(rdev);
5653 
5654 	ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
5655 	r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP_RPTR_OFFSET,
5656 			     RADEON_CP_PACKET2);
5657 	if (r)
5658 		return r;
5659 
5660 	ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX];
5661 	r = radeon_ring_init(rdev, ring, ring->ring_size, R600_WB_DMA_RPTR_OFFSET,
5662 			     DMA_PACKET(DMA_PACKET_NOP, 0, 0));
5663 	if (r)
5664 		return r;
5665 
5666 	r = evergreen_cp_load_microcode(rdev);
5667 	if (r)
5668 		return r;
5669 	r = evergreen_cp_resume(rdev);
5670 	if (r)
5671 		return r;
5672 	r = r600_dma_resume(rdev);
5673 	if (r)
5674 		return r;
5675 
5676 	evergreen_uvd_resume(rdev);
5677 
5678 	r = radeon_ib_pool_init(rdev);
5679 	if (r) {
5680 		dev_err(rdev->dev, "IB initialization failed (%d).\n", r);
5681 		return r;
5682 	}
5683 
5684 	r = radeon_audio_init(rdev);
5685 	if (r) {
5686 		DRM_ERROR("radeon: audio init failed\n");
5687 		return r;
5688 	}
5689 
5690 	return 0;
5691 }
5692 
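/*
 * evergreen_resume - power the GPU back up after suspend
 *
 * Re-posts the card through the ATOM init tables, restores the golden
 * registers and re-runs the common startup sequence.
 */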
5693 int evergreen_resume(struct radeon_device *rdev)
5694 {
5695 	int r;
5696 
5697 	/* reset the asic; the gfx blocks are often in a bad state
5698 	 * after the driver is unloaded or after a resume
5699 	 */
5700 	if (radeon_asic_reset(rdev))
5701 		dev_warn(rdev->dev, "GPU reset failed !\n");
5702 	/* Do not reset the GPU before posting; on rv770 hw, unlike on r500 hw,
5703 	 * posting performs the tasks necessary to bring the GPU back into
5704 	 * good shape.
5705 	 */
5706 	/* post card */
5707 	atom_asic_init(rdev->mode_info.atom_context);
5708 
5709 	/* init golden registers */
5710 	evergreen_init_golden_registers(rdev);
5711 
5712 	if (rdev->pm.pm_method == PM_METHOD_DPM)
5713 		radeon_pm_resume(rdev);
5714 
5715 	rdev->accel_working = true;
5716 	r = evergreen_startup(rdev);
5717 	if (r) {
5718 		DRM_ERROR("evergreen startup failed on resume\n");
5719 		rdev->accel_working = false;
5720 		return r;
5721 	}
5722 
5723 	return r;
5725 }
5726 
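/*
 * evergreen_suspend - quiesce the GPU before suspend
 *
 * Tears things down in roughly the reverse order of evergreen_startup():
 * audio and UVD first, then the CP and DMA engines, interrupts,
 * write-back and finally the GART.
 */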
5727 int evergreen_suspend(struct radeon_device *rdev)
5728 {
5729 	radeon_pm_suspend(rdev);
5730 	radeon_audio_fini(rdev);
5731 	if (rdev->has_uvd) {
5732 		uvd_v1_0_fini(rdev);
5733 		radeon_uvd_suspend(rdev);
5734 	}
5735 	r700_cp_stop(rdev);
5736 	r600_dma_stop(rdev);
5737 	evergreen_irq_suspend(rdev);
5738 	radeon_wb_disable(rdev);
5739 	evergreen_pcie_gart_disable(rdev);
5740 
5741 	return 0;
5742 }
5743 
5744 /* The plan is to move initialization into this function and use
5745  * helper functions so that radeon_device_init does pretty much
5746  * nothing more than call asic-specific functions. This should
5747  * also allow us to remove a bunch of callback functions such
5748  * as vram_info.
5749  */
5750 int evergreen_init(struct radeon_device *rdev)
5751 {
5752 	int r;
5753 
5754 	/* Read BIOS */
5755 	if (!radeon_get_bios(rdev)) {
5756 		if (ASIC_IS_AVIVO(rdev))
5757 			return -EINVAL;
5758 	}
5759 	/* Must be an ATOMBIOS */
5760 	if (!rdev->is_atom_bios) {
5761 		dev_err(rdev->dev, "Expecting atombios for evergreen GPU\n");
5762 		return -EINVAL;
5763 	}
5764 	r = radeon_atombios_init(rdev);
5765 	if (r)
5766 		return r;
5767 	/* reset the asic; the gfx blocks are often in a bad state
5768 	 * after the driver is unloaded or after a resume
5769 	 */
5770 	if (radeon_asic_reset(rdev))
5771 		dev_warn(rdev->dev, "GPU reset failed !\n");
5772 	/* Post card if necessary */
5773 	if (!radeon_card_posted(rdev)) {
5774 		if (!rdev->bios) {
5775 			dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n");
5776 			return -EINVAL;
5777 		}
5778 		DRM_INFO("GPU not posted. posting now...\n");
5779 		atom_asic_init(rdev->mode_info.atom_context);
5780 	}
5781 	/* init golden registers */
5782 	evergreen_init_golden_registers(rdev);
5783 	/* Initialize scratch registers */
5784 	r600_scratch_init(rdev);
5785 	/* Initialize surface registers */
5786 	radeon_surface_init(rdev);
5787 	/* Initialize clocks */
5788 	radeon_get_clock_info(rdev->ddev);
5789 	/* Fence driver */
5790 	r = radeon_fence_driver_init(rdev);
5791 	if (r)
5792 		return r;
5793 	/* initialize AGP */
5794 	if (rdev->flags & RADEON_IS_AGP) {
5795 		r = radeon_agp_init(rdev);
5796 		if (r)
5797 			radeon_agp_disable(rdev);
5798 	}
5799 	/* initialize memory controller */
5800 	r = evergreen_mc_init(rdev);
5801 	if (r)
5802 		return r;
5803 	/* Memory manager */
5804 	r = radeon_bo_init(rdev);
5805 	if (r)
5806 		return r;
5807 
5808 	if (ASIC_IS_DCE5(rdev)) {
5809 		if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw || !rdev->mc_fw) {
5810 			r = ni_init_microcode(rdev);
5811 			if (r) {
5812 				DRM_ERROR("Failed to load firmware!\n");
5813 				return r;
5814 			}
5815 		}
5816 	} else {
5817 		if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw) {
5818 			r = r600_init_microcode(rdev);
5819 			if (r) {
5820 				DRM_ERROR("Failed to load firmware!\n");
5821 				return r;
5822 			}
5823 		}
5824 	}
5825 
5826 	/* Initialize power management */
5827 	radeon_pm_init(rdev);
5828 
5829 	rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ring_obj = NULL;
5830 	r600_ring_init(rdev, &rdev->ring[RADEON_RING_TYPE_GFX_INDEX], 1024 * 1024);
5831 
5832 	rdev->ring[R600_RING_TYPE_DMA_INDEX].ring_obj = NULL;
5833 	r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_DMA_INDEX], 64 * 1024);
5834 
5835 	evergreen_uvd_init(rdev);
5836 
5837 	rdev->ih.ring_obj = NULL;
5838 	r600_ih_ring_init(rdev, 64 * 1024);
5839 
5840 	r = r600_pcie_gart_init(rdev);
5841 	if (r)
5842 		return r;
5843 
5844 #ifdef __DragonFly__
5845 	/*
5846 	   There are unresolved crashes on evergreen hardware, so
5847 	   tell userland that acceleration is not working properly.
5848 	   Bug report: https://bugs.dragonflybsd.org/issues/3198
5849 	*/
5850 	rdev->accel_working = false;
5851 	DRM_ERROR("GPU acceleration disabled for now on DragonFly\n");
5852 #else
5853 	rdev->accel_working = true;
5854 #endif
5855 	r = evergreen_startup(rdev);
5856 	if (r) {
5857 		dev_err(rdev->dev, "disabling GPU acceleration\n");
5858 		r700_cp_fini(rdev);
5859 		r600_dma_fini(rdev);
5860 		r600_irq_fini(rdev);
5861 		if (rdev->flags & RADEON_IS_IGP)
5862 			sumo_rlc_fini(rdev);
5863 		radeon_wb_fini(rdev);
5864 		radeon_ib_pool_fini(rdev);
5865 		radeon_irq_kms_fini(rdev);
5866 		evergreen_pcie_gart_fini(rdev);
5867 		rdev->accel_working = false;
5868 	}
5869 
5870 	/* Don't start up if the MC ucode is missing on BTC parts.
5871 	 * The default clocks and voltages before the MC ucode
5872 	 * is loaded are not sufficient for advanced operations.
5873 	 */
5874 	if (ASIC_IS_DCE5(rdev)) {
5875 		if (!rdev->mc_fw && !(rdev->flags & RADEON_IS_IGP)) {
5876 			DRM_ERROR("radeon: MC ucode required for NI+.\n");
5877 			return -EINVAL;
5878 		}
5879 	}
5880 
5881 	return 0;
5882 }
5883 
5884 void evergreen_fini(struct radeon_device *rdev)
5885 {
5886 	radeon_pm_fini(rdev);
5887 	radeon_audio_fini(rdev);
5888 	r700_cp_fini(rdev);
5889 	r600_dma_fini(rdev);
5890 	r600_irq_fini(rdev);
5891 	if (rdev->flags & RADEON_IS_IGP)
5892 		sumo_rlc_fini(rdev);
5893 	radeon_wb_fini(rdev);
5894 	radeon_ib_pool_fini(rdev);
5895 	radeon_irq_kms_fini(rdev);
5896 	uvd_v1_0_fini(rdev);
5897 	radeon_uvd_fini(rdev);
5898 	evergreen_pcie_gart_fini(rdev);
5899 	r600_vram_scratch_fini(rdev);
5900 	radeon_gem_fini(rdev);
5901 	radeon_fence_driver_fini(rdev);
5902 	radeon_agp_fini(rdev);
5903 	radeon_bo_fini(rdev);
5904 	radeon_atombios_fini(rdev);
5905 	if (ASIC_IS_DCE5(rdev))
5906 		ni_fini_microcode(rdev);
5907 	else
5908 		r600_fini_microcode(rdev);
5909 	kfree(rdev->bios);
5910 	rdev->bios = NULL;
5911 }
5912 
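/*
 * evergreen_pcie_gen2_enable - switch the PCIE link to gen2 speed
 *
 * Bails out early on IGP, non-PCIE and X2 boards, when disabled via
 * the radeon_pcie_gen2 option, when the bridge does not advertise
 * 5.0 GT/s support, or when the link already runs at the higher rate.
 */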
5913 void evergreen_pcie_gen2_enable(struct radeon_device *rdev)
5914 {
5915 	u32 link_width_cntl, speed_cntl;
5916 	u32 mask = 0;	/* only set by the __DragonFly__ path below */
5917 
5918 	if (radeon_pcie_gen2 == 0)
5919 		return;
5920 
5921 	if (rdev->flags & RADEON_IS_IGP)
5922 		return;
5923 
5924 	if (!(rdev->flags & RADEON_IS_PCIE))
5925 		return;
5926 
5927 	/* x2 cards have a special sequence */
5928 	if (ASIC_IS_X2(rdev))
5929 		return;
5930 
5931 #ifdef __DragonFly__
5932 	if (drm_pcie_get_speed_cap_mask(rdev->ddev, &mask) != 0)
5933 		return;
5934 #endif
5935 
5936 	if (!(mask & DRM_PCIE_SPEED_50))
5937 		return;
5938 
5939 	speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5940 	if (speed_cntl & LC_CURRENT_DATA_RATE) {
5941 		DRM_INFO("PCIE gen 2 link speeds already enabled\n");
5942 		return;
5943 	}
5944 
5945 	DRM_INFO("enabling PCIE gen 2 link speeds, disable with radeon.pcie_gen2=0\n");
5946 
5947 	if ((speed_cntl & LC_OTHER_SIDE_EVER_SENT_GEN2) ||
5948 	    (speed_cntl & LC_OTHER_SIDE_SUPPORTS_GEN2)) {
5949 
5950 		link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
5951 		link_width_cntl &= ~LC_UPCONFIGURE_DIS;
5952 		WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
5953 
5954 		speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5955 		speed_cntl &= ~LC_TARGET_LINK_SPEED_OVERRIDE_EN;
5956 		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
5957 
5958 		speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5959 		speed_cntl |= LC_CLR_FAILED_SPD_CHANGE_CNT;
5960 		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
5961 
5962 		speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5963 		speed_cntl &= ~LC_CLR_FAILED_SPD_CHANGE_CNT;
5964 		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
5965 
5966 		speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5967 		speed_cntl |= LC_GEN2_EN_STRAP;
5968 		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
5969 
5970 	} else {
5971 		link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
5972 		/* XXX: only disable it if gen1 bridge vendor == 0x111d or 0x1106 */
5973 		if (1)
5974 			link_width_cntl |= LC_UPCONFIGURE_DIS;
5975 		else
5976 			link_width_cntl &= ~LC_UPCONFIGURE_DIS;
5977 		WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
5978 	}
5979 }
5980 
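/*
 * evergreen_program_aspm - program PCIE ASPM (L0s/L1) behaviour
 *
 * Picks per-family L0s/L1 inactivity timeouts and, when PLL power-down
 * in L1 is allowed, programs the PIF PHY power states accordingly.
 * Skipped entirely when ASPM is disabled via the radeon_aspm option
 * or the board is not PCIE.
 */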
5981 void evergreen_program_aspm(struct radeon_device *rdev)
5982 {
5983 	u32 data, orig;
5984 	u32 pcie_lc_cntl, pcie_lc_cntl_old;
5985 	bool disable_l0s, disable_l1 = false, disable_plloff_in_l1 = false;
5986 	/* Set fusion_platform = true if the system is a fusion
5987 	 * system (an APU, or a dGPU in a fusion system).
5988 	 * TODO: check whether the system is a fusion platform.
5990 	 */
5991 	bool fusion_platform = false;
5992 
5993 	if (radeon_aspm == 0)
5994 		return;
5995 
5996 	if (!(rdev->flags & RADEON_IS_PCIE))
5997 		return;
5998 
5999 	switch (rdev->family) {
6000 	case CHIP_CYPRESS:
6001 	case CHIP_HEMLOCK:
6002 	case CHIP_JUNIPER:
6003 	case CHIP_REDWOOD:
6004 	case CHIP_CEDAR:
6005 	case CHIP_SUMO:
6006 	case CHIP_SUMO2:
6007 	case CHIP_PALM:
6008 	case CHIP_ARUBA:
6009 		disable_l0s = true;
6010 		break;
6011 	default:
6012 		disable_l0s = false;
6013 		break;
6014 	}
6015 
6016 	if (rdev->flags & RADEON_IS_IGP)
6017 		fusion_platform = true; /* XXX also dGPUs in a fusion system */
6018 
6019 	data = orig = RREG32_PIF_PHY0(PB0_PIF_PAIRING);
6020 	if (fusion_platform)
6021 		data &= ~MULTI_PIF;
6022 	else
6023 		data |= MULTI_PIF;
6024 	if (data != orig)
6025 		WREG32_PIF_PHY0(PB0_PIF_PAIRING, data);
6026 
6027 	data = orig = RREG32_PIF_PHY1(PB1_PIF_PAIRING);
6028 	if (fusion_platform)
6029 		data &= ~MULTI_PIF;
6030 	else
6031 		data |= MULTI_PIF;
6032 	if (data != orig)
6033 		WREG32_PIF_PHY1(PB1_PIF_PAIRING, data);
6034 
6035 	pcie_lc_cntl = pcie_lc_cntl_old = RREG32_PCIE_PORT(PCIE_LC_CNTL);
6036 	pcie_lc_cntl &= ~(LC_L0S_INACTIVITY_MASK | LC_L1_INACTIVITY_MASK);
6037 	if (!disable_l0s) {
6038 		if (rdev->family >= CHIP_BARTS)
6039 			pcie_lc_cntl |= LC_L0S_INACTIVITY(7);
6040 		else
6041 			pcie_lc_cntl |= LC_L0S_INACTIVITY(3);
6042 	}
6043 
6044 	if (!disable_l1) {
6045 		if (rdev->family >= CHIP_BARTS)
6046 			pcie_lc_cntl |= LC_L1_INACTIVITY(7);
6047 		else
6048 			pcie_lc_cntl |= LC_L1_INACTIVITY(8);
6049 
6050 		if (!disable_plloff_in_l1) {
6051 			data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0);
6052 			data &= ~(PLL_POWER_STATE_IN_OFF_0_MASK | PLL_POWER_STATE_IN_TXS2_0_MASK);
6053 			data |= PLL_POWER_STATE_IN_OFF_0(7) | PLL_POWER_STATE_IN_TXS2_0(7);
6054 			if (data != orig)
6055 				WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0, data);
6056 
6057 			data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1);
6058 			data &= ~(PLL_POWER_STATE_IN_OFF_1_MASK | PLL_POWER_STATE_IN_TXS2_1_MASK);
6059 			data |= PLL_POWER_STATE_IN_OFF_1(7) | PLL_POWER_STATE_IN_TXS2_1(7);
6060 			if (data != orig)
6061 				WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1, data);
6062 
6063 			data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0);
6064 			data &= ~(PLL_POWER_STATE_IN_OFF_0_MASK | PLL_POWER_STATE_IN_TXS2_0_MASK);
6065 			data |= PLL_POWER_STATE_IN_OFF_0(7) | PLL_POWER_STATE_IN_TXS2_0(7);
6066 			if (data != orig)
6067 				WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0, data);
6068 
6069 			data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1);
6070 			data &= ~(PLL_POWER_STATE_IN_OFF_1_MASK | PLL_POWER_STATE_IN_TXS2_1_MASK);
6071 			data |= PLL_POWER_STATE_IN_OFF_1(7) | PLL_POWER_STATE_IN_TXS2_1(7);
6072 			if (data != orig)
6073 				WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1, data);
6074 
6075 			if (rdev->family >= CHIP_BARTS) {
6076 				data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0);
6077 				data &= ~PLL_RAMP_UP_TIME_0_MASK;
6078 				data |= PLL_RAMP_UP_TIME_0(4);
6079 				if (data != orig)
6080 					WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0, data);
6081 
6082 				data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1);
6083 				data &= ~PLL_RAMP_UP_TIME_1_MASK;
6084 				data |= PLL_RAMP_UP_TIME_1(4);
6085 				if (data != orig)
6086 					WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1, data);
6087 
6088 				data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0);
6089 				data &= ~PLL_RAMP_UP_TIME_0_MASK;
6090 				data |= PLL_RAMP_UP_TIME_0(4);
6091 				if (data != orig)
6092 					WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0, data);
6093 
6094 				data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1);
6095 				data &= ~PLL_RAMP_UP_TIME_1_MASK;
6096 				data |= PLL_RAMP_UP_TIME_1(4);
6097 				if (data != orig)
6098 					WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1, data);
6099 			}
6100 
6101 			data = orig = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
6102 			data &= ~LC_DYN_LANES_PWR_STATE_MASK;
6103 			data |= LC_DYN_LANES_PWR_STATE(3);
6104 			if (data != orig)
6105 				WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, data);
6106 
6107 			if (rdev->family >= CHIP_BARTS) {
6108 				data = orig = RREG32_PIF_PHY0(PB0_PIF_CNTL);
6109 				data &= ~LS2_EXIT_TIME_MASK;
6110 				data |= LS2_EXIT_TIME(1);
6111 				if (data != orig)
6112 					WREG32_PIF_PHY0(PB0_PIF_CNTL, data);
6113 
6114 				data = orig = RREG32_PIF_PHY1(PB1_PIF_CNTL);
6115 				data &= ~LS2_EXIT_TIME_MASK;
6116 				data |= LS2_EXIT_TIME(1);
6117 				if (data != orig)
6118 					WREG32_PIF_PHY1(PB1_PIF_CNTL, data);
6119 			}
6120 		}
6121 	}
6122 
6123 	/* evergreen parts only */
6124 	if (rdev->family < CHIP_BARTS)
6125 		pcie_lc_cntl |= LC_PMI_TO_L1_DIS;
6126 
6127 	if (pcie_lc_cntl != pcie_lc_cntl_old)
6128 		WREG32_PCIE_PORT(PCIE_LC_CNTL, pcie_lc_cntl);
6129 }
6130