xref: /dragonfly/sys/dev/drm/radeon/evergreen.c (revision 62dc643e)
1 /*
2  * Copyright 2010 Advanced Micro Devices, Inc.
3  *
4  * Permission is hereby granted, free of charge, to any person obtaining a
5  * copy of this software and associated documentation files (the "Software"),
6  * to deal in the Software without restriction, including without limitation
7  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8  * and/or sell copies of the Software, and to permit persons to whom the
9  * Software is furnished to do so, subject to the following conditions:
10  *
11  * The above copyright notice and this permission notice shall be included in
12  * all copies or substantial portions of the Software.
13  *
14  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
17  * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18  * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19  * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20  * OTHER DEALINGS IN THE SOFTWARE.
21  *
22  * Authors: Alex Deucher
23  */
24 #include <linux/firmware.h>
25 #include <drm/drmP.h>
26 #include "radeon.h"
27 #include "radeon_asic.h"
28 #include "radeon_audio.h"
29 #include <uapi_drm/radeon_drm.h>
30 #include "evergreend.h"
31 #include "atom.h"
32 #include "avivod.h"
33 #include "evergreen_reg.h"
34 #include "evergreen_blit_shaders.h"
35 #include "radeon_ucode.h"
36 
37 /*
38  * Indirect registers accessor
39  */
40 u32 eg_cg_rreg(struct radeon_device *rdev, u32 reg)
41 {
42 	unsigned long flags;
43 	u32 r;
44 
45 	spin_lock_irqsave(&rdev->cg_idx_lock, flags);
46 	WREG32(EVERGREEN_CG_IND_ADDR, ((reg) & 0xffff));
47 	r = RREG32(EVERGREEN_CG_IND_DATA);
48 	spin_unlock_irqrestore(&rdev->cg_idx_lock, flags);
49 	return r;
50 }
51 
52 void eg_cg_wreg(struct radeon_device *rdev, u32 reg, u32 v)
53 {
54 	unsigned long flags;
55 
56 	spin_lock_irqsave(&rdev->cg_idx_lock, flags);
57 	WREG32(EVERGREEN_CG_IND_ADDR, ((reg) & 0xffff));
58 	WREG32(EVERGREEN_CG_IND_DATA, (v));
59 	spin_unlock_irqrestore(&rdev->cg_idx_lock, flags);
60 }
61 
62 u32 eg_pif_phy0_rreg(struct radeon_device *rdev, u32 reg)
63 {
64 	unsigned long flags;
65 	u32 r;
66 
67 	spin_lock_irqsave(&rdev->pif_idx_lock, flags);
68 	WREG32(EVERGREEN_PIF_PHY0_INDEX, ((reg) & 0xffff));
69 	r = RREG32(EVERGREEN_PIF_PHY0_DATA);
70 	spin_unlock_irqrestore(&rdev->pif_idx_lock, flags);
71 	return r;
72 }
73 
74 void eg_pif_phy0_wreg(struct radeon_device *rdev, u32 reg, u32 v)
75 {
76 	unsigned long flags;
77 
78 	spin_lock_irqsave(&rdev->pif_idx_lock, flags);
79 	WREG32(EVERGREEN_PIF_PHY0_INDEX, ((reg) & 0xffff));
80 	WREG32(EVERGREEN_PIF_PHY0_DATA, (v));
81 	spin_unlock_irqrestore(&rdev->pif_idx_lock, flags);
82 }
83 
84 u32 eg_pif_phy1_rreg(struct radeon_device *rdev, u32 reg)
85 {
86 	unsigned long flags;
87 	u32 r;
88 
89 	spin_lock_irqsave(&rdev->pif_idx_lock, flags);
90 	WREG32(EVERGREEN_PIF_PHY1_INDEX, ((reg) & 0xffff));
91 	r = RREG32(EVERGREEN_PIF_PHY1_DATA);
92 	spin_unlock_irqrestore(&rdev->pif_idx_lock, flags);
93 	return r;
94 }
95 
96 void eg_pif_phy1_wreg(struct radeon_device *rdev, u32 reg, u32 v)
97 {
98 	unsigned long flags;
99 
100 	spin_lock_irqsave(&rdev->pif_idx_lock, flags);
101 	WREG32(EVERGREEN_PIF_PHY1_INDEX, ((reg) & 0xffff));
102 	WREG32(EVERGREEN_PIF_PHY1_DATA, (v));
103 	spin_unlock_irqrestore(&rdev->pif_idx_lock, flags);
104 }
105 
/* Per-CRTC register block offsets; index by display controller id (0-5)
 * to address the corresponding controller's register range. */
static const u32 crtc_offsets[6] =
{
	EVERGREEN_CRTC0_REGISTER_OFFSET,
	EVERGREEN_CRTC1_REGISTER_OFFSET,
	EVERGREEN_CRTC2_REGISTER_OFFSET,
	EVERGREEN_CRTC3_REGISTER_OFFSET,
	EVERGREEN_CRTC4_REGISTER_OFFSET,
	EVERGREEN_CRTC5_REGISTER_OFFSET
};
115 
116 #include "clearstate_evergreen.h"
117 
/* Flat list of register offsets for the RLC save/restore mechanism
 * (presumably saved and restored around RLC power transitions —
 * confirm against the code that consumes this table). */
static const u32 sumo_rlc_save_restore_register_list[] =
{
	0x98fc,
	0x9830,
	0x9834,
	0x9838,
	0x9870,
	0x9874,
	0x8a14,
	0x8b24,
	0x8bcc,
	0x8b10,
	0x8d00,
	0x8d04,
	0x8c00,
	0x8c04,
	0x8c08,
	0x8c0c,
	0x8d8c,
	0x8c20,
	0x8c24,
	0x8c28,
	0x8c18,
	0x8c1c,
	0x8cf0,
	0x8e2c,
	0x8e38,
	0x8c30,
	0x9508,
	0x9688,
	0x9608,
	0x960c,
	0x9610,
	0x9614,
	0x88c4,
	0x88d4,
	0xa008,
	0x900c,
	0x9100,
	0x913c,
	0x98f8,
	0x98f4,
	0x9b7c,
	0x3f8c,
	0x8950,
	0x8954,
	0x8a18,
	0x8b28,
	0x9144,
	0x9148,
	0x914c,
	0x3f90,
	0x3f94,
	0x915c,
	0x9160,
	0x9178,
	0x917c,
	0x9180,
	0x918c,
	0x9190,
	0x9194,
	0x9198,
	0x919c,
	0x91a8,
	0x91ac,
	0x91b0,
	0x91b4,
	0x91b8,
	0x91c4,
	0x91c8,
	0x91cc,
	0x91d0,
	0x91d4,
	0x91e0,
	0x91e4,
	0x91ec,
	0x91f0,
	0x91f4,
	0x9200,
	0x9204,
	0x929c,
	0x9150,
	0x802c,
};
202 
203 static void evergreen_gpu_init(struct radeon_device *rdev);
204 
/* Golden register settings shared by Cypress/Hemlock/Juniper/Redwood.
 * Triplets of { register offset, and-mask, or-value }, applied by
 * radeon_program_register_sequence() in
 * evergreen_init_golden_registers(). */
static const u32 evergreen_golden_registers[] =
{
	0x3f90, 0xffff0000, 0xff000000,
	0x9148, 0xffff0000, 0xff000000,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0x9b7c, 0xffffffff, 0x00000000,
	0x8a14, 0xffffffff, 0x00000007,
	0x8b10, 0xffffffff, 0x00000000,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0xffffffff, 0x000000c2,
	0x88d4, 0xffffffff, 0x00000010,
	0x8974, 0xffffffff, 0x00000000,
	0xc78, 0x00000080, 0x00000080,
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0xffffffff, 0x001000f0,
	0x6104, 0x01000300, 0x00000000,
	0x5bc0, 0x00300000, 0x00000000,
	0x7030, 0xffffffff, 0x00000011,
	0x7c30, 0xffffffff, 0x00000011,
	0x10830, 0xffffffff, 0x00000011,
	0x11430, 0xffffffff, 0x00000011,
	0x12030, 0xffffffff, 0x00000011,
	0x12c30, 0xffffffff, 0x00000011,
	0xd02c, 0xffffffff, 0x08421000,
	0x240c, 0xffffffff, 0x00000380,
	0x8b24, 0xffffffff, 0x00ff0fff,
	0x28a4c, 0x06000000, 0x06000000,
	0x10c, 0x00000001, 0x00000001,
	0x8d00, 0xffffffff, 0x100e4848,
	0x8d04, 0xffffffff, 0x00164745,
	0x8c00, 0xffffffff, 0xe4000003,
	0x8c04, 0xffffffff, 0x40600060,
	0x8c08, 0xffffffff, 0x001c001c,
	0x8cf0, 0xffffffff, 0x08e00620,
	0x8c20, 0xffffffff, 0x00800080,
	0x8c24, 0xffffffff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0xffffffff, 0x00001010,
	0x28350, 0xffffffff, 0x00000000,
	0xa008, 0xffffffff, 0x00010000,
	0x5c4, 0xffffffff, 0x00000001,
	0x9508, 0xffffffff, 0x00000002,
	0x913c, 0x0000000f, 0x0000000a
};
250 
/* Second golden register batch for the Evergreen family; same
 * { offset, mask, value } triplet format, zeroing a contiguous run of
 * registers.  Applied via radeon_program_register_sequence(). */
static const u32 evergreen_golden_registers2[] =
{
	0x2f4c, 0xffffffff, 0x00000000,
	0x54f4, 0xffffffff, 0x00000000,
	0x54f0, 0xffffffff, 0x00000000,
	0x5498, 0xffffffff, 0x00000000,
	0x549c, 0xffffffff, 0x00000000,
	0x5494, 0xffffffff, 0x00000000,
	0x53cc, 0xffffffff, 0x00000000,
	0x53c8, 0xffffffff, 0x00000000,
	0x53c4, 0xffffffff, 0x00000000,
	0x53c0, 0xffffffff, 0x00000000,
	0x53bc, 0xffffffff, 0x00000000,
	0x53b8, 0xffffffff, 0x00000000,
	0x53b4, 0xffffffff, 0x00000000,
	0x53b0, 0xffffffff, 0x00000000
};
268 
/* Cypress/Hemlock medium-grain clock gating (MGCG) init sequence as
 * { offset, mask, value } triplets.  Note the repeated writes to
 * 0x802c, which appear to switch banks between runs of the 0x91xx/0x92xx
 * programming — confirm against the register spec. */
static const u32 cypress_mgcg_init[] =
{
	0x802c, 0xffffffff, 0xc0000000,
	0x5448, 0xffffffff, 0x00000100,
	0x55e4, 0xffffffff, 0x00000100,
	0x160c, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0xc164, 0xffffffff, 0x00000100,
	0x8a18, 0xffffffff, 0x00000100,
	0x897c, 0xffffffff, 0x06000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9a60, 0xffffffff, 0x00000100,
	0x9868, 0xffffffff, 0x00000100,
	0x8d58, 0xffffffff, 0x00000100,
	0x9510, 0xffffffff, 0x00000100,
	0x949c, 0xffffffff, 0x00000100,
	0x9654, 0xffffffff, 0x00000100,
	0x9030, 0xffffffff, 0x00000100,
	0x9034, 0xffffffff, 0x00000100,
	0x9038, 0xffffffff, 0x00000100,
	0x903c, 0xffffffff, 0x00000100,
	0x9040, 0xffffffff, 0x00000100,
	0xa200, 0xffffffff, 0x00000100,
	0xa204, 0xffffffff, 0x00000100,
	0xa208, 0xffffffff, 0x00000100,
	0xa20c, 0xffffffff, 0x00000100,
	0x971c, 0xffffffff, 0x00000100,
	0x977c, 0xffffffff, 0x00000100,
	0x3f80, 0xffffffff, 0x00000100,
	0xa210, 0xffffffff, 0x00000100,
	0xa214, 0xffffffff, 0x00000100,
	0x4d8, 0xffffffff, 0x00000100,
	0x9784, 0xffffffff, 0x00000100,
	0x9698, 0xffffffff, 0x00000100,
	0x4d4, 0xffffffff, 0x00000200,
	0x30cc, 0xffffffff, 0x00000100,
	0xd0c0, 0xffffffff, 0xff000100,
	0x802c, 0xffffffff, 0x40000000,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x9178, 0xffffffff, 0x00070000,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x918c, 0xffffffff, 0x00010006,
	0x9190, 0xffffffff, 0x00090008,
	0x9194, 0xffffffff, 0x00070000,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91a8, 0xffffffff, 0x00010006,
	0x91ac, 0xffffffff, 0x00090008,
	0x91b0, 0xffffffff, 0x00070000,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91c4, 0xffffffff, 0x00010006,
	0x91c8, 0xffffffff, 0x00090008,
	0x91cc, 0xffffffff, 0x00070000,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91e0, 0xffffffff, 0x00010006,
	0x91e4, 0xffffffff, 0x00090008,
	0x91e8, 0xffffffff, 0x00000000,
	0x91ec, 0xffffffff, 0x00070000,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x9200, 0xffffffff, 0x00010006,
	0x9204, 0xffffffff, 0x00090008,
	0x9208, 0xffffffff, 0x00070000,
	0x920c, 0xffffffff, 0x00030002,
	0x9210, 0xffffffff, 0x00050004,
	0x921c, 0xffffffff, 0x00010006,
	0x9220, 0xffffffff, 0x00090008,
	0x9224, 0xffffffff, 0x00070000,
	0x9228, 0xffffffff, 0x00030002,
	0x922c, 0xffffffff, 0x00050004,
	0x9238, 0xffffffff, 0x00010006,
	0x923c, 0xffffffff, 0x00090008,
	0x9240, 0xffffffff, 0x00070000,
	0x9244, 0xffffffff, 0x00030002,
	0x9248, 0xffffffff, 0x00050004,
	0x9254, 0xffffffff, 0x00010006,
	0x9258, 0xffffffff, 0x00090008,
	0x925c, 0xffffffff, 0x00070000,
	0x9260, 0xffffffff, 0x00030002,
	0x9264, 0xffffffff, 0x00050004,
	0x9270, 0xffffffff, 0x00010006,
	0x9274, 0xffffffff, 0x00090008,
	0x9278, 0xffffffff, 0x00070000,
	0x927c, 0xffffffff, 0x00030002,
	0x9280, 0xffffffff, 0x00050004,
	0x928c, 0xffffffff, 0x00010006,
	0x9290, 0xffffffff, 0x00090008,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0x40010000,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x9178, 0xffffffff, 0x00070000,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x918c, 0xffffffff, 0x00010006,
	0x9190, 0xffffffff, 0x00090008,
	0x9194, 0xffffffff, 0x00070000,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91a8, 0xffffffff, 0x00010006,
	0x91ac, 0xffffffff, 0x00090008,
	0x91b0, 0xffffffff, 0x00070000,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91c4, 0xffffffff, 0x00010006,
	0x91c8, 0xffffffff, 0x00090008,
	0x91cc, 0xffffffff, 0x00070000,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91e0, 0xffffffff, 0x00010006,
	0x91e4, 0xffffffff, 0x00090008,
	0x91e8, 0xffffffff, 0x00000000,
	0x91ec, 0xffffffff, 0x00070000,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x9200, 0xffffffff, 0x00010006,
	0x9204, 0xffffffff, 0x00090008,
	0x9208, 0xffffffff, 0x00070000,
	0x920c, 0xffffffff, 0x00030002,
	0x9210, 0xffffffff, 0x00050004,
	0x921c, 0xffffffff, 0x00010006,
	0x9220, 0xffffffff, 0x00090008,
	0x9224, 0xffffffff, 0x00070000,
	0x9228, 0xffffffff, 0x00030002,
	0x922c, 0xffffffff, 0x00050004,
	0x9238, 0xffffffff, 0x00010006,
	0x923c, 0xffffffff, 0x00090008,
	0x9240, 0xffffffff, 0x00070000,
	0x9244, 0xffffffff, 0x00030002,
	0x9248, 0xffffffff, 0x00050004,
	0x9254, 0xffffffff, 0x00010006,
	0x9258, 0xffffffff, 0x00090008,
	0x925c, 0xffffffff, 0x00070000,
	0x9260, 0xffffffff, 0x00030002,
	0x9264, 0xffffffff, 0x00050004,
	0x9270, 0xffffffff, 0x00010006,
	0x9274, 0xffffffff, 0x00090008,
	0x9278, 0xffffffff, 0x00070000,
	0x927c, 0xffffffff, 0x00030002,
	0x9280, 0xffffffff, 0x00050004,
	0x928c, 0xffffffff, 0x00010006,
	0x9290, 0xffffffff, 0x00090008,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0xc0000000
};
421 
/* Redwood medium-grain clock gating (MGCG) init sequence as
 * { offset, mask, value } triplets; a shorter variant of the Cypress
 * sequence above (single 0x91xx programming pass). */
static const u32 redwood_mgcg_init[] =
{
	0x802c, 0xffffffff, 0xc0000000,
	0x5448, 0xffffffff, 0x00000100,
	0x55e4, 0xffffffff, 0x00000100,
	0x160c, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0xc164, 0xffffffff, 0x00000100,
	0x8a18, 0xffffffff, 0x00000100,
	0x897c, 0xffffffff, 0x06000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9a60, 0xffffffff, 0x00000100,
	0x9868, 0xffffffff, 0x00000100,
	0x8d58, 0xffffffff, 0x00000100,
	0x9510, 0xffffffff, 0x00000100,
	0x949c, 0xffffffff, 0x00000100,
	0x9654, 0xffffffff, 0x00000100,
	0x9030, 0xffffffff, 0x00000100,
	0x9034, 0xffffffff, 0x00000100,
	0x9038, 0xffffffff, 0x00000100,
	0x903c, 0xffffffff, 0x00000100,
	0x9040, 0xffffffff, 0x00000100,
	0xa200, 0xffffffff, 0x00000100,
	0xa204, 0xffffffff, 0x00000100,
	0xa208, 0xffffffff, 0x00000100,
	0xa20c, 0xffffffff, 0x00000100,
	0x971c, 0xffffffff, 0x00000100,
	0x977c, 0xffffffff, 0x00000100,
	0x3f80, 0xffffffff, 0x00000100,
	0xa210, 0xffffffff, 0x00000100,
	0xa214, 0xffffffff, 0x00000100,
	0x4d8, 0xffffffff, 0x00000100,
	0x9784, 0xffffffff, 0x00000100,
	0x9698, 0xffffffff, 0x00000100,
	0x4d4, 0xffffffff, 0x00000200,
	0x30cc, 0xffffffff, 0x00000100,
	0xd0c0, 0xffffffff, 0xff000100,
	0x802c, 0xffffffff, 0x40000000,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x9178, 0xffffffff, 0x00070000,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x918c, 0xffffffff, 0x00010006,
	0x9190, 0xffffffff, 0x00090008,
	0x9194, 0xffffffff, 0x00070000,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91a8, 0xffffffff, 0x00010006,
	0x91ac, 0xffffffff, 0x00090008,
	0x91b0, 0xffffffff, 0x00070000,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91c4, 0xffffffff, 0x00010006,
	0x91c8, 0xffffffff, 0x00090008,
	0x91cc, 0xffffffff, 0x00070000,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91e0, 0xffffffff, 0x00010006,
	0x91e4, 0xffffffff, 0x00090008,
	0x91e8, 0xffffffff, 0x00000000,
	0x91ec, 0xffffffff, 0x00070000,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x9200, 0xffffffff, 0x00010006,
	0x9204, 0xffffffff, 0x00090008,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0xc0000000
};
493 
/* Cedar golden register settings as { offset, mask, value } triplets;
 * mirrors evergreen_golden_registers with Cedar-specific values
 * (e.g. 0x88d4, 0x8cf0) and fewer display-related entries. */
static const u32 cedar_golden_registers[] =
{
	0x3f90, 0xffff0000, 0xff000000,
	0x9148, 0xffff0000, 0xff000000,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0x9b7c, 0xffffffff, 0x00000000,
	0x8a14, 0xffffffff, 0x00000007,
	0x8b10, 0xffffffff, 0x00000000,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0xffffffff, 0x000000c2,
	0x88d4, 0xffffffff, 0x00000000,
	0x8974, 0xffffffff, 0x00000000,
	0xc78, 0x00000080, 0x00000080,
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0xffffffff, 0x001000f0,
	0x6104, 0x01000300, 0x00000000,
	0x5bc0, 0x00300000, 0x00000000,
	0x7030, 0xffffffff, 0x00000011,
	0x7c30, 0xffffffff, 0x00000011,
	0x10830, 0xffffffff, 0x00000011,
	0x11430, 0xffffffff, 0x00000011,
	0xd02c, 0xffffffff, 0x08421000,
	0x240c, 0xffffffff, 0x00000380,
	0x8b24, 0xffffffff, 0x00ff0fff,
	0x28a4c, 0x06000000, 0x06000000,
	0x10c, 0x00000001, 0x00000001,
	0x8d00, 0xffffffff, 0x100e4848,
	0x8d04, 0xffffffff, 0x00164745,
	0x8c00, 0xffffffff, 0xe4000003,
	0x8c04, 0xffffffff, 0x40600060,
	0x8c08, 0xffffffff, 0x001c001c,
	0x8cf0, 0xffffffff, 0x08e00410,
	0x8c20, 0xffffffff, 0x00800080,
	0x8c24, 0xffffffff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0xffffffff, 0x00001010,
	0x28350, 0xffffffff, 0x00000000,
	0xa008, 0xffffffff, 0x00010000,
	0x5c4, 0xffffffff, 0x00000001,
	0x9508, 0xffffffff, 0x00000002
};
536 
/* Cedar medium-grain clock gating (MGCG) init sequence as
 * { offset, mask, value } triplets. */
static const u32 cedar_mgcg_init[] =
{
	0x802c, 0xffffffff, 0xc0000000,
	0x5448, 0xffffffff, 0x00000100,
	0x55e4, 0xffffffff, 0x00000100,
	0x160c, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0xc164, 0xffffffff, 0x00000100,
	0x8a18, 0xffffffff, 0x00000100,
	0x897c, 0xffffffff, 0x06000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9a60, 0xffffffff, 0x00000100,
	0x9868, 0xffffffff, 0x00000100,
	0x8d58, 0xffffffff, 0x00000100,
	0x9510, 0xffffffff, 0x00000100,
	0x949c, 0xffffffff, 0x00000100,
	0x9654, 0xffffffff, 0x00000100,
	0x9030, 0xffffffff, 0x00000100,
	0x9034, 0xffffffff, 0x00000100,
	0x9038, 0xffffffff, 0x00000100,
	0x903c, 0xffffffff, 0x00000100,
	0x9040, 0xffffffff, 0x00000100,
	0xa200, 0xffffffff, 0x00000100,
	0xa204, 0xffffffff, 0x00000100,
	0xa208, 0xffffffff, 0x00000100,
	0xa20c, 0xffffffff, 0x00000100,
	0x971c, 0xffffffff, 0x00000100,
	0x977c, 0xffffffff, 0x00000100,
	0x3f80, 0xffffffff, 0x00000100,
	0xa210, 0xffffffff, 0x00000100,
	0xa214, 0xffffffff, 0x00000100,
	0x4d8, 0xffffffff, 0x00000100,
	0x9784, 0xffffffff, 0x00000100,
	0x9698, 0xffffffff, 0x00000100,
	0x4d4, 0xffffffff, 0x00000200,
	0x30cc, 0xffffffff, 0x00000100,
	0xd0c0, 0xffffffff, 0xff000100,
	0x802c, 0xffffffff, 0x40000000,
	0x915c, 0xffffffff, 0x00010000,
	0x9178, 0xffffffff, 0x00050000,
	0x917c, 0xffffffff, 0x00030002,
	0x918c, 0xffffffff, 0x00010004,
	0x9190, 0xffffffff, 0x00070006,
	0x9194, 0xffffffff, 0x00050000,
	0x9198, 0xffffffff, 0x00030002,
	0x91a8, 0xffffffff, 0x00010004,
	0x91ac, 0xffffffff, 0x00070006,
	0x91e8, 0xffffffff, 0x00000000,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0xc0000000
};
590 
/* Juniper medium-grain clock gating (MGCG) init sequence as
 * { offset, mask, value } triplets; applied by
 * radeon_program_register_sequence() in
 * evergreen_init_golden_registers(). */
static const u32 juniper_mgcg_init[] =
{
	0x802c, 0xffffffff, 0xc0000000,
	0x5448, 0xffffffff, 0x00000100,
	0x55e4, 0xffffffff, 0x00000100,
	0x160c, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0xc164, 0xffffffff, 0x00000100,
	0x8a18, 0xffffffff, 0x00000100,
	0x897c, 0xffffffff, 0x06000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9a60, 0xffffffff, 0x00000100,
	0x9868, 0xffffffff, 0x00000100,
	0x8d58, 0xffffffff, 0x00000100,
	0x9510, 0xffffffff, 0x00000100,
	0x949c, 0xffffffff, 0x00000100,
	0x9654, 0xffffffff, 0x00000100,
	0x9030, 0xffffffff, 0x00000100,
	0x9034, 0xffffffff, 0x00000100,
	0x9038, 0xffffffff, 0x00000100,
	0x903c, 0xffffffff, 0x00000100,
	0x9040, 0xffffffff, 0x00000100,
	0xa200, 0xffffffff, 0x00000100,
	0xa204, 0xffffffff, 0x00000100,
	0xa208, 0xffffffff, 0x00000100,
	0xa20c, 0xffffffff, 0x00000100,
	0x971c, 0xffffffff, 0x00000100,
	0xd0c0, 0xffffffff, 0xff000100,
	0x802c, 0xffffffff, 0x40000000,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x9178, 0xffffffff, 0x00070000,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x918c, 0xffffffff, 0x00010006,
	0x9190, 0xffffffff, 0x00090008,
	0x9194, 0xffffffff, 0x00070000,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91a8, 0xffffffff, 0x00010006,
	0x91ac, 0xffffffff, 0x00090008,
	0x91b0, 0xffffffff, 0x00070000,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91c4, 0xffffffff, 0x00010006,
	0x91c8, 0xffffffff, 0x00090008,
	0x91cc, 0xffffffff, 0x00070000,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91e0, 0xffffffff, 0x00010006,
	0x91e4, 0xffffffff, 0x00090008,
	0x91e8, 0xffffffff, 0x00000000,
	0x91ec, 0xffffffff, 0x00070000,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x9200, 0xffffffff, 0x00010006,
	0x9204, 0xffffffff, 0x00090008,
	0x9208, 0xffffffff, 0x00070000,
	0x920c, 0xffffffff, 0x00030002,
	0x9210, 0xffffffff, 0x00050004,
	0x921c, 0xffffffff, 0x00010006,
	0x9220, 0xffffffff, 0x00090008,
	0x9224, 0xffffffff, 0x00070000,
	0x9228, 0xffffffff, 0x00030002,
	0x922c, 0xffffffff, 0x00050004,
	0x9238, 0xffffffff, 0x00010006,
	0x923c, 0xffffffff, 0x00090008,
	0x9240, 0xffffffff, 0x00070000,
	0x9244, 0xffffffff, 0x00030002,
	0x9248, 0xffffffff, 0x00050004,
	0x9254, 0xffffffff, 0x00010006,
	0x9258, 0xffffffff, 0x00090008,
	0x925c, 0xffffffff, 0x00070000,
	0x9260, 0xffffffff, 0x00030002,
	0x9264, 0xffffffff, 0x00050004,
	0x9270, 0xffffffff, 0x00010006,
	0x9274, 0xffffffff, 0x00090008,
	0x9278, 0xffffffff, 0x00070000,
	0x927c, 0xffffffff, 0x00030002,
	0x9280, 0xffffffff, 0x00050004,
	0x928c, 0xffffffff, 0x00010006,
	0x9290, 0xffffffff, 0x00090008,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0xc0000000,
	0x977c, 0xffffffff, 0x00000100,
	0x3f80, 0xffffffff, 0x00000100,
	0xa210, 0xffffffff, 0x00000100,
	0xa214, 0xffffffff, 0x00000100,
	0x4d8, 0xffffffff, 0x00000100,
	0x9784, 0xffffffff, 0x00000100,
	0x9698, 0xffffffff, 0x00000100,
	0x4d4, 0xffffffff, 0x00000200,
	0x30cc, 0xffffffff, 0x00000100,
	0x802c, 0xffffffff, 0xc0000000
};
688 
/* SuperSumo (APU) golden register settings as
 * { offset, mask, value } triplets. */
static const u32 supersumo_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5c4, 0xffffffff, 0x00000001,
	0x7030, 0xffffffff, 0x00000011,
	0x7c30, 0xffffffff, 0x00000011,
	0x6104, 0x01000300, 0x00000000,
	0x5bc0, 0x00300000, 0x00000000,
	0x8c04, 0xffffffff, 0x40600060,
	0x8c08, 0xffffffff, 0x001c001c,
	0x8c20, 0xffffffff, 0x00800080,
	0x8c24, 0xffffffff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0xffffffff, 0x00001010,
	0x918c, 0xffffffff, 0x00010006,
	0x91a8, 0xffffffff, 0x00010006,
	0x91c4, 0xffffffff, 0x00010006,
	0x91e0, 0xffffffff, 0x00010006,
	0x9200, 0xffffffff, 0x00010006,
	0x9150, 0xffffffff, 0x6e944040,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x3f90, 0xffff0000, 0xff000000,
	0x9178, 0xffffffff, 0x00070000,
	0x9194, 0xffffffff, 0x00070000,
	0x91b0, 0xffffffff, 0x00070000,
	0x91cc, 0xffffffff, 0x00070000,
	0x91ec, 0xffffffff, 0x00070000,
	0x9148, 0xffff0000, 0xff000000,
	0x9190, 0xffffffff, 0x00090008,
	0x91ac, 0xffffffff, 0x00090008,
	0x91c8, 0xffffffff, 0x00090008,
	0x91e4, 0xffffffff, 0x00090008,
	0x9204, 0xffffffff, 0x00090008,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0x929c, 0xffffffff, 0x00000001,
	0x8a18, 0xffffffff, 0x00000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0x9b7c, 0xffffffff, 0x00000000,
	0x8030, 0xffffffff, 0x0000100a,
	0x8a14, 0xffffffff, 0x00000007,
	0x8b24, 0xffffffff, 0x00ff0fff,
	0x8b10, 0xffffffff, 0x00000000,
	0x28a4c, 0x06000000, 0x06000000,
	0x4d8, 0xffffffff, 0x00000100,
	0x913c, 0xffff000f, 0x0100000a,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0xffffffff, 0x000000c2,
	0x88d4, 0xffffffff, 0x00000010,
	0x8974, 0xffffffff, 0x00000000,
	0xc78, 0x00000080, 0x00000080,
	0x5e78, 0xffffffff, 0x001000f0,
	0xd02c, 0xffffffff, 0x08421000,
	0xa008, 0xffffffff, 0x00010000,
	0x8d00, 0xffffffff, 0x100e4848,
	0x8d04, 0xffffffff, 0x00164745,
	0x8c00, 0xffffffff, 0xe4000003,
	0x8cf0, 0x1fffffff, 0x08e00620,
	0x28350, 0xffffffff, 0x00000000,
	0x9508, 0xffffffff, 0x00000002
};
763 
/* Sumo-specific golden register additions as { offset, mask, value }
 * triplets; presumably applied on top of the SuperSumo table — confirm
 * against the (not visible here) CHIP_SUMO init path. */
static const u32 sumo_golden_registers[] =
{
	0x900c, 0x00ffffff, 0x0017071f,
	0x8c18, 0xffffffff, 0x10101060,
	0x8c1c, 0xffffffff, 0x00001010,
	0x8c30, 0x0000000f, 0x00000005,
	0x9688, 0x0000000f, 0x00000007
};
772 
/* Wrestler (APU) golden register settings as
 * { offset, mask, value } triplets. */
static const u32 wrestler_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5c4, 0xffffffff, 0x00000001,
	0x7030, 0xffffffff, 0x00000011,
	0x7c30, 0xffffffff, 0x00000011,
	0x6104, 0x01000300, 0x00000000,
	0x5bc0, 0x00300000, 0x00000000,
	0x918c, 0xffffffff, 0x00010006,
	0x91a8, 0xffffffff, 0x00010006,
	0x9150, 0xffffffff, 0x6e944040,
	0x917c, 0xffffffff, 0x00030002,
	0x9198, 0xffffffff, 0x00030002,
	0x915c, 0xffffffff, 0x00010000,
	0x3f90, 0xffff0000, 0xff000000,
	0x9178, 0xffffffff, 0x00070000,
	0x9194, 0xffffffff, 0x00070000,
	0x9148, 0xffff0000, 0xff000000,
	0x9190, 0xffffffff, 0x00090008,
	0x91ac, 0xffffffff, 0x00090008,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0x929c, 0xffffffff, 0x00000001,
	0x8a18, 0xffffffff, 0x00000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9b7c, 0xffffffff, 0x00000000,
	0x8030, 0xffffffff, 0x0000100a,
	0x8a14, 0xffffffff, 0x00000001,
	0x8b24, 0xffffffff, 0x00ff0fff,
	0x8b10, 0xffffffff, 0x00000000,
	0x28a4c, 0x06000000, 0x06000000,
	0x4d8, 0xffffffff, 0x00000100,
	0x913c, 0xffff000f, 0x0100000a,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0xffffffff, 0x000000c2,
	0x88d4, 0xffffffff, 0x00000010,
	0x8974, 0xffffffff, 0x00000000,
	0xc78, 0x00000080, 0x00000080,
	0x5e78, 0xffffffff, 0x001000f0,
	0xd02c, 0xffffffff, 0x08421000,
	0xa008, 0xffffffff, 0x00010000,
	0x8d00, 0xffffffff, 0x100e4848,
	0x8d04, 0xffffffff, 0x00164745,
	0x8c00, 0xffffffff, 0xe4000003,
	0x8cf0, 0x1fffffff, 0x08e00410,
	0x28350, 0xffffffff, 0x00000000,
	0x9508, 0xffffffff, 0x00000002,
	0x900c, 0xffffffff, 0x0017071f,
	0x8c18, 0xffffffff, 0x10101060,
	0x8c1c, 0xffffffff, 0x00001010
};
825 
/* Barts (Northern Islands) golden register settings as
 * { offset, mask, value } triplets; note the narrower masks compared
 * to the Evergreen tables (only selected bitfields are overridden). */
static const u32 barts_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0x8f311ff1, 0x001000f0,
	0x3f90, 0xffff0000, 0xff000000,
	0x9148, 0xffff0000, 0xff000000,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0xc78, 0x00000080, 0x00000080,
	0xbd4, 0x70073777, 0x00010001,
	0xd02c, 0xbfffff1f, 0x08421000,
	0xd0b8, 0x03773777, 0x02011003,
	0x5bc0, 0x00200000, 0x50100000,
	0x98f8, 0x33773777, 0x02011003,
	0x98fc, 0xffffffff, 0x76543210,
	0x7030, 0x31000311, 0x00000011,
	0x2f48, 0x00000007, 0x02011003,
	0x6b28, 0x00000010, 0x00000012,
	0x7728, 0x00000010, 0x00000012,
	0x10328, 0x00000010, 0x00000012,
	0x10f28, 0x00000010, 0x00000012,
	0x11b28, 0x00000010, 0x00000012,
	0x12728, 0x00000010, 0x00000012,
	0x240c, 0x000007ff, 0x00000380,
	0x8a14, 0xf000001f, 0x00000007,
	0x8b24, 0x3fff3fff, 0x00ff0fff,
	0x8b10, 0x0000ff0f, 0x00000000,
	0x28a4c, 0x07ffffff, 0x06000000,
	0x10c, 0x00000001, 0x00010003,
	0xa02c, 0xffffffff, 0x0000009b,
	0x913c, 0x0000000f, 0x0100000a,
	0x8d00, 0xffff7f7f, 0x100e4848,
	0x8d04, 0x00ffffff, 0x00164745,
	0x8c00, 0xfffc0003, 0xe4000003,
	0x8c04, 0xf8ff00ff, 0x40600060,
	0x8c08, 0x00ff00ff, 0x001c001c,
	0x8cf0, 0x1fff1fff, 0x08e00620,
	0x8c20, 0x0fff0fff, 0x00800080,
	0x8c24, 0x0fff0fff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0x0000ffff, 0x00001010,
	0x28350, 0x00000f01, 0x00000000,
	0x9508, 0x3700001f, 0x00000002,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0x001f3ae3, 0x000000c2,
	0x88d4, 0x0000001f, 0x00000010,
	0x8974, 0xffffffff, 0x00000000
};
874 
/* Turks (Northern Islands) golden register settings as
 * { offset, mask, value } triplets. */
static const u32 turks_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0x8f311ff1, 0x001000f0,
	0x8c8, 0x00003000, 0x00001070,
	0x8cc, 0x000fffff, 0x00040035,
	0x3f90, 0xffff0000, 0xfff00000,
	0x9148, 0xffff0000, 0xfff00000,
	0x3f94, 0xffff0000, 0xfff00000,
	0x914c, 0xffff0000, 0xfff00000,
	0xc78, 0x00000080, 0x00000080,
	0xbd4, 0x00073007, 0x00010002,
	0xd02c, 0xbfffff1f, 0x08421000,
	0xd0b8, 0x03773777, 0x02010002,
	0x5bc0, 0x00200000, 0x50100000,
	0x98f8, 0x33773777, 0x00010002,
	0x98fc, 0xffffffff, 0x33221100,
	0x7030, 0x31000311, 0x00000011,
	0x2f48, 0x33773777, 0x00010002,
	0x6b28, 0x00000010, 0x00000012,
	0x7728, 0x00000010, 0x00000012,
	0x10328, 0x00000010, 0x00000012,
	0x10f28, 0x00000010, 0x00000012,
	0x11b28, 0x00000010, 0x00000012,
	0x12728, 0x00000010, 0x00000012,
	0x240c, 0x000007ff, 0x00000380,
	0x8a14, 0xf000001f, 0x00000007,
	0x8b24, 0x3fff3fff, 0x00ff0fff,
	0x8b10, 0x0000ff0f, 0x00000000,
	0x28a4c, 0x07ffffff, 0x06000000,
	0x10c, 0x00000001, 0x00010003,
	0xa02c, 0xffffffff, 0x0000009b,
	0x913c, 0x0000000f, 0x0100000a,
	0x8d00, 0xffff7f7f, 0x100e4848,
	0x8d04, 0x00ffffff, 0x00164745,
	0x8c00, 0xfffc0003, 0xe4000003,
	0x8c04, 0xf8ff00ff, 0x40600060,
	0x8c08, 0x00ff00ff, 0x001c001c,
	0x8cf0, 0x1fff1fff, 0x08e00410,
	0x8c20, 0x0fff0fff, 0x00800080,
	0x8c24, 0x0fff0fff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0x0000ffff, 0x00001010,
	0x28350, 0x00000f01, 0x00000000,
	0x9508, 0x3700001f, 0x00000002,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0x001f3ae3, 0x000000c2,
	0x88d4, 0x0000001f, 0x00000010,
	0x8974, 0xffffffff, 0x00000000
};
925 
/* Caicos (Northern Islands) golden register settings as
 * { offset, mask, value } triplets. */
static const u32 caicos_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0x8f311ff1, 0x001000f0,
	0x8c8, 0x00003420, 0x00001450,
	0x8cc, 0x000fffff, 0x00040035,
	0x3f90, 0xffff0000, 0xfffc0000,
	0x9148, 0xffff0000, 0xfffc0000,
	0x3f94, 0xffff0000, 0xfffc0000,
	0x914c, 0xffff0000, 0xfffc0000,
	0xc78, 0x00000080, 0x00000080,
	0xbd4, 0x00073007, 0x00010001,
	0xd02c, 0xbfffff1f, 0x08421000,
	0xd0b8, 0x03773777, 0x02010001,
	0x5bc0, 0x00200000, 0x50100000,
	0x98f8, 0x33773777, 0x02010001,
	0x98fc, 0xffffffff, 0x33221100,
	0x7030, 0x31000311, 0x00000011,
	0x2f48, 0x33773777, 0x02010001,
	0x6b28, 0x00000010, 0x00000012,
	0x7728, 0x00000010, 0x00000012,
	0x10328, 0x00000010, 0x00000012,
	0x10f28, 0x00000010, 0x00000012,
	0x11b28, 0x00000010, 0x00000012,
	0x12728, 0x00000010, 0x00000012,
	0x240c, 0x000007ff, 0x00000380,
	0x8a14, 0xf000001f, 0x00000001,
	0x8b24, 0x3fff3fff, 0x00ff0fff,
	0x8b10, 0x0000ff0f, 0x00000000,
	0x28a4c, 0x07ffffff, 0x06000000,
	0x10c, 0x00000001, 0x00010003,
	0xa02c, 0xffffffff, 0x0000009b,
	0x913c, 0x0000000f, 0x0100000a,
	0x8d00, 0xffff7f7f, 0x100e4848,
	0x8d04, 0x00ffffff, 0x00164745,
	0x8c00, 0xfffc0003, 0xe4000003,
	0x8c04, 0xf8ff00ff, 0x40600060,
	0x8c08, 0x00ff00ff, 0x001c001c,
	0x8cf0, 0x1fff1fff, 0x08e00410,
	0x8c20, 0x0fff0fff, 0x00800080,
	0x8c24, 0x0fff0fff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0x0000ffff, 0x00001010,
	0x28350, 0x00000f01, 0x00000000,
	0x9508, 0x3700001f, 0x00000002,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0x001f3ae3, 0x000000c2,
	0x88d4, 0x0000001f, 0x00000010,
	0x8974, 0xffffffff, 0x00000000
};
976 
977 static void evergreen_init_golden_registers(struct radeon_device *rdev)
978 {
979 	switch (rdev->family) {
980 	case CHIP_CYPRESS:
981 	case CHIP_HEMLOCK:
982 		radeon_program_register_sequence(rdev,
983 						 evergreen_golden_registers,
984 						 (const u32)ARRAY_SIZE(evergreen_golden_registers));
985 		radeon_program_register_sequence(rdev,
986 						 evergreen_golden_registers2,
987 						 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
988 		radeon_program_register_sequence(rdev,
989 						 cypress_mgcg_init,
990 						 (const u32)ARRAY_SIZE(cypress_mgcg_init));
991 		break;
992 	case CHIP_JUNIPER:
993 		radeon_program_register_sequence(rdev,
994 						 evergreen_golden_registers,
995 						 (const u32)ARRAY_SIZE(evergreen_golden_registers));
996 		radeon_program_register_sequence(rdev,
997 						 evergreen_golden_registers2,
998 						 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
999 		radeon_program_register_sequence(rdev,
1000 						 juniper_mgcg_init,
1001 						 (const u32)ARRAY_SIZE(juniper_mgcg_init));
1002 		break;
1003 	case CHIP_REDWOOD:
1004 		radeon_program_register_sequence(rdev,
1005 						 evergreen_golden_registers,
1006 						 (const u32)ARRAY_SIZE(evergreen_golden_registers));
1007 		radeon_program_register_sequence(rdev,
1008 						 evergreen_golden_registers2,
1009 						 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
1010 		radeon_program_register_sequence(rdev,
1011 						 redwood_mgcg_init,
1012 						 (const u32)ARRAY_SIZE(redwood_mgcg_init));
1013 		break;
1014 	case CHIP_CEDAR:
1015 		radeon_program_register_sequence(rdev,
1016 						 cedar_golden_registers,
1017 						 (const u32)ARRAY_SIZE(cedar_golden_registers));
1018 		radeon_program_register_sequence(rdev,
1019 						 evergreen_golden_registers2,
1020 						 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
1021 		radeon_program_register_sequence(rdev,
1022 						 cedar_mgcg_init,
1023 						 (const u32)ARRAY_SIZE(cedar_mgcg_init));
1024 		break;
1025 	case CHIP_PALM:
1026 		radeon_program_register_sequence(rdev,
1027 						 wrestler_golden_registers,
1028 						 (const u32)ARRAY_SIZE(wrestler_golden_registers));
1029 		break;
1030 	case CHIP_SUMO:
1031 		radeon_program_register_sequence(rdev,
1032 						 supersumo_golden_registers,
1033 						 (const u32)ARRAY_SIZE(supersumo_golden_registers));
1034 		break;
1035 	case CHIP_SUMO2:
1036 		radeon_program_register_sequence(rdev,
1037 						 supersumo_golden_registers,
1038 						 (const u32)ARRAY_SIZE(supersumo_golden_registers));
1039 		radeon_program_register_sequence(rdev,
1040 						 sumo_golden_registers,
1041 						 (const u32)ARRAY_SIZE(sumo_golden_registers));
1042 		break;
1043 	case CHIP_BARTS:
1044 		radeon_program_register_sequence(rdev,
1045 						 barts_golden_registers,
1046 						 (const u32)ARRAY_SIZE(barts_golden_registers));
1047 		break;
1048 	case CHIP_TURKS:
1049 		radeon_program_register_sequence(rdev,
1050 						 turks_golden_registers,
1051 						 (const u32)ARRAY_SIZE(turks_golden_registers));
1052 		break;
1053 	case CHIP_CAICOS:
1054 		radeon_program_register_sequence(rdev,
1055 						 caicos_golden_registers,
1056 						 (const u32)ARRAY_SIZE(caicos_golden_registers));
1057 		break;
1058 	default:
1059 		break;
1060 	}
1061 }
1062 
1063 /**
1064  * evergreen_get_allowed_info_register - fetch the register for the info ioctl
1065  *
1066  * @rdev: radeon_device pointer
1067  * @reg: register offset in bytes
1068  * @val: register value
1069  *
1070  * Returns 0 for success or -EINVAL for an invalid register
1071  *
1072  */
1073 int evergreen_get_allowed_info_register(struct radeon_device *rdev,
1074 					u32 reg, u32 *val)
1075 {
1076 	switch (reg) {
1077 	case GRBM_STATUS:
1078 	case GRBM_STATUS_SE0:
1079 	case GRBM_STATUS_SE1:
1080 	case SRBM_STATUS:
1081 	case SRBM_STATUS2:
1082 	case DMA_STATUS_REG:
1083 	case UVD_STATUS:
1084 		*val = RREG32(reg);
1085 		return 0;
1086 	default:
1087 		return -EINVAL;
1088 	}
1089 }
1090 
1091 void evergreen_tiling_fields(unsigned tiling_flags, unsigned *bankw,
1092 			     unsigned *bankh, unsigned *mtaspect,
1093 			     unsigned *tile_split)
1094 {
1095 	*bankw = (tiling_flags >> RADEON_TILING_EG_BANKW_SHIFT) & RADEON_TILING_EG_BANKW_MASK;
1096 	*bankh = (tiling_flags >> RADEON_TILING_EG_BANKH_SHIFT) & RADEON_TILING_EG_BANKH_MASK;
1097 	*mtaspect = (tiling_flags >> RADEON_TILING_EG_MACRO_TILE_ASPECT_SHIFT) & RADEON_TILING_EG_MACRO_TILE_ASPECT_MASK;
1098 	*tile_split = (tiling_flags >> RADEON_TILING_EG_TILE_SPLIT_SHIFT) & RADEON_TILING_EG_TILE_SPLIT_MASK;
1099 	switch (*bankw) {
1100 	default:
1101 	case 1: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_1; break;
1102 	case 2: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_2; break;
1103 	case 4: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_4; break;
1104 	case 8: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_8; break;
1105 	}
1106 	switch (*bankh) {
1107 	default:
1108 	case 1: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_1; break;
1109 	case 2: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_2; break;
1110 	case 4: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_4; break;
1111 	case 8: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_8; break;
1112 	}
1113 	switch (*mtaspect) {
1114 	default:
1115 	case 1: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_1; break;
1116 	case 2: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_2; break;
1117 	case 4: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_4; break;
1118 	case 8: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_8; break;
1119 	}
1120 }
1121 
1122 static int sumo_set_uvd_clock(struct radeon_device *rdev, u32 clock,
1123 			      u32 cntl_reg, u32 status_reg)
1124 {
1125 	int r, i;
1126 	struct atom_clock_dividers dividers;
1127 
1128         r = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
1129 					   clock, false, &dividers);
1130 	if (r)
1131 		return r;
1132 
1133 	WREG32_P(cntl_reg, dividers.post_div, ~(DCLK_DIR_CNTL_EN|DCLK_DIVIDER_MASK));
1134 
1135 	for (i = 0; i < 100; i++) {
1136 		if (RREG32(status_reg) & DCLK_STATUS)
1137 			break;
1138 		mdelay(10);
1139 	}
1140 	if (i == 100)
1141 		return -ETIMEDOUT;
1142 
1143 	return 0;
1144 }
1145 
1146 int sumo_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
1147 {
1148 	int r = 0;
1149 	u32 cg_scratch = RREG32(CG_SCRATCH1);
1150 
1151 	r = sumo_set_uvd_clock(rdev, vclk, CG_VCLK_CNTL, CG_VCLK_STATUS);
1152 	if (r)
1153 		goto done;
1154 	cg_scratch &= 0xffff0000;
1155 	cg_scratch |= vclk / 100; /* Mhz */
1156 
1157 	r = sumo_set_uvd_clock(rdev, dclk, CG_DCLK_CNTL, CG_DCLK_STATUS);
1158 	if (r)
1159 		goto done;
1160 	cg_scratch &= 0x0000ffff;
1161 	cg_scratch |= (dclk / 100) << 16; /* Mhz */
1162 
1163 done:
1164 	WREG32(CG_SCRATCH1, cg_scratch);
1165 
1166 	return r;
1167 }
1168 
/**
 * evergreen_set_uvd_clocks - reprogram the UVD PLL (UPLL) for new VCLK/DCLK
 *
 * @rdev: radeon_device pointer
 * @vclk: requested UVD video clock (0 puts the PLL to sleep)
 * @dclk: requested UVD decode clock (0 puts the PLL to sleep)
 *
 * Bypasses VCLK/DCLK to the bus clock, computes and programs new UPLL
 * feedback and post dividers, then switches the clocks back to the PLL
 * outputs.  The register write order below is a hardware programming
 * sequence -- do not reorder.
 * Returns 0 on success or a negative error code from the divider
 * calculation / control request.
 */
int evergreen_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
{
	/* start off with something large */
	unsigned fb_div = 0, vclk_div = 0, dclk_div = 0;
	int r;

	/* bypass vclk and dclk with bclk */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		VCLK_SRC_SEL(1) | DCLK_SRC_SEL(1),
		~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));

	/* put PLL in bypass mode */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_BYPASS_EN_MASK, ~UPLL_BYPASS_EN_MASK);

	if (!vclk || !dclk) {
		/* keep the Bypass mode, put PLL to sleep */
		WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
		return 0;
	}

	r = radeon_uvd_calc_upll_dividers(rdev, vclk, dclk, 125000, 250000,
					  16384, 0x03FFFFFF, 0, 128, 5,
					  &fb_div, &vclk_div, &dclk_div);
	if (r)
		return r;

	/* set VCO_MODE to 1 */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_VCO_MODE_MASK, ~UPLL_VCO_MODE_MASK);

	/* toggle UPLL_SLEEP to 1 then back to 0 */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_SLEEP_MASK);

	/* deassert UPLL_RESET */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);

	mdelay(1);

	r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
	if (r)
		return r;

	/* assert UPLL_RESET again */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_RESET_MASK, ~UPLL_RESET_MASK);

	/* disable spread spectrum. */
	WREG32_P(CG_UPLL_SPREAD_SPECTRUM, 0, ~SSEN_MASK);

	/* set feedback divider */
	WREG32_P(CG_UPLL_FUNC_CNTL_3, UPLL_FB_DIV(fb_div), ~UPLL_FB_DIV_MASK);

	/* set ref divider to 0 */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_REF_DIV_MASK);

	/* NOTE(review): the ISPARE9 bit presumably selects a VCO range
	 * based on the feedback divider -- confirm against UPLL docs */
	if (fb_div < 307200)
		WREG32_P(CG_UPLL_FUNC_CNTL_4, 0, ~UPLL_SPARE_ISPARE9);
	else
		WREG32_P(CG_UPLL_FUNC_CNTL_4, UPLL_SPARE_ISPARE9, ~UPLL_SPARE_ISPARE9);

	/* set PDIV_A and PDIV_B */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		UPLL_PDIV_A(vclk_div) | UPLL_PDIV_B(dclk_div),
		~(UPLL_PDIV_A_MASK | UPLL_PDIV_B_MASK));

	/* give the PLL some time to settle */
	mdelay(15);

	/* deassert PLL_RESET */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);

	mdelay(15);

	/* switch from bypass mode to normal mode */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_BYPASS_EN_MASK);

	r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
	if (r)
		return r;

	/* switch VCLK and DCLK selection */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		VCLK_SRC_SEL(2) | DCLK_SRC_SEL(2),
		~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));

	mdelay(100);

	return 0;
}
1257 
1258 void evergreen_fix_pci_max_read_req_size(struct radeon_device *rdev)
1259 {
1260 	int readrq;
1261 	u16 v;
1262 
1263 	readrq = pcie_get_readrq(rdev->pdev);
1264 	v = ffs(readrq) - 8;
1265 	/* if bios or OS sets MAX_READ_REQUEST_SIZE to an invalid value, fix it
1266 	 * to avoid hangs or perfomance issues
1267 	 */
1268 	if ((v == 0) || (v == 6) || (v == 7))
1269 		pcie_set_readrq(rdev->pdev, 512);
1270 }
1271 
1272 void dce4_program_fmt(struct drm_encoder *encoder)
1273 {
1274 	struct drm_device *dev = encoder->dev;
1275 	struct radeon_device *rdev = dev->dev_private;
1276 	struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
1277 	struct radeon_crtc *radeon_crtc = to_radeon_crtc(encoder->crtc);
1278 	struct drm_connector *connector = radeon_get_connector_for_encoder(encoder);
1279 	int bpc = 0;
1280 	u32 tmp = 0;
1281 	enum radeon_connector_dither dither = RADEON_FMT_DITHER_DISABLE;
1282 
1283 	if (connector) {
1284 		struct radeon_connector *radeon_connector = to_radeon_connector(connector);
1285 		bpc = radeon_get_monitor_bpc(connector);
1286 		dither = radeon_connector->dither;
1287 	}
1288 
1289 	/* LVDS/eDP FMT is set up by atom */
1290 	if (radeon_encoder->devices & ATOM_DEVICE_LCD_SUPPORT)
1291 		return;
1292 
1293 	/* not needed for analog */
1294 	if ((radeon_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1) ||
1295 	    (radeon_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2))
1296 		return;
1297 
1298 	if (bpc == 0)
1299 		return;
1300 
1301 	switch (bpc) {
1302 	case 6:
1303 		if (dither == RADEON_FMT_DITHER_ENABLE)
1304 			/* XXX sort out optimal dither settings */
1305 			tmp |= (FMT_FRAME_RANDOM_ENABLE | FMT_HIGHPASS_RANDOM_ENABLE |
1306 				FMT_SPATIAL_DITHER_EN);
1307 		else
1308 			tmp |= FMT_TRUNCATE_EN;
1309 		break;
1310 	case 8:
1311 		if (dither == RADEON_FMT_DITHER_ENABLE)
1312 			/* XXX sort out optimal dither settings */
1313 			tmp |= (FMT_FRAME_RANDOM_ENABLE | FMT_HIGHPASS_RANDOM_ENABLE |
1314 				FMT_RGB_RANDOM_ENABLE |
1315 				FMT_SPATIAL_DITHER_EN | FMT_SPATIAL_DITHER_DEPTH);
1316 		else
1317 			tmp |= (FMT_TRUNCATE_EN | FMT_TRUNCATE_DEPTH);
1318 		break;
1319 	case 10:
1320 	default:
1321 		/* not needed */
1322 		break;
1323 	}
1324 
1325 	WREG32(FMT_BIT_DEPTH_CONTROL + radeon_crtc->crtc_offset, tmp);
1326 }
1327 
1328 static bool dce4_is_in_vblank(struct radeon_device *rdev, int crtc)
1329 {
1330 	if (RREG32(EVERGREEN_CRTC_STATUS + crtc_offsets[crtc]) & EVERGREEN_CRTC_V_BLANK)
1331 		return true;
1332 	else
1333 		return false;
1334 }
1335 
1336 static bool dce4_is_counter_moving(struct radeon_device *rdev, int crtc)
1337 {
1338 	u32 pos1, pos2;
1339 
1340 	pos1 = RREG32(EVERGREEN_CRTC_STATUS_POSITION + crtc_offsets[crtc]);
1341 	pos2 = RREG32(EVERGREEN_CRTC_STATUS_POSITION + crtc_offsets[crtc]);
1342 
1343 	if (pos1 != pos2)
1344 		return true;
1345 	else
1346 		return false;
1347 }
1348 
/**
 * dce4_wait_for_vblank - vblank wait asic callback.
 *
 * @rdev: radeon_device pointer
 * @crtc: crtc to wait for vblank on
 *
 * Wait for vblank on the requested crtc (evergreen+).
 */
void dce4_wait_for_vblank(struct radeon_device *rdev, int crtc)
{
	unsigned i = 0;

	if (crtc >= rdev->num_crtc)
		return;

	/* nothing to wait for on a disabled crtc */
	if (!(RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[crtc]) & EVERGREEN_CRTC_MASTER_EN))
		return;

	/* depending on when we hit vblank, we may be close to active; if so,
	 * wait for another frame.
	 */
	while (dce4_is_in_vblank(rdev, crtc)) {
		/* every 100 iterations, bail out if the position counter
		 * is stuck so we don't spin forever */
		if (i++ % 100 == 0) {
			if (!dce4_is_counter_moving(rdev, crtc))
				break;
		}
	}

	/* now wait for the next vblank to actually begin; same
	 * stuck-counter escape hatch (note: i keeps counting across
	 * both loops) */
	while (!dce4_is_in_vblank(rdev, crtc)) {
		if (i++ % 100 == 0) {
			if (!dce4_is_counter_moving(rdev, crtc))
				break;
		}
	}
}
1384 
/**
 * evergreen_page_flip - pageflip callback.
 *
 * @rdev: radeon_device pointer
 * @crtc_id: crtc to cleanup pageflip on
 * @crtc_base: new address of the crtc (GPU MC address)
 * @async: if true, let the surface update latch at the next horizontal
 * retrace (H_RETRACE) instead of waiting for vertical blank
 *
 * Triggers the actual pageflip by updating the primary
 * surface base address (evergreen+).
 */
void evergreen_page_flip(struct radeon_device *rdev, int crtc_id, u64 crtc_base,
			 bool async)
{
	struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id];

	/* update the scanout addresses */
	WREG32(EVERGREEN_GRPH_FLIP_CONTROL + radeon_crtc->crtc_offset,
	       async ? EVERGREEN_GRPH_SURFACE_UPDATE_H_RETRACE_EN : 0);
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
	       upper_32_bits(crtc_base));
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
	       (u32)crtc_base);
	/* post the write */
	RREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset);
}
1410 
1411 /**
1412  * evergreen_page_flip_pending - check if page flip is still pending
1413  *
1414  * @rdev: radeon_device pointer
1415  * @crtc_id: crtc to check
1416  *
1417  * Returns the current update pending status.
1418  */
1419 bool evergreen_page_flip_pending(struct radeon_device *rdev, int crtc_id)
1420 {
1421 	struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id];
1422 
1423 	/* Return current update_pending status: */
1424 	return !!(RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset) &
1425 		EVERGREEN_GRPH_SURFACE_UPDATE_PENDING);
1426 }
1427 
/* get temperature in millidegrees */
int evergreen_get_temp(struct radeon_device *rdev)
{
	u32 temp, toffset;
	int actual_temp = 0;

	if (rdev->family == CHIP_JUNIPER) {
		/* Juniper applies a fused calibration offset (TOFFSET);
		 * the 0x100/0x200 math below treats it as a 9-bit signed
		 * value (bit 8 = sign). */
		toffset = (RREG32(CG_THERMAL_CTRL) & TOFFSET_MASK) >>
			TOFFSET_SHIFT;
		temp = (RREG32(CG_TS0_STATUS) & TS0_ADC_DOUT_MASK) >>
			TS0_ADC_DOUT_SHIFT;

		if (toffset & 0x100)
			/* negative offset: two's complement within 9 bits */
			actual_temp = temp / 2 - (0x200 - toffset);
		else
			actual_temp = temp / 2 + toffset;

		/* degrees C -> millidegrees */
		actual_temp = actual_temp * 1000;

	} else {
		temp = (RREG32(CG_MULT_THERMAL_STATUS) & ASIC_T_MASK) >>
			ASIC_T_SHIFT;

		if (temp & 0x400)
			/* clamp to the minimum representable reading */
			actual_temp = -256;
		else if (temp & 0x200)
			/* clamp to the maximum representable reading */
			actual_temp = 255;
		else if (temp & 0x100) {
			/* sign-extend the 9-bit negative reading */
			actual_temp = temp & 0x1ff;
			actual_temp |= ~0x1ff;
		} else
			actual_temp = temp & 0xff;

		/* raw value appears to be in 0.5 C steps: *1000/2 -> mC */
		actual_temp = (actual_temp * 1000) / 2;
	}

	return actual_temp;
}
1466 
1467 int sumo_get_temp(struct radeon_device *rdev)
1468 {
1469 	u32 temp = RREG32(CG_THERMAL_STATUS) & 0xff;
1470 	int actual_temp = temp - 49;
1471 
1472 	return actual_temp * 1000;
1473 }
1474 
1475 /**
1476  * sumo_pm_init_profile - Initialize power profiles callback.
1477  *
1478  * @rdev: radeon_device pointer
1479  *
1480  * Initialize the power states used in profile mode
1481  * (sumo, trinity, SI).
1482  * Used for profile mode only.
1483  */
1484 void sumo_pm_init_profile(struct radeon_device *rdev)
1485 {
1486 	int idx;
1487 
1488 	/* default */
1489 	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
1490 	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
1491 	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
1492 	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 0;
1493 
1494 	/* low,mid sh/mh */
1495 	if (rdev->flags & RADEON_IS_MOBILITY)
1496 		idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
1497 	else
1498 		idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1499 
1500 	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
1501 	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
1502 	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
1503 	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
1504 
1505 	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
1506 	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
1507 	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
1508 	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
1509 
1510 	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
1511 	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
1512 	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
1513 	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 0;
1514 
1515 	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
1516 	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
1517 	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
1518 	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 0;
1519 
1520 	/* high sh/mh */
1521 	idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1522 	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
1523 	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
1524 	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
1525 	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx =
1526 		rdev->pm.power_state[idx].num_clock_modes - 1;
1527 
1528 	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
1529 	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
1530 	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
1531 	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx =
1532 		rdev->pm.power_state[idx].num_clock_modes - 1;
1533 }
1534 
1535 /**
1536  * btc_pm_init_profile - Initialize power profiles callback.
1537  *
1538  * @rdev: radeon_device pointer
1539  *
1540  * Initialize the power states used in profile mode
1541  * (BTC, cayman).
1542  * Used for profile mode only.
1543  */
1544 void btc_pm_init_profile(struct radeon_device *rdev)
1545 {
1546 	int idx;
1547 
1548 	/* default */
1549 	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
1550 	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
1551 	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
1552 	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 2;
1553 	/* starting with BTC, there is one state that is used for both
1554 	 * MH and SH.  Difference is that we always use the high clock index for
1555 	 * mclk.
1556 	 */
1557 	if (rdev->flags & RADEON_IS_MOBILITY)
1558 		idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
1559 	else
1560 		idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1561 	/* low sh */
1562 	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
1563 	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
1564 	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
1565 	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
1566 	/* mid sh */
1567 	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
1568 	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
1569 	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
1570 	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 1;
1571 	/* high sh */
1572 	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
1573 	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
1574 	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
1575 	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx = 2;
1576 	/* low mh */
1577 	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
1578 	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
1579 	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
1580 	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
1581 	/* mid mh */
1582 	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
1583 	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
1584 	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
1585 	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 1;
1586 	/* high mh */
1587 	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
1588 	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
1589 	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
1590 	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx = 2;
1591 }
1592 
/**
 * evergreen_pm_misc - set additional pm hw parameters callback.
 *
 * @rdev: radeon_device pointer
 *
 * Set non-clock parameters associated with a power state
 * (voltage, etc.) (evergreen+).
 */
void evergreen_pm_misc(struct radeon_device *rdev)
{
	int req_ps_idx = rdev->pm.requested_power_state_index;
	int req_cm_idx = rdev->pm.requested_clock_mode_index;
	struct radeon_power_state *ps = &rdev->pm.power_state[req_ps_idx];
	struct radeon_voltage *voltage = &ps->clock_info[req_cm_idx].voltage;

	if (voltage->type == VOLTAGE_SW) {
		/* 0xff0x are flags rather than an actual voltage */
		if ((voltage->voltage & 0xff00) == 0xff00)
			return;
		/* only program vddc if it actually changed */
		if (voltage->voltage && (voltage->voltage != rdev->pm.current_vddc)) {
			radeon_atom_set_voltage(rdev, voltage->voltage, SET_VOLTAGE_TYPE_ASIC_VDDC);
			rdev->pm.current_vddc = voltage->voltage;
			DRM_DEBUG("Setting: vddc: %d\n", voltage->voltage);
		}

		/* starting with BTC, there is one state that is used for both
		 * MH and SH.  Difference is that we always use the high clock index for
		 * mclk and vddci.
		 */
		if ((rdev->pm.pm_method == PM_METHOD_PROFILE) &&
		    (rdev->family >= CHIP_BARTS) &&
		    rdev->pm.active_crtc_count &&
		    ((rdev->pm.profile_index == PM_PROFILE_MID_MH_IDX) ||
		     (rdev->pm.profile_index == PM_PROFILE_LOW_MH_IDX)))
			/* re-point at the high-mh clock mode's voltage info */
			voltage = &rdev->pm.power_state[req_ps_idx].
				clock_info[rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx].voltage;

		/* 0xff0x are flags rather than an actual voltage */
		if ((voltage->vddci & 0xff00) == 0xff00)
			return;
		/* only program vddci if it actually changed */
		if (voltage->vddci && (voltage->vddci != rdev->pm.current_vddci)) {
			radeon_atom_set_voltage(rdev, voltage->vddci, SET_VOLTAGE_TYPE_ASIC_VDDCI);
			rdev->pm.current_vddci = voltage->vddci;
			DRM_DEBUG("Setting: vddci: %d\n", voltage->vddci);
		}
	}
}
1640 
1641 /**
1642  * evergreen_pm_prepare - pre-power state change callback.
1643  *
1644  * @rdev: radeon_device pointer
1645  *
1646  * Prepare for a power state change (evergreen+).
1647  */
1648 void evergreen_pm_prepare(struct radeon_device *rdev)
1649 {
1650 	struct drm_device *ddev = rdev->ddev;
1651 	struct drm_crtc *crtc;
1652 	struct radeon_crtc *radeon_crtc;
1653 	u32 tmp;
1654 
1655 	/* disable any active CRTCs */
1656 	list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
1657 		radeon_crtc = to_radeon_crtc(crtc);
1658 		if (radeon_crtc->enabled) {
1659 			tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
1660 			tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
1661 			WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
1662 		}
1663 	}
1664 }
1665 
1666 /**
1667  * evergreen_pm_finish - post-power state change callback.
1668  *
1669  * @rdev: radeon_device pointer
1670  *
1671  * Clean up after a power state change (evergreen+).
1672  */
1673 void evergreen_pm_finish(struct radeon_device *rdev)
1674 {
1675 	struct drm_device *ddev = rdev->ddev;
1676 	struct drm_crtc *crtc;
1677 	struct radeon_crtc *radeon_crtc;
1678 	u32 tmp;
1679 
1680 	/* enable any active CRTCs */
1681 	list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
1682 		radeon_crtc = to_radeon_crtc(crtc);
1683 		if (radeon_crtc->enabled) {
1684 			tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
1685 			tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
1686 			WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
1687 		}
1688 	}
1689 }
1690 
1691 /**
1692  * evergreen_hpd_sense - hpd sense callback.
1693  *
1694  * @rdev: radeon_device pointer
1695  * @hpd: hpd (hotplug detect) pin
1696  *
1697  * Checks if a digital monitor is connected (evergreen+).
1698  * Returns true if connected, false if not connected.
1699  */
1700 bool evergreen_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd)
1701 {
1702 	bool connected = false;
1703 
1704 	switch (hpd) {
1705 	case RADEON_HPD_1:
1706 		if (RREG32(DC_HPD1_INT_STATUS) & DC_HPDx_SENSE)
1707 			connected = true;
1708 		break;
1709 	case RADEON_HPD_2:
1710 		if (RREG32(DC_HPD2_INT_STATUS) & DC_HPDx_SENSE)
1711 			connected = true;
1712 		break;
1713 	case RADEON_HPD_3:
1714 		if (RREG32(DC_HPD3_INT_STATUS) & DC_HPDx_SENSE)
1715 			connected = true;
1716 		break;
1717 	case RADEON_HPD_4:
1718 		if (RREG32(DC_HPD4_INT_STATUS) & DC_HPDx_SENSE)
1719 			connected = true;
1720 		break;
1721 	case RADEON_HPD_5:
1722 		if (RREG32(DC_HPD5_INT_STATUS) & DC_HPDx_SENSE)
1723 			connected = true;
1724 		break;
1725 	case RADEON_HPD_6:
1726 		if (RREG32(DC_HPD6_INT_STATUS) & DC_HPDx_SENSE)
1727 			connected = true;
1728 		break;
1729 	default:
1730 		break;
1731 	}
1732 
1733 	return connected;
1734 }
1735 
1736 /**
1737  * evergreen_hpd_set_polarity - hpd set polarity callback.
1738  *
1739  * @rdev: radeon_device pointer
1740  * @hpd: hpd (hotplug detect) pin
1741  *
1742  * Set the polarity of the hpd pin (evergreen+).
1743  */
1744 void evergreen_hpd_set_polarity(struct radeon_device *rdev,
1745 				enum radeon_hpd_id hpd)
1746 {
1747 	u32 tmp;
1748 	bool connected = evergreen_hpd_sense(rdev, hpd);
1749 
1750 	switch (hpd) {
1751 	case RADEON_HPD_1:
1752 		tmp = RREG32(DC_HPD1_INT_CONTROL);
1753 		if (connected)
1754 			tmp &= ~DC_HPDx_INT_POLARITY;
1755 		else
1756 			tmp |= DC_HPDx_INT_POLARITY;
1757 		WREG32(DC_HPD1_INT_CONTROL, tmp);
1758 		break;
1759 	case RADEON_HPD_2:
1760 		tmp = RREG32(DC_HPD2_INT_CONTROL);
1761 		if (connected)
1762 			tmp &= ~DC_HPDx_INT_POLARITY;
1763 		else
1764 			tmp |= DC_HPDx_INT_POLARITY;
1765 		WREG32(DC_HPD2_INT_CONTROL, tmp);
1766 		break;
1767 	case RADEON_HPD_3:
1768 		tmp = RREG32(DC_HPD3_INT_CONTROL);
1769 		if (connected)
1770 			tmp &= ~DC_HPDx_INT_POLARITY;
1771 		else
1772 			tmp |= DC_HPDx_INT_POLARITY;
1773 		WREG32(DC_HPD3_INT_CONTROL, tmp);
1774 		break;
1775 	case RADEON_HPD_4:
1776 		tmp = RREG32(DC_HPD4_INT_CONTROL);
1777 		if (connected)
1778 			tmp &= ~DC_HPDx_INT_POLARITY;
1779 		else
1780 			tmp |= DC_HPDx_INT_POLARITY;
1781 		WREG32(DC_HPD4_INT_CONTROL, tmp);
1782 		break;
1783 	case RADEON_HPD_5:
1784 		tmp = RREG32(DC_HPD5_INT_CONTROL);
1785 		if (connected)
1786 			tmp &= ~DC_HPDx_INT_POLARITY;
1787 		else
1788 			tmp |= DC_HPDx_INT_POLARITY;
1789 		WREG32(DC_HPD5_INT_CONTROL, tmp);
1790 			break;
1791 	case RADEON_HPD_6:
1792 		tmp = RREG32(DC_HPD6_INT_CONTROL);
1793 		if (connected)
1794 			tmp &= ~DC_HPDx_INT_POLARITY;
1795 		else
1796 			tmp |= DC_HPDx_INT_POLARITY;
1797 		WREG32(DC_HPD6_INT_CONTROL, tmp);
1798 		break;
1799 	default:
1800 		break;
1801 	}
1802 }
1803 
1804 /**
1805  * evergreen_hpd_init - hpd setup callback.
1806  *
1807  * @rdev: radeon_device pointer
1808  *
1809  * Setup the hpd pins used by the card (evergreen+).
1810  * Enable the pin, set the polarity, and enable the hpd interrupts.
1811  */
1812 void evergreen_hpd_init(struct radeon_device *rdev)
1813 {
1814 	struct drm_device *dev = rdev->ddev;
1815 	struct drm_connector *connector;
1816 	unsigned enabled = 0;
1817 	u32 tmp = DC_HPDx_CONNECTION_TIMER(0x9c4) |
1818 		DC_HPDx_RX_INT_TIMER(0xfa) | DC_HPDx_EN;
1819 
1820 	list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
1821 		struct radeon_connector *radeon_connector = to_radeon_connector(connector);
1822 
1823 		if (connector->connector_type == DRM_MODE_CONNECTOR_eDP ||
1824 		    connector->connector_type == DRM_MODE_CONNECTOR_LVDS) {
1825 			/* don't try to enable hpd on eDP or LVDS avoid breaking the
1826 			 * aux dp channel on imac and help (but not completely fix)
1827 			 * https://bugzilla.redhat.com/show_bug.cgi?id=726143
1828 			 * also avoid interrupt storms during dpms.
1829 			 */
1830 			continue;
1831 		}
1832 		switch (radeon_connector->hpd.hpd) {
1833 		case RADEON_HPD_1:
1834 			WREG32(DC_HPD1_CONTROL, tmp);
1835 			break;
1836 		case RADEON_HPD_2:
1837 			WREG32(DC_HPD2_CONTROL, tmp);
1838 			break;
1839 		case RADEON_HPD_3:
1840 			WREG32(DC_HPD3_CONTROL, tmp);
1841 			break;
1842 		case RADEON_HPD_4:
1843 			WREG32(DC_HPD4_CONTROL, tmp);
1844 			break;
1845 		case RADEON_HPD_5:
1846 			WREG32(DC_HPD5_CONTROL, tmp);
1847 			break;
1848 		case RADEON_HPD_6:
1849 			WREG32(DC_HPD6_CONTROL, tmp);
1850 			break;
1851 		default:
1852 			break;
1853 		}
1854 		radeon_hpd_set_polarity(rdev, radeon_connector->hpd.hpd);
1855 		if (radeon_connector->hpd.hpd != RADEON_HPD_NONE)
1856 			enabled |= 1 << radeon_connector->hpd.hpd;
1857 	}
1858 	radeon_irq_kms_enable_hpd(rdev, enabled);
1859 }
1860 
1861 /**
1862  * evergreen_hpd_fini - hpd tear down callback.
1863  *
1864  * @rdev: radeon_device pointer
1865  *
1866  * Tear down the hpd pins used by the card (evergreen+).
1867  * Disable the hpd interrupts.
1868  */
void evergreen_hpd_fini(struct radeon_device *rdev)
{
	struct drm_device *dev = rdev->ddev;
	struct drm_connector *connector;
	unsigned disabled = 0;

	/* Walk every connector and zero the HPD control register for the
	 * physical hpd pin assigned to it, collecting a pin mask as we go.
	 */
	list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
		struct radeon_connector *radeon_connector = to_radeon_connector(connector);
		switch (radeon_connector->hpd.hpd) {
		case RADEON_HPD_1:
			WREG32(DC_HPD1_CONTROL, 0);
			break;
		case RADEON_HPD_2:
			WREG32(DC_HPD2_CONTROL, 0);
			break;
		case RADEON_HPD_3:
			WREG32(DC_HPD3_CONTROL, 0);
			break;
		case RADEON_HPD_4:
			WREG32(DC_HPD4_CONTROL, 0);
			break;
		case RADEON_HPD_5:
			WREG32(DC_HPD5_CONTROL, 0);
			break;
		case RADEON_HPD_6:
			WREG32(DC_HPD6_CONTROL, 0);
			break;
		default:
			break;
		}
		/* accumulate the mask of pins whose interrupts must be disabled */
		if (radeon_connector->hpd.hpd != RADEON_HPD_NONE)
			disabled |= 1 << radeon_connector->hpd.hpd;
	}
	/* turn off the hpd interrupt sources for all pins collected above */
	radeon_irq_kms_disable_hpd(rdev, disabled);
}
1904 
1905 /* watermark setup */
1906 
/**
 * evergreen_line_buffer_adjust - partition the line buffer for a crtc pair
 *
 * @rdev: radeon_device pointer
 * @radeon_crtc: crtc to program
 * @mode: mode set on this crtc (may be disabled)
 * @other_mode: mode on the paired crtc sharing the same line buffer
 *
 * Programs DC_LB_MEMORY_SPLIT for this crtc and, on DCE4.1/DCE5 parts,
 * the DMIF buffer allocation.  Returns the line buffer size in pixels
 * granted to this crtc (0 if the crtc is disabled).
 */
static u32 evergreen_line_buffer_adjust(struct radeon_device *rdev,
					struct radeon_crtc *radeon_crtc,
					struct drm_display_mode *mode,
					struct drm_display_mode *other_mode)
{
	u32 tmp, buffer_alloc, i;
	u32 pipe_offset = radeon_crtc->crtc_id * 0x20;
	/*
	 * Line Buffer Setup
	 * There are 3 line buffers, each one shared by 2 display controllers.
	 * DC_LB_MEMORY_SPLIT controls how that line buffer is shared between
	 * the display controllers.  The paritioning is done via one of four
	 * preset allocations specified in bits 2:0:
	 * first display controller
	 *  0 - first half of lb (3840 * 2)
	 *  1 - first 3/4 of lb (5760 * 2)
	 *  2 - whole lb (7680 * 2), other crtc must be disabled
	 *  3 - first 1/4 of lb (1920 * 2)
	 * second display controller
	 *  4 - second half of lb (3840 * 2)
	 *  5 - second 3/4 of lb (5760 * 2)
	 *  6 - whole lb (7680 * 2), other crtc must be disabled
	 *  7 - last 1/4 of lb (1920 * 2)
	 */
	/* this can get tricky if we have two large displays on a paired group
	 * of crtcs.  Ideally for multiple large displays we'd assign them to
	 * non-linked crtcs for maximum line buffer allocation.
	 */
	if (radeon_crtc->base.enabled && mode) {
		if (other_mode) {
			tmp = 0; /* 1/2 */
			buffer_alloc = 1;
		} else {
			tmp = 2; /* whole */
			buffer_alloc = 2;
		}
	} else {
		tmp = 0;
		buffer_alloc = 0;
	}

	/* second controller of the pair uses second half of the lb */
	if (radeon_crtc->crtc_id % 2)
		tmp += 4;
	WREG32(DC_LB_MEMORY_SPLIT + radeon_crtc->crtc_offset, tmp);

	if (ASIC_IS_DCE41(rdev) || ASIC_IS_DCE5(rdev)) {
		/* program the DMIF allocation and poll until the hw latches it */
		WREG32(PIPE0_DMIF_BUFFER_CONTROL + pipe_offset,
		       DMIF_BUFFERS_ALLOCATED(buffer_alloc));
		for (i = 0; i < rdev->usec_timeout; i++) {
			if (RREG32(PIPE0_DMIF_BUFFER_CONTROL + pipe_offset) &
			    DMIF_BUFFERS_ALLOCATED_COMPLETED)
				break;
			udelay(1);
		}
	}

	/* translate the split selection back into a pixel count;
	 * DCE5 line buffers are larger than DCE4 ones.
	 */
	if (radeon_crtc->base.enabled && mode) {
		switch (tmp) {
		case 0:
		case 4:
		default:
			if (ASIC_IS_DCE5(rdev))
				return 4096 * 2;
			else
				return 3840 * 2;
		case 1:
		case 5:
			if (ASIC_IS_DCE5(rdev))
				return 6144 * 2;
			else
				return 5760 * 2;
		case 2:
		case 6:
			if (ASIC_IS_DCE5(rdev))
				return 8192 * 2;
			else
				return 7680 * 2;
		case 3:
		case 7:
			if (ASIC_IS_DCE5(rdev))
				return 2048 * 2;
			else
				return 1920 * 2;
		}
	}

	/* controller not enabled, so no lb used */
	return 0;
}
1997 
1998 u32 evergreen_get_number_of_dram_channels(struct radeon_device *rdev)
1999 {
2000 	u32 tmp = RREG32(MC_SHARED_CHMAP);
2001 
2002 	switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
2003 	case 0:
2004 	default:
2005 		return 1;
2006 	case 1:
2007 		return 2;
2008 	case 2:
2009 		return 4;
2010 	case 3:
2011 		return 8;
2012 	}
2013 }
2014 
/* per-head input parameters for the display watermark calculations below */
struct evergreen_wm_params {
	u32 dram_channels; /* number of dram channels */
	u32 yclk;          /* bandwidth per dram data pin in kHz */
	u32 sclk;          /* engine clock in kHz */
	u32 disp_clk;      /* display clock in kHz */
	u32 src_width;     /* viewport width */
	u32 active_time;   /* active display time in ns */
	u32 blank_time;    /* blank time in ns */
	bool interlaced;    /* mode is interlaced */
	fixed20_12 vsc;    /* vertical scale ratio */
	u32 num_heads;     /* number of active crtcs */
	u32 bytes_per_pixel; /* bytes per pixel display + overlay */
	u32 lb_size;       /* line buffer allocated to pipe */
	u32 vtaps;         /* vertical scaler taps */
};
2030 
2031 static u32 evergreen_dram_bandwidth(struct evergreen_wm_params *wm)
2032 {
2033 	/* Calculate DRAM Bandwidth and the part allocated to display. */
2034 	fixed20_12 dram_efficiency; /* 0.7 */
2035 	fixed20_12 yclk, dram_channels, bandwidth;
2036 	fixed20_12 a;
2037 
2038 	a.full = dfixed_const(1000);
2039 	yclk.full = dfixed_const(wm->yclk);
2040 	yclk.full = dfixed_div(yclk, a);
2041 	dram_channels.full = dfixed_const(wm->dram_channels * 4);
2042 	a.full = dfixed_const(10);
2043 	dram_efficiency.full = dfixed_const(7);
2044 	dram_efficiency.full = dfixed_div(dram_efficiency, a);
2045 	bandwidth.full = dfixed_mul(dram_channels, yclk);
2046 	bandwidth.full = dfixed_mul(bandwidth, dram_efficiency);
2047 
2048 	return dfixed_trunc(bandwidth);
2049 }
2050 
2051 static u32 evergreen_dram_bandwidth_for_display(struct evergreen_wm_params *wm)
2052 {
2053 	/* Calculate DRAM Bandwidth and the part allocated to display. */
2054 	fixed20_12 disp_dram_allocation; /* 0.3 to 0.7 */
2055 	fixed20_12 yclk, dram_channels, bandwidth;
2056 	fixed20_12 a;
2057 
2058 	a.full = dfixed_const(1000);
2059 	yclk.full = dfixed_const(wm->yclk);
2060 	yclk.full = dfixed_div(yclk, a);
2061 	dram_channels.full = dfixed_const(wm->dram_channels * 4);
2062 	a.full = dfixed_const(10);
2063 	disp_dram_allocation.full = dfixed_const(3); /* XXX worse case value 0.3 */
2064 	disp_dram_allocation.full = dfixed_div(disp_dram_allocation, a);
2065 	bandwidth.full = dfixed_mul(dram_channels, yclk);
2066 	bandwidth.full = dfixed_mul(bandwidth, disp_dram_allocation);
2067 
2068 	return dfixed_trunc(bandwidth);
2069 }
2070 
2071 static u32 evergreen_data_return_bandwidth(struct evergreen_wm_params *wm)
2072 {
2073 	/* Calculate the display Data return Bandwidth */
2074 	fixed20_12 return_efficiency; /* 0.8 */
2075 	fixed20_12 sclk, bandwidth;
2076 	fixed20_12 a;
2077 
2078 	a.full = dfixed_const(1000);
2079 	sclk.full = dfixed_const(wm->sclk);
2080 	sclk.full = dfixed_div(sclk, a);
2081 	a.full = dfixed_const(10);
2082 	return_efficiency.full = dfixed_const(8);
2083 	return_efficiency.full = dfixed_div(return_efficiency, a);
2084 	a.full = dfixed_const(32);
2085 	bandwidth.full = dfixed_mul(a, sclk);
2086 	bandwidth.full = dfixed_mul(bandwidth, return_efficiency);
2087 
2088 	return dfixed_trunc(bandwidth);
2089 }
2090 
2091 static u32 evergreen_dmif_request_bandwidth(struct evergreen_wm_params *wm)
2092 {
2093 	/* Calculate the DMIF Request Bandwidth */
2094 	fixed20_12 disp_clk_request_efficiency; /* 0.8 */
2095 	fixed20_12 disp_clk, bandwidth;
2096 	fixed20_12 a;
2097 
2098 	a.full = dfixed_const(1000);
2099 	disp_clk.full = dfixed_const(wm->disp_clk);
2100 	disp_clk.full = dfixed_div(disp_clk, a);
2101 	a.full = dfixed_const(10);
2102 	disp_clk_request_efficiency.full = dfixed_const(8);
2103 	disp_clk_request_efficiency.full = dfixed_div(disp_clk_request_efficiency, a);
2104 	a.full = dfixed_const(32);
2105 	bandwidth.full = dfixed_mul(a, disp_clk);
2106 	bandwidth.full = dfixed_mul(bandwidth, disp_clk_request_efficiency);
2107 
2108 	return dfixed_trunc(bandwidth);
2109 }
2110 
2111 static u32 evergreen_available_bandwidth(struct evergreen_wm_params *wm)
2112 {
2113 	/* Calculate the Available bandwidth. Display can use this temporarily but not in average. */
2114 	u32 dram_bandwidth = evergreen_dram_bandwidth(wm);
2115 	u32 data_return_bandwidth = evergreen_data_return_bandwidth(wm);
2116 	u32 dmif_req_bandwidth = evergreen_dmif_request_bandwidth(wm);
2117 
2118 	return min(dram_bandwidth, min(data_return_bandwidth, dmif_req_bandwidth));
2119 }
2120 
2121 static u32 evergreen_average_bandwidth(struct evergreen_wm_params *wm)
2122 {
2123 	/* Calculate the display mode Average Bandwidth
2124 	 * DisplayMode should contain the source and destination dimensions,
2125 	 * timing, etc.
2126 	 */
2127 	fixed20_12 bpp;
2128 	fixed20_12 line_time;
2129 	fixed20_12 src_width;
2130 	fixed20_12 bandwidth;
2131 	fixed20_12 a;
2132 
2133 	a.full = dfixed_const(1000);
2134 	line_time.full = dfixed_const(wm->active_time + wm->blank_time);
2135 	line_time.full = dfixed_div(line_time, a);
2136 	bpp.full = dfixed_const(wm->bytes_per_pixel);
2137 	src_width.full = dfixed_const(wm->src_width);
2138 	bandwidth.full = dfixed_mul(src_width, bpp);
2139 	bandwidth.full = dfixed_mul(bandwidth, wm->vsc);
2140 	bandwidth.full = dfixed_div(bandwidth, line_time);
2141 
2142 	return dfixed_trunc(bandwidth);
2143 }
2144 
/* Compute the latency watermark in ns: how long the display can wait for
 * data before underflowing, given memory latency and contention from the
 * other active heads.
 */
static u32 evergreen_latency_watermark(struct evergreen_wm_params *wm)
{
	/* First calcualte the latency in ns */
	u32 mc_latency = 2000; /* 2000 ns. */
	u32 available_bandwidth = evergreen_available_bandwidth(wm);
	/* NOTE(review): available_bandwidth is used as a divisor here, before
	 * the num_heads == 0 early-out below; presumably it is never zero with
	 * sane clock inputs — confirm against callers.
	 */
	u32 worst_chunk_return_time = (512 * 8 * 1000) / available_bandwidth;
	u32 cursor_line_pair_return_time = (128 * 4 * 1000) / available_bandwidth;
	u32 dc_latency = 40000000 / wm->disp_clk; /* dc pipe latency */
	u32 other_heads_data_return_time = ((wm->num_heads + 1) * worst_chunk_return_time) +
		(wm->num_heads * cursor_line_pair_return_time);
	u32 latency = mc_latency + other_heads_data_return_time + dc_latency;
	u32 max_src_lines_per_dst_line, lb_fill_bw, line_fill_time;
	fixed20_12 a, b, c;

	if (wm->num_heads == 0)
		return 0;

	/* downscaling / many-tap scaling / interlace needs more source lines
	 * fetched per destination line
	 */
	a.full = dfixed_const(2);
	b.full = dfixed_const(1);
	if ((wm->vsc.full > a.full) ||
	    ((wm->vsc.full > b.full) && (wm->vtaps >= 3)) ||
	    (wm->vtaps >= 5) ||
	    ((wm->vsc.full >= a.full) && wm->interlaced))
		max_src_lines_per_dst_line = 4;
	else
		max_src_lines_per_dst_line = 2;

	/* line buffer fill rate: min of this head's bandwidth share and
	 * what the display clock can move (disp_clk * bytes_per_pixel)
	 */
	a.full = dfixed_const(available_bandwidth);
	b.full = dfixed_const(wm->num_heads);
	a.full = dfixed_div(a, b);

	b.full = dfixed_const(1000);
	c.full = dfixed_const(wm->disp_clk);
	b.full = dfixed_div(c, b);
	c.full = dfixed_const(wm->bytes_per_pixel);
	b.full = dfixed_mul(b, c);

	lb_fill_bw = min(dfixed_trunc(a), dfixed_trunc(b));

	/* time (ns) to fill one destination line's worth of source data */
	a.full = dfixed_const(max_src_lines_per_dst_line * wm->src_width * wm->bytes_per_pixel);
	b.full = dfixed_const(1000);
	c.full = dfixed_const(lb_fill_bw);
	b.full = dfixed_div(c, b);
	a.full = dfixed_div(a, b);
	line_fill_time = dfixed_trunc(a);

	/* if the line cannot be filled within the active period, the excess
	 * adds to the watermark
	 */
	if (line_fill_time < wm->active_time)
		return latency;
	else
		return latency + (line_fill_time - wm->active_time);

}
2197 
2198 static bool evergreen_average_bandwidth_vs_dram_bandwidth_for_display(struct evergreen_wm_params *wm)
2199 {
2200 	if (evergreen_average_bandwidth(wm) <=
2201 	    (evergreen_dram_bandwidth_for_display(wm) / wm->num_heads))
2202 		return true;
2203 	else
2204 		return false;
2205 };
2206 
2207 static bool evergreen_average_bandwidth_vs_available_bandwidth(struct evergreen_wm_params *wm)
2208 {
2209 	if (evergreen_average_bandwidth(wm) <=
2210 	    (evergreen_available_bandwidth(wm) / wm->num_heads))
2211 		return true;
2212 	else
2213 		return false;
2214 };
2215 
2216 static bool evergreen_check_latency_hiding(struct evergreen_wm_params *wm)
2217 {
2218 	u32 lb_partitions = wm->lb_size / wm->src_width;
2219 	u32 line_time = wm->active_time + wm->blank_time;
2220 	u32 latency_tolerant_lines;
2221 	u32 latency_hiding;
2222 	fixed20_12 a;
2223 
2224 	a.full = dfixed_const(1);
2225 	if (wm->vsc.full > a.full)
2226 		latency_tolerant_lines = 1;
2227 	else {
2228 		if (lb_partitions <= (wm->vtaps + 1))
2229 			latency_tolerant_lines = 1;
2230 		else
2231 			latency_tolerant_lines = 2;
2232 	}
2233 
2234 	latency_hiding = (latency_tolerant_lines * line_time + wm->blank_time);
2235 
2236 	if (evergreen_latency_watermark(wm) <= latency_hiding)
2237 		return true;
2238 	else
2239 		return false;
2240 }
2241 
/**
 * evergreen_program_watermarks - program display watermarks for one crtc
 *
 * @rdev: radeon_device pointer
 * @radeon_crtc: crtc to program
 * @lb_size: line buffer size granted to this crtc (from line_buffer_adjust)
 * @num_heads: number of active crtcs
 *
 * Computes latency watermarks for high (wm A) and low (wm B) clock states,
 * programs them into the pipe's arbitration registers, and writes the
 * priority marks.  Also saves line_time/wm values on the crtc for DPM.
 */
static void evergreen_program_watermarks(struct radeon_device *rdev,
					 struct radeon_crtc *radeon_crtc,
					 u32 lb_size, u32 num_heads)
{
	struct drm_display_mode *mode = &radeon_crtc->base.mode;
	struct evergreen_wm_params wm_low, wm_high;
	u32 dram_channels;
	u32 pixel_period;
	u32 line_time = 0;
	u32 latency_watermark_a = 0, latency_watermark_b = 0;
	u32 priority_a_mark = 0, priority_b_mark = 0;
	u32 priority_a_cnt = PRIORITY_OFF;
	u32 priority_b_cnt = PRIORITY_OFF;
	u32 pipe_offset = radeon_crtc->crtc_id * 16;
	u32 tmp, arb_control3;
	fixed20_12 a, b, c;

	if (radeon_crtc->base.enabled && num_heads && mode) {
		/* pixel period in ns; line_time clamped to the 16-bit register field */
		pixel_period = 1000000 / (u32)mode->clock;
		line_time = min((u32)mode->crtc_htotal * pixel_period, (u32)65535);
		priority_a_cnt = 0;
		priority_b_cnt = 0;
		dram_channels = evergreen_get_number_of_dram_channels(rdev);

		/* watermark for high clocks */
		if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) {
			wm_high.yclk =
				radeon_dpm_get_mclk(rdev, false) * 10;
			wm_high.sclk =
				radeon_dpm_get_sclk(rdev, false) * 10;
		} else {
			wm_high.yclk = rdev->pm.current_mclk * 10;
			wm_high.sclk = rdev->pm.current_sclk * 10;
		}

		wm_high.disp_clk = mode->clock;
		wm_high.src_width = mode->crtc_hdisplay;
		wm_high.active_time = mode->crtc_hdisplay * pixel_period;
		wm_high.blank_time = line_time - wm_high.active_time;
		wm_high.interlaced = false;
		if (mode->flags & DRM_MODE_FLAG_INTERLACE)
			wm_high.interlaced = true;
		wm_high.vsc = radeon_crtc->vsc;
		wm_high.vtaps = 1;
		if (radeon_crtc->rmx_type != RMX_OFF)
			wm_high.vtaps = 2;
		wm_high.bytes_per_pixel = 4; /* XXX: get this from fb config */
		wm_high.lb_size = lb_size;
		wm_high.dram_channels = dram_channels;
		wm_high.num_heads = num_heads;

		/* watermark for low clocks */
		if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) {
			wm_low.yclk =
				radeon_dpm_get_mclk(rdev, true) * 10;
			wm_low.sclk =
				radeon_dpm_get_sclk(rdev, true) * 10;
		} else {
			wm_low.yclk = rdev->pm.current_mclk * 10;
			wm_low.sclk = rdev->pm.current_sclk * 10;
		}

		wm_low.disp_clk = mode->clock;
		wm_low.src_width = mode->crtc_hdisplay;
		wm_low.active_time = mode->crtc_hdisplay * pixel_period;
		wm_low.blank_time = line_time - wm_low.active_time;
		wm_low.interlaced = false;
		if (mode->flags & DRM_MODE_FLAG_INTERLACE)
			wm_low.interlaced = true;
		wm_low.vsc = radeon_crtc->vsc;
		wm_low.vtaps = 1;
		if (radeon_crtc->rmx_type != RMX_OFF)
			wm_low.vtaps = 2;
		wm_low.bytes_per_pixel = 4; /* XXX: get this from fb config */
		wm_low.lb_size = lb_size;
		wm_low.dram_channels = dram_channels;
		wm_low.num_heads = num_heads;

		/* set for high clocks */
		latency_watermark_a = min(evergreen_latency_watermark(&wm_high), (u32)65535);
		/* set for low clocks */
		latency_watermark_b = min(evergreen_latency_watermark(&wm_low), (u32)65535);

		/* possibly force display priority to high */
		/* should really do this at mode validation time... */
		if (!evergreen_average_bandwidth_vs_dram_bandwidth_for_display(&wm_high) ||
		    !evergreen_average_bandwidth_vs_available_bandwidth(&wm_high) ||
		    !evergreen_check_latency_hiding(&wm_high) ||
		    (rdev->disp_priority == 2)) {
			DRM_DEBUG_KMS("force priority a to high\n");
			priority_a_cnt |= PRIORITY_ALWAYS_ON;
		}
		if (!evergreen_average_bandwidth_vs_dram_bandwidth_for_display(&wm_low) ||
		    !evergreen_average_bandwidth_vs_available_bandwidth(&wm_low) ||
		    !evergreen_check_latency_hiding(&wm_low) ||
		    (rdev->disp_priority == 2)) {
			DRM_DEBUG_KMS("force priority b to high\n");
			priority_b_cnt |= PRIORITY_ALWAYS_ON;
		}

		/* priority mark A: watermark (ns) * pixel clock * hsc,
		 * converted to units of 16 pixels
		 */
		a.full = dfixed_const(1000);
		b.full = dfixed_const(mode->clock);
		b.full = dfixed_div(b, a);
		c.full = dfixed_const(latency_watermark_a);
		c.full = dfixed_mul(c, b);
		c.full = dfixed_mul(c, radeon_crtc->hsc);
		c.full = dfixed_div(c, a);
		a.full = dfixed_const(16);
		c.full = dfixed_div(c, a);
		priority_a_mark = dfixed_trunc(c);
		priority_a_cnt |= priority_a_mark & PRIORITY_MARK_MASK;

		/* priority mark B, same computation with the low-clock watermark */
		a.full = dfixed_const(1000);
		b.full = dfixed_const(mode->clock);
		b.full = dfixed_div(b, a);
		c.full = dfixed_const(latency_watermark_b);
		c.full = dfixed_mul(c, b);
		c.full = dfixed_mul(c, radeon_crtc->hsc);
		c.full = dfixed_div(c, a);
		a.full = dfixed_const(16);
		c.full = dfixed_div(c, a);
		priority_b_mark = dfixed_trunc(c);
		priority_b_cnt |= priority_b_mark & PRIORITY_MARK_MASK;

		/* Save number of lines the linebuffer leads before the scanout */
		radeon_crtc->lb_vblank_lead_lines = DIV_ROUND_UP(lb_size, mode->crtc_hdisplay);
	}

	/* select wm A */
	arb_control3 = RREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset);
	tmp = arb_control3;
	tmp &= ~LATENCY_WATERMARK_MASK(3);
	tmp |= LATENCY_WATERMARK_MASK(1);
	WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, tmp);
	WREG32(PIPE0_LATENCY_CONTROL + pipe_offset,
	       (LATENCY_LOW_WATERMARK(latency_watermark_a) |
		LATENCY_HIGH_WATERMARK(line_time)));
	/* select wm B */
	tmp = RREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset);
	tmp &= ~LATENCY_WATERMARK_MASK(3);
	tmp |= LATENCY_WATERMARK_MASK(2);
	WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, tmp);
	WREG32(PIPE0_LATENCY_CONTROL + pipe_offset,
	       (LATENCY_LOW_WATERMARK(latency_watermark_b) |
		LATENCY_HIGH_WATERMARK(line_time)));
	/* restore original selection */
	WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, arb_control3);

	/* write the priority marks */
	WREG32(PRIORITY_A_CNT + radeon_crtc->crtc_offset, priority_a_cnt);
	WREG32(PRIORITY_B_CNT + radeon_crtc->crtc_offset, priority_b_cnt);

	/* save values for DPM */
	radeon_crtc->line_time = line_time;
	radeon_crtc->wm_high = latency_watermark_a;
	radeon_crtc->wm_low = latency_watermark_b;
}
2399 
2400 /**
2401  * evergreen_bandwidth_update - update display watermarks callback.
2402  *
2403  * @rdev: radeon_device pointer
2404  *
2405  * Update the display watermarks based on the requested mode(s)
2406  * (evergreen+).
2407  */
void evergreen_bandwidth_update(struct radeon_device *rdev)
{
	struct drm_display_mode *mode0 = NULL;
	struct drm_display_mode *mode1 = NULL;
	u32 num_heads = 0, lb_size;
	int i;

	if (!rdev->mode_info.mode_config_initialized)
		return;

	radeon_update_display_priority(rdev);

	/* count the active heads first; the watermark math divides by this */
	for (i = 0; i < rdev->num_crtc; i++) {
		if (rdev->mode_info.crtcs[i]->base.enabled)
			num_heads++;
	}
	/* crtcs are handled in pairs sharing one line buffer; each member
	 * of the pair gets its allocation, then its watermarks.
	 * NOTE(review): the i+1 access assumes num_crtc is even — holds for
	 * evergreen parts, confirm if new asics are added.
	 */
	for (i = 0; i < rdev->num_crtc; i += 2) {
		mode0 = &rdev->mode_info.crtcs[i]->base.mode;
		mode1 = &rdev->mode_info.crtcs[i+1]->base.mode;
		lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i], mode0, mode1);
		evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i], lb_size, num_heads);
		lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i+1], mode1, mode0);
		evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i+1], lb_size, num_heads);
	}
}
2433 
2434 /**
2435  * evergreen_mc_wait_for_idle - wait for MC idle callback.
2436  *
2437  * @rdev: radeon_device pointer
2438  *
2439  * Wait for the MC (memory controller) to be idle.
2440  * (evergreen+).
2441  * Returns 0 if the MC is idle, -1 if not.
2442  */
2443 int evergreen_mc_wait_for_idle(struct radeon_device *rdev)
2444 {
2445 	unsigned i;
2446 	u32 tmp;
2447 
2448 	for (i = 0; i < rdev->usec_timeout; i++) {
2449 		/* read MC_STATUS */
2450 		tmp = RREG32(SRBM_STATUS) & 0x1F00;
2451 		if (!tmp)
2452 			return 0;
2453 		udelay(1);
2454 	}
2455 	return -1;
2456 }
2457 
2458 /*
2459  * GART
2460  */
/* Flush the HDP cache and the VM context0 TLB, then poll the request/
 * response register until the MC acknowledges (or times out).
 */
void evergreen_pcie_gart_tlb_flush(struct radeon_device *rdev)
{
	unsigned i;
	u32 tmp;

	WREG32(HDP_MEM_COHERENCY_FLUSH_CNTL, 0x1);

	WREG32(VM_CONTEXT0_REQUEST_RESPONSE, REQUEST_TYPE(1));
	for (i = 0; i < rdev->usec_timeout; i++) {
		/* read MC_STATUS */
		tmp = RREG32(VM_CONTEXT0_REQUEST_RESPONSE);
		tmp = (tmp & RESPONSE_TYPE_MASK) >> RESPONSE_TYPE_SHIFT;
		/* response type 2 means the flush failed */
		if (tmp == 2) {
			printk(KERN_WARNING "[drm] r600 flush TLB failed\n");
			return;
		}
		/* any other non-zero response means done */
		if (tmp) {
			return;
		}
		udelay(1);
	}
}
2483 
/* Bring up the PCIE GART: pin the page table in VRAM, program the VM L2
 * cache and L1 TLBs, set the context0 aperture and page table base, then
 * flush. Returns 0 on success or a negative errno.
 */
static int evergreen_pcie_gart_enable(struct radeon_device *rdev)
{
	u32 tmp;
	int r;

	if (rdev->gart.robj == NULL) {
		dev_err(rdev->dev, "No VRAM object for PCIE GART.\n");
		return -EINVAL;
	}
	r = radeon_gart_table_vram_pin(rdev);
	if (r)
		return r;
	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
				ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control */
	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
		SYSTEM_ACCESS_MODE_NOT_IN_SYS |
		SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
		EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	/* IGP parts use the FUS_ register block for the MD TLBs */
	if (rdev->flags & RADEON_IS_IGP) {
		WREG32(FUS_MC_VM_MD_L1_TLB0_CNTL, tmp);
		WREG32(FUS_MC_VM_MD_L1_TLB1_CNTL, tmp);
		WREG32(FUS_MC_VM_MD_L1_TLB2_CNTL, tmp);
	} else {
		WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
		WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
		WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
		/* these families have a fourth MD TLB */
		if ((rdev->family == CHIP_JUNIPER) ||
		    (rdev->family == CHIP_CYPRESS) ||
		    (rdev->family == CHIP_HEMLOCK) ||
		    (rdev->family == CHIP_BARTS))
			WREG32(MC_VM_MD_L1_TLB3_CNTL, tmp);
	}
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	/* context0 covers the whole GTT aperture; addresses are in pages */
	WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12);
	WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12);
	WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12);
	WREG32(VM_CONTEXT0_CNTL, ENABLE_CONTEXT | PAGE_TABLE_DEPTH(0) |
				RANGE_PROTECTION_FAULT_ENABLE_DEFAULT);
	/* faults are redirected to the dummy page */
	WREG32(VM_CONTEXT0_PROTECTION_FAULT_DEFAULT_ADDR,
			(u32)(rdev->dummy_page.addr >> 12));
	WREG32(VM_CONTEXT1_CNTL, 0);

	evergreen_pcie_gart_tlb_flush(rdev);
	DRM_INFO("PCIE GART of %uM enabled (table at 0x%016llX).\n",
		 (unsigned)(rdev->mc.gtt_size >> 20),
		 (unsigned long long)rdev->gart.table_addr);
	rdev->gart.ready = true;
	return 0;
}
2541 
/* Tear down the PCIE GART: disable both VM contexts, drop the L1/L2
 * enables programmed by gart_enable, and unpin the page table.
 */
static void evergreen_pcie_gart_disable(struct radeon_device *rdev)
{
	u32 tmp;

	/* Disable all tables */
	WREG32(VM_CONTEXT0_CNTL, 0);
	WREG32(VM_CONTEXT1_CNTL, 0);

	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_FRAGMENT_PROCESSING |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control */
	tmp = EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	radeon_gart_table_vram_unpin(rdev);
}
2566 
/* Final GART teardown: disable the hardware, free the table, free the
 * gart bookkeeping.
 */
static void evergreen_pcie_gart_fini(struct radeon_device *rdev)
{
	evergreen_pcie_gart_disable(rdev);
	radeon_gart_table_vram_free(rdev);
	radeon_gart_fini(rdev);
}
2573 
2574 
/* AGP mode setup: program the same L2/L1 TLB configuration as the GART
 * path but leave both VM contexts disabled (no page table translation).
 */
static void evergreen_agp_enable(struct radeon_device *rdev)
{
	u32 tmp;

	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
				ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control */
	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
		SYSTEM_ACCESS_MODE_NOT_IN_SYS |
		SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
		EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	WREG32(VM_CONTEXT0_CNTL, 0);
	WREG32(VM_CONTEXT1_CNTL, 0);
}
2600 
/* register block offsets for the six DIG (digital encoder) instances */
static const unsigned ni_dig_offsets[] =
{
	NI_DIG0_REGISTER_OFFSET,
	NI_DIG1_REGISTER_OFFSET,
	NI_DIG2_REGISTER_OFFSET,
	NI_DIG3_REGISTER_OFFSET,
	NI_DIG4_REGISTER_OFFSET,
	NI_DIG5_REGISTER_OFFSET
};
2610 
/* UNIPHY transmitter control register offsets, one per PHY instance */
static const unsigned ni_tx_offsets[] =
{
	NI_DCIO_UNIPHY0_UNIPHY_TX_CONTROL1,
	NI_DCIO_UNIPHY1_UNIPHY_TX_CONTROL1,
	NI_DCIO_UNIPHY2_UNIPHY_TX_CONTROL1,
	NI_DCIO_UNIPHY3_UNIPHY_TX_CONTROL1,
	NI_DCIO_UNIPHY4_UNIPHY_TX_CONTROL1,
	NI_DCIO_UNIPHY5_UNIPHY_TX_CONTROL1
};
2620 
/* DP register block offsets, one per DP encoder instance */
static const unsigned evergreen_dp_offsets[] =
{
	EVERGREEN_DP0_REGISTER_OFFSET,
	EVERGREEN_DP1_REGISTER_OFFSET,
	EVERGREEN_DP2_REGISTER_OFFSET,
	EVERGREEN_DP3_REGISTER_OFFSET,
	EVERGREEN_DP4_REGISTER_OFFSET,
	EVERGREEN_DP5_REGISTER_OFFSET
};
2630 
2631 
2632 /*
2633  * Assumption is that EVERGREEN_CRTC_MASTER_EN enable for requested crtc
2634  * We go from crtc to connector and it is not relible  since it
2635  * should be an opposite direction .If crtc is enable then
2636  * find the dig_fe which selects this crtc and insure that it enable.
2637  * if such dig_fe is found then find dig_be which selects found dig_be and
2638  * insure that it enable and in DP_SST mode.
2639  * if UNIPHY_PLL_CONTROL1.enable then we should disconnect timing
2640  * from dp symbols clocks .
2641  */
static bool evergreen_is_dp_sst_stream_enabled(struct radeon_device *rdev,
					       unsigned crtc_id, unsigned *ret_dig_fe)
{
	unsigned i;
	unsigned dig_fe;
	unsigned dig_be;
	unsigned dig_en_be;
	unsigned uniphy_pll;
	unsigned digs_fe_selected;
	unsigned dig_be_mode;
	unsigned dig_fe_mask;  /* only valid when found_crtc is true */
	bool is_enabled = false;
	bool found_crtc = false;

	/* loop through all running dig_fe to find selected crtc */
	for (i = 0; i < ARRAY_SIZE(ni_dig_offsets); i++) {
		dig_fe = RREG32(NI_DIG_FE_CNTL + ni_dig_offsets[i]);
		if (dig_fe & NI_DIG_FE_CNTL_SYMCLK_FE_ON &&
		    crtc_id == NI_DIG_FE_CNTL_SOURCE_SELECT(dig_fe)) {
			/* found running pipe */
			found_crtc = true;
			dig_fe_mask = 1 << i;
			dig_fe = i;
			break;
		}
	}

	if (found_crtc) {
		/* loop through all running dig_be to find selected dig_fe */
		for (i = 0; i < ARRAY_SIZE(ni_dig_offsets); i++) {
			dig_be = RREG32(NI_DIG_BE_CNTL + ni_dig_offsets[i]);
			/* if dig_fe_selected by dig_be? */
			digs_fe_selected = NI_DIG_BE_CNTL_FE_SOURCE_SELECT(dig_be);
			dig_be_mode = NI_DIG_FE_CNTL_MODE(dig_be);
			if (dig_fe_mask &  digs_fe_selected &&
			    /* if dig_be in sst mode? */
			    dig_be_mode == NI_DIG_BE_DPSST) {
				dig_en_be = RREG32(NI_DIG_BE_EN_CNTL +
						   ni_dig_offsets[i]);
				uniphy_pll = RREG32(NI_DCIO_UNIPHY0_PLL_CONTROL1 +
						    ni_tx_offsets[i]);
				/* dig_be enable and tx is running */
				if (dig_en_be & NI_DIG_BE_EN_CNTL_ENABLE &&
				    dig_en_be & NI_DIG_BE_EN_CNTL_SYMBCLK_ON &&
				    uniphy_pll & NI_DCIO_UNIPHY0_PLL_CONTROL1_ENABLE) {
					/* report the dig_fe index to the caller;
					 * *ret_dig_fe is written only on success */
					is_enabled = true;
					*ret_dig_fe = dig_fe;
					break;
				}
			}
		}
	}

	return is_enabled;
}
2697 
2698 /*
2699  * Blank dig when in dp sst mode
2700  * Dig ignores crtc timing
2701  */
2702 static void evergreen_blank_dp_output(struct radeon_device *rdev,
2703 				      unsigned dig_fe)
2704 {
2705 	unsigned stream_ctrl;
2706 	unsigned fifo_ctrl;
2707 	unsigned counter = 0;
2708 
2709 	if (dig_fe >= ARRAY_SIZE(evergreen_dp_offsets)) {
2710 		DRM_ERROR("invalid dig_fe %d\n", dig_fe);
2711 		return;
2712 	}
2713 
2714 	stream_ctrl = RREG32(EVERGREEN_DP_VID_STREAM_CNTL +
2715 			     evergreen_dp_offsets[dig_fe]);
2716 	if (!(stream_ctrl & EVERGREEN_DP_VID_STREAM_CNTL_ENABLE)) {
2717 		DRM_ERROR("dig %d , should be enable\n", dig_fe);
2718 		return;
2719 	}
2720 
2721 	stream_ctrl &=~EVERGREEN_DP_VID_STREAM_CNTL_ENABLE;
2722 	WREG32(EVERGREEN_DP_VID_STREAM_CNTL +
2723 	       evergreen_dp_offsets[dig_fe], stream_ctrl);
2724 
2725 	stream_ctrl = RREG32(EVERGREEN_DP_VID_STREAM_CNTL +
2726 			     evergreen_dp_offsets[dig_fe]);
2727 	while (counter < 32 && stream_ctrl & EVERGREEN_DP_VID_STREAM_STATUS) {
2728 		msleep(1);
2729 		counter++;
2730 		stream_ctrl = RREG32(EVERGREEN_DP_VID_STREAM_CNTL +
2731 				     evergreen_dp_offsets[dig_fe]);
2732 	}
2733 	if (counter >= 32 )
2734 		DRM_ERROR("counter exceeds %d\n", counter);
2735 
2736 	fifo_ctrl = RREG32(EVERGREEN_DP_STEER_FIFO + evergreen_dp_offsets[dig_fe]);
2737 	fifo_ctrl |= EVERGREEN_DP_STEER_FIFO_RESET;
2738 	WREG32(EVERGREEN_DP_STEER_FIFO + evergreen_dp_offsets[dig_fe], fifo_ctrl);
2739 
2740 }
2741 
/**
 * evergreen_mc_stop - quiesce all memory-controller clients before an MC update
 * @rdev: radeon device
 * @save: scratch area that receives the VGA register state and the per-CRTC
 *        enabled flags, for later restore by evergreen_mc_resume()
 *
 * Disables the VGA renderer, blanks every enabled display controller, puts
 * the MC into blackout mode and locks the double-buffered display registers
 * so that the framebuffer location can be reprogrammed safely.
 */
void evergreen_mc_stop(struct radeon_device *rdev, struct evergreen_mc_save *save)
{
	u32 crtc_enabled, tmp, frame_count, blackout;
	int i, j;
	unsigned dig_fe;

	bzero(save, sizeof(*save));	/* avoid gcc warning */
	if (!ASIC_IS_NODCE(rdev)) {
		/* remember VGA state so evergreen_mc_resume() can put it back */
		save->vga_render_control = RREG32(VGA_RENDER_CONTROL);
		save->vga_hdp_control = RREG32(VGA_HDP_CONTROL);

		/* disable VGA render */
		WREG32(VGA_RENDER_CONTROL, 0);
	}
	/* blank the display controllers */
	for (i = 0; i < rdev->num_crtc; i++) {
		crtc_enabled = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]) & EVERGREEN_CRTC_MASTER_EN;
		if (crtc_enabled) {
			save->crtc_enabled[i] = true;
			if (ASIC_IS_DCE6(rdev)) {
				/* DCE6: blank by forcing blank data on */
				tmp = RREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i]);
				if (!(tmp & EVERGREEN_CRTC_BLANK_DATA_EN)) {
					radeon_wait_for_vblank(rdev, i);
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
					tmp |= EVERGREEN_CRTC_BLANK_DATA_EN;
					WREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i], tmp);
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
				}
			} else {
				/* pre-DCE6: blank by disabling display read requests */
				tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
				if (!(tmp & EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE)) {
					radeon_wait_for_vblank(rdev, i);
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
					tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
					WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
				}
			}
			/* wait for the next frame so the blank actually takes effect */
			frame_count = radeon_get_vblank_counter(rdev, i);
			for (j = 0; j < rdev->usec_timeout; j++) {
				if (radeon_get_vblank_counter(rdev, i) != frame_count)
					break;
				udelay(1);
			}
			/*we should disable dig if it drives dp sst*/
			/*but we are in radeon_device_init and the topology is unknown*/
			/*and it is available after radeon_modeset_init*/
			/*the following method radeon_atom_encoder_dpms_dig*/
			/*does the job if we initialize it properly*/
			/*for now we do it this manually*/
			/**/
			if (ASIC_IS_DCE5(rdev) &&
			    evergreen_is_dp_sst_stream_enabled(rdev, i ,&dig_fe))
				evergreen_blank_dp_output(rdev, dig_fe);
			/*we could remove 6 lines below*/
			/* XXX this is a hack to avoid strange behavior with EFI on certain systems */
			WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
			tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
			tmp &= ~EVERGREEN_CRTC_MASTER_EN;
			WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
			WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
			/* NOTE(review): marking the CRTC disabled here means the
			 * "lock double buffered regs" loop below (and the unlock
			 * paths in evergreen_mc_resume()) skip this CRTC; this
			 * appears intentional as part of the EFI hack — confirm
			 * against upstream before changing. */
			save->crtc_enabled[i] = false;
			/* ***** */
		} else {
			save->crtc_enabled[i] = false;
		}
	}

	radeon_mc_wait_for_idle(rdev);

	/* enter MC blackout mode (mode 1) unless already there */
	blackout = RREG32(MC_SHARED_BLACKOUT_CNTL);
	if ((blackout & BLACKOUT_MODE_MASK) != 1) {
		/* Block CPU access */
		WREG32(BIF_FB_EN, 0);
		/* blackout the MC */
		blackout &= ~BLACKOUT_MODE_MASK;
		WREG32(MC_SHARED_BLACKOUT_CNTL, blackout | 1);
	}
	/* wait for the MC to settle */
	udelay(100);

	/* lock double buffered regs */
	for (i = 0; i < rdev->num_crtc; i++) {
		if (save->crtc_enabled[i]) {
			tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
			if (!(tmp & EVERGREEN_GRPH_UPDATE_LOCK)) {
				tmp |= EVERGREEN_GRPH_UPDATE_LOCK;
				WREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i], tmp);
			}
			tmp = RREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i]);
			if (!(tmp & 1)) {
				tmp |= 1;
				WREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i], tmp);
			}
		}
	}
}
2840 
/**
 * evergreen_mc_resume - undo evergreen_mc_stop() after the MC was reprogrammed
 * @rdev: radeon device
 * @save: state captured by evergreen_mc_stop()
 *
 * Repoints the CRTC and VGA surfaces at the (possibly relocated) start of
 * VRAM, unlocks the double-buffered display registers, takes the MC out of
 * blackout, re-enables CPU framebuffer access and unblanks the display
 * controllers that were enabled before the stop.
 */
void evergreen_mc_resume(struct radeon_device *rdev, struct evergreen_mc_save *save)
{
	u32 tmp, frame_count;
	int i, j;

	/* update crtc base addresses */
	for (i = 0; i < rdev->num_crtc; i++) {
		WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + crtc_offsets[i],
		       upper_32_bits(rdev->mc.vram_start));
		WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + crtc_offsets[i],
		       upper_32_bits(rdev->mc.vram_start));
		WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + crtc_offsets[i],
		       (u32)rdev->mc.vram_start);
		WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + crtc_offsets[i],
		       (u32)rdev->mc.vram_start);
	}

	if (!ASIC_IS_NODCE(rdev)) {
		/* point the VGA aperture at the start of VRAM as well */
		WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS_HIGH, upper_32_bits(rdev->mc.vram_start));
		WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS, (u32)rdev->mc.vram_start);
	}

	/* unlock regs and wait for update */
	for (i = 0; i < rdev->num_crtc; i++) {
		if (save->crtc_enabled[i]) {
			/* force MASTER_UPDATE_MODE to 3 so surface updates latch */
			tmp = RREG32(EVERGREEN_MASTER_UPDATE_MODE + crtc_offsets[i]);
			if ((tmp & 0x7) != 3) {
				tmp &= ~0x7;
				tmp |= 0x3;
				WREG32(EVERGREEN_MASTER_UPDATE_MODE + crtc_offsets[i], tmp);
			}
			tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
			if (tmp & EVERGREEN_GRPH_UPDATE_LOCK) {
				tmp &= ~EVERGREEN_GRPH_UPDATE_LOCK;
				WREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i], tmp);
			}
			tmp = RREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i]);
			if (tmp & 1) {
				tmp &= ~1;
				WREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i], tmp);
			}
			/* wait until the pending surface-address update has landed */
			for (j = 0; j < rdev->usec_timeout; j++) {
				tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
				if ((tmp & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING) == 0)
					break;
				udelay(1);
			}
		}
	}

	/* unblackout the MC */
	tmp = RREG32(MC_SHARED_BLACKOUT_CNTL);
	tmp &= ~BLACKOUT_MODE_MASK;
	WREG32(MC_SHARED_BLACKOUT_CNTL, tmp);
	/* allow CPU access */
	WREG32(BIF_FB_EN, FB_READ_EN | FB_WRITE_EN);

	for (i = 0; i < rdev->num_crtc; i++) {
		if (save->crtc_enabled[i]) {
			if (ASIC_IS_DCE6(rdev)) {
				/* DCE6: unblank by clearing the forced blank data */
				tmp = RREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i]);
				tmp &= ~EVERGREEN_CRTC_BLANK_DATA_EN;
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
				WREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i], tmp);
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
			} else {
				/* pre-DCE6: unblank by re-enabling display read requests */
				tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
				tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
				WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
			}
			/* wait for the next frame */
			frame_count = radeon_get_vblank_counter(rdev, i);
			for (j = 0; j < rdev->usec_timeout; j++) {
				if (radeon_get_vblank_counter(rdev, i) != frame_count)
					break;
				udelay(1);
			}
		}
	}
	if (!ASIC_IS_NODCE(rdev)) {
		/* Unlock vga access */
		WREG32(VGA_HDP_CONTROL, save->vga_hdp_control);
		mdelay(1);
		WREG32(VGA_RENDER_CONTROL, save->vga_render_control);
	}
}
2929 
/**
 * evergreen_mc_program - program the memory controller's address apertures
 * @rdev: radeon device
 *
 * Stops all MC clients, programs the system/AGP apertures and the
 * framebuffer location (MC_VM_FB_LOCATION) plus the HDP non-surface
 * window, then resumes the MC clients.  Must be done with the display
 * blanked since the framebuffer may move.
 */
void evergreen_mc_program(struct radeon_device *rdev)
{
	struct evergreen_mc_save save;
	u32 tmp;
	int i, j;

	/* Initialize HDP */
	for (i = 0, j = 0; i < 32; i++, j += 0x18) {
		WREG32((0x2c14 + j), 0x00000000);
		WREG32((0x2c18 + j), 0x00000000);
		WREG32((0x2c1c + j), 0x00000000);
		WREG32((0x2c20 + j), 0x00000000);
		WREG32((0x2c24 + j), 0x00000000);
	}
	WREG32(HDP_REG_COHERENCY_FLUSH_CNTL, 0);

	evergreen_mc_stop(rdev, &save);
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
	}
	/* Lockout access through VGA aperture*/
	WREG32(VGA_HDP_CONTROL, VGA_MEMORY_DISABLE);
	/* Update configuration */
	if (rdev->flags & RADEON_IS_AGP) {
		/* system aperture must cover both VRAM and the AGP window,
		 * whichever order they are laid out in the GPU address space */
		if (rdev->mc.vram_start < rdev->mc.gtt_start) {
			/* VRAM before AGP */
			WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
				rdev->mc.vram_start >> 12);
			WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
				rdev->mc.gtt_end >> 12);
		} else {
			/* VRAM after AGP */
			WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
				rdev->mc.gtt_start >> 12);
			WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
				rdev->mc.vram_end >> 12);
		}
	} else {
		WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
			rdev->mc.vram_start >> 12);
		WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
			rdev->mc.vram_end >> 12);
	}
	/* out-of-aperture accesses fault to the VRAM scratch page */
	WREG32(MC_VM_SYSTEM_APERTURE_DEFAULT_ADDR, rdev->vram_scratch.gpu_addr >> 12);
	/* llano/ontario only */
	if ((rdev->family == CHIP_PALM) ||
	    (rdev->family == CHIP_SUMO) ||
	    (rdev->family == CHIP_SUMO2)) {
		tmp = RREG32(MC_FUS_VM_FB_OFFSET) & 0x000FFFFF;
		tmp |= ((rdev->mc.vram_end >> 20) & 0xF) << 24;
		tmp |= ((rdev->mc.vram_start >> 20) & 0xF) << 20;
		WREG32(MC_FUS_VM_FB_OFFSET, tmp);
	}
	/* FB location: top 16 bits = end >> 24, bottom 16 = start >> 24 */
	tmp = ((rdev->mc.vram_end >> 24) & 0xFFFF) << 16;
	tmp |= ((rdev->mc.vram_start >> 24) & 0xFFFF);
	WREG32(MC_VM_FB_LOCATION, tmp);
	WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8));
	WREG32(HDP_NONSURFACE_INFO, (2 << 7) | (1 << 30));
	WREG32(HDP_NONSURFACE_SIZE, 0x3FFFFFFF);
	if (rdev->flags & RADEON_IS_AGP) {
		WREG32(MC_VM_AGP_TOP, rdev->mc.gtt_end >> 16);
		WREG32(MC_VM_AGP_BOT, rdev->mc.gtt_start >> 16);
		WREG32(MC_VM_AGP_BASE, rdev->mc.agp_base >> 22);
	} else {
		/* BOT > TOP effectively disables the AGP aperture */
		WREG32(MC_VM_AGP_BASE, 0);
		WREG32(MC_VM_AGP_TOP, 0x0FFFFFFF);
		WREG32(MC_VM_AGP_BOT, 0x0FFFFFFF);
	}
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
	}
	evergreen_mc_resume(rdev, &save);
	/* we need to own VRAM, so turn off the VGA renderer here
	 * to stop it overwriting our objects */
	rv515_vga_render_disable(rdev);
}
3006 
3007 /*
3008  * CP.
3009  */
/**
 * evergreen_ring_ib_execute - schedule an indirect buffer on the GFX ring
 * @rdev: radeon device
 * @ib: indirect buffer to execute
 *
 * Emits the ring packets that switch the CP to DX10/11 mode, optionally
 * record the post-IB read pointer (for debugging/writeback), and chain to
 * the IB via PACKET3_INDIRECT_BUFFER.
 */
void evergreen_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib)
{
	struct radeon_ring *ring = &rdev->ring[ib->ring];
	u32 next_rptr;

	/* set to DX10/11 mode */
	radeon_ring_write(ring, PACKET3(PACKET3_MODE_CONTROL, 0));
	radeon_ring_write(ring, 1);

	if (ring->rptr_save_reg) {
		/* +3 words for this packet, +4 for the INDIRECT_BUFFER below */
		next_rptr = ring->wptr + 3 + 4;
		radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
		radeon_ring_write(ring, ((ring->rptr_save_reg -
					  PACKET3_SET_CONFIG_REG_START) >> 2));
		radeon_ring_write(ring, next_rptr);
	} else if (rdev->wb.enabled) {
		/* +5 words for this MEM_WRITE packet, +4 for INDIRECT_BUFFER */
		next_rptr = ring->wptr + 5 + 4;
		radeon_ring_write(ring, PACKET3(PACKET3_MEM_WRITE, 3));
		radeon_ring_write(ring, ring->next_rptr_gpu_addr & 0xfffffffc);
		/* high address bits plus 32-bit data-size flag (1 << 18) */
		radeon_ring_write(ring, (upper_32_bits(ring->next_rptr_gpu_addr) & 0xff) | (1 << 18));
		radeon_ring_write(ring, next_rptr);
		radeon_ring_write(ring, 0);
	}

	radeon_ring_write(ring, PACKET3(PACKET3_INDIRECT_BUFFER, 2));
	radeon_ring_write(ring,
#ifdef __BIG_ENDIAN
			  (2 << 0) |
#endif
			  (ib->gpu_addr & 0xFFFFFFFC));
	radeon_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFF);
	radeon_ring_write(ring, ib->length_dw);
}
3043 
3044 
/**
 * evergreen_cp_load_microcode - upload PFP and ME firmware into the CP
 * @rdev: radeon device
 *
 * Stops the CP and writes the big-endian firmware images word by word into
 * the PFP ucode and ME RAM, resetting the write addresses afterwards.
 *
 * Returns 0 on success, -EINVAL if the firmware has not been loaded.
 */
static int evergreen_cp_load_microcode(struct radeon_device *rdev)
{
	const __be32 *fw_data;
	int i;

	if (!rdev->me_fw || !rdev->pfp_fw)
		return -EINVAL;

	r700_cp_stop(rdev);
	WREG32(CP_RB_CNTL,
#ifdef __BIG_ENDIAN
	       BUF_SWAP_32BIT |
#endif
	       RB_NO_UPDATE | RB_BLKSZ(15) | RB_BUFSZ(3));

	/* upload pre-fetch parser (PFP) microcode */
	fw_data = (const __be32 *)rdev->pfp_fw->data;
	WREG32(CP_PFP_UCODE_ADDR, 0);
	for (i = 0; i < EVERGREEN_PFP_UCODE_SIZE; i++)
		WREG32(CP_PFP_UCODE_DATA, be32_to_cpup(fw_data++));
	WREG32(CP_PFP_UCODE_ADDR, 0);

	/* upload micro engine (ME) microcode */
	fw_data = (const __be32 *)rdev->me_fw->data;
	WREG32(CP_ME_RAM_WADDR, 0);
	for (i = 0; i < EVERGREEN_PM4_UCODE_SIZE; i++)
		WREG32(CP_ME_RAM_DATA, be32_to_cpup(fw_data++));

	/* reset ucode addresses so execution starts from the beginning */
	WREG32(CP_PFP_UCODE_ADDR, 0);
	WREG32(CP_ME_RAM_WADDR, 0);
	WREG32(CP_ME_RAM_RADDR, 0);
	return 0;
}
3076 
/**
 * evergreen_cp_start - initialize the CP micro engine and clear-state context
 * @rdev: radeon device
 *
 * Emits the ME_INITIALIZE packet, enables the micro engine, then streams the
 * golden clear-state register defaults (evergreen_default_state) followed by
 * a few fixed register writes.  The dword counts in the ring_lock calls must
 * match the packets emitted exactly.
 *
 * Returns 0 on success or the error from radeon_ring_lock().
 */
static int evergreen_cp_start(struct radeon_device *rdev)
{
	struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
	int r, i;
	uint32_t cp_me;

	/* 7 dwords: ME_INITIALIZE header + 6 payload words below */
	r = radeon_ring_lock(rdev, ring, 7);
	if (r) {
		DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
		return r;
	}
	radeon_ring_write(ring, PACKET3(PACKET3_ME_INITIALIZE, 5));
	radeon_ring_write(ring, 0x1);
	radeon_ring_write(ring, 0x0);
	radeon_ring_write(ring, rdev->config.evergreen.max_hw_contexts - 1);
	radeon_ring_write(ring, PACKET3_ME_INITIALIZE_DEVICE_ID(1));
	radeon_ring_write(ring, 0);
	radeon_ring_write(ring, 0);
	radeon_ring_unlock_commit(rdev, ring, false);

	/* enable the micro engine */
	cp_me = 0xff;
	WREG32(CP_ME_CNTL, cp_me);

	/* +19 dwords for the fixed packets surrounding the default state */
	r = radeon_ring_lock(rdev, ring, evergreen_default_size + 19);
	if (r) {
		DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
		return r;
	}

	/* setup clear context state */
	radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
	radeon_ring_write(ring, PACKET3_PREAMBLE_BEGIN_CLEAR_STATE);

	for (i = 0; i < evergreen_default_size; i++)
		radeon_ring_write(ring, evergreen_default_state[i]);

	radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
	radeon_ring_write(ring, PACKET3_PREAMBLE_END_CLEAR_STATE);

	/* set clear context state */
	radeon_ring_write(ring, PACKET3(PACKET3_CLEAR_STATE, 0));
	radeon_ring_write(ring, 0);

	/* SQ_VTX_BASE_VTX_LOC */
	radeon_ring_write(ring, 0xc0026f00);
	radeon_ring_write(ring, 0x00000000);
	radeon_ring_write(ring, 0x00000000);
	radeon_ring_write(ring, 0x00000000);

	/* Clear consts */
	radeon_ring_write(ring, 0xc0036f00);
	radeon_ring_write(ring, 0x00000bc4);
	radeon_ring_write(ring, 0xffffffff);
	radeon_ring_write(ring, 0xffffffff);
	radeon_ring_write(ring, 0xffffffff);

	radeon_ring_write(ring, 0xc0026900);
	radeon_ring_write(ring, 0x00000316);
	radeon_ring_write(ring, 0x0000000e); /* VGT_VERTEX_REUSE_BLOCK_CNTL */
	radeon_ring_write(ring, 0x00000010); /*  */

	radeon_ring_unlock_commit(rdev, ring, false);

	return 0;
}
3142 
/**
 * evergreen_cp_resume - reset and bring up the CP ring buffer
 * @rdev: radeon device
 *
 * Soft-resets the CP (and the blocks that must be reset along with it),
 * programs the ring buffer size, read/write pointers, writeback addresses
 * and base, then starts the CP and ring-tests it.
 *
 * Returns 0 on success or the radeon_ring_test() error (ring left not ready).
 */
static int evergreen_cp_resume(struct radeon_device *rdev)
{
	struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
	u32 tmp;
	u32 rb_bufsz;
	int r;

	/* Reset cp; if cp is reset, then PA, SH, VGT also need to be reset */
	WREG32(GRBM_SOFT_RESET, (SOFT_RESET_CP |
				 SOFT_RESET_PA |
				 SOFT_RESET_SH |
				 SOFT_RESET_VGT |
				 SOFT_RESET_SPI |
				 SOFT_RESET_SX));
	RREG32(GRBM_SOFT_RESET);
	mdelay(15);
	WREG32(GRBM_SOFT_RESET, 0);
	RREG32(GRBM_SOFT_RESET);

	/* Set ring buffer size */
	rb_bufsz = order_base_2(ring->ring_size / 8);
	tmp = (order_base_2(RADEON_GPU_PAGE_SIZE/8) << 8) | rb_bufsz;
#ifdef __BIG_ENDIAN
	tmp |= BUF_SWAP_32BIT;
#endif
	WREG32(CP_RB_CNTL, tmp);
	WREG32(CP_SEM_WAIT_TIMER, 0x0);
	WREG32(CP_SEM_INCOMPLETE_TIMER_CNTL, 0x0);

	/* Set the write pointer delay */
	WREG32(CP_RB_WPTR_DELAY, 0);

	/* Initialize the ring buffer's read and write pointers */
	WREG32(CP_RB_CNTL, tmp | RB_RPTR_WR_ENA);
	WREG32(CP_RB_RPTR_WR, 0);
	ring->wptr = 0;
	WREG32(CP_RB_WPTR, ring->wptr);

	/* set the wb address whether it's enabled or not */
	WREG32(CP_RB_RPTR_ADDR,
	       ((rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFFFFFFFC));
	WREG32(CP_RB_RPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFF);
	WREG32(SCRATCH_ADDR, ((rdev->wb.gpu_addr + RADEON_WB_SCRATCH_OFFSET) >> 8) & 0xFFFFFFFF);

	if (rdev->wb.enabled)
		WREG32(SCRATCH_UMSK, 0xff);
	else {
		/* no writeback: disable rptr updates and scratch writes */
		tmp |= RB_NO_UPDATE;
		WREG32(SCRATCH_UMSK, 0);
	}

	/* drop RB_RPTR_WR_ENA again now that the pointers are programmed */
	mdelay(1);
	WREG32(CP_RB_CNTL, tmp);

	WREG32(CP_RB_BASE, ring->gpu_addr >> 8);
	WREG32(CP_DEBUG, (1 << 27) | (1 << 28));

	evergreen_cp_start(rdev);
	ring->ready = true;
	r = radeon_ring_test(rdev, RADEON_RING_TYPE_GFX_INDEX, ring);
	if (r) {
		ring->ready = false;
		return r;
	}
	return 0;
}
3209 
3210 /*
3211  * Core functions
3212  */
3213 static void evergreen_gpu_init(struct radeon_device *rdev)
3214 {
3215 	u32 gb_addr_config;
3216 	u32 mc_shared_chmap, mc_arb_ramcfg;
3217 	u32 sx_debug_1;
3218 	u32 smx_dc_ctl0;
3219 	u32 sq_config;
3220 	u32 sq_lds_resource_mgmt;
3221 	u32 sq_gpr_resource_mgmt_1;
3222 	u32 sq_gpr_resource_mgmt_2;
3223 	u32 sq_gpr_resource_mgmt_3;
3224 	u32 sq_thread_resource_mgmt;
3225 	u32 sq_thread_resource_mgmt_2;
3226 	u32 sq_stack_resource_mgmt_1;
3227 	u32 sq_stack_resource_mgmt_2;
3228 	u32 sq_stack_resource_mgmt_3;
3229 	u32 vgt_cache_invalidation;
3230 	u32 hdp_host_path_cntl, tmp;
3231 	u32 disabled_rb_mask;
3232 	int i, j, ps_thread_count;
3233 
3234 	switch (rdev->family) {
3235 	case CHIP_CYPRESS:
3236 	case CHIP_HEMLOCK:
3237 		rdev->config.evergreen.num_ses = 2;
3238 		rdev->config.evergreen.max_pipes = 4;
3239 		rdev->config.evergreen.max_tile_pipes = 8;
3240 		rdev->config.evergreen.max_simds = 10;
3241 		rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
3242 		rdev->config.evergreen.max_gprs = 256;
3243 		rdev->config.evergreen.max_threads = 248;
3244 		rdev->config.evergreen.max_gs_threads = 32;
3245 		rdev->config.evergreen.max_stack_entries = 512;
3246 		rdev->config.evergreen.sx_num_of_sets = 4;
3247 		rdev->config.evergreen.sx_max_export_size = 256;
3248 		rdev->config.evergreen.sx_max_export_pos_size = 64;
3249 		rdev->config.evergreen.sx_max_export_smx_size = 192;
3250 		rdev->config.evergreen.max_hw_contexts = 8;
3251 		rdev->config.evergreen.sq_num_cf_insts = 2;
3252 
3253 		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3254 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3255 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3256 		gb_addr_config = CYPRESS_GB_ADDR_CONFIG_GOLDEN;
3257 		break;
3258 	case CHIP_JUNIPER:
3259 		rdev->config.evergreen.num_ses = 1;
3260 		rdev->config.evergreen.max_pipes = 4;
3261 		rdev->config.evergreen.max_tile_pipes = 4;
3262 		rdev->config.evergreen.max_simds = 10;
3263 		rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
3264 		rdev->config.evergreen.max_gprs = 256;
3265 		rdev->config.evergreen.max_threads = 248;
3266 		rdev->config.evergreen.max_gs_threads = 32;
3267 		rdev->config.evergreen.max_stack_entries = 512;
3268 		rdev->config.evergreen.sx_num_of_sets = 4;
3269 		rdev->config.evergreen.sx_max_export_size = 256;
3270 		rdev->config.evergreen.sx_max_export_pos_size = 64;
3271 		rdev->config.evergreen.sx_max_export_smx_size = 192;
3272 		rdev->config.evergreen.max_hw_contexts = 8;
3273 		rdev->config.evergreen.sq_num_cf_insts = 2;
3274 
3275 		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3276 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3277 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3278 		gb_addr_config = JUNIPER_GB_ADDR_CONFIG_GOLDEN;
3279 		break;
3280 	case CHIP_REDWOOD:
3281 		rdev->config.evergreen.num_ses = 1;
3282 		rdev->config.evergreen.max_pipes = 4;
3283 		rdev->config.evergreen.max_tile_pipes = 4;
3284 		rdev->config.evergreen.max_simds = 5;
3285 		rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3286 		rdev->config.evergreen.max_gprs = 256;
3287 		rdev->config.evergreen.max_threads = 248;
3288 		rdev->config.evergreen.max_gs_threads = 32;
3289 		rdev->config.evergreen.max_stack_entries = 256;
3290 		rdev->config.evergreen.sx_num_of_sets = 4;
3291 		rdev->config.evergreen.sx_max_export_size = 256;
3292 		rdev->config.evergreen.sx_max_export_pos_size = 64;
3293 		rdev->config.evergreen.sx_max_export_smx_size = 192;
3294 		rdev->config.evergreen.max_hw_contexts = 8;
3295 		rdev->config.evergreen.sq_num_cf_insts = 2;
3296 
3297 		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3298 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3299 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3300 		gb_addr_config = REDWOOD_GB_ADDR_CONFIG_GOLDEN;
3301 		break;
3302 	case CHIP_CEDAR:
3303 	default:
3304 		rdev->config.evergreen.num_ses = 1;
3305 		rdev->config.evergreen.max_pipes = 2;
3306 		rdev->config.evergreen.max_tile_pipes = 2;
3307 		rdev->config.evergreen.max_simds = 2;
3308 		rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3309 		rdev->config.evergreen.max_gprs = 256;
3310 		rdev->config.evergreen.max_threads = 192;
3311 		rdev->config.evergreen.max_gs_threads = 16;
3312 		rdev->config.evergreen.max_stack_entries = 256;
3313 		rdev->config.evergreen.sx_num_of_sets = 4;
3314 		rdev->config.evergreen.sx_max_export_size = 128;
3315 		rdev->config.evergreen.sx_max_export_pos_size = 32;
3316 		rdev->config.evergreen.sx_max_export_smx_size = 96;
3317 		rdev->config.evergreen.max_hw_contexts = 4;
3318 		rdev->config.evergreen.sq_num_cf_insts = 1;
3319 
3320 		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3321 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3322 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3323 		gb_addr_config = CEDAR_GB_ADDR_CONFIG_GOLDEN;
3324 		break;
3325 	case CHIP_PALM:
3326 		rdev->config.evergreen.num_ses = 1;
3327 		rdev->config.evergreen.max_pipes = 2;
3328 		rdev->config.evergreen.max_tile_pipes = 2;
3329 		rdev->config.evergreen.max_simds = 2;
3330 		rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3331 		rdev->config.evergreen.max_gprs = 256;
3332 		rdev->config.evergreen.max_threads = 192;
3333 		rdev->config.evergreen.max_gs_threads = 16;
3334 		rdev->config.evergreen.max_stack_entries = 256;
3335 		rdev->config.evergreen.sx_num_of_sets = 4;
3336 		rdev->config.evergreen.sx_max_export_size = 128;
3337 		rdev->config.evergreen.sx_max_export_pos_size = 32;
3338 		rdev->config.evergreen.sx_max_export_smx_size = 96;
3339 		rdev->config.evergreen.max_hw_contexts = 4;
3340 		rdev->config.evergreen.sq_num_cf_insts = 1;
3341 
3342 		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3343 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3344 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3345 		gb_addr_config = CEDAR_GB_ADDR_CONFIG_GOLDEN;
3346 		break;
3347 	case CHIP_SUMO:
3348 		rdev->config.evergreen.num_ses = 1;
3349 		rdev->config.evergreen.max_pipes = 4;
3350 		rdev->config.evergreen.max_tile_pipes = 4;
3351 		if (rdev->pdev->device == 0x9648)
3352 			rdev->config.evergreen.max_simds = 3;
3353 		else if ((rdev->pdev->device == 0x9647) ||
3354 			 (rdev->pdev->device == 0x964a))
3355 			rdev->config.evergreen.max_simds = 4;
3356 		else
3357 			rdev->config.evergreen.max_simds = 5;
3358 		rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3359 		rdev->config.evergreen.max_gprs = 256;
3360 		rdev->config.evergreen.max_threads = 248;
3361 		rdev->config.evergreen.max_gs_threads = 32;
3362 		rdev->config.evergreen.max_stack_entries = 256;
3363 		rdev->config.evergreen.sx_num_of_sets = 4;
3364 		rdev->config.evergreen.sx_max_export_size = 256;
3365 		rdev->config.evergreen.sx_max_export_pos_size = 64;
3366 		rdev->config.evergreen.sx_max_export_smx_size = 192;
3367 		rdev->config.evergreen.max_hw_contexts = 8;
3368 		rdev->config.evergreen.sq_num_cf_insts = 2;
3369 
3370 		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3371 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3372 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3373 		gb_addr_config = SUMO_GB_ADDR_CONFIG_GOLDEN;
3374 		break;
3375 	case CHIP_SUMO2:
3376 		rdev->config.evergreen.num_ses = 1;
3377 		rdev->config.evergreen.max_pipes = 4;
3378 		rdev->config.evergreen.max_tile_pipes = 4;
3379 		rdev->config.evergreen.max_simds = 2;
3380 		rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3381 		rdev->config.evergreen.max_gprs = 256;
3382 		rdev->config.evergreen.max_threads = 248;
3383 		rdev->config.evergreen.max_gs_threads = 32;
3384 		rdev->config.evergreen.max_stack_entries = 512;
3385 		rdev->config.evergreen.sx_num_of_sets = 4;
3386 		rdev->config.evergreen.sx_max_export_size = 256;
3387 		rdev->config.evergreen.sx_max_export_pos_size = 64;
3388 		rdev->config.evergreen.sx_max_export_smx_size = 192;
3389 		rdev->config.evergreen.max_hw_contexts = 4;
3390 		rdev->config.evergreen.sq_num_cf_insts = 2;
3391 
3392 		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3393 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3394 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3395 		gb_addr_config = SUMO2_GB_ADDR_CONFIG_GOLDEN;
3396 		break;
3397 	case CHIP_BARTS:
3398 		rdev->config.evergreen.num_ses = 2;
3399 		rdev->config.evergreen.max_pipes = 4;
3400 		rdev->config.evergreen.max_tile_pipes = 8;
3401 		rdev->config.evergreen.max_simds = 7;
3402 		rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
3403 		rdev->config.evergreen.max_gprs = 256;
3404 		rdev->config.evergreen.max_threads = 248;
3405 		rdev->config.evergreen.max_gs_threads = 32;
3406 		rdev->config.evergreen.max_stack_entries = 512;
3407 		rdev->config.evergreen.sx_num_of_sets = 4;
3408 		rdev->config.evergreen.sx_max_export_size = 256;
3409 		rdev->config.evergreen.sx_max_export_pos_size = 64;
3410 		rdev->config.evergreen.sx_max_export_smx_size = 192;
3411 		rdev->config.evergreen.max_hw_contexts = 8;
3412 		rdev->config.evergreen.sq_num_cf_insts = 2;
3413 
3414 		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3415 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3416 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3417 		gb_addr_config = BARTS_GB_ADDR_CONFIG_GOLDEN;
3418 		break;
3419 	case CHIP_TURKS:
3420 		rdev->config.evergreen.num_ses = 1;
3421 		rdev->config.evergreen.max_pipes = 4;
3422 		rdev->config.evergreen.max_tile_pipes = 4;
3423 		rdev->config.evergreen.max_simds = 6;
3424 		rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3425 		rdev->config.evergreen.max_gprs = 256;
3426 		rdev->config.evergreen.max_threads = 248;
3427 		rdev->config.evergreen.max_gs_threads = 32;
3428 		rdev->config.evergreen.max_stack_entries = 256;
3429 		rdev->config.evergreen.sx_num_of_sets = 4;
3430 		rdev->config.evergreen.sx_max_export_size = 256;
3431 		rdev->config.evergreen.sx_max_export_pos_size = 64;
3432 		rdev->config.evergreen.sx_max_export_smx_size = 192;
3433 		rdev->config.evergreen.max_hw_contexts = 8;
3434 		rdev->config.evergreen.sq_num_cf_insts = 2;
3435 
3436 		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3437 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3438 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3439 		gb_addr_config = TURKS_GB_ADDR_CONFIG_GOLDEN;
3440 		break;
3441 	case CHIP_CAICOS:
3442 		rdev->config.evergreen.num_ses = 1;
3443 		rdev->config.evergreen.max_pipes = 2;
3444 		rdev->config.evergreen.max_tile_pipes = 2;
3445 		rdev->config.evergreen.max_simds = 2;
3446 		rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3447 		rdev->config.evergreen.max_gprs = 256;
3448 		rdev->config.evergreen.max_threads = 192;
3449 		rdev->config.evergreen.max_gs_threads = 16;
3450 		rdev->config.evergreen.max_stack_entries = 256;
3451 		rdev->config.evergreen.sx_num_of_sets = 4;
3452 		rdev->config.evergreen.sx_max_export_size = 128;
3453 		rdev->config.evergreen.sx_max_export_pos_size = 32;
3454 		rdev->config.evergreen.sx_max_export_smx_size = 96;
3455 		rdev->config.evergreen.max_hw_contexts = 4;
3456 		rdev->config.evergreen.sq_num_cf_insts = 1;
3457 
3458 		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3459 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3460 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3461 		gb_addr_config = CAICOS_GB_ADDR_CONFIG_GOLDEN;
3462 		break;
3463 	}
3464 
3465 	/* Initialize HDP */
3466 	for (i = 0, j = 0; i < 32; i++, j += 0x18) {
3467 		WREG32((0x2c14 + j), 0x00000000);
3468 		WREG32((0x2c18 + j), 0x00000000);
3469 		WREG32((0x2c1c + j), 0x00000000);
3470 		WREG32((0x2c20 + j), 0x00000000);
3471 		WREG32((0x2c24 + j), 0x00000000);
3472 	}
3473 
3474 	WREG32(GRBM_CNTL, GRBM_READ_TIMEOUT(0xff));
3475 	WREG32(SRBM_INT_CNTL, 0x1);
3476 	WREG32(SRBM_INT_ACK, 0x1);
3477 
3478 	evergreen_fix_pci_max_read_req_size(rdev);
3479 
3480 	mc_shared_chmap = RREG32(MC_SHARED_CHMAP);
3481 	if ((rdev->family == CHIP_PALM) ||
3482 	    (rdev->family == CHIP_SUMO) ||
3483 	    (rdev->family == CHIP_SUMO2))
3484 		mc_arb_ramcfg = RREG32(FUS_MC_ARB_RAMCFG);
3485 	else
3486 		mc_arb_ramcfg = RREG32(MC_ARB_RAMCFG);
3487 
3488 	/* setup tiling info dword.  gb_addr_config is not adequate since it does
3489 	 * not have bank info, so create a custom tiling dword.
3490 	 * bits 3:0   num_pipes
3491 	 * bits 7:4   num_banks
3492 	 * bits 11:8  group_size
3493 	 * bits 15:12 row_size
3494 	 */
3495 	rdev->config.evergreen.tile_config = 0;
3496 	switch (rdev->config.evergreen.max_tile_pipes) {
3497 	case 1:
3498 	default:
3499 		rdev->config.evergreen.tile_config |= (0 << 0);
3500 		break;
3501 	case 2:
3502 		rdev->config.evergreen.tile_config |= (1 << 0);
3503 		break;
3504 	case 4:
3505 		rdev->config.evergreen.tile_config |= (2 << 0);
3506 		break;
3507 	case 8:
3508 		rdev->config.evergreen.tile_config |= (3 << 0);
3509 		break;
3510 	}
3511 	/* num banks is 8 on all fusion asics. 0 = 4, 1 = 8, 2 = 16 */
3512 	if (rdev->flags & RADEON_IS_IGP)
3513 		rdev->config.evergreen.tile_config |= 1 << 4;
3514 	else {
3515 		switch ((mc_arb_ramcfg & NOOFBANK_MASK) >> NOOFBANK_SHIFT) {
3516 		case 0: /* four banks */
3517 			rdev->config.evergreen.tile_config |= 0 << 4;
3518 			break;
3519 		case 1: /* eight banks */
3520 			rdev->config.evergreen.tile_config |= 1 << 4;
3521 			break;
3522 		case 2: /* sixteen banks */
3523 		default:
3524 			rdev->config.evergreen.tile_config |= 2 << 4;
3525 			break;
3526 		}
3527 	}
3528 	rdev->config.evergreen.tile_config |= 0 << 8;
3529 	rdev->config.evergreen.tile_config |=
3530 		((gb_addr_config & 0x30000000) >> 28) << 12;
3531 
3532 	if ((rdev->family >= CHIP_CEDAR) && (rdev->family <= CHIP_HEMLOCK)) {
3533 		u32 efuse_straps_4;
3534 		u32 efuse_straps_3;
3535 
3536 		efuse_straps_4 = RREG32_RCU(0x204);
3537 		efuse_straps_3 = RREG32_RCU(0x203);
3538 		tmp = (((efuse_straps_4 & 0xf) << 4) |
3539 		      ((efuse_straps_3 & 0xf0000000) >> 28));
3540 	} else {
3541 		tmp = 0;
3542 		for (i = (rdev->config.evergreen.num_ses - 1); i >= 0; i--) {
3543 			u32 rb_disable_bitmap;
3544 
3545 			WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3546 			WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3547 			rb_disable_bitmap = (RREG32(CC_RB_BACKEND_DISABLE) & 0x00ff0000) >> 16;
3548 			tmp <<= 4;
3549 			tmp |= rb_disable_bitmap;
3550 		}
3551 	}
3552 	/* enabled rb are just the one not disabled :) */
3553 	disabled_rb_mask = tmp;
3554 	tmp = 0;
3555 	for (i = 0; i < rdev->config.evergreen.max_backends; i++)
3556 		tmp |= (1 << i);
3557 	/* if all the backends are disabled, fix it up here */
3558 	if ((disabled_rb_mask & tmp) == tmp) {
3559 		for (i = 0; i < rdev->config.evergreen.max_backends; i++)
3560 			disabled_rb_mask &= ~(1 << i);
3561 	}
3562 
3563 	for (i = 0; i < rdev->config.evergreen.num_ses; i++) {
3564 		u32 simd_disable_bitmap;
3565 
3566 		WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3567 		WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3568 		simd_disable_bitmap = (RREG32(CC_GC_SHADER_PIPE_CONFIG) & 0xffff0000) >> 16;
3569 		simd_disable_bitmap |= 0xffffffff << rdev->config.evergreen.max_simds;
3570 		tmp <<= 16;
3571 		tmp |= simd_disable_bitmap;
3572 	}
3573 	rdev->config.evergreen.active_simds = hweight32(~tmp);
3574 
3575 	WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_BROADCAST_WRITES);
3576 	WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_BROADCAST_WRITES);
3577 
3578 	WREG32(GB_ADDR_CONFIG, gb_addr_config);
3579 	WREG32(DMIF_ADDR_CONFIG, gb_addr_config);
3580 	WREG32(HDP_ADDR_CONFIG, gb_addr_config);
3581 	WREG32(DMA_TILING_CONFIG, gb_addr_config);
3582 	WREG32(UVD_UDEC_ADDR_CONFIG, gb_addr_config);
3583 	WREG32(UVD_UDEC_DB_ADDR_CONFIG, gb_addr_config);
3584 	WREG32(UVD_UDEC_DBW_ADDR_CONFIG, gb_addr_config);
3585 
3586 	if ((rdev->config.evergreen.max_backends == 1) &&
3587 	    (rdev->flags & RADEON_IS_IGP)) {
3588 		if ((disabled_rb_mask & 3) == 1) {
3589 			/* RB0 disabled, RB1 enabled */
3590 			tmp = 0x11111111;
3591 		} else {
3592 			/* RB1 disabled, RB0 enabled */
3593 			tmp = 0x00000000;
3594 		}
3595 	} else {
3596 		tmp = gb_addr_config & NUM_PIPES_MASK;
3597 		tmp = r6xx_remap_render_backend(rdev, tmp, rdev->config.evergreen.max_backends,
3598 						EVERGREEN_MAX_BACKENDS, disabled_rb_mask);
3599 	}
3600 	WREG32(GB_BACKEND_MAP, tmp);
3601 
3602 	WREG32(CGTS_SYS_TCC_DISABLE, 0);
3603 	WREG32(CGTS_TCC_DISABLE, 0);
3604 	WREG32(CGTS_USER_SYS_TCC_DISABLE, 0);
3605 	WREG32(CGTS_USER_TCC_DISABLE, 0);
3606 
3607 	/* set HW defaults for 3D engine */
3608 	WREG32(CP_QUEUE_THRESHOLDS, (ROQ_IB1_START(0x16) |
3609 				     ROQ_IB2_START(0x2b)));
3610 
3611 	WREG32(CP_MEQ_THRESHOLDS, STQ_SPLIT(0x30));
3612 
3613 	WREG32(TA_CNTL_AUX, (DISABLE_CUBE_ANISO |
3614 			     SYNC_GRADIENT |
3615 			     SYNC_WALKER |
3616 			     SYNC_ALIGNER));
3617 
3618 	sx_debug_1 = RREG32(SX_DEBUG_1);
3619 	sx_debug_1 |= ENABLE_NEW_SMX_ADDRESS;
3620 	WREG32(SX_DEBUG_1, sx_debug_1);
3621 
3622 
3623 	smx_dc_ctl0 = RREG32(SMX_DC_CTL0);
3624 	smx_dc_ctl0 &= ~NUMBER_OF_SETS(0x1ff);
3625 	smx_dc_ctl0 |= NUMBER_OF_SETS(rdev->config.evergreen.sx_num_of_sets);
3626 	WREG32(SMX_DC_CTL0, smx_dc_ctl0);
3627 
3628 	if (rdev->family <= CHIP_SUMO2)
3629 		WREG32(SMX_SAR_CTL0, 0x00010000);
3630 
3631 	WREG32(SX_EXPORT_BUFFER_SIZES, (COLOR_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_size / 4) - 1) |
3632 					POSITION_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_pos_size / 4) - 1) |
3633 					SMX_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_smx_size / 4) - 1)));
3634 
3635 	WREG32(PA_SC_FIFO_SIZE, (SC_PRIM_FIFO_SIZE(rdev->config.evergreen.sc_prim_fifo_size) |
3636 				 SC_HIZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_hiz_tile_fifo_size) |
3637 				 SC_EARLYZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_earlyz_tile_fifo_size)));
3638 
3639 	WREG32(VGT_NUM_INSTANCES, 1);
3640 	WREG32(SPI_CONFIG_CNTL, 0);
3641 	WREG32(SPI_CONFIG_CNTL_1, VTX_DONE_DELAY(4));
3642 	WREG32(CP_PERFMON_CNTL, 0);
3643 
3644 	WREG32(SQ_MS_FIFO_SIZES, (CACHE_FIFO_SIZE(16 * rdev->config.evergreen.sq_num_cf_insts) |
3645 				  FETCH_FIFO_HIWATER(0x4) |
3646 				  DONE_FIFO_HIWATER(0xe0) |
3647 				  ALU_UPDATE_FIFO_HIWATER(0x8)));
3648 
3649 	sq_config = RREG32(SQ_CONFIG);
3650 	sq_config &= ~(PS_PRIO(3) |
3651 		       VS_PRIO(3) |
3652 		       GS_PRIO(3) |
3653 		       ES_PRIO(3));
3654 	sq_config |= (VC_ENABLE |
3655 		      EXPORT_SRC_C |
3656 		      PS_PRIO(0) |
3657 		      VS_PRIO(1) |
3658 		      GS_PRIO(2) |
3659 		      ES_PRIO(3));
3660 
3661 	switch (rdev->family) {
3662 	case CHIP_CEDAR:
3663 	case CHIP_PALM:
3664 	case CHIP_SUMO:
3665 	case CHIP_SUMO2:
3666 	case CHIP_CAICOS:
3667 		/* no vertex cache */
3668 		sq_config &= ~VC_ENABLE;
3669 		break;
3670 	default:
3671 		break;
3672 	}
3673 
3674 	sq_lds_resource_mgmt = RREG32(SQ_LDS_RESOURCE_MGMT);
3675 
3676 	sq_gpr_resource_mgmt_1 = NUM_PS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2))* 12 / 32);
3677 	sq_gpr_resource_mgmt_1 |= NUM_VS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 6 / 32);
3678 	sq_gpr_resource_mgmt_1 |= NUM_CLAUSE_TEMP_GPRS(4);
3679 	sq_gpr_resource_mgmt_2 = NUM_GS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
3680 	sq_gpr_resource_mgmt_2 |= NUM_ES_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
3681 	sq_gpr_resource_mgmt_3 = NUM_HS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
3682 	sq_gpr_resource_mgmt_3 |= NUM_LS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
3683 
3684 	switch (rdev->family) {
3685 	case CHIP_CEDAR:
3686 	case CHIP_PALM:
3687 	case CHIP_SUMO:
3688 	case CHIP_SUMO2:
3689 		ps_thread_count = 96;
3690 		break;
3691 	default:
3692 		ps_thread_count = 128;
3693 		break;
3694 	}
3695 
3696 	sq_thread_resource_mgmt = NUM_PS_THREADS(ps_thread_count);
3697 	sq_thread_resource_mgmt |= NUM_VS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3698 	sq_thread_resource_mgmt |= NUM_GS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3699 	sq_thread_resource_mgmt |= NUM_ES_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3700 	sq_thread_resource_mgmt_2 = NUM_HS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3701 	sq_thread_resource_mgmt_2 |= NUM_LS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3702 
3703 	sq_stack_resource_mgmt_1 = NUM_PS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3704 	sq_stack_resource_mgmt_1 |= NUM_VS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3705 	sq_stack_resource_mgmt_2 = NUM_GS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3706 	sq_stack_resource_mgmt_2 |= NUM_ES_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3707 	sq_stack_resource_mgmt_3 = NUM_HS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3708 	sq_stack_resource_mgmt_3 |= NUM_LS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3709 
3710 	WREG32(SQ_CONFIG, sq_config);
3711 	WREG32(SQ_GPR_RESOURCE_MGMT_1, sq_gpr_resource_mgmt_1);
3712 	WREG32(SQ_GPR_RESOURCE_MGMT_2, sq_gpr_resource_mgmt_2);
3713 	WREG32(SQ_GPR_RESOURCE_MGMT_3, sq_gpr_resource_mgmt_3);
3714 	WREG32(SQ_THREAD_RESOURCE_MGMT, sq_thread_resource_mgmt);
3715 	WREG32(SQ_THREAD_RESOURCE_MGMT_2, sq_thread_resource_mgmt_2);
3716 	WREG32(SQ_STACK_RESOURCE_MGMT_1, sq_stack_resource_mgmt_1);
3717 	WREG32(SQ_STACK_RESOURCE_MGMT_2, sq_stack_resource_mgmt_2);
3718 	WREG32(SQ_STACK_RESOURCE_MGMT_3, sq_stack_resource_mgmt_3);
3719 	WREG32(SQ_DYN_GPR_CNTL_PS_FLUSH_REQ, 0);
3720 	WREG32(SQ_LDS_RESOURCE_MGMT, sq_lds_resource_mgmt);
3721 
3722 	WREG32(PA_SC_FORCE_EOV_MAX_CNTS, (FORCE_EOV_MAX_CLK_CNT(4095) |
3723 					  FORCE_EOV_MAX_REZ_CNT(255)));
3724 
3725 	switch (rdev->family) {
3726 	case CHIP_CEDAR:
3727 	case CHIP_PALM:
3728 	case CHIP_SUMO:
3729 	case CHIP_SUMO2:
3730 	case CHIP_CAICOS:
3731 		vgt_cache_invalidation = CACHE_INVALIDATION(TC_ONLY);
3732 		break;
3733 	default:
3734 		vgt_cache_invalidation = CACHE_INVALIDATION(VC_AND_TC);
3735 		break;
3736 	}
3737 	vgt_cache_invalidation |= AUTO_INVLD_EN(ES_AND_GS_AUTO);
3738 	WREG32(VGT_CACHE_INVALIDATION, vgt_cache_invalidation);
3739 
3740 	WREG32(VGT_GS_VERTEX_REUSE, 16);
3741 	WREG32(PA_SU_LINE_STIPPLE_VALUE, 0);
3742 	WREG32(PA_SC_LINE_STIPPLE_STATE, 0);
3743 
3744 	WREG32(VGT_VERTEX_REUSE_BLOCK_CNTL, 14);
3745 	WREG32(VGT_OUT_DEALLOC_CNTL, 16);
3746 
3747 	WREG32(CB_PERF_CTR0_SEL_0, 0);
3748 	WREG32(CB_PERF_CTR0_SEL_1, 0);
3749 	WREG32(CB_PERF_CTR1_SEL_0, 0);
3750 	WREG32(CB_PERF_CTR1_SEL_1, 0);
3751 	WREG32(CB_PERF_CTR2_SEL_0, 0);
3752 	WREG32(CB_PERF_CTR2_SEL_1, 0);
3753 	WREG32(CB_PERF_CTR3_SEL_0, 0);
3754 	WREG32(CB_PERF_CTR3_SEL_1, 0);
3755 
3756 	/* clear render buffer base addresses */
3757 	WREG32(CB_COLOR0_BASE, 0);
3758 	WREG32(CB_COLOR1_BASE, 0);
3759 	WREG32(CB_COLOR2_BASE, 0);
3760 	WREG32(CB_COLOR3_BASE, 0);
3761 	WREG32(CB_COLOR4_BASE, 0);
3762 	WREG32(CB_COLOR5_BASE, 0);
3763 	WREG32(CB_COLOR6_BASE, 0);
3764 	WREG32(CB_COLOR7_BASE, 0);
3765 	WREG32(CB_COLOR8_BASE, 0);
3766 	WREG32(CB_COLOR9_BASE, 0);
3767 	WREG32(CB_COLOR10_BASE, 0);
3768 	WREG32(CB_COLOR11_BASE, 0);
3769 
3770 	/* set the shader const cache sizes to 0 */
3771 	for (i = SQ_ALU_CONST_BUFFER_SIZE_PS_0; i < 0x28200; i += 4)
3772 		WREG32(i, 0);
3773 	for (i = SQ_ALU_CONST_BUFFER_SIZE_HS_0; i < 0x29000; i += 4)
3774 		WREG32(i, 0);
3775 
3776 	tmp = RREG32(HDP_MISC_CNTL);
3777 	tmp |= HDP_FLUSH_INVALIDATE_CACHE;
3778 	WREG32(HDP_MISC_CNTL, tmp);
3779 
3780 	hdp_host_path_cntl = RREG32(HDP_HOST_PATH_CNTL);
3781 	WREG32(HDP_HOST_PATH_CNTL, hdp_host_path_cntl);
3782 
3783 	WREG32(PA_CL_ENHANCE, CLIP_VTX_REORDER_ENA | NUM_CLIP_SEQ(3));
3784 
3785 	udelay(50);
3786 
3787 }
3788 
3789 int evergreen_mc_init(struct radeon_device *rdev)
3790 {
3791 	u32 tmp;
3792 	int chansize, numchan;
3793 
3794 	/* Get VRAM informations */
3795 	rdev->mc.vram_is_ddr = true;
3796 	if ((rdev->family == CHIP_PALM) ||
3797 	    (rdev->family == CHIP_SUMO) ||
3798 	    (rdev->family == CHIP_SUMO2))
3799 		tmp = RREG32(FUS_MC_ARB_RAMCFG);
3800 	else
3801 		tmp = RREG32(MC_ARB_RAMCFG);
3802 	if (tmp & CHANSIZE_OVERRIDE) {
3803 		chansize = 16;
3804 	} else if (tmp & CHANSIZE_MASK) {
3805 		chansize = 64;
3806 	} else {
3807 		chansize = 32;
3808 	}
3809 	tmp = RREG32(MC_SHARED_CHMAP);
3810 	switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
3811 	case 0:
3812 	default:
3813 		numchan = 1;
3814 		break;
3815 	case 1:
3816 		numchan = 2;
3817 		break;
3818 	case 2:
3819 		numchan = 4;
3820 		break;
3821 	case 3:
3822 		numchan = 8;
3823 		break;
3824 	}
3825 	rdev->mc.vram_width = numchan * chansize;
3826 	/* Could aper size report 0 ? */
3827 	rdev->mc.aper_base = pci_resource_start(rdev->pdev, 0);
3828 	rdev->mc.aper_size = pci_resource_len(rdev->pdev, 0);
3829 	/* Setup GPU memory space */
3830 	if ((rdev->family == CHIP_PALM) ||
3831 	    (rdev->family == CHIP_SUMO) ||
3832 	    (rdev->family == CHIP_SUMO2)) {
3833 		/* size in bytes on fusion */
3834 		rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE);
3835 		rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE);
3836 	} else {
3837 		/* size in MB on evergreen/cayman/tn */
3838 		rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL;
3839 		rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL;
3840 	}
3841 	rdev->mc.visible_vram_size = rdev->mc.aper_size;
3842 	r700_vram_gtt_location(rdev, &rdev->mc);
3843 	radeon_update_bandwidth_info(rdev);
3844 
3845 	return 0;
3846 }
3847 
/*
 * evergreen_print_gpu_status_regs - dump the main GPU status registers.
 *
 * Logs GRBM/SRBM/CP/DMA status registers via dev_info; used around soft
 * resets to snapshot which engine blocks report busy.  Cayman and newer
 * additionally have a second DMA engine at DMA_STATUS_REG + 0x800.
 */
void evergreen_print_gpu_status_regs(struct radeon_device *rdev)
{
	dev_info(rdev->dev, "  GRBM_STATUS               = 0x%08X\n",
		RREG32(GRBM_STATUS));
	dev_info(rdev->dev, "  GRBM_STATUS_SE0           = 0x%08X\n",
		RREG32(GRBM_STATUS_SE0));
	dev_info(rdev->dev, "  GRBM_STATUS_SE1           = 0x%08X\n",
		RREG32(GRBM_STATUS_SE1));
	dev_info(rdev->dev, "  SRBM_STATUS               = 0x%08X\n",
		RREG32(SRBM_STATUS));
	dev_info(rdev->dev, "  SRBM_STATUS2              = 0x%08X\n",
		RREG32(SRBM_STATUS2));
	dev_info(rdev->dev, "  R_008674_CP_STALLED_STAT1 = 0x%08X\n",
		RREG32(CP_STALLED_STAT1));
	dev_info(rdev->dev, "  R_008678_CP_STALLED_STAT2 = 0x%08X\n",
		RREG32(CP_STALLED_STAT2));
	dev_info(rdev->dev, "  R_00867C_CP_BUSY_STAT     = 0x%08X\n",
		RREG32(CP_BUSY_STAT));
	dev_info(rdev->dev, "  R_008680_CP_STAT          = 0x%08X\n",
		RREG32(CP_STAT));
	dev_info(rdev->dev, "  R_00D034_DMA_STATUS_REG   = 0x%08X\n",
		RREG32(DMA_STATUS_REG));
	if (rdev->family >= CHIP_CAYMAN) {
		/* second DMA engine on cayman+ */
		dev_info(rdev->dev, "  R_00D834_DMA_STATUS_REG   = 0x%08X\n",
			 RREG32(DMA_STATUS_REG + 0x800));
	}
}
3875 
3876 bool evergreen_is_display_hung(struct radeon_device *rdev)
3877 {
3878 	u32 crtc_hung = 0;
3879 	u32 crtc_status[6];
3880 	u32 i, j, tmp;
3881 
3882 	for (i = 0; i < rdev->num_crtc; i++) {
3883 		if (RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]) & EVERGREEN_CRTC_MASTER_EN) {
3884 			crtc_status[i] = RREG32(EVERGREEN_CRTC_STATUS_HV_COUNT + crtc_offsets[i]);
3885 			crtc_hung |= (1 << i);
3886 		}
3887 	}
3888 
3889 	for (j = 0; j < 10; j++) {
3890 		for (i = 0; i < rdev->num_crtc; i++) {
3891 			if (crtc_hung & (1 << i)) {
3892 				tmp = RREG32(EVERGREEN_CRTC_STATUS_HV_COUNT + crtc_offsets[i]);
3893 				if (tmp != crtc_status[i])
3894 					crtc_hung &= ~(1 << i);
3895 			}
3896 		}
3897 		if (crtc_hung == 0)
3898 			return false;
3899 		udelay(100);
3900 	}
3901 
3902 	return true;
3903 }
3904 
3905 u32 evergreen_gpu_check_soft_reset(struct radeon_device *rdev)
3906 {
3907 	u32 reset_mask = 0;
3908 	u32 tmp;
3909 
3910 	/* GRBM_STATUS */
3911 	tmp = RREG32(GRBM_STATUS);
3912 	if (tmp & (PA_BUSY | SC_BUSY |
3913 		   SH_BUSY | SX_BUSY |
3914 		   TA_BUSY | VGT_BUSY |
3915 		   DB_BUSY | CB_BUSY |
3916 		   SPI_BUSY | VGT_BUSY_NO_DMA))
3917 		reset_mask |= RADEON_RESET_GFX;
3918 
3919 	if (tmp & (CF_RQ_PENDING | PF_RQ_PENDING |
3920 		   CP_BUSY | CP_COHERENCY_BUSY))
3921 		reset_mask |= RADEON_RESET_CP;
3922 
3923 	if (tmp & GRBM_EE_BUSY)
3924 		reset_mask |= RADEON_RESET_GRBM | RADEON_RESET_GFX | RADEON_RESET_CP;
3925 
3926 	/* DMA_STATUS_REG */
3927 	tmp = RREG32(DMA_STATUS_REG);
3928 	if (!(tmp & DMA_IDLE))
3929 		reset_mask |= RADEON_RESET_DMA;
3930 
3931 	/* SRBM_STATUS2 */
3932 	tmp = RREG32(SRBM_STATUS2);
3933 	if (tmp & DMA_BUSY)
3934 		reset_mask |= RADEON_RESET_DMA;
3935 
3936 	/* SRBM_STATUS */
3937 	tmp = RREG32(SRBM_STATUS);
3938 	if (tmp & (RLC_RQ_PENDING | RLC_BUSY))
3939 		reset_mask |= RADEON_RESET_RLC;
3940 
3941 	if (tmp & IH_BUSY)
3942 		reset_mask |= RADEON_RESET_IH;
3943 
3944 	if (tmp & SEM_BUSY)
3945 		reset_mask |= RADEON_RESET_SEM;
3946 
3947 	if (tmp & GRBM_RQ_PENDING)
3948 		reset_mask |= RADEON_RESET_GRBM;
3949 
3950 	if (tmp & VMC_BUSY)
3951 		reset_mask |= RADEON_RESET_VMC;
3952 
3953 	if (tmp & (MCB_BUSY | MCB_NON_DISPLAY_BUSY |
3954 		   MCC_BUSY | MCD_BUSY))
3955 		reset_mask |= RADEON_RESET_MC;
3956 
3957 	if (evergreen_is_display_hung(rdev))
3958 		reset_mask |= RADEON_RESET_DISPLAY;
3959 
3960 	/* VM_L2_STATUS */
3961 	tmp = RREG32(VM_L2_STATUS);
3962 	if (tmp & L2_BUSY)
3963 		reset_mask |= RADEON_RESET_VMC;
3964 
3965 	/* Skip MC reset as it's mostly likely not hung, just busy */
3966 	if (reset_mask & RADEON_RESET_MC) {
3967 		DRM_DEBUG("MC busy: 0x%08X, clearing.\n", reset_mask);
3968 		reset_mask &= ~RADEON_RESET_MC;
3969 	}
3970 
3971 	return reset_mask;
3972 }
3973 
/*
 * evergreen_gpu_soft_reset - soft-reset the engines selected in @reset_mask.
 *
 * Sequence: halt CP (and DMA if requested), stop MC accesses, translate
 * the RADEON_RESET_* flags into GRBM/SRBM soft-reset bits, pulse those
 * bits (assert, delay, deassert), then resume the MC.  Status registers
 * are dumped before and after for diagnostics.  The ordering of these
 * steps is mandated by the hardware and must not be changed.
 */
static void evergreen_gpu_soft_reset(struct radeon_device *rdev, u32 reset_mask)
{
	struct evergreen_mc_save save;
	u32 grbm_soft_reset = 0, srbm_soft_reset = 0;
	u32 tmp;

	if (reset_mask == 0)
		return;

	dev_info(rdev->dev, "GPU softreset: 0x%08X\n", reset_mask);

	evergreen_print_gpu_status_regs(rdev);

	/* Disable CP parsing/prefetching */
	WREG32(CP_ME_CNTL, CP_ME_HALT | CP_PFP_HALT);

	if (reset_mask & RADEON_RESET_DMA) {
		/* Disable DMA */
		tmp = RREG32(DMA_RB_CNTL);
		tmp &= ~DMA_RB_ENABLE;
		WREG32(DMA_RB_CNTL, tmp);
	}

	udelay(50);

	/* stop memory controller accesses before touching reset bits */
	evergreen_mc_stop(rdev, &save);
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
	}

	/* build the GRBM reset bit mask from the requested engines */
	if (reset_mask & (RADEON_RESET_GFX | RADEON_RESET_COMPUTE)) {
		grbm_soft_reset |= SOFT_RESET_DB |
			SOFT_RESET_CB |
			SOFT_RESET_PA |
			SOFT_RESET_SC |
			SOFT_RESET_SPI |
			SOFT_RESET_SX |
			SOFT_RESET_SH |
			SOFT_RESET_TC |
			SOFT_RESET_TA |
			SOFT_RESET_VC |
			SOFT_RESET_VGT;
	}

	if (reset_mask & RADEON_RESET_CP) {
		grbm_soft_reset |= SOFT_RESET_CP |
			SOFT_RESET_VGT;

		srbm_soft_reset |= SOFT_RESET_GRBM;
	}

	/* build the SRBM reset bit mask */
	if (reset_mask & RADEON_RESET_DMA)
		srbm_soft_reset |= SOFT_RESET_DMA;

	if (reset_mask & RADEON_RESET_DISPLAY)
		srbm_soft_reset |= SOFT_RESET_DC;

	if (reset_mask & RADEON_RESET_RLC)
		srbm_soft_reset |= SOFT_RESET_RLC;

	if (reset_mask & RADEON_RESET_SEM)
		srbm_soft_reset |= SOFT_RESET_SEM;

	if (reset_mask & RADEON_RESET_IH)
		srbm_soft_reset |= SOFT_RESET_IH;

	if (reset_mask & RADEON_RESET_GRBM)
		srbm_soft_reset |= SOFT_RESET_GRBM;

	if (reset_mask & RADEON_RESET_VMC)
		srbm_soft_reset |= SOFT_RESET_VMC;

	/* MC reset is only attempted on discrete parts, never on IGPs */
	if (!(rdev->flags & RADEON_IS_IGP)) {
		if (reset_mask & RADEON_RESET_MC)
			srbm_soft_reset |= SOFT_RESET_MC;
	}

	/* pulse the GRBM reset bits: set, read back to post, wait, clear */
	if (grbm_soft_reset) {
		tmp = RREG32(GRBM_SOFT_RESET);
		tmp |= grbm_soft_reset;
		dev_info(rdev->dev, "GRBM_SOFT_RESET=0x%08X\n", tmp);
		WREG32(GRBM_SOFT_RESET, tmp);
		tmp = RREG32(GRBM_SOFT_RESET);

		udelay(50);

		tmp &= ~grbm_soft_reset;
		WREG32(GRBM_SOFT_RESET, tmp);
		tmp = RREG32(GRBM_SOFT_RESET);
	}

	/* pulse the SRBM reset bits the same way */
	if (srbm_soft_reset) {
		tmp = RREG32(SRBM_SOFT_RESET);
		tmp |= srbm_soft_reset;
		dev_info(rdev->dev, "SRBM_SOFT_RESET=0x%08X\n", tmp);
		WREG32(SRBM_SOFT_RESET, tmp);
		tmp = RREG32(SRBM_SOFT_RESET);

		udelay(50);

		tmp &= ~srbm_soft_reset;
		WREG32(SRBM_SOFT_RESET, tmp);
		tmp = RREG32(SRBM_SOFT_RESET);
	}

	/* Wait a little for things to settle down */
	udelay(50);

	evergreen_mc_resume(rdev, &save);
	udelay(50);

	evergreen_print_gpu_status_regs(rdev);
}
4087 
/*
 * evergreen_gpu_pci_config_reset - reset the whole ASIC via PCI config space.
 *
 * The big hammer used when soft reset fails: halt the CP, DMA and RLC,
 * switch the clocks to bypass, disable bus mastering and MC accesses,
 * trigger the PCI config reset, then busy-wait for the ASIC to come back
 * (CONFIG_MEMSIZE reads 0xffffffff while the chip is in reset).
 */
void evergreen_gpu_pci_config_reset(struct radeon_device *rdev)
{
	struct evergreen_mc_save save;
	u32 tmp, i;

	dev_info(rdev->dev, "GPU pci config reset\n");

	/* disable dpm? */

	/* Disable CP parsing/prefetching */
	WREG32(CP_ME_CNTL, CP_ME_HALT | CP_PFP_HALT);
	udelay(50);
	/* Disable DMA */
	tmp = RREG32(DMA_RB_CNTL);
	tmp &= ~DMA_RB_ENABLE;
	WREG32(DMA_RB_CNTL, tmp);
	/* XXX other engines? */

	/* halt the rlc */
	r600_rlc_stop(rdev);

	udelay(50);

	/* set mclk/sclk to bypass */
	rv770_set_clk_bypass_mode(rdev);
	/* disable BM */
	pci_disable_busmaster(rdev->pdev->dev.bsddev);
	/* disable mem access */
	evergreen_mc_stop(rdev, &save);
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timed out !\n");
	}
	/* reset */
	radeon_pci_config_reset(rdev);
	/* wait for asic to come out of reset */
	for (i = 0; i < rdev->usec_timeout; i++) {
		/* registers read all-ones until the ASIC is back */
		if (RREG32(CONFIG_MEMSIZE) != 0xffffffff)
			break;
		udelay(1);
	}
}
4129 
4130 int evergreen_asic_reset(struct radeon_device *rdev, bool hard)
4131 {
4132 	u32 reset_mask;
4133 
4134 	if (hard) {
4135 		evergreen_gpu_pci_config_reset(rdev);
4136 		return 0;
4137 	}
4138 
4139 	reset_mask = evergreen_gpu_check_soft_reset(rdev);
4140 
4141 	if (reset_mask)
4142 		r600_set_bios_scratch_engine_hung(rdev, true);
4143 
4144 	/* try soft reset */
4145 	evergreen_gpu_soft_reset(rdev, reset_mask);
4146 
4147 	reset_mask = evergreen_gpu_check_soft_reset(rdev);
4148 
4149 	/* try pci config reset */
4150 	if (reset_mask && radeon_hard_reset)
4151 		evergreen_gpu_pci_config_reset(rdev);
4152 
4153 	reset_mask = evergreen_gpu_check_soft_reset(rdev);
4154 
4155 	if (!reset_mask)
4156 		r600_set_bios_scratch_engine_hung(rdev, false);
4157 
4158 	return 0;
4159 }
4160 
4161 /**
4162  * evergreen_gfx_is_lockup - Check if the GFX engine is locked up
4163  *
4164  * @rdev: radeon_device pointer
4165  * @ring: radeon_ring structure holding ring information
4166  *
4167  * Check if the GFX engine is locked up.
4168  * Returns true if the engine appears to be locked up, false if not.
4169  */
4170 bool evergreen_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
4171 {
4172 	u32 reset_mask = evergreen_gpu_check_soft_reset(rdev);
4173 
4174 	if (!(reset_mask & (RADEON_RESET_GFX |
4175 			    RADEON_RESET_COMPUTE |
4176 			    RADEON_RESET_CP))) {
4177 		radeon_ring_lockup_update(rdev, ring);
4178 		return false;
4179 	}
4180 	return radeon_ring_test_lockup(rdev, ring);
4181 }
4182 
4183 /*
4184  * RLC
4185  */
4186 #define RLC_SAVE_RESTORE_LIST_END_MARKER    0x00000000
4187 #define RLC_CLEAR_STATE_END_MARKER          0x00000001
4188 
4189 void sumo_rlc_fini(struct radeon_device *rdev)
4190 {
4191 	int r;
4192 
4193 	/* save restore block */
4194 	if (rdev->rlc.save_restore_obj) {
4195 		r = radeon_bo_reserve(rdev->rlc.save_restore_obj, false);
4196 		if (unlikely(r != 0))
4197 			dev_warn(rdev->dev, "(%d) reserve RLC sr bo failed\n", r);
4198 		radeon_bo_unpin(rdev->rlc.save_restore_obj);
4199 		radeon_bo_unreserve(rdev->rlc.save_restore_obj);
4200 
4201 		radeon_bo_unref(&rdev->rlc.save_restore_obj);
4202 		rdev->rlc.save_restore_obj = NULL;
4203 	}
4204 
4205 	/* clear state block */
4206 	if (rdev->rlc.clear_state_obj) {
4207 		r = radeon_bo_reserve(rdev->rlc.clear_state_obj, false);
4208 		if (unlikely(r != 0))
4209 			dev_warn(rdev->dev, "(%d) reserve RLC c bo failed\n", r);
4210 		radeon_bo_unpin(rdev->rlc.clear_state_obj);
4211 		radeon_bo_unreserve(rdev->rlc.clear_state_obj);
4212 
4213 		radeon_bo_unref(&rdev->rlc.clear_state_obj);
4214 		rdev->rlc.clear_state_obj = NULL;
4215 	}
4216 
4217 	/* clear state block */
4218 	if (rdev->rlc.cp_table_obj) {
4219 		r = radeon_bo_reserve(rdev->rlc.cp_table_obj, false);
4220 		if (unlikely(r != 0))
4221 			dev_warn(rdev->dev, "(%d) reserve RLC cp table bo failed\n", r);
4222 		radeon_bo_unpin(rdev->rlc.cp_table_obj);
4223 		radeon_bo_unreserve(rdev->rlc.cp_table_obj);
4224 
4225 		radeon_bo_unref(&rdev->rlc.cp_table_obj);
4226 		rdev->rlc.cp_table_obj = NULL;
4227 	}
4228 }
4229 
4230 #define CP_ME_TABLE_SIZE    96
4231 
/*
 * sumo_rlc_init - allocate and populate the RLC buffer objects.
 *
 * Creates up to three VRAM buffer objects, depending on what the ASIC
 * configuration provides:
 *  - save/restore block (when rdev->rlc.reg_list is set), encoded in a
 *    per-family format;
 *  - clear state block (when rdev->rlc.cs_data is set), containing the
 *    clear-state buffer (CSB);
 *  - CP power-gating table (when rdev->rlc.cp_table_size is non-zero,
 *    CIK-era parts only).
 * Each BO is created, pinned, mapped, filled, then unmapped/unreserved
 * but stays pinned.  On any error, sumo_rlc_fini() undoes everything.
 *
 * Returns 0 on success or a negative error code.
 */
int sumo_rlc_init(struct radeon_device *rdev)
{
	const u32 *src_ptr;
	volatile u32 *dst_ptr;
	u32 dws, data, i, j, k, reg_num;
	u32 reg_list_num, reg_list_hdr_blk_index, reg_list_blk_index = 0;
	u64 reg_list_mc_addr;
	const struct cs_section_def *cs_data;
	int r;
	void *vptr;

	vptr = NULL;
	src_ptr = rdev->rlc.reg_list;
	dws = rdev->rlc.reg_list_size;
	if (rdev->family >= CHIP_BONAIRE) {
		/* CIK: extra room for the indirect register list blocks */
		dws += (5 * 16) + 48 + 48 + 64;
	}
	cs_data = rdev->rlc.cs_data;

	if (src_ptr) {
		/* save restore block */
		if (rdev->rlc.save_restore_obj == NULL) {
			r = radeon_bo_create(rdev, dws * 4, PAGE_SIZE, true,
					     RADEON_GEM_DOMAIN_VRAM, 0, NULL,
					     &rdev->rlc.save_restore_obj);
			if (r) {
				dev_warn(rdev->dev, "(%d) create RLC sr bo failed\n", r);
				return r;
			}
		}

		r = radeon_bo_reserve(rdev->rlc.save_restore_obj, false);
		if (unlikely(r != 0)) {
			sumo_rlc_fini(rdev);
			return r;
		}
		r = radeon_bo_pin(rdev->rlc.save_restore_obj, RADEON_GEM_DOMAIN_VRAM,
				  &rdev->rlc.save_restore_gpu_addr);
		if (r) {
			radeon_bo_unreserve(rdev->rlc.save_restore_obj);
			dev_warn(rdev->dev, "(%d) pin RLC sr bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}

		r = radeon_bo_kmap(rdev->rlc.save_restore_obj, (void**)&vptr);
		if (r) {
			dev_warn(rdev->dev, "(%d) map RLC sr bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}
		rdev->rlc.sr_ptr = vptr;
		/* write the sr buffer */
		dst_ptr = rdev->rlc.sr_ptr;
		if (rdev->family >= CHIP_TAHITI) {
			/* SI */
			/* plain little-endian copy of the register list */
			for (i = 0; i < rdev->rlc.reg_list_size; i++)
				dst_ptr[i] = cpu_to_le32(src_ptr[i]);
		} else {
			/* ON/LN/TN */
			/* format:
			 * dw0: (reg2 << 16) | reg1
			 * dw1: reg1 save space
			 * dw2: reg2 save space
			 */
			/* note: i is also advanced inside the body, so each
			 * iteration consumes two source registers (dword
			 * offsets, hence the >> 2) and the pair plus its two
			 * save slots occupy 3 destination dwords */
			for (i = 0; i < dws; i++) {
				data = src_ptr[i] >> 2;
				i++;
				if (i < dws)
					data |= (src_ptr[i] >> 2) << 16;
				j = (((i - 1) * 3) / 2);
				dst_ptr[j] = cpu_to_le32(data);
			}
			j = ((i * 3) / 2);
			/* terminate the list */
			dst_ptr[j] = cpu_to_le32(RLC_SAVE_RESTORE_LIST_END_MARKER);
		}
		radeon_bo_kunmap(rdev->rlc.save_restore_obj);
		radeon_bo_unreserve(rdev->rlc.save_restore_obj);
	}

	if (cs_data) {
		/* clear state block */
		/* first compute the CSB size in dwords, per family */
		if (rdev->family >= CHIP_BONAIRE) {
			rdev->rlc.clear_state_size = dws = cik_get_csb_size(rdev);
		} else if (rdev->family >= CHIP_TAHITI) {
			/* SI: 256-byte header precedes the CSB proper */
			rdev->rlc.clear_state_size = si_get_csb_size(rdev);
			dws = rdev->rlc.clear_state_size + (256 / 4);
		} else {
			/* evergreen/NI: count registers in all sections;
			 * each register list gets a 3-dword header plus a
			 * 2-dword trailer for the whole block */
			reg_list_num = 0;
			dws = 0;
			for (i = 0; cs_data[i].section != NULL; i++) {
				for (j = 0; cs_data[i].section[j].extent != NULL; j++) {
					reg_list_num++;
					dws += cs_data[i].section[j].reg_count;
				}
			}
			reg_list_blk_index = (3 * reg_list_num + 2);
			dws += reg_list_blk_index;
			rdev->rlc.clear_state_size = dws;
		}

		if (rdev->rlc.clear_state_obj == NULL) {
			r = radeon_bo_create(rdev, dws * 4, PAGE_SIZE, true,
					     RADEON_GEM_DOMAIN_VRAM, 0, NULL,
					     &rdev->rlc.clear_state_obj);
			if (r) {
				dev_warn(rdev->dev, "(%d) create RLC c bo failed\n", r);
				sumo_rlc_fini(rdev);
				return r;
			}
		}
		r = radeon_bo_reserve(rdev->rlc.clear_state_obj, false);
		if (unlikely(r != 0)) {
			sumo_rlc_fini(rdev);
			return r;
		}
		r = radeon_bo_pin(rdev->rlc.clear_state_obj, RADEON_GEM_DOMAIN_VRAM,
				  &rdev->rlc.clear_state_gpu_addr);
		if (r) {
			radeon_bo_unreserve(rdev->rlc.clear_state_obj);
			dev_warn(rdev->dev, "(%d) pin RLC c bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}

		r = radeon_bo_kmap(rdev->rlc.clear_state_obj, (void**)&vptr);
		if (r) {
			dev_warn(rdev->dev, "(%d) map RLC c bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}
		rdev->rlc.cs_ptr = vptr;
		/* set up the cs buffer */
		dst_ptr = rdev->rlc.cs_ptr;
		if (rdev->family >= CHIP_BONAIRE) {
			cik_get_csb_buffer(rdev, dst_ptr);
		} else if (rdev->family >= CHIP_TAHITI) {
			/* SI header: GPU address and size of the CSB that
			 * follows at offset 256 */
			reg_list_mc_addr = rdev->rlc.clear_state_gpu_addr + 256;
			dst_ptr[0] = cpu_to_le32(upper_32_bits(reg_list_mc_addr));
			dst_ptr[1] = cpu_to_le32(lower_32_bits(reg_list_mc_addr));
			dst_ptr[2] = cpu_to_le32(rdev->rlc.clear_state_size);
			si_get_csb_buffer(rdev, &dst_ptr[(256/4)]);
		} else {
			/* evergreen/NI: header entries (addr, reg offset,
			 * flags|byte count) per register list, followed by
			 * the register data blocks themselves */
			reg_list_hdr_blk_index = 0;
			reg_list_mc_addr = rdev->rlc.clear_state_gpu_addr + (reg_list_blk_index * 4);
			data = upper_32_bits(reg_list_mc_addr);
			dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
			reg_list_hdr_blk_index++;
			for (i = 0; cs_data[i].section != NULL; i++) {
				for (j = 0; cs_data[i].section[j].extent != NULL; j++) {
					reg_num = cs_data[i].section[j].reg_count;
					data = reg_list_mc_addr & 0xffffffff;
					dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
					reg_list_hdr_blk_index++;

					data = (cs_data[i].section[j].reg_index * 4) & 0xffffffff;
					dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
					reg_list_hdr_blk_index++;

					data = 0x08000000 | (reg_num * 4);
					dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
					reg_list_hdr_blk_index++;

					for (k = 0; k < reg_num; k++) {
						data = cs_data[i].section[j].extent[k];
						dst_ptr[reg_list_blk_index + k] = cpu_to_le32(data);
					}
					reg_list_mc_addr += reg_num * 4;
					reg_list_blk_index += reg_num;
				}
			}
			dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(RLC_CLEAR_STATE_END_MARKER);
		}
		radeon_bo_kunmap(rdev->rlc.clear_state_obj);
		radeon_bo_unreserve(rdev->rlc.clear_state_obj);
	}

	if (rdev->rlc.cp_table_size) {
		/* CP power-gating table (CIK only) */
		if (rdev->rlc.cp_table_obj == NULL) {
			r = radeon_bo_create(rdev, rdev->rlc.cp_table_size,
					     PAGE_SIZE, true,
					     RADEON_GEM_DOMAIN_VRAM, 0, NULL,
					     &rdev->rlc.cp_table_obj);
			if (r) {
				dev_warn(rdev->dev, "(%d) create RLC cp table bo failed\n", r);
				sumo_rlc_fini(rdev);
				return r;
			}
		}

		r = radeon_bo_reserve(rdev->rlc.cp_table_obj, false);
		if (unlikely(r != 0)) {
			dev_warn(rdev->dev, "(%d) reserve RLC cp table bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}
		r = radeon_bo_pin(rdev->rlc.cp_table_obj, RADEON_GEM_DOMAIN_VRAM,
				  &rdev->rlc.cp_table_gpu_addr);
		if (r) {
			radeon_bo_unreserve(rdev->rlc.cp_table_obj);
			dev_warn(rdev->dev, "(%d) pin RLC cp_table bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}
		r = radeon_bo_kmap(rdev->rlc.cp_table_obj, (void**)&vptr);
		if (r) {
			dev_warn(rdev->dev, "(%d) map RLC cp table bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}
		rdev->rlc.cp_table_ptr = vptr;

		cik_init_cp_pg_table(rdev);

		radeon_bo_kunmap(rdev->rlc.cp_table_obj);
		radeon_bo_unreserve(rdev->rlc.cp_table_obj);

	}

	return 0;
}
4453 
4454 static void evergreen_rlc_start(struct radeon_device *rdev)
4455 {
4456 	u32 mask = RLC_ENABLE;
4457 
4458 	if (rdev->flags & RADEON_IS_IGP) {
4459 		mask |= GFX_POWER_GATING_ENABLE | GFX_POWER_GATING_SRC;
4460 	}
4461 
4462 	WREG32(RLC_CNTL, mask);
4463 }
4464 
/*
 * evergreen_rlc_resume - load the RLC microcode and start the RLC.
 *
 * Stops the RLC, programs the family-specific base/handshake registers
 * (ARUBA additionally tunes load balancing when all SIMDs are active),
 * uploads the big-endian firmware image word by word through
 * RLC_UCODE_ADDR/DATA, then re-enables the RLC.
 *
 * Returns 0 on success, -EINVAL when no RLC firmware has been loaded.
 */
int evergreen_rlc_resume(struct radeon_device *rdev)
{
	u32 i;
	const __be32 *fw_data;

	if (!rdev->rlc_fw)
		return -EINVAL;

	r600_rlc_stop(rdev);

	WREG32(RLC_HB_CNTL, 0);

	if (rdev->flags & RADEON_IS_IGP) {
		if (rdev->family == CHIP_ARUBA) {
			/* keep the first two SIMDs of each SE always on */
			u32 always_on_bitmap =
				3 | (3 << (16 * rdev->config.cayman.max_shader_engines));
			/* find out the number of active simds */
			u32 tmp = (RREG32(CC_GC_SHADER_PIPE_CONFIG) & 0xffff0000) >> 16;
			tmp |= 0xffffffff << rdev->config.cayman.max_simds_per_se;
			tmp = hweight32(~tmp);
			/* only enable load balancing when no SIMD is fused off */
			if (tmp == rdev->config.cayman.max_simds_per_se) {
				WREG32(TN_RLC_LB_ALWAYS_ACTIVE_SIMD_MASK, always_on_bitmap);
				WREG32(TN_RLC_LB_PARAMS, 0x00601004);
				WREG32(TN_RLC_LB_INIT_SIMD_MASK, 0xffffffff);
				WREG32(TN_RLC_LB_CNTR_INIT, 0x00000000);
				WREG32(TN_RLC_LB_CNTR_MAX, 0x00002000);
			}
		} else {
			WREG32(RLC_HB_WPTR_LSB_ADDR, 0);
			WREG32(RLC_HB_WPTR_MSB_ADDR, 0);
		}
		/* IGPs use the save/restore and clear-state BOs set up in
		 * sumo_rlc_init() */
		WREG32(TN_RLC_SAVE_AND_RESTORE_BASE, rdev->rlc.save_restore_gpu_addr >> 8);
		WREG32(TN_RLC_CLEAR_STATE_RESTORE_BASE, rdev->rlc.clear_state_gpu_addr >> 8);
	} else {
		WREG32(RLC_HB_BASE, 0);
		WREG32(RLC_HB_RPTR, 0);
		WREG32(RLC_HB_WPTR, 0);
		WREG32(RLC_HB_WPTR_LSB_ADDR, 0);
		WREG32(RLC_HB_WPTR_MSB_ADDR, 0);
	}
	WREG32(RLC_MC_CNTL, 0);
	WREG32(RLC_UCODE_CNTL, 0);

	/* upload the microcode; image length depends on the family */
	fw_data = (const __be32 *)rdev->rlc_fw->data;
	if (rdev->family >= CHIP_ARUBA) {
		for (i = 0; i < ARUBA_RLC_UCODE_SIZE; i++) {
			WREG32(RLC_UCODE_ADDR, i);
			WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
		}
	} else if (rdev->family >= CHIP_CAYMAN) {
		for (i = 0; i < CAYMAN_RLC_UCODE_SIZE; i++) {
			WREG32(RLC_UCODE_ADDR, i);
			WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
		}
	} else {
		for (i = 0; i < EVERGREEN_RLC_UCODE_SIZE; i++) {
			WREG32(RLC_UCODE_ADDR, i);
			WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
		}
	}
	WREG32(RLC_UCODE_ADDR, 0);

	evergreen_rlc_start(rdev);

	return 0;
}
4531 
4532 /* Interrupts */
4533 
4534 u32 evergreen_get_vblank_counter(struct radeon_device *rdev, int crtc)
4535 {
4536 	if (crtc >= rdev->num_crtc)
4537 		return 0;
4538 	else
4539 		return RREG32(CRTC_STATUS_FRAME_COUNT + crtc_offsets[crtc]);
4540 }
4541 
/*
 * evergreen_disable_interrupt_state - mask every interrupt source.
 *
 * Clears CP (ring 0..2 on cayman+), DMA, GRBM/SRBM, per-CRTC vblank,
 * pageflip, DAC autodetect and HPD interrupt enables.  For the HPD
 * registers only the polarity bit is preserved.  Called before
 * reprogramming or tearing down the interrupt handler.
 */
void evergreen_disable_interrupt_state(struct radeon_device *rdev)
{
	u32 tmp;

	if (rdev->family >= CHIP_CAYMAN) {
		/* cayman has three CP rings; mask each separately */
		cayman_cp_int_cntl_setup(rdev, 0,
					 CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
		cayman_cp_int_cntl_setup(rdev, 1, 0);
		cayman_cp_int_cntl_setup(rdev, 2, 0);
		tmp = RREG32(CAYMAN_DMA1_CNTL) & ~TRAP_ENABLE;
		WREG32(CAYMAN_DMA1_CNTL, tmp);
	} else
		WREG32(CP_INT_CNTL, CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
	tmp = RREG32(DMA_CNTL) & ~TRAP_ENABLE;
	WREG32(DMA_CNTL, tmp);
	WREG32(GRBM_INT_CNTL, 0);
	WREG32(SRBM_INT_CNTL, 0);
	/* per-CRTC vblank/vline interrupt masks */
	WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
	WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
	if (rdev->num_crtc >= 4) {
		WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
		WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
	}
	if (rdev->num_crtc >= 6) {
		WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
		WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
	}

	/* per-CRTC pageflip interrupt masks */
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
	if (rdev->num_crtc >= 4) {
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
	}
	if (rdev->num_crtc >= 6) {
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
	}

	/* only one DAC on DCE5 */
	if (!ASIC_IS_DCE5(rdev))
		WREG32(DACA_AUTODETECT_INT_CONTROL, 0);
	WREG32(DACB_AUTODETECT_INT_CONTROL, 0);

	/* hotplug detect: keep only the configured polarity bit */
	tmp = RREG32(DC_HPD1_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD1_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD2_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD2_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD3_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD3_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD4_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD4_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD5_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD5_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD6_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD6_INT_CONTROL, tmp);

}
4600 
/**
 * evergreen_irq_set - program the interrupt enable state from rdev->irq
 *
 * Builds enable masks for all managed sources (CP rings, DMA traps,
 * per-CRTC vblank, hotplug, HDMI audio, thermal) based on the flags in
 * rdev->irq and writes them out in one pass.
 *
 * Returns 0 on success, -EINVAL when no IRQ handler is installed.
 * When the IH ring is disabled, all sources are masked instead.
 */
int evergreen_irq_set(struct radeon_device *rdev)
{
	u32 cp_int_cntl = CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE;
	u32 cp_int_cntl1 = 0, cp_int_cntl2 = 0;
	u32 crtc1 = 0, crtc2 = 0, crtc3 = 0, crtc4 = 0, crtc5 = 0, crtc6 = 0;
	u32 hpd1, hpd2, hpd3, hpd4, hpd5, hpd6;
	u32 grbm_int_cntl = 0;
	u32 afmt1 = 0, afmt2 = 0, afmt3 = 0, afmt4 = 0, afmt5 = 0, afmt6 = 0;
	u32 dma_cntl, dma_cntl1 = 0;
	u32 thermal_int = 0;

	if (!rdev->irq.installed) {
		WARN(1, "Can't enable IRQ/MSI because no handler is installed\n");
		return -EINVAL;
	}
	/* don't enable anything if the ih is disabled */
	if (!rdev->ih.enabled) {
		r600_disable_interrupts(rdev);
		/* force the active interrupt state to all disabled */
		evergreen_disable_interrupt_state(rdev);
		return 0;
	}

	/* start from the current HPD settings with the enable bits cleared */
	hpd1 = RREG32(DC_HPD1_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
	hpd2 = RREG32(DC_HPD2_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
	hpd3 = RREG32(DC_HPD3_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
	hpd4 = RREG32(DC_HPD4_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
	hpd5 = RREG32(DC_HPD5_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
	hpd6 = RREG32(DC_HPD6_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
	/* ARUBA (TN) uses a different thermal interrupt control register */
	if (rdev->family == CHIP_ARUBA)
		thermal_int = RREG32(TN_CG_THERMAL_INT_CTRL) &
			~(THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW);
	else
		thermal_int = RREG32(CG_THERMAL_INT) &
			~(THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW);

	/* HDMI audio: clear the write-trigger mask bits, re-enable below */
	afmt1 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
	afmt2 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
	afmt3 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
	afmt4 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
	afmt5 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
	afmt6 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;

	dma_cntl = RREG32(DMA_CNTL) & ~TRAP_ENABLE;

	if (rdev->family >= CHIP_CAYMAN) {
		/* enable CP interrupts on all rings */
		if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
			DRM_DEBUG("evergreen_irq_set: sw int gfx\n");
			cp_int_cntl |= TIME_STAMP_INT_ENABLE;
		}
		if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP1_INDEX])) {
			DRM_DEBUG("evergreen_irq_set: sw int cp1\n");
			cp_int_cntl1 |= TIME_STAMP_INT_ENABLE;
		}
		if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP2_INDEX])) {
			DRM_DEBUG("evergreen_irq_set: sw int cp2\n");
			cp_int_cntl2 |= TIME_STAMP_INT_ENABLE;
		}
	} else {
		if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
			DRM_DEBUG("evergreen_irq_set: sw int gfx\n");
			cp_int_cntl |= RB_INT_ENABLE;
			cp_int_cntl |= TIME_STAMP_INT_ENABLE;
		}
	}

	if (atomic_read(&rdev->irq.ring_int[R600_RING_TYPE_DMA_INDEX])) {
		DRM_DEBUG("r600_irq_set: sw int dma\n");
		dma_cntl |= TRAP_ENABLE;
	}

	if (rdev->family >= CHIP_CAYMAN) {
		dma_cntl1 = RREG32(CAYMAN_DMA1_CNTL) & ~TRAP_ENABLE;
		if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_DMA1_INDEX])) {
			DRM_DEBUG("r600_irq_set: sw int dma1\n");
			dma_cntl1 |= TRAP_ENABLE;
		}
	}

	if (rdev->irq.dpm_thermal) {
		DRM_DEBUG("dpm thermal\n");
		thermal_int |= THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW;
	}

	/* vblank interrupts: requested either directly or for pageflips */
	if (rdev->irq.crtc_vblank_int[0] ||
	    atomic_read(&rdev->irq.pflip[0])) {
		DRM_DEBUG("evergreen_irq_set: vblank 0\n");
		crtc1 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[1] ||
	    atomic_read(&rdev->irq.pflip[1])) {
		DRM_DEBUG("evergreen_irq_set: vblank 1\n");
		crtc2 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[2] ||
	    atomic_read(&rdev->irq.pflip[2])) {
		DRM_DEBUG("evergreen_irq_set: vblank 2\n");
		crtc3 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[3] ||
	    atomic_read(&rdev->irq.pflip[3])) {
		DRM_DEBUG("evergreen_irq_set: vblank 3\n");
		crtc4 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[4] ||
	    atomic_read(&rdev->irq.pflip[4])) {
		DRM_DEBUG("evergreen_irq_set: vblank 4\n");
		crtc5 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[5] ||
	    atomic_read(&rdev->irq.pflip[5])) {
		DRM_DEBUG("evergreen_irq_set: vblank 5\n");
		crtc6 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.hpd[0]) {
		DRM_DEBUG("evergreen_irq_set: hpd 1\n");
		hpd1 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
	}
	if (rdev->irq.hpd[1]) {
		DRM_DEBUG("evergreen_irq_set: hpd 2\n");
		hpd2 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
	}
	if (rdev->irq.hpd[2]) {
		DRM_DEBUG("evergreen_irq_set: hpd 3\n");
		hpd3 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
	}
	if (rdev->irq.hpd[3]) {
		DRM_DEBUG("evergreen_irq_set: hpd 4\n");
		hpd4 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
	}
	if (rdev->irq.hpd[4]) {
		DRM_DEBUG("evergreen_irq_set: hpd 5\n");
		hpd5 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
	}
	if (rdev->irq.hpd[5]) {
		DRM_DEBUG("evergreen_irq_set: hpd 6\n");
		hpd6 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
	}
	if (rdev->irq.afmt[0]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 0\n");
		afmt1 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}
	if (rdev->irq.afmt[1]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 1\n");
		afmt2 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}
	if (rdev->irq.afmt[2]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 2\n");
		afmt3 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}
	if (rdev->irq.afmt[3]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 3\n");
		afmt4 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}
	if (rdev->irq.afmt[4]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 4\n");
		afmt5 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}
	if (rdev->irq.afmt[5]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 5\n");
		afmt6 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}

	/* all masks assembled above; now write the registers out */
	if (rdev->family >= CHIP_CAYMAN) {
		cayman_cp_int_cntl_setup(rdev, 0, cp_int_cntl);
		cayman_cp_int_cntl_setup(rdev, 1, cp_int_cntl1);
		cayman_cp_int_cntl_setup(rdev, 2, cp_int_cntl2);
	} else
		WREG32(CP_INT_CNTL, cp_int_cntl);

	WREG32(DMA_CNTL, dma_cntl);

	if (rdev->family >= CHIP_CAYMAN)
		WREG32(CAYMAN_DMA1_CNTL, dma_cntl1);

	WREG32(GRBM_INT_CNTL, grbm_int_cntl);

	WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, crtc1);
	WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, crtc2);
	if (rdev->num_crtc >= 4) {
		WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, crtc3);
		WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, crtc4);
	}
	if (rdev->num_crtc >= 6) {
		WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, crtc5);
		WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, crtc6);
	}

	/* pageflip interrupts are always left enabled */
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET,
	       GRPH_PFLIP_INT_MASK);
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET,
	       GRPH_PFLIP_INT_MASK);
	if (rdev->num_crtc >= 4) {
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET,
		       GRPH_PFLIP_INT_MASK);
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET,
		       GRPH_PFLIP_INT_MASK);
	}
	if (rdev->num_crtc >= 6) {
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET,
		       GRPH_PFLIP_INT_MASK);
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET,
		       GRPH_PFLIP_INT_MASK);
	}

	WREG32(DC_HPD1_INT_CONTROL, hpd1);
	WREG32(DC_HPD2_INT_CONTROL, hpd2);
	WREG32(DC_HPD3_INT_CONTROL, hpd3);
	WREG32(DC_HPD4_INT_CONTROL, hpd4);
	WREG32(DC_HPD5_INT_CONTROL, hpd5);
	WREG32(DC_HPD6_INT_CONTROL, hpd6);
	if (rdev->family == CHIP_ARUBA)
		WREG32(TN_CG_THERMAL_INT_CTRL, thermal_int);
	else
		WREG32(CG_THERMAL_INT, thermal_int);

	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, afmt1);
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, afmt2);
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, afmt3);
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, afmt4);
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, afmt5);
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, afmt6);

	/* posting read */
	RREG32(SRBM_STATUS);

	return 0;
}
4830 
4831 static void evergreen_irq_ack(struct radeon_device *rdev)
4832 {
4833 	u32 tmp;
4834 
4835 	rdev->irq.stat_regs.evergreen.disp_int = RREG32(DISP_INTERRUPT_STATUS);
4836 	rdev->irq.stat_regs.evergreen.disp_int_cont = RREG32(DISP_INTERRUPT_STATUS_CONTINUE);
4837 	rdev->irq.stat_regs.evergreen.disp_int_cont2 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE2);
4838 	rdev->irq.stat_regs.evergreen.disp_int_cont3 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE3);
4839 	rdev->irq.stat_regs.evergreen.disp_int_cont4 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE4);
4840 	rdev->irq.stat_regs.evergreen.disp_int_cont5 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE5);
4841 	rdev->irq.stat_regs.evergreen.d1grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
4842 	rdev->irq.stat_regs.evergreen.d2grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
4843 	if (rdev->num_crtc >= 4) {
4844 		rdev->irq.stat_regs.evergreen.d3grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
4845 		rdev->irq.stat_regs.evergreen.d4grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
4846 	}
4847 	if (rdev->num_crtc >= 6) {
4848 		rdev->irq.stat_regs.evergreen.d5grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
4849 		rdev->irq.stat_regs.evergreen.d6grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);
4850 	}
4851 
4852 	rdev->irq.stat_regs.evergreen.afmt_status1 = RREG32(AFMT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
4853 	rdev->irq.stat_regs.evergreen.afmt_status2 = RREG32(AFMT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
4854 	rdev->irq.stat_regs.evergreen.afmt_status3 = RREG32(AFMT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
4855 	rdev->irq.stat_regs.evergreen.afmt_status4 = RREG32(AFMT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
4856 	rdev->irq.stat_regs.evergreen.afmt_status5 = RREG32(AFMT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
4857 	rdev->irq.stat_regs.evergreen.afmt_status6 = RREG32(AFMT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);
4858 
4859 	if (rdev->irq.stat_regs.evergreen.d1grph_int & GRPH_PFLIP_INT_OCCURRED)
4860 		WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4861 	if (rdev->irq.stat_regs.evergreen.d2grph_int & GRPH_PFLIP_INT_OCCURRED)
4862 		WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4863 	if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT)
4864 		WREG32(VBLANK_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VBLANK_ACK);
4865 	if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT)
4866 		WREG32(VLINE_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VLINE_ACK);
4867 	if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT)
4868 		WREG32(VBLANK_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VBLANK_ACK);
4869 	if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT)
4870 		WREG32(VLINE_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VLINE_ACK);
4871 
4872 	if (rdev->num_crtc >= 4) {
4873 		if (rdev->irq.stat_regs.evergreen.d3grph_int & GRPH_PFLIP_INT_OCCURRED)
4874 			WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4875 		if (rdev->irq.stat_regs.evergreen.d4grph_int & GRPH_PFLIP_INT_OCCURRED)
4876 			WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4877 		if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT)
4878 			WREG32(VBLANK_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VBLANK_ACK);
4879 		if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT)
4880 			WREG32(VLINE_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VLINE_ACK);
4881 		if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT)
4882 			WREG32(VBLANK_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VBLANK_ACK);
4883 		if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT)
4884 			WREG32(VLINE_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VLINE_ACK);
4885 	}
4886 
4887 	if (rdev->num_crtc >= 6) {
4888 		if (rdev->irq.stat_regs.evergreen.d5grph_int & GRPH_PFLIP_INT_OCCURRED)
4889 			WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4890 		if (rdev->irq.stat_regs.evergreen.d6grph_int & GRPH_PFLIP_INT_OCCURRED)
4891 			WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4892 		if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT)
4893 			WREG32(VBLANK_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VBLANK_ACK);
4894 		if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT)
4895 			WREG32(VLINE_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VLINE_ACK);
4896 		if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT)
4897 			WREG32(VBLANK_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VBLANK_ACK);
4898 		if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT)
4899 			WREG32(VLINE_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VLINE_ACK);
4900 	}
4901 
4902 	if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT) {
4903 		tmp = RREG32(DC_HPD1_INT_CONTROL);
4904 		tmp |= DC_HPDx_INT_ACK;
4905 		WREG32(DC_HPD1_INT_CONTROL, tmp);
4906 	}
4907 	if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT) {
4908 		tmp = RREG32(DC_HPD2_INT_CONTROL);
4909 		tmp |= DC_HPDx_INT_ACK;
4910 		WREG32(DC_HPD2_INT_CONTROL, tmp);
4911 	}
4912 	if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT) {
4913 		tmp = RREG32(DC_HPD3_INT_CONTROL);
4914 		tmp |= DC_HPDx_INT_ACK;
4915 		WREG32(DC_HPD3_INT_CONTROL, tmp);
4916 	}
4917 	if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT) {
4918 		tmp = RREG32(DC_HPD4_INT_CONTROL);
4919 		tmp |= DC_HPDx_INT_ACK;
4920 		WREG32(DC_HPD4_INT_CONTROL, tmp);
4921 	}
4922 	if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT) {
4923 		tmp = RREG32(DC_HPD5_INT_CONTROL);
4924 		tmp |= DC_HPDx_INT_ACK;
4925 		WREG32(DC_HPD5_INT_CONTROL, tmp);
4926 	}
4927 	if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT) {
4928 		tmp = RREG32(DC_HPD5_INT_CONTROL);
4929 		tmp |= DC_HPDx_INT_ACK;
4930 		WREG32(DC_HPD6_INT_CONTROL, tmp);
4931 	}
4932 
4933 	if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_RX_INTERRUPT) {
4934 		tmp = RREG32(DC_HPD1_INT_CONTROL);
4935 		tmp |= DC_HPDx_RX_INT_ACK;
4936 		WREG32(DC_HPD1_INT_CONTROL, tmp);
4937 	}
4938 	if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_RX_INTERRUPT) {
4939 		tmp = RREG32(DC_HPD2_INT_CONTROL);
4940 		tmp |= DC_HPDx_RX_INT_ACK;
4941 		WREG32(DC_HPD2_INT_CONTROL, tmp);
4942 	}
4943 	if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_RX_INTERRUPT) {
4944 		tmp = RREG32(DC_HPD3_INT_CONTROL);
4945 		tmp |= DC_HPDx_RX_INT_ACK;
4946 		WREG32(DC_HPD3_INT_CONTROL, tmp);
4947 	}
4948 	if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_RX_INTERRUPT) {
4949 		tmp = RREG32(DC_HPD4_INT_CONTROL);
4950 		tmp |= DC_HPDx_RX_INT_ACK;
4951 		WREG32(DC_HPD4_INT_CONTROL, tmp);
4952 	}
4953 	if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_RX_INTERRUPT) {
4954 		tmp = RREG32(DC_HPD5_INT_CONTROL);
4955 		tmp |= DC_HPDx_RX_INT_ACK;
4956 		WREG32(DC_HPD5_INT_CONTROL, tmp);
4957 	}
4958 	if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_RX_INTERRUPT) {
4959 		tmp = RREG32(DC_HPD5_INT_CONTROL);
4960 		tmp |= DC_HPDx_RX_INT_ACK;
4961 		WREG32(DC_HPD6_INT_CONTROL, tmp);
4962 	}
4963 
4964 	if (rdev->irq.stat_regs.evergreen.afmt_status1 & AFMT_AZ_FORMAT_WTRIG) {
4965 		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET);
4966 		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4967 		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, tmp);
4968 	}
4969 	if (rdev->irq.stat_regs.evergreen.afmt_status2 & AFMT_AZ_FORMAT_WTRIG) {
4970 		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET);
4971 		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4972 		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, tmp);
4973 	}
4974 	if (rdev->irq.stat_regs.evergreen.afmt_status3 & AFMT_AZ_FORMAT_WTRIG) {
4975 		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET);
4976 		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4977 		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, tmp);
4978 	}
4979 	if (rdev->irq.stat_regs.evergreen.afmt_status4 & AFMT_AZ_FORMAT_WTRIG) {
4980 		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET);
4981 		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4982 		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, tmp);
4983 	}
4984 	if (rdev->irq.stat_regs.evergreen.afmt_status5 & AFMT_AZ_FORMAT_WTRIG) {
4985 		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET);
4986 		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4987 		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, tmp);
4988 	}
4989 	if (rdev->irq.stat_regs.evergreen.afmt_status6 & AFMT_AZ_FORMAT_WTRIG) {
4990 		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET);
4991 		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4992 		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, tmp);
4993 	}
4994 }
4995 
/* Disable the IH ring, give in-flight interrupts a moment to land,
 * then acknowledge anything pending and mask every source. */
static void evergreen_irq_disable(struct radeon_device *rdev)
{
	r600_disable_interrupts(rdev);
	/* Wait and acknowledge irq */
	mdelay(1);
	evergreen_irq_ack(rdev);
	evergreen_disable_interrupt_state(rdev);
}
5004 
/* Suspend path: quiesce all interrupts, then stop the RLC. */
void evergreen_irq_suspend(struct radeon_device *rdev)
{
	evergreen_irq_disable(rdev);
	r600_rlc_stop(rdev);
}
5010 
5011 static u32 evergreen_get_ih_wptr(struct radeon_device *rdev)
5012 {
5013 	u32 wptr, tmp;
5014 
5015 	if (rdev->wb.enabled)
5016 		wptr = le32_to_cpu(rdev->wb.wb[R600_WB_IH_WPTR_OFFSET/4]);
5017 	else
5018 		wptr = RREG32(IH_RB_WPTR);
5019 
5020 	if (wptr & RB_OVERFLOW) {
5021 		wptr &= ~RB_OVERFLOW;
5022 		/* When a ring buffer overflow happen start parsing interrupt
5023 		 * from the last not overwritten vector (wptr + 16). Hopefully
5024 		 * this should allow us to catchup.
5025 		 */
5026 		dev_warn(rdev->dev, "IH ring buffer overflow (0x%08X, 0x%08X, 0x%08X)\n",
5027 			 wptr, rdev->ih.rptr, (wptr + 16) & rdev->ih.ptr_mask);
5028 		rdev->ih.rptr = (wptr + 16) & rdev->ih.ptr_mask;
5029 		tmp = RREG32(IH_RB_CNTL);
5030 		tmp |= IH_WPTR_OVERFLOW_CLEAR;
5031 		WREG32(IH_RB_CNTL, tmp);
5032 	}
5033 	return (wptr & rdev->ih.ptr_mask);
5034 }
5035 
5036 irqreturn_t evergreen_irq_process(struct radeon_device *rdev)
5037 {
5038 	u32 wptr;
5039 	u32 rptr;
5040 	u32 src_id, src_data;
5041 	u32 ring_index;
5042 	bool queue_hotplug = false;
5043 	bool queue_hdmi = false;
5044 	bool queue_dp = false;
5045 	bool queue_thermal = false;
5046 	u32 status, addr;
5047 
5048 	if (!rdev->ih.enabled || rdev->shutdown)
5049 		return IRQ_NONE;
5050 
5051 	wptr = evergreen_get_ih_wptr(rdev);
5052 
5053 restart_ih:
5054 	/* is somebody else already processing irqs? */
5055 	if (atomic_xchg(&rdev->ih.lock, 1))
5056 		return IRQ_NONE;
5057 
5058 	rptr = rdev->ih.rptr;
5059 	DRM_DEBUG("evergreen_irq_process start: rptr %d, wptr %d\n", rptr, wptr);
5060 
5061 	/* Order reading of wptr vs. reading of IH ring data */
5062 	rmb();
5063 
5064 	/* display interrupts */
5065 	evergreen_irq_ack(rdev);
5066 
5067 	while (rptr != wptr) {
5068 		/* wptr/rptr are in bytes! */
5069 		ring_index = rptr / 4;
5070 		src_id =  le32_to_cpu(rdev->ih.ring[ring_index]) & 0xff;
5071 		src_data = le32_to_cpu(rdev->ih.ring[ring_index + 1]) & 0xfffffff;
5072 
5073 		switch (src_id) {
5074 		case 1: /* D1 vblank/vline */
5075 			switch (src_data) {
5076 			case 0: /* D1 vblank */
5077 				if (!(rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT))
5078 					DRM_DEBUG("IH: D1 vblank - IH event w/o asserted irq bit?\n");
5079 
5080 				if (rdev->irq.crtc_vblank_int[0]) {
5081 					drm_handle_vblank(rdev->ddev, 0);
5082 					rdev->pm.vblank_sync = true;
5083 					wake_up(&rdev->irq.vblank_queue);
5084  				}
5085 				if (atomic_read(&rdev->irq.pflip[0]))
5086 					radeon_crtc_handle_vblank(rdev, 0);
5087 				rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VBLANK_INTERRUPT;
5088 				DRM_DEBUG("IH: D1 vblank\n");
5089 
5090 				break;
5091 			case 1: /* D1 vline */
5092 				if (!(rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT))
5093 					DRM_DEBUG("IH: D1 vline - IH event w/o asserted irq bit?\n");
5094 
5095 				rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VLINE_INTERRUPT;
5096 				DRM_DEBUG("IH: D1 vline\n");
5097 
5098 				break;
5099 			default:
5100 				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5101 				break;
5102 			}
5103 			break;
5104 		case 2: /* D2 vblank/vline */
5105 			switch (src_data) {
5106 			case 0: /* D2 vblank */
5107 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT))
5108 					DRM_DEBUG("IH: D2 vblank - IH event w/o asserted irq bit?\n");
5109 
5110 				if (rdev->irq.crtc_vblank_int[1]) {
5111 					drm_handle_vblank(rdev->ddev, 1);
5112 					rdev->pm.vblank_sync = true;
5113 					wake_up(&rdev->irq.vblank_queue);
5114  				}
5115 				if (atomic_read(&rdev->irq.pflip[1]))
5116 					radeon_crtc_handle_vblank(rdev, 1);
5117 				rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VBLANK_INTERRUPT;
5118 				DRM_DEBUG("IH: D2 vblank\n");
5119 
5120 				break;
5121 			case 1: /* D2 vline */
5122 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT))
5123 					DRM_DEBUG("IH: D2 vline - IH event w/o asserted irq bit?\n");
5124 
5125 				rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VLINE_INTERRUPT;
5126 				DRM_DEBUG("IH: D2 vline\n");
5127 
5128 				break;
5129 			default:
5130 				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5131 				break;
5132 			}
5133 			break;
5134 		case 3: /* D3 vblank/vline */
5135 			switch (src_data) {
5136 			case 0: /* D3 vblank */
5137 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT))
5138 					DRM_DEBUG("IH: D3 vblank - IH event w/o asserted irq bit?\n");
5139 
5140 				if (rdev->irq.crtc_vblank_int[2]) {
5141 					drm_handle_vblank(rdev->ddev, 2);
5142 					rdev->pm.vblank_sync = true;
5143 					wake_up(&rdev->irq.vblank_queue);
5144  				}
5145 				if (atomic_read(&rdev->irq.pflip[2]))
5146 					radeon_crtc_handle_vblank(rdev, 2);
5147 				rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VBLANK_INTERRUPT;
5148 				DRM_DEBUG("IH: D3 vblank\n");
5149 
5150 				break;
5151 			case 1: /* D3 vline */
5152 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT))
5153 					DRM_DEBUG("IH: D3 vline - IH event w/o asserted irq bit?\n");
5154 
5155 				rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VLINE_INTERRUPT;
5156 				DRM_DEBUG("IH: D3 vline\n");
5157 
5158 				break;
5159 			default:
5160 				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5161 				break;
5162 			}
5163 			break;
5164 		case 4: /* D4 vblank/vline */
5165 			switch (src_data) {
5166 			case 0: /* D4 vblank */
5167 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT))
5168 					DRM_DEBUG("IH: D4 vblank - IH event w/o asserted irq bit?\n");
5169 
5170 				if (rdev->irq.crtc_vblank_int[3]) {
5171 					drm_handle_vblank(rdev->ddev, 3);
5172 					rdev->pm.vblank_sync = true;
5173 					wake_up(&rdev->irq.vblank_queue);
5174  				}
5175 				if (atomic_read(&rdev->irq.pflip[3]))
5176 					radeon_crtc_handle_vblank(rdev, 3);
5177 				rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VBLANK_INTERRUPT;
5178 				DRM_DEBUG("IH: D4 vblank\n");
5179 
5180 				break;
5181 			case 1: /* D4 vline */
5182 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT))
5183 					DRM_DEBUG("IH: D4 vline - IH event w/o asserted irq bit?\n");
5184 
5185 				rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VLINE_INTERRUPT;
5186 				DRM_DEBUG("IH: D4 vline\n");
5187 
5188 				break;
5189 			default:
5190 				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5191 				break;
5192 			}
5193 			break;
5194 		case 5: /* D5 vblank/vline */
5195 			switch (src_data) {
5196 			case 0: /* D5 vblank */
5197 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT))
5198 					DRM_DEBUG("IH: D5 vblank - IH event w/o asserted irq bit?\n");
5199 
5200 				if (rdev->irq.crtc_vblank_int[4]) {
5201 					drm_handle_vblank(rdev->ddev, 4);
5202 					rdev->pm.vblank_sync = true;
5203 					wake_up(&rdev->irq.vblank_queue);
5204  				}
5205 				if (atomic_read(&rdev->irq.pflip[4]))
5206 					radeon_crtc_handle_vblank(rdev, 4);
5207 				rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VBLANK_INTERRUPT;
5208 				DRM_DEBUG("IH: D5 vblank\n");
5209 
5210 				break;
5211 			case 1: /* D5 vline */
5212 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT))
5213 					DRM_DEBUG("IH: D5 vline - IH event w/o asserted irq bit?\n");
5214 
5215 				rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VLINE_INTERRUPT;
5216 				DRM_DEBUG("IH: D5 vline\n");
5217 
5218 				break;
5219 			default:
5220 				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5221 				break;
5222 			}
5223 			break;
5224 		case 6: /* D6 vblank/vline */
5225 			switch (src_data) {
5226 			case 0: /* D6 vblank */
5227 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT))
5228 					DRM_DEBUG("IH: D6 vblank - IH event w/o asserted irq bit?\n");
5229 
5230 				if (rdev->irq.crtc_vblank_int[5]) {
5231 					drm_handle_vblank(rdev->ddev, 5);
5232 					rdev->pm.vblank_sync = true;
5233 					wake_up(&rdev->irq.vblank_queue);
5234  				}
5235 				if (atomic_read(&rdev->irq.pflip[5]))
5236 					radeon_crtc_handle_vblank(rdev, 5);
5237 				rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VBLANK_INTERRUPT;
5238 				DRM_DEBUG("IH: D6 vblank\n");
5239 
5240 				break;
5241 			case 1: /* D6 vline */
5242 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT))
5243 					DRM_DEBUG("IH: D6 vline - IH event w/o asserted irq bit?\n");
5244 
5245 				rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VLINE_INTERRUPT;
5246 				DRM_DEBUG("IH: D6 vline\n");
5247 
5248 				break;
5249 			default:
5250 				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5251 				break;
5252 			}
5253 			break;
5254 		case 8: /* D1 page flip */
5255 		case 10: /* D2 page flip */
5256 		case 12: /* D3 page flip */
5257 		case 14: /* D4 page flip */
5258 		case 16: /* D5 page flip */
5259 		case 18: /* D6 page flip */
5260 			DRM_DEBUG("IH: D%d flip\n", ((src_id - 8) >> 1) + 1);
5261 			if (radeon_use_pflipirq > 0)
5262 				radeon_crtc_handle_flip(rdev, (src_id - 8) >> 1);
5263 			break;
5264 		case 42: /* HPD hotplug */
5265 			switch (src_data) {
5266 			case 0:
5267 				if (!(rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT))
5268 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5269 
5270 				rdev->irq.stat_regs.evergreen.disp_int &= ~DC_HPD1_INTERRUPT;
5271 				queue_hotplug = true;
5272 				DRM_DEBUG("IH: HPD1\n");
5273 				break;
5274 			case 1:
5275 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT))
5276 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5277 
5278 				rdev->irq.stat_regs.evergreen.disp_int_cont &= ~DC_HPD2_INTERRUPT;
5279 				queue_hotplug = true;
5280 				DRM_DEBUG("IH: HPD2\n");
5281 				break;
5282 			case 2:
5283 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT))
5284 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5285 
5286 				rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~DC_HPD3_INTERRUPT;
5287 				queue_hotplug = true;
5288 				DRM_DEBUG("IH: HPD3\n");
5289 				break;
5290 			case 3:
5291 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT))
5292 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5293 
5294 				rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~DC_HPD4_INTERRUPT;
5295 				queue_hotplug = true;
5296 				DRM_DEBUG("IH: HPD4\n");
5297 				break;
5298 			case 4:
5299 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT))
5300 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5301 
5302 				rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~DC_HPD5_INTERRUPT;
5303 				queue_hotplug = true;
5304 				DRM_DEBUG("IH: HPD5\n");
5305 				break;
5306 			case 5:
5307 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT))
5308 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5309 
5310 				rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~DC_HPD6_INTERRUPT;
5311 				queue_hotplug = true;
5312 				DRM_DEBUG("IH: HPD6\n");
5313 				break;
5314 			case 6:
5315 				if (!(rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_RX_INTERRUPT))
5316 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5317 
5318 				rdev->irq.stat_regs.evergreen.disp_int &= ~DC_HPD1_RX_INTERRUPT;
5319 				queue_dp = true;
5320 				DRM_DEBUG("IH: HPD_RX 1\n");
5321 				break;
5322 			case 7:
5323 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_RX_INTERRUPT))
5324 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5325 
5326 				rdev->irq.stat_regs.evergreen.disp_int_cont &= ~DC_HPD2_RX_INTERRUPT;
5327 				queue_dp = true;
5328 				DRM_DEBUG("IH: HPD_RX 2\n");
5329 				break;
5330 			case 8:
5331 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_RX_INTERRUPT))
5332 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5333 
5334 				rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~DC_HPD3_RX_INTERRUPT;
5335 				queue_dp = true;
5336 				DRM_DEBUG("IH: HPD_RX 3\n");
5337 				break;
5338 			case 9:
5339 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_RX_INTERRUPT))
5340 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5341 
5342 				rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~DC_HPD4_RX_INTERRUPT;
5343 				queue_dp = true;
5344 				DRM_DEBUG("IH: HPD_RX 4\n");
5345 				break;
5346 			case 10:
5347 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_RX_INTERRUPT))
5348 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5349 
5350 				rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~DC_HPD5_RX_INTERRUPT;
5351 				queue_dp = true;
5352 				DRM_DEBUG("IH: HPD_RX 5\n");
5353 				break;
5354 			case 11:
5355 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_RX_INTERRUPT))
5356 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5357 
5358 				rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~DC_HPD6_RX_INTERRUPT;
5359 				queue_dp = true;
5360 				DRM_DEBUG("IH: HPD_RX 6\n");
5361 				break;
5362 			default:
5363 				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5364 				break;
5365 			}
5366 			break;
5367 		case 44: /* hdmi */
5368 			switch (src_data) {
5369 			case 0:
5370 				if (!(rdev->irq.stat_regs.evergreen.afmt_status1 & AFMT_AZ_FORMAT_WTRIG))
5371 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5372 
5373 				rdev->irq.stat_regs.evergreen.afmt_status1 &= ~AFMT_AZ_FORMAT_WTRIG;
5374 				queue_hdmi = true;
5375 				DRM_DEBUG("IH: HDMI0\n");
5376 				break;
5377 			case 1:
5378 				if (!(rdev->irq.stat_regs.evergreen.afmt_status2 & AFMT_AZ_FORMAT_WTRIG))
5379 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5380 
5381 				rdev->irq.stat_regs.evergreen.afmt_status2 &= ~AFMT_AZ_FORMAT_WTRIG;
5382 				queue_hdmi = true;
5383 				DRM_DEBUG("IH: HDMI1\n");
5384 				break;
5385 			case 2:
5386 				if (!(rdev->irq.stat_regs.evergreen.afmt_status3 & AFMT_AZ_FORMAT_WTRIG))
5387 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5388 
5389 				rdev->irq.stat_regs.evergreen.afmt_status3 &= ~AFMT_AZ_FORMAT_WTRIG;
5390 				queue_hdmi = true;
5391 				DRM_DEBUG("IH: HDMI2\n");
5392 				break;
5393 			case 3:
5394 				if (!(rdev->irq.stat_regs.evergreen.afmt_status4 & AFMT_AZ_FORMAT_WTRIG))
5395 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5396 
5397 				rdev->irq.stat_regs.evergreen.afmt_status4 &= ~AFMT_AZ_FORMAT_WTRIG;
5398 				queue_hdmi = true;
5399 				DRM_DEBUG("IH: HDMI3\n");
5400 				break;
5401 			case 4:
5402 				if (!(rdev->irq.stat_regs.evergreen.afmt_status5 & AFMT_AZ_FORMAT_WTRIG))
5403 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5404 
5405 				rdev->irq.stat_regs.evergreen.afmt_status5 &= ~AFMT_AZ_FORMAT_WTRIG;
5406 				queue_hdmi = true;
5407 				DRM_DEBUG("IH: HDMI4\n");
5408 				break;
5409 			case 5:
5410 				if (!(rdev->irq.stat_regs.evergreen.afmt_status6 & AFMT_AZ_FORMAT_WTRIG))
5411 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5412 
5413 				rdev->irq.stat_regs.evergreen.afmt_status6 &= ~AFMT_AZ_FORMAT_WTRIG;
5414 				queue_hdmi = true;
5415 				DRM_DEBUG("IH: HDMI5\n");
5416 				break;
5417 			default:
5418 				DRM_ERROR("Unhandled interrupt: %d %d\n", src_id, src_data);
5419 				break;
5420 			}
5421 		case 96:
5422 			DRM_ERROR("SRBM_READ_ERROR: 0x%x\n", RREG32(SRBM_READ_ERROR));
5423 			WREG32(SRBM_INT_ACK, 0x1);
5424 			break;
5425 		case 124: /* UVD */
5426 			DRM_DEBUG("IH: UVD int: 0x%08x\n", src_data);
5427 			radeon_fence_process(rdev, R600_RING_TYPE_UVD_INDEX);
5428 			break;
5429 		case 146:
5430 		case 147:
5431 			addr = RREG32(VM_CONTEXT1_PROTECTION_FAULT_ADDR);
5432 			status = RREG32(VM_CONTEXT1_PROTECTION_FAULT_STATUS);
5433 			/* reset addr and status */
5434 			WREG32_P(VM_CONTEXT1_CNTL2, 1, ~1);
5435 			if (addr == 0x0 && status == 0x0)
5436 				break;
5437 			dev_err(rdev->dev, "GPU fault detected: %d 0x%08x\n", src_id, src_data);
5438 			dev_err(rdev->dev, "  VM_CONTEXT1_PROTECTION_FAULT_ADDR   0x%08X\n",
5439 				addr);
5440 			dev_err(rdev->dev, "  VM_CONTEXT1_PROTECTION_FAULT_STATUS 0x%08X\n",
5441 				status);
5442 			cayman_vm_decode_fault(rdev, status, addr);
5443 			break;
5444 		case 176: /* CP_INT in ring buffer */
5445 		case 177: /* CP_INT in IB1 */
5446 		case 178: /* CP_INT in IB2 */
5447 			DRM_DEBUG("IH: CP int: 0x%08x\n", src_data);
5448 			radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
5449 			break;
5450 		case 181: /* CP EOP event */
5451 			DRM_DEBUG("IH: CP EOP\n");
5452 			if (rdev->family >= CHIP_CAYMAN) {
5453 				switch (src_data) {
5454 				case 0:
5455 					radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
5456 					break;
5457 				case 1:
5458 					radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP1_INDEX);
5459 					break;
5460 				case 2:
5461 					radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP2_INDEX);
5462 					break;
5463 				}
5464 			} else
5465 				radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
5466 			break;
5467 		case 224: /* DMA trap event */
5468 			DRM_DEBUG("IH: DMA trap\n");
5469 			radeon_fence_process(rdev, R600_RING_TYPE_DMA_INDEX);
5470 			break;
5471 		case 230: /* thermal low to high */
5472 			DRM_DEBUG("IH: thermal low to high\n");
5473 			rdev->pm.dpm.thermal.high_to_low = false;
5474 			queue_thermal = true;
5475 			break;
5476 		case 231: /* thermal high to low */
5477 			DRM_DEBUG("IH: thermal high to low\n");
5478 			rdev->pm.dpm.thermal.high_to_low = true;
5479 			queue_thermal = true;
5480 			break;
5481 		case 233: /* GUI IDLE */
5482 			DRM_DEBUG("IH: GUI idle\n");
5483 			break;
5484 		case 244: /* DMA trap event */
5485 			if (rdev->family >= CHIP_CAYMAN) {
5486 				DRM_DEBUG("IH: DMA1 trap\n");
5487 				radeon_fence_process(rdev, CAYMAN_RING_TYPE_DMA1_INDEX);
5488 			}
5489 			break;
5490 		default:
5491 			DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5492 			break;
5493 		}
5494 
5495 		/* wptr/rptr are in bytes! */
5496 		rptr += 16;
5497 		rptr &= rdev->ih.ptr_mask;
5498 		WREG32(IH_RB_RPTR, rptr);
5499 	}
5500 	if (queue_dp)
5501 		schedule_work(&rdev->dp_work);
5502 	if (queue_hotplug)
5503 		taskqueue_enqueue(rdev->tq, &rdev->hotplug_work);
5504 	if (queue_hdmi)
5505 		taskqueue_enqueue(rdev->tq, &rdev->audio_work);
5506 	if (queue_thermal && rdev->pm.dpm_enabled)
5507 		taskqueue_enqueue(rdev->tq, &rdev->pm.dpm.thermal.work);
5508 	rdev->ih.rptr = rptr;
5509 	atomic_set(&rdev->ih.lock, 0);
5510 
5511 	/* make sure wptr hasn't changed while processing */
5512 	wptr = evergreen_get_ih_wptr(rdev);
5513 	if (wptr != rptr)
5514 		goto restart_ih;
5515 
5516 	return IRQ_HANDLED;
5517 }
5518 
5519 static void evergreen_uvd_init(struct radeon_device *rdev)
5520 {
5521 	int r;
5522 
5523 	if (!rdev->has_uvd)
5524 		return;
5525 
5526 	r = radeon_uvd_init(rdev);
5527 	if (r) {
5528 		dev_err(rdev->dev, "failed UVD (%d) init.\n", r);
5529 		/*
5530 		 * At this point rdev->uvd.vcpu_bo is NULL which trickles down
5531 		 * to early fails uvd_v2_2_resume() and thus nothing happens
5532 		 * there. So it is pointless to try to go through that code
5533 		 * hence why we disable uvd here.
5534 		 */
5535 		rdev->has_uvd = 0;
5536 		return;
5537 	}
5538 	rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_obj = NULL;
5539 	r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_UVD_INDEX], 4096);
5540 }
5541 
5542 static void evergreen_uvd_start(struct radeon_device *rdev)
5543 {
5544 	int r;
5545 
5546 	if (!rdev->has_uvd)
5547 		return;
5548 
5549 	r = uvd_v2_2_resume(rdev);
5550 	if (r) {
5551 		dev_err(rdev->dev, "failed UVD resume (%d).\n", r);
5552 		goto error;
5553 	}
5554 	r = radeon_fence_driver_start_ring(rdev, R600_RING_TYPE_UVD_INDEX);
5555 	if (r) {
5556 		dev_err(rdev->dev, "failed initializing UVD fences (%d).\n", r);
5557 		goto error;
5558 	}
5559 	return;
5560 
5561 error:
5562 	rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size = 0;
5563 }
5564 
5565 static void evergreen_uvd_resume(struct radeon_device *rdev)
5566 {
5567 	struct radeon_ring *ring;
5568 	int r;
5569 
5570 	if (!rdev->has_uvd || !rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size)
5571 		return;
5572 
5573 	ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX];
5574 	r = radeon_ring_init(rdev, ring, ring->ring_size, 0, RADEON_CP_PACKET2);
5575 	if (r) {
5576 		dev_err(rdev->dev, "failed initializing UVD ring (%d).\n", r);
5577 		return;
5578 	}
5579 	r = uvd_v1_0_init(rdev);
5580 	if (r) {
5581 		dev_err(rdev->dev, "failed initializing UVD (%d).\n", r);
5582 		return;
5583 	}
5584 }
5585 
/**
 * evergreen_startup - bring the GPU up into a working state
 *
 * @rdev: radeon_device pointer
 *
 * Programs the MC/GART, loads microcode, starts the CP/DMA/UVD rings,
 * the IRQ handler, the IB pool and audio.  Called from both init and
 * resume; the ordering of the steps below matters.
 *
 * Returns 0 on success, a negative error code on failure.
 */
static int evergreen_startup(struct radeon_device *rdev)
{
	struct radeon_ring *ring;
	int r;

	/* enable pcie gen2 link */
	evergreen_pcie_gen2_enable(rdev);
	/* enable aspm */
	evergreen_program_aspm(rdev);

	/* scratch needs to be initialized before MC */
	r = r600_vram_scratch_init(rdev);
	if (r)
		return r;

	evergreen_mc_program(rdev);

	/* DCE5 (NI) parts: load the MC microcode here unless dpm is
	 * enabled (presumably dpm handles it then — TODO confirm) */
	if (ASIC_IS_DCE5(rdev) && !rdev->pm.dpm_enabled) {
		r = ni_mc_load_microcode(rdev);
		if (r) {
			DRM_ERROR("Failed to load MC firmware!\n");
			return r;
		}
	}

	/* set up GPU address translation: AGP aperture or PCIE GART */
	if (rdev->flags & RADEON_IS_AGP) {
		evergreen_agp_enable(rdev);
	} else {
		r = evergreen_pcie_gart_enable(rdev);
		if (r)
			return r;
	}
	evergreen_gpu_init(rdev);

	/* allocate rlc buffers (IGP/fusion parts only) */
	if (rdev->flags & RADEON_IS_IGP) {
		rdev->rlc.reg_list = sumo_rlc_save_restore_register_list;
		rdev->rlc.reg_list_size =
			(u32)ARRAY_SIZE(sumo_rlc_save_restore_register_list);
		rdev->rlc.cs_data = evergreen_cs_data;
		r = sumo_rlc_init(rdev);
		if (r) {
			DRM_ERROR("Failed to init rlc BOs!\n");
			return r;
		}
	}

	/* allocate wb buffer */
	r = radeon_wb_init(rdev);
	if (r)
		return r;

	r = radeon_fence_driver_start_ring(rdev, RADEON_RING_TYPE_GFX_INDEX);
	if (r) {
		dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r);
		return r;
	}

	r = radeon_fence_driver_start_ring(rdev, R600_RING_TYPE_DMA_INDEX);
	if (r) {
		dev_err(rdev->dev, "failed initializing DMA fences (%d).\n", r);
		return r;
	}

	/* UVD failures are non-fatal: evergreen_uvd_start() logs and
	 * zeroes the UVD ring size on error */
	evergreen_uvd_start(rdev);

	/* Enable IRQ */
	if (!rdev->irq.installed) {
		r = radeon_irq_kms_init(rdev);
		if (r)
			return r;
	}

	r = r600_irq_init(rdev);
	if (r) {
		DRM_ERROR("radeon: IH init failed (%d).\n", r);
		radeon_irq_kms_fini(rdev);
		return r;
	}
	evergreen_irq_set(rdev);

	/* GFX (CP) ring */
	ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
	r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP_RPTR_OFFSET,
			     RADEON_CP_PACKET2);
	if (r)
		return r;

	/* DMA ring */
	ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX];
	r = radeon_ring_init(rdev, ring, ring->ring_size, R600_WB_DMA_RPTR_OFFSET,
			     DMA_PACKET(DMA_PACKET_NOP, 0, 0));
	if (r)
		return r;

	r = evergreen_cp_load_microcode(rdev);
	if (r)
		return r;
	r = evergreen_cp_resume(rdev);
	if (r)
		return r;
	r = r600_dma_resume(rdev);
	if (r)
		return r;

	evergreen_uvd_resume(rdev);

	r = radeon_ib_pool_init(rdev);
	if (r) {
		dev_err(rdev->dev, "IB initialization failed (%d).\n", r);
		return r;
	}

	r = radeon_audio_init(rdev);
	if (r) {
		DRM_ERROR("radeon: audio init failed\n");
		return r;
	}

	return 0;
}
5705 
5706 int evergreen_resume(struct radeon_device *rdev)
5707 {
5708 	int r;
5709 
5710 	/* reset the asic, the gfx blocks are often in a bad state
5711 	 * after the driver is unloaded or after a resume
5712 	 */
5713 	if (radeon_asic_reset(rdev))
5714 		dev_warn(rdev->dev, "GPU reset failed !\n");
5715 	/* Do not reset GPU before posting, on rv770 hw unlike on r500 hw,
5716 	 * posting will perform necessary task to bring back GPU into good
5717 	 * shape.
5718 	 */
5719 	/* post card */
5720 	atom_asic_init(rdev->mode_info.atom_context);
5721 
5722 	/* init golden registers */
5723 	evergreen_init_golden_registers(rdev);
5724 
5725 	if (rdev->pm.pm_method == PM_METHOD_DPM)
5726 		radeon_pm_resume(rdev);
5727 
5728 	rdev->accel_working = true;
5729 	r = evergreen_startup(rdev);
5730 	if (r) {
5731 		DRM_ERROR("evergreen startup failed on resume\n");
5732 		rdev->accel_working = false;
5733 		return r;
5734 	}
5735 
5736 	return r;
5737 
5738 }
5739 
/**
 * evergreen_suspend - quiesce the asic for suspend
 *
 * @rdev: radeon_device pointer
 *
 * Stops power management, audio, UVD, the CP and DMA engines, the IRQ
 * handler, writeback and the GART, in that order.
 *
 * Always returns 0.
 */
int evergreen_suspend(struct radeon_device *rdev)
{
	radeon_pm_suspend(rdev);
	radeon_audio_fini(rdev);
	if (rdev->has_uvd) {
		uvd_v1_0_fini(rdev);
		radeon_uvd_suspend(rdev);
	}
	r700_cp_stop(rdev);
	r600_dma_stop(rdev);
	evergreen_irq_suspend(rdev);
	radeon_wb_disable(rdev);
	evergreen_pcie_gart_disable(rdev);

	return 0;
}
5756 
/* The plan is to move initialization into this function and use
 * helper functions so that radeon_device_init does pretty much
 * nothing more than calling the asic-specific functions. This
 * should also allow us to remove a bunch of callback functions
 * like vram_info.
 */
/**
 * evergreen_init - asic-specific driver and hardware init
 *
 * @rdev: radeon_device pointer
 *
 * Reads the BIOS, posts the card if necessary, sets up all driver
 * subsystems (fences, MC, BO manager, power management, rings, GART)
 * and then runs evergreen_startup().  If startup fails, acceleration
 * is disabled but init itself still returns 0.
 *
 * Returns 0 on success, a negative error code on failure.
 */
int evergreen_init(struct radeon_device *rdev)
{
	int r;

	/* Read BIOS */
	if (!radeon_get_bios(rdev)) {
		if (ASIC_IS_AVIVO(rdev))
			return -EINVAL;
	}
	/* Must be an ATOMBIOS */
	if (!rdev->is_atom_bios) {
		dev_err(rdev->dev, "Expecting atombios for evergreen GPU\n");
		return -EINVAL;
	}
	r = radeon_atombios_init(rdev);
	if (r)
		return r;
	/* reset the asic, the gfx blocks are often in a bad state
	 * after the driver is unloaded or after a resume
	 */
	if (radeon_asic_reset(rdev))
		dev_warn(rdev->dev, "GPU reset failed !\n");
	/* Post card if necessary */
	if (!radeon_card_posted(rdev)) {
		if (!rdev->bios) {
			dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n");
			return -EINVAL;
		}
		DRM_INFO("GPU not posted. posting now...\n");
		atom_asic_init(rdev->mode_info.atom_context);
	}
	/* init golden registers */
	evergreen_init_golden_registers(rdev);
	/* Initialize scratch registers */
	r600_scratch_init(rdev);
	/* Initialize surface registers */
	radeon_surface_init(rdev);
	/* Initialize clocks */
	radeon_get_clock_info(rdev->ddev);
	/* Fence driver */
	r = radeon_fence_driver_init(rdev);
	if (r)
		return r;
	/* initialize AGP; failure is non-fatal, just fall back */
	if (rdev->flags & RADEON_IS_AGP) {
		r = radeon_agp_init(rdev);
		if (r)
			radeon_agp_disable(rdev);
	}
	/* initialize memory controller */
	r = evergreen_mc_init(rdev);
	if (r)
		return r;
	/* Memory manager */
	r = radeon_bo_init(rdev);
	if (r)
		return r;

	/* load CP/RLC (and on DCE5 also MC) microcode if not done yet */
	if (ASIC_IS_DCE5(rdev)) {
		if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw || !rdev->mc_fw) {
			r = ni_init_microcode(rdev);
			if (r) {
				DRM_ERROR("Failed to load firmware!\n");
				return r;
			}
		}
	} else {
		if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw) {
			r = r600_init_microcode(rdev);
			if (r) {
				DRM_ERROR("Failed to load firmware!\n");
				return r;
			}
		}
	}

	/* Initialize power management */
	radeon_pm_init(rdev);

	/* describe the GFX (1MB) and DMA (64kB) rings */
	rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ring_obj = NULL;
	r600_ring_init(rdev, &rdev->ring[RADEON_RING_TYPE_GFX_INDEX], 1024 * 1024);

	rdev->ring[R600_RING_TYPE_DMA_INDEX].ring_obj = NULL;
	r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_DMA_INDEX], 64 * 1024);

	evergreen_uvd_init(rdev);

	/* interrupt ring (64kB) */
	rdev->ih.ring_obj = NULL;
	r600_ih_ring_init(rdev, 64 * 1024);

	r = r600_pcie_gart_init(rdev);
	if (r)
		return r;

	rdev->accel_working = true;
	r = evergreen_startup(rdev);
	if (r) {
		/* startup failure: tear the accel subsystems back down
		 * and continue without acceleration */
		dev_err(rdev->dev, "disabling GPU acceleration\n");
		r700_cp_fini(rdev);
		r600_dma_fini(rdev);
		r600_irq_fini(rdev);
		if (rdev->flags & RADEON_IS_IGP)
			sumo_rlc_fini(rdev);
		radeon_wb_fini(rdev);
		radeon_ib_pool_fini(rdev);
		radeon_irq_kms_fini(rdev);
		evergreen_pcie_gart_fini(rdev);
		rdev->accel_working = false;
	}

	/* Don't start up if the MC ucode is missing on BTC parts.
	 * The default clocks and voltages before the MC ucode
	 * is loaded are not sufficient for advanced operations.
	 */
	if (ASIC_IS_DCE5(rdev)) {
		if (!rdev->mc_fw && !(rdev->flags & RADEON_IS_IGP)) {
			DRM_ERROR("radeon: MC ucode required for NI+.\n");
			return -EINVAL;
		}
	}

	return 0;
}
5886 
/**
 * evergreen_fini - tear down the asic-specific driver state
 *
 * @rdev: radeon_device pointer
 *
 * Mirror of evergreen_init(): shuts down all subsystems in reverse
 * dependency order and frees the BIOS copy.  The teardown order must
 * not be changed.
 */
void evergreen_fini(struct radeon_device *rdev)
{
	radeon_pm_fini(rdev);
	radeon_audio_fini(rdev);
	r700_cp_fini(rdev);
	r600_dma_fini(rdev);
	r600_irq_fini(rdev);
	if (rdev->flags & RADEON_IS_IGP)
		sumo_rlc_fini(rdev);
	radeon_wb_fini(rdev);
	radeon_ib_pool_fini(rdev);
	radeon_irq_kms_fini(rdev);
	uvd_v1_0_fini(rdev);
	radeon_uvd_fini(rdev);
	evergreen_pcie_gart_fini(rdev);
	r600_vram_scratch_fini(rdev);
	radeon_gem_fini(rdev);
	radeon_fence_driver_fini(rdev);
	radeon_agp_fini(rdev);
	radeon_bo_fini(rdev);
	radeon_atombios_fini(rdev);
	if (ASIC_IS_DCE5(rdev))
		ni_fini_microcode(rdev);
	else
		r600_fini_microcode(rdev);
	kfree(rdev->bios);
	rdev->bios = NULL;
}
5915 
/**
 * evergreen_pcie_gen2_enable - try to switch the link to PCIE gen 2 speed
 *
 * @rdev: radeon_device pointer
 *
 * Skipped for IGP, non-PCIE and X2 boards, when the module parameter
 * radeon.pcie_gen2=0 is set, when the bus does not advertise 5.0/8.0 GT/s
 * support, or when gen 2 speed is already active.  Otherwise programs the
 * PCIE link-control registers to request the speed change.
 */
void evergreen_pcie_gen2_enable(struct radeon_device *rdev)
{
	u32 link_width_cntl, speed_cntl, mask;
	int ret;

	/* user disabled gen2 via module parameter */
	if (radeon_pcie_gen2 == 0)
		return;

	if (rdev->flags & RADEON_IS_IGP)
		return;

	if (!(rdev->flags & RADEON_IS_PCIE))
		return;

	/* x2 cards have a special sequence */
	if (ASIC_IS_X2(rdev))
		return;

	ret = drm_pcie_get_speed_cap_mask(rdev->ddev, &mask);
	if (ret != 0)
		return;

	/* nothing to do unless the bus supports 5.0 or 8.0 GT/s */
	if (!(mask & (DRM_PCIE_SPEED_50 | DRM_PCIE_SPEED_80)))
		return;

	speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
	if (speed_cntl & LC_CURRENT_DATA_RATE) {
		DRM_INFO("PCIE gen 2 link speeds already enabled\n");
		return;
	}

	DRM_INFO("enabling PCIE gen 2 link speeds, disable with radeon.pcie_gen2=0\n");

	if ((speed_cntl & LC_OTHER_SIDE_EVER_SENT_GEN2) ||
	    (speed_cntl & LC_OTHER_SIDE_SUPPORTS_GEN2)) {

		/* the other side talks gen2: run the speed-change
		 * sequence (each step re-reads the register) */
		link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
		link_width_cntl &= ~LC_UPCONFIGURE_DIS;
		WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);

		speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
		speed_cntl &= ~LC_TARGET_LINK_SPEED_OVERRIDE_EN;
		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);

		/* pulse the failed-speed-change counter clear bit */
		speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
		speed_cntl |= LC_CLR_FAILED_SPD_CHANGE_CNT;
		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);

		speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
		speed_cntl &= ~LC_CLR_FAILED_SPD_CHANGE_CNT;
		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);

		speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
		speed_cntl |= LC_GEN2_EN_STRAP;
		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);

	} else {
		link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
		/* XXX: only disable it if gen1 bridge vendor == 0x111d or 0x1106 */
		if (1)
			link_width_cntl |= LC_UPCONFIGURE_DIS;
		else
			link_width_cntl &= ~LC_UPCONFIGURE_DIS;
		WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
	}
}
5982 
/**
 * evergreen_program_aspm - program PCIE ASPM (L0s/L1) power savings
 *
 * @rdev: radeon_device pointer
 *
 * Configures the PIF PHY and PCIE link-control registers for ASPM.
 * L0s is disabled on a per-family basis; L1 and PLL-off-in-L1 are
 * enabled by default.  Skipped entirely when radeon.aspm=0 or on
 * non-PCIE boards.  All writes are guarded by data != orig so only
 * changed registers are touched.
 */
void evergreen_program_aspm(struct radeon_device *rdev)
{
	u32 data, orig;
	u32 pcie_lc_cntl, pcie_lc_cntl_old;
	bool disable_l0s, disable_l1 = false, disable_plloff_in_l1 = false;
	/* fusion_platform = true
	 * if the system is a fusion system
	 * (APU or DGPU in a fusion system).
	 * todo: check if the system is a fusion platform.
	 */
	bool fusion_platform = false;

	/* user disabled aspm via module parameter */
	if (radeon_aspm == 0)
		return;

	if (!(rdev->flags & RADEON_IS_PCIE))
		return;

	/* these families keep L0s off; everyone else gets it */
	switch (rdev->family) {
	case CHIP_CYPRESS:
	case CHIP_HEMLOCK:
	case CHIP_JUNIPER:
	case CHIP_REDWOOD:
	case CHIP_CEDAR:
	case CHIP_SUMO:
	case CHIP_SUMO2:
	case CHIP_PALM:
	case CHIP_ARUBA:
		disable_l0s = true;
		break;
	default:
		disable_l0s = false;
		break;
	}

	if (rdev->flags & RADEON_IS_IGP)
		fusion_platform = true; /* XXX also dGPUs in a fusion system */

	/* PIF pairing mode depends on whether this is a fusion system */
	data = orig = RREG32_PIF_PHY0(PB0_PIF_PAIRING);
	if (fusion_platform)
		data &= ~MULTI_PIF;
	else
		data |= MULTI_PIF;
	if (data != orig)
		WREG32_PIF_PHY0(PB0_PIF_PAIRING, data);

	data = orig = RREG32_PIF_PHY1(PB1_PIF_PAIRING);
	if (fusion_platform)
		data &= ~MULTI_PIF;
	else
		data |= MULTI_PIF;
	if (data != orig)
		WREG32_PIF_PHY1(PB1_PIF_PAIRING, data);

	pcie_lc_cntl = pcie_lc_cntl_old = RREG32_PCIE_PORT(PCIE_LC_CNTL);
	pcie_lc_cntl &= ~(LC_L0S_INACTIVITY_MASK | LC_L1_INACTIVITY_MASK);
	if (!disable_l0s) {
		/* L0s inactivity timeout; BARTS+ uses a longer value */
		if (rdev->family >= CHIP_BARTS)
			pcie_lc_cntl |= LC_L0S_INACTIVITY(7);
		else
			pcie_lc_cntl |= LC_L0S_INACTIVITY(3);
	}

	if (!disable_l1) {
		/* L1 inactivity timeout */
		if (rdev->family >= CHIP_BARTS)
			pcie_lc_cntl |= LC_L1_INACTIVITY(7);
		else
			pcie_lc_cntl |= LC_L1_INACTIVITY(8);

		if (!disable_plloff_in_l1) {
			/* allow the PHY PLLs to power down in L1 on
			 * both PIF PHYs, lanes 0 and 1 */
			data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0);
			data &= ~(PLL_POWER_STATE_IN_OFF_0_MASK | PLL_POWER_STATE_IN_TXS2_0_MASK);
			data |= PLL_POWER_STATE_IN_OFF_0(7) | PLL_POWER_STATE_IN_TXS2_0(7);
			if (data != orig)
				WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0, data);

			data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1);
			data &= ~(PLL_POWER_STATE_IN_OFF_1_MASK | PLL_POWER_STATE_IN_TXS2_1_MASK);
			data |= PLL_POWER_STATE_IN_OFF_1(7) | PLL_POWER_STATE_IN_TXS2_1(7);
			if (data != orig)
				WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1, data);

			data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0);
			data &= ~(PLL_POWER_STATE_IN_OFF_0_MASK | PLL_POWER_STATE_IN_TXS2_0_MASK);
			data |= PLL_POWER_STATE_IN_OFF_0(7) | PLL_POWER_STATE_IN_TXS2_0(7);
			if (data != orig)
				WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0, data);

			data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1);
			data &= ~(PLL_POWER_STATE_IN_OFF_1_MASK | PLL_POWER_STATE_IN_TXS2_1_MASK);
			data |= PLL_POWER_STATE_IN_OFF_1(7) | PLL_POWER_STATE_IN_TXS2_1(7);
			if (data != orig)
				WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1, data);

			if (rdev->family >= CHIP_BARTS) {
				/* BARTS+: also program the PLL ramp-up time */
				data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0);
				data &= ~PLL_RAMP_UP_TIME_0_MASK;
				data |= PLL_RAMP_UP_TIME_0(4);
				if (data != orig)
					WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0, data);

				data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1);
				data &= ~PLL_RAMP_UP_TIME_1_MASK;
				data |= PLL_RAMP_UP_TIME_1(4);
				if (data != orig)
					WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1, data);

				data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0);
				data &= ~PLL_RAMP_UP_TIME_0_MASK;
				data |= PLL_RAMP_UP_TIME_0(4);
				if (data != orig)
					WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0, data);

				data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1);
				data &= ~PLL_RAMP_UP_TIME_1_MASK;
				data |= PLL_RAMP_UP_TIME_1(4);
				if (data != orig)
					WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1, data);
			}

			data = orig = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
			data &= ~LC_DYN_LANES_PWR_STATE_MASK;
			data |= LC_DYN_LANES_PWR_STATE(3);
			if (data != orig)
				WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, data);

			if (rdev->family >= CHIP_BARTS) {
				/* BARTS+: LS2 exit time on both PIF PHYs */
				data = orig = RREG32_PIF_PHY0(PB0_PIF_CNTL);
				data &= ~LS2_EXIT_TIME_MASK;
				data |= LS2_EXIT_TIME(1);
				if (data != orig)
					WREG32_PIF_PHY0(PB0_PIF_CNTL, data);

				data = orig = RREG32_PIF_PHY1(PB1_PIF_CNTL);
				data &= ~LS2_EXIT_TIME_MASK;
				data |= LS2_EXIT_TIME(1);
				if (data != orig)
					WREG32_PIF_PHY1(PB1_PIF_CNTL, data);
			}
		}
	}

	/* evergreen parts only */
	if (rdev->family < CHIP_BARTS)
		pcie_lc_cntl |= LC_PMI_TO_L1_DIS;

	if (pcie_lc_cntl != pcie_lc_cntl_old)
		WREG32_PCIE_PORT(PCIE_LC_CNTL, pcie_lc_cntl);
}
6132