xref: /dragonfly/sys/dev/drm/radeon/evergreen.c (revision b9a6fe08)
1 /*
2  * Copyright 2010 Advanced Micro Devices, Inc.
3  *
4  * Permission is hereby granted, free of charge, to any person obtaining a
5  * copy of this software and associated documentation files (the "Software"),
6  * to deal in the Software without restriction, including without limitation
7  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8  * and/or sell copies of the Software, and to permit persons to whom the
9  * Software is furnished to do so, subject to the following conditions:
10  *
11  * The above copyright notice and this permission notice shall be included in
12  * all copies or substantial portions of the Software.
13  *
14  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
17  * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18  * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19  * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20  * OTHER DEALINGS IN THE SOFTWARE.
21  *
22  * Authors: Alex Deucher
23  */
24 #include <linux/firmware.h>
25 #include <drm/drmP.h>
26 #include "radeon.h"
27 #include "radeon_asic.h"
28 #include "radeon_audio.h"
29 #include <drm/radeon_drm.h>
30 #include "evergreend.h"
31 #include "atom.h"
32 #include "avivod.h"
33 #include "evergreen_reg.h"
34 #include "evergreen_blit_shaders.h"
35 #include "radeon_ucode.h"
36 
37 /*
38  * Indirect registers accessor
39  */
40 u32 eg_cg_rreg(struct radeon_device *rdev, u32 reg)
41 {
42 	u32 r;
43 
44 	lockmgr(&rdev->cg_idx_lock, LK_EXCLUSIVE);
45 	WREG32(EVERGREEN_CG_IND_ADDR, ((reg) & 0xffff));
46 	r = RREG32(EVERGREEN_CG_IND_DATA);
47 	lockmgr(&rdev->cg_idx_lock, LK_RELEASE);
48 	return r;
49 }
50 
51 void eg_cg_wreg(struct radeon_device *rdev, u32 reg, u32 v)
52 {
53 	lockmgr(&rdev->cg_idx_lock, LK_EXCLUSIVE);
54 	WREG32(EVERGREEN_CG_IND_ADDR, ((reg) & 0xffff));
55 	WREG32(EVERGREEN_CG_IND_DATA, (v));
56 	lockmgr(&rdev->cg_idx_lock, LK_RELEASE);
57 }
58 
59 u32 eg_pif_phy0_rreg(struct radeon_device *rdev, u32 reg)
60 {
61 	u32 r;
62 
63 	lockmgr(&rdev->pif_idx_lock, LK_EXCLUSIVE);
64 	WREG32(EVERGREEN_PIF_PHY0_INDEX, ((reg) & 0xffff));
65 	r = RREG32(EVERGREEN_PIF_PHY0_DATA);
66 	lockmgr(&rdev->pif_idx_lock, LK_RELEASE);
67 	return r;
68 }
69 
70 void eg_pif_phy0_wreg(struct radeon_device *rdev, u32 reg, u32 v)
71 {
72 	lockmgr(&rdev->pif_idx_lock, LK_EXCLUSIVE);
73 	WREG32(EVERGREEN_PIF_PHY0_INDEX, ((reg) & 0xffff));
74 	WREG32(EVERGREEN_PIF_PHY0_DATA, (v));
75 	lockmgr(&rdev->pif_idx_lock, LK_RELEASE);
76 }
77 
78 u32 eg_pif_phy1_rreg(struct radeon_device *rdev, u32 reg)
79 {
80 	u32 r;
81 
82 	lockmgr(&rdev->pif_idx_lock, LK_EXCLUSIVE);
83 	WREG32(EVERGREEN_PIF_PHY1_INDEX, ((reg) & 0xffff));
84 	r = RREG32(EVERGREEN_PIF_PHY1_DATA);
85 	lockmgr(&rdev->pif_idx_lock, LK_RELEASE);
86 	return r;
87 }
88 
89 void eg_pif_phy1_wreg(struct radeon_device *rdev, u32 reg, u32 v)
90 {
91 	lockmgr(&rdev->pif_idx_lock, LK_EXCLUSIVE);
92 	WREG32(EVERGREEN_PIF_PHY1_INDEX, ((reg) & 0xffff));
93 	WREG32(EVERGREEN_PIF_PHY1_DATA, (v));
94 	lockmgr(&rdev->pif_idx_lock, LK_RELEASE);
95 }
96 
/* Per-CRTC register block offsets, indexed by display controller id (0-5). */
static const u32 crtc_offsets[6] =
{
	EVERGREEN_CRTC0_REGISTER_OFFSET,
	EVERGREEN_CRTC1_REGISTER_OFFSET,
	EVERGREEN_CRTC2_REGISTER_OFFSET,
	EVERGREEN_CRTC3_REGISTER_OFFSET,
	EVERGREEN_CRTC4_REGISTER_OFFSET,
	EVERGREEN_CRTC5_REGISTER_OFFSET
};
106 
107 #include "clearstate_evergreen.h"
108 
/*
 * Register offsets, one per entry.  Per the name, this is the RLC
 * save/restore list for Sumo-class parts; the consumer is not visible
 * in this part of the file (NOTE(review): presumably the RLC init
 * code — confirm against sumo_rlc_init()).
 */
static const u32 sumo_rlc_save_restore_register_list[] =
{
	0x98fc,
	0x9830,
	0x9834,
	0x9838,
	0x9870,
	0x9874,
	0x8a14,
	0x8b24,
	0x8bcc,
	0x8b10,
	0x8d00,
	0x8d04,
	0x8c00,
	0x8c04,
	0x8c08,
	0x8c0c,
	0x8d8c,
	0x8c20,
	0x8c24,
	0x8c28,
	0x8c18,
	0x8c1c,
	0x8cf0,
	0x8e2c,
	0x8e38,
	0x8c30,
	0x9508,
	0x9688,
	0x9608,
	0x960c,
	0x9610,
	0x9614,
	0x88c4,
	0x88d4,
	0xa008,
	0x900c,
	0x9100,
	0x913c,
	0x98f8,
	0x98f4,
	0x9b7c,
	0x3f8c,
	0x8950,
	0x8954,
	0x8a18,
	0x8b28,
	0x9144,
	0x9148,
	0x914c,
	0x3f90,
	0x3f94,
	0x915c,
	0x9160,
	0x9178,
	0x917c,
	0x9180,
	0x918c,
	0x9190,
	0x9194,
	0x9198,
	0x919c,
	0x91a8,
	0x91ac,
	0x91b0,
	0x91b4,
	0x91b8,
	0x91c4,
	0x91c8,
	0x91cc,
	0x91d0,
	0x91d4,
	0x91e0,
	0x91e4,
	0x91ec,
	0x91f0,
	0x91f4,
	0x9200,
	0x9204,
	0x929c,
	0x9150,
	0x802c,
};
193 
194 static void evergreen_gpu_init(struct radeon_device *rdev);
195 
/*
 * "Golden" register settings shared by Cypress/Hemlock/Juniper/Redwood:
 * flat triples of {MMIO offset, AND mask, OR value}, applied by
 * radeon_program_register_sequence() from
 * evergreen_init_golden_registers().
 */
static const u32 evergreen_golden_registers[] =
{
	0x3f90, 0xffff0000, 0xff000000,
	0x9148, 0xffff0000, 0xff000000,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0x9b7c, 0xffffffff, 0x00000000,
	0x8a14, 0xffffffff, 0x00000007,
	0x8b10, 0xffffffff, 0x00000000,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0xffffffff, 0x000000c2,
	0x88d4, 0xffffffff, 0x00000010,
	0x8974, 0xffffffff, 0x00000000,
	0xc78, 0x00000080, 0x00000080,
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0xffffffff, 0x001000f0,
	0x6104, 0x01000300, 0x00000000,
	0x5bc0, 0x00300000, 0x00000000,
	0x7030, 0xffffffff, 0x00000011,
	0x7c30, 0xffffffff, 0x00000011,
	0x10830, 0xffffffff, 0x00000011,
	0x11430, 0xffffffff, 0x00000011,
	0x12030, 0xffffffff, 0x00000011,
	0x12c30, 0xffffffff, 0x00000011,
	0xd02c, 0xffffffff, 0x08421000,
	0x240c, 0xffffffff, 0x00000380,
	0x8b24, 0xffffffff, 0x00ff0fff,
	0x28a4c, 0x06000000, 0x06000000,
	0x10c, 0x00000001, 0x00000001,
	0x8d00, 0xffffffff, 0x100e4848,
	0x8d04, 0xffffffff, 0x00164745,
	0x8c00, 0xffffffff, 0xe4000003,
	0x8c04, 0xffffffff, 0x40600060,
	0x8c08, 0xffffffff, 0x001c001c,
	0x8cf0, 0xffffffff, 0x08e00620,
	0x8c20, 0xffffffff, 0x00800080,
	0x8c24, 0xffffffff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0xffffffff, 0x00001010,
	0x28350, 0xffffffff, 0x00000000,
	0xa008, 0xffffffff, 0x00010000,
	0x5c4, 0xffffffff, 0x00000001,
	0x9508, 0xffffffff, 0x00000002,
	0x913c, 0x0000000f, 0x0000000a
};
241 
/*
 * Second golden-register table for Cypress/Hemlock/Juniper/Redwood:
 * {MMIO offset, AND mask, OR value} triples clearing each listed
 * register; applied by radeon_program_register_sequence() from
 * evergreen_init_golden_registers().
 */
static const u32 evergreen_golden_registers2[] =
{
	0x2f4c, 0xffffffff, 0x00000000,
	0x54f4, 0xffffffff, 0x00000000,
	0x54f0, 0xffffffff, 0x00000000,
	0x5498, 0xffffffff, 0x00000000,
	0x549c, 0xffffffff, 0x00000000,
	0x5494, 0xffffffff, 0x00000000,
	0x53cc, 0xffffffff, 0x00000000,
	0x53c8, 0xffffffff, 0x00000000,
	0x53c4, 0xffffffff, 0x00000000,
	0x53c0, 0xffffffff, 0x00000000,
	0x53bc, 0xffffffff, 0x00000000,
	0x53b8, 0xffffffff, 0x00000000,
	0x53b4, 0xffffffff, 0x00000000,
	0x53b0, 0xffffffff, 0x00000000
};
259 
/*
 * Medium-grain clock gating init for Cypress/Hemlock:
 * {MMIO offset, AND mask, OR value} triples, applied by
 * radeon_program_register_sequence() from
 * evergreen_init_golden_registers().  The 0x802c writes that recur in
 * the table select different banks before the repeated 0x915c-0x929c
 * sequences (NOTE(review): inferred from the write pattern — confirm
 * against the register spec).
 */
static const u32 cypress_mgcg_init[] =
{
	0x802c, 0xffffffff, 0xc0000000,
	0x5448, 0xffffffff, 0x00000100,
	0x55e4, 0xffffffff, 0x00000100,
	0x160c, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0xc164, 0xffffffff, 0x00000100,
	0x8a18, 0xffffffff, 0x00000100,
	0x897c, 0xffffffff, 0x06000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9a60, 0xffffffff, 0x00000100,
	0x9868, 0xffffffff, 0x00000100,
	0x8d58, 0xffffffff, 0x00000100,
	0x9510, 0xffffffff, 0x00000100,
	0x949c, 0xffffffff, 0x00000100,
	0x9654, 0xffffffff, 0x00000100,
	0x9030, 0xffffffff, 0x00000100,
	0x9034, 0xffffffff, 0x00000100,
	0x9038, 0xffffffff, 0x00000100,
	0x903c, 0xffffffff, 0x00000100,
	0x9040, 0xffffffff, 0x00000100,
	0xa200, 0xffffffff, 0x00000100,
	0xa204, 0xffffffff, 0x00000100,
	0xa208, 0xffffffff, 0x00000100,
	0xa20c, 0xffffffff, 0x00000100,
	0x971c, 0xffffffff, 0x00000100,
	0x977c, 0xffffffff, 0x00000100,
	0x3f80, 0xffffffff, 0x00000100,
	0xa210, 0xffffffff, 0x00000100,
	0xa214, 0xffffffff, 0x00000100,
	0x4d8, 0xffffffff, 0x00000100,
	0x9784, 0xffffffff, 0x00000100,
	0x9698, 0xffffffff, 0x00000100,
	0x4d4, 0xffffffff, 0x00000200,
	0x30cc, 0xffffffff, 0x00000100,
	0xd0c0, 0xffffffff, 0xff000100,
	0x802c, 0xffffffff, 0x40000000,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x9178, 0xffffffff, 0x00070000,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x918c, 0xffffffff, 0x00010006,
	0x9190, 0xffffffff, 0x00090008,
	0x9194, 0xffffffff, 0x00070000,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91a8, 0xffffffff, 0x00010006,
	0x91ac, 0xffffffff, 0x00090008,
	0x91b0, 0xffffffff, 0x00070000,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91c4, 0xffffffff, 0x00010006,
	0x91c8, 0xffffffff, 0x00090008,
	0x91cc, 0xffffffff, 0x00070000,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91e0, 0xffffffff, 0x00010006,
	0x91e4, 0xffffffff, 0x00090008,
	0x91e8, 0xffffffff, 0x00000000,
	0x91ec, 0xffffffff, 0x00070000,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x9200, 0xffffffff, 0x00010006,
	0x9204, 0xffffffff, 0x00090008,
	0x9208, 0xffffffff, 0x00070000,
	0x920c, 0xffffffff, 0x00030002,
	0x9210, 0xffffffff, 0x00050004,
	0x921c, 0xffffffff, 0x00010006,
	0x9220, 0xffffffff, 0x00090008,
	0x9224, 0xffffffff, 0x00070000,
	0x9228, 0xffffffff, 0x00030002,
	0x922c, 0xffffffff, 0x00050004,
	0x9238, 0xffffffff, 0x00010006,
	0x923c, 0xffffffff, 0x00090008,
	0x9240, 0xffffffff, 0x00070000,
	0x9244, 0xffffffff, 0x00030002,
	0x9248, 0xffffffff, 0x00050004,
	0x9254, 0xffffffff, 0x00010006,
	0x9258, 0xffffffff, 0x00090008,
	0x925c, 0xffffffff, 0x00070000,
	0x9260, 0xffffffff, 0x00030002,
	0x9264, 0xffffffff, 0x00050004,
	0x9270, 0xffffffff, 0x00010006,
	0x9274, 0xffffffff, 0x00090008,
	0x9278, 0xffffffff, 0x00070000,
	0x927c, 0xffffffff, 0x00030002,
	0x9280, 0xffffffff, 0x00050004,
	0x928c, 0xffffffff, 0x00010006,
	0x9290, 0xffffffff, 0x00090008,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0x40010000,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x9178, 0xffffffff, 0x00070000,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x918c, 0xffffffff, 0x00010006,
	0x9190, 0xffffffff, 0x00090008,
	0x9194, 0xffffffff, 0x00070000,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91a8, 0xffffffff, 0x00010006,
	0x91ac, 0xffffffff, 0x00090008,
	0x91b0, 0xffffffff, 0x00070000,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91c4, 0xffffffff, 0x00010006,
	0x91c8, 0xffffffff, 0x00090008,
	0x91cc, 0xffffffff, 0x00070000,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91e0, 0xffffffff, 0x00010006,
	0x91e4, 0xffffffff, 0x00090008,
	0x91e8, 0xffffffff, 0x00000000,
	0x91ec, 0xffffffff, 0x00070000,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x9200, 0xffffffff, 0x00010006,
	0x9204, 0xffffffff, 0x00090008,
	0x9208, 0xffffffff, 0x00070000,
	0x920c, 0xffffffff, 0x00030002,
	0x9210, 0xffffffff, 0x00050004,
	0x921c, 0xffffffff, 0x00010006,
	0x9220, 0xffffffff, 0x00090008,
	0x9224, 0xffffffff, 0x00070000,
	0x9228, 0xffffffff, 0x00030002,
	0x922c, 0xffffffff, 0x00050004,
	0x9238, 0xffffffff, 0x00010006,
	0x923c, 0xffffffff, 0x00090008,
	0x9240, 0xffffffff, 0x00070000,
	0x9244, 0xffffffff, 0x00030002,
	0x9248, 0xffffffff, 0x00050004,
	0x9254, 0xffffffff, 0x00010006,
	0x9258, 0xffffffff, 0x00090008,
	0x925c, 0xffffffff, 0x00070000,
	0x9260, 0xffffffff, 0x00030002,
	0x9264, 0xffffffff, 0x00050004,
	0x9270, 0xffffffff, 0x00010006,
	0x9274, 0xffffffff, 0x00090008,
	0x9278, 0xffffffff, 0x00070000,
	0x927c, 0xffffffff, 0x00030002,
	0x9280, 0xffffffff, 0x00050004,
	0x928c, 0xffffffff, 0x00010006,
	0x9290, 0xffffffff, 0x00090008,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0xc0000000
};
412 
/*
 * Medium-grain clock gating init for Redwood:
 * {MMIO offset, AND mask, OR value} triples, applied by
 * radeon_program_register_sequence() from
 * evergreen_init_golden_registers().
 */
static const u32 redwood_mgcg_init[] =
{
	0x802c, 0xffffffff, 0xc0000000,
	0x5448, 0xffffffff, 0x00000100,
	0x55e4, 0xffffffff, 0x00000100,
	0x160c, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0xc164, 0xffffffff, 0x00000100,
	0x8a18, 0xffffffff, 0x00000100,
	0x897c, 0xffffffff, 0x06000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9a60, 0xffffffff, 0x00000100,
	0x9868, 0xffffffff, 0x00000100,
	0x8d58, 0xffffffff, 0x00000100,
	0x9510, 0xffffffff, 0x00000100,
	0x949c, 0xffffffff, 0x00000100,
	0x9654, 0xffffffff, 0x00000100,
	0x9030, 0xffffffff, 0x00000100,
	0x9034, 0xffffffff, 0x00000100,
	0x9038, 0xffffffff, 0x00000100,
	0x903c, 0xffffffff, 0x00000100,
	0x9040, 0xffffffff, 0x00000100,
	0xa200, 0xffffffff, 0x00000100,
	0xa204, 0xffffffff, 0x00000100,
	0xa208, 0xffffffff, 0x00000100,
	0xa20c, 0xffffffff, 0x00000100,
	0x971c, 0xffffffff, 0x00000100,
	0x977c, 0xffffffff, 0x00000100,
	0x3f80, 0xffffffff, 0x00000100,
	0xa210, 0xffffffff, 0x00000100,
	0xa214, 0xffffffff, 0x00000100,
	0x4d8, 0xffffffff, 0x00000100,
	0x9784, 0xffffffff, 0x00000100,
	0x9698, 0xffffffff, 0x00000100,
	0x4d4, 0xffffffff, 0x00000200,
	0x30cc, 0xffffffff, 0x00000100,
	0xd0c0, 0xffffffff, 0xff000100,
	0x802c, 0xffffffff, 0x40000000,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x9178, 0xffffffff, 0x00070000,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x918c, 0xffffffff, 0x00010006,
	0x9190, 0xffffffff, 0x00090008,
	0x9194, 0xffffffff, 0x00070000,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91a8, 0xffffffff, 0x00010006,
	0x91ac, 0xffffffff, 0x00090008,
	0x91b0, 0xffffffff, 0x00070000,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91c4, 0xffffffff, 0x00010006,
	0x91c8, 0xffffffff, 0x00090008,
	0x91cc, 0xffffffff, 0x00070000,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91e0, 0xffffffff, 0x00010006,
	0x91e4, 0xffffffff, 0x00090008,
	0x91e8, 0xffffffff, 0x00000000,
	0x91ec, 0xffffffff, 0x00070000,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x9200, 0xffffffff, 0x00010006,
	0x9204, 0xffffffff, 0x00090008,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0xc0000000
};
484 
/*
 * Golden register settings for Cedar:
 * {MMIO offset, AND mask, OR value} triples, applied by
 * radeon_program_register_sequence() from
 * evergreen_init_golden_registers().
 */
static const u32 cedar_golden_registers[] =
{
	0x3f90, 0xffff0000, 0xff000000,
	0x9148, 0xffff0000, 0xff000000,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0x9b7c, 0xffffffff, 0x00000000,
	0x8a14, 0xffffffff, 0x00000007,
	0x8b10, 0xffffffff, 0x00000000,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0xffffffff, 0x000000c2,
	0x88d4, 0xffffffff, 0x00000000,
	0x8974, 0xffffffff, 0x00000000,
	0xc78, 0x00000080, 0x00000080,
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0xffffffff, 0x001000f0,
	0x6104, 0x01000300, 0x00000000,
	0x5bc0, 0x00300000, 0x00000000,
	0x7030, 0xffffffff, 0x00000011,
	0x7c30, 0xffffffff, 0x00000011,
	0x10830, 0xffffffff, 0x00000011,
	0x11430, 0xffffffff, 0x00000011,
	0xd02c, 0xffffffff, 0x08421000,
	0x240c, 0xffffffff, 0x00000380,
	0x8b24, 0xffffffff, 0x00ff0fff,
	0x28a4c, 0x06000000, 0x06000000,
	0x10c, 0x00000001, 0x00000001,
	0x8d00, 0xffffffff, 0x100e4848,
	0x8d04, 0xffffffff, 0x00164745,
	0x8c00, 0xffffffff, 0xe4000003,
	0x8c04, 0xffffffff, 0x40600060,
	0x8c08, 0xffffffff, 0x001c001c,
	0x8cf0, 0xffffffff, 0x08e00410,
	0x8c20, 0xffffffff, 0x00800080,
	0x8c24, 0xffffffff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0xffffffff, 0x00001010,
	0x28350, 0xffffffff, 0x00000000,
	0xa008, 0xffffffff, 0x00010000,
	0x5c4, 0xffffffff, 0x00000001,
	0x9508, 0xffffffff, 0x00000002
};
527 
/*
 * Medium-grain clock gating init for Cedar:
 * {MMIO offset, AND mask, OR value} triples.  Presumably applied via
 * radeon_program_register_sequence() in the CHIP_CEDAR branch of
 * evergreen_init_golden_registers() — consumer not visible in this
 * part of the file.
 */
static const u32 cedar_mgcg_init[] =
{
	0x802c, 0xffffffff, 0xc0000000,
	0x5448, 0xffffffff, 0x00000100,
	0x55e4, 0xffffffff, 0x00000100,
	0x160c, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0xc164, 0xffffffff, 0x00000100,
	0x8a18, 0xffffffff, 0x00000100,
	0x897c, 0xffffffff, 0x06000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9a60, 0xffffffff, 0x00000100,
	0x9868, 0xffffffff, 0x00000100,
	0x8d58, 0xffffffff, 0x00000100,
	0x9510, 0xffffffff, 0x00000100,
	0x949c, 0xffffffff, 0x00000100,
	0x9654, 0xffffffff, 0x00000100,
	0x9030, 0xffffffff, 0x00000100,
	0x9034, 0xffffffff, 0x00000100,
	0x9038, 0xffffffff, 0x00000100,
	0x903c, 0xffffffff, 0x00000100,
	0x9040, 0xffffffff, 0x00000100,
	0xa200, 0xffffffff, 0x00000100,
	0xa204, 0xffffffff, 0x00000100,
	0xa208, 0xffffffff, 0x00000100,
	0xa20c, 0xffffffff, 0x00000100,
	0x971c, 0xffffffff, 0x00000100,
	0x977c, 0xffffffff, 0x00000100,
	0x3f80, 0xffffffff, 0x00000100,
	0xa210, 0xffffffff, 0x00000100,
	0xa214, 0xffffffff, 0x00000100,
	0x4d8, 0xffffffff, 0x00000100,
	0x9784, 0xffffffff, 0x00000100,
	0x9698, 0xffffffff, 0x00000100,
	0x4d4, 0xffffffff, 0x00000200,
	0x30cc, 0xffffffff, 0x00000100,
	0xd0c0, 0xffffffff, 0xff000100,
	0x802c, 0xffffffff, 0x40000000,
	0x915c, 0xffffffff, 0x00010000,
	0x9178, 0xffffffff, 0x00050000,
	0x917c, 0xffffffff, 0x00030002,
	0x918c, 0xffffffff, 0x00010004,
	0x9190, 0xffffffff, 0x00070006,
	0x9194, 0xffffffff, 0x00050000,
	0x9198, 0xffffffff, 0x00030002,
	0x91a8, 0xffffffff, 0x00010004,
	0x91ac, 0xffffffff, 0x00070006,
	0x91e8, 0xffffffff, 0x00000000,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0xc0000000
};
581 
/*
 * Medium-grain clock gating init for Juniper:
 * {MMIO offset, AND mask, OR value} triples, applied by
 * radeon_program_register_sequence() from
 * evergreen_init_golden_registers().
 */
static const u32 juniper_mgcg_init[] =
{
	0x802c, 0xffffffff, 0xc0000000,
	0x5448, 0xffffffff, 0x00000100,
	0x55e4, 0xffffffff, 0x00000100,
	0x160c, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0xc164, 0xffffffff, 0x00000100,
	0x8a18, 0xffffffff, 0x00000100,
	0x897c, 0xffffffff, 0x06000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9a60, 0xffffffff, 0x00000100,
	0x9868, 0xffffffff, 0x00000100,
	0x8d58, 0xffffffff, 0x00000100,
	0x9510, 0xffffffff, 0x00000100,
	0x949c, 0xffffffff, 0x00000100,
	0x9654, 0xffffffff, 0x00000100,
	0x9030, 0xffffffff, 0x00000100,
	0x9034, 0xffffffff, 0x00000100,
	0x9038, 0xffffffff, 0x00000100,
	0x903c, 0xffffffff, 0x00000100,
	0x9040, 0xffffffff, 0x00000100,
	0xa200, 0xffffffff, 0x00000100,
	0xa204, 0xffffffff, 0x00000100,
	0xa208, 0xffffffff, 0x00000100,
	0xa20c, 0xffffffff, 0x00000100,
	0x971c, 0xffffffff, 0x00000100,
	0xd0c0, 0xffffffff, 0xff000100,
	0x802c, 0xffffffff, 0x40000000,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x9178, 0xffffffff, 0x00070000,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x918c, 0xffffffff, 0x00010006,
	0x9190, 0xffffffff, 0x00090008,
	0x9194, 0xffffffff, 0x00070000,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91a8, 0xffffffff, 0x00010006,
	0x91ac, 0xffffffff, 0x00090008,
	0x91b0, 0xffffffff, 0x00070000,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91c4, 0xffffffff, 0x00010006,
	0x91c8, 0xffffffff, 0x00090008,
	0x91cc, 0xffffffff, 0x00070000,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91e0, 0xffffffff, 0x00010006,
	0x91e4, 0xffffffff, 0x00090008,
	0x91e8, 0xffffffff, 0x00000000,
	0x91ec, 0xffffffff, 0x00070000,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x9200, 0xffffffff, 0x00010006,
	0x9204, 0xffffffff, 0x00090008,
	0x9208, 0xffffffff, 0x00070000,
	0x920c, 0xffffffff, 0x00030002,
	0x9210, 0xffffffff, 0x00050004,
	0x921c, 0xffffffff, 0x00010006,
	0x9220, 0xffffffff, 0x00090008,
	0x9224, 0xffffffff, 0x00070000,
	0x9228, 0xffffffff, 0x00030002,
	0x922c, 0xffffffff, 0x00050004,
	0x9238, 0xffffffff, 0x00010006,
	0x923c, 0xffffffff, 0x00090008,
	0x9240, 0xffffffff, 0x00070000,
	0x9244, 0xffffffff, 0x00030002,
	0x9248, 0xffffffff, 0x00050004,
	0x9254, 0xffffffff, 0x00010006,
	0x9258, 0xffffffff, 0x00090008,
	0x925c, 0xffffffff, 0x00070000,
	0x9260, 0xffffffff, 0x00030002,
	0x9264, 0xffffffff, 0x00050004,
	0x9270, 0xffffffff, 0x00010006,
	0x9274, 0xffffffff, 0x00090008,
	0x9278, 0xffffffff, 0x00070000,
	0x927c, 0xffffffff, 0x00030002,
	0x9280, 0xffffffff, 0x00050004,
	0x928c, 0xffffffff, 0x00010006,
	0x9290, 0xffffffff, 0x00090008,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0xc0000000,
	0x977c, 0xffffffff, 0x00000100,
	0x3f80, 0xffffffff, 0x00000100,
	0xa210, 0xffffffff, 0x00000100,
	0xa214, 0xffffffff, 0x00000100,
	0x4d8, 0xffffffff, 0x00000100,
	0x9784, 0xffffffff, 0x00000100,
	0x9698, 0xffffffff, 0x00000100,
	0x4d4, 0xffffffff, 0x00000200,
	0x30cc, 0xffffffff, 0x00000100,
	0x802c, 0xffffffff, 0xc0000000
};
679 
/*
 * Golden register settings for SuperSumo:
 * {MMIO offset, AND mask, OR value} triples.  Presumably applied via
 * radeon_program_register_sequence() later in
 * evergreen_init_golden_registers() — consumer not visible in this
 * part of the file.
 */
static const u32 supersumo_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5c4, 0xffffffff, 0x00000001,
	0x7030, 0xffffffff, 0x00000011,
	0x7c30, 0xffffffff, 0x00000011,
	0x6104, 0x01000300, 0x00000000,
	0x5bc0, 0x00300000, 0x00000000,
	0x8c04, 0xffffffff, 0x40600060,
	0x8c08, 0xffffffff, 0x001c001c,
	0x8c20, 0xffffffff, 0x00800080,
	0x8c24, 0xffffffff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0xffffffff, 0x00001010,
	0x918c, 0xffffffff, 0x00010006,
	0x91a8, 0xffffffff, 0x00010006,
	0x91c4, 0xffffffff, 0x00010006,
	0x91e0, 0xffffffff, 0x00010006,
	0x9200, 0xffffffff, 0x00010006,
	0x9150, 0xffffffff, 0x6e944040,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x3f90, 0xffff0000, 0xff000000,
	0x9178, 0xffffffff, 0x00070000,
	0x9194, 0xffffffff, 0x00070000,
	0x91b0, 0xffffffff, 0x00070000,
	0x91cc, 0xffffffff, 0x00070000,
	0x91ec, 0xffffffff, 0x00070000,
	0x9148, 0xffff0000, 0xff000000,
	0x9190, 0xffffffff, 0x00090008,
	0x91ac, 0xffffffff, 0x00090008,
	0x91c8, 0xffffffff, 0x00090008,
	0x91e4, 0xffffffff, 0x00090008,
	0x9204, 0xffffffff, 0x00090008,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0x929c, 0xffffffff, 0x00000001,
	0x8a18, 0xffffffff, 0x00000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0x9b7c, 0xffffffff, 0x00000000,
	0x8030, 0xffffffff, 0x0000100a,
	0x8a14, 0xffffffff, 0x00000007,
	0x8b24, 0xffffffff, 0x00ff0fff,
	0x8b10, 0xffffffff, 0x00000000,
	0x28a4c, 0x06000000, 0x06000000,
	0x4d8, 0xffffffff, 0x00000100,
	0x913c, 0xffff000f, 0x0100000a,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0xffffffff, 0x000000c2,
	0x88d4, 0xffffffff, 0x00000010,
	0x8974, 0xffffffff, 0x00000000,
	0xc78, 0x00000080, 0x00000080,
	0x5e78, 0xffffffff, 0x001000f0,
	0xd02c, 0xffffffff, 0x08421000,
	0xa008, 0xffffffff, 0x00010000,
	0x8d00, 0xffffffff, 0x100e4848,
	0x8d04, 0xffffffff, 0x00164745,
	0x8c00, 0xffffffff, 0xe4000003,
	0x8cf0, 0x1fffffff, 0x08e00620,
	0x28350, 0xffffffff, 0x00000000,
	0x9508, 0xffffffff, 0x00000002
};
754 
/*
 * Sumo-specific golden register deltas:
 * {MMIO offset, AND mask, OR value} triples.  Presumably applied in
 * addition to supersumo_golden_registers later in
 * evergreen_init_golden_registers() — consumer not visible in this
 * part of the file.
 */
static const u32 sumo_golden_registers[] =
{
	0x900c, 0x00ffffff, 0x0017071f,
	0x8c18, 0xffffffff, 0x10101060,
	0x8c1c, 0xffffffff, 0x00001010,
	0x8c30, 0x0000000f, 0x00000005,
	0x9688, 0x0000000f, 0x00000007
};
763 
/*
 * Golden register settings for Wrestler (Palm):
 * {MMIO offset, AND mask, OR value} triples.  Presumably applied via
 * radeon_program_register_sequence() later in
 * evergreen_init_golden_registers() — consumer not visible in this
 * part of the file.
 */
static const u32 wrestler_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5c4, 0xffffffff, 0x00000001,
	0x7030, 0xffffffff, 0x00000011,
	0x7c30, 0xffffffff, 0x00000011,
	0x6104, 0x01000300, 0x00000000,
	0x5bc0, 0x00300000, 0x00000000,
	0x918c, 0xffffffff, 0x00010006,
	0x91a8, 0xffffffff, 0x00010006,
	0x9150, 0xffffffff, 0x6e944040,
	0x917c, 0xffffffff, 0x00030002,
	0x9198, 0xffffffff, 0x00030002,
	0x915c, 0xffffffff, 0x00010000,
	0x3f90, 0xffff0000, 0xff000000,
	0x9178, 0xffffffff, 0x00070000,
	0x9194, 0xffffffff, 0x00070000,
	0x9148, 0xffff0000, 0xff000000,
	0x9190, 0xffffffff, 0x00090008,
	0x91ac, 0xffffffff, 0x00090008,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0x929c, 0xffffffff, 0x00000001,
	0x8a18, 0xffffffff, 0x00000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9b7c, 0xffffffff, 0x00000000,
	0x8030, 0xffffffff, 0x0000100a,
	0x8a14, 0xffffffff, 0x00000001,
	0x8b24, 0xffffffff, 0x00ff0fff,
	0x8b10, 0xffffffff, 0x00000000,
	0x28a4c, 0x06000000, 0x06000000,
	0x4d8, 0xffffffff, 0x00000100,
	0x913c, 0xffff000f, 0x0100000a,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0xffffffff, 0x000000c2,
	0x88d4, 0xffffffff, 0x00000010,
	0x8974, 0xffffffff, 0x00000000,
	0xc78, 0x00000080, 0x00000080,
	0x5e78, 0xffffffff, 0x001000f0,
	0xd02c, 0xffffffff, 0x08421000,
	0xa008, 0xffffffff, 0x00010000,
	0x8d00, 0xffffffff, 0x100e4848,
	0x8d04, 0xffffffff, 0x00164745,
	0x8c00, 0xffffffff, 0xe4000003,
	0x8cf0, 0x1fffffff, 0x08e00410,
	0x28350, 0xffffffff, 0x00000000,
	0x9508, 0xffffffff, 0x00000002,
	0x900c, 0xffffffff, 0x0017071f,
	0x8c18, 0xffffffff, 0x10101060,
	0x8c1c, 0xffffffff, 0x00001010
};
816 
/*
 * Golden register settings for Barts (Northern Islands):
 * {MMIO offset, AND mask, OR value} triples.  Presumably applied via
 * radeon_program_register_sequence() later in this file — consumer
 * not visible in this part of the file.
 */
static const u32 barts_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0x8f311ff1, 0x001000f0,
	0x3f90, 0xffff0000, 0xff000000,
	0x9148, 0xffff0000, 0xff000000,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0xc78, 0x00000080, 0x00000080,
	0xbd4, 0x70073777, 0x00010001,
	0xd02c, 0xbfffff1f, 0x08421000,
	0xd0b8, 0x03773777, 0x02011003,
	0x5bc0, 0x00200000, 0x50100000,
	0x98f8, 0x33773777, 0x02011003,
	0x98fc, 0xffffffff, 0x76543210,
	0x7030, 0x31000311, 0x00000011,
	0x2f48, 0x00000007, 0x02011003,
	0x6b28, 0x00000010, 0x00000012,
	0x7728, 0x00000010, 0x00000012,
	0x10328, 0x00000010, 0x00000012,
	0x10f28, 0x00000010, 0x00000012,
	0x11b28, 0x00000010, 0x00000012,
	0x12728, 0x00000010, 0x00000012,
	0x240c, 0x000007ff, 0x00000380,
	0x8a14, 0xf000001f, 0x00000007,
	0x8b24, 0x3fff3fff, 0x00ff0fff,
	0x8b10, 0x0000ff0f, 0x00000000,
	0x28a4c, 0x07ffffff, 0x06000000,
	0x10c, 0x00000001, 0x00010003,
	0xa02c, 0xffffffff, 0x0000009b,
	0x913c, 0x0000000f, 0x0100000a,
	0x8d00, 0xffff7f7f, 0x100e4848,
	0x8d04, 0x00ffffff, 0x00164745,
	0x8c00, 0xfffc0003, 0xe4000003,
	0x8c04, 0xf8ff00ff, 0x40600060,
	0x8c08, 0x00ff00ff, 0x001c001c,
	0x8cf0, 0x1fff1fff, 0x08e00620,
	0x8c20, 0x0fff0fff, 0x00800080,
	0x8c24, 0x0fff0fff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0x0000ffff, 0x00001010,
	0x28350, 0x00000f01, 0x00000000,
	0x9508, 0x3700001f, 0x00000002,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0x001f3ae3, 0x000000c2,
	0x88d4, 0x0000001f, 0x00000010,
	0x8974, 0xffffffff, 0x00000000
};
865 
/*
 * Golden register settings for Turks (Northern Islands):
 * {MMIO offset, AND mask, OR value} triples.  Presumably applied via
 * radeon_program_register_sequence() later in this file — consumer
 * not visible in this part of the file.
 */
static const u32 turks_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0x8f311ff1, 0x001000f0,
	0x8c8, 0x00003000, 0x00001070,
	0x8cc, 0x000fffff, 0x00040035,
	0x3f90, 0xffff0000, 0xfff00000,
	0x9148, 0xffff0000, 0xfff00000,
	0x3f94, 0xffff0000, 0xfff00000,
	0x914c, 0xffff0000, 0xfff00000,
	0xc78, 0x00000080, 0x00000080,
	0xbd4, 0x00073007, 0x00010002,
	0xd02c, 0xbfffff1f, 0x08421000,
	0xd0b8, 0x03773777, 0x02010002,
	0x5bc0, 0x00200000, 0x50100000,
	0x98f8, 0x33773777, 0x00010002,
	0x98fc, 0xffffffff, 0x33221100,
	0x7030, 0x31000311, 0x00000011,
	0x2f48, 0x33773777, 0x00010002,
	0x6b28, 0x00000010, 0x00000012,
	0x7728, 0x00000010, 0x00000012,
	0x10328, 0x00000010, 0x00000012,
	0x10f28, 0x00000010, 0x00000012,
	0x11b28, 0x00000010, 0x00000012,
	0x12728, 0x00000010, 0x00000012,
	0x240c, 0x000007ff, 0x00000380,
	0x8a14, 0xf000001f, 0x00000007,
	0x8b24, 0x3fff3fff, 0x00ff0fff,
	0x8b10, 0x0000ff0f, 0x00000000,
	0x28a4c, 0x07ffffff, 0x06000000,
	0x10c, 0x00000001, 0x00010003,
	0xa02c, 0xffffffff, 0x0000009b,
	0x913c, 0x0000000f, 0x0100000a,
	0x8d00, 0xffff7f7f, 0x100e4848,
	0x8d04, 0x00ffffff, 0x00164745,
	0x8c00, 0xfffc0003, 0xe4000003,
	0x8c04, 0xf8ff00ff, 0x40600060,
	0x8c08, 0x00ff00ff, 0x001c001c,
	0x8cf0, 0x1fff1fff, 0x08e00410,
	0x8c20, 0x0fff0fff, 0x00800080,
	0x8c24, 0x0fff0fff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0x0000ffff, 0x00001010,
	0x28350, 0x00000f01, 0x00000000,
	0x9508, 0x3700001f, 0x00000002,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0x001f3ae3, 0x000000c2,
	0x88d4, 0x0000001f, 0x00000010,
	0x8974, 0xffffffff, 0x00000000
};
916 
/*
 * Golden register settings for Caicos (Northern Islands):
 * {MMIO offset, AND mask, OR value} triples.  Presumably applied via
 * radeon_program_register_sequence() later in this file — consumer
 * not visible in this part of the file.
 */
static const u32 caicos_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0x8f311ff1, 0x001000f0,
	0x8c8, 0x00003420, 0x00001450,
	0x8cc, 0x000fffff, 0x00040035,
	0x3f90, 0xffff0000, 0xfffc0000,
	0x9148, 0xffff0000, 0xfffc0000,
	0x3f94, 0xffff0000, 0xfffc0000,
	0x914c, 0xffff0000, 0xfffc0000,
	0xc78, 0x00000080, 0x00000080,
	0xbd4, 0x00073007, 0x00010001,
	0xd02c, 0xbfffff1f, 0x08421000,
	0xd0b8, 0x03773777, 0x02010001,
	0x5bc0, 0x00200000, 0x50100000,
	0x98f8, 0x33773777, 0x02010001,
	0x98fc, 0xffffffff, 0x33221100,
	0x7030, 0x31000311, 0x00000011,
	0x2f48, 0x33773777, 0x02010001,
	0x6b28, 0x00000010, 0x00000012,
	0x7728, 0x00000010, 0x00000012,
	0x10328, 0x00000010, 0x00000012,
	0x10f28, 0x00000010, 0x00000012,
	0x11b28, 0x00000010, 0x00000012,
	0x12728, 0x00000010, 0x00000012,
	0x240c, 0x000007ff, 0x00000380,
	0x8a14, 0xf000001f, 0x00000001,
	0x8b24, 0x3fff3fff, 0x00ff0fff,
	0x8b10, 0x0000ff0f, 0x00000000,
	0x28a4c, 0x07ffffff, 0x06000000,
	0x10c, 0x00000001, 0x00010003,
	0xa02c, 0xffffffff, 0x0000009b,
	0x913c, 0x0000000f, 0x0100000a,
	0x8d00, 0xffff7f7f, 0x100e4848,
	0x8d04, 0x00ffffff, 0x00164745,
	0x8c00, 0xfffc0003, 0xe4000003,
	0x8c04, 0xf8ff00ff, 0x40600060,
	0x8c08, 0x00ff00ff, 0x001c001c,
	0x8cf0, 0x1fff1fff, 0x08e00410,
	0x8c20, 0x0fff0fff, 0x00800080,
	0x8c24, 0x0fff0fff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0x0000ffff, 0x00001010,
	0x28350, 0x00000f01, 0x00000000,
	0x9508, 0x3700001f, 0x00000002,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0x001f3ae3, 0x000000c2,
	0x88d4, 0x0000001f, 0x00000010,
	0x8974, 0xffffffff, 0x00000000
};
967 
968 static void evergreen_init_golden_registers(struct radeon_device *rdev)
969 {
970 	switch (rdev->family) {
971 	case CHIP_CYPRESS:
972 	case CHIP_HEMLOCK:
973 		radeon_program_register_sequence(rdev,
974 						 evergreen_golden_registers,
975 						 (const u32)ARRAY_SIZE(evergreen_golden_registers));
976 		radeon_program_register_sequence(rdev,
977 						 evergreen_golden_registers2,
978 						 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
979 		radeon_program_register_sequence(rdev,
980 						 cypress_mgcg_init,
981 						 (const u32)ARRAY_SIZE(cypress_mgcg_init));
982 		break;
983 	case CHIP_JUNIPER:
984 		radeon_program_register_sequence(rdev,
985 						 evergreen_golden_registers,
986 						 (const u32)ARRAY_SIZE(evergreen_golden_registers));
987 		radeon_program_register_sequence(rdev,
988 						 evergreen_golden_registers2,
989 						 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
990 		radeon_program_register_sequence(rdev,
991 						 juniper_mgcg_init,
992 						 (const u32)ARRAY_SIZE(juniper_mgcg_init));
993 		break;
994 	case CHIP_REDWOOD:
995 		radeon_program_register_sequence(rdev,
996 						 evergreen_golden_registers,
997 						 (const u32)ARRAY_SIZE(evergreen_golden_registers));
998 		radeon_program_register_sequence(rdev,
999 						 evergreen_golden_registers2,
1000 						 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
1001 		radeon_program_register_sequence(rdev,
1002 						 redwood_mgcg_init,
1003 						 (const u32)ARRAY_SIZE(redwood_mgcg_init));
1004 		break;
1005 	case CHIP_CEDAR:
1006 		radeon_program_register_sequence(rdev,
1007 						 cedar_golden_registers,
1008 						 (const u32)ARRAY_SIZE(cedar_golden_registers));
1009 		radeon_program_register_sequence(rdev,
1010 						 evergreen_golden_registers2,
1011 						 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
1012 		radeon_program_register_sequence(rdev,
1013 						 cedar_mgcg_init,
1014 						 (const u32)ARRAY_SIZE(cedar_mgcg_init));
1015 		break;
1016 	case CHIP_PALM:
1017 		radeon_program_register_sequence(rdev,
1018 						 wrestler_golden_registers,
1019 						 (const u32)ARRAY_SIZE(wrestler_golden_registers));
1020 		break;
1021 	case CHIP_SUMO:
1022 		radeon_program_register_sequence(rdev,
1023 						 supersumo_golden_registers,
1024 						 (const u32)ARRAY_SIZE(supersumo_golden_registers));
1025 		break;
1026 	case CHIP_SUMO2:
1027 		radeon_program_register_sequence(rdev,
1028 						 supersumo_golden_registers,
1029 						 (const u32)ARRAY_SIZE(supersumo_golden_registers));
1030 		radeon_program_register_sequence(rdev,
1031 						 sumo_golden_registers,
1032 						 (const u32)ARRAY_SIZE(sumo_golden_registers));
1033 		break;
1034 	case CHIP_BARTS:
1035 		radeon_program_register_sequence(rdev,
1036 						 barts_golden_registers,
1037 						 (const u32)ARRAY_SIZE(barts_golden_registers));
1038 		break;
1039 	case CHIP_TURKS:
1040 		radeon_program_register_sequence(rdev,
1041 						 turks_golden_registers,
1042 						 (const u32)ARRAY_SIZE(turks_golden_registers));
1043 		break;
1044 	case CHIP_CAICOS:
1045 		radeon_program_register_sequence(rdev,
1046 						 caicos_golden_registers,
1047 						 (const u32)ARRAY_SIZE(caicos_golden_registers));
1048 		break;
1049 	default:
1050 		break;
1051 	}
1052 }
1053 
1054 /**
1055  * evergreen_get_allowed_info_register - fetch the register for the info ioctl
1056  *
1057  * @rdev: radeon_device pointer
1058  * @reg: register offset in bytes
1059  * @val: register value
1060  *
1061  * Returns 0 for success or -EINVAL for an invalid register
1062  *
1063  */
1064 int evergreen_get_allowed_info_register(struct radeon_device *rdev,
1065 					u32 reg, u32 *val)
1066 {
1067 	switch (reg) {
1068 	case GRBM_STATUS:
1069 	case GRBM_STATUS_SE0:
1070 	case GRBM_STATUS_SE1:
1071 	case SRBM_STATUS:
1072 	case SRBM_STATUS2:
1073 	case DMA_STATUS_REG:
1074 	case UVD_STATUS:
1075 		*val = RREG32(reg);
1076 		return 0;
1077 	default:
1078 		return -EINVAL;
1079 	}
1080 }
1081 
1082 void evergreen_tiling_fields(unsigned tiling_flags, unsigned *bankw,
1083 			     unsigned *bankh, unsigned *mtaspect,
1084 			     unsigned *tile_split)
1085 {
1086 	*bankw = (tiling_flags >> RADEON_TILING_EG_BANKW_SHIFT) & RADEON_TILING_EG_BANKW_MASK;
1087 	*bankh = (tiling_flags >> RADEON_TILING_EG_BANKH_SHIFT) & RADEON_TILING_EG_BANKH_MASK;
1088 	*mtaspect = (tiling_flags >> RADEON_TILING_EG_MACRO_TILE_ASPECT_SHIFT) & RADEON_TILING_EG_MACRO_TILE_ASPECT_MASK;
1089 	*tile_split = (tiling_flags >> RADEON_TILING_EG_TILE_SPLIT_SHIFT) & RADEON_TILING_EG_TILE_SPLIT_MASK;
1090 	switch (*bankw) {
1091 	default:
1092 	case 1: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_1; break;
1093 	case 2: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_2; break;
1094 	case 4: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_4; break;
1095 	case 8: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_8; break;
1096 	}
1097 	switch (*bankh) {
1098 	default:
1099 	case 1: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_1; break;
1100 	case 2: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_2; break;
1101 	case 4: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_4; break;
1102 	case 8: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_8; break;
1103 	}
1104 	switch (*mtaspect) {
1105 	default:
1106 	case 1: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_1; break;
1107 	case 2: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_2; break;
1108 	case 4: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_4; break;
1109 	case 8: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_8; break;
1110 	}
1111 }
1112 
1113 static int sumo_set_uvd_clock(struct radeon_device *rdev, u32 clock,
1114 			      u32 cntl_reg, u32 status_reg)
1115 {
1116 	int r, i;
1117 	struct atom_clock_dividers dividers;
1118 
1119 	r = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
1120 					   clock, false, &dividers);
1121 	if (r)
1122 		return r;
1123 
1124 	WREG32_P(cntl_reg, dividers.post_div, ~(DCLK_DIR_CNTL_EN|DCLK_DIVIDER_MASK));
1125 
1126 	for (i = 0; i < 100; i++) {
1127 		if (RREG32(status_reg) & DCLK_STATUS)
1128 			break;
1129 		mdelay(10);
1130 	}
1131 	if (i == 100)
1132 		return -ETIMEDOUT;
1133 
1134 	return 0;
1135 }
1136 
1137 int sumo_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
1138 {
1139 	int r = 0;
1140 	u32 cg_scratch = RREG32(CG_SCRATCH1);
1141 
1142 	r = sumo_set_uvd_clock(rdev, vclk, CG_VCLK_CNTL, CG_VCLK_STATUS);
1143 	if (r)
1144 		goto done;
1145 	cg_scratch &= 0xffff0000;
1146 	cg_scratch |= vclk / 100; /* Mhz */
1147 
1148 	r = sumo_set_uvd_clock(rdev, dclk, CG_DCLK_CNTL, CG_DCLK_STATUS);
1149 	if (r)
1150 		goto done;
1151 	cg_scratch &= 0x0000ffff;
1152 	cg_scratch |= (dclk / 100) << 16; /* Mhz */
1153 
1154 done:
1155 	WREG32(CG_SCRATCH1, cg_scratch);
1156 
1157 	return r;
1158 }
1159 
/**
 * evergreen_set_uvd_clocks - program the UVD VCLK/DCLK via the UPLL
 *
 * @rdev: radeon_device pointer
 * @vclk: requested UVD video clock (units per radeon_uvd_calc_upll_dividers;
 *        presumably kHz -- TODO confirm)
 * @dclk: requested UVD decode clock (same units)
 *
 * Switches the UVD clocks to bypass, reprograms the UPLL dividers for the
 * requested frequencies, then switches back to the PLL outputs.  Passing 0
 * for either clock leaves the PLL in bypass and puts it to sleep.
 * Returns 0 on success or a negative error code from the divider
 * calculation / PLL control requests.  Note the strict ordering of the
 * register writes below -- it follows the hardware programming sequence.
 */
int evergreen_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
{
	/* start off with something large */
	unsigned fb_div = 0, vclk_div = 0, dclk_div = 0;
	int r;

	/* bypass vclk and dclk with bclk */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		VCLK_SRC_SEL(1) | DCLK_SRC_SEL(1),
		~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));

	/* put PLL in bypass mode */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_BYPASS_EN_MASK, ~UPLL_BYPASS_EN_MASK);

	if (!vclk || !dclk) {
		/* keep the Bypass mode, put PLL to sleep */
		WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
		return 0;
	}

	/* compute feedback and post dividers for the requested clocks */
	r = radeon_uvd_calc_upll_dividers(rdev, vclk, dclk, 125000, 250000,
					  16384, 0x03FFFFFF, 0, 128, 5,
					  &fb_div, &vclk_div, &dclk_div);
	if (r)
		return r;

	/* set VCO_MODE to 1 */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_VCO_MODE_MASK, ~UPLL_VCO_MODE_MASK);

	/* toggle UPLL_SLEEP to 1 then back to 0 */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_SLEEP_MASK);

	/* deassert UPLL_RESET */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);

	mdelay(1);

	r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
	if (r)
		return r;

	/* assert UPLL_RESET again */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_RESET_MASK, ~UPLL_RESET_MASK);

	/* disable spread spectrum. */
	WREG32_P(CG_UPLL_SPREAD_SPECTRUM, 0, ~SSEN_MASK);

	/* set feedback divider */
	WREG32_P(CG_UPLL_FUNC_CNTL_3, UPLL_FB_DIV(fb_div), ~UPLL_FB_DIV_MASK);

	/* set ref divider to 0 */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_REF_DIV_MASK);

	/* NOTE(review): ISPARE9 selected by fb_div threshold -- presumably a
	 * VCO range bit; confirm against AMD register docs */
	if (fb_div < 307200)
		WREG32_P(CG_UPLL_FUNC_CNTL_4, 0, ~UPLL_SPARE_ISPARE9);
	else
		WREG32_P(CG_UPLL_FUNC_CNTL_4, UPLL_SPARE_ISPARE9, ~UPLL_SPARE_ISPARE9);

	/* set PDIV_A and PDIV_B */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		UPLL_PDIV_A(vclk_div) | UPLL_PDIV_B(dclk_div),
		~(UPLL_PDIV_A_MASK | UPLL_PDIV_B_MASK));

	/* give the PLL some time to settle */
	mdelay(15);

	/* deassert PLL_RESET */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);

	mdelay(15);

	/* switch from bypass mode to normal mode */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_BYPASS_EN_MASK);

	r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
	if (r)
		return r;

	/* switch VCLK and DCLK selection */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		VCLK_SRC_SEL(2) | DCLK_SRC_SEL(2),
		~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));

	mdelay(100);

	return 0;
}
1248 
1249 void evergreen_fix_pci_max_read_req_size(struct radeon_device *rdev)
1250 {
1251 	int readrq;
1252 	u16 v;
1253 
1254 	readrq = pcie_get_readrq(rdev->pdev);
1255 	v = ffs(readrq) - 8;
1256 	/* if bios or OS sets MAX_READ_REQUEST_SIZE to an invalid value, fix it
1257 	 * to avoid hangs or perfomance issues
1258 	 */
1259 	if ((v == 0) || (v == 6) || (v == 7))
1260 		pcie_set_readrq(rdev->pdev, 512);
1261 }
1262 
1263 void dce4_program_fmt(struct drm_encoder *encoder)
1264 {
1265 	struct drm_device *dev = encoder->dev;
1266 	struct radeon_device *rdev = dev->dev_private;
1267 	struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
1268 	struct radeon_crtc *radeon_crtc = to_radeon_crtc(encoder->crtc);
1269 	struct drm_connector *connector = radeon_get_connector_for_encoder(encoder);
1270 	int bpc = 0;
1271 	u32 tmp = 0;
1272 	enum radeon_connector_dither dither = RADEON_FMT_DITHER_DISABLE;
1273 
1274 	if (connector) {
1275 		struct radeon_connector *radeon_connector = to_radeon_connector(connector);
1276 		bpc = radeon_get_monitor_bpc(connector);
1277 		dither = radeon_connector->dither;
1278 	}
1279 
1280 	/* LVDS/eDP FMT is set up by atom */
1281 	if (radeon_encoder->devices & ATOM_DEVICE_LCD_SUPPORT)
1282 		return;
1283 
1284 	/* not needed for analog */
1285 	if ((radeon_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1) ||
1286 	    (radeon_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2))
1287 		return;
1288 
1289 	if (bpc == 0)
1290 		return;
1291 
1292 	switch (bpc) {
1293 	case 6:
1294 		if (dither == RADEON_FMT_DITHER_ENABLE)
1295 			/* XXX sort out optimal dither settings */
1296 			tmp |= (FMT_FRAME_RANDOM_ENABLE | FMT_HIGHPASS_RANDOM_ENABLE |
1297 				FMT_SPATIAL_DITHER_EN);
1298 		else
1299 			tmp |= FMT_TRUNCATE_EN;
1300 		break;
1301 	case 8:
1302 		if (dither == RADEON_FMT_DITHER_ENABLE)
1303 			/* XXX sort out optimal dither settings */
1304 			tmp |= (FMT_FRAME_RANDOM_ENABLE | FMT_HIGHPASS_RANDOM_ENABLE |
1305 				FMT_RGB_RANDOM_ENABLE |
1306 				FMT_SPATIAL_DITHER_EN | FMT_SPATIAL_DITHER_DEPTH);
1307 		else
1308 			tmp |= (FMT_TRUNCATE_EN | FMT_TRUNCATE_DEPTH);
1309 		break;
1310 	case 10:
1311 	default:
1312 		/* not needed */
1313 		break;
1314 	}
1315 
1316 	WREG32(FMT_BIT_DEPTH_CONTROL + radeon_crtc->crtc_offset, tmp);
1317 }
1318 
1319 static bool dce4_is_in_vblank(struct radeon_device *rdev, int crtc)
1320 {
1321 	if (RREG32(EVERGREEN_CRTC_STATUS + crtc_offsets[crtc]) & EVERGREEN_CRTC_V_BLANK)
1322 		return true;
1323 	else
1324 		return false;
1325 }
1326 
1327 static bool dce4_is_counter_moving(struct radeon_device *rdev, int crtc)
1328 {
1329 	u32 pos1, pos2;
1330 
1331 	pos1 = RREG32(EVERGREEN_CRTC_STATUS_POSITION + crtc_offsets[crtc]);
1332 	pos2 = RREG32(EVERGREEN_CRTC_STATUS_POSITION + crtc_offsets[crtc]);
1333 
1334 	if (pos1 != pos2)
1335 		return true;
1336 	else
1337 		return false;
1338 }
1339 
1340 /**
1341  * dce4_wait_for_vblank - vblank wait asic callback.
1342  *
1343  * @rdev: radeon_device pointer
1344  * @crtc: crtc to wait for vblank on
1345  *
1346  * Wait for vblank on the requested crtc (evergreen+).
1347  */
1348 void dce4_wait_for_vblank(struct radeon_device *rdev, int crtc)
1349 {
1350 	unsigned i = 0;
1351 
1352 	if (crtc >= rdev->num_crtc)
1353 		return;
1354 
1355 	if (!(RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[crtc]) & EVERGREEN_CRTC_MASTER_EN))
1356 		return;
1357 
1358 	/* depending on when we hit vblank, we may be close to active; if so,
1359 	 * wait for another frame.
1360 	 */
1361 	while (dce4_is_in_vblank(rdev, crtc)) {
1362 		if (i++ % 100 == 0) {
1363 			if (!dce4_is_counter_moving(rdev, crtc))
1364 				break;
1365 		}
1366 	}
1367 
1368 	while (!dce4_is_in_vblank(rdev, crtc)) {
1369 		if (i++ % 100 == 0) {
1370 			if (!dce4_is_counter_moving(rdev, crtc))
1371 				break;
1372 		}
1373 	}
1374 }
1375 
1376 /**
1377  * evergreen_page_flip - pageflip callback.
1378  *
1379  * @rdev: radeon_device pointer
1380  * @crtc_id: crtc to cleanup pageflip on
1381  * @crtc_base: new address of the crtc (GPU MC address)
1382  *
1383  * Triggers the actual pageflip by updating the primary
1384  * surface base address (evergreen+).
1385  */
1386 void evergreen_page_flip(struct radeon_device *rdev, int crtc_id, u64 crtc_base,
1387 			 bool async)
1388 {
1389 	struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id];
1390 
1391 	/* update the scanout addresses */
1392 	WREG32(EVERGREEN_GRPH_FLIP_CONTROL + radeon_crtc->crtc_offset,
1393 	       async ? EVERGREEN_GRPH_SURFACE_UPDATE_H_RETRACE_EN : 0);
1394 	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
1395 	       upper_32_bits(crtc_base));
1396 	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
1397 	       (u32)crtc_base);
1398 	/* post the write */
1399 	RREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset);
1400 }
1401 
1402 /**
1403  * evergreen_page_flip_pending - check if page flip is still pending
1404  *
1405  * @rdev: radeon_device pointer
1406  * @crtc_id: crtc to check
1407  *
1408  * Returns the current update pending status.
1409  */
1410 bool evergreen_page_flip_pending(struct radeon_device *rdev, int crtc_id)
1411 {
1412 	struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id];
1413 
1414 	/* Return current update_pending status: */
1415 	return !!(RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset) &
1416 		EVERGREEN_GRPH_SURFACE_UPDATE_PENDING);
1417 }
1418 
/*
 * Read the on-die thermal sensor and return the temperature in
 * millidegrees Celsius.  Juniper uses a raw ADC reading corrected by a
 * fused trim offset; other evergreen parts expose a pre-converted
 * ASIC_T field.
 */
int evergreen_get_temp(struct radeon_device *rdev)
{
	u32 temp, toffset;
	int actual_temp = 0;

	if (rdev->family == CHIP_JUNIPER) {
		toffset = (RREG32(CG_THERMAL_CTRL) & TOFFSET_MASK) >>
			TOFFSET_SHIFT;
		temp = (RREG32(CG_TS0_STATUS) & TS0_ADC_DOUT_MASK) >>
			TS0_ADC_DOUT_SHIFT;

		/* toffset is a 9-bit two's-complement trim: bit 8 set means
		 * it is negative, so subtract its magnitude (0x200 - toffset) */
		if (toffset & 0x100)
			actual_temp = temp / 2 - (0x200 - toffset);
		else
			actual_temp = temp / 2 + toffset;

		/* degrees -> millidegrees */
		actual_temp = actual_temp * 1000;

	} else {
		temp = (RREG32(CG_MULT_THERMAL_STATUS) & ASIC_T_MASK) >>
			ASIC_T_SHIFT;

		/* decode the ASIC_T field: bits 10/9 look like out-of-range
		 * flags (clamped to -256/255 here); bit 8 marks a negative
		 * 9-bit value which is sign-extended to a full int */
		if (temp & 0x400)
			actual_temp = -256;
		else if (temp & 0x200)
			actual_temp = 255;
		else if (temp & 0x100) {
			actual_temp = temp & 0x1ff;
			actual_temp |= ~0x1ff;	/* sign-extend */
		} else
			actual_temp = temp & 0xff;

		/* value is in half-degrees; convert to millidegrees */
		actual_temp = (actual_temp * 1000) / 2;
	}

	return actual_temp;
}
1457 
1458 int sumo_get_temp(struct radeon_device *rdev)
1459 {
1460 	u32 temp = RREG32(CG_THERMAL_STATUS) & 0xff;
1461 	int actual_temp = temp - 49;
1462 
1463 	return actual_temp * 1000;
1464 }
1465 
1466 /**
1467  * sumo_pm_init_profile - Initialize power profiles callback.
1468  *
1469  * @rdev: radeon_device pointer
1470  *
1471  * Initialize the power states used in profile mode
1472  * (sumo, trinity, SI).
1473  * Used for profile mode only.
1474  */
1475 void sumo_pm_init_profile(struct radeon_device *rdev)
1476 {
1477 	int idx;
1478 
1479 	/* default */
1480 	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
1481 	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
1482 	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
1483 	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 0;
1484 
1485 	/* low,mid sh/mh */
1486 	if (rdev->flags & RADEON_IS_MOBILITY)
1487 		idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
1488 	else
1489 		idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1490 
1491 	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
1492 	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
1493 	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
1494 	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
1495 
1496 	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
1497 	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
1498 	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
1499 	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
1500 
1501 	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
1502 	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
1503 	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
1504 	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 0;
1505 
1506 	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
1507 	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
1508 	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
1509 	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 0;
1510 
1511 	/* high sh/mh */
1512 	idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1513 	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
1514 	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
1515 	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
1516 	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx =
1517 		rdev->pm.power_state[idx].num_clock_modes - 1;
1518 
1519 	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
1520 	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
1521 	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
1522 	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx =
1523 		rdev->pm.power_state[idx].num_clock_modes - 1;
1524 }
1525 
1526 /**
1527  * btc_pm_init_profile - Initialize power profiles callback.
1528  *
1529  * @rdev: radeon_device pointer
1530  *
1531  * Initialize the power states used in profile mode
1532  * (BTC, cayman).
1533  * Used for profile mode only.
1534  */
1535 void btc_pm_init_profile(struct radeon_device *rdev)
1536 {
1537 	int idx;
1538 
1539 	/* default */
1540 	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
1541 	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
1542 	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
1543 	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 2;
1544 	/* starting with BTC, there is one state that is used for both
1545 	 * MH and SH.  Difference is that we always use the high clock index for
1546 	 * mclk.
1547 	 */
1548 	if (rdev->flags & RADEON_IS_MOBILITY)
1549 		idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
1550 	else
1551 		idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1552 	/* low sh */
1553 	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
1554 	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
1555 	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
1556 	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
1557 	/* mid sh */
1558 	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
1559 	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
1560 	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
1561 	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 1;
1562 	/* high sh */
1563 	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
1564 	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
1565 	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
1566 	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx = 2;
1567 	/* low mh */
1568 	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
1569 	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
1570 	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
1571 	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
1572 	/* mid mh */
1573 	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
1574 	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
1575 	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
1576 	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 1;
1577 	/* high mh */
1578 	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
1579 	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
1580 	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
1581 	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx = 2;
1582 }
1583 
1584 /**
1585  * evergreen_pm_misc - set additional pm hw parameters callback.
1586  *
1587  * @rdev: radeon_device pointer
1588  *
1589  * Set non-clock parameters associated with a power state
1590  * (voltage, etc.) (evergreen+).
1591  */
void evergreen_pm_misc(struct radeon_device *rdev)
{
	int req_ps_idx = rdev->pm.requested_power_state_index;
	int req_cm_idx = rdev->pm.requested_clock_mode_index;
	struct radeon_power_state *ps = &rdev->pm.power_state[req_ps_idx];
	struct radeon_voltage *voltage = &ps->clock_info[req_cm_idx].voltage;

	if (voltage->type == VOLTAGE_SW) {
		/* 0xff0x are flags rather than an actual voltage */
		if ((voltage->voltage & 0xff00) == 0xff00)
			return;
		/* only program vddc when it actually changes */
		if (voltage->voltage && (voltage->voltage != rdev->pm.current_vddc)) {
			radeon_atom_set_voltage(rdev, voltage->voltage, SET_VOLTAGE_TYPE_ASIC_VDDC);
			rdev->pm.current_vddc = voltage->voltage;
			DRM_DEBUG("Setting: vddc: %d\n", voltage->voltage);
		}

		/* starting with BTC, there is one state that is used for both
		 * MH and SH.  Difference is that we always use the high clock index for
		 * mclk and vddci.
		 */
		/* note: this redirects the vddci lookup below to the HIGH_MH
		 * profile's clock mode; vddc above was already programmed from
		 * the originally requested clock mode */
		if ((rdev->pm.pm_method == PM_METHOD_PROFILE) &&
		    (rdev->family >= CHIP_BARTS) &&
		    rdev->pm.active_crtc_count &&
		    ((rdev->pm.profile_index == PM_PROFILE_MID_MH_IDX) ||
		     (rdev->pm.profile_index == PM_PROFILE_LOW_MH_IDX)))
			voltage = &rdev->pm.power_state[req_ps_idx].
				clock_info[rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx].voltage;

		/* 0xff0x are flags rather than an actual voltage */
		if ((voltage->vddci & 0xff00) == 0xff00)
			return;
		/* only program vddci when it actually changes */
		if (voltage->vddci && (voltage->vddci != rdev->pm.current_vddci)) {
			radeon_atom_set_voltage(rdev, voltage->vddci, SET_VOLTAGE_TYPE_ASIC_VDDCI);
			rdev->pm.current_vddci = voltage->vddci;
			DRM_DEBUG("Setting: vddci: %d\n", voltage->vddci);
		}
	}
}
1631 
1632 /**
1633  * evergreen_pm_prepare - pre-power state change callback.
1634  *
1635  * @rdev: radeon_device pointer
1636  *
1637  * Prepare for a power state change (evergreen+).
1638  */
1639 void evergreen_pm_prepare(struct radeon_device *rdev)
1640 {
1641 	struct drm_device *ddev = rdev->ddev;
1642 	struct drm_crtc *crtc;
1643 	struct radeon_crtc *radeon_crtc;
1644 	u32 tmp;
1645 
1646 	/* disable any active CRTCs */
1647 	list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
1648 		radeon_crtc = to_radeon_crtc(crtc);
1649 		if (radeon_crtc->enabled) {
1650 			tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
1651 			tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
1652 			WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
1653 		}
1654 	}
1655 }
1656 
1657 /**
1658  * evergreen_pm_finish - post-power state change callback.
1659  *
1660  * @rdev: radeon_device pointer
1661  *
1662  * Clean up after a power state change (evergreen+).
1663  */
1664 void evergreen_pm_finish(struct radeon_device *rdev)
1665 {
1666 	struct drm_device *ddev = rdev->ddev;
1667 	struct drm_crtc *crtc;
1668 	struct radeon_crtc *radeon_crtc;
1669 	u32 tmp;
1670 
1671 	/* enable any active CRTCs */
1672 	list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
1673 		radeon_crtc = to_radeon_crtc(crtc);
1674 		if (radeon_crtc->enabled) {
1675 			tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
1676 			tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
1677 			WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
1678 		}
1679 	}
1680 }
1681 
1682 /**
1683  * evergreen_hpd_sense - hpd sense callback.
1684  *
1685  * @rdev: radeon_device pointer
1686  * @hpd: hpd (hotplug detect) pin
1687  *
1688  * Checks if a digital monitor is connected (evergreen+).
1689  * Returns true if connected, false if not connected.
1690  */
1691 bool evergreen_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd)
1692 {
1693 	bool connected = false;
1694 
1695 	switch (hpd) {
1696 	case RADEON_HPD_1:
1697 		if (RREG32(DC_HPD1_INT_STATUS) & DC_HPDx_SENSE)
1698 			connected = true;
1699 		break;
1700 	case RADEON_HPD_2:
1701 		if (RREG32(DC_HPD2_INT_STATUS) & DC_HPDx_SENSE)
1702 			connected = true;
1703 		break;
1704 	case RADEON_HPD_3:
1705 		if (RREG32(DC_HPD3_INT_STATUS) & DC_HPDx_SENSE)
1706 			connected = true;
1707 		break;
1708 	case RADEON_HPD_4:
1709 		if (RREG32(DC_HPD4_INT_STATUS) & DC_HPDx_SENSE)
1710 			connected = true;
1711 		break;
1712 	case RADEON_HPD_5:
1713 		if (RREG32(DC_HPD5_INT_STATUS) & DC_HPDx_SENSE)
1714 			connected = true;
1715 		break;
1716 	case RADEON_HPD_6:
1717 		if (RREG32(DC_HPD6_INT_STATUS) & DC_HPDx_SENSE)
1718 			connected = true;
1719 		break;
1720 	default:
1721 		break;
1722 	}
1723 
1724 	return connected;
1725 }
1726 
1727 /**
1728  * evergreen_hpd_set_polarity - hpd set polarity callback.
1729  *
1730  * @rdev: radeon_device pointer
1731  * @hpd: hpd (hotplug detect) pin
1732  *
1733  * Set the polarity of the hpd pin (evergreen+).
1734  */
1735 void evergreen_hpd_set_polarity(struct radeon_device *rdev,
1736 				enum radeon_hpd_id hpd)
1737 {
1738 	u32 tmp;
1739 	bool connected = evergreen_hpd_sense(rdev, hpd);
1740 
1741 	switch (hpd) {
1742 	case RADEON_HPD_1:
1743 		tmp = RREG32(DC_HPD1_INT_CONTROL);
1744 		if (connected)
1745 			tmp &= ~DC_HPDx_INT_POLARITY;
1746 		else
1747 			tmp |= DC_HPDx_INT_POLARITY;
1748 		WREG32(DC_HPD1_INT_CONTROL, tmp);
1749 		break;
1750 	case RADEON_HPD_2:
1751 		tmp = RREG32(DC_HPD2_INT_CONTROL);
1752 		if (connected)
1753 			tmp &= ~DC_HPDx_INT_POLARITY;
1754 		else
1755 			tmp |= DC_HPDx_INT_POLARITY;
1756 		WREG32(DC_HPD2_INT_CONTROL, tmp);
1757 		break;
1758 	case RADEON_HPD_3:
1759 		tmp = RREG32(DC_HPD3_INT_CONTROL);
1760 		if (connected)
1761 			tmp &= ~DC_HPDx_INT_POLARITY;
1762 		else
1763 			tmp |= DC_HPDx_INT_POLARITY;
1764 		WREG32(DC_HPD3_INT_CONTROL, tmp);
1765 		break;
1766 	case RADEON_HPD_4:
1767 		tmp = RREG32(DC_HPD4_INT_CONTROL);
1768 		if (connected)
1769 			tmp &= ~DC_HPDx_INT_POLARITY;
1770 		else
1771 			tmp |= DC_HPDx_INT_POLARITY;
1772 		WREG32(DC_HPD4_INT_CONTROL, tmp);
1773 		break;
1774 	case RADEON_HPD_5:
1775 		tmp = RREG32(DC_HPD5_INT_CONTROL);
1776 		if (connected)
1777 			tmp &= ~DC_HPDx_INT_POLARITY;
1778 		else
1779 			tmp |= DC_HPDx_INT_POLARITY;
1780 		WREG32(DC_HPD5_INT_CONTROL, tmp);
1781 			break;
1782 	case RADEON_HPD_6:
1783 		tmp = RREG32(DC_HPD6_INT_CONTROL);
1784 		if (connected)
1785 			tmp &= ~DC_HPDx_INT_POLARITY;
1786 		else
1787 			tmp |= DC_HPDx_INT_POLARITY;
1788 		WREG32(DC_HPD6_INT_CONTROL, tmp);
1789 		break;
1790 	default:
1791 		break;
1792 	}
1793 }
1794 
1795 /**
1796  * evergreen_hpd_init - hpd setup callback.
1797  *
1798  * @rdev: radeon_device pointer
1799  *
1800  * Setup the hpd pins used by the card (evergreen+).
1801  * Enable the pin, set the polarity, and enable the hpd interrupts.
1802  */
1803 void evergreen_hpd_init(struct radeon_device *rdev)
1804 {
1805 	struct drm_device *dev = rdev->ddev;
1806 	struct drm_connector *connector;
1807 	unsigned enabled = 0;
1808 	u32 tmp = DC_HPDx_CONNECTION_TIMER(0x9c4) |
1809 		DC_HPDx_RX_INT_TIMER(0xfa) | DC_HPDx_EN;
1810 
1811 	list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
1812 		struct radeon_connector *radeon_connector = to_radeon_connector(connector);
1813 
1814 		if (connector->connector_type == DRM_MODE_CONNECTOR_eDP ||
1815 		    connector->connector_type == DRM_MODE_CONNECTOR_LVDS) {
1816 			/* don't try to enable hpd on eDP or LVDS avoid breaking the
1817 			 * aux dp channel on imac and help (but not completely fix)
1818 			 * https://bugzilla.redhat.com/show_bug.cgi?id=726143
1819 			 * also avoid interrupt storms during dpms.
1820 			 */
1821 			continue;
1822 		}
1823 		switch (radeon_connector->hpd.hpd) {
1824 		case RADEON_HPD_1:
1825 			WREG32(DC_HPD1_CONTROL, tmp);
1826 			break;
1827 		case RADEON_HPD_2:
1828 			WREG32(DC_HPD2_CONTROL, tmp);
1829 			break;
1830 		case RADEON_HPD_3:
1831 			WREG32(DC_HPD3_CONTROL, tmp);
1832 			break;
1833 		case RADEON_HPD_4:
1834 			WREG32(DC_HPD4_CONTROL, tmp);
1835 			break;
1836 		case RADEON_HPD_5:
1837 			WREG32(DC_HPD5_CONTROL, tmp);
1838 			break;
1839 		case RADEON_HPD_6:
1840 			WREG32(DC_HPD6_CONTROL, tmp);
1841 			break;
1842 		default:
1843 			break;
1844 		}
1845 		radeon_hpd_set_polarity(rdev, radeon_connector->hpd.hpd);
1846 		if (radeon_connector->hpd.hpd != RADEON_HPD_NONE)
1847 			enabled |= 1 << radeon_connector->hpd.hpd;
1848 	}
1849 	radeon_irq_kms_enable_hpd(rdev, enabled);
1850 }
1851 
1852 /**
1853  * evergreen_hpd_fini - hpd tear down callback.
1854  *
1855  * @rdev: radeon_device pointer
1856  *
1857  * Tear down the hpd pins used by the card (evergreen+).
1858  * Disable the hpd interrupts.
1859  */
1860 void evergreen_hpd_fini(struct radeon_device *rdev)
1861 {
1862 	struct drm_device *dev = rdev->ddev;
1863 	struct drm_connector *connector;
1864 	unsigned disabled = 0;
1865 
1866 	list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
1867 		struct radeon_connector *radeon_connector = to_radeon_connector(connector);
1868 		switch (radeon_connector->hpd.hpd) {
1869 		case RADEON_HPD_1:
1870 			WREG32(DC_HPD1_CONTROL, 0);
1871 			break;
1872 		case RADEON_HPD_2:
1873 			WREG32(DC_HPD2_CONTROL, 0);
1874 			break;
1875 		case RADEON_HPD_3:
1876 			WREG32(DC_HPD3_CONTROL, 0);
1877 			break;
1878 		case RADEON_HPD_4:
1879 			WREG32(DC_HPD4_CONTROL, 0);
1880 			break;
1881 		case RADEON_HPD_5:
1882 			WREG32(DC_HPD5_CONTROL, 0);
1883 			break;
1884 		case RADEON_HPD_6:
1885 			WREG32(DC_HPD6_CONTROL, 0);
1886 			break;
1887 		default:
1888 			break;
1889 		}
1890 		if (radeon_connector->hpd.hpd != RADEON_HPD_NONE)
1891 			disabled |= 1 << radeon_connector->hpd.hpd;
1892 	}
1893 	radeon_irq_kms_disable_hpd(rdev, disabled);
1894 }
1895 
1896 /* watermark setup */
1897 
/**
 * evergreen_line_buffer_adjust - program the line buffer split for a crtc
 *
 * @rdev: radeon_device pointer
 * @radeon_crtc: crtc whose line buffer allocation is being programmed
 * @mode: mode currently set on this crtc (NULL if none)
 * @other_mode: mode set on the paired crtc (NULL if none)
 *
 * Programs DC_LB_MEMORY_SPLIT for the crtc and, on DCE4.1/DCE5 parts,
 * allocates the matching DMIF buffers.  Returns the number of line
 * buffer entries allocated to this crtc (0 if the crtc is disabled).
 */
static u32 evergreen_line_buffer_adjust(struct radeon_device *rdev,
					struct radeon_crtc *radeon_crtc,
					struct drm_display_mode *mode,
					struct drm_display_mode *other_mode)
{
	u32 tmp, buffer_alloc, i;
	u32 pipe_offset = radeon_crtc->crtc_id * 0x20;
	/*
	 * Line Buffer Setup
	 * There are 3 line buffers, each one shared by 2 display controllers.
	 * DC_LB_MEMORY_SPLIT controls how that line buffer is shared between
	 * the display controllers.  The partitioning is done via one of four
	 * preset allocations specified in bits 2:0:
	 * first display controller
	 *  0 - first half of lb (3840 * 2)
	 *  1 - first 3/4 of lb (5760 * 2)
	 *  2 - whole lb (7680 * 2), other crtc must be disabled
	 *  3 - first 1/4 of lb (1920 * 2)
	 * second display controller
	 *  4 - second half of lb (3840 * 2)
	 *  5 - second 3/4 of lb (5760 * 2)
	 *  6 - whole lb (7680 * 2), other crtc must be disabled
	 *  7 - last 1/4 of lb (1920 * 2)
	 */
	/* this can get tricky if we have two large displays on a paired group
	 * of crtcs.  Ideally for multiple large displays we'd assign them to
	 * non-linked crtcs for maximum line buffer allocation.
	 */
	if (radeon_crtc->base.enabled && mode) {
		if (other_mode) {
			tmp = 0; /* 1/2 */
			buffer_alloc = 1;
		} else {
			tmp = 2; /* whole */
			buffer_alloc = 2;
		}
	} else {
		tmp = 0;
		buffer_alloc = 0;
	}

	/* second controller of the pair uses second half of the lb */
	if (radeon_crtc->crtc_id % 2)
		tmp += 4;
	WREG32(DC_LB_MEMORY_SPLIT + radeon_crtc->crtc_offset, tmp);

	if (ASIC_IS_DCE41(rdev) || ASIC_IS_DCE5(rdev)) {
		/* request the DMIF buffer allocation and poll until the
		 * hardware acknowledges it (or we time out) */
		WREG32(PIPE0_DMIF_BUFFER_CONTROL + pipe_offset,
		       DMIF_BUFFERS_ALLOCATED(buffer_alloc));
		for (i = 0; i < rdev->usec_timeout; i++) {
			if (RREG32(PIPE0_DMIF_BUFFER_CONTROL + pipe_offset) &
			    DMIF_BUFFERS_ALLOCATED_COMPLETED)
				break;
			udelay(1);
		}
	}

	if (radeon_crtc->base.enabled && mode) {
		/* translate the chosen split into line buffer entries;
		 * DCE5 parts have a larger line buffer */
		switch (tmp) {
		case 0:
		case 4:
		default:
			if (ASIC_IS_DCE5(rdev))
				return 4096 * 2;
			else
				return 3840 * 2;
		case 1:
		case 5:
			if (ASIC_IS_DCE5(rdev))
				return 6144 * 2;
			else
				return 5760 * 2;
		case 2:
		case 6:
			if (ASIC_IS_DCE5(rdev))
				return 8192 * 2;
			else
				return 7680 * 2;
		case 3:
		case 7:
			if (ASIC_IS_DCE5(rdev))
				return 2048 * 2;
			else
				return 1920 * 2;
		}
	}

	/* controller not enabled, so no lb used */
	return 0;
}
1988 
1989 u32 evergreen_get_number_of_dram_channels(struct radeon_device *rdev)
1990 {
1991 	u32 tmp = RREG32(MC_SHARED_CHMAP);
1992 
1993 	switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
1994 	case 0:
1995 	default:
1996 		return 1;
1997 	case 1:
1998 		return 2;
1999 	case 2:
2000 		return 4;
2001 	case 3:
2002 		return 8;
2003 	}
2004 }
2005 
/* Inputs to the display watermark calculations for a single display head. */
struct evergreen_wm_params {
	u32 dram_channels; /* number of dram channels */
	u32 yclk;          /* bandwidth per dram data pin in kHz */
	u32 sclk;          /* engine clock in kHz */
	u32 disp_clk;      /* display clock in kHz */
	u32 src_width;     /* viewport width */
	u32 active_time;   /* active display time in ns */
	u32 blank_time;    /* blank time in ns */
	bool interlaced;    /* mode is interlaced */
	fixed20_12 vsc;    /* vertical scale ratio */
	u32 num_heads;     /* number of active crtcs */
	u32 bytes_per_pixel; /* bytes per pixel display + overlay */
	u32 lb_size;       /* line buffer allocated to pipe */
	u32 vtaps;         /* vertical scaler taps */
};
2021 
2022 static u32 evergreen_dram_bandwidth(struct evergreen_wm_params *wm)
2023 {
2024 	/* Calculate DRAM Bandwidth and the part allocated to display. */
2025 	fixed20_12 dram_efficiency; /* 0.7 */
2026 	fixed20_12 yclk, dram_channels, bandwidth;
2027 	fixed20_12 a;
2028 
2029 	a.full = dfixed_const(1000);
2030 	yclk.full = dfixed_const(wm->yclk);
2031 	yclk.full = dfixed_div(yclk, a);
2032 	dram_channels.full = dfixed_const(wm->dram_channels * 4);
2033 	a.full = dfixed_const(10);
2034 	dram_efficiency.full = dfixed_const(7);
2035 	dram_efficiency.full = dfixed_div(dram_efficiency, a);
2036 	bandwidth.full = dfixed_mul(dram_channels, yclk);
2037 	bandwidth.full = dfixed_mul(bandwidth, dram_efficiency);
2038 
2039 	return dfixed_trunc(bandwidth);
2040 }
2041 
2042 static u32 evergreen_dram_bandwidth_for_display(struct evergreen_wm_params *wm)
2043 {
2044 	/* Calculate DRAM Bandwidth and the part allocated to display. */
2045 	fixed20_12 disp_dram_allocation; /* 0.3 to 0.7 */
2046 	fixed20_12 yclk, dram_channels, bandwidth;
2047 	fixed20_12 a;
2048 
2049 	a.full = dfixed_const(1000);
2050 	yclk.full = dfixed_const(wm->yclk);
2051 	yclk.full = dfixed_div(yclk, a);
2052 	dram_channels.full = dfixed_const(wm->dram_channels * 4);
2053 	a.full = dfixed_const(10);
2054 	disp_dram_allocation.full = dfixed_const(3); /* XXX worse case value 0.3 */
2055 	disp_dram_allocation.full = dfixed_div(disp_dram_allocation, a);
2056 	bandwidth.full = dfixed_mul(dram_channels, yclk);
2057 	bandwidth.full = dfixed_mul(bandwidth, disp_dram_allocation);
2058 
2059 	return dfixed_trunc(bandwidth);
2060 }
2061 
2062 static u32 evergreen_data_return_bandwidth(struct evergreen_wm_params *wm)
2063 {
2064 	/* Calculate the display Data return Bandwidth */
2065 	fixed20_12 return_efficiency; /* 0.8 */
2066 	fixed20_12 sclk, bandwidth;
2067 	fixed20_12 a;
2068 
2069 	a.full = dfixed_const(1000);
2070 	sclk.full = dfixed_const(wm->sclk);
2071 	sclk.full = dfixed_div(sclk, a);
2072 	a.full = dfixed_const(10);
2073 	return_efficiency.full = dfixed_const(8);
2074 	return_efficiency.full = dfixed_div(return_efficiency, a);
2075 	a.full = dfixed_const(32);
2076 	bandwidth.full = dfixed_mul(a, sclk);
2077 	bandwidth.full = dfixed_mul(bandwidth, return_efficiency);
2078 
2079 	return dfixed_trunc(bandwidth);
2080 }
2081 
2082 static u32 evergreen_dmif_request_bandwidth(struct evergreen_wm_params *wm)
2083 {
2084 	/* Calculate the DMIF Request Bandwidth */
2085 	fixed20_12 disp_clk_request_efficiency; /* 0.8 */
2086 	fixed20_12 disp_clk, bandwidth;
2087 	fixed20_12 a;
2088 
2089 	a.full = dfixed_const(1000);
2090 	disp_clk.full = dfixed_const(wm->disp_clk);
2091 	disp_clk.full = dfixed_div(disp_clk, a);
2092 	a.full = dfixed_const(10);
2093 	disp_clk_request_efficiency.full = dfixed_const(8);
2094 	disp_clk_request_efficiency.full = dfixed_div(disp_clk_request_efficiency, a);
2095 	a.full = dfixed_const(32);
2096 	bandwidth.full = dfixed_mul(a, disp_clk);
2097 	bandwidth.full = dfixed_mul(bandwidth, disp_clk_request_efficiency);
2098 
2099 	return dfixed_trunc(bandwidth);
2100 }
2101 
2102 static u32 evergreen_available_bandwidth(struct evergreen_wm_params *wm)
2103 {
2104 	/* Calculate the Available bandwidth. Display can use this temporarily but not in average. */
2105 	u32 dram_bandwidth = evergreen_dram_bandwidth(wm);
2106 	u32 data_return_bandwidth = evergreen_data_return_bandwidth(wm);
2107 	u32 dmif_req_bandwidth = evergreen_dmif_request_bandwidth(wm);
2108 
2109 	return min(dram_bandwidth, min(data_return_bandwidth, dmif_req_bandwidth));
2110 }
2111 
2112 static u32 evergreen_average_bandwidth(struct evergreen_wm_params *wm)
2113 {
2114 	/* Calculate the display mode Average Bandwidth
2115 	 * DisplayMode should contain the source and destination dimensions,
2116 	 * timing, etc.
2117 	 */
2118 	fixed20_12 bpp;
2119 	fixed20_12 line_time;
2120 	fixed20_12 src_width;
2121 	fixed20_12 bandwidth;
2122 	fixed20_12 a;
2123 
2124 	a.full = dfixed_const(1000);
2125 	line_time.full = dfixed_const(wm->active_time + wm->blank_time);
2126 	line_time.full = dfixed_div(line_time, a);
2127 	bpp.full = dfixed_const(wm->bytes_per_pixel);
2128 	src_width.full = dfixed_const(wm->src_width);
2129 	bandwidth.full = dfixed_mul(src_width, bpp);
2130 	bandwidth.full = dfixed_mul(bandwidth, wm->vsc);
2131 	bandwidth.full = dfixed_div(bandwidth, line_time);
2132 
2133 	return dfixed_trunc(bandwidth);
2134 }
2135 
/**
 * evergreen_latency_watermark - worst-case request latency in ns
 *
 * @wm: watermark parameters for one display head
 *
 * Sums the mc latency, the time other heads' outstanding chunk and
 * cursor requests can occupy the return path, and the dc pipe latency;
 * if the line buffer cannot be refilled within the active time, the
 * shortfall is added on top.  Returns 0 when no heads are active.
 *
 * NOTE(review): the divisions by available_bandwidth happen in the
 * initializers, before the num_heads guard -- presumably the bandwidth
 * is never 0 when this is called; confirm against callers.
 */
static u32 evergreen_latency_watermark(struct evergreen_wm_params *wm)
{
	/* First calculate the latency in ns */
	u32 mc_latency = 2000; /* 2000 ns. */
	u32 available_bandwidth = evergreen_available_bandwidth(wm);
	u32 worst_chunk_return_time = (512 * 8 * 1000) / available_bandwidth;
	u32 cursor_line_pair_return_time = (128 * 4 * 1000) / available_bandwidth;
	u32 dc_latency = 40000000 / wm->disp_clk; /* dc pipe latency */
	u32 other_heads_data_return_time = ((wm->num_heads + 1) * worst_chunk_return_time) +
		(wm->num_heads * cursor_line_pair_return_time);
	u32 latency = mc_latency + other_heads_data_return_time + dc_latency;
	u32 max_src_lines_per_dst_line, lb_fill_bw, line_fill_time;
	fixed20_12 a, b, c;

	if (wm->num_heads == 0)
		return 0;

	/* downscaling, many vertical taps, or interlace fetch more source
	 * lines per destination line */
	a.full = dfixed_const(2);
	b.full = dfixed_const(1);
	if ((wm->vsc.full > a.full) ||
	    ((wm->vsc.full > b.full) && (wm->vtaps >= 3)) ||
	    (wm->vtaps >= 5) ||
	    ((wm->vsc.full >= a.full) && wm->interlaced))
		max_src_lines_per_dst_line = 4;
	else
		max_src_lines_per_dst_line = 2;

	/* this head's share of the available bandwidth... */
	a.full = dfixed_const(available_bandwidth);
	b.full = dfixed_const(wm->num_heads);
	a.full = dfixed_div(a, b);

	/* ...capped by what the display clock can consume */
	lb_fill_bw = min(dfixed_trunc(a), wm->disp_clk * wm->bytes_per_pixel / 1000);

	/* time (ns) to refill one destination line's worth of data */
	a.full = dfixed_const(max_src_lines_per_dst_line * wm->src_width * wm->bytes_per_pixel);
	b.full = dfixed_const(1000);
	c.full = dfixed_const(lb_fill_bw);
	b.full = dfixed_div(c, b);
	a.full = dfixed_div(a, b);
	line_fill_time = dfixed_trunc(a);

	if (line_fill_time < wm->active_time)
		return latency;
	else
		return latency + (line_fill_time - wm->active_time);

}
2182 
2183 static bool evergreen_average_bandwidth_vs_dram_bandwidth_for_display(struct evergreen_wm_params *wm)
2184 {
2185 	if (evergreen_average_bandwidth(wm) <=
2186 	    (evergreen_dram_bandwidth_for_display(wm) / wm->num_heads))
2187 		return true;
2188 	else
2189 		return false;
2190 };
2191 
2192 static bool evergreen_average_bandwidth_vs_available_bandwidth(struct evergreen_wm_params *wm)
2193 {
2194 	if (evergreen_average_bandwidth(wm) <=
2195 	    (evergreen_available_bandwidth(wm) / wm->num_heads))
2196 		return true;
2197 	else
2198 		return false;
2199 };
2200 
2201 static bool evergreen_check_latency_hiding(struct evergreen_wm_params *wm)
2202 {
2203 	u32 lb_partitions = wm->lb_size / wm->src_width;
2204 	u32 line_time = wm->active_time + wm->blank_time;
2205 	u32 latency_tolerant_lines;
2206 	u32 latency_hiding;
2207 	fixed20_12 a;
2208 
2209 	a.full = dfixed_const(1);
2210 	if (wm->vsc.full > a.full)
2211 		latency_tolerant_lines = 1;
2212 	else {
2213 		if (lb_partitions <= (wm->vtaps + 1))
2214 			latency_tolerant_lines = 1;
2215 		else
2216 			latency_tolerant_lines = 2;
2217 	}
2218 
2219 	latency_hiding = (latency_tolerant_lines * line_time + wm->blank_time);
2220 
2221 	if (evergreen_latency_watermark(wm) <= latency_hiding)
2222 		return true;
2223 	else
2224 		return false;
2225 }
2226 
/**
 * evergreen_program_watermarks - program display watermarks for a crtc
 *
 * @rdev: radeon_device pointer
 * @radeon_crtc: crtc to program
 * @lb_size: line buffer entries allocated to this crtc
 * @num_heads: number of active display heads
 *
 * Computes latency watermarks for the high and low clock levels, decides
 * whether display priority must be forced high, then writes the pipe's
 * arbitration registers and the crtc's priority marks.  Also caches the
 * line time and watermarks on the crtc for DPM.
 */
static void evergreen_program_watermarks(struct radeon_device *rdev,
					 struct radeon_crtc *radeon_crtc,
					 u32 lb_size, u32 num_heads)
{
	struct drm_display_mode *mode = &radeon_crtc->base.mode;
	struct evergreen_wm_params wm_low, wm_high;
	u32 dram_channels;
	u32 active_time;
	u32 line_time = 0;
	u32 latency_watermark_a = 0, latency_watermark_b = 0;
	u32 priority_a_mark = 0, priority_b_mark = 0;
	u32 priority_a_cnt = PRIORITY_OFF;
	u32 priority_b_cnt = PRIORITY_OFF;
	u32 pipe_offset = radeon_crtc->crtc_id * 16;
	u32 tmp, arb_control3;
	fixed20_12 a, b, c;

	if (radeon_crtc->base.enabled && num_heads && mode) {
		/* active/total line times in ns (line_time is written to a
		 * 16-bit register field, hence the 65535 clamp) */
		active_time = (u32) div_u64((u64)mode->crtc_hdisplay * 1000000,
					    (u32)mode->clock);
		line_time = (u32) div_u64((u64)mode->crtc_htotal * 1000000,
					  (u32)mode->clock);
		line_time = min(line_time, (u32)65535);
		priority_a_cnt = 0;
		priority_b_cnt = 0;
		dram_channels = evergreen_get_number_of_dram_channels(rdev);

		/* watermark for high clocks */
		if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) {
			wm_high.yclk =
				radeon_dpm_get_mclk(rdev, false) * 10;
			wm_high.sclk =
				radeon_dpm_get_sclk(rdev, false) * 10;
		} else {
			wm_high.yclk = rdev->pm.current_mclk * 10;
			wm_high.sclk = rdev->pm.current_sclk * 10;
		}

		wm_high.disp_clk = mode->clock;
		wm_high.src_width = mode->crtc_hdisplay;
		wm_high.active_time = active_time;
		wm_high.blank_time = line_time - wm_high.active_time;
		wm_high.interlaced = false;
		if (mode->flags & DRM_MODE_FLAG_INTERLACE)
			wm_high.interlaced = true;
		wm_high.vsc = radeon_crtc->vsc;
		wm_high.vtaps = 1;
		if (radeon_crtc->rmx_type != RMX_OFF)
			wm_high.vtaps = 2;
		wm_high.bytes_per_pixel = 4; /* XXX: get this from fb config */
		wm_high.lb_size = lb_size;
		wm_high.dram_channels = dram_channels;
		wm_high.num_heads = num_heads;

		/* watermark for low clocks */
		if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) {
			wm_low.yclk =
				radeon_dpm_get_mclk(rdev, true) * 10;
			wm_low.sclk =
				radeon_dpm_get_sclk(rdev, true) * 10;
		} else {
			wm_low.yclk = rdev->pm.current_mclk * 10;
			wm_low.sclk = rdev->pm.current_sclk * 10;
		}

		wm_low.disp_clk = mode->clock;
		wm_low.src_width = mode->crtc_hdisplay;
		wm_low.active_time = active_time;
		wm_low.blank_time = line_time - wm_low.active_time;
		wm_low.interlaced = false;
		if (mode->flags & DRM_MODE_FLAG_INTERLACE)
			wm_low.interlaced = true;
		wm_low.vsc = radeon_crtc->vsc;
		wm_low.vtaps = 1;
		if (radeon_crtc->rmx_type != RMX_OFF)
			wm_low.vtaps = 2;
		wm_low.bytes_per_pixel = 4; /* XXX: get this from fb config */
		wm_low.lb_size = lb_size;
		wm_low.dram_channels = dram_channels;
		wm_low.num_heads = num_heads;

		/* set for high clocks */
		latency_watermark_a = min(evergreen_latency_watermark(&wm_high), (u32)65535);
		/* set for low clocks */
		latency_watermark_b = min(evergreen_latency_watermark(&wm_low), (u32)65535);

		/* possibly force display priority to high */
		/* should really do this at mode validation time... */
		if (!evergreen_average_bandwidth_vs_dram_bandwidth_for_display(&wm_high) ||
		    !evergreen_average_bandwidth_vs_available_bandwidth(&wm_high) ||
		    !evergreen_check_latency_hiding(&wm_high) ||
		    (rdev->disp_priority == 2)) {
			DRM_DEBUG_KMS("force priority a to high\n");
			priority_a_cnt |= PRIORITY_ALWAYS_ON;
		}
		if (!evergreen_average_bandwidth_vs_dram_bandwidth_for_display(&wm_low) ||
		    !evergreen_average_bandwidth_vs_available_bandwidth(&wm_low) ||
		    !evergreen_check_latency_hiding(&wm_low) ||
		    (rdev->disp_priority == 2)) {
			DRM_DEBUG_KMS("force priority b to high\n");
			priority_b_cnt |= PRIORITY_ALWAYS_ON;
		}

		/* priority mark a = latency_a * pixel clock * hsc / 16 */
		a.full = dfixed_const(1000);
		b.full = dfixed_const(mode->clock);
		b.full = dfixed_div(b, a);
		c.full = dfixed_const(latency_watermark_a);
		c.full = dfixed_mul(c, b);
		c.full = dfixed_mul(c, radeon_crtc->hsc);
		c.full = dfixed_div(c, a);
		a.full = dfixed_const(16);
		c.full = dfixed_div(c, a);
		priority_a_mark = dfixed_trunc(c);
		priority_a_cnt |= priority_a_mark & PRIORITY_MARK_MASK;

		/* priority mark b = latency_b * pixel clock * hsc / 16 */
		a.full = dfixed_const(1000);
		b.full = dfixed_const(mode->clock);
		b.full = dfixed_div(b, a);
		c.full = dfixed_const(latency_watermark_b);
		c.full = dfixed_mul(c, b);
		c.full = dfixed_mul(c, radeon_crtc->hsc);
		c.full = dfixed_div(c, a);
		a.full = dfixed_const(16);
		c.full = dfixed_div(c, a);
		priority_b_mark = dfixed_trunc(c);
		priority_b_cnt |= priority_b_mark & PRIORITY_MARK_MASK;

		/* Save number of lines the linebuffer leads before the scanout */
		radeon_crtc->lb_vblank_lead_lines = DIV_ROUND_UP(lb_size, mode->crtc_hdisplay);
	}

	/* select wm A */
	arb_control3 = RREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset);
	tmp = arb_control3;
	tmp &= ~LATENCY_WATERMARK_MASK(3);
	tmp |= LATENCY_WATERMARK_MASK(1);
	WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, tmp);
	WREG32(PIPE0_LATENCY_CONTROL + pipe_offset,
	       (LATENCY_LOW_WATERMARK(latency_watermark_a) |
		LATENCY_HIGH_WATERMARK(line_time)));
	/* select wm B */
	tmp = RREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset);
	tmp &= ~LATENCY_WATERMARK_MASK(3);
	tmp |= LATENCY_WATERMARK_MASK(2);
	WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, tmp);
	WREG32(PIPE0_LATENCY_CONTROL + pipe_offset,
	       (LATENCY_LOW_WATERMARK(latency_watermark_b) |
		LATENCY_HIGH_WATERMARK(line_time)));
	/* restore original selection */
	WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, arb_control3);

	/* write the priority marks */
	WREG32(PRIORITY_A_CNT + radeon_crtc->crtc_offset, priority_a_cnt);
	WREG32(PRIORITY_B_CNT + radeon_crtc->crtc_offset, priority_b_cnt);

	/* save values for DPM */
	radeon_crtc->line_time = line_time;
	radeon_crtc->wm_high = latency_watermark_a;
	radeon_crtc->wm_low = latency_watermark_b;
}
2387 
2388 /**
2389  * evergreen_bandwidth_update - update display watermarks callback.
2390  *
2391  * @rdev: radeon_device pointer
2392  *
2393  * Update the display watermarks based on the requested mode(s)
2394  * (evergreen+).
2395  */
2396 void evergreen_bandwidth_update(struct radeon_device *rdev)
2397 {
2398 	struct drm_display_mode *mode0 = NULL;
2399 	struct drm_display_mode *mode1 = NULL;
2400 	u32 num_heads = 0, lb_size;
2401 	int i;
2402 
2403 	if (!rdev->mode_info.mode_config_initialized)
2404 		return;
2405 
2406 	radeon_update_display_priority(rdev);
2407 
2408 	for (i = 0; i < rdev->num_crtc; i++) {
2409 		if (rdev->mode_info.crtcs[i]->base.enabled)
2410 			num_heads++;
2411 	}
2412 	for (i = 0; i < rdev->num_crtc; i += 2) {
2413 		mode0 = &rdev->mode_info.crtcs[i]->base.mode;
2414 		mode1 = &rdev->mode_info.crtcs[i+1]->base.mode;
2415 		lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i], mode0, mode1);
2416 		evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i], lb_size, num_heads);
2417 		lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i+1], mode1, mode0);
2418 		evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i+1], lb_size, num_heads);
2419 	}
2420 }
2421 
2422 /**
2423  * evergreen_mc_wait_for_idle - wait for MC idle callback.
2424  *
2425  * @rdev: radeon_device pointer
2426  *
2427  * Wait for the MC (memory controller) to be idle.
2428  * (evergreen+).
2429  * Returns 0 if the MC is idle, -1 if not.
2430  */
2431 int evergreen_mc_wait_for_idle(struct radeon_device *rdev)
2432 {
2433 	unsigned i;
2434 	u32 tmp;
2435 
2436 	for (i = 0; i < rdev->usec_timeout; i++) {
2437 		/* read MC_STATUS */
2438 		tmp = RREG32(SRBM_STATUS) & 0x1F00;
2439 		if (!tmp)
2440 			return 0;
2441 		udelay(1);
2442 	}
2443 	return -1;
2444 }
2445 
2446 /*
2447  * GART
2448  */
/**
 * evergreen_pcie_gart_tlb_flush - flush the VM context0 TLB
 *
 * @rdev: radeon_device pointer
 *
 * Flushes the HDP cache, requests a context0 TLB flush and polls the
 * response field until it completes, fails, or times out.
 */
void evergreen_pcie_gart_tlb_flush(struct radeon_device *rdev)
{
	unsigned i;
	u32 tmp;

	/* flush the HDP cache first so the MC sees current data */
	WREG32(HDP_MEM_COHERENCY_FLUSH_CNTL, 0x1);

	/* request the flush, then poll the response field */
	WREG32(VM_CONTEXT0_REQUEST_RESPONSE, REQUEST_TYPE(1));
	for (i = 0; i < rdev->usec_timeout; i++) {
		/* read MC_STATUS */
		tmp = RREG32(VM_CONTEXT0_REQUEST_RESPONSE);
		tmp = (tmp & RESPONSE_TYPE_MASK) >> RESPONSE_TYPE_SHIFT;
		if (tmp == 2) {
			/* response type 2 means the flush failed */
			pr_warn("[drm] r600 flush TLB failed\n");
			return;
		}
		if (tmp) {
			/* any other non-zero response: flush done */
			return;
		}
		udelay(1);
	}
}
2471 
/**
 * evergreen_pcie_gart_enable - set up the PCIE GART
 *
 * @rdev: radeon_device pointer
 *
 * Pins the GART page table in VRAM, programs the L2 cache and L1 TLBs,
 * maps VM context0 over the GTT aperture and flushes the TLB.
 * Returns 0 on success, negative error code on failure.
 */
static int evergreen_pcie_gart_enable(struct radeon_device *rdev)
{
	u32 tmp;
	int r;

	if (rdev->gart.robj == NULL) {
		dev_err(rdev->dev, "No VRAM object for PCIE GART.\n");
		return -EINVAL;
	}
	r = radeon_gart_table_vram_pin(rdev);
	if (r)
		return r;
	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
				ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control */
	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
		SYSTEM_ACCESS_MODE_NOT_IN_SYS |
		SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
		EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	if (rdev->flags & RADEON_IS_IGP) {
		/* fusion (IGP) parts use the FUS_ register block */
		WREG32(FUS_MC_VM_MD_L1_TLB0_CNTL, tmp);
		WREG32(FUS_MC_VM_MD_L1_TLB1_CNTL, tmp);
		WREG32(FUS_MC_VM_MD_L1_TLB2_CNTL, tmp);
	} else {
		WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
		WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
		WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
		/* these families have a fourth MD TLB */
		if ((rdev->family == CHIP_JUNIPER) ||
		    (rdev->family == CHIP_CYPRESS) ||
		    (rdev->family == CHIP_HEMLOCK) ||
		    (rdev->family == CHIP_BARTS))
			WREG32(MC_VM_MD_L1_TLB3_CNTL, tmp);
	}
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	/* map VM context0 over the whole GTT aperture */
	WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12);
	WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12);
	WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12);
	WREG32(VM_CONTEXT0_CNTL, ENABLE_CONTEXT | PAGE_TABLE_DEPTH(0) |
				RANGE_PROTECTION_FAULT_ENABLE_DEFAULT);
	/* out-of-range faults land on the dummy page */
	WREG32(VM_CONTEXT0_PROTECTION_FAULT_DEFAULT_ADDR,
			(u32)(rdev->dummy_page.addr >> 12));
	WREG32(VM_CONTEXT1_CNTL, 0);

	evergreen_pcie_gart_tlb_flush(rdev);
	DRM_INFO("PCIE GART of %uM enabled (table at 0x%016llX).\n",
		 (unsigned)(rdev->mc.gtt_size >> 20),
		 (unsigned long long)rdev->gart.table_addr);
	rdev->gart.ready = true;
	return 0;
}
2529 
/**
 * evergreen_pcie_gart_disable - shut down the PCIE GART
 *
 * @rdev: radeon_device pointer
 *
 * Disables both VM contexts, reprograms the L2 cache and L1 TLBs with
 * translation disabled, then unpins the page table.
 */
static void evergreen_pcie_gart_disable(struct radeon_device *rdev)
{
	u32 tmp;

	/* Disable all tables */
	WREG32(VM_CONTEXT0_CNTL, 0);
	WREG32(VM_CONTEXT1_CNTL, 0);

	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_FRAGMENT_PROCESSING |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control (no ENABLE_L1_TLB: translation off) */
	tmp = EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	radeon_gart_table_vram_unpin(rdev);
}
2554 
/**
 * evergreen_pcie_gart_fini - tear down the PCIE GART
 *
 * @rdev: radeon_device pointer
 *
 * Disables the GART hardware, frees the page table VRAM and releases
 * the gart bookkeeping structures.
 */
static void evergreen_pcie_gart_fini(struct radeon_device *rdev)
{
	evergreen_pcie_gart_disable(rdev);
	radeon_gart_table_vram_free(rdev);
	radeon_gart_fini(rdev);
}
2561 
2562 
/**
 * evergreen_agp_enable - configure the MC for AGP (no GART translation)
 *
 * @rdev: radeon_device pointer
 *
 * Programs the L2 cache and L1 TLBs for pass-through system access and
 * disables both VM contexts.
 */
static void evergreen_agp_enable(struct radeon_device *rdev)
{
	u32 tmp;

	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
				ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control */
	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
		SYSTEM_ACCESS_MODE_NOT_IN_SYS |
		SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
		EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	/* no page table walks: both contexts off */
	WREG32(VM_CONTEXT0_CNTL, 0);
	WREG32(VM_CONTEXT1_CNTL, 0);
}
2588 
/* per-DIG-block register offsets (DIG0-DIG5) */
static const unsigned ni_dig_offsets[] =
{
	NI_DIG0_REGISTER_OFFSET,
	NI_DIG1_REGISTER_OFFSET,
	NI_DIG2_REGISTER_OFFSET,
	NI_DIG3_REGISTER_OFFSET,
	NI_DIG4_REGISTER_OFFSET,
	NI_DIG5_REGISTER_OFFSET
};

/* per-UNIPHY transmitter TX_CONTROL1 register offsets */
static const unsigned ni_tx_offsets[] =
{
	NI_DCIO_UNIPHY0_UNIPHY_TX_CONTROL1,
	NI_DCIO_UNIPHY1_UNIPHY_TX_CONTROL1,
	NI_DCIO_UNIPHY2_UNIPHY_TX_CONTROL1,
	NI_DCIO_UNIPHY3_UNIPHY_TX_CONTROL1,
	NI_DCIO_UNIPHY4_UNIPHY_TX_CONTROL1,
	NI_DCIO_UNIPHY5_UNIPHY_TX_CONTROL1
};

/* per-DP-block register offsets (DP0-DP5) */
static const unsigned evergreen_dp_offsets[] =
{
	EVERGREEN_DP0_REGISTER_OFFSET,
	EVERGREEN_DP1_REGISTER_OFFSET,
	EVERGREEN_DP2_REGISTER_OFFSET,
	EVERGREEN_DP3_REGISTER_OFFSET,
	EVERGREEN_DP4_REGISTER_OFFSET,
	EVERGREEN_DP5_REGISTER_OFFSET
};
2618 
2619 
2620 /*
2621  * Assumption is that EVERGREEN_CRTC_MASTER_EN enable for requested crtc
2622  * We go from crtc to connector and it is not relible  since it
2623  * should be an opposite direction .If crtc is enable then
2624  * find the dig_fe which selects this crtc and insure that it enable.
2625  * if such dig_fe is found then find dig_be which selects found dig_be and
2626  * insure that it enable and in DP_SST mode.
2627  * if UNIPHY_PLL_CONTROL1.enable then we should disconnect timing
2628  * from dp symbols clocks .
2629  */
/**
 * evergreen_is_dp_sst_stream_enabled - check if a crtc drives a DP SST stream
 *
 * @rdev: radeon_device pointer
 * @crtc_id: crtc to check
 * @ret_dig_fe: set to the index of the dig_fe driven by @crtc_id on success
 *
 * Returns true when a running dig_fe sources @crtc_id and the dig_be that
 * selects this dig_fe is enabled in DP SST mode with its uniphy PLL running.
 */
static bool evergreen_is_dp_sst_stream_enabled(struct radeon_device *rdev,
					       unsigned crtc_id, unsigned *ret_dig_fe)
{
	unsigned i;
	unsigned dig_fe;
	unsigned dig_be;
	unsigned dig_en_be;
	unsigned uniphy_pll;
	unsigned digs_fe_selected;
	unsigned dig_be_mode;
	unsigned dig_fe_mask;
	bool is_enabled = false;
	bool found_crtc = false;

	/* loop through all running dig_fe to find selected crtc */
	for (i = 0; i < ARRAY_SIZE(ni_dig_offsets); i++) {
		dig_fe = RREG32(NI_DIG_FE_CNTL + ni_dig_offsets[i]);
		if (dig_fe & NI_DIG_FE_CNTL_SYMCLK_FE_ON &&
		    crtc_id == NI_DIG_FE_CNTL_SOURCE_SELECT(dig_fe)) {
			/* found running pipe */
			found_crtc = true;
			dig_fe_mask = 1 << i;
			dig_fe = i;
			break;
		}
	}

	if (found_crtc) {
		/* loop through all running dig_be to find selected dig_fe */
		for (i = 0; i < ARRAY_SIZE(ni_dig_offsets); i++) {
			dig_be = RREG32(NI_DIG_BE_CNTL + ni_dig_offsets[i]);
			/* if dig_fe_selected by dig_be? */
			digs_fe_selected = NI_DIG_BE_CNTL_FE_SOURCE_SELECT(dig_be);
			dig_be_mode = NI_DIG_FE_CNTL_MODE(dig_be);
			if (dig_fe_mask &  digs_fe_selected &&
			    /* if dig_be in sst mode? */
			    dig_be_mode == NI_DIG_BE_DPSST) {
				dig_en_be = RREG32(NI_DIG_BE_EN_CNTL +
						   ni_dig_offsets[i]);
				/* NOTE(review): assumes dig_be index i maps
				 * 1:1 to uniphy tx index i -- confirm */
				uniphy_pll = RREG32(NI_DCIO_UNIPHY0_PLL_CONTROL1 +
						    ni_tx_offsets[i]);
				/* dig_be enable and tx is running */
				if (dig_en_be & NI_DIG_BE_EN_CNTL_ENABLE &&
				    dig_en_be & NI_DIG_BE_EN_CNTL_SYMBCLK_ON &&
				    uniphy_pll & NI_DCIO_UNIPHY0_PLL_CONTROL1_ENABLE) {
					is_enabled = true;
					*ret_dig_fe = dig_fe;
					break;
				}
			}
		}
	}

	return is_enabled;
}
2685 
2686 /*
2687  * Blank dig when in dp sst mode
2688  * Dig ignores crtc timing
2689  */
/**
 * evergreen_blank_dp_output - blank a DP SST stream at the dig
 *
 * @rdev: radeon_device pointer
 * @dig_fe: index into evergreen_dp_offsets for the dig to blank
 *
 * Disables the DP video stream, waits (up to 32 ms) for the stream
 * status bit to clear, then resets the steer FIFO.
 */
static void evergreen_blank_dp_output(struct radeon_device *rdev,
				      unsigned dig_fe)
{
	unsigned stream_ctrl;
	unsigned fifo_ctrl;
	unsigned counter = 0;

	if (dig_fe >= ARRAY_SIZE(evergreen_dp_offsets)) {
		DRM_ERROR("invalid dig_fe %d\n", dig_fe);
		return;
	}

	stream_ctrl = RREG32(EVERGREEN_DP_VID_STREAM_CNTL +
			     evergreen_dp_offsets[dig_fe]);
	if (!(stream_ctrl & EVERGREEN_DP_VID_STREAM_CNTL_ENABLE)) {
		DRM_ERROR("dig %d , should be enable\n", dig_fe);
		return;
	}

	/* disable the video stream */
	stream_ctrl &=~EVERGREEN_DP_VID_STREAM_CNTL_ENABLE;
	WREG32(EVERGREEN_DP_VID_STREAM_CNTL +
	       evergreen_dp_offsets[dig_fe], stream_ctrl);

	/* wait for the hardware to report the stream stopped */
	stream_ctrl = RREG32(EVERGREEN_DP_VID_STREAM_CNTL +
			     evergreen_dp_offsets[dig_fe]);
	while (counter < 32 && stream_ctrl & EVERGREEN_DP_VID_STREAM_STATUS) {
		msleep(1);
		counter++;
		stream_ctrl = RREG32(EVERGREEN_DP_VID_STREAM_CNTL +
				     evergreen_dp_offsets[dig_fe]);
	}
	if (counter >= 32 )
		DRM_ERROR("counter exceeds %d\n", counter);

	/* reset the steer fifo so stale pixel data is not replayed */
	fifo_ctrl = RREG32(EVERGREEN_DP_STEER_FIFO + evergreen_dp_offsets[dig_fe]);
	fifo_ctrl |= EVERGREEN_DP_STEER_FIFO_RESET;
	WREG32(EVERGREEN_DP_STEER_FIFO + evergreen_dp_offsets[dig_fe], fifo_ctrl);

}
2729 
/**
 * evergreen_mc_stop - stop display access to VRAM and black out the MC
 * @rdev: radeon_device pointer
 * @save: state buffer filled in here and consumed by evergreen_mc_resume()
 *
 * Saves the VGA render/HDP controls, blanks every enabled display
 * controller (waiting for a vblank so the change latches cleanly),
 * blanks DCE5 DP SST outputs, then puts the memory controller into
 * blackout mode and locks the double-buffered surface registers so
 * the framebuffer location can be reprogrammed safely.
 */
void evergreen_mc_stop(struct radeon_device *rdev, struct evergreen_mc_save *save)
{
	u32 crtc_enabled, tmp, frame_count, blackout;
	int i, j;
	unsigned dig_fe;

	if (!ASIC_IS_NODCE(rdev)) {
		/* save current VGA state for evergreen_mc_resume() */
		save->vga_render_control = RREG32(VGA_RENDER_CONTROL);
		save->vga_hdp_control = RREG32(VGA_HDP_CONTROL);

		/* disable VGA render */
		WREG32(VGA_RENDER_CONTROL, 0);
	}
	/* blank the display controllers */
	for (i = 0; i < rdev->num_crtc; i++) {
		crtc_enabled = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]) & EVERGREEN_CRTC_MASTER_EN;
		if (crtc_enabled) {
			save->crtc_enabled[i] = true;
			if (ASIC_IS_DCE6(rdev)) {
				/* DCE6: blank via CRTC_BLANK_CONTROL under the update lock */
				tmp = RREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i]);
				if (!(tmp & EVERGREEN_CRTC_BLANK_DATA_EN)) {
					radeon_wait_for_vblank(rdev, i);
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
					tmp |= EVERGREEN_CRTC_BLANK_DATA_EN;
					WREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i], tmp);
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
				}
			} else {
				/* pre-DCE6: stop display reads via CRTC_CONTROL instead */
				tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
				if (!(tmp & EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE)) {
					radeon_wait_for_vblank(rdev, i);
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
					tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
					WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
				}
			}
			/* wait for the next frame so the blank takes effect */
			frame_count = radeon_get_vblank_counter(rdev, i);
			for (j = 0; j < rdev->usec_timeout; j++) {
				if (radeon_get_vblank_counter(rdev, i) != frame_count)
					break;
				udelay(1);
			}
			/*
			 * We should disable the DIG encoder when it drives a DP
			 * SST stream, but this runs from radeon_device_init()
			 * where the display topology is not yet known (it only
			 * becomes available after radeon_modeset_init(), where
			 * radeon_atom_encoder_dpms_dig() would do the job).
			 * For now, blank the DP output manually on DCE5.
			 */
			if (ASIC_IS_DCE5(rdev) &&
			    evergreen_is_dp_sst_stream_enabled(rdev, i ,&dig_fe))
				evergreen_blank_dp_output(rdev, dig_fe);
			/*we could remove 6 lines below*/
			/* XXX this is a hack to avoid strange behavior with EFI on certain systems */
			WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
			tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
			tmp &= ~EVERGREEN_CRTC_MASTER_EN;
			WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
			WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
			/* mark disabled so evergreen_mc_resume() leaves it off */
			save->crtc_enabled[i] = false;
			/* ***** */
		} else {
			save->crtc_enabled[i] = false;
		}
	}

	radeon_mc_wait_for_idle(rdev);

	blackout = RREG32(MC_SHARED_BLACKOUT_CNTL);
	if ((blackout & BLACKOUT_MODE_MASK) != 1) {
		/* Block CPU access */
		WREG32(BIF_FB_EN, 0);
		/* blackout the MC */
		blackout &= ~BLACKOUT_MODE_MASK;
		WREG32(MC_SHARED_BLACKOUT_CNTL, blackout | 1);
	}
	/* wait for the MC to settle */
	udelay(100);

	/* lock double buffered regs */
	for (i = 0; i < rdev->num_crtc; i++) {
		if (save->crtc_enabled[i]) {
			tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
			if (!(tmp & EVERGREEN_GRPH_UPDATE_LOCK)) {
				tmp |= EVERGREEN_GRPH_UPDATE_LOCK;
				WREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i], tmp);
			}
			tmp = RREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i]);
			if (!(tmp & 1)) {
				tmp |= 1;
				WREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i], tmp);
			}
		}
	}
}
2827 
/**
 * evergreen_mc_resume - restore display access after an MC reprogram
 * @rdev: radeon_device pointer
 * @save: state recorded by a prior evergreen_mc_stop() call
 *
 * Points all CRTC scanout surfaces at the (possibly relocated) start of
 * VRAM, unlocks the double-buffered registers and waits for the pending
 * surface update to complete, takes the MC out of blackout, re-enables
 * CPU framebuffer access, unblanks the CRTCs that were enabled before,
 * and restores the saved VGA controls.
 */
void evergreen_mc_resume(struct radeon_device *rdev, struct evergreen_mc_save *save)
{
	u32 tmp, frame_count;
	int i, j;

	/* update crtc base addresses */
	for (i = 0; i < rdev->num_crtc; i++) {
		WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + crtc_offsets[i],
		       upper_32_bits(rdev->mc.vram_start));
		WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + crtc_offsets[i],
		       upper_32_bits(rdev->mc.vram_start));
		WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + crtc_offsets[i],
		       (u32)rdev->mc.vram_start);
		WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + crtc_offsets[i],
		       (u32)rdev->mc.vram_start);
	}

	if (!ASIC_IS_NODCE(rdev)) {
		/* point the VGA aperture at the start of VRAM as well */
		WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS_HIGH, upper_32_bits(rdev->mc.vram_start));
		WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS, (u32)rdev->mc.vram_start);
	}

	/* unlock regs and wait for update */
	for (i = 0; i < rdev->num_crtc; i++) {
		if (save->crtc_enabled[i]) {
			tmp = RREG32(EVERGREEN_MASTER_UPDATE_MODE + crtc_offsets[i]);
			if ((tmp & 0x7) != 0) {
				tmp &= ~0x7;
				WREG32(EVERGREEN_MASTER_UPDATE_MODE + crtc_offsets[i], tmp);
			}
			tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
			if (tmp & EVERGREEN_GRPH_UPDATE_LOCK) {
				tmp &= ~EVERGREEN_GRPH_UPDATE_LOCK;
				WREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i], tmp);
			}
			tmp = RREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i]);
			if (tmp & 1) {
				tmp &= ~1;
				WREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i], tmp);
			}
			/* poll until the surface address update has latched */
			for (j = 0; j < rdev->usec_timeout; j++) {
				tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
				if ((tmp & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING) == 0)
					break;
				udelay(1);
			}
		}
	}

	/* unblackout the MC */
	tmp = RREG32(MC_SHARED_BLACKOUT_CNTL);
	tmp &= ~BLACKOUT_MODE_MASK;
	WREG32(MC_SHARED_BLACKOUT_CNTL, tmp);
	/* allow CPU access */
	WREG32(BIF_FB_EN, FB_READ_EN | FB_WRITE_EN);

	for (i = 0; i < rdev->num_crtc; i++) {
		if (save->crtc_enabled[i]) {
			if (ASIC_IS_DCE6(rdev)) {
				/* DCE6: unblank via CRTC_BLANK_CONTROL under the update lock */
				tmp = RREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i]);
				tmp &= ~EVERGREEN_CRTC_BLANK_DATA_EN;
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
				WREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i], tmp);
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
			} else {
				/* pre-DCE6: re-enable display read requests */
				tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
				tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
				WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
			}
			/* wait for the next frame so the unblank takes effect */
			frame_count = radeon_get_vblank_counter(rdev, i);
			for (j = 0; j < rdev->usec_timeout; j++) {
				if (radeon_get_vblank_counter(rdev, i) != frame_count)
					break;
				udelay(1);
			}
		}
	}
	if (!ASIC_IS_NODCE(rdev)) {
		/* Unlock vga access */
		WREG32(VGA_HDP_CONTROL, save->vga_hdp_control);
		mdelay(1);
		WREG32(VGA_RENDER_CONTROL, save->vga_render_control);
	}
}
2915 
/**
 * evergreen_mc_program - program the memory controller's address map
 * @rdev: radeon_device pointer
 *
 * Programs the system/AGP apertures and the framebuffer location into
 * the MC.  Display access is stopped around the reprogramming via
 * evergreen_mc_stop()/evergreen_mc_resume() since changing the FB
 * location while clients are fetching would scan out garbage.
 */
void evergreen_mc_program(struct radeon_device *rdev)
{
	struct evergreen_mc_save save;
	u32 tmp;
	int i, j;

	/* Initialize HDP */
	/* NOTE(review): 0x2c14..0x2c24 stride 0x18 look like per-surface HDP
	 * registers being cleared to a known state — raw offsets, no symbolic
	 * names available here. */
	for (i = 0, j = 0; i < 32; i++, j += 0x18) {
		WREG32((0x2c14 + j), 0x00000000);
		WREG32((0x2c18 + j), 0x00000000);
		WREG32((0x2c1c + j), 0x00000000);
		WREG32((0x2c20 + j), 0x00000000);
		WREG32((0x2c24 + j), 0x00000000);
	}
	WREG32(HDP_REG_COHERENCY_FLUSH_CNTL, 0);

	evergreen_mc_stop(rdev, &save);
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
	}
	/* Lockout access through VGA aperture*/
	WREG32(VGA_HDP_CONTROL, VGA_MEMORY_DISABLE);
	/* Update configuration */
	if (rdev->flags & RADEON_IS_AGP) {
		/* system aperture must cover both VRAM and the AGP window;
		 * pick bounds based on which one comes first */
		if (rdev->mc.vram_start < rdev->mc.gtt_start) {
			/* VRAM before AGP */
			WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
				rdev->mc.vram_start >> 12);
			WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
				rdev->mc.gtt_end >> 12);
		} else {
			/* VRAM after AGP */
			WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
				rdev->mc.gtt_start >> 12);
			WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
				rdev->mc.vram_end >> 12);
		}
	} else {
		WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
			rdev->mc.vram_start >> 12);
		WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
			rdev->mc.vram_end >> 12);
	}
	/* default page fetches go to the VRAM scratch page */
	WREG32(MC_VM_SYSTEM_APERTURE_DEFAULT_ADDR, rdev->vram_scratch.gpu_addr >> 12);
	/* llano/ontario only */
	if ((rdev->family == CHIP_PALM) ||
	    (rdev->family == CHIP_SUMO) ||
	    (rdev->family == CHIP_SUMO2)) {
		tmp = RREG32(MC_FUS_VM_FB_OFFSET) & 0x000FFFFF;
		tmp |= ((rdev->mc.vram_end >> 20) & 0xF) << 24;
		tmp |= ((rdev->mc.vram_start >> 20) & 0xF) << 20;
		WREG32(MC_FUS_VM_FB_OFFSET, tmp);
	}
	/* FB location: top 16 bits = end >> 24, bottom 16 bits = start >> 24 */
	tmp = ((rdev->mc.vram_end >> 24) & 0xFFFF) << 16;
	tmp |= ((rdev->mc.vram_start >> 24) & 0xFFFF);
	WREG32(MC_VM_FB_LOCATION, tmp);
	WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8));
	WREG32(HDP_NONSURFACE_INFO, (2 << 7) | (1 << 30));
	WREG32(HDP_NONSURFACE_SIZE, 0x3FFFFFFF);
	if (rdev->flags & RADEON_IS_AGP) {
		WREG32(MC_VM_AGP_TOP, rdev->mc.gtt_end >> 16);
		WREG32(MC_VM_AGP_BOT, rdev->mc.gtt_start >> 16);
		WREG32(MC_VM_AGP_BASE, rdev->mc.agp_base >> 22);
	} else {
		/* no AGP: program an empty aperture (BOT > TOP) */
		WREG32(MC_VM_AGP_BASE, 0);
		WREG32(MC_VM_AGP_TOP, 0x0FFFFFFF);
		WREG32(MC_VM_AGP_BOT, 0x0FFFFFFF);
	}
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
	}
	evergreen_mc_resume(rdev, &save);
	/* we need to own VRAM, so turn off the VGA renderer here
	 * to stop it overwriting our objects */
	rv515_vga_render_disable(rdev);
}
2992 
2993 /*
2994  * CP.
2995  */
/**
 * evergreen_ring_ib_execute - emit an indirect buffer on the GFX ring
 * @rdev: radeon_device pointer
 * @ib: indirect buffer to schedule
 *
 * Writes the PM4 packets that switch the CP to DX10/11 mode, update the
 * ring's saved read pointer (via a config register or a memory write,
 * depending on configuration), and then chain to the IB via
 * PACKET3_INDIRECT_BUFFER.  The exact word order here is the command
 * stream format and must not be altered.
 */
void evergreen_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib)
{
	struct radeon_ring *ring = &rdev->ring[ib->ring];
	u32 next_rptr;

	/* set to DX10/11 mode */
	radeon_ring_write(ring, PACKET3(PACKET3_MODE_CONTROL, 0));
	radeon_ring_write(ring, 1);

	if (ring->rptr_save_reg) {
		/* next_rptr points just past this 3-dword packet + the
		 * 4-dword INDIRECT_BUFFER packet emitted below */
		next_rptr = ring->wptr + 3 + 4;
		radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
		radeon_ring_write(ring, ((ring->rptr_save_reg -
					  PACKET3_SET_CONFIG_REG_START) >> 2));
		radeon_ring_write(ring, next_rptr);
	} else if (rdev->wb.enabled) {
		/* no save register: write next_rptr to the writeback slot
		 * in memory instead (5-dword MEM_WRITE packet + 4) */
		next_rptr = ring->wptr + 5 + 4;
		radeon_ring_write(ring, PACKET3(PACKET3_MEM_WRITE, 3));
		radeon_ring_write(ring, ring->next_rptr_gpu_addr & 0xfffffffc);
		/* high address bits plus a control flag in bit 18 —
		 * presumably 32-bit data select; matches other radeon
		 * MEM_WRITE users */
		radeon_ring_write(ring, (upper_32_bits(ring->next_rptr_gpu_addr) & 0xff) | (1 << 18));
		radeon_ring_write(ring, next_rptr);
		radeon_ring_write(ring, 0);
	}

	radeon_ring_write(ring, PACKET3(PACKET3_INDIRECT_BUFFER, 2));
	radeon_ring_write(ring,
#ifdef __BIG_ENDIAN
			  (2 << 0) |
#endif
			  (ib->gpu_addr & 0xFFFFFFFC));
	radeon_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFF);
	radeon_ring_write(ring, ib->length_dw);
}
3029 
3030 
/**
 * evergreen_cp_load_microcode - upload PFP and ME microcode to the CP
 * @rdev: radeon_device pointer
 *
 * Stops the CP, then streams the pre-fetch parser (PFP) and micro
 * engine (ME) firmware images word by word into their ucode RAMs.
 * The firmware blobs are stored big-endian, hence be32_to_cpup().
 *
 * Returns 0 on success, -EINVAL if the firmware was never loaded.
 */
static int evergreen_cp_load_microcode(struct radeon_device *rdev)
{
	const __be32 *fw_data;
	int i;

	if (!rdev->me_fw || !rdev->pfp_fw)
		return -EINVAL;

	r700_cp_stop(rdev);
	/* park the ring buffer: no rptr updates while ucode is loading */
	WREG32(CP_RB_CNTL,
#ifdef __BIG_ENDIAN
	       BUF_SWAP_32BIT |
#endif
	       RB_NO_UPDATE | RB_BLKSZ(15) | RB_BUFSZ(3));

	/* upload PFP ucode, then reset its write address */
	fw_data = (const __be32 *)rdev->pfp_fw->data;
	WREG32(CP_PFP_UCODE_ADDR, 0);
	for (i = 0; i < EVERGREEN_PFP_UCODE_SIZE; i++)
		WREG32(CP_PFP_UCODE_DATA, be32_to_cpup(fw_data++));
	WREG32(CP_PFP_UCODE_ADDR, 0);

	/* upload ME ucode */
	fw_data = (const __be32 *)rdev->me_fw->data;
	WREG32(CP_ME_RAM_WADDR, 0);
	for (i = 0; i < EVERGREEN_PM4_UCODE_SIZE; i++)
		WREG32(CP_ME_RAM_DATA, be32_to_cpup(fw_data++));

	/* leave all ucode address registers at zero */
	WREG32(CP_PFP_UCODE_ADDR, 0);
	WREG32(CP_ME_RAM_WADDR, 0);
	WREG32(CP_ME_RAM_RADDR, 0);
	return 0;
}
3062 
/**
 * evergreen_cp_start - initialize the micro engine and emit initial state
 * @rdev: radeon_device pointer
 *
 * Sends the PACKET3_ME_INITIALIZE handshake, un-halts the ME, then
 * emits the golden clear-state context and a handful of default
 * register values on the GFX ring.  The raw dwords below are PM4
 * packet encodings; their order and count are part of the CP protocol.
 *
 * Returns 0 on success or the radeon_ring_lock() error code.
 */
static int evergreen_cp_start(struct radeon_device *rdev)
{
	struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
	int r, i;
	uint32_t cp_me;

	/* 7 dwords: ME_INITIALIZE header + 6 payload words */
	r = radeon_ring_lock(rdev, ring, 7);
	if (r) {
		DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
		return r;
	}
	radeon_ring_write(ring, PACKET3(PACKET3_ME_INITIALIZE, 5));
	radeon_ring_write(ring, 0x1);
	radeon_ring_write(ring, 0x0);
	radeon_ring_write(ring, rdev->config.evergreen.max_hw_contexts - 1);
	radeon_ring_write(ring, PACKET3_ME_INITIALIZE_DEVICE_ID(1));
	radeon_ring_write(ring, 0);
	radeon_ring_write(ring, 0);
	radeon_ring_unlock_commit(rdev, ring, false);

	/* un-halt the micro engine */
	cp_me = 0xff;
	WREG32(CP_ME_CNTL, cp_me);

	/* golden state table plus 19 dwords of packets emitted below */
	r = radeon_ring_lock(rdev, ring, evergreen_default_size + 19);
	if (r) {
		DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
		return r;
	}

	/* setup clear context state */
	radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
	radeon_ring_write(ring, PACKET3_PREAMBLE_BEGIN_CLEAR_STATE);

	for (i = 0; i < evergreen_default_size; i++)
		radeon_ring_write(ring, evergreen_default_state[i]);

	radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
	radeon_ring_write(ring, PACKET3_PREAMBLE_END_CLEAR_STATE);

	/* set clear context state */
	radeon_ring_write(ring, PACKET3(PACKET3_CLEAR_STATE, 0));
	radeon_ring_write(ring, 0);

	/* SQ_VTX_BASE_VTX_LOC */
	radeon_ring_write(ring, 0xc0026f00);
	radeon_ring_write(ring, 0x00000000);
	radeon_ring_write(ring, 0x00000000);
	radeon_ring_write(ring, 0x00000000);

	/* Clear consts */
	radeon_ring_write(ring, 0xc0036f00);
	radeon_ring_write(ring, 0x00000bc4);
	radeon_ring_write(ring, 0xffffffff);
	radeon_ring_write(ring, 0xffffffff);
	radeon_ring_write(ring, 0xffffffff);

	radeon_ring_write(ring, 0xc0026900);
	radeon_ring_write(ring, 0x00000316);
	radeon_ring_write(ring, 0x0000000e); /* VGT_VERTEX_REUSE_BLOCK_CNTL */
	radeon_ring_write(ring, 0x00000010); /*  */

	radeon_ring_unlock_commit(rdev, ring, false);

	return 0;
}
3128 
/**
 * evergreen_cp_resume - bring up the command processor ring buffer
 * @rdev: radeon_device pointer
 *
 * Soft-resets the CP and its dependent blocks, programs the ring buffer
 * size, read/write pointers, writeback addresses and base, then starts
 * the CP via evergreen_cp_start() and verifies it with a ring test.
 *
 * Returns 0 on success or the radeon_ring_test() error code.
 */
static int evergreen_cp_resume(struct radeon_device *rdev)
{
	struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
	u32 tmp;
	u32 rb_bufsz;
	int r;

	/* Reset cp; if cp is reset, then PA, SH, VGT also need to be reset */
	WREG32(GRBM_SOFT_RESET, (SOFT_RESET_CP |
				 SOFT_RESET_PA |
				 SOFT_RESET_SH |
				 SOFT_RESET_VGT |
				 SOFT_RESET_SPI |
				 SOFT_RESET_SX));
	/* read back to post the write, then let the reset settle */
	RREG32(GRBM_SOFT_RESET);
	mdelay(15);
	WREG32(GRBM_SOFT_RESET, 0);
	RREG32(GRBM_SOFT_RESET);

	/* Set ring buffer size (log2 of the size in dwords) */
	rb_bufsz = order_base_2(ring->ring_size / 8);
	tmp = (order_base_2(RADEON_GPU_PAGE_SIZE/8) << 8) | rb_bufsz;
#ifdef __BIG_ENDIAN
	tmp |= BUF_SWAP_32BIT;
#endif
	WREG32(CP_RB_CNTL, tmp);
	WREG32(CP_SEM_WAIT_TIMER, 0x0);
	WREG32(CP_SEM_INCOMPLETE_TIMER_CNTL, 0x0);

	/* Set the write pointer delay */
	WREG32(CP_RB_WPTR_DELAY, 0);

	/* Initialize the ring buffer's read and write pointers */
	WREG32(CP_RB_CNTL, tmp | RB_RPTR_WR_ENA);
	WREG32(CP_RB_RPTR_WR, 0);
	ring->wptr = 0;
	WREG32(CP_RB_WPTR, ring->wptr);

	/* set the wb address whether it's enabled or not */
	WREG32(CP_RB_RPTR_ADDR,
	       ((rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFFFFFFFC));
	WREG32(CP_RB_RPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFF);
	WREG32(SCRATCH_ADDR, ((rdev->wb.gpu_addr + RADEON_WB_SCRATCH_OFFSET) >> 8) & 0xFFFFFFFF);

	if (rdev->wb.enabled)
		WREG32(SCRATCH_UMSK, 0xff);
	else {
		/* no writeback: keep the CP from updating rptr in memory */
		tmp |= RB_NO_UPDATE;
		WREG32(SCRATCH_UMSK, 0);
	}

	/* drop RB_RPTR_WR_ENA by rewriting the final CNTL value */
	mdelay(1);
	WREG32(CP_RB_CNTL, tmp);

	WREG32(CP_RB_BASE, ring->gpu_addr >> 8);
	WREG32(CP_DEBUG, (1 << 27) | (1 << 28));

	evergreen_cp_start(rdev);
	ring->ready = true;
	r = radeon_ring_test(rdev, RADEON_RING_TYPE_GFX_INDEX, ring);
	if (r) {
		ring->ready = false;
		return r;
	}
	return 0;
}
3195 
3196 /*
3197  * Core functions
3198  */
3199 static void evergreen_gpu_init(struct radeon_device *rdev)
3200 {
3201 	u32 gb_addr_config;
3202 	u32 mc_shared_chmap, mc_arb_ramcfg;
3203 	u32 sx_debug_1;
3204 	u32 smx_dc_ctl0;
3205 	u32 sq_config;
3206 	u32 sq_lds_resource_mgmt;
3207 	u32 sq_gpr_resource_mgmt_1;
3208 	u32 sq_gpr_resource_mgmt_2;
3209 	u32 sq_gpr_resource_mgmt_3;
3210 	u32 sq_thread_resource_mgmt;
3211 	u32 sq_thread_resource_mgmt_2;
3212 	u32 sq_stack_resource_mgmt_1;
3213 	u32 sq_stack_resource_mgmt_2;
3214 	u32 sq_stack_resource_mgmt_3;
3215 	u32 vgt_cache_invalidation;
3216 	u32 hdp_host_path_cntl, tmp;
3217 	u32 disabled_rb_mask;
3218 	int i, j, ps_thread_count;
3219 
3220 	switch (rdev->family) {
3221 	case CHIP_CYPRESS:
3222 	case CHIP_HEMLOCK:
3223 		rdev->config.evergreen.num_ses = 2;
3224 		rdev->config.evergreen.max_pipes = 4;
3225 		rdev->config.evergreen.max_tile_pipes = 8;
3226 		rdev->config.evergreen.max_simds = 10;
3227 		rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
3228 		rdev->config.evergreen.max_gprs = 256;
3229 		rdev->config.evergreen.max_threads = 248;
3230 		rdev->config.evergreen.max_gs_threads = 32;
3231 		rdev->config.evergreen.max_stack_entries = 512;
3232 		rdev->config.evergreen.sx_num_of_sets = 4;
3233 		rdev->config.evergreen.sx_max_export_size = 256;
3234 		rdev->config.evergreen.sx_max_export_pos_size = 64;
3235 		rdev->config.evergreen.sx_max_export_smx_size = 192;
3236 		rdev->config.evergreen.max_hw_contexts = 8;
3237 		rdev->config.evergreen.sq_num_cf_insts = 2;
3238 
3239 		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3240 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3241 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3242 		gb_addr_config = CYPRESS_GB_ADDR_CONFIG_GOLDEN;
3243 		break;
3244 	case CHIP_JUNIPER:
3245 		rdev->config.evergreen.num_ses = 1;
3246 		rdev->config.evergreen.max_pipes = 4;
3247 		rdev->config.evergreen.max_tile_pipes = 4;
3248 		rdev->config.evergreen.max_simds = 10;
3249 		rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
3250 		rdev->config.evergreen.max_gprs = 256;
3251 		rdev->config.evergreen.max_threads = 248;
3252 		rdev->config.evergreen.max_gs_threads = 32;
3253 		rdev->config.evergreen.max_stack_entries = 512;
3254 		rdev->config.evergreen.sx_num_of_sets = 4;
3255 		rdev->config.evergreen.sx_max_export_size = 256;
3256 		rdev->config.evergreen.sx_max_export_pos_size = 64;
3257 		rdev->config.evergreen.sx_max_export_smx_size = 192;
3258 		rdev->config.evergreen.max_hw_contexts = 8;
3259 		rdev->config.evergreen.sq_num_cf_insts = 2;
3260 
3261 		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3262 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3263 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3264 		gb_addr_config = JUNIPER_GB_ADDR_CONFIG_GOLDEN;
3265 		break;
3266 	case CHIP_REDWOOD:
3267 		rdev->config.evergreen.num_ses = 1;
3268 		rdev->config.evergreen.max_pipes = 4;
3269 		rdev->config.evergreen.max_tile_pipes = 4;
3270 		rdev->config.evergreen.max_simds = 5;
3271 		rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3272 		rdev->config.evergreen.max_gprs = 256;
3273 		rdev->config.evergreen.max_threads = 248;
3274 		rdev->config.evergreen.max_gs_threads = 32;
3275 		rdev->config.evergreen.max_stack_entries = 256;
3276 		rdev->config.evergreen.sx_num_of_sets = 4;
3277 		rdev->config.evergreen.sx_max_export_size = 256;
3278 		rdev->config.evergreen.sx_max_export_pos_size = 64;
3279 		rdev->config.evergreen.sx_max_export_smx_size = 192;
3280 		rdev->config.evergreen.max_hw_contexts = 8;
3281 		rdev->config.evergreen.sq_num_cf_insts = 2;
3282 
3283 		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3284 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3285 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3286 		gb_addr_config = REDWOOD_GB_ADDR_CONFIG_GOLDEN;
3287 		break;
3288 	case CHIP_CEDAR:
3289 	default:
3290 		rdev->config.evergreen.num_ses = 1;
3291 		rdev->config.evergreen.max_pipes = 2;
3292 		rdev->config.evergreen.max_tile_pipes = 2;
3293 		rdev->config.evergreen.max_simds = 2;
3294 		rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3295 		rdev->config.evergreen.max_gprs = 256;
3296 		rdev->config.evergreen.max_threads = 192;
3297 		rdev->config.evergreen.max_gs_threads = 16;
3298 		rdev->config.evergreen.max_stack_entries = 256;
3299 		rdev->config.evergreen.sx_num_of_sets = 4;
3300 		rdev->config.evergreen.sx_max_export_size = 128;
3301 		rdev->config.evergreen.sx_max_export_pos_size = 32;
3302 		rdev->config.evergreen.sx_max_export_smx_size = 96;
3303 		rdev->config.evergreen.max_hw_contexts = 4;
3304 		rdev->config.evergreen.sq_num_cf_insts = 1;
3305 
3306 		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3307 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3308 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3309 		gb_addr_config = CEDAR_GB_ADDR_CONFIG_GOLDEN;
3310 		break;
3311 	case CHIP_PALM:
3312 		rdev->config.evergreen.num_ses = 1;
3313 		rdev->config.evergreen.max_pipes = 2;
3314 		rdev->config.evergreen.max_tile_pipes = 2;
3315 		rdev->config.evergreen.max_simds = 2;
3316 		rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3317 		rdev->config.evergreen.max_gprs = 256;
3318 		rdev->config.evergreen.max_threads = 192;
3319 		rdev->config.evergreen.max_gs_threads = 16;
3320 		rdev->config.evergreen.max_stack_entries = 256;
3321 		rdev->config.evergreen.sx_num_of_sets = 4;
3322 		rdev->config.evergreen.sx_max_export_size = 128;
3323 		rdev->config.evergreen.sx_max_export_pos_size = 32;
3324 		rdev->config.evergreen.sx_max_export_smx_size = 96;
3325 		rdev->config.evergreen.max_hw_contexts = 4;
3326 		rdev->config.evergreen.sq_num_cf_insts = 1;
3327 
3328 		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3329 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3330 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3331 		gb_addr_config = CEDAR_GB_ADDR_CONFIG_GOLDEN;
3332 		break;
3333 	case CHIP_SUMO:
3334 		rdev->config.evergreen.num_ses = 1;
3335 		rdev->config.evergreen.max_pipes = 4;
3336 		rdev->config.evergreen.max_tile_pipes = 4;
3337 		if (rdev->pdev->device == 0x9648)
3338 			rdev->config.evergreen.max_simds = 3;
3339 		else if ((rdev->pdev->device == 0x9647) ||
3340 			 (rdev->pdev->device == 0x964a))
3341 			rdev->config.evergreen.max_simds = 4;
3342 		else
3343 			rdev->config.evergreen.max_simds = 5;
3344 		rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3345 		rdev->config.evergreen.max_gprs = 256;
3346 		rdev->config.evergreen.max_threads = 248;
3347 		rdev->config.evergreen.max_gs_threads = 32;
3348 		rdev->config.evergreen.max_stack_entries = 256;
3349 		rdev->config.evergreen.sx_num_of_sets = 4;
3350 		rdev->config.evergreen.sx_max_export_size = 256;
3351 		rdev->config.evergreen.sx_max_export_pos_size = 64;
3352 		rdev->config.evergreen.sx_max_export_smx_size = 192;
3353 		rdev->config.evergreen.max_hw_contexts = 8;
3354 		rdev->config.evergreen.sq_num_cf_insts = 2;
3355 
3356 		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3357 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3358 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3359 		gb_addr_config = SUMO_GB_ADDR_CONFIG_GOLDEN;
3360 		break;
3361 	case CHIP_SUMO2:
3362 		rdev->config.evergreen.num_ses = 1;
3363 		rdev->config.evergreen.max_pipes = 4;
3364 		rdev->config.evergreen.max_tile_pipes = 4;
3365 		rdev->config.evergreen.max_simds = 2;
3366 		rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3367 		rdev->config.evergreen.max_gprs = 256;
3368 		rdev->config.evergreen.max_threads = 248;
3369 		rdev->config.evergreen.max_gs_threads = 32;
3370 		rdev->config.evergreen.max_stack_entries = 512;
3371 		rdev->config.evergreen.sx_num_of_sets = 4;
3372 		rdev->config.evergreen.sx_max_export_size = 256;
3373 		rdev->config.evergreen.sx_max_export_pos_size = 64;
3374 		rdev->config.evergreen.sx_max_export_smx_size = 192;
3375 		rdev->config.evergreen.max_hw_contexts = 4;
3376 		rdev->config.evergreen.sq_num_cf_insts = 2;
3377 
3378 		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3379 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3380 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3381 		gb_addr_config = SUMO2_GB_ADDR_CONFIG_GOLDEN;
3382 		break;
3383 	case CHIP_BARTS:
3384 		rdev->config.evergreen.num_ses = 2;
3385 		rdev->config.evergreen.max_pipes = 4;
3386 		rdev->config.evergreen.max_tile_pipes = 8;
3387 		rdev->config.evergreen.max_simds = 7;
3388 		rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
3389 		rdev->config.evergreen.max_gprs = 256;
3390 		rdev->config.evergreen.max_threads = 248;
3391 		rdev->config.evergreen.max_gs_threads = 32;
3392 		rdev->config.evergreen.max_stack_entries = 512;
3393 		rdev->config.evergreen.sx_num_of_sets = 4;
3394 		rdev->config.evergreen.sx_max_export_size = 256;
3395 		rdev->config.evergreen.sx_max_export_pos_size = 64;
3396 		rdev->config.evergreen.sx_max_export_smx_size = 192;
3397 		rdev->config.evergreen.max_hw_contexts = 8;
3398 		rdev->config.evergreen.sq_num_cf_insts = 2;
3399 
3400 		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3401 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3402 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3403 		gb_addr_config = BARTS_GB_ADDR_CONFIG_GOLDEN;
3404 		break;
3405 	case CHIP_TURKS:
3406 		rdev->config.evergreen.num_ses = 1;
3407 		rdev->config.evergreen.max_pipes = 4;
3408 		rdev->config.evergreen.max_tile_pipes = 4;
3409 		rdev->config.evergreen.max_simds = 6;
3410 		rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3411 		rdev->config.evergreen.max_gprs = 256;
3412 		rdev->config.evergreen.max_threads = 248;
3413 		rdev->config.evergreen.max_gs_threads = 32;
3414 		rdev->config.evergreen.max_stack_entries = 256;
3415 		rdev->config.evergreen.sx_num_of_sets = 4;
3416 		rdev->config.evergreen.sx_max_export_size = 256;
3417 		rdev->config.evergreen.sx_max_export_pos_size = 64;
3418 		rdev->config.evergreen.sx_max_export_smx_size = 192;
3419 		rdev->config.evergreen.max_hw_contexts = 8;
3420 		rdev->config.evergreen.sq_num_cf_insts = 2;
3421 
3422 		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3423 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3424 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3425 		gb_addr_config = TURKS_GB_ADDR_CONFIG_GOLDEN;
3426 		break;
3427 	case CHIP_CAICOS:
3428 		rdev->config.evergreen.num_ses = 1;
3429 		rdev->config.evergreen.max_pipes = 2;
3430 		rdev->config.evergreen.max_tile_pipes = 2;
3431 		rdev->config.evergreen.max_simds = 2;
3432 		rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3433 		rdev->config.evergreen.max_gprs = 256;
3434 		rdev->config.evergreen.max_threads = 192;
3435 		rdev->config.evergreen.max_gs_threads = 16;
3436 		rdev->config.evergreen.max_stack_entries = 256;
3437 		rdev->config.evergreen.sx_num_of_sets = 4;
3438 		rdev->config.evergreen.sx_max_export_size = 128;
3439 		rdev->config.evergreen.sx_max_export_pos_size = 32;
3440 		rdev->config.evergreen.sx_max_export_smx_size = 96;
3441 		rdev->config.evergreen.max_hw_contexts = 4;
3442 		rdev->config.evergreen.sq_num_cf_insts = 1;
3443 
3444 		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3445 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3446 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3447 		gb_addr_config = CAICOS_GB_ADDR_CONFIG_GOLDEN;
3448 		break;
3449 	}
3450 
3451 	/* Initialize HDP */
3452 	for (i = 0, j = 0; i < 32; i++, j += 0x18) {
3453 		WREG32((0x2c14 + j), 0x00000000);
3454 		WREG32((0x2c18 + j), 0x00000000);
3455 		WREG32((0x2c1c + j), 0x00000000);
3456 		WREG32((0x2c20 + j), 0x00000000);
3457 		WREG32((0x2c24 + j), 0x00000000);
3458 	}
3459 
3460 	WREG32(GRBM_CNTL, GRBM_READ_TIMEOUT(0xff));
3461 	WREG32(SRBM_INT_CNTL, 0x1);
3462 	WREG32(SRBM_INT_ACK, 0x1);
3463 
3464 	evergreen_fix_pci_max_read_req_size(rdev);
3465 
3466 	mc_shared_chmap = RREG32(MC_SHARED_CHMAP);
3467 	if ((rdev->family == CHIP_PALM) ||
3468 	    (rdev->family == CHIP_SUMO) ||
3469 	    (rdev->family == CHIP_SUMO2))
3470 		mc_arb_ramcfg = RREG32(FUS_MC_ARB_RAMCFG);
3471 	else
3472 		mc_arb_ramcfg = RREG32(MC_ARB_RAMCFG);
3473 
3474 	/* setup tiling info dword.  gb_addr_config is not adequate since it does
3475 	 * not have bank info, so create a custom tiling dword.
3476 	 * bits 3:0   num_pipes
3477 	 * bits 7:4   num_banks
3478 	 * bits 11:8  group_size
3479 	 * bits 15:12 row_size
3480 	 */
3481 	rdev->config.evergreen.tile_config = 0;
3482 	switch (rdev->config.evergreen.max_tile_pipes) {
3483 	case 1:
3484 	default:
3485 		rdev->config.evergreen.tile_config |= (0 << 0);
3486 		break;
3487 	case 2:
3488 		rdev->config.evergreen.tile_config |= (1 << 0);
3489 		break;
3490 	case 4:
3491 		rdev->config.evergreen.tile_config |= (2 << 0);
3492 		break;
3493 	case 8:
3494 		rdev->config.evergreen.tile_config |= (3 << 0);
3495 		break;
3496 	}
3497 	/* num banks is 8 on all fusion asics. 0 = 4, 1 = 8, 2 = 16 */
3498 	if (rdev->flags & RADEON_IS_IGP)
3499 		rdev->config.evergreen.tile_config |= 1 << 4;
3500 	else {
3501 		switch ((mc_arb_ramcfg & NOOFBANK_MASK) >> NOOFBANK_SHIFT) {
3502 		case 0: /* four banks */
3503 			rdev->config.evergreen.tile_config |= 0 << 4;
3504 			break;
3505 		case 1: /* eight banks */
3506 			rdev->config.evergreen.tile_config |= 1 << 4;
3507 			break;
3508 		case 2: /* sixteen banks */
3509 		default:
3510 			rdev->config.evergreen.tile_config |= 2 << 4;
3511 			break;
3512 		}
3513 	}
3514 	rdev->config.evergreen.tile_config |= 0 << 8;
3515 	rdev->config.evergreen.tile_config |=
3516 		((gb_addr_config & 0x30000000) >> 28) << 12;
3517 
3518 	if ((rdev->family >= CHIP_CEDAR) && (rdev->family <= CHIP_HEMLOCK)) {
3519 		u32 efuse_straps_4;
3520 		u32 efuse_straps_3;
3521 
3522 		efuse_straps_4 = RREG32_RCU(0x204);
3523 		efuse_straps_3 = RREG32_RCU(0x203);
3524 		tmp = (((efuse_straps_4 & 0xf) << 4) |
3525 		      ((efuse_straps_3 & 0xf0000000) >> 28));
3526 	} else {
3527 		tmp = 0;
3528 		for (i = (rdev->config.evergreen.num_ses - 1); i >= 0; i--) {
3529 			u32 rb_disable_bitmap;
3530 
3531 			WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3532 			WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3533 			rb_disable_bitmap = (RREG32(CC_RB_BACKEND_DISABLE) & 0x00ff0000) >> 16;
3534 			tmp <<= 4;
3535 			tmp |= rb_disable_bitmap;
3536 		}
3537 	}
3538 	/* enabled rb are just the one not disabled :) */
3539 	disabled_rb_mask = tmp;
3540 	tmp = 0;
3541 	for (i = 0; i < rdev->config.evergreen.max_backends; i++)
3542 		tmp |= (1 << i);
3543 	/* if all the backends are disabled, fix it up here */
3544 	if ((disabled_rb_mask & tmp) == tmp) {
3545 		for (i = 0; i < rdev->config.evergreen.max_backends; i++)
3546 			disabled_rb_mask &= ~(1 << i);
3547 	}
3548 
3549 	for (i = 0; i < rdev->config.evergreen.num_ses; i++) {
3550 		u32 simd_disable_bitmap;
3551 
3552 		WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3553 		WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3554 		simd_disable_bitmap = (RREG32(CC_GC_SHADER_PIPE_CONFIG) & 0xffff0000) >> 16;
3555 		simd_disable_bitmap |= 0xffffffff << rdev->config.evergreen.max_simds;
3556 		tmp <<= 16;
3557 		tmp |= simd_disable_bitmap;
3558 	}
3559 	rdev->config.evergreen.active_simds = hweight32(~tmp);
3560 
3561 	WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_BROADCAST_WRITES);
3562 	WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_BROADCAST_WRITES);
3563 
3564 	WREG32(GB_ADDR_CONFIG, gb_addr_config);
3565 	WREG32(DMIF_ADDR_CONFIG, gb_addr_config);
3566 	WREG32(HDP_ADDR_CONFIG, gb_addr_config);
3567 	WREG32(DMA_TILING_CONFIG, gb_addr_config);
3568 	WREG32(UVD_UDEC_ADDR_CONFIG, gb_addr_config);
3569 	WREG32(UVD_UDEC_DB_ADDR_CONFIG, gb_addr_config);
3570 	WREG32(UVD_UDEC_DBW_ADDR_CONFIG, gb_addr_config);
3571 
3572 	if ((rdev->config.evergreen.max_backends == 1) &&
3573 	    (rdev->flags & RADEON_IS_IGP)) {
3574 		if ((disabled_rb_mask & 3) == 1) {
3575 			/* RB0 disabled, RB1 enabled */
3576 			tmp = 0x11111111;
3577 		} else {
3578 			/* RB1 disabled, RB0 enabled */
3579 			tmp = 0x00000000;
3580 		}
3581 	} else {
3582 		tmp = gb_addr_config & NUM_PIPES_MASK;
3583 		tmp = r6xx_remap_render_backend(rdev, tmp, rdev->config.evergreen.max_backends,
3584 						EVERGREEN_MAX_BACKENDS, disabled_rb_mask);
3585 	}
3586 	WREG32(GB_BACKEND_MAP, tmp);
3587 
3588 	WREG32(CGTS_SYS_TCC_DISABLE, 0);
3589 	WREG32(CGTS_TCC_DISABLE, 0);
3590 	WREG32(CGTS_USER_SYS_TCC_DISABLE, 0);
3591 	WREG32(CGTS_USER_TCC_DISABLE, 0);
3592 
3593 	/* set HW defaults for 3D engine */
3594 	WREG32(CP_QUEUE_THRESHOLDS, (ROQ_IB1_START(0x16) |
3595 				     ROQ_IB2_START(0x2b)));
3596 
3597 	WREG32(CP_MEQ_THRESHOLDS, STQ_SPLIT(0x30));
3598 
3599 	WREG32(TA_CNTL_AUX, (DISABLE_CUBE_ANISO |
3600 			     SYNC_GRADIENT |
3601 			     SYNC_WALKER |
3602 			     SYNC_ALIGNER));
3603 
3604 	sx_debug_1 = RREG32(SX_DEBUG_1);
3605 	sx_debug_1 |= ENABLE_NEW_SMX_ADDRESS;
3606 	WREG32(SX_DEBUG_1, sx_debug_1);
3607 
3608 
3609 	smx_dc_ctl0 = RREG32(SMX_DC_CTL0);
3610 	smx_dc_ctl0 &= ~NUMBER_OF_SETS(0x1ff);
3611 	smx_dc_ctl0 |= NUMBER_OF_SETS(rdev->config.evergreen.sx_num_of_sets);
3612 	WREG32(SMX_DC_CTL0, smx_dc_ctl0);
3613 
3614 	if (rdev->family <= CHIP_SUMO2)
3615 		WREG32(SMX_SAR_CTL0, 0x00010000);
3616 
3617 	WREG32(SX_EXPORT_BUFFER_SIZES, (COLOR_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_size / 4) - 1) |
3618 					POSITION_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_pos_size / 4) - 1) |
3619 					SMX_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_smx_size / 4) - 1)));
3620 
3621 	WREG32(PA_SC_FIFO_SIZE, (SC_PRIM_FIFO_SIZE(rdev->config.evergreen.sc_prim_fifo_size) |
3622 				 SC_HIZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_hiz_tile_fifo_size) |
3623 				 SC_EARLYZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_earlyz_tile_fifo_size)));
3624 
3625 	WREG32(VGT_NUM_INSTANCES, 1);
3626 	WREG32(SPI_CONFIG_CNTL, 0);
3627 	WREG32(SPI_CONFIG_CNTL_1, VTX_DONE_DELAY(4));
3628 	WREG32(CP_PERFMON_CNTL, 0);
3629 
3630 	WREG32(SQ_MS_FIFO_SIZES, (CACHE_FIFO_SIZE(16 * rdev->config.evergreen.sq_num_cf_insts) |
3631 				  FETCH_FIFO_HIWATER(0x4) |
3632 				  DONE_FIFO_HIWATER(0xe0) |
3633 				  ALU_UPDATE_FIFO_HIWATER(0x8)));
3634 
3635 	sq_config = RREG32(SQ_CONFIG);
3636 	sq_config &= ~(PS_PRIO(3) |
3637 		       VS_PRIO(3) |
3638 		       GS_PRIO(3) |
3639 		       ES_PRIO(3));
3640 	sq_config |= (VC_ENABLE |
3641 		      EXPORT_SRC_C |
3642 		      PS_PRIO(0) |
3643 		      VS_PRIO(1) |
3644 		      GS_PRIO(2) |
3645 		      ES_PRIO(3));
3646 
3647 	switch (rdev->family) {
3648 	case CHIP_CEDAR:
3649 	case CHIP_PALM:
3650 	case CHIP_SUMO:
3651 	case CHIP_SUMO2:
3652 	case CHIP_CAICOS:
3653 		/* no vertex cache */
3654 		sq_config &= ~VC_ENABLE;
3655 		break;
3656 	default:
3657 		break;
3658 	}
3659 
3660 	sq_lds_resource_mgmt = RREG32(SQ_LDS_RESOURCE_MGMT);
3661 
3662 	sq_gpr_resource_mgmt_1 = NUM_PS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2))* 12 / 32);
3663 	sq_gpr_resource_mgmt_1 |= NUM_VS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 6 / 32);
3664 	sq_gpr_resource_mgmt_1 |= NUM_CLAUSE_TEMP_GPRS(4);
3665 	sq_gpr_resource_mgmt_2 = NUM_GS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
3666 	sq_gpr_resource_mgmt_2 |= NUM_ES_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
3667 	sq_gpr_resource_mgmt_3 = NUM_HS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
3668 	sq_gpr_resource_mgmt_3 |= NUM_LS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
3669 
3670 	switch (rdev->family) {
3671 	case CHIP_CEDAR:
3672 	case CHIP_PALM:
3673 	case CHIP_SUMO:
3674 	case CHIP_SUMO2:
3675 		ps_thread_count = 96;
3676 		break;
3677 	default:
3678 		ps_thread_count = 128;
3679 		break;
3680 	}
3681 
3682 	sq_thread_resource_mgmt = NUM_PS_THREADS(ps_thread_count);
3683 	sq_thread_resource_mgmt |= NUM_VS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3684 	sq_thread_resource_mgmt |= NUM_GS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3685 	sq_thread_resource_mgmt |= NUM_ES_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3686 	sq_thread_resource_mgmt_2 = NUM_HS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3687 	sq_thread_resource_mgmt_2 |= NUM_LS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3688 
3689 	sq_stack_resource_mgmt_1 = NUM_PS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3690 	sq_stack_resource_mgmt_1 |= NUM_VS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3691 	sq_stack_resource_mgmt_2 = NUM_GS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3692 	sq_stack_resource_mgmt_2 |= NUM_ES_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3693 	sq_stack_resource_mgmt_3 = NUM_HS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3694 	sq_stack_resource_mgmt_3 |= NUM_LS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3695 
3696 	WREG32(SQ_CONFIG, sq_config);
3697 	WREG32(SQ_GPR_RESOURCE_MGMT_1, sq_gpr_resource_mgmt_1);
3698 	WREG32(SQ_GPR_RESOURCE_MGMT_2, sq_gpr_resource_mgmt_2);
3699 	WREG32(SQ_GPR_RESOURCE_MGMT_3, sq_gpr_resource_mgmt_3);
3700 	WREG32(SQ_THREAD_RESOURCE_MGMT, sq_thread_resource_mgmt);
3701 	WREG32(SQ_THREAD_RESOURCE_MGMT_2, sq_thread_resource_mgmt_2);
3702 	WREG32(SQ_STACK_RESOURCE_MGMT_1, sq_stack_resource_mgmt_1);
3703 	WREG32(SQ_STACK_RESOURCE_MGMT_2, sq_stack_resource_mgmt_2);
3704 	WREG32(SQ_STACK_RESOURCE_MGMT_3, sq_stack_resource_mgmt_3);
3705 	WREG32(SQ_DYN_GPR_CNTL_PS_FLUSH_REQ, 0);
3706 	WREG32(SQ_LDS_RESOURCE_MGMT, sq_lds_resource_mgmt);
3707 
3708 	WREG32(PA_SC_FORCE_EOV_MAX_CNTS, (FORCE_EOV_MAX_CLK_CNT(4095) |
3709 					  FORCE_EOV_MAX_REZ_CNT(255)));
3710 
3711 	switch (rdev->family) {
3712 	case CHIP_CEDAR:
3713 	case CHIP_PALM:
3714 	case CHIP_SUMO:
3715 	case CHIP_SUMO2:
3716 	case CHIP_CAICOS:
3717 		vgt_cache_invalidation = CACHE_INVALIDATION(TC_ONLY);
3718 		break;
3719 	default:
3720 		vgt_cache_invalidation = CACHE_INVALIDATION(VC_AND_TC);
3721 		break;
3722 	}
3723 	vgt_cache_invalidation |= AUTO_INVLD_EN(ES_AND_GS_AUTO);
3724 	WREG32(VGT_CACHE_INVALIDATION, vgt_cache_invalidation);
3725 
3726 	WREG32(VGT_GS_VERTEX_REUSE, 16);
3727 	WREG32(PA_SU_LINE_STIPPLE_VALUE, 0);
3728 	WREG32(PA_SC_LINE_STIPPLE_STATE, 0);
3729 
3730 	WREG32(VGT_VERTEX_REUSE_BLOCK_CNTL, 14);
3731 	WREG32(VGT_OUT_DEALLOC_CNTL, 16);
3732 
3733 	WREG32(CB_PERF_CTR0_SEL_0, 0);
3734 	WREG32(CB_PERF_CTR0_SEL_1, 0);
3735 	WREG32(CB_PERF_CTR1_SEL_0, 0);
3736 	WREG32(CB_PERF_CTR1_SEL_1, 0);
3737 	WREG32(CB_PERF_CTR2_SEL_0, 0);
3738 	WREG32(CB_PERF_CTR2_SEL_1, 0);
3739 	WREG32(CB_PERF_CTR3_SEL_0, 0);
3740 	WREG32(CB_PERF_CTR3_SEL_1, 0);
3741 
3742 	/* clear render buffer base addresses */
3743 	WREG32(CB_COLOR0_BASE, 0);
3744 	WREG32(CB_COLOR1_BASE, 0);
3745 	WREG32(CB_COLOR2_BASE, 0);
3746 	WREG32(CB_COLOR3_BASE, 0);
3747 	WREG32(CB_COLOR4_BASE, 0);
3748 	WREG32(CB_COLOR5_BASE, 0);
3749 	WREG32(CB_COLOR6_BASE, 0);
3750 	WREG32(CB_COLOR7_BASE, 0);
3751 	WREG32(CB_COLOR8_BASE, 0);
3752 	WREG32(CB_COLOR9_BASE, 0);
3753 	WREG32(CB_COLOR10_BASE, 0);
3754 	WREG32(CB_COLOR11_BASE, 0);
3755 
3756 	/* set the shader const cache sizes to 0 */
3757 	for (i = SQ_ALU_CONST_BUFFER_SIZE_PS_0; i < 0x28200; i += 4)
3758 		WREG32(i, 0);
3759 	for (i = SQ_ALU_CONST_BUFFER_SIZE_HS_0; i < 0x29000; i += 4)
3760 		WREG32(i, 0);
3761 
3762 	tmp = RREG32(HDP_MISC_CNTL);
3763 	tmp |= HDP_FLUSH_INVALIDATE_CACHE;
3764 	WREG32(HDP_MISC_CNTL, tmp);
3765 
3766 	hdp_host_path_cntl = RREG32(HDP_HOST_PATH_CNTL);
3767 	WREG32(HDP_HOST_PATH_CNTL, hdp_host_path_cntl);
3768 
3769 	WREG32(PA_CL_ENHANCE, CLIP_VTX_REORDER_ENA | NUM_CLIP_SEQ(3));
3770 
3771 	udelay(50);
3772 
3773 }
3774 
3775 int evergreen_mc_init(struct radeon_device *rdev)
3776 {
3777 	u32 tmp;
3778 	int chansize, numchan;
3779 
3780 	/* Get VRAM informations */
3781 	rdev->mc.vram_is_ddr = true;
3782 	if ((rdev->family == CHIP_PALM) ||
3783 	    (rdev->family == CHIP_SUMO) ||
3784 	    (rdev->family == CHIP_SUMO2))
3785 		tmp = RREG32(FUS_MC_ARB_RAMCFG);
3786 	else
3787 		tmp = RREG32(MC_ARB_RAMCFG);
3788 	if (tmp & CHANSIZE_OVERRIDE) {
3789 		chansize = 16;
3790 	} else if (tmp & CHANSIZE_MASK) {
3791 		chansize = 64;
3792 	} else {
3793 		chansize = 32;
3794 	}
3795 	tmp = RREG32(MC_SHARED_CHMAP);
3796 	switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
3797 	case 0:
3798 	default:
3799 		numchan = 1;
3800 		break;
3801 	case 1:
3802 		numchan = 2;
3803 		break;
3804 	case 2:
3805 		numchan = 4;
3806 		break;
3807 	case 3:
3808 		numchan = 8;
3809 		break;
3810 	}
3811 	rdev->mc.vram_width = numchan * chansize;
3812 	/* Could aper size report 0 ? */
3813 	rdev->mc.aper_base = pci_resource_start(rdev->pdev, 0);
3814 	rdev->mc.aper_size = pci_resource_len(rdev->pdev, 0);
3815 	/* Setup GPU memory space */
3816 	if ((rdev->family == CHIP_PALM) ||
3817 	    (rdev->family == CHIP_SUMO) ||
3818 	    (rdev->family == CHIP_SUMO2)) {
3819 		/* size in bytes on fusion */
3820 		rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE);
3821 		rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE);
3822 	} else {
3823 		/* size in MB on evergreen/cayman/tn */
3824 		rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL;
3825 		rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL;
3826 	}
3827 	rdev->mc.visible_vram_size = rdev->mc.aper_size;
3828 	r700_vram_gtt_location(rdev, &rdev->mc);
3829 	radeon_update_bandwidth_info(rdev);
3830 
3831 	return 0;
3832 }
3833 
/*
 * Dump the GRBM/SRBM/CP/DMA status registers to the kernel log.
 * Used by the reset paths to aid lockup debugging.
 */
void evergreen_print_gpu_status_regs(struct radeon_device *rdev)
{
	dev_info(rdev->dev, "  GRBM_STATUS               = 0x%08X\n",
		RREG32(GRBM_STATUS));
	dev_info(rdev->dev, "  GRBM_STATUS_SE0           = 0x%08X\n",
		RREG32(GRBM_STATUS_SE0));
	dev_info(rdev->dev, "  GRBM_STATUS_SE1           = 0x%08X\n",
		RREG32(GRBM_STATUS_SE1));
	dev_info(rdev->dev, "  SRBM_STATUS               = 0x%08X\n",
		RREG32(SRBM_STATUS));
	dev_info(rdev->dev, "  SRBM_STATUS2              = 0x%08X\n",
		RREG32(SRBM_STATUS2));
	dev_info(rdev->dev, "  R_008674_CP_STALLED_STAT1 = 0x%08X\n",
		RREG32(CP_STALLED_STAT1));
	dev_info(rdev->dev, "  R_008678_CP_STALLED_STAT2 = 0x%08X\n",
		RREG32(CP_STALLED_STAT2));
	dev_info(rdev->dev, "  R_00867C_CP_BUSY_STAT     = 0x%08X\n",
		RREG32(CP_BUSY_STAT));
	dev_info(rdev->dev, "  R_008680_CP_STAT          = 0x%08X\n",
		RREG32(CP_STAT));
	dev_info(rdev->dev, "  R_00D034_DMA_STATUS_REG   = 0x%08X\n",
		RREG32(DMA_STATUS_REG));
	/* cayman and newer have a second DMA engine at +0x800 */
	if (rdev->family >= CHIP_CAYMAN) {
		dev_info(rdev->dev, "  R_00D834_DMA_STATUS_REG   = 0x%08X\n",
			 RREG32(DMA_STATUS_REG + 0x800));
	}
}
3861 
3862 bool evergreen_is_display_hung(struct radeon_device *rdev)
3863 {
3864 	u32 crtc_hung = 0;
3865 	u32 crtc_status[6];
3866 	u32 i, j, tmp;
3867 
3868 	for (i = 0; i < rdev->num_crtc; i++) {
3869 		if (RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]) & EVERGREEN_CRTC_MASTER_EN) {
3870 			crtc_status[i] = RREG32(EVERGREEN_CRTC_STATUS_HV_COUNT + crtc_offsets[i]);
3871 			crtc_hung |= (1 << i);
3872 		}
3873 	}
3874 
3875 	for (j = 0; j < 10; j++) {
3876 		for (i = 0; i < rdev->num_crtc; i++) {
3877 			if (crtc_hung & (1 << i)) {
3878 				tmp = RREG32(EVERGREEN_CRTC_STATUS_HV_COUNT + crtc_offsets[i]);
3879 				if (tmp != crtc_status[i])
3880 					crtc_hung &= ~(1 << i);
3881 			}
3882 		}
3883 		if (crtc_hung == 0)
3884 			return false;
3885 		udelay(100);
3886 	}
3887 
3888 	return true;
3889 }
3890 
3891 u32 evergreen_gpu_check_soft_reset(struct radeon_device *rdev)
3892 {
3893 	u32 reset_mask = 0;
3894 	u32 tmp;
3895 
3896 	/* GRBM_STATUS */
3897 	tmp = RREG32(GRBM_STATUS);
3898 	if (tmp & (PA_BUSY | SC_BUSY |
3899 		   SH_BUSY | SX_BUSY |
3900 		   TA_BUSY | VGT_BUSY |
3901 		   DB_BUSY | CB_BUSY |
3902 		   SPI_BUSY | VGT_BUSY_NO_DMA))
3903 		reset_mask |= RADEON_RESET_GFX;
3904 
3905 	if (tmp & (CF_RQ_PENDING | PF_RQ_PENDING |
3906 		   CP_BUSY | CP_COHERENCY_BUSY))
3907 		reset_mask |= RADEON_RESET_CP;
3908 
3909 	if (tmp & GRBM_EE_BUSY)
3910 		reset_mask |= RADEON_RESET_GRBM | RADEON_RESET_GFX | RADEON_RESET_CP;
3911 
3912 	/* DMA_STATUS_REG */
3913 	tmp = RREG32(DMA_STATUS_REG);
3914 	if (!(tmp & DMA_IDLE))
3915 		reset_mask |= RADEON_RESET_DMA;
3916 
3917 	/* SRBM_STATUS2 */
3918 	tmp = RREG32(SRBM_STATUS2);
3919 	if (tmp & DMA_BUSY)
3920 		reset_mask |= RADEON_RESET_DMA;
3921 
3922 	/* SRBM_STATUS */
3923 	tmp = RREG32(SRBM_STATUS);
3924 	if (tmp & (RLC_RQ_PENDING | RLC_BUSY))
3925 		reset_mask |= RADEON_RESET_RLC;
3926 
3927 	if (tmp & IH_BUSY)
3928 		reset_mask |= RADEON_RESET_IH;
3929 
3930 	if (tmp & SEM_BUSY)
3931 		reset_mask |= RADEON_RESET_SEM;
3932 
3933 	if (tmp & GRBM_RQ_PENDING)
3934 		reset_mask |= RADEON_RESET_GRBM;
3935 
3936 	if (tmp & VMC_BUSY)
3937 		reset_mask |= RADEON_RESET_VMC;
3938 
3939 	if (tmp & (MCB_BUSY | MCB_NON_DISPLAY_BUSY |
3940 		   MCC_BUSY | MCD_BUSY))
3941 		reset_mask |= RADEON_RESET_MC;
3942 
3943 	if (evergreen_is_display_hung(rdev))
3944 		reset_mask |= RADEON_RESET_DISPLAY;
3945 
3946 	/* VM_L2_STATUS */
3947 	tmp = RREG32(VM_L2_STATUS);
3948 	if (tmp & L2_BUSY)
3949 		reset_mask |= RADEON_RESET_VMC;
3950 
3951 	/* Skip MC reset as it's mostly likely not hung, just busy */
3952 	if (reset_mask & RADEON_RESET_MC) {
3953 		DRM_DEBUG("MC busy: 0x%08X, clearing.\n", reset_mask);
3954 		reset_mask &= ~RADEON_RESET_MC;
3955 	}
3956 
3957 	return reset_mask;
3958 }
3959 
/*
 * Perform a GRBM/SRBM soft reset of the engines selected by reset_mask
 * (a combination of RADEON_RESET_* flags).  The sequence is: halt the
 * CP (and DMA if requested), stop the MC, pulse the soft reset bits,
 * then restore the MC.  Register write order and the interleaved
 * delays/readbacks are hardware-mandated; do not reorder.
 */
static void evergreen_gpu_soft_reset(struct radeon_device *rdev, u32 reset_mask)
{
	struct evergreen_mc_save save;
	u32 grbm_soft_reset = 0, srbm_soft_reset = 0;
	u32 tmp;

	if (reset_mask == 0)
		return;

	dev_info(rdev->dev, "GPU softreset: 0x%08X\n", reset_mask);

	evergreen_print_gpu_status_regs(rdev);

	/* Disable CP parsing/prefetching */
	WREG32(CP_ME_CNTL, CP_ME_HALT | CP_PFP_HALT);

	if (reset_mask & RADEON_RESET_DMA) {
		/* Disable DMA */
		tmp = RREG32(DMA_RB_CNTL);
		tmp &= ~DMA_RB_ENABLE;
		WREG32(DMA_RB_CNTL, tmp);
	}

	udelay(50);

	/* stop memory-controller clients before pulsing the reset bits */
	evergreen_mc_stop(rdev, &save);
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
	}

	/* translate the RADEON_RESET_* mask into GRBM/SRBM soft reset bits */
	if (reset_mask & (RADEON_RESET_GFX | RADEON_RESET_COMPUTE)) {
		grbm_soft_reset |= SOFT_RESET_DB |
			SOFT_RESET_CB |
			SOFT_RESET_PA |
			SOFT_RESET_SC |
			SOFT_RESET_SPI |
			SOFT_RESET_SX |
			SOFT_RESET_SH |
			SOFT_RESET_TC |
			SOFT_RESET_TA |
			SOFT_RESET_VC |
			SOFT_RESET_VGT;
	}

	if (reset_mask & RADEON_RESET_CP) {
		grbm_soft_reset |= SOFT_RESET_CP |
			SOFT_RESET_VGT;

		srbm_soft_reset |= SOFT_RESET_GRBM;
	}

	if (reset_mask & RADEON_RESET_DMA)
		srbm_soft_reset |= SOFT_RESET_DMA;

	if (reset_mask & RADEON_RESET_DISPLAY)
		srbm_soft_reset |= SOFT_RESET_DC;

	if (reset_mask & RADEON_RESET_RLC)
		srbm_soft_reset |= SOFT_RESET_RLC;

	if (reset_mask & RADEON_RESET_SEM)
		srbm_soft_reset |= SOFT_RESET_SEM;

	if (reset_mask & RADEON_RESET_IH)
		srbm_soft_reset |= SOFT_RESET_IH;

	if (reset_mask & RADEON_RESET_GRBM)
		srbm_soft_reset |= SOFT_RESET_GRBM;

	if (reset_mask & RADEON_RESET_VMC)
		srbm_soft_reset |= SOFT_RESET_VMC;

	/* MC soft reset is only attempted on discrete parts */
	if (!(rdev->flags & RADEON_IS_IGP)) {
		if (reset_mask & RADEON_RESET_MC)
			srbm_soft_reset |= SOFT_RESET_MC;
	}

	if (grbm_soft_reset) {
		/* assert, wait, then deassert; the readbacks post the writes */
		tmp = RREG32(GRBM_SOFT_RESET);
		tmp |= grbm_soft_reset;
		dev_info(rdev->dev, "GRBM_SOFT_RESET=0x%08X\n", tmp);
		WREG32(GRBM_SOFT_RESET, tmp);
		tmp = RREG32(GRBM_SOFT_RESET);

		udelay(50);

		tmp &= ~grbm_soft_reset;
		WREG32(GRBM_SOFT_RESET, tmp);
		tmp = RREG32(GRBM_SOFT_RESET);
	}

	if (srbm_soft_reset) {
		/* same assert/deassert dance for the system blocks */
		tmp = RREG32(SRBM_SOFT_RESET);
		tmp |= srbm_soft_reset;
		dev_info(rdev->dev, "SRBM_SOFT_RESET=0x%08X\n", tmp);
		WREG32(SRBM_SOFT_RESET, tmp);
		tmp = RREG32(SRBM_SOFT_RESET);

		udelay(50);

		tmp &= ~srbm_soft_reset;
		WREG32(SRBM_SOFT_RESET, tmp);
		tmp = RREG32(SRBM_SOFT_RESET);
	}

	/* Wait a little for things to settle down */
	udelay(50);

	evergreen_mc_resume(rdev, &save);
	udelay(50);

	evergreen_print_gpu_status_regs(rdev);
}
4073 
/*
 * Reset the whole GPU through the PCI config space mechanism.  Heavier
 * than the GRBM/SRBM soft reset path; used when soft reset fails or a
 * hard reset is explicitly requested.  The engines, RLC, clocks, bus
 * mastering and MC are quiesced first, in this order.
 */
void evergreen_gpu_pci_config_reset(struct radeon_device *rdev)
{
	struct evergreen_mc_save save;
	u32 tmp, i;

	dev_info(rdev->dev, "GPU pci config reset\n");

	/* disable dpm? */

	/* Disable CP parsing/prefetching */
	WREG32(CP_ME_CNTL, CP_ME_HALT | CP_PFP_HALT);
	udelay(50);
	/* Disable DMA */
	tmp = RREG32(DMA_RB_CNTL);
	tmp &= ~DMA_RB_ENABLE;
	WREG32(DMA_RB_CNTL, tmp);
	/* XXX other engines? */

	/* halt the rlc */
	r600_rlc_stop(rdev);

	udelay(50);

	/* set mclk/sclk to bypass */
	rv770_set_clk_bypass_mode(rdev);
	/* disable BM */
	pci_clear_master(rdev->pdev);
	/* disable mem access */
	evergreen_mc_stop(rdev, &save);
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timed out !\n");
	}
	/* reset */
	radeon_pci_config_reset(rdev);
	/* wait for asic to come out of reset */
	for (i = 0; i < rdev->usec_timeout; i++) {
		/* register reads return all-ones while the asic is in reset */
		if (RREG32(CONFIG_MEMSIZE) != 0xffffffff)
			break;
		udelay(1);
	}
}
4115 
4116 int evergreen_asic_reset(struct radeon_device *rdev, bool hard)
4117 {
4118 	u32 reset_mask;
4119 
4120 	if (hard) {
4121 		evergreen_gpu_pci_config_reset(rdev);
4122 		return 0;
4123 	}
4124 
4125 	reset_mask = evergreen_gpu_check_soft_reset(rdev);
4126 
4127 	if (reset_mask)
4128 		r600_set_bios_scratch_engine_hung(rdev, true);
4129 
4130 	/* try soft reset */
4131 	evergreen_gpu_soft_reset(rdev, reset_mask);
4132 
4133 	reset_mask = evergreen_gpu_check_soft_reset(rdev);
4134 
4135 	/* try pci config reset */
4136 	if (reset_mask && radeon_hard_reset)
4137 		evergreen_gpu_pci_config_reset(rdev);
4138 
4139 	reset_mask = evergreen_gpu_check_soft_reset(rdev);
4140 
4141 	if (!reset_mask)
4142 		r600_set_bios_scratch_engine_hung(rdev, false);
4143 
4144 	return 0;
4145 }
4146 
4147 /**
4148  * evergreen_gfx_is_lockup - Check if the GFX engine is locked up
4149  *
4150  * @rdev: radeon_device pointer
4151  * @ring: radeon_ring structure holding ring information
4152  *
4153  * Check if the GFX engine is locked up.
4154  * Returns true if the engine appears to be locked up, false if not.
4155  */
4156 bool evergreen_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
4157 {
4158 	u32 reset_mask = evergreen_gpu_check_soft_reset(rdev);
4159 
4160 	if (!(reset_mask & (RADEON_RESET_GFX |
4161 			    RADEON_RESET_COMPUTE |
4162 			    RADEON_RESET_CP))) {
4163 		radeon_ring_lockup_update(rdev, ring);
4164 		return false;
4165 	}
4166 	return radeon_ring_test_lockup(rdev, ring);
4167 }
4168 
4169 /*
4170  * RLC
4171  */
4172 #define RLC_SAVE_RESTORE_LIST_END_MARKER    0x00000000
4173 #define RLC_CLEAR_STATE_END_MARKER          0x00000001
4174 
4175 void sumo_rlc_fini(struct radeon_device *rdev)
4176 {
4177 	int r;
4178 
4179 	/* save restore block */
4180 	if (rdev->rlc.save_restore_obj) {
4181 		r = radeon_bo_reserve(rdev->rlc.save_restore_obj, false);
4182 		if (unlikely(r != 0))
4183 			dev_warn(rdev->dev, "(%d) reserve RLC sr bo failed\n", r);
4184 		radeon_bo_unpin(rdev->rlc.save_restore_obj);
4185 		radeon_bo_unreserve(rdev->rlc.save_restore_obj);
4186 
4187 		radeon_bo_unref(&rdev->rlc.save_restore_obj);
4188 		rdev->rlc.save_restore_obj = NULL;
4189 	}
4190 
4191 	/* clear state block */
4192 	if (rdev->rlc.clear_state_obj) {
4193 		r = radeon_bo_reserve(rdev->rlc.clear_state_obj, false);
4194 		if (unlikely(r != 0))
4195 			dev_warn(rdev->dev, "(%d) reserve RLC c bo failed\n", r);
4196 		radeon_bo_unpin(rdev->rlc.clear_state_obj);
4197 		radeon_bo_unreserve(rdev->rlc.clear_state_obj);
4198 
4199 		radeon_bo_unref(&rdev->rlc.clear_state_obj);
4200 		rdev->rlc.clear_state_obj = NULL;
4201 	}
4202 
4203 	/* clear state block */
4204 	if (rdev->rlc.cp_table_obj) {
4205 		r = radeon_bo_reserve(rdev->rlc.cp_table_obj, false);
4206 		if (unlikely(r != 0))
4207 			dev_warn(rdev->dev, "(%d) reserve RLC cp table bo failed\n", r);
4208 		radeon_bo_unpin(rdev->rlc.cp_table_obj);
4209 		radeon_bo_unreserve(rdev->rlc.cp_table_obj);
4210 
4211 		radeon_bo_unref(&rdev->rlc.cp_table_obj);
4212 		rdev->rlc.cp_table_obj = NULL;
4213 	}
4214 }
4215 
4216 #define CP_ME_TABLE_SIZE    96
4217 
4218 #pragma GCC diagnostic push
4219 #pragma GCC diagnostic ignored "-Wcast-qual"
/*
 * Allocate, pin and fill the RLC buffer objects in VRAM:
 *  - the save/restore block (from rdev->rlc.reg_list, when present),
 *  - the clear state block (from rdev->rlc.cs_data, when present),
 *  - the CP table (when rdev->rlc.cp_table_size is non-zero).
 * The in-buffer layout differs by GPU generation (CIK / SI / earlier).
 * On any failure everything allocated so far is torn down via
 * sumo_rlc_fini() and the error is returned; returns 0 on success.
 */
int sumo_rlc_init(struct radeon_device *rdev)
{
	const u32 *src_ptr;
	volatile u32 *dst_ptr;
	u32 dws, data, i, j, k, reg_num;
	u32 reg_list_num, reg_list_hdr_blk_index, reg_list_blk_index = 0;
	u64 reg_list_mc_addr;
	const struct cs_section_def *cs_data;
	int r;

	src_ptr = rdev->rlc.reg_list;
	dws = rdev->rlc.reg_list_size;
	if (rdev->family >= CHIP_BONAIRE) {
		/* extra dwords reserved on CIK; exact breakdown not visible
		 * here — presumably per-block save space, TODO confirm */
		dws += (5 * 16) + 48 + 48 + 64;
	}
	cs_data = rdev->rlc.cs_data;

	if (src_ptr) {
		/* save restore block */
		if (rdev->rlc.save_restore_obj == NULL) {
			r = radeon_bo_create(rdev, dws * 4, PAGE_SIZE, true,
					     RADEON_GEM_DOMAIN_VRAM, 0, NULL,
					     NULL, &rdev->rlc.save_restore_obj);
			if (r) {
				dev_warn(rdev->dev, "(%d) create RLC sr bo failed\n", r);
				return r;
			}
		}

		r = radeon_bo_reserve(rdev->rlc.save_restore_obj, false);
		if (unlikely(r != 0)) {
			sumo_rlc_fini(rdev);
			return r;
		}
		r = radeon_bo_pin(rdev->rlc.save_restore_obj, RADEON_GEM_DOMAIN_VRAM,
				  &rdev->rlc.save_restore_gpu_addr);
		if (r) {
			radeon_bo_unreserve(rdev->rlc.save_restore_obj);
			dev_warn(rdev->dev, "(%d) pin RLC sr bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}

		r = radeon_bo_kmap(rdev->rlc.save_restore_obj, (void **)&rdev->rlc.sr_ptr);
		if (r) {
			dev_warn(rdev->dev, "(%d) map RLC sr bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}
		/* write the sr buffer */
		dst_ptr = rdev->rlc.sr_ptr;
		if (rdev->family >= CHIP_TAHITI) {
			/* SI: plain little-endian copy of the register list */
			for (i = 0; i < rdev->rlc.reg_list_size; i++)
				dst_ptr[i] = cpu_to_le32(src_ptr[i]);
		} else {
			/* ON/LN/TN */
			/* format:
			 * dw0: (reg2 << 16) | reg1
			 * dw1: reg1 save space
			 * dw2: reg2 save space
			 */
			for (i = 0; i < dws; i++) {
				/* pack two dword-offset register indices per
				 * header dword; i advances by 2 per iteration */
				data = src_ptr[i] >> 2;
				i++;
				if (i < dws)
					data |= (src_ptr[i] >> 2) << 16;
				j = (((i - 1) * 3) / 2);
				dst_ptr[j] = cpu_to_le32(data);
			}
			j = ((i * 3) / 2);
			dst_ptr[j] = cpu_to_le32(RLC_SAVE_RESTORE_LIST_END_MARKER);
		}
		radeon_bo_kunmap(rdev->rlc.save_restore_obj);
		radeon_bo_unreserve(rdev->rlc.save_restore_obj);
	}

	if (cs_data) {
		/* clear state block: compute its size per generation */
		if (rdev->family >= CHIP_BONAIRE) {
			rdev->rlc.clear_state_size = dws = cik_get_csb_size(rdev);
		} else if (rdev->family >= CHIP_TAHITI) {
			/* SI keeps a 256-byte header in front of the CSB */
			rdev->rlc.clear_state_size = si_get_csb_size(rdev);
			dws = rdev->rlc.clear_state_size + (256 / 4);
		} else {
			/* pre-SI: 3 header dwords per register run, plus an
			 * address header and an end marker (the "+ 2") */
			reg_list_num = 0;
			dws = 0;
			for (i = 0; cs_data[i].section != NULL; i++) {
				for (j = 0; cs_data[i].section[j].extent != NULL; j++) {
					reg_list_num++;
					dws += cs_data[i].section[j].reg_count;
				}
			}
			reg_list_blk_index = (3 * reg_list_num + 2);
			dws += reg_list_blk_index;
			rdev->rlc.clear_state_size = dws;
		}

		if (rdev->rlc.clear_state_obj == NULL) {
			r = radeon_bo_create(rdev, dws * 4, PAGE_SIZE, true,
					     RADEON_GEM_DOMAIN_VRAM, 0, NULL,
					     NULL, &rdev->rlc.clear_state_obj);
			if (r) {
				dev_warn(rdev->dev, "(%d) create RLC c bo failed\n", r);
				sumo_rlc_fini(rdev);
				return r;
			}
		}
		r = radeon_bo_reserve(rdev->rlc.clear_state_obj, false);
		if (unlikely(r != 0)) {
			sumo_rlc_fini(rdev);
			return r;
		}
		r = radeon_bo_pin(rdev->rlc.clear_state_obj, RADEON_GEM_DOMAIN_VRAM,
				  &rdev->rlc.clear_state_gpu_addr);
		if (r) {
			radeon_bo_unreserve(rdev->rlc.clear_state_obj);
			dev_warn(rdev->dev, "(%d) pin RLC c bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}

		r = radeon_bo_kmap(rdev->rlc.clear_state_obj, (void **)&rdev->rlc.cs_ptr);
		if (r) {
			dev_warn(rdev->dev, "(%d) map RLC c bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}
		/* set up the cs buffer */
		dst_ptr = rdev->rlc.cs_ptr;
		if (rdev->family >= CHIP_BONAIRE) {
			cik_get_csb_buffer(rdev, dst_ptr);
		} else if (rdev->family >= CHIP_TAHITI) {
			/* header: GPU address and size of the CSB that starts
			 * 256 bytes into the buffer */
			reg_list_mc_addr = rdev->rlc.clear_state_gpu_addr + 256;
			dst_ptr[0] = cpu_to_le32(upper_32_bits(reg_list_mc_addr));
			dst_ptr[1] = cpu_to_le32(lower_32_bits(reg_list_mc_addr));
			dst_ptr[2] = cpu_to_le32(rdev->rlc.clear_state_size);
			si_get_csb_buffer(rdev, &dst_ptr[(256/4)]);
		} else {
			/* pre-SI: header block (3 dwords per run) followed by
			 * the packed register payload at reg_list_blk_index */
			reg_list_hdr_blk_index = 0;
			reg_list_mc_addr = rdev->rlc.clear_state_gpu_addr + (reg_list_blk_index * 4);
			data = upper_32_bits(reg_list_mc_addr);
			dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
			reg_list_hdr_blk_index++;
			for (i = 0; cs_data[i].section != NULL; i++) {
				for (j = 0; cs_data[i].section[j].extent != NULL; j++) {
					reg_num = cs_data[i].section[j].reg_count;
					/* payload GPU address (low 32 bits) */
					data = reg_list_mc_addr & 0xffffffff;
					dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
					reg_list_hdr_blk_index++;

					/* byte offset of the first register */
					data = (cs_data[i].section[j].reg_index * 4) & 0xffffffff;
					dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
					reg_list_hdr_blk_index++;

					/* flag (0x08000000) | payload byte size —
					 * flag meaning not visible here, TODO confirm */
					data = 0x08000000 | (reg_num * 4);
					dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
					reg_list_hdr_blk_index++;

					/* copy the register values themselves */
					for (k = 0; k < reg_num; k++) {
						data = cs_data[i].section[j].extent[k];
						dst_ptr[reg_list_blk_index + k] = cpu_to_le32(data);
					}
					reg_list_mc_addr += reg_num * 4;
					reg_list_blk_index += reg_num;
				}
			}
			dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(RLC_CLEAR_STATE_END_MARKER);
		}
		radeon_bo_kunmap(rdev->rlc.clear_state_obj);
		radeon_bo_unreserve(rdev->rlc.clear_state_obj);
	}

	if (rdev->rlc.cp_table_size) {
		/* cp table block */
		if (rdev->rlc.cp_table_obj == NULL) {
			r = radeon_bo_create(rdev, rdev->rlc.cp_table_size,
					     PAGE_SIZE, true,
					     RADEON_GEM_DOMAIN_VRAM, 0, NULL,
					     NULL, &rdev->rlc.cp_table_obj);
			if (r) {
				dev_warn(rdev->dev, "(%d) create RLC cp table bo failed\n", r);
				sumo_rlc_fini(rdev);
				return r;
			}
		}

		r = radeon_bo_reserve(rdev->rlc.cp_table_obj, false);
		if (unlikely(r != 0)) {
			dev_warn(rdev->dev, "(%d) reserve RLC cp table bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}
		r = radeon_bo_pin(rdev->rlc.cp_table_obj, RADEON_GEM_DOMAIN_VRAM,
				  &rdev->rlc.cp_table_gpu_addr);
		if (r) {
			radeon_bo_unreserve(rdev->rlc.cp_table_obj);
			dev_warn(rdev->dev, "(%d) pin RLC cp_table bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}
		r = radeon_bo_kmap(rdev->rlc.cp_table_obj, (void **)&rdev->rlc.cp_table_ptr);
		if (r) {
			dev_warn(rdev->dev, "(%d) map RLC cp table bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}

		cik_init_cp_pg_table(rdev);

		radeon_bo_kunmap(rdev->rlc.cp_table_obj);
		radeon_bo_unreserve(rdev->rlc.cp_table_obj);

	}

	return 0;
}
4436 #pragma GCC diagnostic pop
4437 
4438 static void evergreen_rlc_start(struct radeon_device *rdev)
4439 {
4440 	u32 mask = RLC_ENABLE;
4441 
4442 	if (rdev->flags & RADEON_IS_IGP) {
4443 		mask |= GFX_POWER_GATING_ENABLE | GFX_POWER_GATING_SRC;
4444 	}
4445 
4446 	WREG32(RLC_CNTL, mask);
4447 }
4448 
/*
 * Stop the RLC, program its base/handshake registers, load the RLC
 * microcode (size depends on GPU generation) and start the RLC again.
 * Returns -EINVAL when no RLC firmware has been loaded, 0 on success.
 */
int evergreen_rlc_resume(struct radeon_device *rdev)
{
	u32 i;
	const __be32 *fw_data;

	if (!rdev->rlc_fw)
		return -EINVAL;

	r600_rlc_stop(rdev);

	WREG32(RLC_HB_CNTL, 0);

	if (rdev->flags & RADEON_IS_IGP) {
		if (rdev->family == CHIP_ARUBA) {
			u32 always_on_bitmap =
				3 | (3 << (16 * rdev->config.cayman.max_shader_engines));
			/* find out the number of active simds */
			u32 tmp = (RREG32(CC_GC_SHADER_PIPE_CONFIG) & 0xffff0000) >> 16;
			tmp |= 0xffffffff << rdev->config.cayman.max_simds_per_se;
			tmp = hweight32(~tmp);
			/* load-balancing setup only when all simds are active */
			if (tmp == rdev->config.cayman.max_simds_per_se) {
				WREG32(TN_RLC_LB_ALWAYS_ACTIVE_SIMD_MASK, always_on_bitmap);
				WREG32(TN_RLC_LB_PARAMS, 0x00601004);
				WREG32(TN_RLC_LB_INIT_SIMD_MASK, 0xffffffff);
				WREG32(TN_RLC_LB_CNTR_INIT, 0x00000000);
				WREG32(TN_RLC_LB_CNTR_MAX, 0x00002000);
			}
		} else {
			WREG32(RLC_HB_WPTR_LSB_ADDR, 0);
			WREG32(RLC_HB_WPTR_MSB_ADDR, 0);
		}
		/* point the RLC at the buffers built by sumo_rlc_init() */
		WREG32(TN_RLC_SAVE_AND_RESTORE_BASE, rdev->rlc.save_restore_gpu_addr >> 8);
		WREG32(TN_RLC_CLEAR_STATE_RESTORE_BASE, rdev->rlc.clear_state_gpu_addr >> 8);
	} else {
		WREG32(RLC_HB_BASE, 0);
		WREG32(RLC_HB_RPTR, 0);
		WREG32(RLC_HB_WPTR, 0);
		WREG32(RLC_HB_WPTR_LSB_ADDR, 0);
		WREG32(RLC_HB_WPTR_MSB_ADDR, 0);
	}
	WREG32(RLC_MC_CNTL, 0);
	WREG32(RLC_UCODE_CNTL, 0);

	/* firmware is stored big-endian; write it word by word */
	fw_data = (const __be32 *)rdev->rlc_fw->data;
	if (rdev->family >= CHIP_ARUBA) {
		for (i = 0; i < ARUBA_RLC_UCODE_SIZE; i++) {
			WREG32(RLC_UCODE_ADDR, i);
			WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
		}
	} else if (rdev->family >= CHIP_CAYMAN) {
		for (i = 0; i < CAYMAN_RLC_UCODE_SIZE; i++) {
			WREG32(RLC_UCODE_ADDR, i);
			WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
		}
	} else {
		for (i = 0; i < EVERGREEN_RLC_UCODE_SIZE; i++) {
			WREG32(RLC_UCODE_ADDR, i);
			WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
		}
	}
	WREG32(RLC_UCODE_ADDR, 0);

	evergreen_rlc_start(rdev);

	return 0;
}
4515 
4516 /* Interrupts */
4517 
4518 u32 evergreen_get_vblank_counter(struct radeon_device *rdev, int crtc)
4519 {
4520 	if (crtc >= rdev->num_crtc)
4521 		return 0;
4522 	else
4523 		return RREG32(CRTC_STATUS_FRAME_COUNT + crtc_offsets[crtc]);
4524 }
4525 
/*
 * Mask every interrupt source this driver programs: CP/DMA traps, GRBM
 * and SRBM, per-CRTC vblank/pageflip masks, DAC autodetect and the six
 * HPD lines.  The HPD polarity bits are preserved while the enable
 * bits are cleared.
 */
void evergreen_disable_interrupt_state(struct radeon_device *rdev)
{
	u32 tmp;

	if (rdev->family >= CHIP_CAYMAN) {
		/* cayman has three CP rings and a second DMA engine */
		cayman_cp_int_cntl_setup(rdev, 0,
					 CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
		cayman_cp_int_cntl_setup(rdev, 1, 0);
		cayman_cp_int_cntl_setup(rdev, 2, 0);
		tmp = RREG32(CAYMAN_DMA1_CNTL) & ~TRAP_ENABLE;
		WREG32(CAYMAN_DMA1_CNTL, tmp);
	} else
		WREG32(CP_INT_CNTL, CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
	tmp = RREG32(DMA_CNTL) & ~TRAP_ENABLE;
	WREG32(DMA_CNTL, tmp);
	WREG32(GRBM_INT_CNTL, 0);
	WREG32(SRBM_INT_CNTL, 0);
	/* per-CRTC interrupt masks; CRTCs 2-5 only exist on larger parts */
	WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
	WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
	if (rdev->num_crtc >= 4) {
		WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
		WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
	}
	if (rdev->num_crtc >= 6) {
		WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
		WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
	}

	/* per-CRTC pageflip (GRPH) interrupt control */
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
	if (rdev->num_crtc >= 4) {
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
	}
	if (rdev->num_crtc >= 6) {
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
	}

	/* only one DAC on DCE5 */
	if (!ASIC_IS_DCE5(rdev))
		WREG32(DACA_AUTODETECT_INT_CONTROL, 0);
	WREG32(DACB_AUTODETECT_INT_CONTROL, 0);

	/* keep only the polarity bit of each HPD line, clearing the enables */
	tmp = RREG32(DC_HPD1_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD1_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD2_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD2_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD3_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD3_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD4_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD4_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD5_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD5_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD6_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD6_INT_CONTROL, tmp);

}
4584 
/*
 * Program the hardware interrupt enables from the driver's software
 * interrupt state (rdev->irq): CP ring fence interrupts, DMA traps,
 * per-CRTC vblank, pageflip, HPD hotplug, HDMI/AFMT audio and thermal.
 *
 * Builds each control value in a local first (reading back the current
 * register where bits outside our control must be preserved) and writes
 * them all at the end.  Returns 0 on success, -EINVAL if no IRQ handler
 * is installed.
 */
int evergreen_irq_set(struct radeon_device *rdev)
{
	u32 cp_int_cntl = CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE;
	u32 cp_int_cntl1 = 0, cp_int_cntl2 = 0;
	u32 crtc1 = 0, crtc2 = 0, crtc3 = 0, crtc4 = 0, crtc5 = 0, crtc6 = 0;
	u32 hpd1, hpd2, hpd3, hpd4, hpd5, hpd6;
	u32 grbm_int_cntl = 0;
	u32 afmt1 = 0, afmt2 = 0, afmt3 = 0, afmt4 = 0, afmt5 = 0, afmt6 = 0;
	u32 dma_cntl, dma_cntl1 = 0;
	u32 thermal_int = 0;

	if (!rdev->irq.installed) {
		WARN(1, "Can't enable IRQ/MSI because no handler is installed\n");
		return -EINVAL;
	}
	/* don't enable anything if the ih is disabled */
	if (!rdev->ih.enabled) {
		r600_disable_interrupts(rdev);
		/* force the active interrupt state to all disabled */
		evergreen_disable_interrupt_state(rdev);
		return 0;
	}

	/* read back current HPD control values with the enable bits cleared;
	 * the polarity and other bits must be preserved */
	hpd1 = RREG32(DC_HPD1_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
	hpd2 = RREG32(DC_HPD2_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
	hpd3 = RREG32(DC_HPD3_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
	hpd4 = RREG32(DC_HPD4_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
	hpd5 = RREG32(DC_HPD5_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
	hpd6 = RREG32(DC_HPD6_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
	/* ARUBA (TN) uses a different thermal interrupt control register */
	if (rdev->family == CHIP_ARUBA)
		thermal_int = RREG32(TN_CG_THERMAL_INT_CTRL) &
			~(THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW);
	else
		thermal_int = RREG32(CG_THERMAL_INT) &
			~(THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW);

	afmt1 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
	afmt2 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
	afmt3 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
	afmt4 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
	afmt5 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
	afmt6 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;

	dma_cntl = RREG32(DMA_CNTL) & ~TRAP_ENABLE;

	if (rdev->family >= CHIP_CAYMAN) {
		/* enable CP interrupts on all rings */
		if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
			DRM_DEBUG("evergreen_irq_set: sw int gfx\n");
			cp_int_cntl |= TIME_STAMP_INT_ENABLE;
		}
		if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP1_INDEX])) {
			DRM_DEBUG("evergreen_irq_set: sw int cp1\n");
			cp_int_cntl1 |= TIME_STAMP_INT_ENABLE;
		}
		if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP2_INDEX])) {
			DRM_DEBUG("evergreen_irq_set: sw int cp2\n");
			cp_int_cntl2 |= TIME_STAMP_INT_ENABLE;
		}
	} else {
		if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
			DRM_DEBUG("evergreen_irq_set: sw int gfx\n");
			cp_int_cntl |= RB_INT_ENABLE;
			cp_int_cntl |= TIME_STAMP_INT_ENABLE;
		}
	}

	if (atomic_read(&rdev->irq.ring_int[R600_RING_TYPE_DMA_INDEX])) {
		DRM_DEBUG("r600_irq_set: sw int dma\n");
		dma_cntl |= TRAP_ENABLE;
	}

	/* second DMA engine only exists on Cayman and newer */
	if (rdev->family >= CHIP_CAYMAN) {
		dma_cntl1 = RREG32(CAYMAN_DMA1_CNTL) & ~TRAP_ENABLE;
		if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_DMA1_INDEX])) {
			DRM_DEBUG("r600_irq_set: sw int dma1\n");
			dma_cntl1 |= TRAP_ENABLE;
		}
	}

	if (rdev->irq.dpm_thermal) {
		DRM_DEBUG("dpm thermal\n");
		thermal_int |= THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW;
	}

	/* vblank is requested either by the vblank int path or a pending pflip */
	if (rdev->irq.crtc_vblank_int[0] ||
	    atomic_read(&rdev->irq.pflip[0])) {
		DRM_DEBUG("evergreen_irq_set: vblank 0\n");
		crtc1 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[1] ||
	    atomic_read(&rdev->irq.pflip[1])) {
		DRM_DEBUG("evergreen_irq_set: vblank 1\n");
		crtc2 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[2] ||
	    atomic_read(&rdev->irq.pflip[2])) {
		DRM_DEBUG("evergreen_irq_set: vblank 2\n");
		crtc3 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[3] ||
	    atomic_read(&rdev->irq.pflip[3])) {
		DRM_DEBUG("evergreen_irq_set: vblank 3\n");
		crtc4 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[4] ||
	    atomic_read(&rdev->irq.pflip[4])) {
		DRM_DEBUG("evergreen_irq_set: vblank 4\n");
		crtc5 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[5] ||
	    atomic_read(&rdev->irq.pflip[5])) {
		DRM_DEBUG("evergreen_irq_set: vblank 5\n");
		crtc6 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.hpd[0]) {
		DRM_DEBUG("evergreen_irq_set: hpd 1\n");
		hpd1 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
	}
	if (rdev->irq.hpd[1]) {
		DRM_DEBUG("evergreen_irq_set: hpd 2\n");
		hpd2 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
	}
	if (rdev->irq.hpd[2]) {
		DRM_DEBUG("evergreen_irq_set: hpd 3\n");
		hpd3 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
	}
	if (rdev->irq.hpd[3]) {
		DRM_DEBUG("evergreen_irq_set: hpd 4\n");
		hpd4 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
	}
	if (rdev->irq.hpd[4]) {
		DRM_DEBUG("evergreen_irq_set: hpd 5\n");
		hpd5 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
	}
	if (rdev->irq.hpd[5]) {
		DRM_DEBUG("evergreen_irq_set: hpd 6\n");
		hpd6 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
	}
	if (rdev->irq.afmt[0]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 0\n");
		afmt1 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}
	if (rdev->irq.afmt[1]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 1\n");
		afmt2 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}
	if (rdev->irq.afmt[2]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 2\n");
		afmt3 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}
	if (rdev->irq.afmt[3]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 3\n");
		afmt4 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}
	if (rdev->irq.afmt[4]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 4\n");
		afmt5 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}
	if (rdev->irq.afmt[5]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 5\n");
		afmt6 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}

	/* commit the accumulated control values to the hardware */
	if (rdev->family >= CHIP_CAYMAN) {
		cayman_cp_int_cntl_setup(rdev, 0, cp_int_cntl);
		cayman_cp_int_cntl_setup(rdev, 1, cp_int_cntl1);
		cayman_cp_int_cntl_setup(rdev, 2, cp_int_cntl2);
	} else
		WREG32(CP_INT_CNTL, cp_int_cntl);

	WREG32(DMA_CNTL, dma_cntl);

	if (rdev->family >= CHIP_CAYMAN)
		WREG32(CAYMAN_DMA1_CNTL, dma_cntl1);

	WREG32(GRBM_INT_CNTL, grbm_int_cntl);

	WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, crtc1);
	WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, crtc2);
	if (rdev->num_crtc >= 4) {
		WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, crtc3);
		WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, crtc4);
	}
	if (rdev->num_crtc >= 6) {
		WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, crtc5);
		WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, crtc6);
	}

	/* pageflip interrupts are always left enabled on every CRTC */
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET,
	       GRPH_PFLIP_INT_MASK);
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET,
	       GRPH_PFLIP_INT_MASK);
	if (rdev->num_crtc >= 4) {
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET,
		       GRPH_PFLIP_INT_MASK);
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET,
		       GRPH_PFLIP_INT_MASK);
	}
	if (rdev->num_crtc >= 6) {
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET,
		       GRPH_PFLIP_INT_MASK);
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET,
		       GRPH_PFLIP_INT_MASK);
	}

	WREG32(DC_HPD1_INT_CONTROL, hpd1);
	WREG32(DC_HPD2_INT_CONTROL, hpd2);
	WREG32(DC_HPD3_INT_CONTROL, hpd3);
	WREG32(DC_HPD4_INT_CONTROL, hpd4);
	WREG32(DC_HPD5_INT_CONTROL, hpd5);
	WREG32(DC_HPD6_INT_CONTROL, hpd6);
	if (rdev->family == CHIP_ARUBA)
		WREG32(TN_CG_THERMAL_INT_CTRL, thermal_int);
	else
		WREG32(CG_THERMAL_INT, thermal_int);

	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, afmt1);
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, afmt2);
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, afmt3);
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, afmt4);
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, afmt5);
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, afmt6);

	/* posting read */
	RREG32(SRBM_STATUS);

	return 0;
}
4814 
/*
 * Latch the display interrupt status registers into
 * rdev->irq.stat_regs.evergreen and acknowledge every asserted source
 * (pageflip, vblank, vline, HPD, HPD-RX and AFMT audio) so the hardware
 * can raise them again.  evergreen_irq_process() consumes the latched
 * copies afterwards.
 */
static void evergreen_irq_ack(struct radeon_device *rdev)
{
	u32 tmp;

	/* snapshot all status registers first, then ack */
	rdev->irq.stat_regs.evergreen.disp_int = RREG32(DISP_INTERRUPT_STATUS);
	rdev->irq.stat_regs.evergreen.disp_int_cont = RREG32(DISP_INTERRUPT_STATUS_CONTINUE);
	rdev->irq.stat_regs.evergreen.disp_int_cont2 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE2);
	rdev->irq.stat_regs.evergreen.disp_int_cont3 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE3);
	rdev->irq.stat_regs.evergreen.disp_int_cont4 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE4);
	rdev->irq.stat_regs.evergreen.disp_int_cont5 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE5);
	rdev->irq.stat_regs.evergreen.d1grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
	rdev->irq.stat_regs.evergreen.d2grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
	if (rdev->num_crtc >= 4) {
		rdev->irq.stat_regs.evergreen.d3grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
		rdev->irq.stat_regs.evergreen.d4grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
	}
	if (rdev->num_crtc >= 6) {
		rdev->irq.stat_regs.evergreen.d5grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
		rdev->irq.stat_regs.evergreen.d6grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);
	}

	rdev->irq.stat_regs.evergreen.afmt_status1 = RREG32(AFMT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
	rdev->irq.stat_regs.evergreen.afmt_status2 = RREG32(AFMT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
	rdev->irq.stat_regs.evergreen.afmt_status3 = RREG32(AFMT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
	rdev->irq.stat_regs.evergreen.afmt_status4 = RREG32(AFMT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
	rdev->irq.stat_regs.evergreen.afmt_status5 = RREG32(AFMT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
	rdev->irq.stat_regs.evergreen.afmt_status6 = RREG32(AFMT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);

	/* ack pageflip and vblank/vline on CRTC0/1 */
	if (rdev->irq.stat_regs.evergreen.d1grph_int & GRPH_PFLIP_INT_OCCURRED)
		WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
	if (rdev->irq.stat_regs.evergreen.d2grph_int & GRPH_PFLIP_INT_OCCURRED)
		WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
	if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT)
		WREG32(VBLANK_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VBLANK_ACK);
	if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT)
		WREG32(VLINE_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VLINE_ACK);
	if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT)
		WREG32(VBLANK_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VBLANK_ACK);
	if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT)
		WREG32(VLINE_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VLINE_ACK);

	/* same for CRTC2/3 when present */
	if (rdev->num_crtc >= 4) {
		if (rdev->irq.stat_regs.evergreen.d3grph_int & GRPH_PFLIP_INT_OCCURRED)
			WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
		if (rdev->irq.stat_regs.evergreen.d4grph_int & GRPH_PFLIP_INT_OCCURRED)
			WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
		if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT)
			WREG32(VBLANK_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VBLANK_ACK);
		if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT)
			WREG32(VLINE_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VLINE_ACK);
		if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT)
			WREG32(VBLANK_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VBLANK_ACK);
		if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT)
			WREG32(VLINE_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VLINE_ACK);
	}

	/* same for CRTC4/5 when present */
	if (rdev->num_crtc >= 6) {
		if (rdev->irq.stat_regs.evergreen.d5grph_int & GRPH_PFLIP_INT_OCCURRED)
			WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
		if (rdev->irq.stat_regs.evergreen.d6grph_int & GRPH_PFLIP_INT_OCCURRED)
			WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
		if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT)
			WREG32(VBLANK_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VBLANK_ACK);
		if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT)
			WREG32(VLINE_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VLINE_ACK);
		if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT)
			WREG32(VBLANK_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VBLANK_ACK);
		if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT)
			WREG32(VLINE_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VLINE_ACK);
	}

	/* ack HPD connect/disconnect interrupts */
	if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT) {
		tmp = RREG32(DC_HPD1_INT_CONTROL);
		tmp |= DC_HPDx_INT_ACK;
		WREG32(DC_HPD1_INT_CONTROL, tmp);
	}
	if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT) {
		tmp = RREG32(DC_HPD2_INT_CONTROL);
		tmp |= DC_HPDx_INT_ACK;
		WREG32(DC_HPD2_INT_CONTROL, tmp);
	}
	if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT) {
		tmp = RREG32(DC_HPD3_INT_CONTROL);
		tmp |= DC_HPDx_INT_ACK;
		WREG32(DC_HPD3_INT_CONTROL, tmp);
	}
	if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT) {
		tmp = RREG32(DC_HPD4_INT_CONTROL);
		tmp |= DC_HPDx_INT_ACK;
		WREG32(DC_HPD4_INT_CONTROL, tmp);
	}
	if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT) {
		tmp = RREG32(DC_HPD5_INT_CONTROL);
		tmp |= DC_HPDx_INT_ACK;
		WREG32(DC_HPD5_INT_CONTROL, tmp);
	}
	if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT) {
		tmp = RREG32(DC_HPD6_INT_CONTROL);
		tmp |= DC_HPDx_INT_ACK;
		WREG32(DC_HPD6_INT_CONTROL, tmp);
	}

	/* ack HPD RX (DisplayPort short-pulse) interrupts */
	if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_RX_INTERRUPT) {
		tmp = RREG32(DC_HPD1_INT_CONTROL);
		tmp |= DC_HPDx_RX_INT_ACK;
		WREG32(DC_HPD1_INT_CONTROL, tmp);
	}
	if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_RX_INTERRUPT) {
		tmp = RREG32(DC_HPD2_INT_CONTROL);
		tmp |= DC_HPDx_RX_INT_ACK;
		WREG32(DC_HPD2_INT_CONTROL, tmp);
	}
	if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_RX_INTERRUPT) {
		tmp = RREG32(DC_HPD3_INT_CONTROL);
		tmp |= DC_HPDx_RX_INT_ACK;
		WREG32(DC_HPD3_INT_CONTROL, tmp);
	}
	if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_RX_INTERRUPT) {
		tmp = RREG32(DC_HPD4_INT_CONTROL);
		tmp |= DC_HPDx_RX_INT_ACK;
		WREG32(DC_HPD4_INT_CONTROL, tmp);
	}
	if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_RX_INTERRUPT) {
		tmp = RREG32(DC_HPD5_INT_CONTROL);
		tmp |= DC_HPDx_RX_INT_ACK;
		WREG32(DC_HPD5_INT_CONTROL, tmp);
	}
	if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_RX_INTERRUPT) {
		tmp = RREG32(DC_HPD6_INT_CONTROL);
		tmp |= DC_HPDx_RX_INT_ACK;
		WREG32(DC_HPD6_INT_CONTROL, tmp);
	}

	/* ack AFMT audio format-change (write trigger) interrupts */
	if (rdev->irq.stat_regs.evergreen.afmt_status1 & AFMT_AZ_FORMAT_WTRIG) {
		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET);
		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, tmp);
	}
	if (rdev->irq.stat_regs.evergreen.afmt_status2 & AFMT_AZ_FORMAT_WTRIG) {
		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET);
		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, tmp);
	}
	if (rdev->irq.stat_regs.evergreen.afmt_status3 & AFMT_AZ_FORMAT_WTRIG) {
		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET);
		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, tmp);
	}
	if (rdev->irq.stat_regs.evergreen.afmt_status4 & AFMT_AZ_FORMAT_WTRIG) {
		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET);
		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, tmp);
	}
	if (rdev->irq.stat_regs.evergreen.afmt_status5 & AFMT_AZ_FORMAT_WTRIG) {
		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET);
		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, tmp);
	}
	if (rdev->irq.stat_regs.evergreen.afmt_status6 & AFMT_AZ_FORMAT_WTRIG) {
		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET);
		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, tmp);
	}
}
4979 
/*
 * Fully quiesce the interrupt hardware: stop the IH, give any in-flight
 * interrupt time to land, ack whatever is pending, then mask every source.
 */
static void evergreen_irq_disable(struct radeon_device *rdev)
{
	r600_disable_interrupts(rdev);
	/* Wait and acknowledge irq */
	mdelay(1);
	evergreen_irq_ack(rdev);
	evergreen_disable_interrupt_state(rdev);
}
4988 
/*
 * Suspend-path teardown: disable/ack all interrupts, then halt the RLC
 * (the shared r600-style RLC stop works for evergreen parts too).
 */
void evergreen_irq_suspend(struct radeon_device *rdev)
{
	evergreen_irq_disable(rdev);
	r600_rlc_stop(rdev);
}
4994 
/*
 * Read the IH ring write pointer, preferring the writeback copy when
 * writeback is enabled.  On ring overflow, resynchronize the read pointer
 * past the overwritten entries and clear the overflow flag in IH_RB_CNTL.
 * Returns the write pointer masked to the ring size.
 */
static u32 evergreen_get_ih_wptr(struct radeon_device *rdev)
{
	u32 wptr, tmp;

	if (rdev->wb.enabled)
		wptr = le32_to_cpu(rdev->wb.wb[R600_WB_IH_WPTR_OFFSET/4]);
	else
		wptr = RREG32(IH_RB_WPTR);

	if (wptr & RB_OVERFLOW) {
		wptr &= ~RB_OVERFLOW;
		/* When a ring buffer overflow happen start parsing interrupt
		 * from the last not overwritten vector (wptr + 16). Hopefully
		 * this should allow us to catchup.
		 */
		dev_warn(rdev->dev, "IH ring buffer overflow (0x%08X, 0x%08X, 0x%08X)\n",
			 wptr, rdev->ih.rptr, (wptr + 16) & rdev->ih.ptr_mask);
		rdev->ih.rptr = (wptr + 16) & rdev->ih.ptr_mask;
		tmp = RREG32(IH_RB_CNTL);
		tmp |= IH_WPTR_OVERFLOW_CLEAR;
		WREG32(IH_RB_CNTL, tmp);
	}
	return (wptr & rdev->ih.ptr_mask);
}
5019 
5020 irqreturn_t evergreen_irq_process(struct radeon_device *rdev)
5021 {
5022 	u32 wptr;
5023 	u32 rptr;
5024 	u32 src_id, src_data;
5025 	u32 ring_index;
5026 	bool queue_hotplug = false;
5027 	bool queue_hdmi = false;
5028 	bool queue_dp = false;
5029 	bool queue_thermal = false;
5030 	u32 status, addr;
5031 
5032 	if (!rdev->ih.enabled || rdev->shutdown)
5033 		return IRQ_NONE;
5034 
5035 	wptr = evergreen_get_ih_wptr(rdev);
5036 
5037 restart_ih:
5038 	/* is somebody else already processing irqs? */
5039 	if (atomic_xchg(&rdev->ih.lock, 1))
5040 		return IRQ_NONE;
5041 
5042 	rptr = rdev->ih.rptr;
5043 	DRM_DEBUG_VBLANK("evergreen_irq_process start: rptr %d, wptr %d\n", rptr, wptr);
5044 
5045 	/* Order reading of wptr vs. reading of IH ring data */
5046 	rmb();
5047 
5048 	/* display interrupts */
5049 	evergreen_irq_ack(rdev);
5050 
5051 	while (rptr != wptr) {
5052 		/* wptr/rptr are in bytes! */
5053 		ring_index = rptr / 4;
5054 		src_id =  le32_to_cpu(rdev->ih.ring[ring_index]) & 0xff;
5055 		src_data = le32_to_cpu(rdev->ih.ring[ring_index + 1]) & 0xfffffff;
5056 
5057 		switch (src_id) {
5058 		case 1: /* D1 vblank/vline */
5059 			switch (src_data) {
5060 			case 0: /* D1 vblank */
5061 				if (!(rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT))
5062 					DRM_DEBUG("IH: D1 vblank - IH event w/o asserted irq bit?\n");
5063 
5064 				if (rdev->irq.crtc_vblank_int[0]) {
5065 					drm_handle_vblank(rdev->ddev, 0);
5066 					rdev->pm.vblank_sync = true;
5067 					wake_up(&rdev->irq.vblank_queue);
5068 				}
5069 				if (atomic_read(&rdev->irq.pflip[0]))
5070 					radeon_crtc_handle_vblank(rdev, 0);
5071 				rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VBLANK_INTERRUPT;
5072 				DRM_DEBUG_VBLANK("IH: D1 vblank\n");
5073 
5074 				break;
5075 			case 1: /* D1 vline */
5076 				if (!(rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT))
5077 					DRM_DEBUG("IH: D1 vline - IH event w/o asserted irq bit?\n");
5078 
5079 				rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VLINE_INTERRUPT;
5080 				DRM_DEBUG_VBLANK("IH: D1 vline\n");
5081 
5082 				break;
5083 			default:
5084 				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5085 				break;
5086 			}
5087 			break;
5088 		case 2: /* D2 vblank/vline */
5089 			switch (src_data) {
5090 			case 0: /* D2 vblank */
5091 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT))
5092 					DRM_DEBUG("IH: D2 vblank - IH event w/o asserted irq bit?\n");
5093 
5094 				if (rdev->irq.crtc_vblank_int[1]) {
5095 					drm_handle_vblank(rdev->ddev, 1);
5096 					rdev->pm.vblank_sync = true;
5097 					wake_up(&rdev->irq.vblank_queue);
5098 				}
5099 				if (atomic_read(&rdev->irq.pflip[1]))
5100 					radeon_crtc_handle_vblank(rdev, 1);
5101 				rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VBLANK_INTERRUPT;
5102 				DRM_DEBUG_VBLANK("IH: D2 vblank\n");
5103 
5104 				break;
5105 			case 1: /* D2 vline */
5106 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT))
5107 					DRM_DEBUG("IH: D2 vline - IH event w/o asserted irq bit?\n");
5108 
5109 				rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VLINE_INTERRUPT;
5110 				DRM_DEBUG_VBLANK("IH: D2 vline\n");
5111 
5112 				break;
5113 			default:
5114 				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5115 				break;
5116 			}
5117 			break;
5118 		case 3: /* D3 vblank/vline */
5119 			switch (src_data) {
5120 			case 0: /* D3 vblank */
5121 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT))
5122 					DRM_DEBUG("IH: D3 vblank - IH event w/o asserted irq bit?\n");
5123 
5124 				if (rdev->irq.crtc_vblank_int[2]) {
5125 					drm_handle_vblank(rdev->ddev, 2);
5126 					rdev->pm.vblank_sync = true;
5127 					wake_up(&rdev->irq.vblank_queue);
5128 				}
5129 				if (atomic_read(&rdev->irq.pflip[2]))
5130 					radeon_crtc_handle_vblank(rdev, 2);
5131 				rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VBLANK_INTERRUPT;
5132 				DRM_DEBUG_VBLANK("IH: D3 vblank\n");
5133 
5134 				break;
5135 			case 1: /* D3 vline */
5136 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT))
5137 					DRM_DEBUG("IH: D3 vline - IH event w/o asserted irq bit?\n");
5138 
5139 				rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VLINE_INTERRUPT;
5140 				DRM_DEBUG_VBLANK("IH: D3 vline\n");
5141 
5142 				break;
5143 			default:
5144 				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5145 				break;
5146 			}
5147 			break;
5148 		case 4: /* D4 vblank/vline */
5149 			switch (src_data) {
5150 			case 0: /* D4 vblank */
5151 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT))
5152 					DRM_DEBUG("IH: D4 vblank - IH event w/o asserted irq bit?\n");
5153 
5154 				if (rdev->irq.crtc_vblank_int[3]) {
5155 					drm_handle_vblank(rdev->ddev, 3);
5156 					rdev->pm.vblank_sync = true;
5157 					wake_up(&rdev->irq.vblank_queue);
5158 				}
5159 				if (atomic_read(&rdev->irq.pflip[3]))
5160 					radeon_crtc_handle_vblank(rdev, 3);
5161 				rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VBLANK_INTERRUPT;
5162 				DRM_DEBUG_VBLANK("IH: D4 vblank\n");
5163 
5164 				break;
5165 			case 1: /* D4 vline */
5166 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT))
5167 					DRM_DEBUG("IH: D4 vline - IH event w/o asserted irq bit?\n");
5168 
5169 				rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VLINE_INTERRUPT;
5170 				DRM_DEBUG_VBLANK("IH: D4 vline\n");
5171 
5172 				break;
5173 			default:
5174 				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5175 				break;
5176 			}
5177 			break;
5178 		case 5: /* D5 vblank/vline */
5179 			switch (src_data) {
5180 			case 0: /* D5 vblank */
5181 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT))
5182 					DRM_DEBUG("IH: D5 vblank - IH event w/o asserted irq bit?\n");
5183 
5184 				if (rdev->irq.crtc_vblank_int[4]) {
5185 					drm_handle_vblank(rdev->ddev, 4);
5186 					rdev->pm.vblank_sync = true;
5187 					wake_up(&rdev->irq.vblank_queue);
5188 				}
5189 				if (atomic_read(&rdev->irq.pflip[4]))
5190 					radeon_crtc_handle_vblank(rdev, 4);
5191 				rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VBLANK_INTERRUPT;
5192 				DRM_DEBUG_VBLANK("IH: D5 vblank\n");
5193 
5194 				break;
5195 			case 1: /* D5 vline */
5196 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT))
5197 					DRM_DEBUG("IH: D5 vline - IH event w/o asserted irq bit?\n");
5198 
5199 				rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VLINE_INTERRUPT;
5200 				DRM_DEBUG_VBLANK("IH: D5 vline\n");
5201 
5202 				break;
5203 			default:
5204 				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5205 				break;
5206 			}
5207 			break;
5208 		case 6: /* D6 vblank/vline */
5209 			switch (src_data) {
5210 			case 0: /* D6 vblank */
5211 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT))
5212 					DRM_DEBUG("IH: D6 vblank - IH event w/o asserted irq bit?\n");
5213 
5214 				if (rdev->irq.crtc_vblank_int[5]) {
5215 					drm_handle_vblank(rdev->ddev, 5);
5216 					rdev->pm.vblank_sync = true;
5217 					wake_up(&rdev->irq.vblank_queue);
5218 				}
5219 				if (atomic_read(&rdev->irq.pflip[5]))
5220 					radeon_crtc_handle_vblank(rdev, 5);
5221 				rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VBLANK_INTERRUPT;
5222 				DRM_DEBUG_VBLANK("IH: D6 vblank\n");
5223 
5224 				break;
5225 			case 1: /* D6 vline */
5226 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT))
5227 					DRM_DEBUG("IH: D6 vline - IH event w/o asserted irq bit?\n");
5228 
5229 				rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VLINE_INTERRUPT;
5230 				DRM_DEBUG_VBLANK("IH: D6 vline\n");
5231 
5232 				break;
5233 			default:
5234 				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5235 				break;
5236 			}
5237 			break;
5238 		case 8: /* D1 page flip */
5239 		case 10: /* D2 page flip */
5240 		case 12: /* D3 page flip */
5241 		case 14: /* D4 page flip */
5242 		case 16: /* D5 page flip */
5243 		case 18: /* D6 page flip */
5244 			DRM_DEBUG_VBLANK("IH: D%d flip\n", ((src_id - 8) >> 1) + 1);
5245 			if (radeon_use_pflipirq > 0)
5246 				radeon_crtc_handle_flip(rdev, (src_id - 8) >> 1);
5247 			break;
5248 		case 42: /* HPD hotplug */
5249 			switch (src_data) {
5250 			case 0:
5251 				if (!(rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT))
5252 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5253 
5254 				rdev->irq.stat_regs.evergreen.disp_int &= ~DC_HPD1_INTERRUPT;
5255 				queue_hotplug = true;
5256 				DRM_DEBUG("IH: HPD1\n");
5257 				break;
5258 			case 1:
5259 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT))
5260 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5261 
5262 				rdev->irq.stat_regs.evergreen.disp_int_cont &= ~DC_HPD2_INTERRUPT;
5263 				queue_hotplug = true;
5264 				DRM_DEBUG("IH: HPD2\n");
5265 				break;
5266 			case 2:
5267 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT))
5268 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5269 
5270 				rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~DC_HPD3_INTERRUPT;
5271 				queue_hotplug = true;
5272 				DRM_DEBUG("IH: HPD3\n");
5273 				break;
5274 			case 3:
5275 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT))
5276 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5277 
5278 				rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~DC_HPD4_INTERRUPT;
5279 				queue_hotplug = true;
5280 				DRM_DEBUG("IH: HPD4\n");
5281 				break;
5282 			case 4:
5283 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT))
5284 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5285 
5286 				rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~DC_HPD5_INTERRUPT;
5287 				queue_hotplug = true;
5288 				DRM_DEBUG("IH: HPD5\n");
5289 				break;
5290 			case 5:
5291 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT))
5292 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5293 
5294 				rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~DC_HPD6_INTERRUPT;
5295 				queue_hotplug = true;
5296 				DRM_DEBUG("IH: HPD6\n");
5297 				break;
5298 			case 6:
5299 				if (!(rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_RX_INTERRUPT))
5300 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5301 
5302 				rdev->irq.stat_regs.evergreen.disp_int &= ~DC_HPD1_RX_INTERRUPT;
5303 				queue_dp = true;
5304 				DRM_DEBUG("IH: HPD_RX 1\n");
5305 				break;
5306 			case 7:
5307 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_RX_INTERRUPT))
5308 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5309 
5310 				rdev->irq.stat_regs.evergreen.disp_int_cont &= ~DC_HPD2_RX_INTERRUPT;
5311 				queue_dp = true;
5312 				DRM_DEBUG("IH: HPD_RX 2\n");
5313 				break;
5314 			case 8:
5315 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_RX_INTERRUPT))
5316 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5317 
5318 				rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~DC_HPD3_RX_INTERRUPT;
5319 				queue_dp = true;
5320 				DRM_DEBUG("IH: HPD_RX 3\n");
5321 				break;
5322 			case 9:
5323 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_RX_INTERRUPT))
5324 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5325 
5326 				rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~DC_HPD4_RX_INTERRUPT;
5327 				queue_dp = true;
5328 				DRM_DEBUG("IH: HPD_RX 4\n");
5329 				break;
5330 			case 10:
5331 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_RX_INTERRUPT))
5332 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5333 
5334 				rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~DC_HPD5_RX_INTERRUPT;
5335 				queue_dp = true;
5336 				DRM_DEBUG("IH: HPD_RX 5\n");
5337 				break;
5338 			case 11:
5339 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_RX_INTERRUPT))
5340 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5341 
5342 				rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~DC_HPD6_RX_INTERRUPT;
5343 				queue_dp = true;
5344 				DRM_DEBUG("IH: HPD_RX 6\n");
5345 				break;
5346 			default:
5347 				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5348 				break;
5349 			}
5350 			break;
5351 		case 44: /* hdmi */
5352 			switch (src_data) {
5353 			case 0:
5354 				if (!(rdev->irq.stat_regs.evergreen.afmt_status1 & AFMT_AZ_FORMAT_WTRIG))
5355 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5356 
5357 				rdev->irq.stat_regs.evergreen.afmt_status1 &= ~AFMT_AZ_FORMAT_WTRIG;
5358 				queue_hdmi = true;
5359 				DRM_DEBUG("IH: HDMI0\n");
5360 				break;
5361 			case 1:
5362 				if (!(rdev->irq.stat_regs.evergreen.afmt_status2 & AFMT_AZ_FORMAT_WTRIG))
5363 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5364 
5365 				rdev->irq.stat_regs.evergreen.afmt_status2 &= ~AFMT_AZ_FORMAT_WTRIG;
5366 				queue_hdmi = true;
5367 				DRM_DEBUG("IH: HDMI1\n");
5368 				break;
5369 			case 2:
5370 				if (!(rdev->irq.stat_regs.evergreen.afmt_status3 & AFMT_AZ_FORMAT_WTRIG))
5371 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5372 
5373 				rdev->irq.stat_regs.evergreen.afmt_status3 &= ~AFMT_AZ_FORMAT_WTRIG;
5374 				queue_hdmi = true;
5375 				DRM_DEBUG("IH: HDMI2\n");
5376 				break;
5377 			case 3:
5378 				if (!(rdev->irq.stat_regs.evergreen.afmt_status4 & AFMT_AZ_FORMAT_WTRIG))
5379 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5380 
5381 				rdev->irq.stat_regs.evergreen.afmt_status4 &= ~AFMT_AZ_FORMAT_WTRIG;
5382 				queue_hdmi = true;
5383 				DRM_DEBUG("IH: HDMI3\n");
5384 				break;
5385 			case 4:
5386 				if (!(rdev->irq.stat_regs.evergreen.afmt_status5 & AFMT_AZ_FORMAT_WTRIG))
5387 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5388 
5389 				rdev->irq.stat_regs.evergreen.afmt_status5 &= ~AFMT_AZ_FORMAT_WTRIG;
5390 				queue_hdmi = true;
5391 				DRM_DEBUG("IH: HDMI4\n");
5392 				break;
5393 			case 5:
5394 				if (!(rdev->irq.stat_regs.evergreen.afmt_status6 & AFMT_AZ_FORMAT_WTRIG))
5395 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5396 
5397 				rdev->irq.stat_regs.evergreen.afmt_status6 &= ~AFMT_AZ_FORMAT_WTRIG;
5398 				queue_hdmi = true;
5399 				DRM_DEBUG("IH: HDMI5\n");
5400 				break;
5401 			default:
5402 				DRM_ERROR("Unhandled interrupt: %d %d\n", src_id, src_data);
5403 				break;
5404 			}
5405 		case 96:
5406 			DRM_ERROR("SRBM_READ_ERROR: 0x%x\n", RREG32(SRBM_READ_ERROR));
5407 			WREG32(SRBM_INT_ACK, 0x1);
5408 			break;
5409 		case 124: /* UVD */
5410 			DRM_DEBUG("IH: UVD int: 0x%08x\n", src_data);
5411 			radeon_fence_process(rdev, R600_RING_TYPE_UVD_INDEX);
5412 			break;
5413 		case 146:
5414 		case 147:
5415 			addr = RREG32(VM_CONTEXT1_PROTECTION_FAULT_ADDR);
5416 			status = RREG32(VM_CONTEXT1_PROTECTION_FAULT_STATUS);
5417 			/* reset addr and status */
5418 			WREG32_P(VM_CONTEXT1_CNTL2, 1, ~1);
5419 			if (addr == 0x0 && status == 0x0)
5420 				break;
5421 			dev_err(rdev->dev, "GPU fault detected: %d 0x%08x\n", src_id, src_data);
5422 			dev_err(rdev->dev, "  VM_CONTEXT1_PROTECTION_FAULT_ADDR   0x%08X\n",
5423 				addr);
5424 			dev_err(rdev->dev, "  VM_CONTEXT1_PROTECTION_FAULT_STATUS 0x%08X\n",
5425 				status);
5426 			cayman_vm_decode_fault(rdev, status, addr);
5427 			break;
5428 		case 176: /* CP_INT in ring buffer */
5429 		case 177: /* CP_INT in IB1 */
5430 		case 178: /* CP_INT in IB2 */
5431 			DRM_DEBUG("IH: CP int: 0x%08x\n", src_data);
5432 			radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
5433 			break;
5434 		case 181: /* CP EOP event */
5435 			DRM_DEBUG("IH: CP EOP\n");
5436 			if (rdev->family >= CHIP_CAYMAN) {
5437 				switch (src_data) {
5438 				case 0:
5439 					radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
5440 					break;
5441 				case 1:
5442 					radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP1_INDEX);
5443 					break;
5444 				case 2:
5445 					radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP2_INDEX);
5446 					break;
5447 				}
5448 			} else
5449 				radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
5450 			break;
5451 		case 224: /* DMA trap event */
5452 			DRM_DEBUG("IH: DMA trap\n");
5453 			radeon_fence_process(rdev, R600_RING_TYPE_DMA_INDEX);
5454 			break;
5455 		case 230: /* thermal low to high */
5456 			DRM_DEBUG("IH: thermal low to high\n");
5457 			rdev->pm.dpm.thermal.high_to_low = false;
5458 			queue_thermal = true;
5459 			break;
5460 		case 231: /* thermal high to low */
5461 			DRM_DEBUG("IH: thermal high to low\n");
5462 			rdev->pm.dpm.thermal.high_to_low = true;
5463 			queue_thermal = true;
5464 			break;
5465 		case 233: /* GUI IDLE */
5466 			DRM_DEBUG("IH: GUI idle\n");
5467 			break;
5468 		case 244: /* DMA trap event */
5469 			if (rdev->family >= CHIP_CAYMAN) {
5470 				DRM_DEBUG("IH: DMA1 trap\n");
5471 				radeon_fence_process(rdev, CAYMAN_RING_TYPE_DMA1_INDEX);
5472 			}
5473 			break;
5474 		default:
5475 			DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5476 			break;
5477 		}
5478 
5479 		/* wptr/rptr are in bytes! */
5480 		rptr += 16;
5481 		rptr &= rdev->ih.ptr_mask;
5482 		WREG32(IH_RB_RPTR, rptr);
5483 	}
5484 	if (queue_dp)
5485 		schedule_work(&rdev->dp_work);
5486 	if (queue_hotplug)
5487 		schedule_delayed_work(&rdev->hotplug_work, 0);
5488 	if (queue_hdmi)
5489 		schedule_work(&rdev->audio_work);
5490 	if (queue_thermal && rdev->pm.dpm_enabled)
5491 		schedule_work(&rdev->pm.dpm.thermal.work);
5492 	rdev->ih.rptr = rptr;
5493 	atomic_set(&rdev->ih.lock, 0);
5494 
5495 	/* make sure wptr hasn't changed while processing */
5496 	wptr = evergreen_get_ih_wptr(rdev);
5497 	if (wptr != rptr)
5498 		goto restart_ih;
5499 
5500 	return IRQ_HANDLED;
5501 }
5502 
5503 static void evergreen_uvd_init(struct radeon_device *rdev)
5504 {
5505 	int r;
5506 
5507 	if (!rdev->has_uvd)
5508 		return;
5509 
5510 	r = radeon_uvd_init(rdev);
5511 	if (r) {
5512 		dev_err(rdev->dev, "failed UVD (%d) init.\n", r);
5513 		/*
5514 		 * At this point rdev->uvd.vcpu_bo is NULL which trickles down
5515 		 * to early fails uvd_v2_2_resume() and thus nothing happens
5516 		 * there. So it is pointless to try to go through that code
5517 		 * hence why we disable uvd here.
5518 		 */
5519 		rdev->has_uvd = 0;
5520 		return;
5521 	}
5522 	rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_obj = NULL;
5523 	r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_UVD_INDEX], 4096);
5524 }
5525 
5526 static void evergreen_uvd_start(struct radeon_device *rdev)
5527 {
5528 	int r;
5529 
5530 	if (!rdev->has_uvd)
5531 		return;
5532 
5533 	r = uvd_v2_2_resume(rdev);
5534 	if (r) {
5535 		dev_err(rdev->dev, "failed UVD resume (%d).\n", r);
5536 		goto error;
5537 	}
5538 	r = radeon_fence_driver_start_ring(rdev, R600_RING_TYPE_UVD_INDEX);
5539 	if (r) {
5540 		dev_err(rdev->dev, "failed initializing UVD fences (%d).\n", r);
5541 		goto error;
5542 	}
5543 	return;
5544 
5545 error:
5546 	rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size = 0;
5547 }
5548 
5549 static void evergreen_uvd_resume(struct radeon_device *rdev)
5550 {
5551 	struct radeon_ring *ring;
5552 	int r;
5553 
5554 	if (!rdev->has_uvd || !rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size)
5555 		return;
5556 
5557 	ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX];
5558 	r = radeon_ring_init(rdev, ring, ring->ring_size, 0, PACKET0(UVD_NO_OP, 0));
5559 	if (r) {
5560 		dev_err(rdev->dev, "failed initializing UVD ring (%d).\n", r);
5561 		return;
5562 	}
5563 	r = uvd_v1_0_init(rdev);
5564 	if (r) {
5565 		dev_err(rdev->dev, "failed initializing UVD (%d).\n", r);
5566 		return;
5567 	}
5568 }
5569 
/*
 * Bring the ASIC to a fully operational state: link/ASPM setup, MC
 * programming, GART, RLC/WB buffers, fences, IRQs, CP/DMA rings, UVD,
 * IB pool and audio.  The steps are order-dependent; later steps assume
 * earlier ones succeeded.  Returns 0 on success or a negative errno
 * from the first failing step.
 */
static int evergreen_startup(struct radeon_device *rdev)
{
	struct radeon_ring *ring;
	int r;

	/* enable pcie gen2 link */
	evergreen_pcie_gen2_enable(rdev);
	/* enable aspm */
	evergreen_program_aspm(rdev);

	/* scratch needs to be initialized before MC */
	r = r600_vram_scratch_init(rdev);
	if (r)
		return r;

	evergreen_mc_program(rdev);

	/* DCE5 (NI) parts need MC microcode; when DPM is enabled the
	 * microcode is loaded by the DPM code instead. */
	if (ASIC_IS_DCE5(rdev) && !rdev->pm.dpm_enabled) {
		r = ni_mc_load_microcode(rdev);
		if (r) {
			DRM_ERROR("Failed to load MC firmware!\n");
			return r;
		}
	}

	/* system memory access goes through either AGP or the PCIe GART */
	if (rdev->flags & RADEON_IS_AGP) {
		evergreen_agp_enable(rdev);
	} else {
		r = evergreen_pcie_gart_enable(rdev);
		if (r)
			return r;
	}
	evergreen_gpu_init(rdev);

	/* allocate rlc buffers */
	if (rdev->flags & RADEON_IS_IGP) {
		rdev->rlc.reg_list = sumo_rlc_save_restore_register_list;
		rdev->rlc.reg_list_size =
			(u32)ARRAY_SIZE(sumo_rlc_save_restore_register_list);
		rdev->rlc.cs_data = evergreen_cs_data;
		r = sumo_rlc_init(rdev);
		if (r) {
			DRM_ERROR("Failed to init rlc BOs!\n");
			return r;
		}
	}

	/* allocate wb buffer */
	r = radeon_wb_init(rdev);
	if (r)
		return r;

	r = radeon_fence_driver_start_ring(rdev, RADEON_RING_TYPE_GFX_INDEX);
	if (r) {
		dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r);
		return r;
	}

	r = radeon_fence_driver_start_ring(rdev, R600_RING_TYPE_DMA_INDEX);
	if (r) {
		dev_err(rdev->dev, "failed initializing DMA fences (%d).\n", r);
		return r;
	}

	/* best effort: a UVD failure here disables UVD but does not abort */
	evergreen_uvd_start(rdev);

	/* Enable IRQ */
	if (!rdev->irq.installed) {
		r = radeon_irq_kms_init(rdev);
		if (r)
			return r;
	}

	r = r600_irq_init(rdev);
	if (r) {
		DRM_ERROR("radeon: IH init failed (%d).\n", r);
		radeon_irq_kms_fini(rdev);
		return r;
	}
	evergreen_irq_set(rdev);

	ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
	r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP_RPTR_OFFSET,
			     RADEON_CP_PACKET2);
	if (r)
		return r;

	ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX];
	r = radeon_ring_init(rdev, ring, ring->ring_size, R600_WB_DMA_RPTR_OFFSET,
			     DMA_PACKET(DMA_PACKET_NOP, 0, 0));
	if (r)
		return r;

	r = evergreen_cp_load_microcode(rdev);
	if (r)
		return r;
	r = evergreen_cp_resume(rdev);
	if (r)
		return r;
	r = r600_dma_resume(rdev);
	if (r)
		return r;

	evergreen_uvd_resume(rdev);

	r = radeon_ib_pool_init(rdev);
	if (r) {
		dev_err(rdev->dev, "IB initialization failed (%d).\n", r);
		return r;
	}

	r = radeon_audio_init(rdev);
	if (r) {
		DRM_ERROR("radeon: audio init failed\n");
		return r;
	}

	return 0;
}
5689 
5690 int evergreen_resume(struct radeon_device *rdev)
5691 {
5692 	int r;
5693 
5694 	/* reset the asic, the gfx blocks are often in a bad state
5695 	 * after the driver is unloaded or after a resume
5696 	 */
5697 	if (radeon_asic_reset(rdev))
5698 		dev_warn(rdev->dev, "GPU reset failed !\n");
5699 	/* Do not reset GPU before posting, on rv770 hw unlike on r500 hw,
5700 	 * posting will perform necessary task to bring back GPU into good
5701 	 * shape.
5702 	 */
5703 	/* post card */
5704 	atom_asic_init(rdev->mode_info.atom_context);
5705 
5706 	/* init golden registers */
5707 	evergreen_init_golden_registers(rdev);
5708 
5709 	if (rdev->pm.pm_method == PM_METHOD_DPM)
5710 		radeon_pm_resume(rdev);
5711 
5712 	rdev->accel_working = true;
5713 	r = evergreen_startup(rdev);
5714 	if (r) {
5715 		DRM_ERROR("evergreen startup failed on resume\n");
5716 		rdev->accel_working = false;
5717 		return r;
5718 	}
5719 
5720 	return r;
5721 
5722 }
5723 
/*
 * Suspend entry point.  Quiesces the chip roughly in reverse startup
 * order: PM and audio first, then UVD, the CP/DMA engines, interrupts,
 * writeback and finally the GART.  Always returns 0.
 */
int evergreen_suspend(struct radeon_device *rdev)
{
	radeon_pm_suspend(rdev);
	radeon_audio_fini(rdev);
	if (rdev->has_uvd) {
		uvd_v1_0_fini(rdev);
		radeon_uvd_suspend(rdev);
	}
	/* stop the command processor and DMA engine before tearing down IRQs */
	r700_cp_stop(rdev);
	r600_dma_stop(rdev);
	evergreen_irq_suspend(rdev);
	radeon_wb_disable(rdev);
	evergreen_pcie_gart_disable(rdev);

	return 0;
}
5740 
5741 /* Plan is to move initialization in that function and use
5742  * helper function so that radeon_device_init pretty much
5743  * do nothing more than calling asic specific function. This
5744  * should also allow to remove a bunch of callback function
5745  * like vram_info.
5746  */
int evergreen_init(struct radeon_device *rdev)
{
	int r;

	/* Read BIOS */
	if (!radeon_get_bios(rdev)) {
		if (ASIC_IS_AVIVO(rdev))
			return -EINVAL;
	}
	/* Must be an ATOMBIOS */
	if (!rdev->is_atom_bios) {
		dev_err(rdev->dev, "Expecting atombios for evergreen GPU\n");
		return -EINVAL;
	}
	r = radeon_atombios_init(rdev);
	if (r)
		return r;
	/* reset the asic, the gfx blocks are often in a bad state
	 * after the driver is unloaded or after a resume
	 */
	if (radeon_asic_reset(rdev))
		dev_warn(rdev->dev, "GPU reset failed !\n");
	/* Post card if necessary */
	if (!radeon_card_posted(rdev)) {
		if (!rdev->bios) {
			dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n");
			return -EINVAL;
		}
		DRM_INFO("GPU not posted. posting now...\n");
		atom_asic_init(rdev->mode_info.atom_context);
	}
	/* init golden registers */
	evergreen_init_golden_registers(rdev);
	/* Initialize scratch registers */
	r600_scratch_init(rdev);
	/* Initialize surface registers */
	radeon_surface_init(rdev);
	/* Initialize clocks */
	radeon_get_clock_info(rdev->ddev);
	/* Fence driver */
	r = radeon_fence_driver_init(rdev);
	if (r)
		return r;
	/* initialize AGP */
	if (rdev->flags & RADEON_IS_AGP) {
		r = radeon_agp_init(rdev);
		if (r)
			radeon_agp_disable(rdev);
	}
	/* initialize memory controller */
	r = evergreen_mc_init(rdev);
	if (r)
		return r;
	/* Memory manager */
	r = radeon_bo_init(rdev);
	if (r)
		return r;

	/* Fetch CP/PFP/RLC (and, on DCE5, MC) microcode if not already loaded */
	if (ASIC_IS_DCE5(rdev)) {
		if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw || !rdev->mc_fw) {
			r = ni_init_microcode(rdev);
			if (r) {
				DRM_ERROR("Failed to load firmware!\n");
				return r;
			}
		}
	} else {
		if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw) {
			r = r600_init_microcode(rdev);
			if (r) {
				DRM_ERROR("Failed to load firmware!\n");
				return r;
			}
		}
	}

	/* Initialize power management */
	radeon_pm_init(rdev);

	/* set up the GFX, DMA and (optionally) UVD rings */
	rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ring_obj = NULL;
	r600_ring_init(rdev, &rdev->ring[RADEON_RING_TYPE_GFX_INDEX], 1024 * 1024);

	rdev->ring[R600_RING_TYPE_DMA_INDEX].ring_obj = NULL;
	r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_DMA_INDEX], 64 * 1024);

	evergreen_uvd_init(rdev);

	rdev->ih.ring_obj = NULL;
	r600_ih_ring_init(rdev, 64 * 1024);

	r = r600_pcie_gart_init(rdev);
	if (r)
		return r;

#ifdef __DragonFly__
	/*
	   There are unresolved crashes on evergreen hardware,
	   tell userland acceleration is not working properly
	   Bug report: https://bugs.dragonflybsd.org/issues/3198
	*/
	rdev->accel_working = false;
	DRM_ERROR("GPU acceleration disabled for now on DragonFly\n");
#else
	rdev->accel_working = true;
#endif
	/* a startup failure here is not fatal: we keep the device but
	 * disable acceleration (modesetting still works) */
	r = evergreen_startup(rdev);
	if (r) {
		dev_err(rdev->dev, "disabling GPU acceleration\n");
		r700_cp_fini(rdev);
		r600_dma_fini(rdev);
		r600_irq_fini(rdev);
		if (rdev->flags & RADEON_IS_IGP)
			sumo_rlc_fini(rdev);
		radeon_wb_fini(rdev);
		radeon_ib_pool_fini(rdev);
		radeon_irq_kms_fini(rdev);
		evergreen_pcie_gart_fini(rdev);
		rdev->accel_working = false;
	}

	/* Don't start up if the MC ucode is missing on BTC parts.
	 * The default clocks and voltages before the MC ucode
	 * is loaded are not suffient for advanced operations.
	 */
	if (ASIC_IS_DCE5(rdev)) {
		if (!rdev->mc_fw && !(rdev->flags & RADEON_IS_IGP)) {
			DRM_ERROR("radeon: MC ucode required for NI+.\n");
			return -EINVAL;
		}
	}

	return 0;
}
5880 
/*
 * Driver unload: tear everything down in roughly the reverse order of
 * evergreen_init()/evergreen_startup().  The ordering matters — engines
 * are stopped before IRQs, IRQs before the GART, and the BIOS copy is
 * freed last.
 */
void evergreen_fini(struct radeon_device *rdev)
{
	radeon_pm_fini(rdev);
	radeon_audio_fini(rdev);
	r700_cp_fini(rdev);
	r600_dma_fini(rdev);
	r600_irq_fini(rdev);
	if (rdev->flags & RADEON_IS_IGP)
		sumo_rlc_fini(rdev);
	radeon_wb_fini(rdev);
	radeon_ib_pool_fini(rdev);
	radeon_irq_kms_fini(rdev);
	uvd_v1_0_fini(rdev);
	radeon_uvd_fini(rdev);
	evergreen_pcie_gart_fini(rdev);
	r600_vram_scratch_fini(rdev);
	radeon_gem_fini(rdev);
	radeon_fence_driver_fini(rdev);
	radeon_agp_fini(rdev);
	radeon_bo_fini(rdev);
	radeon_atombios_fini(rdev);
	/* free the matching microcode set loaded in evergreen_init() */
	if (ASIC_IS_DCE5(rdev))
		ni_fini_microcode(rdev);
	else
		r600_fini_microcode(rdev);
	kfree(rdev->bios);
	rdev->bios = NULL;
}
5909 
5910 void evergreen_pcie_gen2_enable(struct radeon_device *rdev)
5911 {
5912 	u32 link_width_cntl, speed_cntl;
5913 	u32 mask;
5914 
5915 	if (radeon_pcie_gen2 == 0)
5916 		return;
5917 
5918 	if (rdev->flags & RADEON_IS_IGP)
5919 		return;
5920 
5921 	if (!(rdev->flags & RADEON_IS_PCIE))
5922 		return;
5923 
5924 	/* x2 cards have a special sequence */
5925 	if (ASIC_IS_X2(rdev))
5926 		return;
5927 
5928 #ifdef __DragonFly__
5929 	if (drm_pcie_get_speed_cap_mask(rdev->ddev, &mask) != 0)
5930 		return;
5931 #endif
5932 
5933 	if (!(mask & DRM_PCIE_SPEED_50))
5934 		return;
5935 
5936 	speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5937 	if (speed_cntl & LC_CURRENT_DATA_RATE) {
5938 		DRM_INFO("PCIE gen 2 link speeds already enabled\n");
5939 		return;
5940 	}
5941 
5942 	DRM_INFO("enabling PCIE gen 2 link speeds, disable with radeon.pcie_gen2=0\n");
5943 
5944 	if ((speed_cntl & LC_OTHER_SIDE_EVER_SENT_GEN2) ||
5945 	    (speed_cntl & LC_OTHER_SIDE_SUPPORTS_GEN2)) {
5946 
5947 		link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
5948 		link_width_cntl &= ~LC_UPCONFIGURE_DIS;
5949 		WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
5950 
5951 		speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5952 		speed_cntl &= ~LC_TARGET_LINK_SPEED_OVERRIDE_EN;
5953 		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
5954 
5955 		speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5956 		speed_cntl |= LC_CLR_FAILED_SPD_CHANGE_CNT;
5957 		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
5958 
5959 		speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5960 		speed_cntl &= ~LC_CLR_FAILED_SPD_CHANGE_CNT;
5961 		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
5962 
5963 		speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5964 		speed_cntl |= LC_GEN2_EN_STRAP;
5965 		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
5966 
5967 	} else {
5968 		link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
5969 		/* XXX: only disable it if gen1 bridge vendor == 0x111d or 0x1106 */
5970 		if (1)
5971 			link_width_cntl |= LC_UPCONFIGURE_DIS;
5972 		else
5973 			link_width_cntl &= ~LC_UPCONFIGURE_DIS;
5974 		WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
5975 	}
5976 }
5977 
/*
 * Program PCIe Active State Power Management (L0s/L1) and the related
 * PIF/PHY PLL power-down settings.  Which states are enabled depends on
 * the chip family; radeon.aspm=0 disables all of it.  All register
 * writes are read-modify-write and skipped when the value is unchanged.
 */
void evergreen_program_aspm(struct radeon_device *rdev)
{
	u32 data, orig;
	u32 pcie_lc_cntl, pcie_lc_cntl_old;
	bool disable_l0s, disable_l1 = false, disable_plloff_in_l1 = false;
	/* fusion_platform = true
	 * if the system is a fusion system
	 * (APU or DGPU in a fusion system).
	 * todo: check if the system is a fusion platform.
	 */
	bool fusion_platform = false;

	if (radeon_aspm == 0)
		return;

	if (!(rdev->flags & RADEON_IS_PCIE))
		return;

	/* these families keep L0s disabled; the rest may enable it below */
	switch (rdev->family) {
	case CHIP_CYPRESS:
	case CHIP_HEMLOCK:
	case CHIP_JUNIPER:
	case CHIP_REDWOOD:
	case CHIP_CEDAR:
	case CHIP_SUMO:
	case CHIP_SUMO2:
	case CHIP_PALM:
	case CHIP_ARUBA:
		disable_l0s = true;
		break;
	default:
		disable_l0s = false;
		break;
	}

	if (rdev->flags & RADEON_IS_IGP)
		fusion_platform = true; /* XXX also dGPUs in a fusion system */

	/* PIF pairing mode differs between fusion and discrete platforms */
	data = orig = RREG32_PIF_PHY0(PB0_PIF_PAIRING);
	if (fusion_platform)
		data &= ~MULTI_PIF;
	else
		data |= MULTI_PIF;
	if (data != orig)
		WREG32_PIF_PHY0(PB0_PIF_PAIRING, data);

	data = orig = RREG32_PIF_PHY1(PB1_PIF_PAIRING);
	if (fusion_platform)
		data &= ~MULTI_PIF;
	else
		data |= MULTI_PIF;
	if (data != orig)
		WREG32_PIF_PHY1(PB1_PIF_PAIRING, data);

	/* program L0s/L1 inactivity timeouts (0 == state disabled) */
	pcie_lc_cntl = pcie_lc_cntl_old = RREG32_PCIE_PORT(PCIE_LC_CNTL);
	pcie_lc_cntl &= ~(LC_L0S_INACTIVITY_MASK | LC_L1_INACTIVITY_MASK);
	if (!disable_l0s) {
		if (rdev->family >= CHIP_BARTS)
			pcie_lc_cntl |= LC_L0S_INACTIVITY(7);
		else
			pcie_lc_cntl |= LC_L0S_INACTIVITY(3);
	}

	if (!disable_l1) {
		if (rdev->family >= CHIP_BARTS)
			pcie_lc_cntl |= LC_L1_INACTIVITY(7);
		else
			pcie_lc_cntl |= LC_L1_INACTIVITY(8);

		/* allow the PHY PLLs to power down while the link is in L1 */
		if (!disable_plloff_in_l1) {
			data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0);
			data &= ~(PLL_POWER_STATE_IN_OFF_0_MASK | PLL_POWER_STATE_IN_TXS2_0_MASK);
			data |= PLL_POWER_STATE_IN_OFF_0(7) | PLL_POWER_STATE_IN_TXS2_0(7);
			if (data != orig)
				WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0, data);

			data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1);
			data &= ~(PLL_POWER_STATE_IN_OFF_1_MASK | PLL_POWER_STATE_IN_TXS2_1_MASK);
			data |= PLL_POWER_STATE_IN_OFF_1(7) | PLL_POWER_STATE_IN_TXS2_1(7);
			if (data != orig)
				WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1, data);

			data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0);
			data &= ~(PLL_POWER_STATE_IN_OFF_0_MASK | PLL_POWER_STATE_IN_TXS2_0_MASK);
			data |= PLL_POWER_STATE_IN_OFF_0(7) | PLL_POWER_STATE_IN_TXS2_0(7);
			if (data != orig)
				WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0, data);

			data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1);
			data &= ~(PLL_POWER_STATE_IN_OFF_1_MASK | PLL_POWER_STATE_IN_TXS2_1_MASK);
			data |= PLL_POWER_STATE_IN_OFF_1(7) | PLL_POWER_STATE_IN_TXS2_1(7);
			if (data != orig)
				WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1, data);

			/* BARTS and newer additionally tune the PLL ramp-up time */
			if (rdev->family >= CHIP_BARTS) {
				data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0);
				data &= ~PLL_RAMP_UP_TIME_0_MASK;
				data |= PLL_RAMP_UP_TIME_0(4);
				if (data != orig)
					WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0, data);

				data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1);
				data &= ~PLL_RAMP_UP_TIME_1_MASK;
				data |= PLL_RAMP_UP_TIME_1(4);
				if (data != orig)
					WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1, data);

				data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0);
				data &= ~PLL_RAMP_UP_TIME_0_MASK;
				data |= PLL_RAMP_UP_TIME_0(4);
				if (data != orig)
					WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0, data);

				data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1);
				data &= ~PLL_RAMP_UP_TIME_1_MASK;
				data |= PLL_RAMP_UP_TIME_1(4);
				if (data != orig)
					WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1, data);
			}

			data = orig = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
			data &= ~LC_DYN_LANES_PWR_STATE_MASK;
			data |= LC_DYN_LANES_PWR_STATE(3);
			if (data != orig)
				WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, data);

			if (rdev->family >= CHIP_BARTS) {
				data = orig = RREG32_PIF_PHY0(PB0_PIF_CNTL);
				data &= ~LS2_EXIT_TIME_MASK;
				data |= LS2_EXIT_TIME(1);
				if (data != orig)
					WREG32_PIF_PHY0(PB0_PIF_CNTL, data);

				data = orig = RREG32_PIF_PHY1(PB1_PIF_CNTL);
				data &= ~LS2_EXIT_TIME_MASK;
				data |= LS2_EXIT_TIME(1);
				if (data != orig)
					WREG32_PIF_PHY1(PB1_PIF_CNTL, data);
			}
		}
	}

	/* evergreen parts only */
	if (rdev->family < CHIP_BARTS)
		pcie_lc_cntl |= LC_PMI_TO_L1_DIS;

	if (pcie_lc_cntl != pcie_lc_cntl_old)
		WREG32_PCIE_PORT(PCIE_LC_CNTL, pcie_lc_cntl);
}
6127