xref: /dragonfly/sys/dev/drm/radeon/evergreen.c (revision d8d5b238)
1 /*
2  * Copyright 2010 Advanced Micro Devices, Inc.
3  *
4  * Permission is hereby granted, free of charge, to any person obtaining a
5  * copy of this software and associated documentation files (the "Software"),
6  * to deal in the Software without restriction, including without limitation
7  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8  * and/or sell copies of the Software, and to permit persons to whom the
9  * Software is furnished to do so, subject to the following conditions:
10  *
11  * The above copyright notice and this permission notice shall be included in
12  * all copies or substantial portions of the Software.
13  *
14  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
17  * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18  * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19  * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20  * OTHER DEALINGS IN THE SOFTWARE.
21  *
22  * Authors: Alex Deucher
23  */
24 #include <linux/firmware.h>
25 #include <drm/drmP.h>
26 #include "radeon.h"
27 #include "radeon_asic.h"
28 #include "radeon_audio.h"
29 #include <drm/radeon_drm.h>
30 #include "evergreend.h"
31 #include "atom.h"
32 #include "avivod.h"
33 #include "evergreen_reg.h"
34 #include "evergreen_blit_shaders.h"
35 #include "radeon_ucode.h"
36 
37 /*
38  * Indirect registers accessor
39  */
40 u32 eg_cg_rreg(struct radeon_device *rdev, u32 reg)
41 {
42 	u32 r;
43 
44 	spin_lock(&rdev->cg_idx_lock);
45 	WREG32(EVERGREEN_CG_IND_ADDR, ((reg) & 0xffff));
46 	r = RREG32(EVERGREEN_CG_IND_DATA);
47 	spin_unlock(&rdev->cg_idx_lock);
48 	return r;
49 }
50 
51 void eg_cg_wreg(struct radeon_device *rdev, u32 reg, u32 v)
52 {
53 	spin_lock(&rdev->cg_idx_lock);
54 	WREG32(EVERGREEN_CG_IND_ADDR, ((reg) & 0xffff));
55 	WREG32(EVERGREEN_CG_IND_DATA, (v));
56 	spin_unlock(&rdev->cg_idx_lock);
57 }
58 
59 u32 eg_pif_phy0_rreg(struct radeon_device *rdev, u32 reg)
60 {
61 	u32 r;
62 
63 	spin_lock(&rdev->pif_idx_lock);
64 	WREG32(EVERGREEN_PIF_PHY0_INDEX, ((reg) & 0xffff));
65 	r = RREG32(EVERGREEN_PIF_PHY0_DATA);
66 	spin_unlock(&rdev->pif_idx_lock);
67 	return r;
68 }
69 
70 void eg_pif_phy0_wreg(struct radeon_device *rdev, u32 reg, u32 v)
71 {
72 	spin_lock(&rdev->pif_idx_lock);
73 	WREG32(EVERGREEN_PIF_PHY0_INDEX, ((reg) & 0xffff));
74 	WREG32(EVERGREEN_PIF_PHY0_DATA, (v));
75 	spin_unlock(&rdev->pif_idx_lock);
76 }
77 
78 u32 eg_pif_phy1_rreg(struct radeon_device *rdev, u32 reg)
79 {
80 	u32 r;
81 
82 	spin_lock(&rdev->pif_idx_lock);
83 	WREG32(EVERGREEN_PIF_PHY1_INDEX, ((reg) & 0xffff));
84 	r = RREG32(EVERGREEN_PIF_PHY1_DATA);
85 	spin_unlock(&rdev->pif_idx_lock);
86 	return r;
87 }
88 
89 void eg_pif_phy1_wreg(struct radeon_device *rdev, u32 reg, u32 v)
90 {
91 	spin_lock(&rdev->pif_idx_lock);
92 	WREG32(EVERGREEN_PIF_PHY1_INDEX, ((reg) & 0xffff));
93 	WREG32(EVERGREEN_PIF_PHY1_DATA, (v));
94 	spin_unlock(&rdev->pif_idx_lock);
95 }
96 
/* MMIO register-block offsets for the six display controllers;
 * index with the CRTC id (0..5) to address a given CRTC's registers. */
static const u32 crtc_offsets[6] =
{
	EVERGREEN_CRTC0_REGISTER_OFFSET,
	EVERGREEN_CRTC1_REGISTER_OFFSET,
	EVERGREEN_CRTC2_REGISTER_OFFSET,
	EVERGREEN_CRTC3_REGISTER_OFFSET,
	EVERGREEN_CRTC4_REGISTER_OFFSET,
	EVERGREEN_CRTC5_REGISTER_OFFSET
};
106 
107 #include "clearstate_evergreen.h"
108 
/* Register offsets the RLC saves/restores around power transitions on
 * Sumo-class parts.  NOTE(review): list contents come from AMD register
 * specs — order and values must not be changed without hardware docs. */
static const u32 sumo_rlc_save_restore_register_list[] =
{
	0x98fc,
	0x9830,
	0x9834,
	0x9838,
	0x9870,
	0x9874,
	0x8a14,
	0x8b24,
	0x8bcc,
	0x8b10,
	0x8d00,
	0x8d04,
	0x8c00,
	0x8c04,
	0x8c08,
	0x8c0c,
	0x8d8c,
	0x8c20,
	0x8c24,
	0x8c28,
	0x8c18,
	0x8c1c,
	0x8cf0,
	0x8e2c,
	0x8e38,
	0x8c30,
	0x9508,
	0x9688,
	0x9608,
	0x960c,
	0x9610,
	0x9614,
	0x88c4,
	0x88d4,
	0xa008,
	0x900c,
	0x9100,
	0x913c,
	0x98f8,
	0x98f4,
	0x9b7c,
	0x3f8c,
	0x8950,
	0x8954,
	0x8a18,
	0x8b28,
	0x9144,
	0x9148,
	0x914c,
	0x3f90,
	0x3f94,
	0x915c,
	0x9160,
	0x9178,
	0x917c,
	0x9180,
	0x918c,
	0x9190,
	0x9194,
	0x9198,
	0x919c,
	0x91a8,
	0x91ac,
	0x91b0,
	0x91b4,
	0x91b8,
	0x91c4,
	0x91c8,
	0x91cc,
	0x91d0,
	0x91d4,
	0x91e0,
	0x91e4,
	0x91ec,
	0x91f0,
	0x91f4,
	0x9200,
	0x9204,
	0x929c,
	0x9150,
	0x802c,
};
193 
194 static void evergreen_gpu_init(struct radeon_device *rdev);
195 
/* "Golden" register settings for Cypress/Hemlock/Juniper/Redwood:
 * {mmio offset, AND mask, OR value} triples applied by
 * radeon_program_register_sequence() during asic init. */
static const u32 evergreen_golden_registers[] =
{
	0x3f90, 0xffff0000, 0xff000000,
	0x9148, 0xffff0000, 0xff000000,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0x9b7c, 0xffffffff, 0x00000000,
	0x8a14, 0xffffffff, 0x00000007,
	0x8b10, 0xffffffff, 0x00000000,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0xffffffff, 0x000000c2,
	0x88d4, 0xffffffff, 0x00000010,
	0x8974, 0xffffffff, 0x00000000,
	0xc78, 0x00000080, 0x00000080,
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0xffffffff, 0x001000f0,
	0x6104, 0x01000300, 0x00000000,
	0x5bc0, 0x00300000, 0x00000000,
	0x7030, 0xffffffff, 0x00000011,
	0x7c30, 0xffffffff, 0x00000011,
	0x10830, 0xffffffff, 0x00000011,
	0x11430, 0xffffffff, 0x00000011,
	0x12030, 0xffffffff, 0x00000011,
	0x12c30, 0xffffffff, 0x00000011,
	0xd02c, 0xffffffff, 0x08421000,
	0x240c, 0xffffffff, 0x00000380,
	0x8b24, 0xffffffff, 0x00ff0fff,
	0x28a4c, 0x06000000, 0x06000000,
	0x10c, 0x00000001, 0x00000001,
	0x8d00, 0xffffffff, 0x100e4848,
	0x8d04, 0xffffffff, 0x00164745,
	0x8c00, 0xffffffff, 0xe4000003,
	0x8c04, 0xffffffff, 0x40600060,
	0x8c08, 0xffffffff, 0x001c001c,
	0x8cf0, 0xffffffff, 0x08e00620,
	0x8c20, 0xffffffff, 0x00800080,
	0x8c24, 0xffffffff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0xffffffff, 0x00001010,
	0x28350, 0xffffffff, 0x00000000,
	0xa008, 0xffffffff, 0x00010000,
	0x5c4, 0xffffffff, 0x00000001,
	0x9508, 0xffffffff, 0x00000002,
	0x913c, 0x0000000f, 0x0000000a
};
241 
/* Second Evergreen golden-register batch: {offset, mask, value} triples,
 * all cleared to 0 — applied alongside evergreen_golden_registers. */
static const u32 evergreen_golden_registers2[] =
{
	0x2f4c, 0xffffffff, 0x00000000,
	0x54f4, 0xffffffff, 0x00000000,
	0x54f0, 0xffffffff, 0x00000000,
	0x5498, 0xffffffff, 0x00000000,
	0x549c, 0xffffffff, 0x00000000,
	0x5494, 0xffffffff, 0x00000000,
	0x53cc, 0xffffffff, 0x00000000,
	0x53c8, 0xffffffff, 0x00000000,
	0x53c4, 0xffffffff, 0x00000000,
	0x53c0, 0xffffffff, 0x00000000,
	0x53bc, 0xffffffff, 0x00000000,
	0x53b8, 0xffffffff, 0x00000000,
	0x53b4, 0xffffffff, 0x00000000,
	0x53b0, 0xffffffff, 0x00000000
};
259 
/* Cypress clock-gating init sequence ("mgcg" presumably = medium grain
 * clock gating — confirm against AMD docs): {offset, mask, value} triples.
 * The 0x802c writes appear to switch banks between runs of 0x91xx/0x92xx
 * programming — NOTE(review): bank interpretation inferred, verify. */
static const u32 cypress_mgcg_init[] =
{
	0x802c, 0xffffffff, 0xc0000000,
	0x5448, 0xffffffff, 0x00000100,
	0x55e4, 0xffffffff, 0x00000100,
	0x160c, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0xc164, 0xffffffff, 0x00000100,
	0x8a18, 0xffffffff, 0x00000100,
	0x897c, 0xffffffff, 0x06000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9a60, 0xffffffff, 0x00000100,
	0x9868, 0xffffffff, 0x00000100,
	0x8d58, 0xffffffff, 0x00000100,
	0x9510, 0xffffffff, 0x00000100,
	0x949c, 0xffffffff, 0x00000100,
	0x9654, 0xffffffff, 0x00000100,
	0x9030, 0xffffffff, 0x00000100,
	0x9034, 0xffffffff, 0x00000100,
	0x9038, 0xffffffff, 0x00000100,
	0x903c, 0xffffffff, 0x00000100,
	0x9040, 0xffffffff, 0x00000100,
	0xa200, 0xffffffff, 0x00000100,
	0xa204, 0xffffffff, 0x00000100,
	0xa208, 0xffffffff, 0x00000100,
	0xa20c, 0xffffffff, 0x00000100,
	0x971c, 0xffffffff, 0x00000100,
	0x977c, 0xffffffff, 0x00000100,
	0x3f80, 0xffffffff, 0x00000100,
	0xa210, 0xffffffff, 0x00000100,
	0xa214, 0xffffffff, 0x00000100,
	0x4d8, 0xffffffff, 0x00000100,
	0x9784, 0xffffffff, 0x00000100,
	0x9698, 0xffffffff, 0x00000100,
	0x4d4, 0xffffffff, 0x00000200,
	0x30cc, 0xffffffff, 0x00000100,
	0xd0c0, 0xffffffff, 0xff000100,
	0x802c, 0xffffffff, 0x40000000,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x9178, 0xffffffff, 0x00070000,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x918c, 0xffffffff, 0x00010006,
	0x9190, 0xffffffff, 0x00090008,
	0x9194, 0xffffffff, 0x00070000,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91a8, 0xffffffff, 0x00010006,
	0x91ac, 0xffffffff, 0x00090008,
	0x91b0, 0xffffffff, 0x00070000,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91c4, 0xffffffff, 0x00010006,
	0x91c8, 0xffffffff, 0x00090008,
	0x91cc, 0xffffffff, 0x00070000,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91e0, 0xffffffff, 0x00010006,
	0x91e4, 0xffffffff, 0x00090008,
	0x91e8, 0xffffffff, 0x00000000,
	0x91ec, 0xffffffff, 0x00070000,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x9200, 0xffffffff, 0x00010006,
	0x9204, 0xffffffff, 0x00090008,
	0x9208, 0xffffffff, 0x00070000,
	0x920c, 0xffffffff, 0x00030002,
	0x9210, 0xffffffff, 0x00050004,
	0x921c, 0xffffffff, 0x00010006,
	0x9220, 0xffffffff, 0x00090008,
	0x9224, 0xffffffff, 0x00070000,
	0x9228, 0xffffffff, 0x00030002,
	0x922c, 0xffffffff, 0x00050004,
	0x9238, 0xffffffff, 0x00010006,
	0x923c, 0xffffffff, 0x00090008,
	0x9240, 0xffffffff, 0x00070000,
	0x9244, 0xffffffff, 0x00030002,
	0x9248, 0xffffffff, 0x00050004,
	0x9254, 0xffffffff, 0x00010006,
	0x9258, 0xffffffff, 0x00090008,
	0x925c, 0xffffffff, 0x00070000,
	0x9260, 0xffffffff, 0x00030002,
	0x9264, 0xffffffff, 0x00050004,
	0x9270, 0xffffffff, 0x00010006,
	0x9274, 0xffffffff, 0x00090008,
	0x9278, 0xffffffff, 0x00070000,
	0x927c, 0xffffffff, 0x00030002,
	0x9280, 0xffffffff, 0x00050004,
	0x928c, 0xffffffff, 0x00010006,
	0x9290, 0xffffffff, 0x00090008,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0x40010000,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x9178, 0xffffffff, 0x00070000,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x918c, 0xffffffff, 0x00010006,
	0x9190, 0xffffffff, 0x00090008,
	0x9194, 0xffffffff, 0x00070000,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91a8, 0xffffffff, 0x00010006,
	0x91ac, 0xffffffff, 0x00090008,
	0x91b0, 0xffffffff, 0x00070000,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91c4, 0xffffffff, 0x00010006,
	0x91c8, 0xffffffff, 0x00090008,
	0x91cc, 0xffffffff, 0x00070000,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91e0, 0xffffffff, 0x00010006,
	0x91e4, 0xffffffff, 0x00090008,
	0x91e8, 0xffffffff, 0x00000000,
	0x91ec, 0xffffffff, 0x00070000,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x9200, 0xffffffff, 0x00010006,
	0x9204, 0xffffffff, 0x00090008,
	0x9208, 0xffffffff, 0x00070000,
	0x920c, 0xffffffff, 0x00030002,
	0x9210, 0xffffffff, 0x00050004,
	0x921c, 0xffffffff, 0x00010006,
	0x9220, 0xffffffff, 0x00090008,
	0x9224, 0xffffffff, 0x00070000,
	0x9228, 0xffffffff, 0x00030002,
	0x922c, 0xffffffff, 0x00050004,
	0x9238, 0xffffffff, 0x00010006,
	0x923c, 0xffffffff, 0x00090008,
	0x9240, 0xffffffff, 0x00070000,
	0x9244, 0xffffffff, 0x00030002,
	0x9248, 0xffffffff, 0x00050004,
	0x9254, 0xffffffff, 0x00010006,
	0x9258, 0xffffffff, 0x00090008,
	0x925c, 0xffffffff, 0x00070000,
	0x9260, 0xffffffff, 0x00030002,
	0x9264, 0xffffffff, 0x00050004,
	0x9270, 0xffffffff, 0x00010006,
	0x9274, 0xffffffff, 0x00090008,
	0x9278, 0xffffffff, 0x00070000,
	0x927c, 0xffffffff, 0x00030002,
	0x9280, 0xffffffff, 0x00050004,
	0x928c, 0xffffffff, 0x00010006,
	0x9290, 0xffffffff, 0x00090008,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0xc0000000
};
412 
/* Redwood clock-gating init sequence: {offset, mask, value} triples.
 * Same shape as cypress_mgcg_init but with fewer 0x92xx entries and a
 * single 0x91xx programming pass. */
static const u32 redwood_mgcg_init[] =
{
	0x802c, 0xffffffff, 0xc0000000,
	0x5448, 0xffffffff, 0x00000100,
	0x55e4, 0xffffffff, 0x00000100,
	0x160c, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0xc164, 0xffffffff, 0x00000100,
	0x8a18, 0xffffffff, 0x00000100,
	0x897c, 0xffffffff, 0x06000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9a60, 0xffffffff, 0x00000100,
	0x9868, 0xffffffff, 0x00000100,
	0x8d58, 0xffffffff, 0x00000100,
	0x9510, 0xffffffff, 0x00000100,
	0x949c, 0xffffffff, 0x00000100,
	0x9654, 0xffffffff, 0x00000100,
	0x9030, 0xffffffff, 0x00000100,
	0x9034, 0xffffffff, 0x00000100,
	0x9038, 0xffffffff, 0x00000100,
	0x903c, 0xffffffff, 0x00000100,
	0x9040, 0xffffffff, 0x00000100,
	0xa200, 0xffffffff, 0x00000100,
	0xa204, 0xffffffff, 0x00000100,
	0xa208, 0xffffffff, 0x00000100,
	0xa20c, 0xffffffff, 0x00000100,
	0x971c, 0xffffffff, 0x00000100,
	0x977c, 0xffffffff, 0x00000100,
	0x3f80, 0xffffffff, 0x00000100,
	0xa210, 0xffffffff, 0x00000100,
	0xa214, 0xffffffff, 0x00000100,
	0x4d8, 0xffffffff, 0x00000100,
	0x9784, 0xffffffff, 0x00000100,
	0x9698, 0xffffffff, 0x00000100,
	0x4d4, 0xffffffff, 0x00000200,
	0x30cc, 0xffffffff, 0x00000100,
	0xd0c0, 0xffffffff, 0xff000100,
	0x802c, 0xffffffff, 0x40000000,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x9178, 0xffffffff, 0x00070000,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x918c, 0xffffffff, 0x00010006,
	0x9190, 0xffffffff, 0x00090008,
	0x9194, 0xffffffff, 0x00070000,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91a8, 0xffffffff, 0x00010006,
	0x91ac, 0xffffffff, 0x00090008,
	0x91b0, 0xffffffff, 0x00070000,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91c4, 0xffffffff, 0x00010006,
	0x91c8, 0xffffffff, 0x00090008,
	0x91cc, 0xffffffff, 0x00070000,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91e0, 0xffffffff, 0x00010006,
	0x91e4, 0xffffffff, 0x00090008,
	0x91e8, 0xffffffff, 0x00000000,
	0x91ec, 0xffffffff, 0x00070000,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x9200, 0xffffffff, 0x00010006,
	0x9204, 0xffffffff, 0x00090008,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0xc0000000
};
484 
/* Cedar golden-register settings: {offset, mask, value} triples.
 * Mostly mirrors evergreen_golden_registers but with fewer pipes
 * configured (no 0x12030/0x12c30 entries, 0x88d4 = 0). */
static const u32 cedar_golden_registers[] =
{
	0x3f90, 0xffff0000, 0xff000000,
	0x9148, 0xffff0000, 0xff000000,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0x9b7c, 0xffffffff, 0x00000000,
	0x8a14, 0xffffffff, 0x00000007,
	0x8b10, 0xffffffff, 0x00000000,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0xffffffff, 0x000000c2,
	0x88d4, 0xffffffff, 0x00000000,
	0x8974, 0xffffffff, 0x00000000,
	0xc78, 0x00000080, 0x00000080,
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0xffffffff, 0x001000f0,
	0x6104, 0x01000300, 0x00000000,
	0x5bc0, 0x00300000, 0x00000000,
	0x7030, 0xffffffff, 0x00000011,
	0x7c30, 0xffffffff, 0x00000011,
	0x10830, 0xffffffff, 0x00000011,
	0x11430, 0xffffffff, 0x00000011,
	0xd02c, 0xffffffff, 0x08421000,
	0x240c, 0xffffffff, 0x00000380,
	0x8b24, 0xffffffff, 0x00ff0fff,
	0x28a4c, 0x06000000, 0x06000000,
	0x10c, 0x00000001, 0x00000001,
	0x8d00, 0xffffffff, 0x100e4848,
	0x8d04, 0xffffffff, 0x00164745,
	0x8c00, 0xffffffff, 0xe4000003,
	0x8c04, 0xffffffff, 0x40600060,
	0x8c08, 0xffffffff, 0x001c001c,
	0x8cf0, 0xffffffff, 0x08e00410,
	0x8c20, 0xffffffff, 0x00800080,
	0x8c24, 0xffffffff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0xffffffff, 0x00001010,
	0x28350, 0xffffffff, 0x00000000,
	0xa008, 0xffffffff, 0x00010000,
	0x5c4, 0xffffffff, 0x00000001,
	0x9508, 0xffffffff, 0x00000002
};
527 
/* Cedar clock-gating init sequence: {offset, mask, value} triples.
 * Shorter 0x91xx block than the larger Evergreen parts. */
static const u32 cedar_mgcg_init[] =
{
	0x802c, 0xffffffff, 0xc0000000,
	0x5448, 0xffffffff, 0x00000100,
	0x55e4, 0xffffffff, 0x00000100,
	0x160c, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0xc164, 0xffffffff, 0x00000100,
	0x8a18, 0xffffffff, 0x00000100,
	0x897c, 0xffffffff, 0x06000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9a60, 0xffffffff, 0x00000100,
	0x9868, 0xffffffff, 0x00000100,
	0x8d58, 0xffffffff, 0x00000100,
	0x9510, 0xffffffff, 0x00000100,
	0x949c, 0xffffffff, 0x00000100,
	0x9654, 0xffffffff, 0x00000100,
	0x9030, 0xffffffff, 0x00000100,
	0x9034, 0xffffffff, 0x00000100,
	0x9038, 0xffffffff, 0x00000100,
	0x903c, 0xffffffff, 0x00000100,
	0x9040, 0xffffffff, 0x00000100,
	0xa200, 0xffffffff, 0x00000100,
	0xa204, 0xffffffff, 0x00000100,
	0xa208, 0xffffffff, 0x00000100,
	0xa20c, 0xffffffff, 0x00000100,
	0x971c, 0xffffffff, 0x00000100,
	0x977c, 0xffffffff, 0x00000100,
	0x3f80, 0xffffffff, 0x00000100,
	0xa210, 0xffffffff, 0x00000100,
	0xa214, 0xffffffff, 0x00000100,
	0x4d8, 0xffffffff, 0x00000100,
	0x9784, 0xffffffff, 0x00000100,
	0x9698, 0xffffffff, 0x00000100,
	0x4d4, 0xffffffff, 0x00000200,
	0x30cc, 0xffffffff, 0x00000100,
	0xd0c0, 0xffffffff, 0xff000100,
	0x802c, 0xffffffff, 0x40000000,
	0x915c, 0xffffffff, 0x00010000,
	0x9178, 0xffffffff, 0x00050000,
	0x917c, 0xffffffff, 0x00030002,
	0x918c, 0xffffffff, 0x00010004,
	0x9190, 0xffffffff, 0x00070006,
	0x9194, 0xffffffff, 0x00050000,
	0x9198, 0xffffffff, 0x00030002,
	0x91a8, 0xffffffff, 0x00010004,
	0x91ac, 0xffffffff, 0x00070006,
	0x91e8, 0xffffffff, 0x00000000,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0xc0000000
};
581 
/* Juniper clock-gating init sequence: {offset, mask, value} triples.
 * Like cypress_mgcg_init but with the trailing 0x977c..0x30cc writes
 * moved after the 0x91xx/0x92xx block. */
static const u32 juniper_mgcg_init[] =
{
	0x802c, 0xffffffff, 0xc0000000,
	0x5448, 0xffffffff, 0x00000100,
	0x55e4, 0xffffffff, 0x00000100,
	0x160c, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0xc164, 0xffffffff, 0x00000100,
	0x8a18, 0xffffffff, 0x00000100,
	0x897c, 0xffffffff, 0x06000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9a60, 0xffffffff, 0x00000100,
	0x9868, 0xffffffff, 0x00000100,
	0x8d58, 0xffffffff, 0x00000100,
	0x9510, 0xffffffff, 0x00000100,
	0x949c, 0xffffffff, 0x00000100,
	0x9654, 0xffffffff, 0x00000100,
	0x9030, 0xffffffff, 0x00000100,
	0x9034, 0xffffffff, 0x00000100,
	0x9038, 0xffffffff, 0x00000100,
	0x903c, 0xffffffff, 0x00000100,
	0x9040, 0xffffffff, 0x00000100,
	0xa200, 0xffffffff, 0x00000100,
	0xa204, 0xffffffff, 0x00000100,
	0xa208, 0xffffffff, 0x00000100,
	0xa20c, 0xffffffff, 0x00000100,
	0x971c, 0xffffffff, 0x00000100,
	0xd0c0, 0xffffffff, 0xff000100,
	0x802c, 0xffffffff, 0x40000000,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x9178, 0xffffffff, 0x00070000,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x918c, 0xffffffff, 0x00010006,
	0x9190, 0xffffffff, 0x00090008,
	0x9194, 0xffffffff, 0x00070000,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91a8, 0xffffffff, 0x00010006,
	0x91ac, 0xffffffff, 0x00090008,
	0x91b0, 0xffffffff, 0x00070000,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91c4, 0xffffffff, 0x00010006,
	0x91c8, 0xffffffff, 0x00090008,
	0x91cc, 0xffffffff, 0x00070000,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91e0, 0xffffffff, 0x00010006,
	0x91e4, 0xffffffff, 0x00090008,
	0x91e8, 0xffffffff, 0x00000000,
	0x91ec, 0xffffffff, 0x00070000,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x9200, 0xffffffff, 0x00010006,
	0x9204, 0xffffffff, 0x00090008,
	0x9208, 0xffffffff, 0x00070000,
	0x920c, 0xffffffff, 0x00030002,
	0x9210, 0xffffffff, 0x00050004,
	0x921c, 0xffffffff, 0x00010006,
	0x9220, 0xffffffff, 0x00090008,
	0x9224, 0xffffffff, 0x00070000,
	0x9228, 0xffffffff, 0x00030002,
	0x922c, 0xffffffff, 0x00050004,
	0x9238, 0xffffffff, 0x00010006,
	0x923c, 0xffffffff, 0x00090008,
	0x9240, 0xffffffff, 0x00070000,
	0x9244, 0xffffffff, 0x00030002,
	0x9248, 0xffffffff, 0x00050004,
	0x9254, 0xffffffff, 0x00010006,
	0x9258, 0xffffffff, 0x00090008,
	0x925c, 0xffffffff, 0x00070000,
	0x9260, 0xffffffff, 0x00030002,
	0x9264, 0xffffffff, 0x00050004,
	0x9270, 0xffffffff, 0x00010006,
	0x9274, 0xffffffff, 0x00090008,
	0x9278, 0xffffffff, 0x00070000,
	0x927c, 0xffffffff, 0x00030002,
	0x9280, 0xffffffff, 0x00050004,
	0x928c, 0xffffffff, 0x00010006,
	0x9290, 0xffffffff, 0x00090008,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0xc0000000,
	0x977c, 0xffffffff, 0x00000100,
	0x3f80, 0xffffffff, 0x00000100,
	0xa210, 0xffffffff, 0x00000100,
	0xa214, 0xffffffff, 0x00000100,
	0x4d8, 0xffffffff, 0x00000100,
	0x9784, 0xffffffff, 0x00000100,
	0x9698, 0xffffffff, 0x00000100,
	0x4d4, 0xffffffff, 0x00000200,
	0x30cc, 0xffffffff, 0x00000100,
	0x802c, 0xffffffff, 0xc0000000
};
679 
/* SuperSumo (TurboNuke) golden-register settings:
 * {offset, mask, value} triples applied during asic init. */
static const u32 supersumo_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5c4, 0xffffffff, 0x00000001,
	0x7030, 0xffffffff, 0x00000011,
	0x7c30, 0xffffffff, 0x00000011,
	0x6104, 0x01000300, 0x00000000,
	0x5bc0, 0x00300000, 0x00000000,
	0x8c04, 0xffffffff, 0x40600060,
	0x8c08, 0xffffffff, 0x001c001c,
	0x8c20, 0xffffffff, 0x00800080,
	0x8c24, 0xffffffff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0xffffffff, 0x00001010,
	0x918c, 0xffffffff, 0x00010006,
	0x91a8, 0xffffffff, 0x00010006,
	0x91c4, 0xffffffff, 0x00010006,
	0x91e0, 0xffffffff, 0x00010006,
	0x9200, 0xffffffff, 0x00010006,
	0x9150, 0xffffffff, 0x6e944040,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x3f90, 0xffff0000, 0xff000000,
	0x9178, 0xffffffff, 0x00070000,
	0x9194, 0xffffffff, 0x00070000,
	0x91b0, 0xffffffff, 0x00070000,
	0x91cc, 0xffffffff, 0x00070000,
	0x91ec, 0xffffffff, 0x00070000,
	0x9148, 0xffff0000, 0xff000000,
	0x9190, 0xffffffff, 0x00090008,
	0x91ac, 0xffffffff, 0x00090008,
	0x91c8, 0xffffffff, 0x00090008,
	0x91e4, 0xffffffff, 0x00090008,
	0x9204, 0xffffffff, 0x00090008,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0x929c, 0xffffffff, 0x00000001,
	0x8a18, 0xffffffff, 0x00000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0x9b7c, 0xffffffff, 0x00000000,
	0x8030, 0xffffffff, 0x0000100a,
	0x8a14, 0xffffffff, 0x00000007,
	0x8b24, 0xffffffff, 0x00ff0fff,
	0x8b10, 0xffffffff, 0x00000000,
	0x28a4c, 0x06000000, 0x06000000,
	0x4d8, 0xffffffff, 0x00000100,
	0x913c, 0xffff000f, 0x0100000a,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0xffffffff, 0x000000c2,
	0x88d4, 0xffffffff, 0x00000010,
	0x8974, 0xffffffff, 0x00000000,
	0xc78, 0x00000080, 0x00000080,
	0x5e78, 0xffffffff, 0x001000f0,
	0xd02c, 0xffffffff, 0x08421000,
	0xa008, 0xffffffff, 0x00010000,
	0x8d00, 0xffffffff, 0x100e4848,
	0x8d04, 0xffffffff, 0x00164745,
	0x8c00, 0xffffffff, 0xe4000003,
	0x8cf0, 0x1fffffff, 0x08e00620,
	0x28350, 0xffffffff, 0x00000000,
	0x9508, 0xffffffff, 0x00000002
};
754 
/* Sumo-specific golden-register overrides, applied in addition to
 * supersumo_golden_registers: {offset, mask, value} triples. */
static const u32 sumo_golden_registers[] =
{
	0x900c, 0x00ffffff, 0x0017071f,
	0x8c18, 0xffffffff, 0x10101060,
	0x8c1c, 0xffffffff, 0x00001010,
	0x8c30, 0x0000000f, 0x00000005,
	0x9688, 0x0000000f, 0x00000007
};
763 
/* Wrestler (Palm/APU) golden-register settings:
 * {offset, mask, value} triples applied during asic init. */
static const u32 wrestler_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5c4, 0xffffffff, 0x00000001,
	0x7030, 0xffffffff, 0x00000011,
	0x7c30, 0xffffffff, 0x00000011,
	0x6104, 0x01000300, 0x00000000,
	0x5bc0, 0x00300000, 0x00000000,
	0x918c, 0xffffffff, 0x00010006,
	0x91a8, 0xffffffff, 0x00010006,
	0x9150, 0xffffffff, 0x6e944040,
	0x917c, 0xffffffff, 0x00030002,
	0x9198, 0xffffffff, 0x00030002,
	0x915c, 0xffffffff, 0x00010000,
	0x3f90, 0xffff0000, 0xff000000,
	0x9178, 0xffffffff, 0x00070000,
	0x9194, 0xffffffff, 0x00070000,
	0x9148, 0xffff0000, 0xff000000,
	0x9190, 0xffffffff, 0x00090008,
	0x91ac, 0xffffffff, 0x00090008,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0x929c, 0xffffffff, 0x00000001,
	0x8a18, 0xffffffff, 0x00000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9b7c, 0xffffffff, 0x00000000,
	0x8030, 0xffffffff, 0x0000100a,
	0x8a14, 0xffffffff, 0x00000001,
	0x8b24, 0xffffffff, 0x00ff0fff,
	0x8b10, 0xffffffff, 0x00000000,
	0x28a4c, 0x06000000, 0x06000000,
	0x4d8, 0xffffffff, 0x00000100,
	0x913c, 0xffff000f, 0x0100000a,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0xffffffff, 0x000000c2,
	0x88d4, 0xffffffff, 0x00000010,
	0x8974, 0xffffffff, 0x00000000,
	0xc78, 0x00000080, 0x00000080,
	0x5e78, 0xffffffff, 0x001000f0,
	0xd02c, 0xffffffff, 0x08421000,
	0xa008, 0xffffffff, 0x00010000,
	0x8d00, 0xffffffff, 0x100e4848,
	0x8d04, 0xffffffff, 0x00164745,
	0x8c00, 0xffffffff, 0xe4000003,
	0x8cf0, 0x1fffffff, 0x08e00410,
	0x28350, 0xffffffff, 0x00000000,
	0x9508, 0xffffffff, 0x00000002,
	0x900c, 0xffffffff, 0x0017071f,
	0x8c18, 0xffffffff, 0x10101060,
	0x8c1c, 0xffffffff, 0x00001010
};
816 
/* Barts (Northern Islands) golden-register settings:
 * {offset, mask, value} triples applied during asic init. */
static const u32 barts_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0x8f311ff1, 0x001000f0,
	0x3f90, 0xffff0000, 0xff000000,
	0x9148, 0xffff0000, 0xff000000,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0xc78, 0x00000080, 0x00000080,
	0xbd4, 0x70073777, 0x00010001,
	0xd02c, 0xbfffff1f, 0x08421000,
	0xd0b8, 0x03773777, 0x02011003,
	0x5bc0, 0x00200000, 0x50100000,
	0x98f8, 0x33773777, 0x02011003,
	0x98fc, 0xffffffff, 0x76543210,
	0x7030, 0x31000311, 0x00000011,
	0x2f48, 0x00000007, 0x02011003,
	0x6b28, 0x00000010, 0x00000012,
	0x7728, 0x00000010, 0x00000012,
	0x10328, 0x00000010, 0x00000012,
	0x10f28, 0x00000010, 0x00000012,
	0x11b28, 0x00000010, 0x00000012,
	0x12728, 0x00000010, 0x00000012,
	0x240c, 0x000007ff, 0x00000380,
	0x8a14, 0xf000001f, 0x00000007,
	0x8b24, 0x3fff3fff, 0x00ff0fff,
	0x8b10, 0x0000ff0f, 0x00000000,
	0x28a4c, 0x07ffffff, 0x06000000,
	0x10c, 0x00000001, 0x00010003,
	0xa02c, 0xffffffff, 0x0000009b,
	0x913c, 0x0000000f, 0x0100000a,
	0x8d00, 0xffff7f7f, 0x100e4848,
	0x8d04, 0x00ffffff, 0x00164745,
	0x8c00, 0xfffc0003, 0xe4000003,
	0x8c04, 0xf8ff00ff, 0x40600060,
	0x8c08, 0x00ff00ff, 0x001c001c,
	0x8cf0, 0x1fff1fff, 0x08e00620,
	0x8c20, 0x0fff0fff, 0x00800080,
	0x8c24, 0x0fff0fff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0x0000ffff, 0x00001010,
	0x28350, 0x00000f01, 0x00000000,
	0x9508, 0x3700001f, 0x00000002,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0x001f3ae3, 0x000000c2,
	0x88d4, 0x0000001f, 0x00000010,
	0x8974, 0xffffffff, 0x00000000
};
865 
/* Turks (Northern Islands) golden-register settings:
 * {offset, mask, value} triples applied during asic init. */
static const u32 turks_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0x8f311ff1, 0x001000f0,
	0x8c8, 0x00003000, 0x00001070,
	0x8cc, 0x000fffff, 0x00040035,
	0x3f90, 0xffff0000, 0xfff00000,
	0x9148, 0xffff0000, 0xfff00000,
	0x3f94, 0xffff0000, 0xfff00000,
	0x914c, 0xffff0000, 0xfff00000,
	0xc78, 0x00000080, 0x00000080,
	0xbd4, 0x00073007, 0x00010002,
	0xd02c, 0xbfffff1f, 0x08421000,
	0xd0b8, 0x03773777, 0x02010002,
	0x5bc0, 0x00200000, 0x50100000,
	0x98f8, 0x33773777, 0x00010002,
	0x98fc, 0xffffffff, 0x33221100,
	0x7030, 0x31000311, 0x00000011,
	0x2f48, 0x33773777, 0x00010002,
	0x6b28, 0x00000010, 0x00000012,
	0x7728, 0x00000010, 0x00000012,
	0x10328, 0x00000010, 0x00000012,
	0x10f28, 0x00000010, 0x00000012,
	0x11b28, 0x00000010, 0x00000012,
	0x12728, 0x00000010, 0x00000012,
	0x240c, 0x000007ff, 0x00000380,
	0x8a14, 0xf000001f, 0x00000007,
	0x8b24, 0x3fff3fff, 0x00ff0fff,
	0x8b10, 0x0000ff0f, 0x00000000,
	0x28a4c, 0x07ffffff, 0x06000000,
	0x10c, 0x00000001, 0x00010003,
	0xa02c, 0xffffffff, 0x0000009b,
	0x913c, 0x0000000f, 0x0100000a,
	0x8d00, 0xffff7f7f, 0x100e4848,
	0x8d04, 0x00ffffff, 0x00164745,
	0x8c00, 0xfffc0003, 0xe4000003,
	0x8c04, 0xf8ff00ff, 0x40600060,
	0x8c08, 0x00ff00ff, 0x001c001c,
	0x8cf0, 0x1fff1fff, 0x08e00410,
	0x8c20, 0x0fff0fff, 0x00800080,
	0x8c24, 0x0fff0fff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0x0000ffff, 0x00001010,
	0x28350, 0x00000f01, 0x00000000,
	0x9508, 0x3700001f, 0x00000002,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0x001f3ae3, 0x000000c2,
	0x88d4, 0x0000001f, 0x00000010,
	0x8974, 0xffffffff, 0x00000000
};
916 
/* Caicos (Northern Islands) golden-register settings:
 * {offset, mask, value} triples applied during asic init. */
static const u32 caicos_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0x8f311ff1, 0x001000f0,
	0x8c8, 0x00003420, 0x00001450,
	0x8cc, 0x000fffff, 0x00040035,
	0x3f90, 0xffff0000, 0xfffc0000,
	0x9148, 0xffff0000, 0xfffc0000,
	0x3f94, 0xffff0000, 0xfffc0000,
	0x914c, 0xffff0000, 0xfffc0000,
	0xc78, 0x00000080, 0x00000080,
	0xbd4, 0x00073007, 0x00010001,
	0xd02c, 0xbfffff1f, 0x08421000,
	0xd0b8, 0x03773777, 0x02010001,
	0x5bc0, 0x00200000, 0x50100000,
	0x98f8, 0x33773777, 0x02010001,
	0x98fc, 0xffffffff, 0x33221100,
	0x7030, 0x31000311, 0x00000011,
	0x2f48, 0x33773777, 0x02010001,
	0x6b28, 0x00000010, 0x00000012,
	0x7728, 0x00000010, 0x00000012,
	0x10328, 0x00000010, 0x00000012,
	0x10f28, 0x00000010, 0x00000012,
	0x11b28, 0x00000010, 0x00000012,
	0x12728, 0x00000010, 0x00000012,
	0x240c, 0x000007ff, 0x00000380,
	0x8a14, 0xf000001f, 0x00000001,
	0x8b24, 0x3fff3fff, 0x00ff0fff,
	0x8b10, 0x0000ff0f, 0x00000000,
	0x28a4c, 0x07ffffff, 0x06000000,
	0x10c, 0x00000001, 0x00010003,
	0xa02c, 0xffffffff, 0x0000009b,
	0x913c, 0x0000000f, 0x0100000a,
	0x8d00, 0xffff7f7f, 0x100e4848,
	0x8d04, 0x00ffffff, 0x00164745,
	0x8c00, 0xfffc0003, 0xe4000003,
	0x8c04, 0xf8ff00ff, 0x40600060,
	0x8c08, 0x00ff00ff, 0x001c001c,
	0x8cf0, 0x1fff1fff, 0x08e00410,
	0x8c20, 0x0fff0fff, 0x00800080,
	0x8c24, 0x0fff0fff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0x0000ffff, 0x00001010,
	0x28350, 0x00000f01, 0x00000000,
	0x9508, 0x3700001f, 0x00000002,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0x001f3ae3, 0x000000c2,
	0x88d4, 0x0000001f, 0x00000010,
	0x8974, 0xffffffff, 0x00000000
};
967 
968 static void evergreen_init_golden_registers(struct radeon_device *rdev)
969 {
970 	switch (rdev->family) {
971 	case CHIP_CYPRESS:
972 	case CHIP_HEMLOCK:
973 		radeon_program_register_sequence(rdev,
974 						 evergreen_golden_registers,
975 						 (const u32)ARRAY_SIZE(evergreen_golden_registers));
976 		radeon_program_register_sequence(rdev,
977 						 evergreen_golden_registers2,
978 						 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
979 		radeon_program_register_sequence(rdev,
980 						 cypress_mgcg_init,
981 						 (const u32)ARRAY_SIZE(cypress_mgcg_init));
982 		break;
983 	case CHIP_JUNIPER:
984 		radeon_program_register_sequence(rdev,
985 						 evergreen_golden_registers,
986 						 (const u32)ARRAY_SIZE(evergreen_golden_registers));
987 		radeon_program_register_sequence(rdev,
988 						 evergreen_golden_registers2,
989 						 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
990 		radeon_program_register_sequence(rdev,
991 						 juniper_mgcg_init,
992 						 (const u32)ARRAY_SIZE(juniper_mgcg_init));
993 		break;
994 	case CHIP_REDWOOD:
995 		radeon_program_register_sequence(rdev,
996 						 evergreen_golden_registers,
997 						 (const u32)ARRAY_SIZE(evergreen_golden_registers));
998 		radeon_program_register_sequence(rdev,
999 						 evergreen_golden_registers2,
1000 						 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
1001 		radeon_program_register_sequence(rdev,
1002 						 redwood_mgcg_init,
1003 						 (const u32)ARRAY_SIZE(redwood_mgcg_init));
1004 		break;
1005 	case CHIP_CEDAR:
1006 		radeon_program_register_sequence(rdev,
1007 						 cedar_golden_registers,
1008 						 (const u32)ARRAY_SIZE(cedar_golden_registers));
1009 		radeon_program_register_sequence(rdev,
1010 						 evergreen_golden_registers2,
1011 						 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
1012 		radeon_program_register_sequence(rdev,
1013 						 cedar_mgcg_init,
1014 						 (const u32)ARRAY_SIZE(cedar_mgcg_init));
1015 		break;
1016 	case CHIP_PALM:
1017 		radeon_program_register_sequence(rdev,
1018 						 wrestler_golden_registers,
1019 						 (const u32)ARRAY_SIZE(wrestler_golden_registers));
1020 		break;
1021 	case CHIP_SUMO:
1022 		radeon_program_register_sequence(rdev,
1023 						 supersumo_golden_registers,
1024 						 (const u32)ARRAY_SIZE(supersumo_golden_registers));
1025 		break;
1026 	case CHIP_SUMO2:
1027 		radeon_program_register_sequence(rdev,
1028 						 supersumo_golden_registers,
1029 						 (const u32)ARRAY_SIZE(supersumo_golden_registers));
1030 		radeon_program_register_sequence(rdev,
1031 						 sumo_golden_registers,
1032 						 (const u32)ARRAY_SIZE(sumo_golden_registers));
1033 		break;
1034 	case CHIP_BARTS:
1035 		radeon_program_register_sequence(rdev,
1036 						 barts_golden_registers,
1037 						 (const u32)ARRAY_SIZE(barts_golden_registers));
1038 		break;
1039 	case CHIP_TURKS:
1040 		radeon_program_register_sequence(rdev,
1041 						 turks_golden_registers,
1042 						 (const u32)ARRAY_SIZE(turks_golden_registers));
1043 		break;
1044 	case CHIP_CAICOS:
1045 		radeon_program_register_sequence(rdev,
1046 						 caicos_golden_registers,
1047 						 (const u32)ARRAY_SIZE(caicos_golden_registers));
1048 		break;
1049 	default:
1050 		break;
1051 	}
1052 }
1053 
1054 /**
1055  * evergreen_get_allowed_info_register - fetch the register for the info ioctl
1056  *
1057  * @rdev: radeon_device pointer
1058  * @reg: register offset in bytes
1059  * @val: register value
1060  *
1061  * Returns 0 for success or -EINVAL for an invalid register
1062  *
1063  */
1064 int evergreen_get_allowed_info_register(struct radeon_device *rdev,
1065 					u32 reg, u32 *val)
1066 {
1067 	switch (reg) {
1068 	case GRBM_STATUS:
1069 	case GRBM_STATUS_SE0:
1070 	case GRBM_STATUS_SE1:
1071 	case SRBM_STATUS:
1072 	case SRBM_STATUS2:
1073 	case DMA_STATUS_REG:
1074 	case UVD_STATUS:
1075 		*val = RREG32(reg);
1076 		return 0;
1077 	default:
1078 		return -EINVAL;
1079 	}
1080 }
1081 
1082 void evergreen_tiling_fields(unsigned tiling_flags, unsigned *bankw,
1083 			     unsigned *bankh, unsigned *mtaspect,
1084 			     unsigned *tile_split)
1085 {
1086 	*bankw = (tiling_flags >> RADEON_TILING_EG_BANKW_SHIFT) & RADEON_TILING_EG_BANKW_MASK;
1087 	*bankh = (tiling_flags >> RADEON_TILING_EG_BANKH_SHIFT) & RADEON_TILING_EG_BANKH_MASK;
1088 	*mtaspect = (tiling_flags >> RADEON_TILING_EG_MACRO_TILE_ASPECT_SHIFT) & RADEON_TILING_EG_MACRO_TILE_ASPECT_MASK;
1089 	*tile_split = (tiling_flags >> RADEON_TILING_EG_TILE_SPLIT_SHIFT) & RADEON_TILING_EG_TILE_SPLIT_MASK;
1090 	switch (*bankw) {
1091 	default:
1092 	case 1: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_1; break;
1093 	case 2: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_2; break;
1094 	case 4: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_4; break;
1095 	case 8: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_8; break;
1096 	}
1097 	switch (*bankh) {
1098 	default:
1099 	case 1: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_1; break;
1100 	case 2: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_2; break;
1101 	case 4: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_4; break;
1102 	case 8: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_8; break;
1103 	}
1104 	switch (*mtaspect) {
1105 	default:
1106 	case 1: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_1; break;
1107 	case 2: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_2; break;
1108 	case 4: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_4; break;
1109 	case 8: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_8; break;
1110 	}
1111 }
1112 
1113 static int sumo_set_uvd_clock(struct radeon_device *rdev, u32 clock,
1114 			      u32 cntl_reg, u32 status_reg)
1115 {
1116 	int r, i;
1117 	struct atom_clock_dividers dividers;
1118 
1119 	r = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
1120 					   clock, false, &dividers);
1121 	if (r)
1122 		return r;
1123 
1124 	WREG32_P(cntl_reg, dividers.post_div, ~(DCLK_DIR_CNTL_EN|DCLK_DIVIDER_MASK));
1125 
1126 	for (i = 0; i < 100; i++) {
1127 		if (RREG32(status_reg) & DCLK_STATUS)
1128 			break;
1129 		mdelay(10);
1130 	}
1131 	if (i == 100)
1132 		return -ETIMEDOUT;
1133 
1134 	return 0;
1135 }
1136 
1137 int sumo_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
1138 {
1139 	int r = 0;
1140 	u32 cg_scratch = RREG32(CG_SCRATCH1);
1141 
1142 	r = sumo_set_uvd_clock(rdev, vclk, CG_VCLK_CNTL, CG_VCLK_STATUS);
1143 	if (r)
1144 		goto done;
1145 	cg_scratch &= 0xffff0000;
1146 	cg_scratch |= vclk / 100; /* Mhz */
1147 
1148 	r = sumo_set_uvd_clock(rdev, dclk, CG_DCLK_CNTL, CG_DCLK_STATUS);
1149 	if (r)
1150 		goto done;
1151 	cg_scratch &= 0x0000ffff;
1152 	cg_scratch |= (dclk / 100) << 16; /* Mhz */
1153 
1154 done:
1155 	WREG32(CG_SCRATCH1, cg_scratch);
1156 
1157 	return r;
1158 }
1159 
/**
 * evergreen_set_uvd_clocks - program the UVD PLL (evergreen+)
 *
 * @rdev: radeon_device pointer
 * @vclk: target UVD video clock, in kHz (0 to power the PLL down)
 * @dclk: target UVD decode clock, in kHz (0 to power the PLL down)
 *
 * Switches VCLK/DCLK to bypass, reprograms the UPLL dividers, then
 * switches back.  The exact order of register writes and the delays
 * below follow the required PLL programming sequence - do not reorder.
 *
 * Returns 0 on success, negative error code on failure.
 */
int evergreen_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
{
	/* dividers are filled in by radeon_uvd_calc_upll_dividers() below */
	unsigned fb_div = 0, vclk_div = 0, dclk_div = 0;
	int r;

	/* bypass vclk and dclk with bclk */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		VCLK_SRC_SEL(1) | DCLK_SRC_SEL(1),
		~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));

	/* put PLL in bypass mode */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_BYPASS_EN_MASK, ~UPLL_BYPASS_EN_MASK);

	if (!vclk || !dclk) {
		/* keep the Bypass mode, put PLL to sleep */
		WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
		return 0;
	}

	/* compute fb and post dividers within the PLL constraints
	 * (VCO range, divider limits) - see radeon_uvd_calc_upll_dividers
	 */
	r = radeon_uvd_calc_upll_dividers(rdev, vclk, dclk, 125000, 250000,
					  16384, 0x03FFFFFF, 0, 128, 5,
					  &fb_div, &vclk_div, &dclk_div);
	if (r)
		return r;

	/* set VCO_MODE to 1 */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_VCO_MODE_MASK, ~UPLL_VCO_MODE_MASK);

	/* toggle UPLL_SLEEP to 1 then back to 0 */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_SLEEP_MASK);

	/* deassert UPLL_RESET */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);

	mdelay(1);

	/* wait for the PLL to acknowledge the control request */
	r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
	if (r)
		return r;

	/* assert UPLL_RESET again */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_RESET_MASK, ~UPLL_RESET_MASK);

	/* disable spread spectrum. */
	WREG32_P(CG_UPLL_SPREAD_SPECTRUM, 0, ~SSEN_MASK);

	/* set feedback divider */
	WREG32_P(CG_UPLL_FUNC_CNTL_3, UPLL_FB_DIV(fb_div), ~UPLL_FB_DIV_MASK);

	/* set ref divider to 0 */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_REF_DIV_MASK);

	/* spare bit selects a PLL range depending on the feedback divider */
	if (fb_div < 307200)
		WREG32_P(CG_UPLL_FUNC_CNTL_4, 0, ~UPLL_SPARE_ISPARE9);
	else
		WREG32_P(CG_UPLL_FUNC_CNTL_4, UPLL_SPARE_ISPARE9, ~UPLL_SPARE_ISPARE9);

	/* set PDIV_A and PDIV_B */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		UPLL_PDIV_A(vclk_div) | UPLL_PDIV_B(dclk_div),
		~(UPLL_PDIV_A_MASK | UPLL_PDIV_B_MASK));

	/* give the PLL some time to settle */
	mdelay(15);

	/* deassert PLL_RESET */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);

	mdelay(15);

	/* switch from bypass mode to normal mode */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_BYPASS_EN_MASK);

	r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
	if (r)
		return r;

	/* switch VCLK and DCLK selection */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		VCLK_SRC_SEL(2) | DCLK_SRC_SEL(2),
		~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));

	mdelay(100);

	return 0;
}
1248 
1249 void evergreen_fix_pci_max_read_req_size(struct radeon_device *rdev)
1250 {
1251 	int readrq;
1252 	u16 v;
1253 
1254 	readrq = pcie_get_readrq(rdev->pdev);
1255 	v = ffs(readrq) - 8;
1256 	/* if bios or OS sets MAX_READ_REQUEST_SIZE to an invalid value, fix it
1257 	 * to avoid hangs or perfomance issues
1258 	 */
1259 	if ((v == 0) || (v == 6) || (v == 7))
1260 		pcie_set_readrq(rdev->pdev, 512);
1261 }
1262 
1263 void dce4_program_fmt(struct drm_encoder *encoder)
1264 {
1265 	struct drm_device *dev = encoder->dev;
1266 	struct radeon_device *rdev = dev->dev_private;
1267 	struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
1268 	struct radeon_crtc *radeon_crtc = to_radeon_crtc(encoder->crtc);
1269 	struct drm_connector *connector = radeon_get_connector_for_encoder(encoder);
1270 	int bpc = 0;
1271 	u32 tmp = 0;
1272 	enum radeon_connector_dither dither = RADEON_FMT_DITHER_DISABLE;
1273 
1274 	if (connector) {
1275 		struct radeon_connector *radeon_connector = to_radeon_connector(connector);
1276 		bpc = radeon_get_monitor_bpc(connector);
1277 		dither = radeon_connector->dither;
1278 	}
1279 
1280 	/* LVDS/eDP FMT is set up by atom */
1281 	if (radeon_encoder->devices & ATOM_DEVICE_LCD_SUPPORT)
1282 		return;
1283 
1284 	/* not needed for analog */
1285 	if ((radeon_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1) ||
1286 	    (radeon_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2))
1287 		return;
1288 
1289 	if (bpc == 0)
1290 		return;
1291 
1292 	switch (bpc) {
1293 	case 6:
1294 		if (dither == RADEON_FMT_DITHER_ENABLE)
1295 			/* XXX sort out optimal dither settings */
1296 			tmp |= (FMT_FRAME_RANDOM_ENABLE | FMT_HIGHPASS_RANDOM_ENABLE |
1297 				FMT_SPATIAL_DITHER_EN);
1298 		else
1299 			tmp |= FMT_TRUNCATE_EN;
1300 		break;
1301 	case 8:
1302 		if (dither == RADEON_FMT_DITHER_ENABLE)
1303 			/* XXX sort out optimal dither settings */
1304 			tmp |= (FMT_FRAME_RANDOM_ENABLE | FMT_HIGHPASS_RANDOM_ENABLE |
1305 				FMT_RGB_RANDOM_ENABLE |
1306 				FMT_SPATIAL_DITHER_EN | FMT_SPATIAL_DITHER_DEPTH);
1307 		else
1308 			tmp |= (FMT_TRUNCATE_EN | FMT_TRUNCATE_DEPTH);
1309 		break;
1310 	case 10:
1311 	default:
1312 		/* not needed */
1313 		break;
1314 	}
1315 
1316 	WREG32(FMT_BIT_DEPTH_CONTROL + radeon_crtc->crtc_offset, tmp);
1317 }
1318 
1319 static bool dce4_is_in_vblank(struct radeon_device *rdev, int crtc)
1320 {
1321 	if (RREG32(EVERGREEN_CRTC_STATUS + crtc_offsets[crtc]) & EVERGREEN_CRTC_V_BLANK)
1322 		return true;
1323 	else
1324 		return false;
1325 }
1326 
1327 static bool dce4_is_counter_moving(struct radeon_device *rdev, int crtc)
1328 {
1329 	u32 pos1, pos2;
1330 
1331 	pos1 = RREG32(EVERGREEN_CRTC_STATUS_POSITION + crtc_offsets[crtc]);
1332 	pos2 = RREG32(EVERGREEN_CRTC_STATUS_POSITION + crtc_offsets[crtc]);
1333 
1334 	if (pos1 != pos2)
1335 		return true;
1336 	else
1337 		return false;
1338 }
1339 
1340 /**
1341  * dce4_wait_for_vblank - vblank wait asic callback.
1342  *
1343  * @rdev: radeon_device pointer
1344  * @crtc: crtc to wait for vblank on
1345  *
1346  * Wait for vblank on the requested crtc (evergreen+).
1347  */
1348 void dce4_wait_for_vblank(struct radeon_device *rdev, int crtc)
1349 {
1350 	unsigned i = 0;
1351 
1352 	if (crtc >= rdev->num_crtc)
1353 		return;
1354 
1355 	if (!(RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[crtc]) & EVERGREEN_CRTC_MASTER_EN))
1356 		return;
1357 
1358 	/* depending on when we hit vblank, we may be close to active; if so,
1359 	 * wait for another frame.
1360 	 */
1361 	while (dce4_is_in_vblank(rdev, crtc)) {
1362 		if (i++ % 100 == 0) {
1363 			if (!dce4_is_counter_moving(rdev, crtc))
1364 				break;
1365 		}
1366 	}
1367 
1368 	while (!dce4_is_in_vblank(rdev, crtc)) {
1369 		if (i++ % 100 == 0) {
1370 			if (!dce4_is_counter_moving(rdev, crtc))
1371 				break;
1372 		}
1373 	}
1374 }
1375 
1376 /**
1377  * evergreen_page_flip - pageflip callback.
1378  *
1379  * @rdev: radeon_device pointer
1380  * @crtc_id: crtc to cleanup pageflip on
1381  * @crtc_base: new address of the crtc (GPU MC address)
1382  *
1383  * Triggers the actual pageflip by updating the primary
1384  * surface base address (evergreen+).
1385  */
1386 void evergreen_page_flip(struct radeon_device *rdev, int crtc_id, u64 crtc_base,
1387 			 bool async)
1388 {
1389 	struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id];
1390 
1391 	/* update the scanout addresses */
1392 	WREG32(EVERGREEN_GRPH_FLIP_CONTROL + radeon_crtc->crtc_offset,
1393 	       async ? EVERGREEN_GRPH_SURFACE_UPDATE_H_RETRACE_EN : 0);
1394 	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
1395 	       upper_32_bits(crtc_base));
1396 	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
1397 	       (u32)crtc_base);
1398 	/* post the write */
1399 	RREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset);
1400 }
1401 
1402 /**
1403  * evergreen_page_flip_pending - check if page flip is still pending
1404  *
1405  * @rdev: radeon_device pointer
1406  * @crtc_id: crtc to check
1407  *
1408  * Returns the current update pending status.
1409  */
1410 bool evergreen_page_flip_pending(struct radeon_device *rdev, int crtc_id)
1411 {
1412 	struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id];
1413 
1414 	/* Return current update_pending status: */
1415 	return !!(RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset) &
1416 		EVERGREEN_GRPH_SURFACE_UPDATE_PENDING);
1417 }
1418 
/* get temperature in millidegrees */
int evergreen_get_temp(struct radeon_device *rdev)
{
	u32 temp, toffset;
	int actual_temp = 0;

	if (rdev->family == CHIP_JUNIPER) {
		/* Juniper: raw ADC reading plus a separate trim offset */
		toffset = (RREG32(CG_THERMAL_CTRL) & TOFFSET_MASK) >>
			TOFFSET_SHIFT;
		temp = (RREG32(CG_TS0_STATUS) & TS0_ADC_DOUT_MASK) >>
			TS0_ADC_DOUT_SHIFT;

		/* bit 8 of toffset acts as a sign bit: subtract the
		 * two's-complement magnitude when set, add otherwise.
		 * temp/2 converts the ADC reading to degrees C.
		 */
		if (toffset & 0x100)
			actual_temp = temp / 2 - (0x200 - toffset);
		else
			actual_temp = temp / 2 + toffset;

		/* degrees C -> millidegrees */
		actual_temp = actual_temp * 1000;

	} else {
		temp = (RREG32(CG_MULT_THERMAL_STATUS) & ASIC_T_MASK) >>
			ASIC_T_SHIFT;

		/* decode the ASIC_T field (half-degree units, given the /2
		 * below): bit 10 set -> clamp to -256, bit 9 set -> clamp
		 * to 255, bit 8 set -> negative, sign-extend the low 9
		 * bits, otherwise use the low 8 bits directly.
		 */
		if (temp & 0x400)
			actual_temp = -256;
		else if (temp & 0x200)
			actual_temp = 255;
		else if (temp & 0x100) {
			actual_temp = temp & 0x1ff;
			actual_temp |= ~0x1ff;
		} else
			actual_temp = temp & 0xff;

		/* half-degrees -> millidegrees */
		actual_temp = (actual_temp * 1000) / 2;
	}

	return actual_temp;
}
1457 
1458 int sumo_get_temp(struct radeon_device *rdev)
1459 {
1460 	u32 temp = RREG32(CG_THERMAL_STATUS) & 0xff;
1461 	int actual_temp = temp - 49;
1462 
1463 	return actual_temp * 1000;
1464 }
1465 
1466 /**
1467  * sumo_pm_init_profile - Initialize power profiles callback.
1468  *
1469  * @rdev: radeon_device pointer
1470  *
1471  * Initialize the power states used in profile mode
1472  * (sumo, trinity, SI).
1473  * Used for profile mode only.
1474  */
1475 void sumo_pm_init_profile(struct radeon_device *rdev)
1476 {
1477 	int idx;
1478 
1479 	/* default */
1480 	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
1481 	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
1482 	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
1483 	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 0;
1484 
1485 	/* low,mid sh/mh */
1486 	if (rdev->flags & RADEON_IS_MOBILITY)
1487 		idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
1488 	else
1489 		idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1490 
1491 	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
1492 	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
1493 	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
1494 	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
1495 
1496 	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
1497 	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
1498 	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
1499 	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
1500 
1501 	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
1502 	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
1503 	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
1504 	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 0;
1505 
1506 	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
1507 	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
1508 	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
1509 	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 0;
1510 
1511 	/* high sh/mh */
1512 	idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1513 	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
1514 	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
1515 	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
1516 	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx =
1517 		rdev->pm.power_state[idx].num_clock_modes - 1;
1518 
1519 	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
1520 	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
1521 	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
1522 	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx =
1523 		rdev->pm.power_state[idx].num_clock_modes - 1;
1524 }
1525 
1526 /**
1527  * btc_pm_init_profile - Initialize power profiles callback.
1528  *
1529  * @rdev: radeon_device pointer
1530  *
1531  * Initialize the power states used in profile mode
1532  * (BTC, cayman).
1533  * Used for profile mode only.
1534  */
1535 void btc_pm_init_profile(struct radeon_device *rdev)
1536 {
1537 	int idx;
1538 
1539 	/* default */
1540 	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
1541 	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
1542 	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
1543 	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 2;
1544 	/* starting with BTC, there is one state that is used for both
1545 	 * MH and SH.  Difference is that we always use the high clock index for
1546 	 * mclk.
1547 	 */
1548 	if (rdev->flags & RADEON_IS_MOBILITY)
1549 		idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
1550 	else
1551 		idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1552 	/* low sh */
1553 	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
1554 	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
1555 	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
1556 	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
1557 	/* mid sh */
1558 	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
1559 	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
1560 	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
1561 	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 1;
1562 	/* high sh */
1563 	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
1564 	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
1565 	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
1566 	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx = 2;
1567 	/* low mh */
1568 	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
1569 	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
1570 	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
1571 	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
1572 	/* mid mh */
1573 	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
1574 	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
1575 	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
1576 	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 1;
1577 	/* high mh */
1578 	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
1579 	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
1580 	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
1581 	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx = 2;
1582 }
1583 
1584 /**
1585  * evergreen_pm_misc - set additional pm hw parameters callback.
1586  *
1587  * @rdev: radeon_device pointer
1588  *
1589  * Set non-clock parameters associated with a power state
1590  * (voltage, etc.) (evergreen+).
1591  */
1592 void evergreen_pm_misc(struct radeon_device *rdev)
1593 {
1594 	int req_ps_idx = rdev->pm.requested_power_state_index;
1595 	int req_cm_idx = rdev->pm.requested_clock_mode_index;
1596 	struct radeon_power_state *ps = &rdev->pm.power_state[req_ps_idx];
1597 	struct radeon_voltage *voltage = &ps->clock_info[req_cm_idx].voltage;
1598 
1599 	if (voltage->type == VOLTAGE_SW) {
1600 		/* 0xff0x are flags rather then an actual voltage */
1601 		if ((voltage->voltage & 0xff00) == 0xff00)
1602 			return;
1603 		if (voltage->voltage && (voltage->voltage != rdev->pm.current_vddc)) {
1604 			radeon_atom_set_voltage(rdev, voltage->voltage, SET_VOLTAGE_TYPE_ASIC_VDDC);
1605 			rdev->pm.current_vddc = voltage->voltage;
1606 			DRM_DEBUG("Setting: vddc: %d\n", voltage->voltage);
1607 		}
1608 
1609 		/* starting with BTC, there is one state that is used for both
1610 		 * MH and SH.  Difference is that we always use the high clock index for
1611 		 * mclk and vddci.
1612 		 */
1613 		if ((rdev->pm.pm_method == PM_METHOD_PROFILE) &&
1614 		    (rdev->family >= CHIP_BARTS) &&
1615 		    rdev->pm.active_crtc_count &&
1616 		    ((rdev->pm.profile_index == PM_PROFILE_MID_MH_IDX) ||
1617 		     (rdev->pm.profile_index == PM_PROFILE_LOW_MH_IDX)))
1618 			voltage = &rdev->pm.power_state[req_ps_idx].
1619 				clock_info[rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx].voltage;
1620 
1621 		/* 0xff0x are flags rather then an actual voltage */
1622 		if ((voltage->vddci & 0xff00) == 0xff00)
1623 			return;
1624 		if (voltage->vddci && (voltage->vddci != rdev->pm.current_vddci)) {
1625 			radeon_atom_set_voltage(rdev, voltage->vddci, SET_VOLTAGE_TYPE_ASIC_VDDCI);
1626 			rdev->pm.current_vddci = voltage->vddci;
1627 			DRM_DEBUG("Setting: vddci: %d\n", voltage->vddci);
1628 		}
1629 	}
1630 }
1631 
1632 /**
1633  * evergreen_pm_prepare - pre-power state change callback.
1634  *
1635  * @rdev: radeon_device pointer
1636  *
1637  * Prepare for a power state change (evergreen+).
1638  */
1639 void evergreen_pm_prepare(struct radeon_device *rdev)
1640 {
1641 	struct drm_device *ddev = rdev->ddev;
1642 	struct drm_crtc *crtc;
1643 	struct radeon_crtc *radeon_crtc;
1644 	u32 tmp;
1645 
1646 	/* disable any active CRTCs */
1647 	list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
1648 		radeon_crtc = to_radeon_crtc(crtc);
1649 		if (radeon_crtc->enabled) {
1650 			tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
1651 			tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
1652 			WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
1653 		}
1654 	}
1655 }
1656 
1657 /**
1658  * evergreen_pm_finish - post-power state change callback.
1659  *
1660  * @rdev: radeon_device pointer
1661  *
1662  * Clean up after a power state change (evergreen+).
1663  */
1664 void evergreen_pm_finish(struct radeon_device *rdev)
1665 {
1666 	struct drm_device *ddev = rdev->ddev;
1667 	struct drm_crtc *crtc;
1668 	struct radeon_crtc *radeon_crtc;
1669 	u32 tmp;
1670 
1671 	/* enable any active CRTCs */
1672 	list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
1673 		radeon_crtc = to_radeon_crtc(crtc);
1674 		if (radeon_crtc->enabled) {
1675 			tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
1676 			tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
1677 			WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
1678 		}
1679 	}
1680 }
1681 
1682 /**
1683  * evergreen_hpd_sense - hpd sense callback.
1684  *
1685  * @rdev: radeon_device pointer
1686  * @hpd: hpd (hotplug detect) pin
1687  *
1688  * Checks if a digital monitor is connected (evergreen+).
1689  * Returns true if connected, false if not connected.
1690  */
1691 bool evergreen_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd)
1692 {
1693 	bool connected = false;
1694 
1695 	switch (hpd) {
1696 	case RADEON_HPD_1:
1697 		if (RREG32(DC_HPD1_INT_STATUS) & DC_HPDx_SENSE)
1698 			connected = true;
1699 		break;
1700 	case RADEON_HPD_2:
1701 		if (RREG32(DC_HPD2_INT_STATUS) & DC_HPDx_SENSE)
1702 			connected = true;
1703 		break;
1704 	case RADEON_HPD_3:
1705 		if (RREG32(DC_HPD3_INT_STATUS) & DC_HPDx_SENSE)
1706 			connected = true;
1707 		break;
1708 	case RADEON_HPD_4:
1709 		if (RREG32(DC_HPD4_INT_STATUS) & DC_HPDx_SENSE)
1710 			connected = true;
1711 		break;
1712 	case RADEON_HPD_5:
1713 		if (RREG32(DC_HPD5_INT_STATUS) & DC_HPDx_SENSE)
1714 			connected = true;
1715 		break;
1716 	case RADEON_HPD_6:
1717 		if (RREG32(DC_HPD6_INT_STATUS) & DC_HPDx_SENSE)
1718 			connected = true;
1719 		break;
1720 	default:
1721 		break;
1722 	}
1723 
1724 	return connected;
1725 }
1726 
1727 /**
1728  * evergreen_hpd_set_polarity - hpd set polarity callback.
1729  *
1730  * @rdev: radeon_device pointer
1731  * @hpd: hpd (hotplug detect) pin
1732  *
1733  * Set the polarity of the hpd pin (evergreen+).
1734  */
1735 void evergreen_hpd_set_polarity(struct radeon_device *rdev,
1736 				enum radeon_hpd_id hpd)
1737 {
1738 	u32 tmp;
1739 	bool connected = evergreen_hpd_sense(rdev, hpd);
1740 
1741 	switch (hpd) {
1742 	case RADEON_HPD_1:
1743 		tmp = RREG32(DC_HPD1_INT_CONTROL);
1744 		if (connected)
1745 			tmp &= ~DC_HPDx_INT_POLARITY;
1746 		else
1747 			tmp |= DC_HPDx_INT_POLARITY;
1748 		WREG32(DC_HPD1_INT_CONTROL, tmp);
1749 		break;
1750 	case RADEON_HPD_2:
1751 		tmp = RREG32(DC_HPD2_INT_CONTROL);
1752 		if (connected)
1753 			tmp &= ~DC_HPDx_INT_POLARITY;
1754 		else
1755 			tmp |= DC_HPDx_INT_POLARITY;
1756 		WREG32(DC_HPD2_INT_CONTROL, tmp);
1757 		break;
1758 	case RADEON_HPD_3:
1759 		tmp = RREG32(DC_HPD3_INT_CONTROL);
1760 		if (connected)
1761 			tmp &= ~DC_HPDx_INT_POLARITY;
1762 		else
1763 			tmp |= DC_HPDx_INT_POLARITY;
1764 		WREG32(DC_HPD3_INT_CONTROL, tmp);
1765 		break;
1766 	case RADEON_HPD_4:
1767 		tmp = RREG32(DC_HPD4_INT_CONTROL);
1768 		if (connected)
1769 			tmp &= ~DC_HPDx_INT_POLARITY;
1770 		else
1771 			tmp |= DC_HPDx_INT_POLARITY;
1772 		WREG32(DC_HPD4_INT_CONTROL, tmp);
1773 		break;
1774 	case RADEON_HPD_5:
1775 		tmp = RREG32(DC_HPD5_INT_CONTROL);
1776 		if (connected)
1777 			tmp &= ~DC_HPDx_INT_POLARITY;
1778 		else
1779 			tmp |= DC_HPDx_INT_POLARITY;
1780 		WREG32(DC_HPD5_INT_CONTROL, tmp);
1781 			break;
1782 	case RADEON_HPD_6:
1783 		tmp = RREG32(DC_HPD6_INT_CONTROL);
1784 		if (connected)
1785 			tmp &= ~DC_HPDx_INT_POLARITY;
1786 		else
1787 			tmp |= DC_HPDx_INT_POLARITY;
1788 		WREG32(DC_HPD6_INT_CONTROL, tmp);
1789 		break;
1790 	default:
1791 		break;
1792 	}
1793 }
1794 
1795 /**
1796  * evergreen_hpd_init - hpd setup callback.
1797  *
1798  * @rdev: radeon_device pointer
1799  *
1800  * Setup the hpd pins used by the card (evergreen+).
1801  * Enable the pin, set the polarity, and enable the hpd interrupts.
1802  */
1803 void evergreen_hpd_init(struct radeon_device *rdev)
1804 {
1805 	struct drm_device *dev = rdev->ddev;
1806 	struct drm_connector *connector;
1807 	unsigned enabled = 0;
1808 	u32 tmp = DC_HPDx_CONNECTION_TIMER(0x9c4) |
1809 		DC_HPDx_RX_INT_TIMER(0xfa) | DC_HPDx_EN;
1810 
1811 	list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
1812 		struct radeon_connector *radeon_connector = to_radeon_connector(connector);
1813 
1814 		if (connector->connector_type == DRM_MODE_CONNECTOR_eDP ||
1815 		    connector->connector_type == DRM_MODE_CONNECTOR_LVDS) {
1816 			/* don't try to enable hpd on eDP or LVDS avoid breaking the
1817 			 * aux dp channel on imac and help (but not completely fix)
1818 			 * https://bugzilla.redhat.com/show_bug.cgi?id=726143
1819 			 * also avoid interrupt storms during dpms.
1820 			 */
1821 			continue;
1822 		}
1823 		switch (radeon_connector->hpd.hpd) {
1824 		case RADEON_HPD_1:
1825 			WREG32(DC_HPD1_CONTROL, tmp);
1826 			break;
1827 		case RADEON_HPD_2:
1828 			WREG32(DC_HPD2_CONTROL, tmp);
1829 			break;
1830 		case RADEON_HPD_3:
1831 			WREG32(DC_HPD3_CONTROL, tmp);
1832 			break;
1833 		case RADEON_HPD_4:
1834 			WREG32(DC_HPD4_CONTROL, tmp);
1835 			break;
1836 		case RADEON_HPD_5:
1837 			WREG32(DC_HPD5_CONTROL, tmp);
1838 			break;
1839 		case RADEON_HPD_6:
1840 			WREG32(DC_HPD6_CONTROL, tmp);
1841 			break;
1842 		default:
1843 			break;
1844 		}
1845 		radeon_hpd_set_polarity(rdev, radeon_connector->hpd.hpd);
1846 		if (radeon_connector->hpd.hpd != RADEON_HPD_NONE)
1847 			enabled |= 1 << radeon_connector->hpd.hpd;
1848 	}
1849 	radeon_irq_kms_enable_hpd(rdev, enabled);
1850 }
1851 
1852 /**
1853  * evergreen_hpd_fini - hpd tear down callback.
1854  *
1855  * @rdev: radeon_device pointer
1856  *
1857  * Tear down the hpd pins used by the card (evergreen+).
1858  * Disable the hpd interrupts.
1859  */
1860 void evergreen_hpd_fini(struct radeon_device *rdev)
1861 {
1862 	struct drm_device *dev = rdev->ddev;
1863 	struct drm_connector *connector;
1864 	unsigned disabled = 0;
1865 
1866 	list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
1867 		struct radeon_connector *radeon_connector = to_radeon_connector(connector);
1868 		switch (radeon_connector->hpd.hpd) {
1869 		case RADEON_HPD_1:
1870 			WREG32(DC_HPD1_CONTROL, 0);
1871 			break;
1872 		case RADEON_HPD_2:
1873 			WREG32(DC_HPD2_CONTROL, 0);
1874 			break;
1875 		case RADEON_HPD_3:
1876 			WREG32(DC_HPD3_CONTROL, 0);
1877 			break;
1878 		case RADEON_HPD_4:
1879 			WREG32(DC_HPD4_CONTROL, 0);
1880 			break;
1881 		case RADEON_HPD_5:
1882 			WREG32(DC_HPD5_CONTROL, 0);
1883 			break;
1884 		case RADEON_HPD_6:
1885 			WREG32(DC_HPD6_CONTROL, 0);
1886 			break;
1887 		default:
1888 			break;
1889 		}
1890 		if (radeon_connector->hpd.hpd != RADEON_HPD_NONE)
1891 			disabled |= 1 << radeon_connector->hpd.hpd;
1892 	}
1893 	radeon_irq_kms_disable_hpd(rdev, disabled);
1894 }
1895 
1896 /* watermark setup */
1897 
1898 static u32 evergreen_line_buffer_adjust(struct radeon_device *rdev,
1899 					struct radeon_crtc *radeon_crtc,
1900 					struct drm_display_mode *mode,
1901 					struct drm_display_mode *other_mode)
1902 {
1903 	u32 tmp, buffer_alloc, i;
1904 	u32 pipe_offset = radeon_crtc->crtc_id * 0x20;
1905 	/*
1906 	 * Line Buffer Setup
1907 	 * There are 3 line buffers, each one shared by 2 display controllers.
1908 	 * DC_LB_MEMORY_SPLIT controls how that line buffer is shared between
1909 	 * the display controllers.  The paritioning is done via one of four
1910 	 * preset allocations specified in bits 2:0:
1911 	 * first display controller
1912 	 *  0 - first half of lb (3840 * 2)
1913 	 *  1 - first 3/4 of lb (5760 * 2)
1914 	 *  2 - whole lb (7680 * 2), other crtc must be disabled
1915 	 *  3 - first 1/4 of lb (1920 * 2)
1916 	 * second display controller
1917 	 *  4 - second half of lb (3840 * 2)
1918 	 *  5 - second 3/4 of lb (5760 * 2)
1919 	 *  6 - whole lb (7680 * 2), other crtc must be disabled
1920 	 *  7 - last 1/4 of lb (1920 * 2)
1921 	 */
1922 	/* this can get tricky if we have two large displays on a paired group
1923 	 * of crtcs.  Ideally for multiple large displays we'd assign them to
1924 	 * non-linked crtcs for maximum line buffer allocation.
1925 	 */
1926 	if (radeon_crtc->base.enabled && mode) {
1927 		if (other_mode) {
1928 			tmp = 0; /* 1/2 */
1929 			buffer_alloc = 1;
1930 		} else {
1931 			tmp = 2; /* whole */
1932 			buffer_alloc = 2;
1933 		}
1934 	} else {
1935 		tmp = 0;
1936 		buffer_alloc = 0;
1937 	}
1938 
1939 	/* second controller of the pair uses second half of the lb */
1940 	if (radeon_crtc->crtc_id % 2)
1941 		tmp += 4;
1942 	WREG32(DC_LB_MEMORY_SPLIT + radeon_crtc->crtc_offset, tmp);
1943 
1944 	if (ASIC_IS_DCE41(rdev) || ASIC_IS_DCE5(rdev)) {
1945 		WREG32(PIPE0_DMIF_BUFFER_CONTROL + pipe_offset,
1946 		       DMIF_BUFFERS_ALLOCATED(buffer_alloc));
1947 		for (i = 0; i < rdev->usec_timeout; i++) {
1948 			if (RREG32(PIPE0_DMIF_BUFFER_CONTROL + pipe_offset) &
1949 			    DMIF_BUFFERS_ALLOCATED_COMPLETED)
1950 				break;
1951 			udelay(1);
1952 		}
1953 	}
1954 
1955 	if (radeon_crtc->base.enabled && mode) {
1956 		switch (tmp) {
1957 		case 0:
1958 		case 4:
1959 		default:
1960 			if (ASIC_IS_DCE5(rdev))
1961 				return 4096 * 2;
1962 			else
1963 				return 3840 * 2;
1964 		case 1:
1965 		case 5:
1966 			if (ASIC_IS_DCE5(rdev))
1967 				return 6144 * 2;
1968 			else
1969 				return 5760 * 2;
1970 		case 2:
1971 		case 6:
1972 			if (ASIC_IS_DCE5(rdev))
1973 				return 8192 * 2;
1974 			else
1975 				return 7680 * 2;
1976 		case 3:
1977 		case 7:
1978 			if (ASIC_IS_DCE5(rdev))
1979 				return 2048 * 2;
1980 			else
1981 				return 1920 * 2;
1982 		}
1983 	}
1984 
1985 	/* controller not enabled, so no lb used */
1986 	return 0;
1987 }
1988 
1989 u32 evergreen_get_number_of_dram_channels(struct radeon_device *rdev)
1990 {
1991 	u32 tmp = RREG32(MC_SHARED_CHMAP);
1992 
1993 	switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
1994 	case 0:
1995 	default:
1996 		return 1;
1997 	case 1:
1998 		return 2;
1999 	case 2:
2000 		return 4;
2001 	case 3:
2002 		return 8;
2003 	}
2004 }
2005 
/* Inputs for the evergreen display watermark calculations below.
 * Filled per-crtc by evergreen_program_watermarks(). */
struct evergreen_wm_params {
	u32 dram_channels; /* number of dram channels */
	u32 yclk;          /* bandwidth per dram data pin in kHz */
	u32 sclk;          /* engine clock in kHz */
	u32 disp_clk;      /* display clock in kHz */
	u32 src_width;     /* viewport width */
	u32 active_time;   /* active display time in ns */
	u32 blank_time;    /* blank time in ns */
	bool interlaced;   /* mode is interlaced */
	fixed20_12 vsc;    /* vertical scale ratio */
	u32 num_heads;     /* number of active crtcs */
	u32 bytes_per_pixel; /* bytes per pixel display + overlay */
	u32 lb_size;       /* line buffer allocated to pipe */
	u32 vtaps;         /* vertical scaler taps */
};
2021 
2022 static u32 evergreen_dram_bandwidth(struct evergreen_wm_params *wm)
2023 {
2024 	/* Calculate DRAM Bandwidth and the part allocated to display. */
2025 	fixed20_12 dram_efficiency; /* 0.7 */
2026 	fixed20_12 yclk, dram_channels, bandwidth;
2027 	fixed20_12 a;
2028 
2029 	a.full = dfixed_const(1000);
2030 	yclk.full = dfixed_const(wm->yclk);
2031 	yclk.full = dfixed_div(yclk, a);
2032 	dram_channels.full = dfixed_const(wm->dram_channels * 4);
2033 	a.full = dfixed_const(10);
2034 	dram_efficiency.full = dfixed_const(7);
2035 	dram_efficiency.full = dfixed_div(dram_efficiency, a);
2036 	bandwidth.full = dfixed_mul(dram_channels, yclk);
2037 	bandwidth.full = dfixed_mul(bandwidth, dram_efficiency);
2038 
2039 	return dfixed_trunc(bandwidth);
2040 }
2041 
2042 static u32 evergreen_dram_bandwidth_for_display(struct evergreen_wm_params *wm)
2043 {
2044 	/* Calculate DRAM Bandwidth and the part allocated to display. */
2045 	fixed20_12 disp_dram_allocation; /* 0.3 to 0.7 */
2046 	fixed20_12 yclk, dram_channels, bandwidth;
2047 	fixed20_12 a;
2048 
2049 	a.full = dfixed_const(1000);
2050 	yclk.full = dfixed_const(wm->yclk);
2051 	yclk.full = dfixed_div(yclk, a);
2052 	dram_channels.full = dfixed_const(wm->dram_channels * 4);
2053 	a.full = dfixed_const(10);
2054 	disp_dram_allocation.full = dfixed_const(3); /* XXX worse case value 0.3 */
2055 	disp_dram_allocation.full = dfixed_div(disp_dram_allocation, a);
2056 	bandwidth.full = dfixed_mul(dram_channels, yclk);
2057 	bandwidth.full = dfixed_mul(bandwidth, disp_dram_allocation);
2058 
2059 	return dfixed_trunc(bandwidth);
2060 }
2061 
2062 static u32 evergreen_data_return_bandwidth(struct evergreen_wm_params *wm)
2063 {
2064 	/* Calculate the display Data return Bandwidth */
2065 	fixed20_12 return_efficiency; /* 0.8 */
2066 	fixed20_12 sclk, bandwidth;
2067 	fixed20_12 a;
2068 
2069 	a.full = dfixed_const(1000);
2070 	sclk.full = dfixed_const(wm->sclk);
2071 	sclk.full = dfixed_div(sclk, a);
2072 	a.full = dfixed_const(10);
2073 	return_efficiency.full = dfixed_const(8);
2074 	return_efficiency.full = dfixed_div(return_efficiency, a);
2075 	a.full = dfixed_const(32);
2076 	bandwidth.full = dfixed_mul(a, sclk);
2077 	bandwidth.full = dfixed_mul(bandwidth, return_efficiency);
2078 
2079 	return dfixed_trunc(bandwidth);
2080 }
2081 
2082 static u32 evergreen_dmif_request_bandwidth(struct evergreen_wm_params *wm)
2083 {
2084 	/* Calculate the DMIF Request Bandwidth */
2085 	fixed20_12 disp_clk_request_efficiency; /* 0.8 */
2086 	fixed20_12 disp_clk, bandwidth;
2087 	fixed20_12 a;
2088 
2089 	a.full = dfixed_const(1000);
2090 	disp_clk.full = dfixed_const(wm->disp_clk);
2091 	disp_clk.full = dfixed_div(disp_clk, a);
2092 	a.full = dfixed_const(10);
2093 	disp_clk_request_efficiency.full = dfixed_const(8);
2094 	disp_clk_request_efficiency.full = dfixed_div(disp_clk_request_efficiency, a);
2095 	a.full = dfixed_const(32);
2096 	bandwidth.full = dfixed_mul(a, disp_clk);
2097 	bandwidth.full = dfixed_mul(bandwidth, disp_clk_request_efficiency);
2098 
2099 	return dfixed_trunc(bandwidth);
2100 }
2101 
2102 static u32 evergreen_available_bandwidth(struct evergreen_wm_params *wm)
2103 {
2104 	/* Calculate the Available bandwidth. Display can use this temporarily but not in average. */
2105 	u32 dram_bandwidth = evergreen_dram_bandwidth(wm);
2106 	u32 data_return_bandwidth = evergreen_data_return_bandwidth(wm);
2107 	u32 dmif_req_bandwidth = evergreen_dmif_request_bandwidth(wm);
2108 
2109 	return min(dram_bandwidth, min(data_return_bandwidth, dmif_req_bandwidth));
2110 }
2111 
2112 static u32 evergreen_average_bandwidth(struct evergreen_wm_params *wm)
2113 {
2114 	/* Calculate the display mode Average Bandwidth
2115 	 * DisplayMode should contain the source and destination dimensions,
2116 	 * timing, etc.
2117 	 */
2118 	fixed20_12 bpp;
2119 	fixed20_12 line_time;
2120 	fixed20_12 src_width;
2121 	fixed20_12 bandwidth;
2122 	fixed20_12 a;
2123 
2124 	a.full = dfixed_const(1000);
2125 	line_time.full = dfixed_const(wm->active_time + wm->blank_time);
2126 	line_time.full = dfixed_div(line_time, a);
2127 	bpp.full = dfixed_const(wm->bytes_per_pixel);
2128 	src_width.full = dfixed_const(wm->src_width);
2129 	bandwidth.full = dfixed_mul(src_width, bpp);
2130 	bandwidth.full = dfixed_mul(bandwidth, wm->vsc);
2131 	bandwidth.full = dfixed_div(bandwidth, line_time);
2132 
2133 	return dfixed_trunc(bandwidth);
2134 }
2135 
/* Compute the latency watermark (in ns) for one head: the worst-case
 * time the display may have to wait for its data, padded by how long
 * the line buffer takes to refill when it cannot keep up. */
static u32 evergreen_latency_watermark(struct evergreen_wm_params *wm)
{
	/* First calculate the latency in ns */
	u32 mc_latency = 2000; /* 2000 ns. */
	u32 available_bandwidth = evergreen_available_bandwidth(wm);
	/* time for a worst-case 512-byte chunk (x8) to return, in ns */
	u32 worst_chunk_return_time = (512 * 8 * 1000) / available_bandwidth;
	/* time for a cursor line pair (128 bytes x4) to return, in ns */
	u32 cursor_line_pair_return_time = (128 * 4 * 1000) / available_bandwidth;
	u32 dc_latency = 40000000 / wm->disp_clk; /* dc pipe latency */
	/* data for the other heads may be ahead of ours in the queue */
	u32 other_heads_data_return_time = ((wm->num_heads + 1) * worst_chunk_return_time) +
		(wm->num_heads * cursor_line_pair_return_time);
	u32 latency = mc_latency + other_heads_data_return_time + dc_latency;
	u32 max_src_lines_per_dst_line, lb_fill_bw, line_fill_time;
	fixed20_12 a, b, c;

	if (wm->num_heads == 0)
		return 0;

	/* with downscaling or >=3 vertical taps more source lines are
	 * needed per destination line */
	a.full = dfixed_const(2);
	b.full = dfixed_const(1);
	if ((wm->vsc.full > a.full) ||
	    ((wm->vsc.full > b.full) && (wm->vtaps >= 3)) ||
	    (wm->vtaps >= 5) ||
	    ((wm->vsc.full >= a.full) && wm->interlaced))
		max_src_lines_per_dst_line = 4;
	else
		max_src_lines_per_dst_line = 2;

	/* this head's share of the available bandwidth */
	a.full = dfixed_const(available_bandwidth);
	b.full = dfixed_const(wm->num_heads);
	a.full = dfixed_div(a, b);

	/* bytes/ns the display clock can push (disp_clk is in kHz) */
	b.full = dfixed_const(1000);
	c.full = dfixed_const(wm->disp_clk);
	b.full = dfixed_div(c, b);
	c.full = dfixed_const(wm->bytes_per_pixel);
	b.full = dfixed_mul(b, c);

	/* line buffer fill rate is limited by the slower of the two */
	lb_fill_bw = min(dfixed_trunc(a), dfixed_trunc(b));

	/* time (ns) to fill the lb with the source lines for one dst line */
	a.full = dfixed_const(max_src_lines_per_dst_line * wm->src_width * wm->bytes_per_pixel);
	b.full = dfixed_const(1000);
	c.full = dfixed_const(lb_fill_bw);
	b.full = dfixed_div(c, b);
	a.full = dfixed_div(a, b);
	line_fill_time = dfixed_trunc(a);

	/* if the lb refills faster than a line scans out, raw latency is
	 * the watermark; otherwise pad by the shortfall */
	if (line_fill_time < wm->active_time)
		return latency;
	else
		return latency + (line_fill_time - wm->active_time);

}
2188 
2189 static bool evergreen_average_bandwidth_vs_dram_bandwidth_for_display(struct evergreen_wm_params *wm)
2190 {
2191 	if (evergreen_average_bandwidth(wm) <=
2192 	    (evergreen_dram_bandwidth_for_display(wm) / wm->num_heads))
2193 		return true;
2194 	else
2195 		return false;
2196 };
2197 
2198 static bool evergreen_average_bandwidth_vs_available_bandwidth(struct evergreen_wm_params *wm)
2199 {
2200 	if (evergreen_average_bandwidth(wm) <=
2201 	    (evergreen_available_bandwidth(wm) / wm->num_heads))
2202 		return true;
2203 	else
2204 		return false;
2205 };
2206 
2207 static bool evergreen_check_latency_hiding(struct evergreen_wm_params *wm)
2208 {
2209 	u32 lb_partitions = wm->lb_size / wm->src_width;
2210 	u32 line_time = wm->active_time + wm->blank_time;
2211 	u32 latency_tolerant_lines;
2212 	u32 latency_hiding;
2213 	fixed20_12 a;
2214 
2215 	a.full = dfixed_const(1);
2216 	if (wm->vsc.full > a.full)
2217 		latency_tolerant_lines = 1;
2218 	else {
2219 		if (lb_partitions <= (wm->vtaps + 1))
2220 			latency_tolerant_lines = 1;
2221 		else
2222 			latency_tolerant_lines = 2;
2223 	}
2224 
2225 	latency_hiding = (latency_tolerant_lines * line_time + wm->blank_time);
2226 
2227 	if (evergreen_latency_watermark(wm) <= latency_hiding)
2228 		return true;
2229 	else
2230 		return false;
2231 }
2232 
/* Compute and program the display watermarks and arbitration priority
 * marks for one crtc.  Two watermark sets are written: A for the high
 * (current/dpm-high) clocks and B for the low (dpm-low) clocks. */
static void evergreen_program_watermarks(struct radeon_device *rdev,
					 struct radeon_crtc *radeon_crtc,
					 u32 lb_size, u32 num_heads)
{
	struct drm_display_mode *mode = &radeon_crtc->base.mode;
	struct evergreen_wm_params wm_low, wm_high;
	u32 dram_channels;
	u32 pixel_period;
	u32 line_time = 0;
	u32 latency_watermark_a = 0, latency_watermark_b = 0;
	u32 priority_a_mark = 0, priority_b_mark = 0;
	u32 priority_a_cnt = PRIORITY_OFF;
	u32 priority_b_cnt = PRIORITY_OFF;
	u32 pipe_offset = radeon_crtc->crtc_id * 16;
	u32 tmp, arb_control3;
	fixed20_12 a, b, c;

	if (radeon_crtc->base.enabled && num_heads && mode) {
		/* ns per pixel; mode->clock is in kHz.
		 * NOTE(review): assumes mode->clock != 0 — confirm callers
		 * never pass a zeroed mode here. */
		pixel_period = 1000000 / (u32)mode->clock;
		/* line time in ns, capped to the 16-bit register field */
		line_time = min((u32)mode->crtc_htotal * pixel_period, (u32)65535);
		priority_a_cnt = 0;
		priority_b_cnt = 0;
		dram_channels = evergreen_get_number_of_dram_channels(rdev);

		/* watermark for high clocks */
		if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) {
			wm_high.yclk =
				radeon_dpm_get_mclk(rdev, false) * 10;
			wm_high.sclk =
				radeon_dpm_get_sclk(rdev, false) * 10;
		} else {
			wm_high.yclk = rdev->pm.current_mclk * 10;
			wm_high.sclk = rdev->pm.current_sclk * 10;
		}

		wm_high.disp_clk = mode->clock;
		wm_high.src_width = mode->crtc_hdisplay;
		wm_high.active_time = mode->crtc_hdisplay * pixel_period;
		wm_high.blank_time = line_time - wm_high.active_time;
		wm_high.interlaced = false;
		if (mode->flags & DRM_MODE_FLAG_INTERLACE)
			wm_high.interlaced = true;
		wm_high.vsc = radeon_crtc->vsc;
		wm_high.vtaps = 1;
		if (radeon_crtc->rmx_type != RMX_OFF)
			wm_high.vtaps = 2;
		wm_high.bytes_per_pixel = 4; /* XXX: get this from fb config */
		wm_high.lb_size = lb_size;
		wm_high.dram_channels = dram_channels;
		wm_high.num_heads = num_heads;

		/* watermark for low clocks */
		if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) {
			wm_low.yclk =
				radeon_dpm_get_mclk(rdev, true) * 10;
			wm_low.sclk =
				radeon_dpm_get_sclk(rdev, true) * 10;
		} else {
			wm_low.yclk = rdev->pm.current_mclk * 10;
			wm_low.sclk = rdev->pm.current_sclk * 10;
		}

		wm_low.disp_clk = mode->clock;
		wm_low.src_width = mode->crtc_hdisplay;
		wm_low.active_time = mode->crtc_hdisplay * pixel_period;
		wm_low.blank_time = line_time - wm_low.active_time;
		wm_low.interlaced = false;
		if (mode->flags & DRM_MODE_FLAG_INTERLACE)
			wm_low.interlaced = true;
		wm_low.vsc = radeon_crtc->vsc;
		wm_low.vtaps = 1;
		if (radeon_crtc->rmx_type != RMX_OFF)
			wm_low.vtaps = 2;
		wm_low.bytes_per_pixel = 4; /* XXX: get this from fb config */
		wm_low.lb_size = lb_size;
		wm_low.dram_channels = dram_channels;
		wm_low.num_heads = num_heads;

		/* set for high clocks */
		latency_watermark_a = min(evergreen_latency_watermark(&wm_high), (u32)65535);
		/* set for low clocks */
		latency_watermark_b = min(evergreen_latency_watermark(&wm_low), (u32)65535);

		/* possibly force display priority to high */
		/* should really do this at mode validation time... */
		if (!evergreen_average_bandwidth_vs_dram_bandwidth_for_display(&wm_high) ||
		    !evergreen_average_bandwidth_vs_available_bandwidth(&wm_high) ||
		    !evergreen_check_latency_hiding(&wm_high) ||
		    (rdev->disp_priority == 2)) {
			DRM_DEBUG_KMS("force priority a to high\n");
			priority_a_cnt |= PRIORITY_ALWAYS_ON;
		}
		if (!evergreen_average_bandwidth_vs_dram_bandwidth_for_display(&wm_low) ||
		    !evergreen_average_bandwidth_vs_available_bandwidth(&wm_low) ||
		    !evergreen_check_latency_hiding(&wm_low) ||
		    (rdev->disp_priority == 2)) {
			DRM_DEBUG_KMS("force priority b to high\n");
			priority_b_cnt |= PRIORITY_ALWAYS_ON;
		}

		/* convert watermark A (ns) into a pixel count, scaled by the
		 * horizontal scale ratio and divided into units of 16 */
		a.full = dfixed_const(1000);
		b.full = dfixed_const(mode->clock);
		b.full = dfixed_div(b, a);
		c.full = dfixed_const(latency_watermark_a);
		c.full = dfixed_mul(c, b);
		c.full = dfixed_mul(c, radeon_crtc->hsc);
		c.full = dfixed_div(c, a);
		a.full = dfixed_const(16);
		c.full = dfixed_div(c, a);
		priority_a_mark = dfixed_trunc(c);
		priority_a_cnt |= priority_a_mark & PRIORITY_MARK_MASK;

		/* same conversion for watermark B */
		a.full = dfixed_const(1000);
		b.full = dfixed_const(mode->clock);
		b.full = dfixed_div(b, a);
		c.full = dfixed_const(latency_watermark_b);
		c.full = dfixed_mul(c, b);
		c.full = dfixed_mul(c, radeon_crtc->hsc);
		c.full = dfixed_div(c, a);
		a.full = dfixed_const(16);
		c.full = dfixed_div(c, a);
		priority_b_mark = dfixed_trunc(c);
		priority_b_cnt |= priority_b_mark & PRIORITY_MARK_MASK;

		/* Save number of lines the linebuffer leads before the scanout */
		radeon_crtc->lb_vblank_lead_lines = DIV_ROUND_UP(lb_size, mode->crtc_hdisplay);
	}

	/* select wm A */
	arb_control3 = RREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset);
	tmp = arb_control3;
	tmp &= ~LATENCY_WATERMARK_MASK(3);
	tmp |= LATENCY_WATERMARK_MASK(1);
	WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, tmp);
	WREG32(PIPE0_LATENCY_CONTROL + pipe_offset,
	       (LATENCY_LOW_WATERMARK(latency_watermark_a) |
		LATENCY_HIGH_WATERMARK(line_time)));
	/* select wm B */
	tmp = RREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset);
	tmp &= ~LATENCY_WATERMARK_MASK(3);
	tmp |= LATENCY_WATERMARK_MASK(2);
	WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, tmp);
	WREG32(PIPE0_LATENCY_CONTROL + pipe_offset,
	       (LATENCY_LOW_WATERMARK(latency_watermark_b) |
		LATENCY_HIGH_WATERMARK(line_time)));
	/* restore original selection */
	WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, arb_control3);

	/* write the priority marks */
	WREG32(PRIORITY_A_CNT + radeon_crtc->crtc_offset, priority_a_cnt);
	WREG32(PRIORITY_B_CNT + radeon_crtc->crtc_offset, priority_b_cnt);

	/* save values for DPM */
	radeon_crtc->line_time = line_time;
	radeon_crtc->wm_high = latency_watermark_a;
	radeon_crtc->wm_low = latency_watermark_b;
}
2390 
2391 /**
2392  * evergreen_bandwidth_update - update display watermarks callback.
2393  *
2394  * @rdev: radeon_device pointer
2395  *
2396  * Update the display watermarks based on the requested mode(s)
2397  * (evergreen+).
2398  */
2399 void evergreen_bandwidth_update(struct radeon_device *rdev)
2400 {
2401 	struct drm_display_mode *mode0 = NULL;
2402 	struct drm_display_mode *mode1 = NULL;
2403 	u32 num_heads = 0, lb_size;
2404 	int i;
2405 
2406 	if (!rdev->mode_info.mode_config_initialized)
2407 		return;
2408 
2409 	radeon_update_display_priority(rdev);
2410 
2411 	for (i = 0; i < rdev->num_crtc; i++) {
2412 		if (rdev->mode_info.crtcs[i]->base.enabled)
2413 			num_heads++;
2414 	}
2415 	for (i = 0; i < rdev->num_crtc; i += 2) {
2416 		mode0 = &rdev->mode_info.crtcs[i]->base.mode;
2417 		mode1 = &rdev->mode_info.crtcs[i+1]->base.mode;
2418 		lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i], mode0, mode1);
2419 		evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i], lb_size, num_heads);
2420 		lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i+1], mode1, mode0);
2421 		evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i+1], lb_size, num_heads);
2422 	}
2423 }
2424 
2425 /**
2426  * evergreen_mc_wait_for_idle - wait for MC idle callback.
2427  *
2428  * @rdev: radeon_device pointer
2429  *
2430  * Wait for the MC (memory controller) to be idle.
2431  * (evergreen+).
2432  * Returns 0 if the MC is idle, -1 if not.
2433  */
2434 int evergreen_mc_wait_for_idle(struct radeon_device *rdev)
2435 {
2436 	unsigned i;
2437 	u32 tmp;
2438 
2439 	for (i = 0; i < rdev->usec_timeout; i++) {
2440 		/* read MC_STATUS */
2441 		tmp = RREG32(SRBM_STATUS) & 0x1F00;
2442 		if (!tmp)
2443 			return 0;
2444 		udelay(1);
2445 	}
2446 	return -1;
2447 }
2448 
2449 /*
2450  * GART
2451  */
2452 void evergreen_pcie_gart_tlb_flush(struct radeon_device *rdev)
2453 {
2454 	unsigned i;
2455 	u32 tmp;
2456 
2457 	WREG32(HDP_MEM_COHERENCY_FLUSH_CNTL, 0x1);
2458 
2459 	WREG32(VM_CONTEXT0_REQUEST_RESPONSE, REQUEST_TYPE(1));
2460 	for (i = 0; i < rdev->usec_timeout; i++) {
2461 		/* read MC_STATUS */
2462 		tmp = RREG32(VM_CONTEXT0_REQUEST_RESPONSE);
2463 		tmp = (tmp & RESPONSE_TYPE_MASK) >> RESPONSE_TYPE_SHIFT;
2464 		if (tmp == 2) {
2465 			printk(KERN_WARNING "[drm] r600 flush TLB failed\n");
2466 			return;
2467 		}
2468 		if (tmp) {
2469 			return;
2470 		}
2471 		udelay(1);
2472 	}
2473 }
2474 
/* Enable the PCIE GART: pin the page table in VRAM, program the VM L2
 * cache and L1 TLBs, set up VM context0 over the GTT aperture and flush.
 * Returns 0 on success or a negative error code. */
static int evergreen_pcie_gart_enable(struct radeon_device *rdev)
{
	u32 tmp;
	int r;

	if (rdev->gart.robj == NULL) {
		dev_err(rdev->dev, "No VRAM object for PCIE GART.\n");
		return -EINVAL;
	}
	r = radeon_gart_table_vram_pin(rdev);
	if (r)
		return r;
	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
				ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control */
	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
		SYSTEM_ACCESS_MODE_NOT_IN_SYS |
		SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
		EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	/* IGP parts use the FUS_ variants of the MD TLB registers */
	if (rdev->flags & RADEON_IS_IGP) {
		WREG32(FUS_MC_VM_MD_L1_TLB0_CNTL, tmp);
		WREG32(FUS_MC_VM_MD_L1_TLB1_CNTL, tmp);
		WREG32(FUS_MC_VM_MD_L1_TLB2_CNTL, tmp);
	} else {
		WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
		WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
		WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
		/* only these families have a fourth MD TLB */
		if ((rdev->family == CHIP_JUNIPER) ||
		    (rdev->family == CHIP_CYPRESS) ||
		    (rdev->family == CHIP_HEMLOCK) ||
		    (rdev->family == CHIP_BARTS))
			WREG32(MC_VM_MD_L1_TLB3_CNTL, tmp);
	}
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	/* context0 covers the GTT range, backed by the pinned page table */
	WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12);
	WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12);
	WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12);
	WREG32(VM_CONTEXT0_CNTL, ENABLE_CONTEXT | PAGE_TABLE_DEPTH(0) |
				RANGE_PROTECTION_FAULT_ENABLE_DEFAULT);
	/* faulting accesses are redirected to the dummy page */
	WREG32(VM_CONTEXT0_PROTECTION_FAULT_DEFAULT_ADDR,
			(u32)(rdev->dummy_page.addr >> 12));
	WREG32(VM_CONTEXT1_CNTL, 0);

	evergreen_pcie_gart_tlb_flush(rdev);
	DRM_INFO("PCIE GART of %uM enabled (table at 0x%016llX).\n",
		 (unsigned)(rdev->mc.gtt_size >> 20),
		 (unsigned long long)rdev->gart.table_addr);
	rdev->gart.ready = true;
	return 0;
}
2532 
/* Disable the PCIE GART: turn off both VM contexts, put the L2/L1
 * caches into a minimal pass-through configuration and unpin the
 * page table from VRAM. */
static void evergreen_pcie_gart_disable(struct radeon_device *rdev)
{
	u32 tmp;

	/* Disable all tables */
	WREG32(VM_CONTEXT0_CNTL, 0);
	WREG32(VM_CONTEXT1_CNTL, 0);

	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_FRAGMENT_PROCESSING |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control — note ENABLE_L1_TLB is deliberately absent */
	tmp = EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	radeon_gart_table_vram_unpin(rdev);
}
2557 
/* Full GART teardown: disable the hardware paths, then free the
 * page-table VRAM object and the gart bookkeeping. */
static void evergreen_pcie_gart_fini(struct radeon_device *rdev)
{
	evergreen_pcie_gart_disable(rdev);
	radeon_gart_table_vram_free(rdev);
	radeon_gart_fini(rdev);
}
2564 
2565 
/* Configure the VM L2/L1 caches for AGP operation: TLBs are enabled in
 * pass-through mode but both VM contexts stay disabled (no page table). */
static void evergreen_agp_enable(struct radeon_device *rdev)
{
	u32 tmp;

	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
				ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control */
	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
		SYSTEM_ACCESS_MODE_NOT_IN_SYS |
		SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
		EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	/* no VM contexts in AGP mode */
	WREG32(VM_CONTEXT0_CNTL, 0);
	WREG32(VM_CONTEXT1_CNTL, 0);
}
2591 
/* Per-DIG encoder register block offsets, indexed by DIG instance. */
static const unsigned ni_dig_offsets[] =
{
	NI_DIG0_REGISTER_OFFSET,
	NI_DIG1_REGISTER_OFFSET,
	NI_DIG2_REGISTER_OFFSET,
	NI_DIG3_REGISTER_OFFSET,
	NI_DIG4_REGISTER_OFFSET,
	NI_DIG5_REGISTER_OFFSET
};

/* UNIPHY TX control register offsets, one per PHY instance. */
static const unsigned ni_tx_offsets[] =
{
	NI_DCIO_UNIPHY0_UNIPHY_TX_CONTROL1,
	NI_DCIO_UNIPHY1_UNIPHY_TX_CONTROL1,
	NI_DCIO_UNIPHY2_UNIPHY_TX_CONTROL1,
	NI_DCIO_UNIPHY3_UNIPHY_TX_CONTROL1,
	NI_DCIO_UNIPHY4_UNIPHY_TX_CONTROL1,
	NI_DCIO_UNIPHY5_UNIPHY_TX_CONTROL1
};

/* DP register block offsets, indexed by DP/DIG front-end instance. */
static const unsigned evergreen_dp_offsets[] =
{
	EVERGREEN_DP0_REGISTER_OFFSET,
	EVERGREEN_DP1_REGISTER_OFFSET,
	EVERGREEN_DP2_REGISTER_OFFSET,
	EVERGREEN_DP3_REGISTER_OFFSET,
	EVERGREEN_DP4_REGISTER_OFFSET,
	EVERGREEN_DP5_REGISTER_OFFSET
};
2621 
2622 
2623 /*
2624  * Assumption is that EVERGREEN_CRTC_MASTER_EN enable for requested crtc
2625  * We go from crtc to connector and it is not relible  since it
2626  * should be an opposite direction .If crtc is enable then
2627  * find the dig_fe which selects this crtc and insure that it enable.
2628  * if such dig_fe is found then find dig_be which selects found dig_be and
2629  * insure that it enable and in DP_SST mode.
2630  * if UNIPHY_PLL_CONTROL1.enable then we should disconnect timing
2631  * from dp symbols clocks .
2632  */
2633 static bool evergreen_is_dp_sst_stream_enabled(struct radeon_device *rdev,
2634 					       unsigned crtc_id, unsigned *ret_dig_fe)
2635 {
2636 	unsigned i;
2637 	unsigned dig_fe;
2638 	unsigned dig_be;
2639 	unsigned dig_en_be;
2640 	unsigned uniphy_pll;
2641 	unsigned digs_fe_selected;
2642 	unsigned dig_be_mode;
2643 	unsigned dig_fe_mask;
2644 	bool is_enabled = false;
2645 	bool found_crtc = false;
2646 
2647 	/* loop through all running dig_fe to find selected crtc */
2648 	for (i = 0; i < ARRAY_SIZE(ni_dig_offsets); i++) {
2649 		dig_fe = RREG32(NI_DIG_FE_CNTL + ni_dig_offsets[i]);
2650 		if (dig_fe & NI_DIG_FE_CNTL_SYMCLK_FE_ON &&
2651 		    crtc_id == NI_DIG_FE_CNTL_SOURCE_SELECT(dig_fe)) {
2652 			/* found running pipe */
2653 			found_crtc = true;
2654 			dig_fe_mask = 1 << i;
2655 			dig_fe = i;
2656 			break;
2657 		}
2658 	}
2659 
2660 	if (found_crtc) {
2661 		/* loop through all running dig_be to find selected dig_fe */
2662 		for (i = 0; i < ARRAY_SIZE(ni_dig_offsets); i++) {
2663 			dig_be = RREG32(NI_DIG_BE_CNTL + ni_dig_offsets[i]);
2664 			/* if dig_fe_selected by dig_be? */
2665 			digs_fe_selected = NI_DIG_BE_CNTL_FE_SOURCE_SELECT(dig_be);
2666 			dig_be_mode = NI_DIG_FE_CNTL_MODE(dig_be);
2667 			if (dig_fe_mask &  digs_fe_selected &&
2668 			    /* if dig_be in sst mode? */
2669 			    dig_be_mode == NI_DIG_BE_DPSST) {
2670 				dig_en_be = RREG32(NI_DIG_BE_EN_CNTL +
2671 						   ni_dig_offsets[i]);
2672 				uniphy_pll = RREG32(NI_DCIO_UNIPHY0_PLL_CONTROL1 +
2673 						    ni_tx_offsets[i]);
2674 				/* dig_be enable and tx is running */
2675 				if (dig_en_be & NI_DIG_BE_EN_CNTL_ENABLE &&
2676 				    dig_en_be & NI_DIG_BE_EN_CNTL_SYMBCLK_ON &&
2677 				    uniphy_pll & NI_DCIO_UNIPHY0_PLL_CONTROL1_ENABLE) {
2678 					is_enabled = true;
2679 					*ret_dig_fe = dig_fe;
2680 					break;
2681 				}
2682 			}
2683 		}
2684 	}
2685 
2686 	return is_enabled;
2687 }
2688 
2689 /*
2690  * Blank dig when in dp sst mode
2691  * Dig ignores crtc timing
2692  */
2693 static void evergreen_blank_dp_output(struct radeon_device *rdev,
2694 				      unsigned dig_fe)
2695 {
2696 	unsigned stream_ctrl;
2697 	unsigned fifo_ctrl;
2698 	unsigned counter = 0;
2699 
2700 	if (dig_fe >= ARRAY_SIZE(evergreen_dp_offsets)) {
2701 		DRM_ERROR("invalid dig_fe %d\n", dig_fe);
2702 		return;
2703 	}
2704 
2705 	stream_ctrl = RREG32(EVERGREEN_DP_VID_STREAM_CNTL +
2706 			     evergreen_dp_offsets[dig_fe]);
2707 	if (!(stream_ctrl & EVERGREEN_DP_VID_STREAM_CNTL_ENABLE)) {
2708 		DRM_ERROR("dig %d , should be enable\n", dig_fe);
2709 		return;
2710 	}
2711 
2712 	stream_ctrl &=~EVERGREEN_DP_VID_STREAM_CNTL_ENABLE;
2713 	WREG32(EVERGREEN_DP_VID_STREAM_CNTL +
2714 	       evergreen_dp_offsets[dig_fe], stream_ctrl);
2715 
2716 	stream_ctrl = RREG32(EVERGREEN_DP_VID_STREAM_CNTL +
2717 			     evergreen_dp_offsets[dig_fe]);
2718 	while (counter < 32 && stream_ctrl & EVERGREEN_DP_VID_STREAM_STATUS) {
2719 		msleep(1);
2720 		counter++;
2721 		stream_ctrl = RREG32(EVERGREEN_DP_VID_STREAM_CNTL +
2722 				     evergreen_dp_offsets[dig_fe]);
2723 	}
2724 	if (counter >= 32 )
2725 		DRM_ERROR("counter exceeds %d\n", counter);
2726 
2727 	fifo_ctrl = RREG32(EVERGREEN_DP_STEER_FIFO + evergreen_dp_offsets[dig_fe]);
2728 	fifo_ctrl |= EVERGREEN_DP_STEER_FIFO_RESET;
2729 	WREG32(EVERGREEN_DP_STEER_FIFO + evergreen_dp_offsets[dig_fe], fifo_ctrl);
2730 
2731 }
2732 
/**
 * evergreen_mc_stop - blank the displays and black out the memory controller
 *
 * @rdev: radeon_device pointer
 * @save: state saved here for evergreen_mc_resume() to restore
 *
 * Saves the VGA render/HDP state, blanks every active display controller
 * (waiting one frame per CRTC so scanout has actually stopped), blacks out
 * the MC, and locks the double buffered display registers so the
 * framebuffer location can be changed safely.
 */
void evergreen_mc_stop(struct radeon_device *rdev, struct evergreen_mc_save *save)
{
	u32 crtc_enabled, tmp, frame_count, blackout;
	int i, j;
	unsigned dig_fe;

	if (!ASIC_IS_NODCE(rdev)) {
		save->vga_render_control = RREG32(VGA_RENDER_CONTROL);
		save->vga_hdp_control = RREG32(VGA_HDP_CONTROL);

		/* disable VGA render */
		WREG32(VGA_RENDER_CONTROL, 0);
	}
	/* blank the display controllers */
	for (i = 0; i < rdev->num_crtc; i++) {
		crtc_enabled = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]) & EVERGREEN_CRTC_MASTER_EN;
		if (crtc_enabled) {
			save->crtc_enabled[i] = true;
			if (ASIC_IS_DCE6(rdev)) {
				/* DCE6: blank the data path, under the update lock */
				tmp = RREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i]);
				if (!(tmp & EVERGREEN_CRTC_BLANK_DATA_EN)) {
					radeon_wait_for_vblank(rdev, i);
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
					tmp |= EVERGREEN_CRTC_BLANK_DATA_EN;
					WREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i], tmp);
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
				}
			} else {
				/* pre-DCE6: stop the CRTC's memory read requests instead */
				tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
				if (!(tmp & EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE)) {
					radeon_wait_for_vblank(rdev, i);
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
					tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
					WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
				}
			}
			/* wait for the next frame */
			frame_count = radeon_get_vblank_counter(rdev, i);
			for (j = 0; j < rdev->usec_timeout; j++) {
				if (radeon_get_vblank_counter(rdev, i) != frame_count)
					break;
				udelay(1);
			}
			/* We should disable the dig if it drives a DP SST stream,
			 * but we are called from radeon_device_init() where the
			 * display topology is not known yet (it only becomes
			 * available after radeon_modeset_init()).
			 * radeon_atom_encoder_dpms_dig() would do the job once
			 * initialized properly; for now blank the DP output
			 * manually.
			 */
			if (ASIC_IS_DCE5(rdev) &&
			    evergreen_is_dp_sst_stream_enabled(rdev, i ,&dig_fe))
				evergreen_blank_dp_output(rdev, dig_fe);
			/* The six lines below could arguably be removed. */
			/* XXX this is a hack to avoid strange behavior with EFI on certain systems */
			WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
			tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
			tmp &= ~EVERGREEN_CRTC_MASTER_EN;
			WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
			WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
			/* NOTE(review): marking the crtc disabled here means the
			 * "lock double buffered regs" loop below and the re-enable
			 * path in evergreen_mc_resume() skip this crtc — confirm
			 * this is the intended effect of the EFI hack. */
			save->crtc_enabled[i] = false;
			/* ***** */
		} else {
			save->crtc_enabled[i] = false;
		}
	}

	radeon_mc_wait_for_idle(rdev);

	blackout = RREG32(MC_SHARED_BLACKOUT_CNTL);
	if ((blackout & BLACKOUT_MODE_MASK) != 1) {
		/* Block CPU access */
		WREG32(BIF_FB_EN, 0);
		/* blackout the MC */
		blackout &= ~BLACKOUT_MODE_MASK;
		WREG32(MC_SHARED_BLACKOUT_CNTL, blackout | 1);
	}
	/* wait for the MC to settle */
	udelay(100);

	/* lock double buffered regs */
	for (i = 0; i < rdev->num_crtc; i++) {
		if (save->crtc_enabled[i]) {
			tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
			if (!(tmp & EVERGREEN_GRPH_UPDATE_LOCK)) {
				tmp |= EVERGREEN_GRPH_UPDATE_LOCK;
				WREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i], tmp);
			}
			tmp = RREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i]);
			if (!(tmp & 1)) {
				tmp |= 1;
				WREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i], tmp);
			}
		}
	}
}
2830 
/**
 * evergreen_mc_resume - undo evergreen_mc_stop()
 *
 * @rdev: radeon_device pointer
 * @save: state previously captured by evergreen_mc_stop()
 *
 * Points all CRTC and VGA surface addresses at the (possibly relocated)
 * start of VRAM, unlocks the double buffered display registers, lifts the
 * MC blackout and re-enables CPU framebuffer access, then unblanks any
 * display controllers that were active before the stop.
 */
void evergreen_mc_resume(struct radeon_device *rdev, struct evergreen_mc_save *save)
{
	u32 tmp, frame_count;
	int i, j;

	/* update crtc base addresses */
	for (i = 0; i < rdev->num_crtc; i++) {
		WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + crtc_offsets[i],
		       upper_32_bits(rdev->mc.vram_start));
		WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + crtc_offsets[i],
		       upper_32_bits(rdev->mc.vram_start));
		WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + crtc_offsets[i],
		       (u32)rdev->mc.vram_start);
		WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + crtc_offsets[i],
		       (u32)rdev->mc.vram_start);
	}

	if (!ASIC_IS_NODCE(rdev)) {
		WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS_HIGH, upper_32_bits(rdev->mc.vram_start));
		WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS, (u32)rdev->mc.vram_start);
	}

	/* unlock regs and wait for update */
	for (i = 0; i < rdev->num_crtc; i++) {
		if (save->crtc_enabled[i]) {
			/* select update mode 3 in the low 3 bits if not already set */
			tmp = RREG32(EVERGREEN_MASTER_UPDATE_MODE + crtc_offsets[i]);
			if ((tmp & 0x7) != 3) {
				tmp &= ~0x7;
				tmp |= 0x3;
				WREG32(EVERGREEN_MASTER_UPDATE_MODE + crtc_offsets[i], tmp);
			}
			tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
			if (tmp & EVERGREEN_GRPH_UPDATE_LOCK) {
				tmp &= ~EVERGREEN_GRPH_UPDATE_LOCK;
				WREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i], tmp);
			}
			tmp = RREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i]);
			if (tmp & 1) {
				tmp &= ~1;
				WREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i], tmp);
			}
			/* wait (bounded by usec_timeout) for the surface update to land */
			for (j = 0; j < rdev->usec_timeout; j++) {
				tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
				if ((tmp & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING) == 0)
					break;
				udelay(1);
			}
		}
	}

	/* unblackout the MC */
	tmp = RREG32(MC_SHARED_BLACKOUT_CNTL);
	tmp &= ~BLACKOUT_MODE_MASK;
	WREG32(MC_SHARED_BLACKOUT_CNTL, tmp);
	/* allow CPU access */
	WREG32(BIF_FB_EN, FB_READ_EN | FB_WRITE_EN);

	for (i = 0; i < rdev->num_crtc; i++) {
		if (save->crtc_enabled[i]) {
			if (ASIC_IS_DCE6(rdev)) {
				/* DCE6: unblank the data path, under the update lock */
				tmp = RREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i]);
				tmp &= ~EVERGREEN_CRTC_BLANK_DATA_EN;
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
				WREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i], tmp);
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
			} else {
				/* pre-DCE6: re-enable the CRTC's memory read requests */
				tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
				tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
				WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
			}
			/* wait for the next frame */
			frame_count = radeon_get_vblank_counter(rdev, i);
			for (j = 0; j < rdev->usec_timeout; j++) {
				if (radeon_get_vblank_counter(rdev, i) != frame_count)
					break;
				udelay(1);
			}
		}
	}
	if (!ASIC_IS_NODCE(rdev)) {
		/* Unlock vga access */
		WREG32(VGA_HDP_CONTROL, save->vga_hdp_control);
		mdelay(1);
		WREG32(VGA_RENDER_CONTROL, save->vga_render_control);
	}
}
2919 
/**
 * evergreen_mc_program - program the memory controller address map
 *
 * @rdev: radeon_device pointer
 *
 * Programs the VRAM and (if applicable) AGP apertures into the MC while
 * the displays are stopped via evergreen_mc_stop()/evergreen_mc_resume(),
 * then disables the VGA renderer so it cannot scribble over driver-owned
 * VRAM.
 */
void evergreen_mc_program(struct radeon_device *rdev)
{
	struct evergreen_mc_save save;
	u32 tmp;
	int i, j;

	/* Initialize HDP */
	for (i = 0, j = 0; i < 32; i++, j += 0x18) {
		WREG32((0x2c14 + j), 0x00000000);
		WREG32((0x2c18 + j), 0x00000000);
		WREG32((0x2c1c + j), 0x00000000);
		WREG32((0x2c20 + j), 0x00000000);
		WREG32((0x2c24 + j), 0x00000000);
	}
	WREG32(HDP_REG_COHERENCY_FLUSH_CNTL, 0);

	evergreen_mc_stop(rdev, &save);
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
	}
	/* Lockout access through VGA aperture*/
	WREG32(VGA_HDP_CONTROL, VGA_MEMORY_DISABLE);
	/* Update configuration */
	if (rdev->flags & RADEON_IS_AGP) {
		/* the system aperture must span both VRAM and the AGP window,
		 * whichever order they sit in the address space */
		if (rdev->mc.vram_start < rdev->mc.gtt_start) {
			/* VRAM before AGP */
			WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
				rdev->mc.vram_start >> 12);
			WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
				rdev->mc.gtt_end >> 12);
		} else {
			/* VRAM after AGP */
			WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
				rdev->mc.gtt_start >> 12);
			WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
				rdev->mc.vram_end >> 12);
		}
	} else {
		WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
			rdev->mc.vram_start >> 12);
		WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
			rdev->mc.vram_end >> 12);
	}
	/* out-of-aperture accesses are redirected to the scratch page */
	WREG32(MC_VM_SYSTEM_APERTURE_DEFAULT_ADDR, rdev->vram_scratch.gpu_addr >> 12);
	/* llano/ontario only */
	if ((rdev->family == CHIP_PALM) ||
	    (rdev->family == CHIP_SUMO) ||
	    (rdev->family == CHIP_SUMO2)) {
		tmp = RREG32(MC_FUS_VM_FB_OFFSET) & 0x000FFFFF;
		tmp |= ((rdev->mc.vram_end >> 20) & 0xF) << 24;
		tmp |= ((rdev->mc.vram_start >> 20) & 0xF) << 20;
		WREG32(MC_FUS_VM_FB_OFFSET, tmp);
	}
	/* FB location: end in bits 31:16, start in bits 15:0, 16MB units */
	tmp = ((rdev->mc.vram_end >> 24) & 0xFFFF) << 16;
	tmp |= ((rdev->mc.vram_start >> 24) & 0xFFFF);
	WREG32(MC_VM_FB_LOCATION, tmp);
	WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8));
	WREG32(HDP_NONSURFACE_INFO, (2 << 7) | (1 << 30));
	WREG32(HDP_NONSURFACE_SIZE, 0x3FFFFFFF);
	if (rdev->flags & RADEON_IS_AGP) {
		WREG32(MC_VM_AGP_TOP, rdev->mc.gtt_end >> 16);
		WREG32(MC_VM_AGP_BOT, rdev->mc.gtt_start >> 16);
		WREG32(MC_VM_AGP_BASE, rdev->mc.agp_base >> 22);
	} else {
		/* BOT > TOP disables the AGP aperture */
		WREG32(MC_VM_AGP_BASE, 0);
		WREG32(MC_VM_AGP_TOP, 0x0FFFFFFF);
		WREG32(MC_VM_AGP_BOT, 0x0FFFFFFF);
	}
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
	}
	evergreen_mc_resume(rdev, &save);
	/* we need to own VRAM, so turn off the VGA renderer here
	 * to stop it overwriting our objects */
	rv515_vga_render_disable(rdev);
}
2996 
2997 /*
2998  * CP.
2999  */
/**
 * evergreen_ring_ib_execute - schedule an indirect buffer on the gfx ring
 *
 * @rdev: radeon_device pointer
 * @ib: indirect buffer to execute
 *
 * Switches the CP to DX10/11 packet mode, optionally records the predicted
 * read pointer (via the rptr save register or a write-back memory write),
 * then emits the INDIRECT_BUFFER packet pointing at the IB.
 */
void evergreen_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib)
{
	struct radeon_ring *ring = &rdev->ring[ib->ring];
	u32 next_rptr;

	/* set to DX10/11 mode */
	radeon_ring_write(ring, PACKET3(PACKET3_MODE_CONTROL, 0));
	radeon_ring_write(ring, 1);

	if (ring->rptr_save_reg) {
		/* +3 for this SET_CONFIG_REG packet, +4 for the trailing
		 * INDIRECT_BUFFER packet emitted below */
		next_rptr = ring->wptr + 3 + 4;
		radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
		radeon_ring_write(ring, ((ring->rptr_save_reg -
					  PACKET3_SET_CONFIG_REG_START) >> 2));
		radeon_ring_write(ring, next_rptr);
	} else if (rdev->wb.enabled) {
		/* +5 for this MEM_WRITE packet, +4 for the trailing
		 * INDIRECT_BUFFER packet emitted below */
		next_rptr = ring->wptr + 5 + 4;
		radeon_ring_write(ring, PACKET3(PACKET3_MEM_WRITE, 3));
		radeon_ring_write(ring, ring->next_rptr_gpu_addr & 0xfffffffc);
		radeon_ring_write(ring, (upper_32_bits(ring->next_rptr_gpu_addr) & 0xff) | (1 << 18));
		radeon_ring_write(ring, next_rptr);
		radeon_ring_write(ring, 0);
	}

	radeon_ring_write(ring, PACKET3(PACKET3_INDIRECT_BUFFER, 2));
	radeon_ring_write(ring,
#ifdef __BIG_ENDIAN
			  (2 << 0) |
#endif
			  (ib->gpu_addr & 0xFFFFFFFC));
	radeon_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFF);
	radeon_ring_write(ring, ib->length_dw);
}
3033 
3034 
/**
 * evergreen_cp_load_microcode - load the PFP and ME microcode into the CP
 *
 * @rdev: radeon_device pointer
 *
 * Stops the CP, then streams the big-endian PFP and ME firmware images
 * (fetched earlier into rdev->pfp_fw/rdev->me_fw) into the respective
 * ucode RAMs and resets the ucode address pointers.
 *
 * Returns 0 on success, -EINVAL if either firmware image is missing.
 */
static int evergreen_cp_load_microcode(struct radeon_device *rdev)
{
	const __be32 *fw_data;
	int i;

	if (!rdev->me_fw || !rdev->pfp_fw)
		return -EINVAL;

	r700_cp_stop(rdev);
	WREG32(CP_RB_CNTL,
#ifdef __BIG_ENDIAN
	       BUF_SWAP_32BIT |
#endif
	       RB_NO_UPDATE | RB_BLKSZ(15) | RB_BUFSZ(3));

	/* PFP (pre-fetch parser) ucode */
	fw_data = (const __be32 *)rdev->pfp_fw->data;
	WREG32(CP_PFP_UCODE_ADDR, 0);
	for (i = 0; i < EVERGREEN_PFP_UCODE_SIZE; i++)
		WREG32(CP_PFP_UCODE_DATA, be32_to_cpup(fw_data++));
	WREG32(CP_PFP_UCODE_ADDR, 0);

	/* ME (micro engine) ucode */
	fw_data = (const __be32 *)rdev->me_fw->data;
	WREG32(CP_ME_RAM_WADDR, 0);
	for (i = 0; i < EVERGREEN_PM4_UCODE_SIZE; i++)
		WREG32(CP_ME_RAM_DATA, be32_to_cpup(fw_data++));

	WREG32(CP_PFP_UCODE_ADDR, 0);
	WREG32(CP_ME_RAM_WADDR, 0);
	WREG32(CP_ME_RAM_RADDR, 0);
	return 0;
}
3066 
/**
 * evergreen_cp_start - initialize the CP micro engine and context state
 *
 * @rdev: radeon_device pointer
 *
 * Emits the ME_INITIALIZE packet, enables the CP micro engine, then emits
 * the golden clear-state (evergreen_default_state) plus a handful of fixed
 * register defaults on the gfx ring.
 *
 * Returns 0 on success, or a negative error code if the ring could not be
 * locked.
 */
static int evergreen_cp_start(struct radeon_device *rdev)
{
	struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
	int r, i;
	uint32_t cp_me;

	/* 7 dwords: ME_INITIALIZE header + 6 payload dwords */
	r = radeon_ring_lock(rdev, ring, 7);
	if (r) {
		DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
		return r;
	}
	radeon_ring_write(ring, PACKET3(PACKET3_ME_INITIALIZE, 5));
	radeon_ring_write(ring, 0x1);
	radeon_ring_write(ring, 0x0);
	radeon_ring_write(ring, rdev->config.evergreen.max_hw_contexts - 1);
	radeon_ring_write(ring, PACKET3_ME_INITIALIZE_DEVICE_ID(1));
	radeon_ring_write(ring, 0);
	radeon_ring_write(ring, 0);
	radeon_ring_unlock_commit(rdev, ring, false);

	cp_me = 0xff;
	WREG32(CP_ME_CNTL, cp_me);

	/* golden state plus 19 dwords of fixed packets below */
	r = radeon_ring_lock(rdev, ring, evergreen_default_size + 19);
	if (r) {
		DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
		return r;
	}

	/* setup clear context state */
	radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
	radeon_ring_write(ring, PACKET3_PREAMBLE_BEGIN_CLEAR_STATE);

	for (i = 0; i < evergreen_default_size; i++)
		radeon_ring_write(ring, evergreen_default_state[i]);

	radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
	radeon_ring_write(ring, PACKET3_PREAMBLE_END_CLEAR_STATE);

	/* set clear context state */
	radeon_ring_write(ring, PACKET3(PACKET3_CLEAR_STATE, 0));
	radeon_ring_write(ring, 0);

	/* SQ_VTX_BASE_VTX_LOC */
	radeon_ring_write(ring, 0xc0026f00);
	radeon_ring_write(ring, 0x00000000);
	radeon_ring_write(ring, 0x00000000);
	radeon_ring_write(ring, 0x00000000);

	/* Clear consts */
	radeon_ring_write(ring, 0xc0036f00);
	radeon_ring_write(ring, 0x00000bc4);
	radeon_ring_write(ring, 0xffffffff);
	radeon_ring_write(ring, 0xffffffff);
	radeon_ring_write(ring, 0xffffffff);

	radeon_ring_write(ring, 0xc0026900);
	radeon_ring_write(ring, 0x00000316);
	radeon_ring_write(ring, 0x0000000e); /* VGT_VERTEX_REUSE_BLOCK_CNTL */
	radeon_ring_write(ring, 0x00000010); /*  */

	radeon_ring_unlock_commit(rdev, ring, false);

	return 0;
}
3132 
/**
 * evergreen_cp_resume - bring the CP and its ring buffer back up
 *
 * @rdev: radeon_device pointer
 *
 * Soft-resets the CP (and the PA/SH/VGT/SPI/SX blocks that must be reset
 * along with it), programs the ring buffer size, pointers, and write-back
 * addresses, starts the CP via evergreen_cp_start(), and ring-tests it.
 *
 * Returns 0 on success or a negative error code if the ring test fails.
 */
static int evergreen_cp_resume(struct radeon_device *rdev)
{
	struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
	u32 tmp;
	u32 rb_bufsz;
	int r;

	/* Reset cp; if cp is reset, then PA, SH, VGT also need to be reset */
	WREG32(GRBM_SOFT_RESET, (SOFT_RESET_CP |
				 SOFT_RESET_PA |
				 SOFT_RESET_SH |
				 SOFT_RESET_VGT |
				 SOFT_RESET_SPI |
				 SOFT_RESET_SX));
	RREG32(GRBM_SOFT_RESET);	/* read back to flush the write */
	mdelay(15);
	WREG32(GRBM_SOFT_RESET, 0);
	RREG32(GRBM_SOFT_RESET);

	/* Set ring buffer size (log2 of the size in dwords) */
	rb_bufsz = order_base_2(ring->ring_size / 8);
	tmp = (order_base_2(RADEON_GPU_PAGE_SIZE/8) << 8) | rb_bufsz;
#ifdef __BIG_ENDIAN
	tmp |= BUF_SWAP_32BIT;
#endif
	WREG32(CP_RB_CNTL, tmp);
	WREG32(CP_SEM_WAIT_TIMER, 0x0);
	WREG32(CP_SEM_INCOMPLETE_TIMER_CNTL, 0x0);

	/* Set the write pointer delay */
	WREG32(CP_RB_WPTR_DELAY, 0);

	/* Initialize the ring buffer's read and write pointers */
	WREG32(CP_RB_CNTL, tmp | RB_RPTR_WR_ENA);
	WREG32(CP_RB_RPTR_WR, 0);
	ring->wptr = 0;
	WREG32(CP_RB_WPTR, ring->wptr);

	/* set the wb address whether it's enabled or not */
	WREG32(CP_RB_RPTR_ADDR,
	       ((rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFFFFFFFC));
	WREG32(CP_RB_RPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFF);
	WREG32(SCRATCH_ADDR, ((rdev->wb.gpu_addr + RADEON_WB_SCRATCH_OFFSET) >> 8) & 0xFFFFFFFF);

	if (rdev->wb.enabled)
		WREG32(SCRATCH_UMSK, 0xff);
	else {
		/* no write-back: keep rptr updates disabled */
		tmp |= RB_NO_UPDATE;
		WREG32(SCRATCH_UMSK, 0);
	}

	mdelay(1);
	/* re-write CP_RB_CNTL with RB_RPTR_WR_ENA cleared */
	WREG32(CP_RB_CNTL, tmp);

	WREG32(CP_RB_BASE, ring->gpu_addr >> 8);
	WREG32(CP_DEBUG, (1 << 27) | (1 << 28));

	evergreen_cp_start(rdev);
	ring->ready = true;
	r = radeon_ring_test(rdev, RADEON_RING_TYPE_GFX_INDEX, ring);
	if (r) {
		ring->ready = false;
		return r;
	}
	return 0;
}
3199 
3200 /*
3201  * Core functions
3202  */
3203 static void evergreen_gpu_init(struct radeon_device *rdev)
3204 {
3205 	u32 gb_addr_config;
3206 	u32 mc_shared_chmap, mc_arb_ramcfg;
3207 	u32 sx_debug_1;
3208 	u32 smx_dc_ctl0;
3209 	u32 sq_config;
3210 	u32 sq_lds_resource_mgmt;
3211 	u32 sq_gpr_resource_mgmt_1;
3212 	u32 sq_gpr_resource_mgmt_2;
3213 	u32 sq_gpr_resource_mgmt_3;
3214 	u32 sq_thread_resource_mgmt;
3215 	u32 sq_thread_resource_mgmt_2;
3216 	u32 sq_stack_resource_mgmt_1;
3217 	u32 sq_stack_resource_mgmt_2;
3218 	u32 sq_stack_resource_mgmt_3;
3219 	u32 vgt_cache_invalidation;
3220 	u32 hdp_host_path_cntl, tmp;
3221 	u32 disabled_rb_mask;
3222 	int i, j, ps_thread_count;
3223 
3224 	switch (rdev->family) {
3225 	case CHIP_CYPRESS:
3226 	case CHIP_HEMLOCK:
3227 		rdev->config.evergreen.num_ses = 2;
3228 		rdev->config.evergreen.max_pipes = 4;
3229 		rdev->config.evergreen.max_tile_pipes = 8;
3230 		rdev->config.evergreen.max_simds = 10;
3231 		rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
3232 		rdev->config.evergreen.max_gprs = 256;
3233 		rdev->config.evergreen.max_threads = 248;
3234 		rdev->config.evergreen.max_gs_threads = 32;
3235 		rdev->config.evergreen.max_stack_entries = 512;
3236 		rdev->config.evergreen.sx_num_of_sets = 4;
3237 		rdev->config.evergreen.sx_max_export_size = 256;
3238 		rdev->config.evergreen.sx_max_export_pos_size = 64;
3239 		rdev->config.evergreen.sx_max_export_smx_size = 192;
3240 		rdev->config.evergreen.max_hw_contexts = 8;
3241 		rdev->config.evergreen.sq_num_cf_insts = 2;
3242 
3243 		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3244 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3245 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3246 		gb_addr_config = CYPRESS_GB_ADDR_CONFIG_GOLDEN;
3247 		break;
3248 	case CHIP_JUNIPER:
3249 		rdev->config.evergreen.num_ses = 1;
3250 		rdev->config.evergreen.max_pipes = 4;
3251 		rdev->config.evergreen.max_tile_pipes = 4;
3252 		rdev->config.evergreen.max_simds = 10;
3253 		rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
3254 		rdev->config.evergreen.max_gprs = 256;
3255 		rdev->config.evergreen.max_threads = 248;
3256 		rdev->config.evergreen.max_gs_threads = 32;
3257 		rdev->config.evergreen.max_stack_entries = 512;
3258 		rdev->config.evergreen.sx_num_of_sets = 4;
3259 		rdev->config.evergreen.sx_max_export_size = 256;
3260 		rdev->config.evergreen.sx_max_export_pos_size = 64;
3261 		rdev->config.evergreen.sx_max_export_smx_size = 192;
3262 		rdev->config.evergreen.max_hw_contexts = 8;
3263 		rdev->config.evergreen.sq_num_cf_insts = 2;
3264 
3265 		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3266 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3267 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3268 		gb_addr_config = JUNIPER_GB_ADDR_CONFIG_GOLDEN;
3269 		break;
3270 	case CHIP_REDWOOD:
3271 		rdev->config.evergreen.num_ses = 1;
3272 		rdev->config.evergreen.max_pipes = 4;
3273 		rdev->config.evergreen.max_tile_pipes = 4;
3274 		rdev->config.evergreen.max_simds = 5;
3275 		rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3276 		rdev->config.evergreen.max_gprs = 256;
3277 		rdev->config.evergreen.max_threads = 248;
3278 		rdev->config.evergreen.max_gs_threads = 32;
3279 		rdev->config.evergreen.max_stack_entries = 256;
3280 		rdev->config.evergreen.sx_num_of_sets = 4;
3281 		rdev->config.evergreen.sx_max_export_size = 256;
3282 		rdev->config.evergreen.sx_max_export_pos_size = 64;
3283 		rdev->config.evergreen.sx_max_export_smx_size = 192;
3284 		rdev->config.evergreen.max_hw_contexts = 8;
3285 		rdev->config.evergreen.sq_num_cf_insts = 2;
3286 
3287 		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3288 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3289 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3290 		gb_addr_config = REDWOOD_GB_ADDR_CONFIG_GOLDEN;
3291 		break;
3292 	case CHIP_CEDAR:
3293 	default:
3294 		rdev->config.evergreen.num_ses = 1;
3295 		rdev->config.evergreen.max_pipes = 2;
3296 		rdev->config.evergreen.max_tile_pipes = 2;
3297 		rdev->config.evergreen.max_simds = 2;
3298 		rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3299 		rdev->config.evergreen.max_gprs = 256;
3300 		rdev->config.evergreen.max_threads = 192;
3301 		rdev->config.evergreen.max_gs_threads = 16;
3302 		rdev->config.evergreen.max_stack_entries = 256;
3303 		rdev->config.evergreen.sx_num_of_sets = 4;
3304 		rdev->config.evergreen.sx_max_export_size = 128;
3305 		rdev->config.evergreen.sx_max_export_pos_size = 32;
3306 		rdev->config.evergreen.sx_max_export_smx_size = 96;
3307 		rdev->config.evergreen.max_hw_contexts = 4;
3308 		rdev->config.evergreen.sq_num_cf_insts = 1;
3309 
3310 		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3311 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3312 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3313 		gb_addr_config = CEDAR_GB_ADDR_CONFIG_GOLDEN;
3314 		break;
3315 	case CHIP_PALM:
3316 		rdev->config.evergreen.num_ses = 1;
3317 		rdev->config.evergreen.max_pipes = 2;
3318 		rdev->config.evergreen.max_tile_pipes = 2;
3319 		rdev->config.evergreen.max_simds = 2;
3320 		rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3321 		rdev->config.evergreen.max_gprs = 256;
3322 		rdev->config.evergreen.max_threads = 192;
3323 		rdev->config.evergreen.max_gs_threads = 16;
3324 		rdev->config.evergreen.max_stack_entries = 256;
3325 		rdev->config.evergreen.sx_num_of_sets = 4;
3326 		rdev->config.evergreen.sx_max_export_size = 128;
3327 		rdev->config.evergreen.sx_max_export_pos_size = 32;
3328 		rdev->config.evergreen.sx_max_export_smx_size = 96;
3329 		rdev->config.evergreen.max_hw_contexts = 4;
3330 		rdev->config.evergreen.sq_num_cf_insts = 1;
3331 
3332 		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3333 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3334 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3335 		gb_addr_config = CEDAR_GB_ADDR_CONFIG_GOLDEN;
3336 		break;
3337 	case CHIP_SUMO:
3338 		rdev->config.evergreen.num_ses = 1;
3339 		rdev->config.evergreen.max_pipes = 4;
3340 		rdev->config.evergreen.max_tile_pipes = 4;
3341 		if (rdev->pdev->device == 0x9648)
3342 			rdev->config.evergreen.max_simds = 3;
3343 		else if ((rdev->pdev->device == 0x9647) ||
3344 			 (rdev->pdev->device == 0x964a))
3345 			rdev->config.evergreen.max_simds = 4;
3346 		else
3347 			rdev->config.evergreen.max_simds = 5;
3348 		rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3349 		rdev->config.evergreen.max_gprs = 256;
3350 		rdev->config.evergreen.max_threads = 248;
3351 		rdev->config.evergreen.max_gs_threads = 32;
3352 		rdev->config.evergreen.max_stack_entries = 256;
3353 		rdev->config.evergreen.sx_num_of_sets = 4;
3354 		rdev->config.evergreen.sx_max_export_size = 256;
3355 		rdev->config.evergreen.sx_max_export_pos_size = 64;
3356 		rdev->config.evergreen.sx_max_export_smx_size = 192;
3357 		rdev->config.evergreen.max_hw_contexts = 8;
3358 		rdev->config.evergreen.sq_num_cf_insts = 2;
3359 
3360 		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3361 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3362 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3363 		gb_addr_config = SUMO_GB_ADDR_CONFIG_GOLDEN;
3364 		break;
3365 	case CHIP_SUMO2:
3366 		rdev->config.evergreen.num_ses = 1;
3367 		rdev->config.evergreen.max_pipes = 4;
3368 		rdev->config.evergreen.max_tile_pipes = 4;
3369 		rdev->config.evergreen.max_simds = 2;
3370 		rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3371 		rdev->config.evergreen.max_gprs = 256;
3372 		rdev->config.evergreen.max_threads = 248;
3373 		rdev->config.evergreen.max_gs_threads = 32;
3374 		rdev->config.evergreen.max_stack_entries = 512;
3375 		rdev->config.evergreen.sx_num_of_sets = 4;
3376 		rdev->config.evergreen.sx_max_export_size = 256;
3377 		rdev->config.evergreen.sx_max_export_pos_size = 64;
3378 		rdev->config.evergreen.sx_max_export_smx_size = 192;
3379 		rdev->config.evergreen.max_hw_contexts = 4;
3380 		rdev->config.evergreen.sq_num_cf_insts = 2;
3381 
3382 		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3383 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3384 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3385 		gb_addr_config = SUMO2_GB_ADDR_CONFIG_GOLDEN;
3386 		break;
3387 	case CHIP_BARTS:
3388 		rdev->config.evergreen.num_ses = 2;
3389 		rdev->config.evergreen.max_pipes = 4;
3390 		rdev->config.evergreen.max_tile_pipes = 8;
3391 		rdev->config.evergreen.max_simds = 7;
3392 		rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
3393 		rdev->config.evergreen.max_gprs = 256;
3394 		rdev->config.evergreen.max_threads = 248;
3395 		rdev->config.evergreen.max_gs_threads = 32;
3396 		rdev->config.evergreen.max_stack_entries = 512;
3397 		rdev->config.evergreen.sx_num_of_sets = 4;
3398 		rdev->config.evergreen.sx_max_export_size = 256;
3399 		rdev->config.evergreen.sx_max_export_pos_size = 64;
3400 		rdev->config.evergreen.sx_max_export_smx_size = 192;
3401 		rdev->config.evergreen.max_hw_contexts = 8;
3402 		rdev->config.evergreen.sq_num_cf_insts = 2;
3403 
3404 		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3405 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3406 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3407 		gb_addr_config = BARTS_GB_ADDR_CONFIG_GOLDEN;
3408 		break;
3409 	case CHIP_TURKS:
3410 		rdev->config.evergreen.num_ses = 1;
3411 		rdev->config.evergreen.max_pipes = 4;
3412 		rdev->config.evergreen.max_tile_pipes = 4;
3413 		rdev->config.evergreen.max_simds = 6;
3414 		rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3415 		rdev->config.evergreen.max_gprs = 256;
3416 		rdev->config.evergreen.max_threads = 248;
3417 		rdev->config.evergreen.max_gs_threads = 32;
3418 		rdev->config.evergreen.max_stack_entries = 256;
3419 		rdev->config.evergreen.sx_num_of_sets = 4;
3420 		rdev->config.evergreen.sx_max_export_size = 256;
3421 		rdev->config.evergreen.sx_max_export_pos_size = 64;
3422 		rdev->config.evergreen.sx_max_export_smx_size = 192;
3423 		rdev->config.evergreen.max_hw_contexts = 8;
3424 		rdev->config.evergreen.sq_num_cf_insts = 2;
3425 
3426 		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3427 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3428 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3429 		gb_addr_config = TURKS_GB_ADDR_CONFIG_GOLDEN;
3430 		break;
3431 	case CHIP_CAICOS:
3432 		rdev->config.evergreen.num_ses = 1;
3433 		rdev->config.evergreen.max_pipes = 2;
3434 		rdev->config.evergreen.max_tile_pipes = 2;
3435 		rdev->config.evergreen.max_simds = 2;
3436 		rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3437 		rdev->config.evergreen.max_gprs = 256;
3438 		rdev->config.evergreen.max_threads = 192;
3439 		rdev->config.evergreen.max_gs_threads = 16;
3440 		rdev->config.evergreen.max_stack_entries = 256;
3441 		rdev->config.evergreen.sx_num_of_sets = 4;
3442 		rdev->config.evergreen.sx_max_export_size = 128;
3443 		rdev->config.evergreen.sx_max_export_pos_size = 32;
3444 		rdev->config.evergreen.sx_max_export_smx_size = 96;
3445 		rdev->config.evergreen.max_hw_contexts = 4;
3446 		rdev->config.evergreen.sq_num_cf_insts = 1;
3447 
3448 		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3449 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3450 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3451 		gb_addr_config = CAICOS_GB_ADDR_CONFIG_GOLDEN;
3452 		break;
3453 	}
3454 
3455 	/* Initialize HDP */
3456 	for (i = 0, j = 0; i < 32; i++, j += 0x18) {
3457 		WREG32((0x2c14 + j), 0x00000000);
3458 		WREG32((0x2c18 + j), 0x00000000);
3459 		WREG32((0x2c1c + j), 0x00000000);
3460 		WREG32((0x2c20 + j), 0x00000000);
3461 		WREG32((0x2c24 + j), 0x00000000);
3462 	}
3463 
3464 	WREG32(GRBM_CNTL, GRBM_READ_TIMEOUT(0xff));
3465 	WREG32(SRBM_INT_CNTL, 0x1);
3466 	WREG32(SRBM_INT_ACK, 0x1);
3467 
3468 	evergreen_fix_pci_max_read_req_size(rdev);
3469 
3470 	mc_shared_chmap = RREG32(MC_SHARED_CHMAP);
3471 	if ((rdev->family == CHIP_PALM) ||
3472 	    (rdev->family == CHIP_SUMO) ||
3473 	    (rdev->family == CHIP_SUMO2))
3474 		mc_arb_ramcfg = RREG32(FUS_MC_ARB_RAMCFG);
3475 	else
3476 		mc_arb_ramcfg = RREG32(MC_ARB_RAMCFG);
3477 
3478 	/* setup tiling info dword.  gb_addr_config is not adequate since it does
3479 	 * not have bank info, so create a custom tiling dword.
3480 	 * bits 3:0   num_pipes
3481 	 * bits 7:4   num_banks
3482 	 * bits 11:8  group_size
3483 	 * bits 15:12 row_size
3484 	 */
3485 	rdev->config.evergreen.tile_config = 0;
3486 	switch (rdev->config.evergreen.max_tile_pipes) {
3487 	case 1:
3488 	default:
3489 		rdev->config.evergreen.tile_config |= (0 << 0);
3490 		break;
3491 	case 2:
3492 		rdev->config.evergreen.tile_config |= (1 << 0);
3493 		break;
3494 	case 4:
3495 		rdev->config.evergreen.tile_config |= (2 << 0);
3496 		break;
3497 	case 8:
3498 		rdev->config.evergreen.tile_config |= (3 << 0);
3499 		break;
3500 	}
3501 	/* num banks is 8 on all fusion asics. 0 = 4, 1 = 8, 2 = 16 */
3502 	if (rdev->flags & RADEON_IS_IGP)
3503 		rdev->config.evergreen.tile_config |= 1 << 4;
3504 	else {
3505 		switch ((mc_arb_ramcfg & NOOFBANK_MASK) >> NOOFBANK_SHIFT) {
3506 		case 0: /* four banks */
3507 			rdev->config.evergreen.tile_config |= 0 << 4;
3508 			break;
3509 		case 1: /* eight banks */
3510 			rdev->config.evergreen.tile_config |= 1 << 4;
3511 			break;
3512 		case 2: /* sixteen banks */
3513 		default:
3514 			rdev->config.evergreen.tile_config |= 2 << 4;
3515 			break;
3516 		}
3517 	}
3518 	rdev->config.evergreen.tile_config |= 0 << 8;
3519 	rdev->config.evergreen.tile_config |=
3520 		((gb_addr_config & 0x30000000) >> 28) << 12;
3521 
3522 	if ((rdev->family >= CHIP_CEDAR) && (rdev->family <= CHIP_HEMLOCK)) {
3523 		u32 efuse_straps_4;
3524 		u32 efuse_straps_3;
3525 
3526 		efuse_straps_4 = RREG32_RCU(0x204);
3527 		efuse_straps_3 = RREG32_RCU(0x203);
3528 		tmp = (((efuse_straps_4 & 0xf) << 4) |
3529 		      ((efuse_straps_3 & 0xf0000000) >> 28));
3530 	} else {
3531 		tmp = 0;
3532 		for (i = (rdev->config.evergreen.num_ses - 1); i >= 0; i--) {
3533 			u32 rb_disable_bitmap;
3534 
3535 			WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3536 			WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3537 			rb_disable_bitmap = (RREG32(CC_RB_BACKEND_DISABLE) & 0x00ff0000) >> 16;
3538 			tmp <<= 4;
3539 			tmp |= rb_disable_bitmap;
3540 		}
3541 	}
3542 	/* enabled rb are just the one not disabled :) */
3543 	disabled_rb_mask = tmp;
3544 	tmp = 0;
3545 	for (i = 0; i < rdev->config.evergreen.max_backends; i++)
3546 		tmp |= (1 << i);
3547 	/* if all the backends are disabled, fix it up here */
3548 	if ((disabled_rb_mask & tmp) == tmp) {
3549 		for (i = 0; i < rdev->config.evergreen.max_backends; i++)
3550 			disabled_rb_mask &= ~(1 << i);
3551 	}
3552 
3553 	for (i = 0; i < rdev->config.evergreen.num_ses; i++) {
3554 		u32 simd_disable_bitmap;
3555 
3556 		WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3557 		WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3558 		simd_disable_bitmap = (RREG32(CC_GC_SHADER_PIPE_CONFIG) & 0xffff0000) >> 16;
3559 		simd_disable_bitmap |= 0xffffffff << rdev->config.evergreen.max_simds;
3560 		tmp <<= 16;
3561 		tmp |= simd_disable_bitmap;
3562 	}
3563 	rdev->config.evergreen.active_simds = hweight32(~tmp);
3564 
3565 	WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_BROADCAST_WRITES);
3566 	WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_BROADCAST_WRITES);
3567 
3568 	WREG32(GB_ADDR_CONFIG, gb_addr_config);
3569 	WREG32(DMIF_ADDR_CONFIG, gb_addr_config);
3570 	WREG32(HDP_ADDR_CONFIG, gb_addr_config);
3571 	WREG32(DMA_TILING_CONFIG, gb_addr_config);
3572 	WREG32(UVD_UDEC_ADDR_CONFIG, gb_addr_config);
3573 	WREG32(UVD_UDEC_DB_ADDR_CONFIG, gb_addr_config);
3574 	WREG32(UVD_UDEC_DBW_ADDR_CONFIG, gb_addr_config);
3575 
3576 	if ((rdev->config.evergreen.max_backends == 1) &&
3577 	    (rdev->flags & RADEON_IS_IGP)) {
3578 		if ((disabled_rb_mask & 3) == 1) {
3579 			/* RB0 disabled, RB1 enabled */
3580 			tmp = 0x11111111;
3581 		} else {
3582 			/* RB1 disabled, RB0 enabled */
3583 			tmp = 0x00000000;
3584 		}
3585 	} else {
3586 		tmp = gb_addr_config & NUM_PIPES_MASK;
3587 		tmp = r6xx_remap_render_backend(rdev, tmp, rdev->config.evergreen.max_backends,
3588 						EVERGREEN_MAX_BACKENDS, disabled_rb_mask);
3589 	}
3590 	WREG32(GB_BACKEND_MAP, tmp);
3591 
3592 	WREG32(CGTS_SYS_TCC_DISABLE, 0);
3593 	WREG32(CGTS_TCC_DISABLE, 0);
3594 	WREG32(CGTS_USER_SYS_TCC_DISABLE, 0);
3595 	WREG32(CGTS_USER_TCC_DISABLE, 0);
3596 
3597 	/* set HW defaults for 3D engine */
3598 	WREG32(CP_QUEUE_THRESHOLDS, (ROQ_IB1_START(0x16) |
3599 				     ROQ_IB2_START(0x2b)));
3600 
3601 	WREG32(CP_MEQ_THRESHOLDS, STQ_SPLIT(0x30));
3602 
3603 	WREG32(TA_CNTL_AUX, (DISABLE_CUBE_ANISO |
3604 			     SYNC_GRADIENT |
3605 			     SYNC_WALKER |
3606 			     SYNC_ALIGNER));
3607 
3608 	sx_debug_1 = RREG32(SX_DEBUG_1);
3609 	sx_debug_1 |= ENABLE_NEW_SMX_ADDRESS;
3610 	WREG32(SX_DEBUG_1, sx_debug_1);
3611 
3612 
3613 	smx_dc_ctl0 = RREG32(SMX_DC_CTL0);
3614 	smx_dc_ctl0 &= ~NUMBER_OF_SETS(0x1ff);
3615 	smx_dc_ctl0 |= NUMBER_OF_SETS(rdev->config.evergreen.sx_num_of_sets);
3616 	WREG32(SMX_DC_CTL0, smx_dc_ctl0);
3617 
3618 	if (rdev->family <= CHIP_SUMO2)
3619 		WREG32(SMX_SAR_CTL0, 0x00010000);
3620 
3621 	WREG32(SX_EXPORT_BUFFER_SIZES, (COLOR_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_size / 4) - 1) |
3622 					POSITION_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_pos_size / 4) - 1) |
3623 					SMX_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_smx_size / 4) - 1)));
3624 
3625 	WREG32(PA_SC_FIFO_SIZE, (SC_PRIM_FIFO_SIZE(rdev->config.evergreen.sc_prim_fifo_size) |
3626 				 SC_HIZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_hiz_tile_fifo_size) |
3627 				 SC_EARLYZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_earlyz_tile_fifo_size)));
3628 
3629 	WREG32(VGT_NUM_INSTANCES, 1);
3630 	WREG32(SPI_CONFIG_CNTL, 0);
3631 	WREG32(SPI_CONFIG_CNTL_1, VTX_DONE_DELAY(4));
3632 	WREG32(CP_PERFMON_CNTL, 0);
3633 
3634 	WREG32(SQ_MS_FIFO_SIZES, (CACHE_FIFO_SIZE(16 * rdev->config.evergreen.sq_num_cf_insts) |
3635 				  FETCH_FIFO_HIWATER(0x4) |
3636 				  DONE_FIFO_HIWATER(0xe0) |
3637 				  ALU_UPDATE_FIFO_HIWATER(0x8)));
3638 
3639 	sq_config = RREG32(SQ_CONFIG);
3640 	sq_config &= ~(PS_PRIO(3) |
3641 		       VS_PRIO(3) |
3642 		       GS_PRIO(3) |
3643 		       ES_PRIO(3));
3644 	sq_config |= (VC_ENABLE |
3645 		      EXPORT_SRC_C |
3646 		      PS_PRIO(0) |
3647 		      VS_PRIO(1) |
3648 		      GS_PRIO(2) |
3649 		      ES_PRIO(3));
3650 
3651 	switch (rdev->family) {
3652 	case CHIP_CEDAR:
3653 	case CHIP_PALM:
3654 	case CHIP_SUMO:
3655 	case CHIP_SUMO2:
3656 	case CHIP_CAICOS:
3657 		/* no vertex cache */
3658 		sq_config &= ~VC_ENABLE;
3659 		break;
3660 	default:
3661 		break;
3662 	}
3663 
3664 	sq_lds_resource_mgmt = RREG32(SQ_LDS_RESOURCE_MGMT);
3665 
3666 	sq_gpr_resource_mgmt_1 = NUM_PS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2))* 12 / 32);
3667 	sq_gpr_resource_mgmt_1 |= NUM_VS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 6 / 32);
3668 	sq_gpr_resource_mgmt_1 |= NUM_CLAUSE_TEMP_GPRS(4);
3669 	sq_gpr_resource_mgmt_2 = NUM_GS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
3670 	sq_gpr_resource_mgmt_2 |= NUM_ES_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
3671 	sq_gpr_resource_mgmt_3 = NUM_HS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
3672 	sq_gpr_resource_mgmt_3 |= NUM_LS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
3673 
3674 	switch (rdev->family) {
3675 	case CHIP_CEDAR:
3676 	case CHIP_PALM:
3677 	case CHIP_SUMO:
3678 	case CHIP_SUMO2:
3679 		ps_thread_count = 96;
3680 		break;
3681 	default:
3682 		ps_thread_count = 128;
3683 		break;
3684 	}
3685 
3686 	sq_thread_resource_mgmt = NUM_PS_THREADS(ps_thread_count);
3687 	sq_thread_resource_mgmt |= NUM_VS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3688 	sq_thread_resource_mgmt |= NUM_GS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3689 	sq_thread_resource_mgmt |= NUM_ES_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3690 	sq_thread_resource_mgmt_2 = NUM_HS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3691 	sq_thread_resource_mgmt_2 |= NUM_LS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3692 
3693 	sq_stack_resource_mgmt_1 = NUM_PS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3694 	sq_stack_resource_mgmt_1 |= NUM_VS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3695 	sq_stack_resource_mgmt_2 = NUM_GS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3696 	sq_stack_resource_mgmt_2 |= NUM_ES_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3697 	sq_stack_resource_mgmt_3 = NUM_HS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3698 	sq_stack_resource_mgmt_3 |= NUM_LS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3699 
3700 	WREG32(SQ_CONFIG, sq_config);
3701 	WREG32(SQ_GPR_RESOURCE_MGMT_1, sq_gpr_resource_mgmt_1);
3702 	WREG32(SQ_GPR_RESOURCE_MGMT_2, sq_gpr_resource_mgmt_2);
3703 	WREG32(SQ_GPR_RESOURCE_MGMT_3, sq_gpr_resource_mgmt_3);
3704 	WREG32(SQ_THREAD_RESOURCE_MGMT, sq_thread_resource_mgmt);
3705 	WREG32(SQ_THREAD_RESOURCE_MGMT_2, sq_thread_resource_mgmt_2);
3706 	WREG32(SQ_STACK_RESOURCE_MGMT_1, sq_stack_resource_mgmt_1);
3707 	WREG32(SQ_STACK_RESOURCE_MGMT_2, sq_stack_resource_mgmt_2);
3708 	WREG32(SQ_STACK_RESOURCE_MGMT_3, sq_stack_resource_mgmt_3);
3709 	WREG32(SQ_DYN_GPR_CNTL_PS_FLUSH_REQ, 0);
3710 	WREG32(SQ_LDS_RESOURCE_MGMT, sq_lds_resource_mgmt);
3711 
3712 	WREG32(PA_SC_FORCE_EOV_MAX_CNTS, (FORCE_EOV_MAX_CLK_CNT(4095) |
3713 					  FORCE_EOV_MAX_REZ_CNT(255)));
3714 
3715 	switch (rdev->family) {
3716 	case CHIP_CEDAR:
3717 	case CHIP_PALM:
3718 	case CHIP_SUMO:
3719 	case CHIP_SUMO2:
3720 	case CHIP_CAICOS:
3721 		vgt_cache_invalidation = CACHE_INVALIDATION(TC_ONLY);
3722 		break;
3723 	default:
3724 		vgt_cache_invalidation = CACHE_INVALIDATION(VC_AND_TC);
3725 		break;
3726 	}
3727 	vgt_cache_invalidation |= AUTO_INVLD_EN(ES_AND_GS_AUTO);
3728 	WREG32(VGT_CACHE_INVALIDATION, vgt_cache_invalidation);
3729 
3730 	WREG32(VGT_GS_VERTEX_REUSE, 16);
3731 	WREG32(PA_SU_LINE_STIPPLE_VALUE, 0);
3732 	WREG32(PA_SC_LINE_STIPPLE_STATE, 0);
3733 
3734 	WREG32(VGT_VERTEX_REUSE_BLOCK_CNTL, 14);
3735 	WREG32(VGT_OUT_DEALLOC_CNTL, 16);
3736 
3737 	WREG32(CB_PERF_CTR0_SEL_0, 0);
3738 	WREG32(CB_PERF_CTR0_SEL_1, 0);
3739 	WREG32(CB_PERF_CTR1_SEL_0, 0);
3740 	WREG32(CB_PERF_CTR1_SEL_1, 0);
3741 	WREG32(CB_PERF_CTR2_SEL_0, 0);
3742 	WREG32(CB_PERF_CTR2_SEL_1, 0);
3743 	WREG32(CB_PERF_CTR3_SEL_0, 0);
3744 	WREG32(CB_PERF_CTR3_SEL_1, 0);
3745 
3746 	/* clear render buffer base addresses */
3747 	WREG32(CB_COLOR0_BASE, 0);
3748 	WREG32(CB_COLOR1_BASE, 0);
3749 	WREG32(CB_COLOR2_BASE, 0);
3750 	WREG32(CB_COLOR3_BASE, 0);
3751 	WREG32(CB_COLOR4_BASE, 0);
3752 	WREG32(CB_COLOR5_BASE, 0);
3753 	WREG32(CB_COLOR6_BASE, 0);
3754 	WREG32(CB_COLOR7_BASE, 0);
3755 	WREG32(CB_COLOR8_BASE, 0);
3756 	WREG32(CB_COLOR9_BASE, 0);
3757 	WREG32(CB_COLOR10_BASE, 0);
3758 	WREG32(CB_COLOR11_BASE, 0);
3759 
3760 	/* set the shader const cache sizes to 0 */
3761 	for (i = SQ_ALU_CONST_BUFFER_SIZE_PS_0; i < 0x28200; i += 4)
3762 		WREG32(i, 0);
3763 	for (i = SQ_ALU_CONST_BUFFER_SIZE_HS_0; i < 0x29000; i += 4)
3764 		WREG32(i, 0);
3765 
3766 	tmp = RREG32(HDP_MISC_CNTL);
3767 	tmp |= HDP_FLUSH_INVALIDATE_CACHE;
3768 	WREG32(HDP_MISC_CNTL, tmp);
3769 
3770 	hdp_host_path_cntl = RREG32(HDP_HOST_PATH_CNTL);
3771 	WREG32(HDP_HOST_PATH_CNTL, hdp_host_path_cntl);
3772 
3773 	WREG32(PA_CL_ENHANCE, CLIP_VTX_REORDER_ENA | NUM_CLIP_SEQ(3));
3774 
3775 	udelay(50);
3776 
3777 }
3778 
/**
 * evergreen_mc_init - initialize the memory controller driver state
 *
 * @rdev: radeon_device pointer
 *
 * Look up the amount of vram, vram width, and decide how to place
 * vram and gart within the GPU's physical address space.
 * Returns 0 for success.
 */
int evergreen_mc_init(struct radeon_device *rdev)
{
	u32 tmp;
	int chansize, numchan;

	/* Get VRAM informations */
	rdev->mc.vram_is_ddr = true;
	/* fusion (IGP) parts use a different RAMCFG register */
	if ((rdev->family == CHIP_PALM) ||
	    (rdev->family == CHIP_SUMO) ||
	    (rdev->family == CHIP_SUMO2))
		tmp = RREG32(FUS_MC_ARB_RAMCFG);
	else
		tmp = RREG32(MC_ARB_RAMCFG);
	/* per-channel width in bits */
	if (tmp & CHANSIZE_OVERRIDE) {
		chansize = 16;
	} else if (tmp & CHANSIZE_MASK) {
		chansize = 64;
	} else {
		chansize = 32;
	}
	tmp = RREG32(MC_SHARED_CHMAP);
	/* number of memory channels; total bus width = numchan * chansize */
	switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
	case 0:
	default:
		numchan = 1;
		break;
	case 1:
		numchan = 2;
		break;
	case 2:
		numchan = 4;
		break;
	case 3:
		numchan = 8;
		break;
	}
	rdev->mc.vram_width = numchan * chansize;
	/* Could aper size report 0 ? */
	rdev->mc.aper_base = pci_resource_start(rdev->pdev, 0);
	rdev->mc.aper_size = pci_resource_len(rdev->pdev, 0);
	/* Setup GPU memory space */
	if ((rdev->family == CHIP_PALM) ||
	    (rdev->family == CHIP_SUMO) ||
	    (rdev->family == CHIP_SUMO2)) {
		/* size in bytes on fusion */
		rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE);
		rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE);
	} else {
		/* size in MB on evergreen/cayman/tn */
		rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL;
		rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL;
	}
	rdev->mc.visible_vram_size = rdev->mc.aper_size;
	r700_vram_gtt_location(rdev, &rdev->mc);
	radeon_update_bandwidth_info(rdev);

	return 0;
}
3837 
/**
 * evergreen_print_gpu_status_regs - dump the main GPU status registers
 *
 * @rdev: radeon_device pointer
 *
 * Log the GRBM/SRBM/CP/DMA status registers to aid debugging of GPU
 * hangs and resets. Cayman and newer parts also report the second
 * DMA engine's status register.
 */
void evergreen_print_gpu_status_regs(struct radeon_device *rdev)
{
	dev_info(rdev->dev, "  GRBM_STATUS               = 0x%08X\n",
		RREG32(GRBM_STATUS));
	dev_info(rdev->dev, "  GRBM_STATUS_SE0           = 0x%08X\n",
		RREG32(GRBM_STATUS_SE0));
	dev_info(rdev->dev, "  GRBM_STATUS_SE1           = 0x%08X\n",
		RREG32(GRBM_STATUS_SE1));
	dev_info(rdev->dev, "  SRBM_STATUS               = 0x%08X\n",
		RREG32(SRBM_STATUS));
	dev_info(rdev->dev, "  SRBM_STATUS2              = 0x%08X\n",
		RREG32(SRBM_STATUS2));
	dev_info(rdev->dev, "  R_008674_CP_STALLED_STAT1 = 0x%08X\n",
		RREG32(CP_STALLED_STAT1));
	dev_info(rdev->dev, "  R_008678_CP_STALLED_STAT2 = 0x%08X\n",
		RREG32(CP_STALLED_STAT2));
	dev_info(rdev->dev, "  R_00867C_CP_BUSY_STAT     = 0x%08X\n",
		RREG32(CP_BUSY_STAT));
	dev_info(rdev->dev, "  R_008680_CP_STAT          = 0x%08X\n",
		RREG32(CP_STAT));
	dev_info(rdev->dev, "  R_00D034_DMA_STATUS_REG   = 0x%08X\n",
		RREG32(DMA_STATUS_REG));
	if (rdev->family >= CHIP_CAYMAN) {
		/* second DMA engine lives at a 0x800 register offset */
		dev_info(rdev->dev, "  R_00D834_DMA_STATUS_REG   = 0x%08X\n",
			 RREG32(DMA_STATUS_REG + 0x800));
	}
}
3865 
3866 bool evergreen_is_display_hung(struct radeon_device *rdev)
3867 {
3868 	u32 crtc_hung = 0;
3869 	u32 crtc_status[6];
3870 	u32 i, j, tmp;
3871 
3872 	for (i = 0; i < rdev->num_crtc; i++) {
3873 		if (RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]) & EVERGREEN_CRTC_MASTER_EN) {
3874 			crtc_status[i] = RREG32(EVERGREEN_CRTC_STATUS_HV_COUNT + crtc_offsets[i]);
3875 			crtc_hung |= (1 << i);
3876 		}
3877 	}
3878 
3879 	for (j = 0; j < 10; j++) {
3880 		for (i = 0; i < rdev->num_crtc; i++) {
3881 			if (crtc_hung & (1 << i)) {
3882 				tmp = RREG32(EVERGREEN_CRTC_STATUS_HV_COUNT + crtc_offsets[i]);
3883 				if (tmp != crtc_status[i])
3884 					crtc_hung &= ~(1 << i);
3885 			}
3886 		}
3887 		if (crtc_hung == 0)
3888 			return false;
3889 		udelay(100);
3890 	}
3891 
3892 	return true;
3893 }
3894 
/**
 * evergreen_gpu_check_soft_reset - determine which GPU blocks are hung
 *
 * @rdev: radeon_device pointer
 *
 * Read the various busy/status registers and build a mask of
 * RADEON_RESET_* flags for the blocks that appear hung and need a
 * soft reset. Returns 0 if the GPU looks idle.
 */
u32 evergreen_gpu_check_soft_reset(struct radeon_device *rdev)
{
	u32 reset_mask = 0;
	u32 tmp;

	/* GRBM_STATUS */
	tmp = RREG32(GRBM_STATUS);
	if (tmp & (PA_BUSY | SC_BUSY |
		   SH_BUSY | SX_BUSY |
		   TA_BUSY | VGT_BUSY |
		   DB_BUSY | CB_BUSY |
		   SPI_BUSY | VGT_BUSY_NO_DMA))
		reset_mask |= RADEON_RESET_GFX;

	if (tmp & (CF_RQ_PENDING | PF_RQ_PENDING |
		   CP_BUSY | CP_COHERENCY_BUSY))
		reset_mask |= RADEON_RESET_CP;

	if (tmp & GRBM_EE_BUSY)
		reset_mask |= RADEON_RESET_GRBM | RADEON_RESET_GFX | RADEON_RESET_CP;

	/* DMA_STATUS_REG */
	tmp = RREG32(DMA_STATUS_REG);
	if (!(tmp & DMA_IDLE))
		reset_mask |= RADEON_RESET_DMA;

	/* SRBM_STATUS2 */
	tmp = RREG32(SRBM_STATUS2);
	if (tmp & DMA_BUSY)
		reset_mask |= RADEON_RESET_DMA;

	/* SRBM_STATUS */
	tmp = RREG32(SRBM_STATUS);
	if (tmp & (RLC_RQ_PENDING | RLC_BUSY))
		reset_mask |= RADEON_RESET_RLC;

	if (tmp & IH_BUSY)
		reset_mask |= RADEON_RESET_IH;

	if (tmp & SEM_BUSY)
		reset_mask |= RADEON_RESET_SEM;

	if (tmp & GRBM_RQ_PENDING)
		reset_mask |= RADEON_RESET_GRBM;

	if (tmp & VMC_BUSY)
		reset_mask |= RADEON_RESET_VMC;

	if (tmp & (MCB_BUSY | MCB_NON_DISPLAY_BUSY |
		   MCC_BUSY | MCD_BUSY))
		reset_mask |= RADEON_RESET_MC;

	if (evergreen_is_display_hung(rdev))
		reset_mask |= RADEON_RESET_DISPLAY;

	/* VM_L2_STATUS */
	tmp = RREG32(VM_L2_STATUS);
	if (tmp & L2_BUSY)
		reset_mask |= RADEON_RESET_VMC;

	/* Skip MC reset as it's mostly likely not hung, just busy */
	if (reset_mask & RADEON_RESET_MC) {
		DRM_DEBUG("MC busy: 0x%08X, clearing.\n", reset_mask);
		reset_mask &= ~RADEON_RESET_MC;
	}

	return reset_mask;
}
3963 
/**
 * evergreen_gpu_soft_reset - soft reset the requested GPU blocks
 *
 * @rdev: radeon_device pointer
 * @reset_mask: mask of RADEON_RESET_* flags selecting the blocks to reset
 *
 * Halt the CP and DMA engines, stop memory-controller client access,
 * pulse the GRBM/SRBM soft-reset bits for the selected blocks, then
 * restore MC access. Status registers are dumped before and after to
 * aid debugging.
 */
static void evergreen_gpu_soft_reset(struct radeon_device *rdev, u32 reset_mask)
{
	struct evergreen_mc_save save;
	u32 grbm_soft_reset = 0, srbm_soft_reset = 0;
	u32 tmp;

	if (reset_mask == 0)
		return;

	dev_info(rdev->dev, "GPU softreset: 0x%08X\n", reset_mask);

	evergreen_print_gpu_status_regs(rdev);

	/* Disable CP parsing/prefetching */
	WREG32(CP_ME_CNTL, CP_ME_HALT | CP_PFP_HALT);

	if (reset_mask & RADEON_RESET_DMA) {
		/* Disable DMA */
		tmp = RREG32(DMA_RB_CNTL);
		tmp &= ~DMA_RB_ENABLE;
		WREG32(DMA_RB_CNTL, tmp);
	}

	udelay(50);

	/* quiesce memory access before touching the reset bits */
	evergreen_mc_stop(rdev, &save);
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
	}

	/* translate the reset mask into GRBM soft-reset bits */
	if (reset_mask & (RADEON_RESET_GFX | RADEON_RESET_COMPUTE)) {
		grbm_soft_reset |= SOFT_RESET_DB |
			SOFT_RESET_CB |
			SOFT_RESET_PA |
			SOFT_RESET_SC |
			SOFT_RESET_SPI |
			SOFT_RESET_SX |
			SOFT_RESET_SH |
			SOFT_RESET_TC |
			SOFT_RESET_TA |
			SOFT_RESET_VC |
			SOFT_RESET_VGT;
	}

	if (reset_mask & RADEON_RESET_CP) {
		grbm_soft_reset |= SOFT_RESET_CP |
			SOFT_RESET_VGT;

		srbm_soft_reset |= SOFT_RESET_GRBM;
	}

	/* translate the reset mask into SRBM soft-reset bits */
	if (reset_mask & RADEON_RESET_DMA)
		srbm_soft_reset |= SOFT_RESET_DMA;

	if (reset_mask & RADEON_RESET_DISPLAY)
		srbm_soft_reset |= SOFT_RESET_DC;

	if (reset_mask & RADEON_RESET_RLC)
		srbm_soft_reset |= SOFT_RESET_RLC;

	if (reset_mask & RADEON_RESET_SEM)
		srbm_soft_reset |= SOFT_RESET_SEM;

	if (reset_mask & RADEON_RESET_IH)
		srbm_soft_reset |= SOFT_RESET_IH;

	if (reset_mask & RADEON_RESET_GRBM)
		srbm_soft_reset |= SOFT_RESET_GRBM;

	if (reset_mask & RADEON_RESET_VMC)
		srbm_soft_reset |= SOFT_RESET_VMC;

	/* MC reset is only attempted on discrete parts */
	if (!(rdev->flags & RADEON_IS_IGP)) {
		if (reset_mask & RADEON_RESET_MC)
			srbm_soft_reset |= SOFT_RESET_MC;
	}

	/* assert the reset bits, hold ~50us, then release them */
	if (grbm_soft_reset) {
		tmp = RREG32(GRBM_SOFT_RESET);
		tmp |= grbm_soft_reset;
		dev_info(rdev->dev, "GRBM_SOFT_RESET=0x%08X\n", tmp);
		WREG32(GRBM_SOFT_RESET, tmp);
		tmp = RREG32(GRBM_SOFT_RESET);

		udelay(50);

		tmp &= ~grbm_soft_reset;
		WREG32(GRBM_SOFT_RESET, tmp);
		tmp = RREG32(GRBM_SOFT_RESET);
	}

	if (srbm_soft_reset) {
		tmp = RREG32(SRBM_SOFT_RESET);
		tmp |= srbm_soft_reset;
		dev_info(rdev->dev, "SRBM_SOFT_RESET=0x%08X\n", tmp);
		WREG32(SRBM_SOFT_RESET, tmp);
		tmp = RREG32(SRBM_SOFT_RESET);

		udelay(50);

		tmp &= ~srbm_soft_reset;
		WREG32(SRBM_SOFT_RESET, tmp);
		tmp = RREG32(SRBM_SOFT_RESET);
	}

	/* Wait a little for things to settle down */
	udelay(50);

	evergreen_mc_resume(rdev, &save);
	udelay(50);

	evergreen_print_gpu_status_regs(rdev);
}
4077 
/**
 * evergreen_gpu_pci_config_reset - reset the asic via pci config space
 *
 * @rdev: radeon_device pointer
 *
 * Quiesce the CP, DMA and RLC, put the clocks in bypass mode, disable
 * bus mastering and memory access, then reset the whole asic through
 * the pci config reset mechanism and wait for it to come back out of
 * reset (CONFIG_MEMSIZE reads back 0xffffffff while in reset).
 */
void evergreen_gpu_pci_config_reset(struct radeon_device *rdev)
{
	struct evergreen_mc_save save;
	u32 tmp, i;

	dev_info(rdev->dev, "GPU pci config reset\n");

	/* disable dpm? */

	/* Disable CP parsing/prefetching */
	WREG32(CP_ME_CNTL, CP_ME_HALT | CP_PFP_HALT);
	udelay(50);
	/* Disable DMA */
	tmp = RREG32(DMA_RB_CNTL);
	tmp &= ~DMA_RB_ENABLE;
	WREG32(DMA_RB_CNTL, tmp);
	/* XXX other engines? */

	/* halt the rlc */
	r600_rlc_stop(rdev);

	udelay(50);

	/* set mclk/sclk to bypass */
	rv770_set_clk_bypass_mode(rdev);
	/* disable BM */
	pci_clear_master(rdev->pdev);
	/* disable mem access */
	evergreen_mc_stop(rdev, &save);
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timed out !\n");
	}
	/* reset */
	radeon_pci_config_reset(rdev);
	/* wait for asic to come out of reset */
	for (i = 0; i < rdev->usec_timeout; i++) {
		if (RREG32(CONFIG_MEMSIZE) != 0xffffffff)
			break;
		udelay(1);
	}
}
4119 
4120 int evergreen_asic_reset(struct radeon_device *rdev, bool hard)
4121 {
4122 	u32 reset_mask;
4123 
4124 	if (hard) {
4125 		evergreen_gpu_pci_config_reset(rdev);
4126 		return 0;
4127 	}
4128 
4129 	reset_mask = evergreen_gpu_check_soft_reset(rdev);
4130 
4131 	if (reset_mask)
4132 		r600_set_bios_scratch_engine_hung(rdev, true);
4133 
4134 	/* try soft reset */
4135 	evergreen_gpu_soft_reset(rdev, reset_mask);
4136 
4137 	reset_mask = evergreen_gpu_check_soft_reset(rdev);
4138 
4139 	/* try pci config reset */
4140 	if (reset_mask && radeon_hard_reset)
4141 		evergreen_gpu_pci_config_reset(rdev);
4142 
4143 	reset_mask = evergreen_gpu_check_soft_reset(rdev);
4144 
4145 	if (!reset_mask)
4146 		r600_set_bios_scratch_engine_hung(rdev, false);
4147 
4148 	return 0;
4149 }
4150 
4151 /**
4152  * evergreen_gfx_is_lockup - Check if the GFX engine is locked up
4153  *
4154  * @rdev: radeon_device pointer
4155  * @ring: radeon_ring structure holding ring information
4156  *
4157  * Check if the GFX engine is locked up.
4158  * Returns true if the engine appears to be locked up, false if not.
4159  */
4160 bool evergreen_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
4161 {
4162 	u32 reset_mask = evergreen_gpu_check_soft_reset(rdev);
4163 
4164 	if (!(reset_mask & (RADEON_RESET_GFX |
4165 			    RADEON_RESET_COMPUTE |
4166 			    RADEON_RESET_CP))) {
4167 		radeon_ring_lockup_update(rdev, ring);
4168 		return false;
4169 	}
4170 	return radeon_ring_test_lockup(rdev, ring);
4171 }
4172 
4173 /*
4174  * RLC
4175  */
4176 #define RLC_SAVE_RESTORE_LIST_END_MARKER    0x00000000
4177 #define RLC_CLEAR_STATE_END_MARKER          0x00000001
4178 
/**
 * sumo_rlc_fini - tear down the RLC buffer objects
 *
 * @rdev: radeon_device pointer
 *
 * Unpin, unreference and NULL out the save/restore, clear state and
 * CP table buffer objects created by sumo_rlc_init(). Safe to call
 * with any subset of the objects allocated.
 */
void sumo_rlc_fini(struct radeon_device *rdev)
{
	int r;

	/* save restore block */
	if (rdev->rlc.save_restore_obj) {
		r = radeon_bo_reserve(rdev->rlc.save_restore_obj, false);
		if (unlikely(r != 0))
			dev_warn(rdev->dev, "(%d) reserve RLC sr bo failed\n", r);
		radeon_bo_unpin(rdev->rlc.save_restore_obj);
		radeon_bo_unreserve(rdev->rlc.save_restore_obj);

		radeon_bo_unref(&rdev->rlc.save_restore_obj);
		rdev->rlc.save_restore_obj = NULL;
	}

	/* clear state block */
	if (rdev->rlc.clear_state_obj) {
		r = radeon_bo_reserve(rdev->rlc.clear_state_obj, false);
		if (unlikely(r != 0))
			dev_warn(rdev->dev, "(%d) reserve RLC c bo failed\n", r);
		radeon_bo_unpin(rdev->rlc.clear_state_obj);
		radeon_bo_unreserve(rdev->rlc.clear_state_obj);

		radeon_bo_unref(&rdev->rlc.clear_state_obj);
		rdev->rlc.clear_state_obj = NULL;
	}

	/* CP table block */
	if (rdev->rlc.cp_table_obj) {
		r = radeon_bo_reserve(rdev->rlc.cp_table_obj, false);
		if (unlikely(r != 0))
			dev_warn(rdev->dev, "(%d) reserve RLC cp table bo failed\n", r);
		radeon_bo_unpin(rdev->rlc.cp_table_obj);
		radeon_bo_unreserve(rdev->rlc.cp_table_obj);

		radeon_bo_unref(&rdev->rlc.cp_table_obj);
		rdev->rlc.cp_table_obj = NULL;
	}
}
4219 
4220 #define CP_ME_TABLE_SIZE    96
4221 
4222 #pragma GCC diagnostic push
4223 #pragma GCC diagnostic ignored "-Wcast-qual"
/**
 * sumo_rlc_init - allocate and fill the RLC buffer objects
 *
 * @rdev: radeon_device pointer
 *
 * Create, pin, map and fill the RLC save/restore buffer (from
 * rdev->rlc.reg_list), the clear state buffer (from rdev->rlc.cs_data)
 * and, when rdev->rlc.cp_table_size is set, the CP power-gating table.
 * The buffer layout differs per asic generation (evergreen/NI vs SI
 * vs CIK); this helper is shared by all of them.
 * Returns 0 on success; on failure everything allocated so far is
 * released via sumo_rlc_fini() and a negative error code is returned.
 */
int sumo_rlc_init(struct radeon_device *rdev)
{
	const u32 *src_ptr;
	volatile u32 *dst_ptr;
	u32 dws, data, i, j, k, reg_num;
	u32 reg_list_num, reg_list_hdr_blk_index, reg_list_blk_index = 0;
	u64 reg_list_mc_addr;
	const struct cs_section_def *cs_data;
	int r;

	src_ptr = rdev->rlc.reg_list;
	dws = rdev->rlc.reg_list_size;
	if (rdev->family >= CHIP_BONAIRE) {
		/* CIK parts need additional dwords in the sr buffer */
		dws += (5 * 16) + 48 + 48 + 64;
	}
	cs_data = rdev->rlc.cs_data;

	if (src_ptr) {
		/* save restore block */
		if (rdev->rlc.save_restore_obj == NULL) {
			r = radeon_bo_create(rdev, dws * 4, PAGE_SIZE, true,
					     RADEON_GEM_DOMAIN_VRAM, 0, NULL,
					     NULL, &rdev->rlc.save_restore_obj);
			if (r) {
				dev_warn(rdev->dev, "(%d) create RLC sr bo failed\n", r);
				return r;
			}
		}

		r = radeon_bo_reserve(rdev->rlc.save_restore_obj, false);
		if (unlikely(r != 0)) {
			sumo_rlc_fini(rdev);
			return r;
		}
		r = radeon_bo_pin(rdev->rlc.save_restore_obj, RADEON_GEM_DOMAIN_VRAM,
				  &rdev->rlc.save_restore_gpu_addr);
		if (r) {
			radeon_bo_unreserve(rdev->rlc.save_restore_obj);
			dev_warn(rdev->dev, "(%d) pin RLC sr bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}

		r = radeon_bo_kmap(rdev->rlc.save_restore_obj, (void **)&rdev->rlc.sr_ptr);
		if (r) {
			dev_warn(rdev->dev, "(%d) map RLC sr bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}
		/* write the sr buffer */
		dst_ptr = rdev->rlc.sr_ptr;
		if (rdev->family >= CHIP_TAHITI) {
			/* SI: straight little-endian copy of the register list */
			for (i = 0; i < rdev->rlc.reg_list_size; i++)
				dst_ptr[i] = cpu_to_le32(src_ptr[i]);
		} else {
			/* ON/LN/TN */
			/* format:
			 * dw0: (reg2 << 16) | reg1
			 * dw1: reg1 save space
			 * dw2: reg2 save space
			 */
			for (i = 0; i < dws; i++) {
				data = src_ptr[i] >> 2;
				i++;
				if (i < dws)
					data |= (src_ptr[i] >> 2) << 16;
				j = (((i - 1) * 3) / 2);
				dst_ptr[j] = cpu_to_le32(data);
			}
			j = ((i * 3) / 2);
			dst_ptr[j] = cpu_to_le32(RLC_SAVE_RESTORE_LIST_END_MARKER);
		}
		radeon_bo_kunmap(rdev->rlc.save_restore_obj);
		radeon_bo_unreserve(rdev->rlc.save_restore_obj);
	}

	if (cs_data) {
		/* clear state block */
		if (rdev->family >= CHIP_BONAIRE) {
			rdev->rlc.clear_state_size = dws = cik_get_csb_size(rdev);
		} else if (rdev->family >= CHIP_TAHITI) {
			/* SI puts a 256-byte header in front of the csb data */
			rdev->rlc.clear_state_size = si_get_csb_size(rdev);
			dws = rdev->rlc.clear_state_size + (256 / 4);
		} else {
			/* evergreen/NI: count the registers in every extent to
			 * size the header block plus the register data block
			 */
			reg_list_num = 0;
			dws = 0;
			for (i = 0; cs_data[i].section != NULL; i++) {
				for (j = 0; cs_data[i].section[j].extent != NULL; j++) {
					reg_list_num++;
					dws += cs_data[i].section[j].reg_count;
				}
			}
			reg_list_blk_index = (3 * reg_list_num + 2);
			dws += reg_list_blk_index;
			rdev->rlc.clear_state_size = dws;
		}

		if (rdev->rlc.clear_state_obj == NULL) {
			r = radeon_bo_create(rdev, dws * 4, PAGE_SIZE, true,
					     RADEON_GEM_DOMAIN_VRAM, 0, NULL,
					     NULL, &rdev->rlc.clear_state_obj);
			if (r) {
				dev_warn(rdev->dev, "(%d) create RLC c bo failed\n", r);
				sumo_rlc_fini(rdev);
				return r;
			}
		}
		r = radeon_bo_reserve(rdev->rlc.clear_state_obj, false);
		if (unlikely(r != 0)) {
			sumo_rlc_fini(rdev);
			return r;
		}
		r = radeon_bo_pin(rdev->rlc.clear_state_obj, RADEON_GEM_DOMAIN_VRAM,
				  &rdev->rlc.clear_state_gpu_addr);
		if (r) {
			radeon_bo_unreserve(rdev->rlc.clear_state_obj);
			dev_warn(rdev->dev, "(%d) pin RLC c bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}

		r = radeon_bo_kmap(rdev->rlc.clear_state_obj, (void **)&rdev->rlc.cs_ptr);
		if (r) {
			dev_warn(rdev->dev, "(%d) map RLC c bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}
		/* set up the cs buffer */
		dst_ptr = rdev->rlc.cs_ptr;
		if (rdev->family >= CHIP_BONAIRE) {
			cik_get_csb_buffer(rdev, dst_ptr);
		} else if (rdev->family >= CHIP_TAHITI) {
			/* header: gpu address and size of the csb proper,
			 * which starts 256 bytes into the buffer
			 */
			reg_list_mc_addr = rdev->rlc.clear_state_gpu_addr + 256;
			dst_ptr[0] = cpu_to_le32(upper_32_bits(reg_list_mc_addr));
			dst_ptr[1] = cpu_to_le32(lower_32_bits(reg_list_mc_addr));
			dst_ptr[2] = cpu_to_le32(rdev->rlc.clear_state_size);
			si_get_csb_buffer(rdev, &dst_ptr[(256/4)]);
		} else {
			reg_list_hdr_blk_index = 0;
			reg_list_mc_addr = rdev->rlc.clear_state_gpu_addr + (reg_list_blk_index * 4);
			data = upper_32_bits(reg_list_mc_addr);
			dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
			reg_list_hdr_blk_index++;
			/* one 3-dword header (buffer address, register offset,
			 * length) per extent, followed by the register values
			 * packed at reg_list_blk_index
			 */
			for (i = 0; cs_data[i].section != NULL; i++) {
				for (j = 0; cs_data[i].section[j].extent != NULL; j++) {
					reg_num = cs_data[i].section[j].reg_count;
					data = reg_list_mc_addr & 0xffffffff;
					dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
					reg_list_hdr_blk_index++;

					data = (cs_data[i].section[j].reg_index * 4) & 0xffffffff;
					dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
					reg_list_hdr_blk_index++;

					data = 0x08000000 | (reg_num * 4);
					dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
					reg_list_hdr_blk_index++;

					for (k = 0; k < reg_num; k++) {
						data = cs_data[i].section[j].extent[k];
						dst_ptr[reg_list_blk_index + k] = cpu_to_le32(data);
					}
					reg_list_mc_addr += reg_num * 4;
					reg_list_blk_index += reg_num;
				}
			}
			dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(RLC_CLEAR_STATE_END_MARKER);
		}
		radeon_bo_kunmap(rdev->rlc.clear_state_obj);
		radeon_bo_unreserve(rdev->rlc.clear_state_obj);
	}

	if (rdev->rlc.cp_table_size) {
		/* CP power-gating table, filled in by cik_init_cp_pg_table() */
		if (rdev->rlc.cp_table_obj == NULL) {
			r = radeon_bo_create(rdev, rdev->rlc.cp_table_size,
					     PAGE_SIZE, true,
					     RADEON_GEM_DOMAIN_VRAM, 0, NULL,
					     NULL, &rdev->rlc.cp_table_obj);
			if (r) {
				dev_warn(rdev->dev, "(%d) create RLC cp table bo failed\n", r);
				sumo_rlc_fini(rdev);
				return r;
			}
		}

		r = radeon_bo_reserve(rdev->rlc.cp_table_obj, false);
		if (unlikely(r != 0)) {
			dev_warn(rdev->dev, "(%d) reserve RLC cp table bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}
		r = radeon_bo_pin(rdev->rlc.cp_table_obj, RADEON_GEM_DOMAIN_VRAM,
				  &rdev->rlc.cp_table_gpu_addr);
		if (r) {
			radeon_bo_unreserve(rdev->rlc.cp_table_obj);
			dev_warn(rdev->dev, "(%d) pin RLC cp_table bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}
		r = radeon_bo_kmap(rdev->rlc.cp_table_obj, (void **)&rdev->rlc.cp_table_ptr);
		if (r) {
			dev_warn(rdev->dev, "(%d) map RLC cp table bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}

		cik_init_cp_pg_table(rdev);

		radeon_bo_kunmap(rdev->rlc.cp_table_obj);
		radeon_bo_unreserve(rdev->rlc.cp_table_obj);

	}

	return 0;
}
4440 #pragma GCC diagnostic pop
4441 
4442 static void evergreen_rlc_start(struct radeon_device *rdev)
4443 {
4444 	u32 mask = RLC_ENABLE;
4445 
4446 	if (rdev->flags & RADEON_IS_IGP) {
4447 		mask |= GFX_POWER_GATING_ENABLE | GFX_POWER_GATING_SRC;
4448 	}
4449 
4450 	WREG32(RLC_CNTL, mask);
4451 }
4452 
/**
 * evergreen_rlc_resume - load the RLC ucode and start the RLC
 *
 * @rdev: radeon_device pointer
 *
 * Stop the RLC, program the RLC base/control registers for the asic
 * family, upload the RLC microcode one dword at a time and finally
 * re-enable the RLC via evergreen_rlc_start().
 * Returns 0 on success, -EINVAL if no RLC firmware has been loaded.
 */
int evergreen_rlc_resume(struct radeon_device *rdev)
{
	u32 i;
	const __be32 *fw_data;

	if (!rdev->rlc_fw)
		return -EINVAL;

	r600_rlc_stop(rdev);

	WREG32(RLC_HB_CNTL, 0);

	if (rdev->flags & RADEON_IS_IGP) {
		if (rdev->family == CHIP_ARUBA) {
			u32 always_on_bitmap =
				3 | (3 << (16 * rdev->config.cayman.max_shader_engines));
			/* find out the number of active simds */
			u32 tmp = (RREG32(CC_GC_SHADER_PIPE_CONFIG) & 0xffff0000) >> 16;
			tmp |= 0xffffffff << rdev->config.cayman.max_simds_per_se;
			tmp = hweight32(~tmp);
			/* only program the load balancer when every simd is active */
			if (tmp == rdev->config.cayman.max_simds_per_se) {
				WREG32(TN_RLC_LB_ALWAYS_ACTIVE_SIMD_MASK, always_on_bitmap);
				WREG32(TN_RLC_LB_PARAMS, 0x00601004);
				WREG32(TN_RLC_LB_INIT_SIMD_MASK, 0xffffffff);
				WREG32(TN_RLC_LB_CNTR_INIT, 0x00000000);
				WREG32(TN_RLC_LB_CNTR_MAX, 0x00002000);
			}
		} else {
			WREG32(RLC_HB_WPTR_LSB_ADDR, 0);
			WREG32(RLC_HB_WPTR_MSB_ADDR, 0);
		}
		WREG32(TN_RLC_SAVE_AND_RESTORE_BASE, rdev->rlc.save_restore_gpu_addr >> 8);
		WREG32(TN_RLC_CLEAR_STATE_RESTORE_BASE, rdev->rlc.clear_state_gpu_addr >> 8);
	} else {
		WREG32(RLC_HB_BASE, 0);
		WREG32(RLC_HB_RPTR, 0);
		WREG32(RLC_HB_WPTR, 0);
		WREG32(RLC_HB_WPTR_LSB_ADDR, 0);
		WREG32(RLC_HB_WPTR_MSB_ADDR, 0);
	}
	WREG32(RLC_MC_CNTL, 0);
	WREG32(RLC_UCODE_CNTL, 0);

	/* ucode is stored big-endian; size depends on the asic family */
	fw_data = (const __be32 *)rdev->rlc_fw->data;
	if (rdev->family >= CHIP_ARUBA) {
		for (i = 0; i < ARUBA_RLC_UCODE_SIZE; i++) {
			WREG32(RLC_UCODE_ADDR, i);
			WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
		}
	} else if (rdev->family >= CHIP_CAYMAN) {
		for (i = 0; i < CAYMAN_RLC_UCODE_SIZE; i++) {
			WREG32(RLC_UCODE_ADDR, i);
			WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
		}
	} else {
		for (i = 0; i < EVERGREEN_RLC_UCODE_SIZE; i++) {
			WREG32(RLC_UCODE_ADDR, i);
			WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
		}
	}
	WREG32(RLC_UCODE_ADDR, 0);

	evergreen_rlc_start(rdev);

	return 0;
}
4519 
4520 /* Interrupts */
4521 
4522 u32 evergreen_get_vblank_counter(struct radeon_device *rdev, int crtc)
4523 {
4524 	if (crtc >= rdev->num_crtc)
4525 		return 0;
4526 	else
4527 		return RREG32(CRTC_STATUS_FRAME_COUNT + crtc_offsets[crtc]);
4528 }
4529 
/**
 * evergreen_disable_interrupt_state - mask all interrupt sources
 *
 * @rdev: radeon_device pointer
 *
 * Clear the enable bits for the CP, DMA, GRBM/SRBM, per-CRTC, GRPH,
 * DAC auto-detect and HPD interrupt sources. The HPD polarity bits
 * are preserved while the rest of those registers is cleared.
 */
void evergreen_disable_interrupt_state(struct radeon_device *rdev)
{
	u32 tmp;

	if (rdev->family >= CHIP_CAYMAN) {
		/* cayman has three CP rings and a second DMA engine */
		cayman_cp_int_cntl_setup(rdev, 0,
					 CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
		cayman_cp_int_cntl_setup(rdev, 1, 0);
		cayman_cp_int_cntl_setup(rdev, 2, 0);
		tmp = RREG32(CAYMAN_DMA1_CNTL) & ~TRAP_ENABLE;
		WREG32(CAYMAN_DMA1_CNTL, tmp);
	} else
		WREG32(CP_INT_CNTL, CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
	tmp = RREG32(DMA_CNTL) & ~TRAP_ENABLE;
	WREG32(DMA_CNTL, tmp);
	WREG32(GRBM_INT_CNTL, 0);
	WREG32(SRBM_INT_CNTL, 0);
	/* per-CRTC interrupt masks; only the CRTCs the asic has */
	WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
	WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
	if (rdev->num_crtc >= 4) {
		WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
		WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
	}
	if (rdev->num_crtc >= 6) {
		WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
		WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
	}

	/* per-CRTC GRPH interrupt controls */
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
	if (rdev->num_crtc >= 4) {
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
	}
	if (rdev->num_crtc >= 6) {
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
	}

	/* only one DAC on DCE5 */
	if (!ASIC_IS_DCE5(rdev))
		WREG32(DACA_AUTODETECT_INT_CONTROL, 0);
	WREG32(DACB_AUTODETECT_INT_CONTROL, 0);

	/* keep only the HPD polarity bits, clearing the enables */
	tmp = RREG32(DC_HPD1_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD1_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD2_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD2_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD3_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD3_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD4_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD4_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD5_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD5_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD6_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD6_INT_CONTROL, tmp);

}
4588 
/**
 * evergreen_irq_set - program interrupt enables from the driver's irq state
 * @rdev: radeon device
 *
 * Builds enable masks for every source tracked in rdev->irq (CP rings,
 * DMA engines, vblank, hotplug, HDMI audio, thermal), then commits them
 * to the hardware in one pass at the end.  If the IH is disabled, all
 * sources are forced off instead.
 *
 * Returns 0 on success, -EINVAL when no irq handler is installed.
 */
int evergreen_irq_set(struct radeon_device *rdev)
{
	u32 cp_int_cntl = CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE;
	u32 cp_int_cntl1 = 0, cp_int_cntl2 = 0;
	u32 crtc1 = 0, crtc2 = 0, crtc3 = 0, crtc4 = 0, crtc5 = 0, crtc6 = 0;
	u32 hpd1, hpd2, hpd3, hpd4, hpd5, hpd6;
	u32 grbm_int_cntl = 0;
	u32 afmt1 = 0, afmt2 = 0, afmt3 = 0, afmt4 = 0, afmt5 = 0, afmt6 = 0;
	u32 dma_cntl, dma_cntl1 = 0;
	u32 thermal_int = 0;

	if (!rdev->irq.installed) {
		WARN(1, "Can't enable IRQ/MSI because no handler is installed\n");
		return -EINVAL;
	}
	/* don't enable anything if the ih is disabled */
	if (!rdev->ih.enabled) {
		r600_disable_interrupts(rdev);
		/* force the active interrupt state to all disabled */
		evergreen_disable_interrupt_state(rdev);
		return 0;
	}

	/* start from the current HPD control values with both enable bits
	 * cleared; they are OR-ed back in below for connectors in use */
	hpd1 = RREG32(DC_HPD1_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
	hpd2 = RREG32(DC_HPD2_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
	hpd3 = RREG32(DC_HPD3_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
	hpd4 = RREG32(DC_HPD4_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
	hpd5 = RREG32(DC_HPD5_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
	hpd6 = RREG32(DC_HPD6_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
	/* the thermal interrupt lives in a different register on Aruba (TN) */
	if (rdev->family == CHIP_ARUBA)
		thermal_int = RREG32(TN_CG_THERMAL_INT_CTRL) &
			~(THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW);
	else
		thermal_int = RREG32(CG_THERMAL_INT) &
			~(THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW);

	/* audio format write-trigger masks, likewise cleared by default */
	afmt1 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
	afmt2 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
	afmt3 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
	afmt4 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
	afmt5 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
	afmt6 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;

	dma_cntl = RREG32(DMA_CNTL) & ~TRAP_ENABLE;

	/* CP interrupts: Cayman+ exposes three rings, older parts one */
	if (rdev->family >= CHIP_CAYMAN) {
		/* enable CP interrupts on all rings */
		if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
			DRM_DEBUG("evergreen_irq_set: sw int gfx\n");
			cp_int_cntl |= TIME_STAMP_INT_ENABLE;
		}
		if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP1_INDEX])) {
			DRM_DEBUG("evergreen_irq_set: sw int cp1\n");
			cp_int_cntl1 |= TIME_STAMP_INT_ENABLE;
		}
		if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP2_INDEX])) {
			DRM_DEBUG("evergreen_irq_set: sw int cp2\n");
			cp_int_cntl2 |= TIME_STAMP_INT_ENABLE;
		}
	} else {
		if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
			DRM_DEBUG("evergreen_irq_set: sw int gfx\n");
			cp_int_cntl |= RB_INT_ENABLE;
			cp_int_cntl |= TIME_STAMP_INT_ENABLE;
		}
	}

	if (atomic_read(&rdev->irq.ring_int[R600_RING_TYPE_DMA_INDEX])) {
		DRM_DEBUG("r600_irq_set: sw int dma\n");
		dma_cntl |= TRAP_ENABLE;
	}

	/* second DMA engine only exists on Cayman+ */
	if (rdev->family >= CHIP_CAYMAN) {
		dma_cntl1 = RREG32(CAYMAN_DMA1_CNTL) & ~TRAP_ENABLE;
		if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_DMA1_INDEX])) {
			DRM_DEBUG("r600_irq_set: sw int dma1\n");
			dma_cntl1 |= TRAP_ENABLE;
		}
	}

	if (rdev->irq.dpm_thermal) {
		DRM_DEBUG("dpm thermal\n");
		thermal_int |= THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW;
	}

	/* vblank: enabled for either the generic vblank path or pflip */
	if (rdev->irq.crtc_vblank_int[0] ||
	    atomic_read(&rdev->irq.pflip[0])) {
		DRM_DEBUG("evergreen_irq_set: vblank 0\n");
		crtc1 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[1] ||
	    atomic_read(&rdev->irq.pflip[1])) {
		DRM_DEBUG("evergreen_irq_set: vblank 1\n");
		crtc2 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[2] ||
	    atomic_read(&rdev->irq.pflip[2])) {
		DRM_DEBUG("evergreen_irq_set: vblank 2\n");
		crtc3 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[3] ||
	    atomic_read(&rdev->irq.pflip[3])) {
		DRM_DEBUG("evergreen_irq_set: vblank 3\n");
		crtc4 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[4] ||
	    atomic_read(&rdev->irq.pflip[4])) {
		DRM_DEBUG("evergreen_irq_set: vblank 4\n");
		crtc5 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[5] ||
	    atomic_read(&rdev->irq.pflip[5])) {
		DRM_DEBUG("evergreen_irq_set: vblank 5\n");
		crtc6 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.hpd[0]) {
		DRM_DEBUG("evergreen_irq_set: hpd 1\n");
		hpd1 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
	}
	if (rdev->irq.hpd[1]) {
		DRM_DEBUG("evergreen_irq_set: hpd 2\n");
		hpd2 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
	}
	if (rdev->irq.hpd[2]) {
		DRM_DEBUG("evergreen_irq_set: hpd 3\n");
		hpd3 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
	}
	if (rdev->irq.hpd[3]) {
		DRM_DEBUG("evergreen_irq_set: hpd 4\n");
		hpd4 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
	}
	if (rdev->irq.hpd[4]) {
		DRM_DEBUG("evergreen_irq_set: hpd 5\n");
		hpd5 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
	}
	if (rdev->irq.hpd[5]) {
		DRM_DEBUG("evergreen_irq_set: hpd 6\n");
		hpd6 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
	}
	if (rdev->irq.afmt[0]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 0\n");
		afmt1 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}
	if (rdev->irq.afmt[1]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 1\n");
		afmt2 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}
	if (rdev->irq.afmt[2]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 2\n");
		afmt3 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}
	if (rdev->irq.afmt[3]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 3\n");
		afmt4 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}
	if (rdev->irq.afmt[4]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 4\n");
		afmt5 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}
	if (rdev->irq.afmt[5]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 5\n");
		afmt6 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}

	/* commit all the accumulated masks to the hardware */
	if (rdev->family >= CHIP_CAYMAN) {
		cayman_cp_int_cntl_setup(rdev, 0, cp_int_cntl);
		cayman_cp_int_cntl_setup(rdev, 1, cp_int_cntl1);
		cayman_cp_int_cntl_setup(rdev, 2, cp_int_cntl2);
	} else
		WREG32(CP_INT_CNTL, cp_int_cntl);

	WREG32(DMA_CNTL, dma_cntl);

	if (rdev->family >= CHIP_CAYMAN)
		WREG32(CAYMAN_DMA1_CNTL, dma_cntl1);

	WREG32(GRBM_INT_CNTL, grbm_int_cntl);

	WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, crtc1);
	WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, crtc2);
	if (rdev->num_crtc >= 4) {
		WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, crtc3);
		WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, crtc4);
	}
	if (rdev->num_crtc >= 6) {
		WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, crtc5);
		WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, crtc6);
	}

	/* pageflip interrupts are always unmasked on populated crtcs */
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET,
	       GRPH_PFLIP_INT_MASK);
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET,
	       GRPH_PFLIP_INT_MASK);
	if (rdev->num_crtc >= 4) {
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET,
		       GRPH_PFLIP_INT_MASK);
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET,
		       GRPH_PFLIP_INT_MASK);
	}
	if (rdev->num_crtc >= 6) {
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET,
		       GRPH_PFLIP_INT_MASK);
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET,
		       GRPH_PFLIP_INT_MASK);
	}

	WREG32(DC_HPD1_INT_CONTROL, hpd1);
	WREG32(DC_HPD2_INT_CONTROL, hpd2);
	WREG32(DC_HPD3_INT_CONTROL, hpd3);
	WREG32(DC_HPD4_INT_CONTROL, hpd4);
	WREG32(DC_HPD5_INT_CONTROL, hpd5);
	WREG32(DC_HPD6_INT_CONTROL, hpd6);
	if (rdev->family == CHIP_ARUBA)
		WREG32(TN_CG_THERMAL_INT_CTRL, thermal_int);
	else
		WREG32(CG_THERMAL_INT, thermal_int);

	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, afmt1);
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, afmt2);
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, afmt3);
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, afmt4);
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, afmt5);
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, afmt6);

	/* posting read */
	RREG32(SRBM_STATUS);

	return 0;
}
4818 
/**
 * evergreen_irq_ack - latch and acknowledge pending display interrupts
 * @rdev: radeon device
 *
 * Snapshots the DISP_INTERRUPT_STATUS* registers and the per-crtc
 * GRPH/AFMT status registers into rdev->irq.stat_regs.evergreen, then
 * writes the matching ack/clear bits back for every source found
 * pending.  evergreen_irq_process() consumes the cached snapshot.
 */
static void evergreen_irq_ack(struct radeon_device *rdev)
{
	u32 tmp;

	/* latch the current status registers */
	rdev->irq.stat_regs.evergreen.disp_int = RREG32(DISP_INTERRUPT_STATUS);
	rdev->irq.stat_regs.evergreen.disp_int_cont = RREG32(DISP_INTERRUPT_STATUS_CONTINUE);
	rdev->irq.stat_regs.evergreen.disp_int_cont2 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE2);
	rdev->irq.stat_regs.evergreen.disp_int_cont3 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE3);
	rdev->irq.stat_regs.evergreen.disp_int_cont4 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE4);
	rdev->irq.stat_regs.evergreen.disp_int_cont5 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE5);
	rdev->irq.stat_regs.evergreen.d1grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
	rdev->irq.stat_regs.evergreen.d2grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
	if (rdev->num_crtc >= 4) {
		rdev->irq.stat_regs.evergreen.d3grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
		rdev->irq.stat_regs.evergreen.d4grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
	}
	if (rdev->num_crtc >= 6) {
		rdev->irq.stat_regs.evergreen.d5grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
		rdev->irq.stat_regs.evergreen.d6grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);
	}

	rdev->irq.stat_regs.evergreen.afmt_status1 = RREG32(AFMT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
	rdev->irq.stat_regs.evergreen.afmt_status2 = RREG32(AFMT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
	rdev->irq.stat_regs.evergreen.afmt_status3 = RREG32(AFMT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
	rdev->irq.stat_regs.evergreen.afmt_status4 = RREG32(AFMT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
	rdev->irq.stat_regs.evergreen.afmt_status5 = RREG32(AFMT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
	rdev->irq.stat_regs.evergreen.afmt_status6 = RREG32(AFMT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);

	/* ack pageflip / vblank / vline on crtcs 0 and 1 (always present) */
	if (rdev->irq.stat_regs.evergreen.d1grph_int & GRPH_PFLIP_INT_OCCURRED)
		WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
	if (rdev->irq.stat_regs.evergreen.d2grph_int & GRPH_PFLIP_INT_OCCURRED)
		WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
	if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT)
		WREG32(VBLANK_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VBLANK_ACK);
	if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT)
		WREG32(VLINE_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VLINE_ACK);
	if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT)
		WREG32(VBLANK_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VBLANK_ACK);
	if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT)
		WREG32(VLINE_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VLINE_ACK);

	if (rdev->num_crtc >= 4) {
		if (rdev->irq.stat_regs.evergreen.d3grph_int & GRPH_PFLIP_INT_OCCURRED)
			WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
		if (rdev->irq.stat_regs.evergreen.d4grph_int & GRPH_PFLIP_INT_OCCURRED)
			WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
		if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT)
			WREG32(VBLANK_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VBLANK_ACK);
		if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT)
			WREG32(VLINE_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VLINE_ACK);
		if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT)
			WREG32(VBLANK_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VBLANK_ACK);
		if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT)
			WREG32(VLINE_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VLINE_ACK);
	}

	if (rdev->num_crtc >= 6) {
		if (rdev->irq.stat_regs.evergreen.d5grph_int & GRPH_PFLIP_INT_OCCURRED)
			WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
		if (rdev->irq.stat_regs.evergreen.d6grph_int & GRPH_PFLIP_INT_OCCURRED)
			WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
		if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT)
			WREG32(VBLANK_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VBLANK_ACK);
		if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT)
			WREG32(VLINE_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VLINE_ACK);
		if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT)
			WREG32(VBLANK_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VBLANK_ACK);
		if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT)
			WREG32(VLINE_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VLINE_ACK);
	}

	/* ack hotplug sense interrupts */
	if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT) {
		tmp = RREG32(DC_HPD1_INT_CONTROL);
		tmp |= DC_HPDx_INT_ACK;
		WREG32(DC_HPD1_INT_CONTROL, tmp);
	}
	if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT) {
		tmp = RREG32(DC_HPD2_INT_CONTROL);
		tmp |= DC_HPDx_INT_ACK;
		WREG32(DC_HPD2_INT_CONTROL, tmp);
	}
	if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT) {
		tmp = RREG32(DC_HPD3_INT_CONTROL);
		tmp |= DC_HPDx_INT_ACK;
		WREG32(DC_HPD3_INT_CONTROL, tmp);
	}
	if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT) {
		tmp = RREG32(DC_HPD4_INT_CONTROL);
		tmp |= DC_HPDx_INT_ACK;
		WREG32(DC_HPD4_INT_CONTROL, tmp);
	}
	if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT) {
		tmp = RREG32(DC_HPD5_INT_CONTROL);
		tmp |= DC_HPDx_INT_ACK;
		WREG32(DC_HPD5_INT_CONTROL, tmp);
	}
	if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT) {
		tmp = RREG32(DC_HPD6_INT_CONTROL);
		tmp |= DC_HPDx_INT_ACK;
		WREG32(DC_HPD6_INT_CONTROL, tmp);
	}

	/* ack HPD RX interrupts (routed to the DP queue by the irq handler) */
	if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_RX_INTERRUPT) {
		tmp = RREG32(DC_HPD1_INT_CONTROL);
		tmp |= DC_HPDx_RX_INT_ACK;
		WREG32(DC_HPD1_INT_CONTROL, tmp);
	}
	if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_RX_INTERRUPT) {
		tmp = RREG32(DC_HPD2_INT_CONTROL);
		tmp |= DC_HPDx_RX_INT_ACK;
		WREG32(DC_HPD2_INT_CONTROL, tmp);
	}
	if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_RX_INTERRUPT) {
		tmp = RREG32(DC_HPD3_INT_CONTROL);
		tmp |= DC_HPDx_RX_INT_ACK;
		WREG32(DC_HPD3_INT_CONTROL, tmp);
	}
	if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_RX_INTERRUPT) {
		tmp = RREG32(DC_HPD4_INT_CONTROL);
		tmp |= DC_HPDx_RX_INT_ACK;
		WREG32(DC_HPD4_INT_CONTROL, tmp);
	}
	if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_RX_INTERRUPT) {
		tmp = RREG32(DC_HPD5_INT_CONTROL);
		tmp |= DC_HPDx_RX_INT_ACK;
		WREG32(DC_HPD5_INT_CONTROL, tmp);
	}
	if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_RX_INTERRUPT) {
		tmp = RREG32(DC_HPD6_INT_CONTROL);
		tmp |= DC_HPDx_RX_INT_ACK;
		WREG32(DC_HPD6_INT_CONTROL, tmp);
	}

	/* ack audio format write-trigger interrupts */
	if (rdev->irq.stat_regs.evergreen.afmt_status1 & AFMT_AZ_FORMAT_WTRIG) {
		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET);
		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, tmp);
	}
	if (rdev->irq.stat_regs.evergreen.afmt_status2 & AFMT_AZ_FORMAT_WTRIG) {
		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET);
		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, tmp);
	}
	if (rdev->irq.stat_regs.evergreen.afmt_status3 & AFMT_AZ_FORMAT_WTRIG) {
		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET);
		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, tmp);
	}
	if (rdev->irq.stat_regs.evergreen.afmt_status4 & AFMT_AZ_FORMAT_WTRIG) {
		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET);
		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, tmp);
	}
	if (rdev->irq.stat_regs.evergreen.afmt_status5 & AFMT_AZ_FORMAT_WTRIG) {
		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET);
		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, tmp);
	}
	if (rdev->irq.stat_regs.evergreen.afmt_status6 & AFMT_AZ_FORMAT_WTRIG) {
		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET);
		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, tmp);
	}
}
4983 
/*
 * Fully quiesce interrupt delivery: stop the IH, give in-flight
 * interrupts a moment to land, ack anything still pending, then force
 * every source's enable bits off.
 */
static void evergreen_irq_disable(struct radeon_device *rdev)
{
	r600_disable_interrupts(rdev);
	/* Wait and acknowledge irq */
	mdelay(1);
	evergreen_irq_ack(rdev);
	evergreen_disable_interrupt_state(rdev);
}
4992 
/*
 * Suspend-time interrupt teardown: disable/ack all interrupt sources,
 * then stop the RLC so nothing fires while the device is down.
 */
void evergreen_irq_suspend(struct radeon_device *rdev)
{
	evergreen_irq_disable(rdev);
	r600_rlc_stop(rdev);
}
4998 
4999 static u32 evergreen_get_ih_wptr(struct radeon_device *rdev)
5000 {
5001 	u32 wptr, tmp;
5002 
5003 	if (rdev->wb.enabled)
5004 		wptr = le32_to_cpu(rdev->wb.wb[R600_WB_IH_WPTR_OFFSET/4]);
5005 	else
5006 		wptr = RREG32(IH_RB_WPTR);
5007 
5008 	if (wptr & RB_OVERFLOW) {
5009 		wptr &= ~RB_OVERFLOW;
5010 		/* When a ring buffer overflow happen start parsing interrupt
5011 		 * from the last not overwritten vector (wptr + 16). Hopefully
5012 		 * this should allow us to catchup.
5013 		 */
5014 		dev_warn(rdev->dev, "IH ring buffer overflow (0x%08X, 0x%08X, 0x%08X)\n",
5015 			 wptr, rdev->ih.rptr, (wptr + 16) & rdev->ih.ptr_mask);
5016 		rdev->ih.rptr = (wptr + 16) & rdev->ih.ptr_mask;
5017 		tmp = RREG32(IH_RB_CNTL);
5018 		tmp |= IH_WPTR_OVERFLOW_CLEAR;
5019 		WREG32(IH_RB_CNTL, tmp);
5020 	}
5021 	return (wptr & rdev->ih.ptr_mask);
5022 }
5023 
5024 irqreturn_t evergreen_irq_process(struct radeon_device *rdev)
5025 {
5026 	u32 wptr;
5027 	u32 rptr;
5028 	u32 src_id, src_data;
5029 	u32 ring_index;
5030 	bool queue_hotplug = false;
5031 	bool queue_hdmi = false;
5032 	bool queue_dp = false;
5033 	bool queue_thermal = false;
5034 	u32 status, addr;
5035 
5036 	if (!rdev->ih.enabled || rdev->shutdown)
5037 		return IRQ_NONE;
5038 
5039 	wptr = evergreen_get_ih_wptr(rdev);
5040 
5041 restart_ih:
5042 	/* is somebody else already processing irqs? */
5043 	if (atomic_xchg(&rdev->ih.lock, 1))
5044 		return IRQ_NONE;
5045 
5046 	rptr = rdev->ih.rptr;
5047 	DRM_DEBUG_VBLANK("evergreen_irq_process start: rptr %d, wptr %d\n", rptr, wptr);
5048 
5049 	/* Order reading of wptr vs. reading of IH ring data */
5050 	rmb();
5051 
5052 	/* display interrupts */
5053 	evergreen_irq_ack(rdev);
5054 
5055 	while (rptr != wptr) {
5056 		/* wptr/rptr are in bytes! */
5057 		ring_index = rptr / 4;
5058 		src_id =  le32_to_cpu(rdev->ih.ring[ring_index]) & 0xff;
5059 		src_data = le32_to_cpu(rdev->ih.ring[ring_index + 1]) & 0xfffffff;
5060 
5061 		switch (src_id) {
5062 		case 1: /* D1 vblank/vline */
5063 			switch (src_data) {
5064 			case 0: /* D1 vblank */
5065 				if (!(rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT))
5066 					DRM_DEBUG("IH: D1 vblank - IH event w/o asserted irq bit?\n");
5067 
5068 				if (rdev->irq.crtc_vblank_int[0]) {
5069 					drm_handle_vblank(rdev->ddev, 0);
5070 					rdev->pm.vblank_sync = true;
5071 					wake_up(&rdev->irq.vblank_queue);
5072 				}
5073 				if (atomic_read(&rdev->irq.pflip[0]))
5074 					radeon_crtc_handle_vblank(rdev, 0);
5075 				rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VBLANK_INTERRUPT;
5076 				DRM_DEBUG_VBLANK("IH: D1 vblank\n");
5077 
5078 				break;
5079 			case 1: /* D1 vline */
5080 				if (!(rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT))
5081 					DRM_DEBUG("IH: D1 vline - IH event w/o asserted irq bit?\n");
5082 
5083 				rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VLINE_INTERRUPT;
5084 				DRM_DEBUG_VBLANK("IH: D1 vline\n");
5085 
5086 				break;
5087 			default:
5088 				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5089 				break;
5090 			}
5091 			break;
5092 		case 2: /* D2 vblank/vline */
5093 			switch (src_data) {
5094 			case 0: /* D2 vblank */
5095 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT))
5096 					DRM_DEBUG("IH: D2 vblank - IH event w/o asserted irq bit?\n");
5097 
5098 				if (rdev->irq.crtc_vblank_int[1]) {
5099 					drm_handle_vblank(rdev->ddev, 1);
5100 					rdev->pm.vblank_sync = true;
5101 					wake_up(&rdev->irq.vblank_queue);
5102 				}
5103 				if (atomic_read(&rdev->irq.pflip[1]))
5104 					radeon_crtc_handle_vblank(rdev, 1);
5105 				rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VBLANK_INTERRUPT;
5106 				DRM_DEBUG_VBLANK("IH: D2 vblank\n");
5107 
5108 				break;
5109 			case 1: /* D2 vline */
5110 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT))
5111 					DRM_DEBUG("IH: D2 vline - IH event w/o asserted irq bit?\n");
5112 
5113 				rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VLINE_INTERRUPT;
5114 				DRM_DEBUG_VBLANK("IH: D2 vline\n");
5115 
5116 				break;
5117 			default:
5118 				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5119 				break;
5120 			}
5121 			break;
5122 		case 3: /* D3 vblank/vline */
5123 			switch (src_data) {
5124 			case 0: /* D3 vblank */
5125 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT))
5126 					DRM_DEBUG("IH: D3 vblank - IH event w/o asserted irq bit?\n");
5127 
5128 				if (rdev->irq.crtc_vblank_int[2]) {
5129 					drm_handle_vblank(rdev->ddev, 2);
5130 					rdev->pm.vblank_sync = true;
5131 					wake_up(&rdev->irq.vblank_queue);
5132 				}
5133 				if (atomic_read(&rdev->irq.pflip[2]))
5134 					radeon_crtc_handle_vblank(rdev, 2);
5135 				rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VBLANK_INTERRUPT;
5136 				DRM_DEBUG_VBLANK("IH: D3 vblank\n");
5137 
5138 				break;
5139 			case 1: /* D3 vline */
5140 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT))
5141 					DRM_DEBUG("IH: D3 vline - IH event w/o asserted irq bit?\n");
5142 
5143 				rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VLINE_INTERRUPT;
5144 				DRM_DEBUG_VBLANK("IH: D3 vline\n");
5145 
5146 				break;
5147 			default:
5148 				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5149 				break;
5150 			}
5151 			break;
5152 		case 4: /* D4 vblank/vline */
5153 			switch (src_data) {
5154 			case 0: /* D4 vblank */
5155 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT))
5156 					DRM_DEBUG("IH: D4 vblank - IH event w/o asserted irq bit?\n");
5157 
5158 				if (rdev->irq.crtc_vblank_int[3]) {
5159 					drm_handle_vblank(rdev->ddev, 3);
5160 					rdev->pm.vblank_sync = true;
5161 					wake_up(&rdev->irq.vblank_queue);
5162 				}
5163 				if (atomic_read(&rdev->irq.pflip[3]))
5164 					radeon_crtc_handle_vblank(rdev, 3);
5165 				rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VBLANK_INTERRUPT;
5166 				DRM_DEBUG_VBLANK("IH: D4 vblank\n");
5167 
5168 				break;
5169 			case 1: /* D4 vline */
5170 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT))
5171 					DRM_DEBUG("IH: D4 vline - IH event w/o asserted irq bit?\n");
5172 
5173 				rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VLINE_INTERRUPT;
5174 				DRM_DEBUG_VBLANK("IH: D4 vline\n");
5175 
5176 				break;
5177 			default:
5178 				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5179 				break;
5180 			}
5181 			break;
5182 		case 5: /* D5 vblank/vline */
5183 			switch (src_data) {
5184 			case 0: /* D5 vblank */
5185 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT))
5186 					DRM_DEBUG("IH: D5 vblank - IH event w/o asserted irq bit?\n");
5187 
5188 				if (rdev->irq.crtc_vblank_int[4]) {
5189 					drm_handle_vblank(rdev->ddev, 4);
5190 					rdev->pm.vblank_sync = true;
5191 					wake_up(&rdev->irq.vblank_queue);
5192 				}
5193 				if (atomic_read(&rdev->irq.pflip[4]))
5194 					radeon_crtc_handle_vblank(rdev, 4);
5195 				rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VBLANK_INTERRUPT;
5196 				DRM_DEBUG_VBLANK("IH: D5 vblank\n");
5197 
5198 				break;
5199 			case 1: /* D5 vline */
5200 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT))
5201 					DRM_DEBUG("IH: D5 vline - IH event w/o asserted irq bit?\n");
5202 
5203 				rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VLINE_INTERRUPT;
5204 				DRM_DEBUG_VBLANK("IH: D5 vline\n");
5205 
5206 				break;
5207 			default:
5208 				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5209 				break;
5210 			}
5211 			break;
5212 		case 6: /* D6 vblank/vline */
5213 			switch (src_data) {
5214 			case 0: /* D6 vblank */
5215 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT))
5216 					DRM_DEBUG("IH: D6 vblank - IH event w/o asserted irq bit?\n");
5217 
5218 				if (rdev->irq.crtc_vblank_int[5]) {
5219 					drm_handle_vblank(rdev->ddev, 5);
5220 					rdev->pm.vblank_sync = true;
5221 					wake_up(&rdev->irq.vblank_queue);
5222 				}
5223 				if (atomic_read(&rdev->irq.pflip[5]))
5224 					radeon_crtc_handle_vblank(rdev, 5);
5225 				rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VBLANK_INTERRUPT;
5226 				DRM_DEBUG_VBLANK("IH: D6 vblank\n");
5227 
5228 				break;
5229 			case 1: /* D6 vline */
5230 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT))
5231 					DRM_DEBUG("IH: D6 vline - IH event w/o asserted irq bit?\n");
5232 
5233 				rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VLINE_INTERRUPT;
5234 				DRM_DEBUG_VBLANK("IH: D6 vline\n");
5235 
5236 				break;
5237 			default:
5238 				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5239 				break;
5240 			}
5241 			break;
5242 		case 8: /* D1 page flip */
5243 		case 10: /* D2 page flip */
5244 		case 12: /* D3 page flip */
5245 		case 14: /* D4 page flip */
5246 		case 16: /* D5 page flip */
5247 		case 18: /* D6 page flip */
5248 			DRM_DEBUG_VBLANK("IH: D%d flip\n", ((src_id - 8) >> 1) + 1);
5249 			if (radeon_use_pflipirq > 0)
5250 				radeon_crtc_handle_flip(rdev, (src_id - 8) >> 1);
5251 			break;
5252 		case 42: /* HPD hotplug */
5253 			switch (src_data) {
5254 			case 0:
5255 				if (!(rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT))
5256 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5257 
5258 				rdev->irq.stat_regs.evergreen.disp_int &= ~DC_HPD1_INTERRUPT;
5259 				queue_hotplug = true;
5260 				DRM_DEBUG("IH: HPD1\n");
5261 				break;
5262 			case 1:
5263 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT))
5264 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5265 
5266 				rdev->irq.stat_regs.evergreen.disp_int_cont &= ~DC_HPD2_INTERRUPT;
5267 				queue_hotplug = true;
5268 				DRM_DEBUG("IH: HPD2\n");
5269 				break;
5270 			case 2:
5271 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT))
5272 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5273 
5274 				rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~DC_HPD3_INTERRUPT;
5275 				queue_hotplug = true;
5276 				DRM_DEBUG("IH: HPD3\n");
5277 				break;
5278 			case 3:
5279 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT))
5280 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5281 
5282 				rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~DC_HPD4_INTERRUPT;
5283 				queue_hotplug = true;
5284 				DRM_DEBUG("IH: HPD4\n");
5285 				break;
5286 			case 4:
5287 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT))
5288 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5289 
5290 				rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~DC_HPD5_INTERRUPT;
5291 				queue_hotplug = true;
5292 				DRM_DEBUG("IH: HPD5\n");
5293 				break;
5294 			case 5:
5295 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT))
5296 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5297 
5298 				rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~DC_HPD6_INTERRUPT;
5299 				queue_hotplug = true;
5300 				DRM_DEBUG("IH: HPD6\n");
5301 				break;
5302 			case 6:
5303 				if (!(rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_RX_INTERRUPT))
5304 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5305 
5306 				rdev->irq.stat_regs.evergreen.disp_int &= ~DC_HPD1_RX_INTERRUPT;
5307 				queue_dp = true;
5308 				DRM_DEBUG("IH: HPD_RX 1\n");
5309 				break;
5310 			case 7:
5311 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_RX_INTERRUPT))
5312 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5313 
5314 				rdev->irq.stat_regs.evergreen.disp_int_cont &= ~DC_HPD2_RX_INTERRUPT;
5315 				queue_dp = true;
5316 				DRM_DEBUG("IH: HPD_RX 2\n");
5317 				break;
5318 			case 8:
5319 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_RX_INTERRUPT))
5320 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5321 
5322 				rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~DC_HPD3_RX_INTERRUPT;
5323 				queue_dp = true;
5324 				DRM_DEBUG("IH: HPD_RX 3\n");
5325 				break;
5326 			case 9:
5327 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_RX_INTERRUPT))
5328 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5329 
5330 				rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~DC_HPD4_RX_INTERRUPT;
5331 				queue_dp = true;
5332 				DRM_DEBUG("IH: HPD_RX 4\n");
5333 				break;
5334 			case 10:
5335 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_RX_INTERRUPT))
5336 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5337 
5338 				rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~DC_HPD5_RX_INTERRUPT;
5339 				queue_dp = true;
5340 				DRM_DEBUG("IH: HPD_RX 5\n");
5341 				break;
5342 			case 11:
5343 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_RX_INTERRUPT))
5344 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5345 
5346 				rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~DC_HPD6_RX_INTERRUPT;
5347 				queue_dp = true;
5348 				DRM_DEBUG("IH: HPD_RX 6\n");
5349 				break;
5350 			default:
5351 				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5352 				break;
5353 			}
5354 			break;
5355 		case 44: /* hdmi */
5356 			switch (src_data) {
5357 			case 0:
5358 				if (!(rdev->irq.stat_regs.evergreen.afmt_status1 & AFMT_AZ_FORMAT_WTRIG))
5359 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5360 
5361 				rdev->irq.stat_regs.evergreen.afmt_status1 &= ~AFMT_AZ_FORMAT_WTRIG;
5362 				queue_hdmi = true;
5363 				DRM_DEBUG("IH: HDMI0\n");
5364 				break;
5365 			case 1:
5366 				if (!(rdev->irq.stat_regs.evergreen.afmt_status2 & AFMT_AZ_FORMAT_WTRIG))
5367 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5368 
5369 				rdev->irq.stat_regs.evergreen.afmt_status2 &= ~AFMT_AZ_FORMAT_WTRIG;
5370 				queue_hdmi = true;
5371 				DRM_DEBUG("IH: HDMI1\n");
5372 				break;
5373 			case 2:
5374 				if (!(rdev->irq.stat_regs.evergreen.afmt_status3 & AFMT_AZ_FORMAT_WTRIG))
5375 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5376 
5377 				rdev->irq.stat_regs.evergreen.afmt_status3 &= ~AFMT_AZ_FORMAT_WTRIG;
5378 				queue_hdmi = true;
5379 				DRM_DEBUG("IH: HDMI2\n");
5380 				break;
5381 			case 3:
5382 				if (!(rdev->irq.stat_regs.evergreen.afmt_status4 & AFMT_AZ_FORMAT_WTRIG))
5383 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5384 
5385 				rdev->irq.stat_regs.evergreen.afmt_status4 &= ~AFMT_AZ_FORMAT_WTRIG;
5386 				queue_hdmi = true;
5387 				DRM_DEBUG("IH: HDMI3\n");
5388 				break;
5389 			case 4:
5390 				if (!(rdev->irq.stat_regs.evergreen.afmt_status5 & AFMT_AZ_FORMAT_WTRIG))
5391 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5392 
5393 				rdev->irq.stat_regs.evergreen.afmt_status5 &= ~AFMT_AZ_FORMAT_WTRIG;
5394 				queue_hdmi = true;
5395 				DRM_DEBUG("IH: HDMI4\n");
5396 				break;
5397 			case 5:
5398 				if (!(rdev->irq.stat_regs.evergreen.afmt_status6 & AFMT_AZ_FORMAT_WTRIG))
5399 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5400 
5401 				rdev->irq.stat_regs.evergreen.afmt_status6 &= ~AFMT_AZ_FORMAT_WTRIG;
5402 				queue_hdmi = true;
5403 				DRM_DEBUG("IH: HDMI5\n");
5404 				break;
5405 			default:
5406 				DRM_ERROR("Unhandled interrupt: %d %d\n", src_id, src_data);
5407 				break;
5408 			}
5409 		case 96:
5410 			DRM_ERROR("SRBM_READ_ERROR: 0x%x\n", RREG32(SRBM_READ_ERROR));
5411 			WREG32(SRBM_INT_ACK, 0x1);
5412 			break;
5413 		case 124: /* UVD */
5414 			DRM_DEBUG("IH: UVD int: 0x%08x\n", src_data);
5415 			radeon_fence_process(rdev, R600_RING_TYPE_UVD_INDEX);
5416 			break;
5417 		case 146:
5418 		case 147:
5419 			addr = RREG32(VM_CONTEXT1_PROTECTION_FAULT_ADDR);
5420 			status = RREG32(VM_CONTEXT1_PROTECTION_FAULT_STATUS);
5421 			/* reset addr and status */
5422 			WREG32_P(VM_CONTEXT1_CNTL2, 1, ~1);
5423 			if (addr == 0x0 && status == 0x0)
5424 				break;
5425 			dev_err(rdev->dev, "GPU fault detected: %d 0x%08x\n", src_id, src_data);
5426 			dev_err(rdev->dev, "  VM_CONTEXT1_PROTECTION_FAULT_ADDR   0x%08X\n",
5427 				addr);
5428 			dev_err(rdev->dev, "  VM_CONTEXT1_PROTECTION_FAULT_STATUS 0x%08X\n",
5429 				status);
5430 			cayman_vm_decode_fault(rdev, status, addr);
5431 			break;
5432 		case 176: /* CP_INT in ring buffer */
5433 		case 177: /* CP_INT in IB1 */
5434 		case 178: /* CP_INT in IB2 */
5435 			DRM_DEBUG("IH: CP int: 0x%08x\n", src_data);
5436 			radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
5437 			break;
5438 		case 181: /* CP EOP event */
5439 			DRM_DEBUG("IH: CP EOP\n");
5440 			if (rdev->family >= CHIP_CAYMAN) {
5441 				switch (src_data) {
5442 				case 0:
5443 					radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
5444 					break;
5445 				case 1:
5446 					radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP1_INDEX);
5447 					break;
5448 				case 2:
5449 					radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP2_INDEX);
5450 					break;
5451 				}
5452 			} else
5453 				radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
5454 			break;
5455 		case 224: /* DMA trap event */
5456 			DRM_DEBUG("IH: DMA trap\n");
5457 			radeon_fence_process(rdev, R600_RING_TYPE_DMA_INDEX);
5458 			break;
5459 		case 230: /* thermal low to high */
5460 			DRM_DEBUG("IH: thermal low to high\n");
5461 			rdev->pm.dpm.thermal.high_to_low = false;
5462 			queue_thermal = true;
5463 			break;
5464 		case 231: /* thermal high to low */
5465 			DRM_DEBUG("IH: thermal high to low\n");
5466 			rdev->pm.dpm.thermal.high_to_low = true;
5467 			queue_thermal = true;
5468 			break;
5469 		case 233: /* GUI IDLE */
5470 			DRM_DEBUG("IH: GUI idle\n");
5471 			break;
5472 		case 244: /* DMA trap event */
5473 			if (rdev->family >= CHIP_CAYMAN) {
5474 				DRM_DEBUG("IH: DMA1 trap\n");
5475 				radeon_fence_process(rdev, CAYMAN_RING_TYPE_DMA1_INDEX);
5476 			}
5477 			break;
5478 		default:
5479 			DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5480 			break;
5481 		}
5482 
5483 		/* wptr/rptr are in bytes! */
5484 		rptr += 16;
5485 		rptr &= rdev->ih.ptr_mask;
5486 		WREG32(IH_RB_RPTR, rptr);
5487 	}
5488 	if (queue_dp)
5489 		schedule_work(&rdev->dp_work);
5490 	if (queue_hotplug)
5491 		schedule_delayed_work(&rdev->hotplug_work, 0);
5492 	if (queue_hdmi)
5493 		schedule_work(&rdev->audio_work);
5494 	if (queue_thermal && rdev->pm.dpm_enabled)
5495 		schedule_work(&rdev->pm.dpm.thermal.work);
5496 	rdev->ih.rptr = rptr;
5497 	atomic_set(&rdev->ih.lock, 0);
5498 
5499 	/* make sure wptr hasn't changed while processing */
5500 	wptr = evergreen_get_ih_wptr(rdev);
5501 	if (wptr != rptr)
5502 		goto restart_ih;
5503 
5504 	return IRQ_HANDLED;
5505 }
5506 
5507 static void evergreen_uvd_init(struct radeon_device *rdev)
5508 {
5509 	int r;
5510 
5511 	if (!rdev->has_uvd)
5512 		return;
5513 
5514 	r = radeon_uvd_init(rdev);
5515 	if (r) {
5516 		dev_err(rdev->dev, "failed UVD (%d) init.\n", r);
5517 		/*
5518 		 * At this point rdev->uvd.vcpu_bo is NULL which trickles down
5519 		 * to early fails uvd_v2_2_resume() and thus nothing happens
5520 		 * there. So it is pointless to try to go through that code
5521 		 * hence why we disable uvd here.
5522 		 */
5523 		rdev->has_uvd = 0;
5524 		return;
5525 	}
5526 	rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_obj = NULL;
5527 	r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_UVD_INDEX], 4096);
5528 }
5529 
5530 static void evergreen_uvd_start(struct radeon_device *rdev)
5531 {
5532 	int r;
5533 
5534 	if (!rdev->has_uvd)
5535 		return;
5536 
5537 	r = uvd_v2_2_resume(rdev);
5538 	if (r) {
5539 		dev_err(rdev->dev, "failed UVD resume (%d).\n", r);
5540 		goto error;
5541 	}
5542 	r = radeon_fence_driver_start_ring(rdev, R600_RING_TYPE_UVD_INDEX);
5543 	if (r) {
5544 		dev_err(rdev->dev, "failed initializing UVD fences (%d).\n", r);
5545 		goto error;
5546 	}
5547 	return;
5548 
5549 error:
5550 	rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size = 0;
5551 }
5552 
5553 static void evergreen_uvd_resume(struct radeon_device *rdev)
5554 {
5555 	struct radeon_ring *ring;
5556 	int r;
5557 
5558 	if (!rdev->has_uvd || !rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size)
5559 		return;
5560 
5561 	ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX];
5562 	r = radeon_ring_init(rdev, ring, ring->ring_size, 0, RADEON_CP_PACKET2);
5563 	if (r) {
5564 		dev_err(rdev->dev, "failed initializing UVD ring (%d).\n", r);
5565 		return;
5566 	}
5567 	r = uvd_v1_0_init(rdev);
5568 	if (r) {
5569 		dev_err(rdev->dev, "failed initializing UVD (%d).\n", r);
5570 		return;
5571 	}
5572 }
5573 
/**
 * evergreen_startup - bring the asic to a functional state
 *
 * @rdev: radeon_device pointer
 *
 * Programs the MC, GART, rings, interrupts, IB pool and audio in the
 * strict order the hardware requires.  Shared between first init
 * (evergreen_init) and resume (evergreen_resume).
 * Returns 0 on success, negative error code on failure.
 */
static int evergreen_startup(struct radeon_device *rdev)
{
	struct radeon_ring *ring;
	int r;

	/* enable pcie gen2 link */
	evergreen_pcie_gen2_enable(rdev);
	/* enable aspm */
	evergreen_program_aspm(rdev);

	/* scratch needs to be initialized before MC */
	r = r600_vram_scratch_init(rdev);
	if (r)
		return r;

	evergreen_mc_program(rdev);

	/* DCE5 parts need MC microcode; with dpm enabled it is loaded elsewhere */
	if (ASIC_IS_DCE5(rdev) && !rdev->pm.dpm_enabled) {
		r = ni_mc_load_microcode(rdev);
		if (r) {
			DRM_ERROR("Failed to load MC firmware!\n");
			return r;
		}
	}

	if (rdev->flags & RADEON_IS_AGP) {
		evergreen_agp_enable(rdev);
	} else {
		r = evergreen_pcie_gart_enable(rdev);
		if (r)
			return r;
	}
	evergreen_gpu_init(rdev);

	/* allocate rlc buffers */
	if (rdev->flags & RADEON_IS_IGP) {
		rdev->rlc.reg_list = sumo_rlc_save_restore_register_list;
		rdev->rlc.reg_list_size =
			(u32)ARRAY_SIZE(sumo_rlc_save_restore_register_list);
		rdev->rlc.cs_data = evergreen_cs_data;
		r = sumo_rlc_init(rdev);
		if (r) {
			DRM_ERROR("Failed to init rlc BOs!\n");
			return r;
		}
	}

	/* allocate wb buffer */
	r = radeon_wb_init(rdev);
	if (r)
		return r;

	r = radeon_fence_driver_start_ring(rdev, RADEON_RING_TYPE_GFX_INDEX);
	if (r) {
		dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r);
		return r;
	}

	r = radeon_fence_driver_start_ring(rdev, R600_RING_TYPE_DMA_INDEX);
	if (r) {
		dev_err(rdev->dev, "failed initializing DMA fences (%d).\n", r);
		return r;
	}

	/* UVD start failures are non-fatal; the ring is just disabled */
	evergreen_uvd_start(rdev);

	/* Enable IRQ */
	if (!rdev->irq.installed) {
		r = radeon_irq_kms_init(rdev);
		if (r)
			return r;
	}

	r = r600_irq_init(rdev);
	if (r) {
		DRM_ERROR("radeon: IH init failed (%d).\n", r);
		radeon_irq_kms_fini(rdev);
		return r;
	}
	evergreen_irq_set(rdev);

	ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
	r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP_RPTR_OFFSET,
			     RADEON_CP_PACKET2);
	if (r)
		return r;

	ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX];
	r = radeon_ring_init(rdev, ring, ring->ring_size, R600_WB_DMA_RPTR_OFFSET,
			     DMA_PACKET(DMA_PACKET_NOP, 0, 0));
	if (r)
		return r;

	r = evergreen_cp_load_microcode(rdev);
	if (r)
		return r;
	r = evergreen_cp_resume(rdev);
	if (r)
		return r;
	r = r600_dma_resume(rdev);
	if (r)
		return r;

	evergreen_uvd_resume(rdev);

	/* IB pool must come after the rings are running */
	r = radeon_ib_pool_init(rdev);
	if (r) {
		dev_err(rdev->dev, "IB initialization failed (%d).\n", r);
		return r;
	}

	r = radeon_audio_init(rdev);
	if (r) {
		DRM_ERROR("radeon: audio init failed\n");
		return r;
	}

	return 0;
}
5693 
5694 int evergreen_resume(struct radeon_device *rdev)
5695 {
5696 	int r;
5697 
5698 	/* reset the asic, the gfx blocks are often in a bad state
5699 	 * after the driver is unloaded or after a resume
5700 	 */
5701 	if (radeon_asic_reset(rdev))
5702 		dev_warn(rdev->dev, "GPU reset failed !\n");
5703 	/* Do not reset GPU before posting, on rv770 hw unlike on r500 hw,
5704 	 * posting will perform necessary task to bring back GPU into good
5705 	 * shape.
5706 	 */
5707 	/* post card */
5708 	atom_asic_init(rdev->mode_info.atom_context);
5709 
5710 	/* init golden registers */
5711 	evergreen_init_golden_registers(rdev);
5712 
5713 	if (rdev->pm.pm_method == PM_METHOD_DPM)
5714 		radeon_pm_resume(rdev);
5715 
5716 	rdev->accel_working = true;
5717 	r = evergreen_startup(rdev);
5718 	if (r) {
5719 		DRM_ERROR("evergreen startup failed on resume\n");
5720 		rdev->accel_working = false;
5721 		return r;
5722 	}
5723 
5724 	return r;
5725 
5726 }
5727 
/**
 * evergreen_suspend - stop the asic before suspend
 *
 * @rdev: radeon_device pointer
 *
 * Stops PM, audio, UVD, the CP and DMA engines, then disables
 * interrupts, writeback and the GART — in that order, which is the
 * reverse of the startup sequence.  Always returns 0.
 */
int evergreen_suspend(struct radeon_device *rdev)
{
	radeon_pm_suspend(rdev);
	radeon_audio_fini(rdev);
	if (rdev->has_uvd) {
		uvd_v1_0_fini(rdev);
		radeon_uvd_suspend(rdev);
	}
	r700_cp_stop(rdev);
	r600_dma_stop(rdev);
	evergreen_irq_suspend(rdev);
	radeon_wb_disable(rdev);
	evergreen_pcie_gart_disable(rdev);

	return 0;
}
5744 
/* The plan is to move initialization into this function and use
 * helper functions so that radeon_device_init does pretty much
 * nothing more than call ASIC-specific functions. This should
 * also allow the removal of a bunch of callback functions
 * like vram_info.
 */
int evergreen_init(struct radeon_device *rdev)
{
	int r;

	/* Read BIOS */
	if (!radeon_get_bios(rdev)) {
		/* only AVIVO parts treat a missing BIOS as fatal here */
		if (ASIC_IS_AVIVO(rdev))
			return -EINVAL;
	}
	/* Must be an ATOMBIOS */
	if (!rdev->is_atom_bios) {
		dev_err(rdev->dev, "Expecting atombios for evergreen GPU\n");
		return -EINVAL;
	}
	r = radeon_atombios_init(rdev);
	if (r)
		return r;
	/* reset the asic, the gfx blocks are often in a bad state
	 * after the driver is unloaded or after a resume
	 */
	if (radeon_asic_reset(rdev))
		dev_warn(rdev->dev, "GPU reset failed !\n");
	/* Post card if necessary */
	if (!radeon_card_posted(rdev)) {
		if (!rdev->bios) {
			dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n");
			return -EINVAL;
		}
		DRM_INFO("GPU not posted. posting now...\n");
		atom_asic_init(rdev->mode_info.atom_context);
	}
	/* init golden registers */
	evergreen_init_golden_registers(rdev);
	/* Initialize scratch registers */
	r600_scratch_init(rdev);
	/* Initialize surface registers */
	radeon_surface_init(rdev);
	/* Initialize clocks */
	radeon_get_clock_info(rdev->ddev);
	/* Fence driver */
	r = radeon_fence_driver_init(rdev);
	if (r)
		return r;
	/* initialize AGP; failure here is non-fatal, AGP is just disabled */
	if (rdev->flags & RADEON_IS_AGP) {
		r = radeon_agp_init(rdev);
		if (r)
			radeon_agp_disable(rdev);
	}
	/* initialize memory controller */
	r = evergreen_mc_init(rdev);
	if (r)
		return r;
	/* Memory manager */
	r = radeon_bo_init(rdev);
	if (r)
		return r;

	/* fetch CP/RLC (and on DCE5 also MC) microcode if not already cached */
	if (ASIC_IS_DCE5(rdev)) {
		if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw || !rdev->mc_fw) {
			r = ni_init_microcode(rdev);
			if (r) {
				DRM_ERROR("Failed to load firmware!\n");
				return r;
			}
		}
	} else {
		if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw) {
			r = r600_init_microcode(rdev);
			if (r) {
				DRM_ERROR("Failed to load firmware!\n");
				return r;
			}
		}
	}

	/* Initialize power management */
	radeon_pm_init(rdev);

	rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ring_obj = NULL;
	r600_ring_init(rdev, &rdev->ring[RADEON_RING_TYPE_GFX_INDEX], 1024 * 1024);

	rdev->ring[R600_RING_TYPE_DMA_INDEX].ring_obj = NULL;
	r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_DMA_INDEX], 64 * 1024);

	evergreen_uvd_init(rdev);

	rdev->ih.ring_obj = NULL;
	r600_ih_ring_init(rdev, 64 * 1024);

	r = r600_pcie_gart_init(rdev);
	if (r)
		return r;

#ifdef __DragonFly__
	/*
	   There are unresolved crashes on evergreen hardware,
	   tell userland acceleration is not working properly
	   Bug report: https://bugs.dragonflybsd.org/issues/3198
	*/
	rdev->accel_working = false;
#else
	rdev->accel_working = true;
#endif
	r = evergreen_startup(rdev);
	if (r) {
		/* startup failure is not fatal to init: acceleration is
		 * torn down and disabled, but the driver still loads.
		 */
		dev_err(rdev->dev, "disabling GPU acceleration\n");
		r700_cp_fini(rdev);
		r600_dma_fini(rdev);
		r600_irq_fini(rdev);
		if (rdev->flags & RADEON_IS_IGP)
			sumo_rlc_fini(rdev);
		radeon_wb_fini(rdev);
		radeon_ib_pool_fini(rdev);
		radeon_irq_kms_fini(rdev);
		evergreen_pcie_gart_fini(rdev);
		rdev->accel_working = false;
	}

	/* Don't start up if the MC ucode is missing on BTC parts.
	 * The default clocks and voltages before the MC ucode
	 * is loaded are not sufficient for advanced operations.
	 */
	if (ASIC_IS_DCE5(rdev)) {
		if (!rdev->mc_fw && !(rdev->flags & RADEON_IS_IGP)) {
			DRM_ERROR("radeon: MC ucode required for NI+.\n");
			return -EINVAL;
		}
	}

	return 0;
}
5883 
/**
 * evergreen_fini - tear down the asic on driver unload
 *
 * @rdev: radeon_device pointer
 *
 * Releases every resource acquired by evergreen_init/evergreen_startup,
 * roughly in the reverse order of acquisition.  Order matters here:
 * engines are stopped before the buffers and IRQs backing them go away.
 */
void evergreen_fini(struct radeon_device *rdev)
{
	radeon_pm_fini(rdev);
	radeon_audio_fini(rdev);
	r700_cp_fini(rdev);
	r600_dma_fini(rdev);
	r600_irq_fini(rdev);
	if (rdev->flags & RADEON_IS_IGP)
		sumo_rlc_fini(rdev);
	radeon_wb_fini(rdev);
	radeon_ib_pool_fini(rdev);
	radeon_irq_kms_fini(rdev);
	uvd_v1_0_fini(rdev);
	radeon_uvd_fini(rdev);
	evergreen_pcie_gart_fini(rdev);
	r600_vram_scratch_fini(rdev);
	radeon_gem_fini(rdev);
	radeon_fence_driver_fini(rdev);
	radeon_agp_fini(rdev);
	radeon_bo_fini(rdev);
	radeon_atombios_fini(rdev);
	if (ASIC_IS_DCE5(rdev))
		ni_fini_microcode(rdev);
	else
		r600_fini_microcode(rdev);
	kfree(rdev->bios);
	rdev->bios = NULL;
}
5912 
5913 void evergreen_pcie_gen2_enable(struct radeon_device *rdev)
5914 {
5915 	u32 link_width_cntl, speed_cntl;
5916 	u32 mask;
5917 
5918 	if (radeon_pcie_gen2 == 0)
5919 		return;
5920 
5921 	if (rdev->flags & RADEON_IS_IGP)
5922 		return;
5923 
5924 	if (!(rdev->flags & RADEON_IS_PCIE))
5925 		return;
5926 
5927 	/* x2 cards have a special sequence */
5928 	if (ASIC_IS_X2(rdev))
5929 		return;
5930 
5931 #ifdef __DragonFly__
5932 	if (drm_pcie_get_speed_cap_mask(rdev->ddev, &mask) != 0)
5933 		return;
5934 #endif
5935 
5936 	if (!(mask & DRM_PCIE_SPEED_50))
5937 		return;
5938 
5939 	speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5940 	if (speed_cntl & LC_CURRENT_DATA_RATE) {
5941 		DRM_INFO("PCIE gen 2 link speeds already enabled\n");
5942 		return;
5943 	}
5944 
5945 	DRM_INFO("enabling PCIE gen 2 link speeds, disable with radeon.pcie_gen2=0\n");
5946 
5947 	if ((speed_cntl & LC_OTHER_SIDE_EVER_SENT_GEN2) ||
5948 	    (speed_cntl & LC_OTHER_SIDE_SUPPORTS_GEN2)) {
5949 
5950 		link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
5951 		link_width_cntl &= ~LC_UPCONFIGURE_DIS;
5952 		WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
5953 
5954 		speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5955 		speed_cntl &= ~LC_TARGET_LINK_SPEED_OVERRIDE_EN;
5956 		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
5957 
5958 		speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5959 		speed_cntl |= LC_CLR_FAILED_SPD_CHANGE_CNT;
5960 		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
5961 
5962 		speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5963 		speed_cntl &= ~LC_CLR_FAILED_SPD_CHANGE_CNT;
5964 		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
5965 
5966 		speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5967 		speed_cntl |= LC_GEN2_EN_STRAP;
5968 		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
5969 
5970 	} else {
5971 		link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
5972 		/* XXX: only disable it if gen1 bridge vendor == 0x111d or 0x1106 */
5973 		if (1)
5974 			link_width_cntl |= LC_UPCONFIGURE_DIS;
5975 		else
5976 			link_width_cntl &= ~LC_UPCONFIGURE_DIS;
5977 		WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
5978 	}
5979 }
5980 
/**
 * evergreen_program_aspm - program Active State Power Management
 *
 * @rdev: radeon_device pointer
 *
 * Configures PCIE ASPM (L0s/L1 inactivity timers, PLL power-down in
 * L1, lane power states) for evergreen and newer parts.  Every
 * register update is read-modify-write and only written back when the
 * value actually changed, to avoid redundant indirect-register writes.
 * Does nothing when ASPM is disabled via radeon.aspm=0 or the part is
 * not PCIE.
 */
void evergreen_program_aspm(struct radeon_device *rdev)
{
	u32 data, orig;
	u32 pcie_lc_cntl, pcie_lc_cntl_old;
	bool disable_l0s, disable_l1 = false, disable_plloff_in_l1 = false;
	/* fusion_platform = true
	 * if the system is a fusion system
	 * (APU or DGPU in a fusion system).
	 * todo: check if the system is a fusion platform.
	 */
	bool fusion_platform = false;

	if (radeon_aspm == 0)
		return;

	if (!(rdev->flags & RADEON_IS_PCIE))
		return;

	/* L0s is kept disabled on these families */
	switch (rdev->family) {
	case CHIP_CYPRESS:
	case CHIP_HEMLOCK:
	case CHIP_JUNIPER:
	case CHIP_REDWOOD:
	case CHIP_CEDAR:
	case CHIP_SUMO:
	case CHIP_SUMO2:
	case CHIP_PALM:
	case CHIP_ARUBA:
		disable_l0s = true;
		break;
	default:
		disable_l0s = false;
		break;
	}

	if (rdev->flags & RADEON_IS_IGP)
		fusion_platform = true; /* XXX also dGPUs in a fusion system */

	/* PIF pairing: single PIF on fusion, multi PIF otherwise */
	data = orig = RREG32_PIF_PHY0(PB0_PIF_PAIRING);
	if (fusion_platform)
		data &= ~MULTI_PIF;
	else
		data |= MULTI_PIF;
	if (data != orig)
		WREG32_PIF_PHY0(PB0_PIF_PAIRING, data);

	data = orig = RREG32_PIF_PHY1(PB1_PIF_PAIRING);
	if (fusion_platform)
		data &= ~MULTI_PIF;
	else
		data |= MULTI_PIF;
	if (data != orig)
		WREG32_PIF_PHY1(PB1_PIF_PAIRING, data);

	/* L0s/L1 inactivity timers; values differ pre/post BARTS */
	pcie_lc_cntl = pcie_lc_cntl_old = RREG32_PCIE_PORT(PCIE_LC_CNTL);
	pcie_lc_cntl &= ~(LC_L0S_INACTIVITY_MASK | LC_L1_INACTIVITY_MASK);
	if (!disable_l0s) {
		if (rdev->family >= CHIP_BARTS)
			pcie_lc_cntl |= LC_L0S_INACTIVITY(7);
		else
			pcie_lc_cntl |= LC_L0S_INACTIVITY(3);
	}

	if (!disable_l1) {
		if (rdev->family >= CHIP_BARTS)
			pcie_lc_cntl |= LC_L1_INACTIVITY(7);
		else
			pcie_lc_cntl |= LC_L1_INACTIVITY(8);

		/* allow the PLLs to power down while the link is in L1 */
		if (!disable_plloff_in_l1) {
			data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0);
			data &= ~(PLL_POWER_STATE_IN_OFF_0_MASK | PLL_POWER_STATE_IN_TXS2_0_MASK);
			data |= PLL_POWER_STATE_IN_OFF_0(7) | PLL_POWER_STATE_IN_TXS2_0(7);
			if (data != orig)
				WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0, data);

			data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1);
			data &= ~(PLL_POWER_STATE_IN_OFF_1_MASK | PLL_POWER_STATE_IN_TXS2_1_MASK);
			data |= PLL_POWER_STATE_IN_OFF_1(7) | PLL_POWER_STATE_IN_TXS2_1(7);
			if (data != orig)
				WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1, data);

			data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0);
			data &= ~(PLL_POWER_STATE_IN_OFF_0_MASK | PLL_POWER_STATE_IN_TXS2_0_MASK);
			data |= PLL_POWER_STATE_IN_OFF_0(7) | PLL_POWER_STATE_IN_TXS2_0(7);
			if (data != orig)
				WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0, data);

			data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1);
			data &= ~(PLL_POWER_STATE_IN_OFF_1_MASK | PLL_POWER_STATE_IN_TXS2_1_MASK);
			data |= PLL_POWER_STATE_IN_OFF_1(7) | PLL_POWER_STATE_IN_TXS2_1(7);
			if (data != orig)
				WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1, data);

			/* BARTS and newer also tune the PLL ramp-up time */
			if (rdev->family >= CHIP_BARTS) {
				data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0);
				data &= ~PLL_RAMP_UP_TIME_0_MASK;
				data |= PLL_RAMP_UP_TIME_0(4);
				if (data != orig)
					WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0, data);

				data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1);
				data &= ~PLL_RAMP_UP_TIME_1_MASK;
				data |= PLL_RAMP_UP_TIME_1(4);
				if (data != orig)
					WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1, data);

				data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0);
				data &= ~PLL_RAMP_UP_TIME_0_MASK;
				data |= PLL_RAMP_UP_TIME_0(4);
				if (data != orig)
					WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0, data);

				data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1);
				data &= ~PLL_RAMP_UP_TIME_1_MASK;
				data |= PLL_RAMP_UP_TIME_1(4);
				if (data != orig)
					WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1, data);
			}

			data = orig = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
			data &= ~LC_DYN_LANES_PWR_STATE_MASK;
			data |= LC_DYN_LANES_PWR_STATE(3);
			if (data != orig)
				WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, data);

			if (rdev->family >= CHIP_BARTS) {
				data = orig = RREG32_PIF_PHY0(PB0_PIF_CNTL);
				data &= ~LS2_EXIT_TIME_MASK;
				data |= LS2_EXIT_TIME(1);
				if (data != orig)
					WREG32_PIF_PHY0(PB0_PIF_CNTL, data);

				data = orig = RREG32_PIF_PHY1(PB1_PIF_CNTL);
				data &= ~LS2_EXIT_TIME_MASK;
				data |= LS2_EXIT_TIME(1);
				if (data != orig)
					WREG32_PIF_PHY1(PB1_PIF_CNTL, data);
			}
		}
	}

	/* evergreen parts only */
	if (rdev->family < CHIP_BARTS)
		pcie_lc_cntl |= LC_PMI_TO_L1_DIS;

	if (pcie_lc_cntl != pcie_lc_cntl_old)
		WREG32_PCIE_PORT(PCIE_LC_CNTL, pcie_lc_cntl);
}
6130