xref: /dragonfly/sys/dev/drm/radeon/evergreen.c (revision aabd9311)
1 /*
2  * Copyright 2010 Advanced Micro Devices, Inc.
3  *
4  * Permission is hereby granted, free of charge, to any person obtaining a
5  * copy of this software and associated documentation files (the "Software"),
6  * to deal in the Software without restriction, including without limitation
7  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8  * and/or sell copies of the Software, and to permit persons to whom the
9  * Software is furnished to do so, subject to the following conditions:
10  *
11  * The above copyright notice and this permission notice shall be included in
12  * all copies or substantial portions of the Software.
13  *
14  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
17  * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18  * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19  * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20  * OTHER DEALINGS IN THE SOFTWARE.
21  *
22  * Authors: Alex Deucher
23  */
24 #include <linux/firmware.h>
25 #include <drm/drmP.h>
26 #include "radeon.h"
27 #include "radeon_asic.h"
28 #include <uapi_drm/radeon_drm.h>
29 #include "evergreend.h"
30 #include "atom.h"
31 #include "avivod.h"
32 #include "evergreen_reg.h"
33 #include "evergreen_blit_shaders.h"
34 #include "radeon_ucode.h"
35 
/*
 * MMIO register-block offsets for the six display controllers,
 * indexed by CRTC id.
 */
static const u32 crtc_offsets[6] =
{
	EVERGREEN_CRTC0_REGISTER_OFFSET,
	EVERGREEN_CRTC1_REGISTER_OFFSET,
	EVERGREEN_CRTC2_REGISTER_OFFSET,
	EVERGREEN_CRTC3_REGISTER_OFFSET,
	EVERGREEN_CRTC4_REGISTER_OFFSET,
	EVERGREEN_CRTC5_REGISTER_OFFSET
};
45 
46 #include "clearstate_evergreen.h"
47 
/*
 * Raw MMIO register offsets for the Sumo-family RLC save/restore list.
 * NOTE(review): presumably the registers the RLC microcode saves and
 * restores across power gating — confirm against the RLC setup code
 * that consumes this table.  Order and contents are hardware-defined;
 * do not reorder or edit without AMD documentation.
 */
static const u32 sumo_rlc_save_restore_register_list[] =
{
	0x98fc,
	0x9830,
	0x9834,
	0x9838,
	0x9870,
	0x9874,
	0x8a14,
	0x8b24,
	0x8bcc,
	0x8b10,
	0x8d00,
	0x8d04,
	0x8c00,
	0x8c04,
	0x8c08,
	0x8c0c,
	0x8d8c,
	0x8c20,
	0x8c24,
	0x8c28,
	0x8c18,
	0x8c1c,
	0x8cf0,
	0x8e2c,
	0x8e38,
	0x8c30,
	0x9508,
	0x9688,
	0x9608,
	0x960c,
	0x9610,
	0x9614,
	0x88c4,
	0x88d4,
	0xa008,
	0x900c,
	0x9100,
	0x913c,
	0x98f8,
	0x98f4,
	0x9b7c,
	0x3f8c,
	0x8950,
	0x8954,
	0x8a18,
	0x8b28,
	0x9144,
	0x9148,
	0x914c,
	0x3f90,
	0x3f94,
	0x915c,
	0x9160,
	0x9178,
	0x917c,
	0x9180,
	0x918c,
	0x9190,
	0x9194,
	0x9198,
	0x919c,
	0x91a8,
	0x91ac,
	0x91b0,
	0x91b4,
	0x91b8,
	0x91c4,
	0x91c8,
	0x91cc,
	0x91d0,
	0x91d4,
	0x91e0,
	0x91e4,
	0x91ec,
	0x91f0,
	0x91f4,
	0x9200,
	0x9204,
	0x929c,
	0x9150,
	0x802c,
};
132 
133 static void evergreen_gpu_init(struct radeon_device *rdev);
134 
/*
 * "Golden" register settings shared by the discrete Evergreen parts
 * (Cypress/Hemlock/Juniper/Redwood).  Entries are {offset, mask, value}
 * triplets consumed by radeon_program_register_sequence() from
 * evergreen_init_golden_registers(); exact masked-update semantics are
 * defined by that helper.  Values are hardware magic — do not edit.
 */
static const u32 evergreen_golden_registers[] =
{
	0x3f90, 0xffff0000, 0xff000000,
	0x9148, 0xffff0000, 0xff000000,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0x9b7c, 0xffffffff, 0x00000000,
	0x8a14, 0xffffffff, 0x00000007,
	0x8b10, 0xffffffff, 0x00000000,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0xffffffff, 0x000000c2,
	0x88d4, 0xffffffff, 0x00000010,
	0x8974, 0xffffffff, 0x00000000,
	0xc78, 0x00000080, 0x00000080,
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0xffffffff, 0x001000f0,
	0x6104, 0x01000300, 0x00000000,
	0x5bc0, 0x00300000, 0x00000000,
	0x7030, 0xffffffff, 0x00000011,
	0x7c30, 0xffffffff, 0x00000011,
	0x10830, 0xffffffff, 0x00000011,
	0x11430, 0xffffffff, 0x00000011,
	0x12030, 0xffffffff, 0x00000011,
	0x12c30, 0xffffffff, 0x00000011,
	0xd02c, 0xffffffff, 0x08421000,
	0x240c, 0xffffffff, 0x00000380,
	0x8b24, 0xffffffff, 0x00ff0fff,
	0x28a4c, 0x06000000, 0x06000000,
	0x10c, 0x00000001, 0x00000001,
	0x8d00, 0xffffffff, 0x100e4848,
	0x8d04, 0xffffffff, 0x00164745,
	0x8c00, 0xffffffff, 0xe4000003,
	0x8c04, 0xffffffff, 0x40600060,
	0x8c08, 0xffffffff, 0x001c001c,
	0x8cf0, 0xffffffff, 0x08e00620,
	0x8c20, 0xffffffff, 0x00800080,
	0x8c24, 0xffffffff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0xffffffff, 0x00001010,
	0x28350, 0xffffffff, 0x00000000,
	0xa008, 0xffffffff, 0x00010000,
	0x5c4, 0xffffffff, 0x00000001,
	0x9508, 0xffffffff, 0x00000002,
	0x913c, 0x0000000f, 0x0000000a
};
180 
/*
 * Second common Evergreen "golden" sequence ({offset, mask, value}
 * triplets), programmed after evergreen_golden_registers by
 * evergreen_init_golden_registers().  All entries clear their target
 * registers to zero.
 */
static const u32 evergreen_golden_registers2[] =
{
	0x2f4c, 0xffffffff, 0x00000000,
	0x54f4, 0xffffffff, 0x00000000,
	0x54f0, 0xffffffff, 0x00000000,
	0x5498, 0xffffffff, 0x00000000,
	0x549c, 0xffffffff, 0x00000000,
	0x5494, 0xffffffff, 0x00000000,
	0x53cc, 0xffffffff, 0x00000000,
	0x53c8, 0xffffffff, 0x00000000,
	0x53c4, 0xffffffff, 0x00000000,
	0x53c0, 0xffffffff, 0x00000000,
	0x53bc, 0xffffffff, 0x00000000,
	0x53b8, 0xffffffff, 0x00000000,
	0x53b4, 0xffffffff, 0x00000000,
	0x53b0, 0xffffffff, 0x00000000
};
198 
/*
 * Clock-gating init sequence for Cypress/Hemlock ({offset, mask, value}
 * triplets; "mgcg" presumably = medium grain clock gating — name-based,
 * not confirmed here).  Applied by evergreen_init_golden_registers()
 * via radeon_program_register_sequence().  Hardware magic — do not edit.
 */
static const u32 cypress_mgcg_init[] =
{
	0x802c, 0xffffffff, 0xc0000000,
	0x5448, 0xffffffff, 0x00000100,
	0x55e4, 0xffffffff, 0x00000100,
	0x160c, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0xc164, 0xffffffff, 0x00000100,
	0x8a18, 0xffffffff, 0x00000100,
	0x897c, 0xffffffff, 0x06000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9a60, 0xffffffff, 0x00000100,
	0x9868, 0xffffffff, 0x00000100,
	0x8d58, 0xffffffff, 0x00000100,
	0x9510, 0xffffffff, 0x00000100,
	0x949c, 0xffffffff, 0x00000100,
	0x9654, 0xffffffff, 0x00000100,
	0x9030, 0xffffffff, 0x00000100,
	0x9034, 0xffffffff, 0x00000100,
	0x9038, 0xffffffff, 0x00000100,
	0x903c, 0xffffffff, 0x00000100,
	0x9040, 0xffffffff, 0x00000100,
	0xa200, 0xffffffff, 0x00000100,
	0xa204, 0xffffffff, 0x00000100,
	0xa208, 0xffffffff, 0x00000100,
	0xa20c, 0xffffffff, 0x00000100,
	0x971c, 0xffffffff, 0x00000100,
	0x977c, 0xffffffff, 0x00000100,
	0x3f80, 0xffffffff, 0x00000100,
	0xa210, 0xffffffff, 0x00000100,
	0xa214, 0xffffffff, 0x00000100,
	0x4d8, 0xffffffff, 0x00000100,
	0x9784, 0xffffffff, 0x00000100,
	0x9698, 0xffffffff, 0x00000100,
	0x4d4, 0xffffffff, 0x00000200,
	0x30cc, 0xffffffff, 0x00000100,
	0xd0c0, 0xffffffff, 0xff000100,
	0x802c, 0xffffffff, 0x40000000,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x9178, 0xffffffff, 0x00070000,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x918c, 0xffffffff, 0x00010006,
	0x9190, 0xffffffff, 0x00090008,
	0x9194, 0xffffffff, 0x00070000,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91a8, 0xffffffff, 0x00010006,
	0x91ac, 0xffffffff, 0x00090008,
	0x91b0, 0xffffffff, 0x00070000,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91c4, 0xffffffff, 0x00010006,
	0x91c8, 0xffffffff, 0x00090008,
	0x91cc, 0xffffffff, 0x00070000,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91e0, 0xffffffff, 0x00010006,
	0x91e4, 0xffffffff, 0x00090008,
	0x91e8, 0xffffffff, 0x00000000,
	0x91ec, 0xffffffff, 0x00070000,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x9200, 0xffffffff, 0x00010006,
	0x9204, 0xffffffff, 0x00090008,
	0x9208, 0xffffffff, 0x00070000,
	0x920c, 0xffffffff, 0x00030002,
	0x9210, 0xffffffff, 0x00050004,
	0x921c, 0xffffffff, 0x00010006,
	0x9220, 0xffffffff, 0x00090008,
	0x9224, 0xffffffff, 0x00070000,
	0x9228, 0xffffffff, 0x00030002,
	0x922c, 0xffffffff, 0x00050004,
	0x9238, 0xffffffff, 0x00010006,
	0x923c, 0xffffffff, 0x00090008,
	0x9240, 0xffffffff, 0x00070000,
	0x9244, 0xffffffff, 0x00030002,
	0x9248, 0xffffffff, 0x00050004,
	0x9254, 0xffffffff, 0x00010006,
	0x9258, 0xffffffff, 0x00090008,
	0x925c, 0xffffffff, 0x00070000,
	0x9260, 0xffffffff, 0x00030002,
	0x9264, 0xffffffff, 0x00050004,
	0x9270, 0xffffffff, 0x00010006,
	0x9274, 0xffffffff, 0x00090008,
	0x9278, 0xffffffff, 0x00070000,
	0x927c, 0xffffffff, 0x00030002,
	0x9280, 0xffffffff, 0x00050004,
	0x928c, 0xffffffff, 0x00010006,
	0x9290, 0xffffffff, 0x00090008,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0x40010000,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x9178, 0xffffffff, 0x00070000,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x918c, 0xffffffff, 0x00010006,
	0x9190, 0xffffffff, 0x00090008,
	0x9194, 0xffffffff, 0x00070000,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91a8, 0xffffffff, 0x00010006,
	0x91ac, 0xffffffff, 0x00090008,
	0x91b0, 0xffffffff, 0x00070000,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91c4, 0xffffffff, 0x00010006,
	0x91c8, 0xffffffff, 0x00090008,
	0x91cc, 0xffffffff, 0x00070000,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91e0, 0xffffffff, 0x00010006,
	0x91e4, 0xffffffff, 0x00090008,
	0x91e8, 0xffffffff, 0x00000000,
	0x91ec, 0xffffffff, 0x00070000,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x9200, 0xffffffff, 0x00010006,
	0x9204, 0xffffffff, 0x00090008,
	0x9208, 0xffffffff, 0x00070000,
	0x920c, 0xffffffff, 0x00030002,
	0x9210, 0xffffffff, 0x00050004,
	0x921c, 0xffffffff, 0x00010006,
	0x9220, 0xffffffff, 0x00090008,
	0x9224, 0xffffffff, 0x00070000,
	0x9228, 0xffffffff, 0x00030002,
	0x922c, 0xffffffff, 0x00050004,
	0x9238, 0xffffffff, 0x00010006,
	0x923c, 0xffffffff, 0x00090008,
	0x9240, 0xffffffff, 0x00070000,
	0x9244, 0xffffffff, 0x00030002,
	0x9248, 0xffffffff, 0x00050004,
	0x9254, 0xffffffff, 0x00010006,
	0x9258, 0xffffffff, 0x00090008,
	0x925c, 0xffffffff, 0x00070000,
	0x9260, 0xffffffff, 0x00030002,
	0x9264, 0xffffffff, 0x00050004,
	0x9270, 0xffffffff, 0x00010006,
	0x9274, 0xffffffff, 0x00090008,
	0x9278, 0xffffffff, 0x00070000,
	0x927c, 0xffffffff, 0x00030002,
	0x9280, 0xffffffff, 0x00050004,
	0x928c, 0xffffffff, 0x00010006,
	0x9290, 0xffffffff, 0x00090008,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0xc0000000
};
351 
/*
 * Clock-gating init sequence for Redwood ({offset, mask, value}
 * triplets), applied by evergreen_init_golden_registers() via
 * radeon_program_register_sequence().  Hardware magic — do not edit.
 */
static const u32 redwood_mgcg_init[] =
{
	0x802c, 0xffffffff, 0xc0000000,
	0x5448, 0xffffffff, 0x00000100,
	0x55e4, 0xffffffff, 0x00000100,
	0x160c, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0xc164, 0xffffffff, 0x00000100,
	0x8a18, 0xffffffff, 0x00000100,
	0x897c, 0xffffffff, 0x06000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9a60, 0xffffffff, 0x00000100,
	0x9868, 0xffffffff, 0x00000100,
	0x8d58, 0xffffffff, 0x00000100,
	0x9510, 0xffffffff, 0x00000100,
	0x949c, 0xffffffff, 0x00000100,
	0x9654, 0xffffffff, 0x00000100,
	0x9030, 0xffffffff, 0x00000100,
	0x9034, 0xffffffff, 0x00000100,
	0x9038, 0xffffffff, 0x00000100,
	0x903c, 0xffffffff, 0x00000100,
	0x9040, 0xffffffff, 0x00000100,
	0xa200, 0xffffffff, 0x00000100,
	0xa204, 0xffffffff, 0x00000100,
	0xa208, 0xffffffff, 0x00000100,
	0xa20c, 0xffffffff, 0x00000100,
	0x971c, 0xffffffff, 0x00000100,
	0x977c, 0xffffffff, 0x00000100,
	0x3f80, 0xffffffff, 0x00000100,
	0xa210, 0xffffffff, 0x00000100,
	0xa214, 0xffffffff, 0x00000100,
	0x4d8, 0xffffffff, 0x00000100,
	0x9784, 0xffffffff, 0x00000100,
	0x9698, 0xffffffff, 0x00000100,
	0x4d4, 0xffffffff, 0x00000200,
	0x30cc, 0xffffffff, 0x00000100,
	0xd0c0, 0xffffffff, 0xff000100,
	0x802c, 0xffffffff, 0x40000000,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x9178, 0xffffffff, 0x00070000,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x918c, 0xffffffff, 0x00010006,
	0x9190, 0xffffffff, 0x00090008,
	0x9194, 0xffffffff, 0x00070000,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91a8, 0xffffffff, 0x00010006,
	0x91ac, 0xffffffff, 0x00090008,
	0x91b0, 0xffffffff, 0x00070000,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91c4, 0xffffffff, 0x00010006,
	0x91c8, 0xffffffff, 0x00090008,
	0x91cc, 0xffffffff, 0x00070000,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91e0, 0xffffffff, 0x00010006,
	0x91e4, 0xffffffff, 0x00090008,
	0x91e8, 0xffffffff, 0x00000000,
	0x91ec, 0xffffffff, 0x00070000,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x9200, 0xffffffff, 0x00010006,
	0x9204, 0xffffffff, 0x00090008,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0xc0000000
};
423 
/*
 * "Golden" register settings for Cedar ({offset, mask, value} triplets),
 * applied by evergreen_init_golden_registers() via
 * radeon_program_register_sequence().  Hardware magic — do not edit.
 */
static const u32 cedar_golden_registers[] =
{
	0x3f90, 0xffff0000, 0xff000000,
	0x9148, 0xffff0000, 0xff000000,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0x9b7c, 0xffffffff, 0x00000000,
	0x8a14, 0xffffffff, 0x00000007,
	0x8b10, 0xffffffff, 0x00000000,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0xffffffff, 0x000000c2,
	0x88d4, 0xffffffff, 0x00000000,
	0x8974, 0xffffffff, 0x00000000,
	0xc78, 0x00000080, 0x00000080,
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0xffffffff, 0x001000f0,
	0x6104, 0x01000300, 0x00000000,
	0x5bc0, 0x00300000, 0x00000000,
	0x7030, 0xffffffff, 0x00000011,
	0x7c30, 0xffffffff, 0x00000011,
	0x10830, 0xffffffff, 0x00000011,
	0x11430, 0xffffffff, 0x00000011,
	0xd02c, 0xffffffff, 0x08421000,
	0x240c, 0xffffffff, 0x00000380,
	0x8b24, 0xffffffff, 0x00ff0fff,
	0x28a4c, 0x06000000, 0x06000000,
	0x10c, 0x00000001, 0x00000001,
	0x8d00, 0xffffffff, 0x100e4848,
	0x8d04, 0xffffffff, 0x00164745,
	0x8c00, 0xffffffff, 0xe4000003,
	0x8c04, 0xffffffff, 0x40600060,
	0x8c08, 0xffffffff, 0x001c001c,
	0x8cf0, 0xffffffff, 0x08e00410,
	0x8c20, 0xffffffff, 0x00800080,
	0x8c24, 0xffffffff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0xffffffff, 0x00001010,
	0x28350, 0xffffffff, 0x00000000,
	0xa008, 0xffffffff, 0x00010000,
	0x5c4, 0xffffffff, 0x00000001,
	0x9508, 0xffffffff, 0x00000002
};
466 
/*
 * Clock-gating init sequence for Cedar ({offset, mask, value} triplets),
 * applied by evergreen_init_golden_registers() via
 * radeon_program_register_sequence().  Hardware magic — do not edit.
 */
static const u32 cedar_mgcg_init[] =
{
	0x802c, 0xffffffff, 0xc0000000,
	0x5448, 0xffffffff, 0x00000100,
	0x55e4, 0xffffffff, 0x00000100,
	0x160c, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0xc164, 0xffffffff, 0x00000100,
	0x8a18, 0xffffffff, 0x00000100,
	0x897c, 0xffffffff, 0x06000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9a60, 0xffffffff, 0x00000100,
	0x9868, 0xffffffff, 0x00000100,
	0x8d58, 0xffffffff, 0x00000100,
	0x9510, 0xffffffff, 0x00000100,
	0x949c, 0xffffffff, 0x00000100,
	0x9654, 0xffffffff, 0x00000100,
	0x9030, 0xffffffff, 0x00000100,
	0x9034, 0xffffffff, 0x00000100,
	0x9038, 0xffffffff, 0x00000100,
	0x903c, 0xffffffff, 0x00000100,
	0x9040, 0xffffffff, 0x00000100,
	0xa200, 0xffffffff, 0x00000100,
	0xa204, 0xffffffff, 0x00000100,
	0xa208, 0xffffffff, 0x00000100,
	0xa20c, 0xffffffff, 0x00000100,
	0x971c, 0xffffffff, 0x00000100,
	0x977c, 0xffffffff, 0x00000100,
	0x3f80, 0xffffffff, 0x00000100,
	0xa210, 0xffffffff, 0x00000100,
	0xa214, 0xffffffff, 0x00000100,
	0x4d8, 0xffffffff, 0x00000100,
	0x9784, 0xffffffff, 0x00000100,
	0x9698, 0xffffffff, 0x00000100,
	0x4d4, 0xffffffff, 0x00000200,
	0x30cc, 0xffffffff, 0x00000100,
	0xd0c0, 0xffffffff, 0xff000100,
	0x802c, 0xffffffff, 0x40000000,
	0x915c, 0xffffffff, 0x00010000,
	0x9178, 0xffffffff, 0x00050000,
	0x917c, 0xffffffff, 0x00030002,
	0x918c, 0xffffffff, 0x00010004,
	0x9190, 0xffffffff, 0x00070006,
	0x9194, 0xffffffff, 0x00050000,
	0x9198, 0xffffffff, 0x00030002,
	0x91a8, 0xffffffff, 0x00010004,
	0x91ac, 0xffffffff, 0x00070006,
	0x91e8, 0xffffffff, 0x00000000,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0xc0000000
};
520 
/*
 * Clock-gating init sequence for Juniper ({offset, mask, value}
 * triplets), applied by evergreen_init_golden_registers() via
 * radeon_program_register_sequence().  Hardware magic — do not edit.
 */
static const u32 juniper_mgcg_init[] =
{
	0x802c, 0xffffffff, 0xc0000000,
	0x5448, 0xffffffff, 0x00000100,
	0x55e4, 0xffffffff, 0x00000100,
	0x160c, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0xc164, 0xffffffff, 0x00000100,
	0x8a18, 0xffffffff, 0x00000100,
	0x897c, 0xffffffff, 0x06000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9a60, 0xffffffff, 0x00000100,
	0x9868, 0xffffffff, 0x00000100,
	0x8d58, 0xffffffff, 0x00000100,
	0x9510, 0xffffffff, 0x00000100,
	0x949c, 0xffffffff, 0x00000100,
	0x9654, 0xffffffff, 0x00000100,
	0x9030, 0xffffffff, 0x00000100,
	0x9034, 0xffffffff, 0x00000100,
	0x9038, 0xffffffff, 0x00000100,
	0x903c, 0xffffffff, 0x00000100,
	0x9040, 0xffffffff, 0x00000100,
	0xa200, 0xffffffff, 0x00000100,
	0xa204, 0xffffffff, 0x00000100,
	0xa208, 0xffffffff, 0x00000100,
	0xa20c, 0xffffffff, 0x00000100,
	0x971c, 0xffffffff, 0x00000100,
	0xd0c0, 0xffffffff, 0xff000100,
	0x802c, 0xffffffff, 0x40000000,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x9178, 0xffffffff, 0x00070000,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x918c, 0xffffffff, 0x00010006,
	0x9190, 0xffffffff, 0x00090008,
	0x9194, 0xffffffff, 0x00070000,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91a8, 0xffffffff, 0x00010006,
	0x91ac, 0xffffffff, 0x00090008,
	0x91b0, 0xffffffff, 0x00070000,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91c4, 0xffffffff, 0x00010006,
	0x91c8, 0xffffffff, 0x00090008,
	0x91cc, 0xffffffff, 0x00070000,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91e0, 0xffffffff, 0x00010006,
	0x91e4, 0xffffffff, 0x00090008,
	0x91e8, 0xffffffff, 0x00000000,
	0x91ec, 0xffffffff, 0x00070000,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x9200, 0xffffffff, 0x00010006,
	0x9204, 0xffffffff, 0x00090008,
	0x9208, 0xffffffff, 0x00070000,
	0x920c, 0xffffffff, 0x00030002,
	0x9210, 0xffffffff, 0x00050004,
	0x921c, 0xffffffff, 0x00010006,
	0x9220, 0xffffffff, 0x00090008,
	0x9224, 0xffffffff, 0x00070000,
	0x9228, 0xffffffff, 0x00030002,
	0x922c, 0xffffffff, 0x00050004,
	0x9238, 0xffffffff, 0x00010006,
	0x923c, 0xffffffff, 0x00090008,
	0x9240, 0xffffffff, 0x00070000,
	0x9244, 0xffffffff, 0x00030002,
	0x9248, 0xffffffff, 0x00050004,
	0x9254, 0xffffffff, 0x00010006,
	0x9258, 0xffffffff, 0x00090008,
	0x925c, 0xffffffff, 0x00070000,
	0x9260, 0xffffffff, 0x00030002,
	0x9264, 0xffffffff, 0x00050004,
	0x9270, 0xffffffff, 0x00010006,
	0x9274, 0xffffffff, 0x00090008,
	0x9278, 0xffffffff, 0x00070000,
	0x927c, 0xffffffff, 0x00030002,
	0x9280, 0xffffffff, 0x00050004,
	0x928c, 0xffffffff, 0x00010006,
	0x9290, 0xffffffff, 0x00090008,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0xc0000000,
	0x977c, 0xffffffff, 0x00000100,
	0x3f80, 0xffffffff, 0x00000100,
	0xa210, 0xffffffff, 0x00000100,
	0xa214, 0xffffffff, 0x00000100,
	0x4d8, 0xffffffff, 0x00000100,
	0x9784, 0xffffffff, 0x00000100,
	0x9698, 0xffffffff, 0x00000100,
	0x4d4, 0xffffffff, 0x00000200,
	0x30cc, 0xffffffff, 0x00000100,
	0x802c, 0xffffffff, 0xc0000000
};
618 
/*
 * "Golden" register settings for SuperSumo (used for both CHIP_SUMO
 * and CHIP_SUMO2; see evergreen_init_golden_registers()).  Entries are
 * {offset, mask, value} triplets programmed via
 * radeon_program_register_sequence().  Hardware magic — do not edit.
 */
static const u32 supersumo_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5c4, 0xffffffff, 0x00000001,
	0x7030, 0xffffffff, 0x00000011,
	0x7c30, 0xffffffff, 0x00000011,
	0x6104, 0x01000300, 0x00000000,
	0x5bc0, 0x00300000, 0x00000000,
	0x8c04, 0xffffffff, 0x40600060,
	0x8c08, 0xffffffff, 0x001c001c,
	0x8c20, 0xffffffff, 0x00800080,
	0x8c24, 0xffffffff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0xffffffff, 0x00001010,
	0x918c, 0xffffffff, 0x00010006,
	0x91a8, 0xffffffff, 0x00010006,
	0x91c4, 0xffffffff, 0x00010006,
	0x91e0, 0xffffffff, 0x00010006,
	0x9200, 0xffffffff, 0x00010006,
	0x9150, 0xffffffff, 0x6e944040,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x3f90, 0xffff0000, 0xff000000,
	0x9178, 0xffffffff, 0x00070000,
	0x9194, 0xffffffff, 0x00070000,
	0x91b0, 0xffffffff, 0x00070000,
	0x91cc, 0xffffffff, 0x00070000,
	0x91ec, 0xffffffff, 0x00070000,
	0x9148, 0xffff0000, 0xff000000,
	0x9190, 0xffffffff, 0x00090008,
	0x91ac, 0xffffffff, 0x00090008,
	0x91c8, 0xffffffff, 0x00090008,
	0x91e4, 0xffffffff, 0x00090008,
	0x9204, 0xffffffff, 0x00090008,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0x929c, 0xffffffff, 0x00000001,
	0x8a18, 0xffffffff, 0x00000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0x9b7c, 0xffffffff, 0x00000000,
	0x8030, 0xffffffff, 0x0000100a,
	0x8a14, 0xffffffff, 0x00000007,
	0x8b24, 0xffffffff, 0x00ff0fff,
	0x8b10, 0xffffffff, 0x00000000,
	0x28a4c, 0x06000000, 0x06000000,
	0x4d8, 0xffffffff, 0x00000100,
	0x913c, 0xffff000f, 0x0100000a,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0xffffffff, 0x000000c2,
	0x88d4, 0xffffffff, 0x00000010,
	0x8974, 0xffffffff, 0x00000000,
	0xc78, 0x00000080, 0x00000080,
	0x5e78, 0xffffffff, 0x001000f0,
	0xd02c, 0xffffffff, 0x08421000,
	0xa008, 0xffffffff, 0x00010000,
	0x8d00, 0xffffffff, 0x100e4848,
	0x8d04, 0xffffffff, 0x00164745,
	0x8c00, 0xffffffff, 0xe4000003,
	0x8cf0, 0x1fffffff, 0x08e00620,
	0x28350, 0xffffffff, 0x00000000,
	0x9508, 0xffffffff, 0x00000002
};
693 
/*
 * Extra "golden" overrides for Sumo2, programmed after
 * supersumo_golden_registers (see evergreen_init_golden_registers()).
 * Entries are {offset, mask, value} triplets.
 */
static const u32 sumo_golden_registers[] =
{
	0x900c, 0x00ffffff, 0x0017071f,
	0x8c18, 0xffffffff, 0x10101060,
	0x8c1c, 0xffffffff, 0x00001010,
	0x8c30, 0x0000000f, 0x00000005,
	0x9688, 0x0000000f, 0x00000007
};
702 
/*
 * "Golden" register settings for Wrestler (CHIP_PALM; see
 * evergreen_init_golden_registers()).  Entries are {offset, mask, value}
 * triplets programmed via radeon_program_register_sequence().
 * Hardware magic — do not edit.
 */
static const u32 wrestler_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5c4, 0xffffffff, 0x00000001,
	0x7030, 0xffffffff, 0x00000011,
	0x7c30, 0xffffffff, 0x00000011,
	0x6104, 0x01000300, 0x00000000,
	0x5bc0, 0x00300000, 0x00000000,
	0x918c, 0xffffffff, 0x00010006,
	0x91a8, 0xffffffff, 0x00010006,
	0x9150, 0xffffffff, 0x6e944040,
	0x917c, 0xffffffff, 0x00030002,
	0x9198, 0xffffffff, 0x00030002,
	0x915c, 0xffffffff, 0x00010000,
	0x3f90, 0xffff0000, 0xff000000,
	0x9178, 0xffffffff, 0x00070000,
	0x9194, 0xffffffff, 0x00070000,
	0x9148, 0xffff0000, 0xff000000,
	0x9190, 0xffffffff, 0x00090008,
	0x91ac, 0xffffffff, 0x00090008,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0x929c, 0xffffffff, 0x00000001,
	0x8a18, 0xffffffff, 0x00000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9b7c, 0xffffffff, 0x00000000,
	0x8030, 0xffffffff, 0x0000100a,
	0x8a14, 0xffffffff, 0x00000001,
	0x8b24, 0xffffffff, 0x00ff0fff,
	0x8b10, 0xffffffff, 0x00000000,
	0x28a4c, 0x06000000, 0x06000000,
	0x4d8, 0xffffffff, 0x00000100,
	0x913c, 0xffff000f, 0x0100000a,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0xffffffff, 0x000000c2,
	0x88d4, 0xffffffff, 0x00000010,
	0x8974, 0xffffffff, 0x00000000,
	0xc78, 0x00000080, 0x00000080,
	0x5e78, 0xffffffff, 0x001000f0,
	0xd02c, 0xffffffff, 0x08421000,
	0xa008, 0xffffffff, 0x00010000,
	0x8d00, 0xffffffff, 0x100e4848,
	0x8d04, 0xffffffff, 0x00164745,
	0x8c00, 0xffffffff, 0xe4000003,
	0x8cf0, 0x1fffffff, 0x08e00410,
	0x28350, 0xffffffff, 0x00000000,
	0x9508, 0xffffffff, 0x00000002,
	0x900c, 0xffffffff, 0x0017071f,
	0x8c18, 0xffffffff, 0x10101060,
	0x8c1c, 0xffffffff, 0x00001010
};
755 
/*
 * "Golden" register settings for Barts ({offset, mask, value} triplets),
 * applied by evergreen_init_golden_registers() via
 * radeon_program_register_sequence().  Hardware magic — do not edit.
 */
static const u32 barts_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0x8f311ff1, 0x001000f0,
	0x3f90, 0xffff0000, 0xff000000,
	0x9148, 0xffff0000, 0xff000000,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0xc78, 0x00000080, 0x00000080,
	0xbd4, 0x70073777, 0x00010001,
	0xd02c, 0xbfffff1f, 0x08421000,
	0xd0b8, 0x03773777, 0x02011003,
	0x5bc0, 0x00200000, 0x50100000,
	0x98f8, 0x33773777, 0x02011003,
	0x98fc, 0xffffffff, 0x76543210,
	0x7030, 0x31000311, 0x00000011,
	0x2f48, 0x00000007, 0x02011003,
	0x6b28, 0x00000010, 0x00000012,
	0x7728, 0x00000010, 0x00000012,
	0x10328, 0x00000010, 0x00000012,
	0x10f28, 0x00000010, 0x00000012,
	0x11b28, 0x00000010, 0x00000012,
	0x12728, 0x00000010, 0x00000012,
	0x240c, 0x000007ff, 0x00000380,
	0x8a14, 0xf000001f, 0x00000007,
	0x8b24, 0x3fff3fff, 0x00ff0fff,
	0x8b10, 0x0000ff0f, 0x00000000,
	0x28a4c, 0x07ffffff, 0x06000000,
	0x10c, 0x00000001, 0x00010003,
	0xa02c, 0xffffffff, 0x0000009b,
	0x913c, 0x0000000f, 0x0100000a,
	0x8d00, 0xffff7f7f, 0x100e4848,
	0x8d04, 0x00ffffff, 0x00164745,
	0x8c00, 0xfffc0003, 0xe4000003,
	0x8c04, 0xf8ff00ff, 0x40600060,
	0x8c08, 0x00ff00ff, 0x001c001c,
	0x8cf0, 0x1fff1fff, 0x08e00620,
	0x8c20, 0x0fff0fff, 0x00800080,
	0x8c24, 0x0fff0fff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0x0000ffff, 0x00001010,
	0x28350, 0x00000f01, 0x00000000,
	0x9508, 0x3700001f, 0x00000002,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0x001f3ae3, 0x000000c2,
	0x88d4, 0x0000001f, 0x00000010,
	0x8974, 0xffffffff, 0x00000000
};
804 
/*
 * "Golden" register settings for Turks ({offset, mask, value} triplets),
 * applied by evergreen_init_golden_registers() via
 * radeon_program_register_sequence().  Hardware magic — do not edit.
 */
static const u32 turks_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0x8f311ff1, 0x001000f0,
	0x8c8, 0x00003000, 0x00001070,
	0x8cc, 0x000fffff, 0x00040035,
	0x3f90, 0xffff0000, 0xfff00000,
	0x9148, 0xffff0000, 0xfff00000,
	0x3f94, 0xffff0000, 0xfff00000,
	0x914c, 0xffff0000, 0xfff00000,
	0xc78, 0x00000080, 0x00000080,
	0xbd4, 0x00073007, 0x00010002,
	0xd02c, 0xbfffff1f, 0x08421000,
	0xd0b8, 0x03773777, 0x02010002,
	0x5bc0, 0x00200000, 0x50100000,
	0x98f8, 0x33773777, 0x00010002,
	0x98fc, 0xffffffff, 0x33221100,
	0x7030, 0x31000311, 0x00000011,
	0x2f48, 0x33773777, 0x00010002,
	0x6b28, 0x00000010, 0x00000012,
	0x7728, 0x00000010, 0x00000012,
	0x10328, 0x00000010, 0x00000012,
	0x10f28, 0x00000010, 0x00000012,
	0x11b28, 0x00000010, 0x00000012,
	0x12728, 0x00000010, 0x00000012,
	0x240c, 0x000007ff, 0x00000380,
	0x8a14, 0xf000001f, 0x00000007,
	0x8b24, 0x3fff3fff, 0x00ff0fff,
	0x8b10, 0x0000ff0f, 0x00000000,
	0x28a4c, 0x07ffffff, 0x06000000,
	0x10c, 0x00000001, 0x00010003,
	0xa02c, 0xffffffff, 0x0000009b,
	0x913c, 0x0000000f, 0x0100000a,
	0x8d00, 0xffff7f7f, 0x100e4848,
	0x8d04, 0x00ffffff, 0x00164745,
	0x8c00, 0xfffc0003, 0xe4000003,
	0x8c04, 0xf8ff00ff, 0x40600060,
	0x8c08, 0x00ff00ff, 0x001c001c,
	0x8cf0, 0x1fff1fff, 0x08e00410,
	0x8c20, 0x0fff0fff, 0x00800080,
	0x8c24, 0x0fff0fff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0x0000ffff, 0x00001010,
	0x28350, 0x00000f01, 0x00000000,
	0x9508, 0x3700001f, 0x00000002,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0x001f3ae3, 0x000000c2,
	0x88d4, 0x0000001f, 0x00000010,
	0x8974, 0xffffffff, 0x00000000
};
855 
/*
 * "Golden" register settings for Caicos ({offset, mask, value} triplets),
 * applied by evergreen_init_golden_registers() via
 * radeon_program_register_sequence().  Hardware magic — do not edit.
 */
static const u32 caicos_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0x8f311ff1, 0x001000f0,
	0x8c8, 0x00003420, 0x00001450,
	0x8cc, 0x000fffff, 0x00040035,
	0x3f90, 0xffff0000, 0xfffc0000,
	0x9148, 0xffff0000, 0xfffc0000,
	0x3f94, 0xffff0000, 0xfffc0000,
	0x914c, 0xffff0000, 0xfffc0000,
	0xc78, 0x00000080, 0x00000080,
	0xbd4, 0x00073007, 0x00010001,
	0xd02c, 0xbfffff1f, 0x08421000,
	0xd0b8, 0x03773777, 0x02010001,
	0x5bc0, 0x00200000, 0x50100000,
	0x98f8, 0x33773777, 0x02010001,
	0x98fc, 0xffffffff, 0x33221100,
	0x7030, 0x31000311, 0x00000011,
	0x2f48, 0x33773777, 0x02010001,
	0x6b28, 0x00000010, 0x00000012,
	0x7728, 0x00000010, 0x00000012,
	0x10328, 0x00000010, 0x00000012,
	0x10f28, 0x00000010, 0x00000012,
	0x11b28, 0x00000010, 0x00000012,
	0x12728, 0x00000010, 0x00000012,
	0x240c, 0x000007ff, 0x00000380,
	0x8a14, 0xf000001f, 0x00000001,
	0x8b24, 0x3fff3fff, 0x00ff0fff,
	0x8b10, 0x0000ff0f, 0x00000000,
	0x28a4c, 0x07ffffff, 0x06000000,
	0x10c, 0x00000001, 0x00010003,
	0xa02c, 0xffffffff, 0x0000009b,
	0x913c, 0x0000000f, 0x0100000a,
	0x8d00, 0xffff7f7f, 0x100e4848,
	0x8d04, 0x00ffffff, 0x00164745,
	0x8c00, 0xfffc0003, 0xe4000003,
	0x8c04, 0xf8ff00ff, 0x40600060,
	0x8c08, 0x00ff00ff, 0x001c001c,
	0x8cf0, 0x1fff1fff, 0x08e00410,
	0x8c20, 0x0fff0fff, 0x00800080,
	0x8c24, 0x0fff0fff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0x0000ffff, 0x00001010,
	0x28350, 0x00000f01, 0x00000000,
	0x9508, 0x3700001f, 0x00000002,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0x001f3ae3, 0x000000c2,
	0x88d4, 0x0000001f, 0x00000010,
	0x8974, 0xffffffff, 0x00000000
};
906 
907 static void evergreen_init_golden_registers(struct radeon_device *rdev)
908 {
909 	switch (rdev->family) {
910 	case CHIP_CYPRESS:
911 	case CHIP_HEMLOCK:
912 		radeon_program_register_sequence(rdev,
913 						 evergreen_golden_registers,
914 						 (const u32)ARRAY_SIZE(evergreen_golden_registers));
915 		radeon_program_register_sequence(rdev,
916 						 evergreen_golden_registers2,
917 						 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
918 		radeon_program_register_sequence(rdev,
919 						 cypress_mgcg_init,
920 						 (const u32)ARRAY_SIZE(cypress_mgcg_init));
921 		break;
922 	case CHIP_JUNIPER:
923 		radeon_program_register_sequence(rdev,
924 						 evergreen_golden_registers,
925 						 (const u32)ARRAY_SIZE(evergreen_golden_registers));
926 		radeon_program_register_sequence(rdev,
927 						 evergreen_golden_registers2,
928 						 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
929 		radeon_program_register_sequence(rdev,
930 						 juniper_mgcg_init,
931 						 (const u32)ARRAY_SIZE(juniper_mgcg_init));
932 		break;
933 	case CHIP_REDWOOD:
934 		radeon_program_register_sequence(rdev,
935 						 evergreen_golden_registers,
936 						 (const u32)ARRAY_SIZE(evergreen_golden_registers));
937 		radeon_program_register_sequence(rdev,
938 						 evergreen_golden_registers2,
939 						 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
940 		radeon_program_register_sequence(rdev,
941 						 redwood_mgcg_init,
942 						 (const u32)ARRAY_SIZE(redwood_mgcg_init));
943 		break;
944 	case CHIP_CEDAR:
945 		radeon_program_register_sequence(rdev,
946 						 cedar_golden_registers,
947 						 (const u32)ARRAY_SIZE(cedar_golden_registers));
948 		radeon_program_register_sequence(rdev,
949 						 evergreen_golden_registers2,
950 						 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
951 		radeon_program_register_sequence(rdev,
952 						 cedar_mgcg_init,
953 						 (const u32)ARRAY_SIZE(cedar_mgcg_init));
954 		break;
955 	case CHIP_PALM:
956 		radeon_program_register_sequence(rdev,
957 						 wrestler_golden_registers,
958 						 (const u32)ARRAY_SIZE(wrestler_golden_registers));
959 		break;
960 	case CHIP_SUMO:
961 		radeon_program_register_sequence(rdev,
962 						 supersumo_golden_registers,
963 						 (const u32)ARRAY_SIZE(supersumo_golden_registers));
964 		break;
965 	case CHIP_SUMO2:
966 		radeon_program_register_sequence(rdev,
967 						 supersumo_golden_registers,
968 						 (const u32)ARRAY_SIZE(supersumo_golden_registers));
969 		radeon_program_register_sequence(rdev,
970 						 sumo_golden_registers,
971 						 (const u32)ARRAY_SIZE(sumo_golden_registers));
972 		break;
973 	case CHIP_BARTS:
974 		radeon_program_register_sequence(rdev,
975 						 barts_golden_registers,
976 						 (const u32)ARRAY_SIZE(barts_golden_registers));
977 		break;
978 	case CHIP_TURKS:
979 		radeon_program_register_sequence(rdev,
980 						 turks_golden_registers,
981 						 (const u32)ARRAY_SIZE(turks_golden_registers));
982 		break;
983 	case CHIP_CAICOS:
984 		radeon_program_register_sequence(rdev,
985 						 caicos_golden_registers,
986 						 (const u32)ARRAY_SIZE(caicos_golden_registers));
987 		break;
988 	default:
989 		break;
990 	}
991 }
992 
993 void evergreen_tiling_fields(unsigned tiling_flags, unsigned *bankw,
994 			     unsigned *bankh, unsigned *mtaspect,
995 			     unsigned *tile_split)
996 {
997 	*bankw = (tiling_flags >> RADEON_TILING_EG_BANKW_SHIFT) & RADEON_TILING_EG_BANKW_MASK;
998 	*bankh = (tiling_flags >> RADEON_TILING_EG_BANKH_SHIFT) & RADEON_TILING_EG_BANKH_MASK;
999 	*mtaspect = (tiling_flags >> RADEON_TILING_EG_MACRO_TILE_ASPECT_SHIFT) & RADEON_TILING_EG_MACRO_TILE_ASPECT_MASK;
1000 	*tile_split = (tiling_flags >> RADEON_TILING_EG_TILE_SPLIT_SHIFT) & RADEON_TILING_EG_TILE_SPLIT_MASK;
1001 	switch (*bankw) {
1002 	default:
1003 	case 1: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_1; break;
1004 	case 2: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_2; break;
1005 	case 4: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_4; break;
1006 	case 8: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_8; break;
1007 	}
1008 	switch (*bankh) {
1009 	default:
1010 	case 1: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_1; break;
1011 	case 2: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_2; break;
1012 	case 4: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_4; break;
1013 	case 8: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_8; break;
1014 	}
1015 	switch (*mtaspect) {
1016 	default:
1017 	case 1: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_1; break;
1018 	case 2: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_2; break;
1019 	case 4: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_4; break;
1020 	case 8: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_8; break;
1021 	}
1022 }
1023 
1024 static int sumo_set_uvd_clock(struct radeon_device *rdev, u32 clock,
1025 			      u32 cntl_reg, u32 status_reg)
1026 {
1027 	int r, i;
1028 	struct atom_clock_dividers dividers;
1029 
1030         r = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
1031 					   clock, false, &dividers);
1032 	if (r)
1033 		return r;
1034 
1035 	WREG32_P(cntl_reg, dividers.post_div, ~(DCLK_DIR_CNTL_EN|DCLK_DIVIDER_MASK));
1036 
1037 	for (i = 0; i < 100; i++) {
1038 		if (RREG32(status_reg) & DCLK_STATUS)
1039 			break;
1040 		mdelay(10);
1041 	}
1042 	if (i == 100)
1043 		return -ETIMEDOUT;
1044 
1045 	return 0;
1046 }
1047 
1048 int sumo_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
1049 {
1050 	int r = 0;
1051 	u32 cg_scratch = RREG32(CG_SCRATCH1);
1052 
1053 	r = sumo_set_uvd_clock(rdev, vclk, CG_VCLK_CNTL, CG_VCLK_STATUS);
1054 	if (r)
1055 		goto done;
1056 	cg_scratch &= 0xffff0000;
1057 	cg_scratch |= vclk / 100; /* Mhz */
1058 
1059 	r = sumo_set_uvd_clock(rdev, dclk, CG_DCLK_CNTL, CG_DCLK_STATUS);
1060 	if (r)
1061 		goto done;
1062 	cg_scratch &= 0x0000ffff;
1063 	cg_scratch |= (dclk / 100) << 16; /* Mhz */
1064 
1065 done:
1066 	WREG32(CG_SCRATCH1, cg_scratch);
1067 
1068 	return r;
1069 }
1070 
/**
 * evergreen_set_uvd_clocks - set the UVD VCLK/DCLK via the UPLL
 *
 * @rdev: radeon_device pointer
 * @vclk: requested video clock (0 puts the PLL to sleep)
 * @dclk: requested decoder clock (0 puts the PLL to sleep)
 *
 * Reprograms the UPLL for the requested clocks.  The hardware bring-up
 * sequence below (bypass -> reset -> program dividers -> settle ->
 * un-bypass -> switch sources) is order sensitive; do not reorder.
 * Returns 0 on success, negative error code on failure.
 */
int evergreen_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
{
	/* start off with something large */
	unsigned fb_div = 0, vclk_div = 0, dclk_div = 0;
	int r;

	/* bypass vclk and dclk with bclk */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		VCLK_SRC_SEL(1) | DCLK_SRC_SEL(1),
		~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));

	/* put PLL in bypass mode */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_BYPASS_EN_MASK, ~UPLL_BYPASS_EN_MASK);

	if (!vclk || !dclk) {
		/* keep the Bypass mode, put PLL to sleep */
		WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
		return 0;
	}

	/* find feedback/post dividers that hit both clocks within bounds */
	r = radeon_uvd_calc_upll_dividers(rdev, vclk, dclk, 125000, 250000,
					  16384, 0x03FFFFFF, 0, 128, 5,
					  &fb_div, &vclk_div, &dclk_div);
	if (r)
		return r;

	/* set VCO_MODE to 1 */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_VCO_MODE_MASK, ~UPLL_VCO_MODE_MASK);

	/* toggle UPLL_SLEEP to 1 then back to 0 */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_SLEEP_MASK);

	/* deassert UPLL_RESET */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);

	mdelay(1);

	r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
	if (r)
		return r;

	/* assert UPLL_RESET again */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_RESET_MASK, ~UPLL_RESET_MASK);

	/* disable spread spectrum. */
	WREG32_P(CG_UPLL_SPREAD_SPECTRUM, 0, ~SSEN_MASK);

	/* set feedback divider */
	WREG32_P(CG_UPLL_FUNC_CNTL_3, UPLL_FB_DIV(fb_div), ~UPLL_FB_DIV_MASK);

	/* set ref divider to 0 */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_REF_DIV_MASK);

	/* spare bit 9 is toggled depending on the feedback divider range */
	if (fb_div < 307200)
		WREG32_P(CG_UPLL_FUNC_CNTL_4, 0, ~UPLL_SPARE_ISPARE9);
	else
		WREG32_P(CG_UPLL_FUNC_CNTL_4, UPLL_SPARE_ISPARE9, ~UPLL_SPARE_ISPARE9);

	/* set PDIV_A and PDIV_B */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		UPLL_PDIV_A(vclk_div) | UPLL_PDIV_B(dclk_div),
		~(UPLL_PDIV_A_MASK | UPLL_PDIV_B_MASK));

	/* give the PLL some time to settle */
	mdelay(15);

	/* deassert PLL_RESET */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);

	mdelay(15);

	/* switch from bypass mode to normal mode */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_BYPASS_EN_MASK);

	r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
	if (r)
		return r;

	/* switch VCLK and DCLK selection */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		VCLK_SRC_SEL(2) | DCLK_SRC_SEL(2),
		~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));

	mdelay(100);

	return 0;
}
1159 
1160 void evergreen_fix_pci_max_read_req_size(struct radeon_device *rdev)
1161 {
1162 	int readrq;
1163 	u16 v;
1164 
1165 	readrq = pcie_get_readrq(rdev->pdev);
1166 	v = ffs(readrq) - 8;
1167 	/* if bios or OS sets MAX_READ_REQUEST_SIZE to an invalid value, fix it
1168 	 * to avoid hangs or perfomance issues
1169 	 */
1170 	if ((v == 0) || (v == 6) || (v == 7))
1171 		pcie_set_readrq(rdev->pdev, 512);
1172 }
1173 
1174 void dce4_program_fmt(struct drm_encoder *encoder)
1175 {
1176 	struct drm_device *dev = encoder->dev;
1177 	struct radeon_device *rdev = dev->dev_private;
1178 	struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
1179 	struct radeon_crtc *radeon_crtc = to_radeon_crtc(encoder->crtc);
1180 	struct drm_connector *connector = radeon_get_connector_for_encoder(encoder);
1181 	int bpc = 0;
1182 	u32 tmp = 0;
1183 	enum radeon_connector_dither dither = RADEON_FMT_DITHER_DISABLE;
1184 
1185 	if (connector) {
1186 		struct radeon_connector *radeon_connector = to_radeon_connector(connector);
1187 		bpc = radeon_get_monitor_bpc(connector);
1188 		dither = radeon_connector->dither;
1189 	}
1190 
1191 	/* LVDS/eDP FMT is set up by atom */
1192 	if (radeon_encoder->devices & ATOM_DEVICE_LCD_SUPPORT)
1193 		return;
1194 
1195 	/* not needed for analog */
1196 	if ((radeon_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1) ||
1197 	    (radeon_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2))
1198 		return;
1199 
1200 	if (bpc == 0)
1201 		return;
1202 
1203 	switch (bpc) {
1204 	case 6:
1205 		if (dither == RADEON_FMT_DITHER_ENABLE)
1206 			/* XXX sort out optimal dither settings */
1207 			tmp |= (FMT_FRAME_RANDOM_ENABLE | FMT_HIGHPASS_RANDOM_ENABLE |
1208 				FMT_SPATIAL_DITHER_EN);
1209 		else
1210 			tmp |= FMT_TRUNCATE_EN;
1211 		break;
1212 	case 8:
1213 		if (dither == RADEON_FMT_DITHER_ENABLE)
1214 			/* XXX sort out optimal dither settings */
1215 			tmp |= (FMT_FRAME_RANDOM_ENABLE | FMT_HIGHPASS_RANDOM_ENABLE |
1216 				FMT_RGB_RANDOM_ENABLE |
1217 				FMT_SPATIAL_DITHER_EN | FMT_SPATIAL_DITHER_DEPTH);
1218 		else
1219 			tmp |= (FMT_TRUNCATE_EN | FMT_TRUNCATE_DEPTH);
1220 		break;
1221 	case 10:
1222 	default:
1223 		/* not needed */
1224 		break;
1225 	}
1226 
1227 	WREG32(FMT_BIT_DEPTH_CONTROL + radeon_crtc->crtc_offset, tmp);
1228 }
1229 
1230 static bool dce4_is_in_vblank(struct radeon_device *rdev, int crtc)
1231 {
1232 	if (RREG32(EVERGREEN_CRTC_STATUS + crtc_offsets[crtc]) & EVERGREEN_CRTC_V_BLANK)
1233 		return true;
1234 	else
1235 		return false;
1236 }
1237 
1238 static bool dce4_is_counter_moving(struct radeon_device *rdev, int crtc)
1239 {
1240 	u32 pos1, pos2;
1241 
1242 	pos1 = RREG32(EVERGREEN_CRTC_STATUS_POSITION + crtc_offsets[crtc]);
1243 	pos2 = RREG32(EVERGREEN_CRTC_STATUS_POSITION + crtc_offsets[crtc]);
1244 
1245 	if (pos1 != pos2)
1246 		return true;
1247 	else
1248 		return false;
1249 }
1250 
1251 /**
1252  * dce4_wait_for_vblank - vblank wait asic callback.
1253  *
1254  * @rdev: radeon_device pointer
1255  * @crtc: crtc to wait for vblank on
1256  *
1257  * Wait for vblank on the requested crtc (evergreen+).
1258  */
1259 void dce4_wait_for_vblank(struct radeon_device *rdev, int crtc)
1260 {
1261 	unsigned i = 0;
1262 
1263 	if (crtc >= rdev->num_crtc)
1264 		return;
1265 
1266 	if (!(RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[crtc]) & EVERGREEN_CRTC_MASTER_EN))
1267 		return;
1268 
1269 	/* depending on when we hit vblank, we may be close to active; if so,
1270 	 * wait for another frame.
1271 	 */
1272 	while (dce4_is_in_vblank(rdev, crtc)) {
1273 		if (i++ % 100 == 0) {
1274 			if (!dce4_is_counter_moving(rdev, crtc))
1275 				break;
1276 		}
1277 	}
1278 
1279 	while (!dce4_is_in_vblank(rdev, crtc)) {
1280 		if (i++ % 100 == 0) {
1281 			if (!dce4_is_counter_moving(rdev, crtc))
1282 				break;
1283 		}
1284 	}
1285 }
1286 
1287 /**
1288  * evergreen_page_flip - pageflip callback.
1289  *
1290  * @rdev: radeon_device pointer
1291  * @crtc_id: crtc to cleanup pageflip on
1292  * @crtc_base: new address of the crtc (GPU MC address)
1293  *
1294  * Does the actual pageflip (evergreen+).
1295  * During vblank we take the crtc lock and wait for the update_pending
1296  * bit to go high, when it does, we release the lock, and allow the
1297  * double buffered update to take place.
1298  * Returns the current update pending status.
1299  */
1300 void evergreen_page_flip(struct radeon_device *rdev, int crtc_id, u64 crtc_base)
1301 {
1302 	struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id];
1303 	u32 tmp = RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset);
1304 	int i;
1305 
1306 	/* Lock the graphics update lock */
1307 	tmp |= EVERGREEN_GRPH_UPDATE_LOCK;
1308 	WREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset, tmp);
1309 
1310 	/* update the scanout addresses */
1311 	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
1312 	       upper_32_bits(crtc_base));
1313 	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
1314 	       (u32)crtc_base);
1315 
1316 	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
1317 	       upper_32_bits(crtc_base));
1318 	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
1319 	       (u32)crtc_base);
1320 
1321 	/* Wait for update_pending to go high. */
1322 	for (i = 0; i < rdev->usec_timeout; i++) {
1323 		if (RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset) & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING)
1324 			break;
1325 		udelay(1);
1326 	}
1327 	DRM_DEBUG("Update pending now high. Unlocking vupdate_lock.\n");
1328 
1329 	/* Unlock the lock, so double-buffering can take place inside vblank */
1330 	tmp &= ~EVERGREEN_GRPH_UPDATE_LOCK;
1331 	WREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset, tmp);
1332 }
1333 
1334 /**
1335  * evergreen_page_flip_pending - check if page flip is still pending
1336  *
1337  * @rdev: radeon_device pointer
1338  * @crtc_id: crtc to check
1339  *
1340  * Returns the current update pending status.
1341  */
1342 bool evergreen_page_flip_pending(struct radeon_device *rdev, int crtc_id)
1343 {
1344 	struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id];
1345 
1346 	/* Return current update_pending status: */
1347 	return !!(RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset) &
1348 		EVERGREEN_GRPH_SURFACE_UPDATE_PENDING);
1349 }
1350 
/* get temperature in millidegrees */
int evergreen_get_temp(struct radeon_device *rdev)
{
	u32 temp, toffset;
	int actual_temp = 0;

	if (rdev->family == CHIP_JUNIPER) {
		/* Juniper: combine the raw ADC reading with the fused
		 * temperature trim offset from CG_THERMAL_CTRL.
		 */
		toffset = (RREG32(CG_THERMAL_CTRL) & TOFFSET_MASK) >>
			TOFFSET_SHIFT;
		temp = (RREG32(CG_TS0_STATUS) & TS0_ADC_DOUT_MASK) >>
			TS0_ADC_DOUT_SHIFT;

		/* bit 8 of toffset marks a negative offset; the 0x200
		 * adjustment undoes the 9-bit two's complement encoding.
		 */
		if (toffset & 0x100)
			actual_temp = temp / 2 - (0x200 - toffset);
		else
			actual_temp = temp / 2 + toffset;

		/* degrees C -> millidegrees */
		actual_temp = actual_temp * 1000;

	} else {
		temp = (RREG32(CG_MULT_THERMAL_STATUS) & ASIC_T_MASK) >>
			ASIC_T_SHIFT;

		/* decode the raw field: bit 10 clamps low, bit 9 clamps
		 * high, bit 8 sign-extends the low 9 bits, otherwise the
		 * low 8 bits are the value directly.
		 */
		if (temp & 0x400)
			actual_temp = -256;
		else if (temp & 0x200)
			actual_temp = 255;
		else if (temp & 0x100) {
			actual_temp = temp & 0x1ff;
			actual_temp |= ~0x1ff;
		} else
			actual_temp = temp & 0xff;

		/* value is in half-degrees; convert to millidegrees */
		actual_temp = (actual_temp * 1000) / 2;
	}

	return actual_temp;
}
1389 
1390 int sumo_get_temp(struct radeon_device *rdev)
1391 {
1392 	u32 temp = RREG32(CG_THERMAL_STATUS) & 0xff;
1393 	int actual_temp = temp - 49;
1394 
1395 	return actual_temp * 1000;
1396 }
1397 
1398 /**
1399  * sumo_pm_init_profile - Initialize power profiles callback.
1400  *
1401  * @rdev: radeon_device pointer
1402  *
1403  * Initialize the power states used in profile mode
1404  * (sumo, trinity, SI).
1405  * Used for profile mode only.
1406  */
1407 void sumo_pm_init_profile(struct radeon_device *rdev)
1408 {
1409 	int idx;
1410 
1411 	/* default */
1412 	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
1413 	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
1414 	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
1415 	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 0;
1416 
1417 	/* low,mid sh/mh */
1418 	if (rdev->flags & RADEON_IS_MOBILITY)
1419 		idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
1420 	else
1421 		idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1422 
1423 	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
1424 	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
1425 	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
1426 	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
1427 
1428 	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
1429 	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
1430 	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
1431 	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
1432 
1433 	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
1434 	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
1435 	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
1436 	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 0;
1437 
1438 	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
1439 	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
1440 	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
1441 	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 0;
1442 
1443 	/* high sh/mh */
1444 	idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1445 	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
1446 	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
1447 	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
1448 	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx =
1449 		rdev->pm.power_state[idx].num_clock_modes - 1;
1450 
1451 	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
1452 	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
1453 	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
1454 	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx =
1455 		rdev->pm.power_state[idx].num_clock_modes - 1;
1456 }
1457 
1458 /**
1459  * btc_pm_init_profile - Initialize power profiles callback.
1460  *
1461  * @rdev: radeon_device pointer
1462  *
1463  * Initialize the power states used in profile mode
1464  * (BTC, cayman).
1465  * Used for profile mode only.
1466  */
1467 void btc_pm_init_profile(struct radeon_device *rdev)
1468 {
1469 	int idx;
1470 
1471 	/* default */
1472 	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
1473 	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
1474 	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
1475 	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 2;
1476 	/* starting with BTC, there is one state that is used for both
1477 	 * MH and SH.  Difference is that we always use the high clock index for
1478 	 * mclk.
1479 	 */
1480 	if (rdev->flags & RADEON_IS_MOBILITY)
1481 		idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
1482 	else
1483 		idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1484 	/* low sh */
1485 	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
1486 	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
1487 	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
1488 	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
1489 	/* mid sh */
1490 	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
1491 	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
1492 	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
1493 	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 1;
1494 	/* high sh */
1495 	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
1496 	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
1497 	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
1498 	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx = 2;
1499 	/* low mh */
1500 	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
1501 	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
1502 	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
1503 	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
1504 	/* mid mh */
1505 	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
1506 	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
1507 	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
1508 	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 1;
1509 	/* high mh */
1510 	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
1511 	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
1512 	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
1513 	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx = 2;
1514 }
1515 
1516 /**
1517  * evergreen_pm_misc - set additional pm hw parameters callback.
1518  *
1519  * @rdev: radeon_device pointer
1520  *
1521  * Set non-clock parameters associated with a power state
1522  * (voltage, etc.) (evergreen+).
1523  */
1524 void evergreen_pm_misc(struct radeon_device *rdev)
1525 {
1526 	int req_ps_idx = rdev->pm.requested_power_state_index;
1527 	int req_cm_idx = rdev->pm.requested_clock_mode_index;
1528 	struct radeon_power_state *ps = &rdev->pm.power_state[req_ps_idx];
1529 	struct radeon_voltage *voltage = &ps->clock_info[req_cm_idx].voltage;
1530 
1531 	if (voltage->type == VOLTAGE_SW) {
1532 		/* 0xff0x are flags rather then an actual voltage */
1533 		if ((voltage->voltage & 0xff00) == 0xff00)
1534 			return;
1535 		if (voltage->voltage && (voltage->voltage != rdev->pm.current_vddc)) {
1536 			radeon_atom_set_voltage(rdev, voltage->voltage, SET_VOLTAGE_TYPE_ASIC_VDDC);
1537 			rdev->pm.current_vddc = voltage->voltage;
1538 			DRM_DEBUG("Setting: vddc: %d\n", voltage->voltage);
1539 		}
1540 
1541 		/* starting with BTC, there is one state that is used for both
1542 		 * MH and SH.  Difference is that we always use the high clock index for
1543 		 * mclk and vddci.
1544 		 */
1545 		if ((rdev->pm.pm_method == PM_METHOD_PROFILE) &&
1546 		    (rdev->family >= CHIP_BARTS) &&
1547 		    rdev->pm.active_crtc_count &&
1548 		    ((rdev->pm.profile_index == PM_PROFILE_MID_MH_IDX) ||
1549 		     (rdev->pm.profile_index == PM_PROFILE_LOW_MH_IDX)))
1550 			voltage = &rdev->pm.power_state[req_ps_idx].
1551 				clock_info[rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx].voltage;
1552 
1553 		/* 0xff0x are flags rather then an actual voltage */
1554 		if ((voltage->vddci & 0xff00) == 0xff00)
1555 			return;
1556 		if (voltage->vddci && (voltage->vddci != rdev->pm.current_vddci)) {
1557 			radeon_atom_set_voltage(rdev, voltage->vddci, SET_VOLTAGE_TYPE_ASIC_VDDCI);
1558 			rdev->pm.current_vddci = voltage->vddci;
1559 			DRM_DEBUG("Setting: vddci: %d\n", voltage->vddci);
1560 		}
1561 	}
1562 }
1563 
1564 /**
1565  * evergreen_pm_prepare - pre-power state change callback.
1566  *
1567  * @rdev: radeon_device pointer
1568  *
1569  * Prepare for a power state change (evergreen+).
1570  */
1571 void evergreen_pm_prepare(struct radeon_device *rdev)
1572 {
1573 	struct drm_device *ddev = rdev->ddev;
1574 	struct drm_crtc *crtc;
1575 	struct radeon_crtc *radeon_crtc;
1576 	u32 tmp;
1577 
1578 	/* disable any active CRTCs */
1579 	list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
1580 		radeon_crtc = to_radeon_crtc(crtc);
1581 		if (radeon_crtc->enabled) {
1582 			tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
1583 			tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
1584 			WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
1585 		}
1586 	}
1587 }
1588 
1589 /**
1590  * evergreen_pm_finish - post-power state change callback.
1591  *
1592  * @rdev: radeon_device pointer
1593  *
1594  * Clean up after a power state change (evergreen+).
1595  */
1596 void evergreen_pm_finish(struct radeon_device *rdev)
1597 {
1598 	struct drm_device *ddev = rdev->ddev;
1599 	struct drm_crtc *crtc;
1600 	struct radeon_crtc *radeon_crtc;
1601 	u32 tmp;
1602 
1603 	/* enable any active CRTCs */
1604 	list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
1605 		radeon_crtc = to_radeon_crtc(crtc);
1606 		if (radeon_crtc->enabled) {
1607 			tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
1608 			tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
1609 			WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
1610 		}
1611 	}
1612 }
1613 
1614 /**
1615  * evergreen_hpd_sense - hpd sense callback.
1616  *
1617  * @rdev: radeon_device pointer
1618  * @hpd: hpd (hotplug detect) pin
1619  *
1620  * Checks if a digital monitor is connected (evergreen+).
1621  * Returns true if connected, false if not connected.
1622  */
1623 bool evergreen_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd)
1624 {
1625 	bool connected = false;
1626 
1627 	switch (hpd) {
1628 	case RADEON_HPD_1:
1629 		if (RREG32(DC_HPD1_INT_STATUS) & DC_HPDx_SENSE)
1630 			connected = true;
1631 		break;
1632 	case RADEON_HPD_2:
1633 		if (RREG32(DC_HPD2_INT_STATUS) & DC_HPDx_SENSE)
1634 			connected = true;
1635 		break;
1636 	case RADEON_HPD_3:
1637 		if (RREG32(DC_HPD3_INT_STATUS) & DC_HPDx_SENSE)
1638 			connected = true;
1639 		break;
1640 	case RADEON_HPD_4:
1641 		if (RREG32(DC_HPD4_INT_STATUS) & DC_HPDx_SENSE)
1642 			connected = true;
1643 		break;
1644 	case RADEON_HPD_5:
1645 		if (RREG32(DC_HPD5_INT_STATUS) & DC_HPDx_SENSE)
1646 			connected = true;
1647 		break;
1648 	case RADEON_HPD_6:
1649 		if (RREG32(DC_HPD6_INT_STATUS) & DC_HPDx_SENSE)
1650 			connected = true;
1651 		break;
1652 	default:
1653 		break;
1654 	}
1655 
1656 	return connected;
1657 }
1658 
1659 /**
1660  * evergreen_hpd_set_polarity - hpd set polarity callback.
1661  *
1662  * @rdev: radeon_device pointer
1663  * @hpd: hpd (hotplug detect) pin
1664  *
1665  * Set the polarity of the hpd pin (evergreen+).
1666  */
1667 void evergreen_hpd_set_polarity(struct radeon_device *rdev,
1668 				enum radeon_hpd_id hpd)
1669 {
1670 	u32 tmp;
1671 	bool connected = evergreen_hpd_sense(rdev, hpd);
1672 
1673 	switch (hpd) {
1674 	case RADEON_HPD_1:
1675 		tmp = RREG32(DC_HPD1_INT_CONTROL);
1676 		if (connected)
1677 			tmp &= ~DC_HPDx_INT_POLARITY;
1678 		else
1679 			tmp |= DC_HPDx_INT_POLARITY;
1680 		WREG32(DC_HPD1_INT_CONTROL, tmp);
1681 		break;
1682 	case RADEON_HPD_2:
1683 		tmp = RREG32(DC_HPD2_INT_CONTROL);
1684 		if (connected)
1685 			tmp &= ~DC_HPDx_INT_POLARITY;
1686 		else
1687 			tmp |= DC_HPDx_INT_POLARITY;
1688 		WREG32(DC_HPD2_INT_CONTROL, tmp);
1689 		break;
1690 	case RADEON_HPD_3:
1691 		tmp = RREG32(DC_HPD3_INT_CONTROL);
1692 		if (connected)
1693 			tmp &= ~DC_HPDx_INT_POLARITY;
1694 		else
1695 			tmp |= DC_HPDx_INT_POLARITY;
1696 		WREG32(DC_HPD3_INT_CONTROL, tmp);
1697 		break;
1698 	case RADEON_HPD_4:
1699 		tmp = RREG32(DC_HPD4_INT_CONTROL);
1700 		if (connected)
1701 			tmp &= ~DC_HPDx_INT_POLARITY;
1702 		else
1703 			tmp |= DC_HPDx_INT_POLARITY;
1704 		WREG32(DC_HPD4_INT_CONTROL, tmp);
1705 		break;
1706 	case RADEON_HPD_5:
1707 		tmp = RREG32(DC_HPD5_INT_CONTROL);
1708 		if (connected)
1709 			tmp &= ~DC_HPDx_INT_POLARITY;
1710 		else
1711 			tmp |= DC_HPDx_INT_POLARITY;
1712 		WREG32(DC_HPD5_INT_CONTROL, tmp);
1713 			break;
1714 	case RADEON_HPD_6:
1715 		tmp = RREG32(DC_HPD6_INT_CONTROL);
1716 		if (connected)
1717 			tmp &= ~DC_HPDx_INT_POLARITY;
1718 		else
1719 			tmp |= DC_HPDx_INT_POLARITY;
1720 		WREG32(DC_HPD6_INT_CONTROL, tmp);
1721 		break;
1722 	default:
1723 		break;
1724 	}
1725 }
1726 
1727 /**
1728  * evergreen_hpd_init - hpd setup callback.
1729  *
1730  * @rdev: radeon_device pointer
1731  *
1732  * Setup the hpd pins used by the card (evergreen+).
1733  * Enable the pin, set the polarity, and enable the hpd interrupts.
1734  */
1735 void evergreen_hpd_init(struct radeon_device *rdev)
1736 {
1737 	struct drm_device *dev = rdev->ddev;
1738 	struct drm_connector *connector;
1739 	unsigned enabled = 0;
1740 	u32 tmp = DC_HPDx_CONNECTION_TIMER(0x9c4) |
1741 		DC_HPDx_RX_INT_TIMER(0xfa) | DC_HPDx_EN;
1742 
1743 	list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
1744 		struct radeon_connector *radeon_connector = to_radeon_connector(connector);
1745 
1746 		if (connector->connector_type == DRM_MODE_CONNECTOR_eDP ||
1747 		    connector->connector_type == DRM_MODE_CONNECTOR_LVDS) {
1748 			/* don't try to enable hpd on eDP or LVDS avoid breaking the
1749 			 * aux dp channel on imac and help (but not completely fix)
1750 			 * https://bugzilla.redhat.com/show_bug.cgi?id=726143
1751 			 * also avoid interrupt storms during dpms.
1752 			 */
1753 			continue;
1754 		}
1755 		switch (radeon_connector->hpd.hpd) {
1756 		case RADEON_HPD_1:
1757 			WREG32(DC_HPD1_CONTROL, tmp);
1758 			break;
1759 		case RADEON_HPD_2:
1760 			WREG32(DC_HPD2_CONTROL, tmp);
1761 			break;
1762 		case RADEON_HPD_3:
1763 			WREG32(DC_HPD3_CONTROL, tmp);
1764 			break;
1765 		case RADEON_HPD_4:
1766 			WREG32(DC_HPD4_CONTROL, tmp);
1767 			break;
1768 		case RADEON_HPD_5:
1769 			WREG32(DC_HPD5_CONTROL, tmp);
1770 			break;
1771 		case RADEON_HPD_6:
1772 			WREG32(DC_HPD6_CONTROL, tmp);
1773 			break;
1774 		default:
1775 			break;
1776 		}
1777 		radeon_hpd_set_polarity(rdev, radeon_connector->hpd.hpd);
1778 		enabled |= 1 << radeon_connector->hpd.hpd;
1779 	}
1780 	radeon_irq_kms_enable_hpd(rdev, enabled);
1781 }
1782 
1783 /**
1784  * evergreen_hpd_fini - hpd tear down callback.
1785  *
1786  * @rdev: radeon_device pointer
1787  *
1788  * Tear down the hpd pins used by the card (evergreen+).
1789  * Disable the hpd interrupts.
1790  */
1791 void evergreen_hpd_fini(struct radeon_device *rdev)
1792 {
1793 	struct drm_device *dev = rdev->ddev;
1794 	struct drm_connector *connector;
1795 	unsigned disabled = 0;
1796 
1797 	list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
1798 		struct radeon_connector *radeon_connector = to_radeon_connector(connector);
1799 		switch (radeon_connector->hpd.hpd) {
1800 		case RADEON_HPD_1:
1801 			WREG32(DC_HPD1_CONTROL, 0);
1802 			break;
1803 		case RADEON_HPD_2:
1804 			WREG32(DC_HPD2_CONTROL, 0);
1805 			break;
1806 		case RADEON_HPD_3:
1807 			WREG32(DC_HPD3_CONTROL, 0);
1808 			break;
1809 		case RADEON_HPD_4:
1810 			WREG32(DC_HPD4_CONTROL, 0);
1811 			break;
1812 		case RADEON_HPD_5:
1813 			WREG32(DC_HPD5_CONTROL, 0);
1814 			break;
1815 		case RADEON_HPD_6:
1816 			WREG32(DC_HPD6_CONTROL, 0);
1817 			break;
1818 		default:
1819 			break;
1820 		}
1821 		disabled |= 1 << radeon_connector->hpd.hpd;
1822 	}
1823 	radeon_irq_kms_disable_hpd(rdev, disabled);
1824 }
1825 
1826 /* watermark setup */
1827 
/*
 * evergreen_line_buffer_adjust - program the line buffer split for a crtc
 * and report the resulting allocation.
 *
 * @rdev: radeon_device pointer
 * @radeon_crtc: crtc to program
 * @mode: mode of this crtc (disabled crtc gets no lb)
 * @other_mode: mode of the crtc sharing the same line buffer, if active
 *
 * Returns the number of line buffer entries usable by this crtc
 * (0 when the controller is disabled).
 */
static u32 evergreen_line_buffer_adjust(struct radeon_device *rdev,
					struct radeon_crtc *radeon_crtc,
					struct drm_display_mode *mode,
					struct drm_display_mode *other_mode)
{
	u32 tmp, buffer_alloc, i;
	u32 pipe_offset = radeon_crtc->crtc_id * 0x20;
	/*
	 * Line Buffer Setup
	 * There are 3 line buffers, each one shared by 2 display controllers.
	 * DC_LB_MEMORY_SPLIT controls how that line buffer is shared between
	 * the display controllers.  The partitioning is done via one of four
	 * preset allocations specified in bits 2:0:
	 * first display controller
	 *  0 - first half of lb (3840 * 2)
	 *  1 - first 3/4 of lb (5760 * 2)
	 *  2 - whole lb (7680 * 2), other crtc must be disabled
	 *  3 - first 1/4 of lb (1920 * 2)
	 * second display controller
	 *  4 - second half of lb (3840 * 2)
	 *  5 - second 3/4 of lb (5760 * 2)
	 *  6 - whole lb (7680 * 2), other crtc must be disabled
	 *  7 - last 1/4 of lb (1920 * 2)
	 */
	/* this can get tricky if we have two large displays on a paired group
	 * of crtcs.  Ideally for multiple large displays we'd assign them to
	 * non-linked crtcs for maximum line buffer allocation.
	 */
	if (radeon_crtc->base.enabled && mode) {
		if (other_mode) {
			tmp = 0; /* 1/2 */
			buffer_alloc = 1;
		} else {
			tmp = 2; /* whole */
			buffer_alloc = 2;
		}
	} else {
		/* disabled controller: no lb and no dmif buffers */
		tmp = 0;
		buffer_alloc = 0;
	}

	/* second controller of the pair uses second half of the lb */
	if (radeon_crtc->crtc_id % 2)
		tmp += 4;
	WREG32(DC_LB_MEMORY_SPLIT + radeon_crtc->crtc_offset, tmp);

	/* DCE4.1/DCE5 also partition the DMIF buffers; wait for the hw to
	 * acknowledge the new allocation before continuing */
	if (ASIC_IS_DCE41(rdev) || ASIC_IS_DCE5(rdev)) {
		WREG32(PIPE0_DMIF_BUFFER_CONTROL + pipe_offset,
		       DMIF_BUFFERS_ALLOCATED(buffer_alloc));
		for (i = 0; i < rdev->usec_timeout; i++) {
			if (RREG32(PIPE0_DMIF_BUFFER_CONTROL + pipe_offset) &
			    DMIF_BUFFERS_ALLOCATED_COMPLETED)
				break;
			udelay(1);
		}
	}

	/* translate the chosen split into the lb size owned by this crtc;
	 * DCE5 has a larger line buffer than DCE4 */
	if (radeon_crtc->base.enabled && mode) {
		switch (tmp) {
		case 0:
		case 4:
		default:
			if (ASIC_IS_DCE5(rdev))
				return 4096 * 2;
			else
				return 3840 * 2;
		case 1:
		case 5:
			if (ASIC_IS_DCE5(rdev))
				return 6144 * 2;
			else
				return 5760 * 2;
		case 2:
		case 6:
			if (ASIC_IS_DCE5(rdev))
				return 8192 * 2;
			else
				return 7680 * 2;
		case 3:
		case 7:
			if (ASIC_IS_DCE5(rdev))
				return 2048 * 2;
			else
				return 1920 * 2;
		}
	}

	/* controller not enabled, so no lb used */
	return 0;
}
1918 
1919 u32 evergreen_get_number_of_dram_channels(struct radeon_device *rdev)
1920 {
1921 	u32 tmp = RREG32(MC_SHARED_CHMAP);
1922 
1923 	switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
1924 	case 0:
1925 	default:
1926 		return 1;
1927 	case 1:
1928 		return 2;
1929 	case 2:
1930 		return 4;
1931 	case 3:
1932 		return 8;
1933 	}
1934 }
1935 
/* Inputs to the evergreen watermark calculations; one instance is filled
 * per head and per clock level (high/low) in evergreen_program_watermarks(). */
struct evergreen_wm_params {
	u32 dram_channels; /* number of dram channels */
	u32 yclk;          /* bandwidth per dram data pin in kHz */
	u32 sclk;          /* engine clock in kHz */
	u32 disp_clk;      /* display clock in kHz */
	u32 src_width;     /* viewport width */
	u32 active_time;   /* active display time in ns */
	u32 blank_time;    /* blank time in ns */
	bool interlaced;    /* mode is interlaced */
	fixed20_12 vsc;    /* vertical scale ratio */
	u32 num_heads;     /* number of active crtcs */
	u32 bytes_per_pixel; /* bytes per pixel display + overlay */
	u32 lb_size;       /* line buffer allocated to pipe */
	u32 vtaps;         /* vertical scaler taps */
};
1951 
1952 static u32 evergreen_dram_bandwidth(struct evergreen_wm_params *wm)
1953 {
1954 	/* Calculate DRAM Bandwidth and the part allocated to display. */
1955 	fixed20_12 dram_efficiency; /* 0.7 */
1956 	fixed20_12 yclk, dram_channels, bandwidth;
1957 	fixed20_12 a;
1958 
1959 	a.full = dfixed_const(1000);
1960 	yclk.full = dfixed_const(wm->yclk);
1961 	yclk.full = dfixed_div(yclk, a);
1962 	dram_channels.full = dfixed_const(wm->dram_channels * 4);
1963 	a.full = dfixed_const(10);
1964 	dram_efficiency.full = dfixed_const(7);
1965 	dram_efficiency.full = dfixed_div(dram_efficiency, a);
1966 	bandwidth.full = dfixed_mul(dram_channels, yclk);
1967 	bandwidth.full = dfixed_mul(bandwidth, dram_efficiency);
1968 
1969 	return dfixed_trunc(bandwidth);
1970 }
1971 
1972 static u32 evergreen_dram_bandwidth_for_display(struct evergreen_wm_params *wm)
1973 {
1974 	/* Calculate DRAM Bandwidth and the part allocated to display. */
1975 	fixed20_12 disp_dram_allocation; /* 0.3 to 0.7 */
1976 	fixed20_12 yclk, dram_channels, bandwidth;
1977 	fixed20_12 a;
1978 
1979 	a.full = dfixed_const(1000);
1980 	yclk.full = dfixed_const(wm->yclk);
1981 	yclk.full = dfixed_div(yclk, a);
1982 	dram_channels.full = dfixed_const(wm->dram_channels * 4);
1983 	a.full = dfixed_const(10);
1984 	disp_dram_allocation.full = dfixed_const(3); /* XXX worse case value 0.3 */
1985 	disp_dram_allocation.full = dfixed_div(disp_dram_allocation, a);
1986 	bandwidth.full = dfixed_mul(dram_channels, yclk);
1987 	bandwidth.full = dfixed_mul(bandwidth, disp_dram_allocation);
1988 
1989 	return dfixed_trunc(bandwidth);
1990 }
1991 
1992 static u32 evergreen_data_return_bandwidth(struct evergreen_wm_params *wm)
1993 {
1994 	/* Calculate the display Data return Bandwidth */
1995 	fixed20_12 return_efficiency; /* 0.8 */
1996 	fixed20_12 sclk, bandwidth;
1997 	fixed20_12 a;
1998 
1999 	a.full = dfixed_const(1000);
2000 	sclk.full = dfixed_const(wm->sclk);
2001 	sclk.full = dfixed_div(sclk, a);
2002 	a.full = dfixed_const(10);
2003 	return_efficiency.full = dfixed_const(8);
2004 	return_efficiency.full = dfixed_div(return_efficiency, a);
2005 	a.full = dfixed_const(32);
2006 	bandwidth.full = dfixed_mul(a, sclk);
2007 	bandwidth.full = dfixed_mul(bandwidth, return_efficiency);
2008 
2009 	return dfixed_trunc(bandwidth);
2010 }
2011 
2012 static u32 evergreen_dmif_request_bandwidth(struct evergreen_wm_params *wm)
2013 {
2014 	/* Calculate the DMIF Request Bandwidth */
2015 	fixed20_12 disp_clk_request_efficiency; /* 0.8 */
2016 	fixed20_12 disp_clk, bandwidth;
2017 	fixed20_12 a;
2018 
2019 	a.full = dfixed_const(1000);
2020 	disp_clk.full = dfixed_const(wm->disp_clk);
2021 	disp_clk.full = dfixed_div(disp_clk, a);
2022 	a.full = dfixed_const(10);
2023 	disp_clk_request_efficiency.full = dfixed_const(8);
2024 	disp_clk_request_efficiency.full = dfixed_div(disp_clk_request_efficiency, a);
2025 	a.full = dfixed_const(32);
2026 	bandwidth.full = dfixed_mul(a, disp_clk);
2027 	bandwidth.full = dfixed_mul(bandwidth, disp_clk_request_efficiency);
2028 
2029 	return dfixed_trunc(bandwidth);
2030 }
2031 
2032 static u32 evergreen_available_bandwidth(struct evergreen_wm_params *wm)
2033 {
2034 	/* Calculate the Available bandwidth. Display can use this temporarily but not in average. */
2035 	u32 dram_bandwidth = evergreen_dram_bandwidth(wm);
2036 	u32 data_return_bandwidth = evergreen_data_return_bandwidth(wm);
2037 	u32 dmif_req_bandwidth = evergreen_dmif_request_bandwidth(wm);
2038 
2039 	return min(dram_bandwidth, min(data_return_bandwidth, dmif_req_bandwidth));
2040 }
2041 
2042 static u32 evergreen_average_bandwidth(struct evergreen_wm_params *wm)
2043 {
2044 	/* Calculate the display mode Average Bandwidth
2045 	 * DisplayMode should contain the source and destination dimensions,
2046 	 * timing, etc.
2047 	 */
2048 	fixed20_12 bpp;
2049 	fixed20_12 line_time;
2050 	fixed20_12 src_width;
2051 	fixed20_12 bandwidth;
2052 	fixed20_12 a;
2053 
2054 	a.full = dfixed_const(1000);
2055 	line_time.full = dfixed_const(wm->active_time + wm->blank_time);
2056 	line_time.full = dfixed_div(line_time, a);
2057 	bpp.full = dfixed_const(wm->bytes_per_pixel);
2058 	src_width.full = dfixed_const(wm->src_width);
2059 	bandwidth.full = dfixed_mul(src_width, bpp);
2060 	bandwidth.full = dfixed_mul(bandwidth, wm->vsc);
2061 	bandwidth.full = dfixed_div(bandwidth, line_time);
2062 
2063 	return dfixed_trunc(bandwidth);
2064 }
2065 
/* Compute the latency watermark (ns) for one head: how long the display
 * may wait for data, including extra line fill time when the lb cannot be
 * refilled within one active line.  Returns 0 when no heads are active.
 * NOTE(review): the divisions below assume available_bandwidth and
 * disp_clk are non-zero -- presumably guaranteed by the caller's clock
 * setup; confirm before reusing with synthetic parameters. */
static u32 evergreen_latency_watermark(struct evergreen_wm_params *wm)
{
	/* First calculate the latency in ns */
	u32 mc_latency = 2000; /* 2000 ns. */
	u32 available_bandwidth = evergreen_available_bandwidth(wm);
	u32 worst_chunk_return_time = (512 * 8 * 1000) / available_bandwidth;
	u32 cursor_line_pair_return_time = (128 * 4 * 1000) / available_bandwidth;
	u32 dc_latency = 40000000 / wm->disp_clk; /* dc pipe latency */
	u32 other_heads_data_return_time = ((wm->num_heads + 1) * worst_chunk_return_time) +
		(wm->num_heads * cursor_line_pair_return_time);
	u32 latency = mc_latency + other_heads_data_return_time + dc_latency;
	u32 max_src_lines_per_dst_line, lb_fill_bw, line_fill_time;
	fixed20_12 a, b, c;

	if (wm->num_heads == 0)
		return 0;

	/* heavy downscaling, many vtaps or interlacing needs up to 4 source
	 * lines per output line; otherwise 2 suffice */
	a.full = dfixed_const(2);
	b.full = dfixed_const(1);
	if ((wm->vsc.full > a.full) ||
	    ((wm->vsc.full > b.full) && (wm->vtaps >= 3)) ||
	    (wm->vtaps >= 5) ||
	    ((wm->vsc.full >= a.full) && wm->interlaced))
		max_src_lines_per_dst_line = 4;
	else
		max_src_lines_per_dst_line = 2;

	/* lb fill bandwidth: the lesser of this head's share of the available
	 * bandwidth and what the display clock can consume (disp_clk * Bpp) */
	a.full = dfixed_const(available_bandwidth);
	b.full = dfixed_const(wm->num_heads);
	a.full = dfixed_div(a, b);

	b.full = dfixed_const(1000);
	c.full = dfixed_const(wm->disp_clk);
	b.full = dfixed_div(c, b);
	c.full = dfixed_const(wm->bytes_per_pixel);
	b.full = dfixed_mul(b, c);

	lb_fill_bw = min(dfixed_trunc(a), dfixed_trunc(b));

	/* time to fetch the source lines needed for one output line */
	a.full = dfixed_const(max_src_lines_per_dst_line * wm->src_width * wm->bytes_per_pixel);
	b.full = dfixed_const(1000);
	c.full = dfixed_const(lb_fill_bw);
	b.full = dfixed_div(c, b);
	a.full = dfixed_div(a, b);
	line_fill_time = dfixed_trunc(a);

	/* if a line cannot be filled during the active period, the overflow
	 * adds to the latency the hw must tolerate */
	if (line_fill_time < wm->active_time)
		return latency;
	else
		return latency + (line_fill_time - wm->active_time);

}
2118 
2119 static bool evergreen_average_bandwidth_vs_dram_bandwidth_for_display(struct evergreen_wm_params *wm)
2120 {
2121 	if (evergreen_average_bandwidth(wm) <=
2122 	    (evergreen_dram_bandwidth_for_display(wm) / wm->num_heads))
2123 		return true;
2124 	else
2125 		return false;
2126 };
2127 
2128 static bool evergreen_average_bandwidth_vs_available_bandwidth(struct evergreen_wm_params *wm)
2129 {
2130 	if (evergreen_average_bandwidth(wm) <=
2131 	    (evergreen_available_bandwidth(wm) / wm->num_heads))
2132 		return true;
2133 	else
2134 		return false;
2135 };
2136 
2137 static bool evergreen_check_latency_hiding(struct evergreen_wm_params *wm)
2138 {
2139 	u32 lb_partitions = wm->lb_size / wm->src_width;
2140 	u32 line_time = wm->active_time + wm->blank_time;
2141 	u32 latency_tolerant_lines;
2142 	u32 latency_hiding;
2143 	fixed20_12 a;
2144 
2145 	a.full = dfixed_const(1);
2146 	if (wm->vsc.full > a.full)
2147 		latency_tolerant_lines = 1;
2148 	else {
2149 		if (lb_partitions <= (wm->vtaps + 1))
2150 			latency_tolerant_lines = 1;
2151 		else
2152 			latency_tolerant_lines = 2;
2153 	}
2154 
2155 	latency_hiding = (latency_tolerant_lines * line_time + wm->blank_time);
2156 
2157 	if (evergreen_latency_watermark(wm) <= latency_hiding)
2158 		return true;
2159 	else
2160 		return false;
2161 }
2162 
/*
 * evergreen_program_watermarks - program latency watermarks and priority
 * marks for one crtc.
 *
 * @rdev: radeon_device pointer
 * @radeon_crtc: crtc to program
 * @lb_size: line buffer entries allocated to this crtc
 * @num_heads: total number of active crtcs
 *
 * Watermark A is computed for the high clock level, watermark B for the
 * low clock level; both are written to the hw along with display priority
 * settings derived from them.
 */
static void evergreen_program_watermarks(struct radeon_device *rdev,
					 struct radeon_crtc *radeon_crtc,
					 u32 lb_size, u32 num_heads)
{
	struct drm_display_mode *mode = &radeon_crtc->base.mode;
	struct evergreen_wm_params wm_low, wm_high;
	u32 dram_channels;
	u32 pixel_period;
	u32 line_time = 0;
	u32 latency_watermark_a = 0, latency_watermark_b = 0;
	u32 priority_a_mark = 0, priority_b_mark = 0;
	u32 priority_a_cnt = PRIORITY_OFF;
	u32 priority_b_cnt = PRIORITY_OFF;
	u32 pipe_offset = radeon_crtc->crtc_id * 16;
	u32 tmp, arb_control3;
	fixed20_12 a, b, c;

	if (radeon_crtc->base.enabled && num_heads && mode) {
		/* pixel period in ns; line_time capped to the 16-bit hw field */
		pixel_period = 1000000 / (u32)mode->clock;
		line_time = min((u32)mode->crtc_htotal * pixel_period, (u32)65535);
		priority_a_cnt = 0;
		priority_b_cnt = 0;
		dram_channels = evergreen_get_number_of_dram_channels(rdev);

		/* watermark for high clocks */
		if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) {
			wm_high.yclk =
				radeon_dpm_get_mclk(rdev, false) * 10;
			wm_high.sclk =
				radeon_dpm_get_sclk(rdev, false) * 10;
		} else {
			wm_high.yclk = rdev->pm.current_mclk * 10;
			wm_high.sclk = rdev->pm.current_sclk * 10;
		}

		wm_high.disp_clk = mode->clock;
		wm_high.src_width = mode->crtc_hdisplay;
		wm_high.active_time = mode->crtc_hdisplay * pixel_period;
		wm_high.blank_time = line_time - wm_high.active_time;
		wm_high.interlaced = false;
		if (mode->flags & DRM_MODE_FLAG_INTERLACE)
			wm_high.interlaced = true;
		wm_high.vsc = radeon_crtc->vsc;
		/* scaler active (rmx on) implies at least 2 vertical taps */
		wm_high.vtaps = 1;
		if (radeon_crtc->rmx_type != RMX_OFF)
			wm_high.vtaps = 2;
		wm_high.bytes_per_pixel = 4; /* XXX: get this from fb config */
		wm_high.lb_size = lb_size;
		wm_high.dram_channels = dram_channels;
		wm_high.num_heads = num_heads;

		/* watermark for low clocks */
		if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) {
			wm_low.yclk =
				radeon_dpm_get_mclk(rdev, true) * 10;
			wm_low.sclk =
				radeon_dpm_get_sclk(rdev, true) * 10;
		} else {
			wm_low.yclk = rdev->pm.current_mclk * 10;
			wm_low.sclk = rdev->pm.current_sclk * 10;
		}

		wm_low.disp_clk = mode->clock;
		wm_low.src_width = mode->crtc_hdisplay;
		wm_low.active_time = mode->crtc_hdisplay * pixel_period;
		wm_low.blank_time = line_time - wm_low.active_time;
		wm_low.interlaced = false;
		if (mode->flags & DRM_MODE_FLAG_INTERLACE)
			wm_low.interlaced = true;
		wm_low.vsc = radeon_crtc->vsc;
		wm_low.vtaps = 1;
		if (radeon_crtc->rmx_type != RMX_OFF)
			wm_low.vtaps = 2;
		wm_low.bytes_per_pixel = 4; /* XXX: get this from fb config */
		wm_low.lb_size = lb_size;
		wm_low.dram_channels = dram_channels;
		wm_low.num_heads = num_heads;

		/* set for high clocks */
		latency_watermark_a = min(evergreen_latency_watermark(&wm_high), (u32)65535);
		/* set for low clocks */
		latency_watermark_b = min(evergreen_latency_watermark(&wm_low), (u32)65535);

		/* possibly force display priority to high */
		/* should really do this at mode validation time... */
		if (!evergreen_average_bandwidth_vs_dram_bandwidth_for_display(&wm_high) ||
		    !evergreen_average_bandwidth_vs_available_bandwidth(&wm_high) ||
		    !evergreen_check_latency_hiding(&wm_high) ||
		    (rdev->disp_priority == 2)) {
			DRM_DEBUG_KMS("force priority a to high\n");
			priority_a_cnt |= PRIORITY_ALWAYS_ON;
		}
		if (!evergreen_average_bandwidth_vs_dram_bandwidth_for_display(&wm_low) ||
		    !evergreen_average_bandwidth_vs_available_bandwidth(&wm_low) ||
		    !evergreen_check_latency_hiding(&wm_low) ||
		    (rdev->disp_priority == 2)) {
			DRM_DEBUG_KMS("force priority b to high\n");
			priority_b_cnt |= PRIORITY_ALWAYS_ON;
		}

		/* priority mark A: latency_watermark_a * (clock/1000) * hsc
		 * / 1000 / 16 -- i.e. the watermark converted from time into
		 * scaled pixels, in units of 16 */
		a.full = dfixed_const(1000);
		b.full = dfixed_const(mode->clock);
		b.full = dfixed_div(b, a);
		c.full = dfixed_const(latency_watermark_a);
		c.full = dfixed_mul(c, b);
		c.full = dfixed_mul(c, radeon_crtc->hsc);
		c.full = dfixed_div(c, a);
		a.full = dfixed_const(16);
		c.full = dfixed_div(c, a);
		priority_a_mark = dfixed_trunc(c);
		priority_a_cnt |= priority_a_mark & PRIORITY_MARK_MASK;

		/* priority mark B: same conversion for the low-clock watermark */
		a.full = dfixed_const(1000);
		b.full = dfixed_const(mode->clock);
		b.full = dfixed_div(b, a);
		c.full = dfixed_const(latency_watermark_b);
		c.full = dfixed_mul(c, b);
		c.full = dfixed_mul(c, radeon_crtc->hsc);
		c.full = dfixed_div(c, a);
		a.full = dfixed_const(16);
		c.full = dfixed_div(c, a);
		priority_b_mark = dfixed_trunc(c);
		priority_b_cnt |= priority_b_mark & PRIORITY_MARK_MASK;
	}

	/* select wm A */
	arb_control3 = RREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset);
	tmp = arb_control3;
	tmp &= ~LATENCY_WATERMARK_MASK(3);
	tmp |= LATENCY_WATERMARK_MASK(1);
	WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, tmp);
	WREG32(PIPE0_LATENCY_CONTROL + pipe_offset,
	       (LATENCY_LOW_WATERMARK(latency_watermark_a) |
		LATENCY_HIGH_WATERMARK(line_time)));
	/* select wm B */
	tmp = RREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset);
	tmp &= ~LATENCY_WATERMARK_MASK(3);
	tmp |= LATENCY_WATERMARK_MASK(2);
	WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, tmp);
	WREG32(PIPE0_LATENCY_CONTROL + pipe_offset,
	       (LATENCY_LOW_WATERMARK(latency_watermark_b) |
		LATENCY_HIGH_WATERMARK(line_time)));
	/* restore original selection */
	WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, arb_control3);

	/* write the priority marks */
	WREG32(PRIORITY_A_CNT + radeon_crtc->crtc_offset, priority_a_cnt);
	WREG32(PRIORITY_B_CNT + radeon_crtc->crtc_offset, priority_b_cnt);

	/* save values for DPM */
	radeon_crtc->line_time = line_time;
	radeon_crtc->wm_high = latency_watermark_a;
	radeon_crtc->wm_low = latency_watermark_b;
}
2317 
2318 /**
2319  * evergreen_bandwidth_update - update display watermarks callback.
2320  *
2321  * @rdev: radeon_device pointer
2322  *
2323  * Update the display watermarks based on the requested mode(s)
2324  * (evergreen+).
2325  */
void evergreen_bandwidth_update(struct radeon_device *rdev)
{
	struct drm_display_mode *mode0 = NULL;
	struct drm_display_mode *mode1 = NULL;
	u32 num_heads = 0, lb_size;
	int i;

	/* nothing to do before modesetting is up */
	if (!rdev->mode_info.mode_config_initialized)
		return;

	radeon_update_display_priority(rdev);

	/* count the active heads */
	for (i = 0; i < rdev->num_crtc; i++) {
		if (rdev->mode_info.crtcs[i]->base.enabled)
			num_heads++;
	}
	/* crtcs are handled in pairs: each pair shares one line buffer, so
	 * each crtc's allocation depends on its partner's mode */
	for (i = 0; i < rdev->num_crtc; i += 2) {
		mode0 = &rdev->mode_info.crtcs[i]->base.mode;
		mode1 = &rdev->mode_info.crtcs[i+1]->base.mode;
		lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i], mode0, mode1);
		evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i], lb_size, num_heads);
		lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i+1], mode1, mode0);
		evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i+1], lb_size, num_heads);
	}
}
2351 
2352 /**
2353  * evergreen_mc_wait_for_idle - wait for MC idle callback.
2354  *
2355  * @rdev: radeon_device pointer
2356  *
2357  * Wait for the MC (memory controller) to be idle.
2358  * (evergreen+).
2359  * Returns 0 if the MC is idle, -1 if not.
2360  */
2361 int evergreen_mc_wait_for_idle(struct radeon_device *rdev)
2362 {
2363 	unsigned i;
2364 	u32 tmp;
2365 
2366 	for (i = 0; i < rdev->usec_timeout; i++) {
2367 		/* read MC_STATUS */
2368 		tmp = RREG32(SRBM_STATUS) & 0x1F00;
2369 		if (!tmp)
2370 			return 0;
2371 		udelay(1);
2372 	}
2373 	return -1;
2374 }
2375 
2376 /*
2377  * GART
2378  */
/* Flush the GART TLB: flush the HDP cache so the MC sees current page
 * table entries, then issue a VM context0 request and poll the response. */
void evergreen_pcie_gart_tlb_flush(struct radeon_device *rdev)
{
	unsigned i;
	u32 tmp;

	WREG32(HDP_MEM_COHERENCY_FLUSH_CNTL, 0x1);

	WREG32(VM_CONTEXT0_REQUEST_RESPONSE, REQUEST_TYPE(1));
	for (i = 0; i < rdev->usec_timeout; i++) {
		/* read MC_STATUS */
		tmp = RREG32(VM_CONTEXT0_REQUEST_RESPONSE);
		tmp = (tmp & RESPONSE_TYPE_MASK) >> RESPONSE_TYPE_SHIFT;
		if (tmp == 2) {
			/* response type 2 indicates the flush failed */
			printk(KERN_WARNING "[drm] r600 flush TLB failed\n");
			return;
		}
		if (tmp) {
			/* any other non-zero response: flush completed */
			return;
		}
		udelay(1);
	}
}
2401 
/* Bring up the PCIE GART: program the VM L2 cache and L1 TLBs, point VM
 * context0 at the page table in VRAM and flush the TLB.
 * Returns 0 on success, negative error code on failure. */
static int evergreen_pcie_gart_enable(struct radeon_device *rdev)
{
	u32 tmp;
	int r;

	if (rdev->gart.robj == NULL) {
		dev_err(rdev->dev, "No VRAM object for PCIE GART.\n");
		return -EINVAL;
	}
	r = radeon_gart_table_vram_pin(rdev);
	if (r)
		return r;
	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
				ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control */
	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
		SYSTEM_ACCESS_MODE_NOT_IN_SYS |
		SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
		EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	if (rdev->flags & RADEON_IS_IGP) {
		/* fusion (IGP) parts use the FUS_ variants of the MD TLB regs */
		WREG32(FUS_MC_VM_MD_L1_TLB0_CNTL, tmp);
		WREG32(FUS_MC_VM_MD_L1_TLB1_CNTL, tmp);
		WREG32(FUS_MC_VM_MD_L1_TLB2_CNTL, tmp);
	} else {
		WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
		WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
		WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
		/* only these discrete families have a fourth MD TLB */
		if ((rdev->family == CHIP_JUNIPER) ||
		    (rdev->family == CHIP_CYPRESS) ||
		    (rdev->family == CHIP_HEMLOCK) ||
		    (rdev->family == CHIP_BARTS))
			WREG32(MC_VM_MD_L1_TLB3_CNTL, tmp);
	}
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	/* context0 covers the whole GTT aperture and backs it with the
	 * page table; faults fall back to the dummy page */
	WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12);
	WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12);
	WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12);
	WREG32(VM_CONTEXT0_CNTL, ENABLE_CONTEXT | PAGE_TABLE_DEPTH(0) |
				RANGE_PROTECTION_FAULT_ENABLE_DEFAULT);
	WREG32(VM_CONTEXT0_PROTECTION_FAULT_DEFAULT_ADDR,
			(u32)(rdev->dummy_page.addr >> 12));
	WREG32(VM_CONTEXT1_CNTL, 0);

	evergreen_pcie_gart_tlb_flush(rdev);
	DRM_INFO("PCIE GART of %uM enabled (table at 0x%016llX).\n",
		 (unsigned)(rdev->mc.gtt_size >> 20),
		 (unsigned long long)rdev->gart.table_addr);
	rdev->gart.ready = true;
	return 0;
}
2459 
/* Disable the PCIE GART: turn off both VM contexts, leave the L2/TLB in a
 * minimal pass-through configuration and unpin the page table. */
static void evergreen_pcie_gart_disable(struct radeon_device *rdev)
{
	u32 tmp;

	/* Disable all tables */
	WREG32(VM_CONTEXT0_CNTL, 0);
	WREG32(VM_CONTEXT1_CNTL, 0);

	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_FRAGMENT_PROCESSING |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control */
	tmp = EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	radeon_gart_table_vram_unpin(rdev);
}
2484 
/* Tear down the PCIE GART: disable the hw, free the page table in VRAM
 * and release the gart structure. */
static void evergreen_pcie_gart_fini(struct radeon_device *rdev)
{
	evergreen_pcie_gart_disable(rdev);
	radeon_gart_table_vram_free(rdev);
	radeon_gart_fini(rdev);
}
2491 
2492 
/* Configure the VM L2 cache and L1 TLBs for AGP operation: TLBs are
 * enabled but both VM contexts are disabled (no page table is used). */
static void evergreen_agp_enable(struct radeon_device *rdev)
{
	u32 tmp;

	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
				ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control */
	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
		SYSTEM_ACCESS_MODE_NOT_IN_SYS |
		SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
		EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	/* no VM contexts in AGP mode */
	WREG32(VM_CONTEXT0_CNTL, 0);
	WREG32(VM_CONTEXT1_CNTL, 0);
}
2518 
/* Shut down the display controllers and blackout the MC so VRAM can be
 * reprogrammed safely; the state needed to undo this is recorded in @save
 * and restored by evergreen_mc_resume(). */
void evergreen_mc_stop(struct radeon_device *rdev, struct evergreen_mc_save *save)
{
	u32 crtc_enabled, tmp, frame_count, blackout;
	int i, j;

	if (!ASIC_IS_NODCE(rdev)) {
		/* save VGA state for evergreen_mc_resume() */
		save->vga_render_control = RREG32(VGA_RENDER_CONTROL);
		save->vga_hdp_control = RREG32(VGA_HDP_CONTROL);

		/* disable VGA render */
		WREG32(VGA_RENDER_CONTROL, 0);
	}
	/* blank the display controllers */
	for (i = 0; i < rdev->num_crtc; i++) {
		crtc_enabled = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]) & EVERGREEN_CRTC_MASTER_EN;
		if (crtc_enabled) {
			save->crtc_enabled[i] = true;
			if (ASIC_IS_DCE6(rdev)) {
				/* DCE6: blank via CRTC_BLANK_CONTROL */
				tmp = RREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i]);
				if (!(tmp & EVERGREEN_CRTC_BLANK_DATA_EN)) {
					radeon_wait_for_vblank(rdev, i);
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
					tmp |= EVERGREEN_CRTC_BLANK_DATA_EN;
					WREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i], tmp);
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
				}
			} else {
				/* DCE4/5: stop display read requests instead */
				tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
				if (!(tmp & EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE)) {
					radeon_wait_for_vblank(rdev, i);
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
					tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
					WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
				}
			}
			/* wait for the next frame */
			frame_count = radeon_get_vblank_counter(rdev, i);
			for (j = 0; j < rdev->usec_timeout; j++) {
				if (radeon_get_vblank_counter(rdev, i) != frame_count)
					break;
				udelay(1);
			}

			/* XXX this is a hack to avoid strange behavior with EFI on certain systems */
			WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
			tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
			tmp &= ~EVERGREEN_CRTC_MASTER_EN;
			WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
			WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
			/* NOTE(review): crtc_enabled is deliberately recorded as
			 * false here even though the crtc was active, so that
			 * evergreen_mc_resume() (and the lock loop below) skip it
			 * -- presumably part of the EFI workaround above; the
			 * "lock double buffered regs" loop is effectively dead as
			 * a result */
			save->crtc_enabled[i] = false;
			/* ***** */
		} else {
			save->crtc_enabled[i] = false;
		}
	}

	radeon_mc_wait_for_idle(rdev);

	blackout = RREG32(MC_SHARED_BLACKOUT_CNTL);
	if ((blackout & BLACKOUT_MODE_MASK) != 1) {
		/* Block CPU access */
		WREG32(BIF_FB_EN, 0);
		/* blackout the MC */
		blackout &= ~BLACKOUT_MODE_MASK;
		WREG32(MC_SHARED_BLACKOUT_CNTL, blackout | 1);
	}
	/* wait for the MC to settle */
	udelay(100);

	/* lock double buffered regs */
	for (i = 0; i < rdev->num_crtc; i++) {
		if (save->crtc_enabled[i]) {
			tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
			if (!(tmp & EVERGREEN_GRPH_UPDATE_LOCK)) {
				tmp |= EVERGREEN_GRPH_UPDATE_LOCK;
				WREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i], tmp);
			}
			tmp = RREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i]);
			if (!(tmp & 1)) {
				tmp |= 1;
				WREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i], tmp);
			}
		}
	}
}
2605 
/* Undo evergreen_mc_stop(): retarget the crtc scanout addresses at the
 * (possibly moved) VRAM base, unblackout the MC and re-enable the display
 * controllers that were active according to @save. */
void evergreen_mc_resume(struct radeon_device *rdev, struct evergreen_mc_save *save)
{
	u32 tmp, frame_count;
	int i, j;

	/* update crtc base addresses */
	for (i = 0; i < rdev->num_crtc; i++) {
		WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + crtc_offsets[i],
		       upper_32_bits(rdev->mc.vram_start));
		WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + crtc_offsets[i],
		       upper_32_bits(rdev->mc.vram_start));
		WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + crtc_offsets[i],
		       (u32)rdev->mc.vram_start);
		WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + crtc_offsets[i],
		       (u32)rdev->mc.vram_start);
	}

	if (!ASIC_IS_NODCE(rdev)) {
		/* point the VGA aperture at the new VRAM base as well */
		WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS_HIGH, upper_32_bits(rdev->mc.vram_start));
		WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS, (u32)rdev->mc.vram_start);
	}

	/* unlock regs and wait for update */
	for (i = 0; i < rdev->num_crtc; i++) {
		if (save->crtc_enabled[i]) {
			tmp = RREG32(EVERGREEN_MASTER_UPDATE_MODE + crtc_offsets[i]);
			if ((tmp & 0x7) != 3) {
				tmp &= ~0x7;
				tmp |= 0x3;
				WREG32(EVERGREEN_MASTER_UPDATE_MODE + crtc_offsets[i], tmp);
			}
			tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
			if (tmp & EVERGREEN_GRPH_UPDATE_LOCK) {
				tmp &= ~EVERGREEN_GRPH_UPDATE_LOCK;
				WREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i], tmp);
			}
			tmp = RREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i]);
			if (tmp & 1) {
				tmp &= ~1;
				WREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i], tmp);
			}
			/* wait for the pending surface update to land */
			for (j = 0; j < rdev->usec_timeout; j++) {
				tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
				if ((tmp & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING) == 0)
					break;
				udelay(1);
			}
		}
	}

	/* unblackout the MC */
	tmp = RREG32(MC_SHARED_BLACKOUT_CNTL);
	tmp &= ~BLACKOUT_MODE_MASK;
	WREG32(MC_SHARED_BLACKOUT_CNTL, tmp);
	/* allow CPU access */
	WREG32(BIF_FB_EN, FB_READ_EN | FB_WRITE_EN);

	/* unblank the crtcs that evergreen_mc_stop() recorded as enabled */
	for (i = 0; i < rdev->num_crtc; i++) {
		if (save->crtc_enabled[i]) {
			if (ASIC_IS_DCE6(rdev)) {
				tmp = RREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i]);
				tmp &= ~EVERGREEN_CRTC_BLANK_DATA_EN;
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
				WREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i], tmp);
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
			} else {
				tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
				tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
				WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
			}
			/* wait for the next frame */
			frame_count = radeon_get_vblank_counter(rdev, i);
			for (j = 0; j < rdev->usec_timeout; j++) {
				if (radeon_get_vblank_counter(rdev, i) != frame_count)
					break;
				udelay(1);
			}
		}
	}
	if (!ASIC_IS_NODCE(rdev)) {
		/* Unlock vga access */
		WREG32(VGA_HDP_CONTROL, save->vga_hdp_control);
		mdelay(1);
		WREG32(VGA_RENDER_CONTROL, save->vga_render_control);
	}
}
2694 
/* Program the memory controller's VRAM/GTT apertures.  The display is
 * stopped (evergreen_mc_stop) around the update because the MC cannot be
 * reprogrammed while clients are accessing it. */
void evergreen_mc_program(struct radeon_device *rdev)
{
	struct evergreen_mc_save save;
	u32 tmp;
	int i, j;

	/* Initialize HDP */
	for (i = 0, j = 0; i < 32; i++, j += 0x18) {
		WREG32((0x2c14 + j), 0x00000000);
		WREG32((0x2c18 + j), 0x00000000);
		WREG32((0x2c1c + j), 0x00000000);
		WREG32((0x2c20 + j), 0x00000000);
		WREG32((0x2c24 + j), 0x00000000);
	}
	WREG32(HDP_REG_COHERENCY_FLUSH_CNTL, 0);

	evergreen_mc_stop(rdev, &save);
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
	}
	/* Lockout access through VGA aperture*/
	WREG32(VGA_HDP_CONTROL, VGA_MEMORY_DISABLE);
	/* Update configuration */
	if (rdev->flags & RADEON_IS_AGP) {
		/* system aperture must span both VRAM and the AGP window */
		if (rdev->mc.vram_start < rdev->mc.gtt_start) {
			/* VRAM before AGP */
			WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
				rdev->mc.vram_start >> 12);
			WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
				rdev->mc.gtt_end >> 12);
		} else {
			/* VRAM after AGP */
			WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
				rdev->mc.gtt_start >> 12);
			WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
				rdev->mc.vram_end >> 12);
		}
	} else {
		WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
			rdev->mc.vram_start >> 12);
		WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
			rdev->mc.vram_end >> 12);
	}
	WREG32(MC_VM_SYSTEM_APERTURE_DEFAULT_ADDR, rdev->vram_scratch.gpu_addr >> 12);
	/* llano/ontario only */
	if ((rdev->family == CHIP_PALM) ||
	    (rdev->family == CHIP_SUMO) ||
	    (rdev->family == CHIP_SUMO2)) {
		tmp = RREG32(MC_FUS_VM_FB_OFFSET) & 0x000FFFFF;
		tmp |= ((rdev->mc.vram_end >> 20) & 0xF) << 24;
		tmp |= ((rdev->mc.vram_start >> 20) & 0xF) << 20;
		WREG32(MC_FUS_VM_FB_OFFSET, tmp);
	}
	/* FB location: start in low half, end in high half, 16MB units */
	tmp = ((rdev->mc.vram_end >> 24) & 0xFFFF) << 16;
	tmp |= ((rdev->mc.vram_start >> 24) & 0xFFFF);
	WREG32(MC_VM_FB_LOCATION, tmp);
	WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8));
	WREG32(HDP_NONSURFACE_INFO, (2 << 7) | (1 << 30));
	WREG32(HDP_NONSURFACE_SIZE, 0x3FFFFFFF);
	if (rdev->flags & RADEON_IS_AGP) {
		WREG32(MC_VM_AGP_TOP, rdev->mc.gtt_end >> 16);
		WREG32(MC_VM_AGP_BOT, rdev->mc.gtt_start >> 16);
		WREG32(MC_VM_AGP_BASE, rdev->mc.agp_base >> 22);
	} else {
		/* BOT > TOP disables the AGP aperture */
		WREG32(MC_VM_AGP_BASE, 0);
		WREG32(MC_VM_AGP_TOP, 0x0FFFFFFF);
		WREG32(MC_VM_AGP_BOT, 0x0FFFFFFF);
	}
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
	}
	evergreen_mc_resume(rdev, &save);
	/* we need to own VRAM, so turn off the VGA renderer here
	 * to stop it overwriting our objects */
	rv515_vga_render_disable(rdev);
}
2771 
2772 /*
2773  * CP.
2774  */
/**
 * evergreen_ring_ib_execute - schedule an indirect buffer on the GFX ring
 * @rdev: radeon device
 * @ib: indirect buffer to execute
 *
 * Emits an INDIRECT_BUFFER packet that makes the CP fetch and execute
 * @ib.  Before that, it optionally emits a packet that records the
 * read pointer value the ring will have once the IB packet is
 * consumed, either into rptr_save_reg or into the writeback buffer.
 */
void evergreen_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib)
{
	struct radeon_ring *ring = &rdev->ring[ib->ring];
	u32 next_rptr;

	/* set to DX10/11 mode */
	radeon_ring_write(ring, PACKET3(PACKET3_MODE_CONTROL, 0));
	radeon_ring_write(ring, 1);

	if (ring->rptr_save_reg) {
		/* 3 dwords for this SET_CONFIG_REG packet + 4 for the
		 * INDIRECT_BUFFER packet below */
		next_rptr = ring->wptr + 3 + 4;
		radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
		radeon_ring_write(ring, ((ring->rptr_save_reg -
					  PACKET3_SET_CONFIG_REG_START) >> 2));
		radeon_ring_write(ring, next_rptr);
	} else if (rdev->wb.enabled) {
		/* 5 dwords for this MEM_WRITE packet + 4 for the
		 * INDIRECT_BUFFER packet below */
		next_rptr = ring->wptr + 5 + 4;
		radeon_ring_write(ring, PACKET3(PACKET3_MEM_WRITE, 3));
		radeon_ring_write(ring, ring->next_rptr_gpu_addr & 0xfffffffc);
		radeon_ring_write(ring, (upper_32_bits(ring->next_rptr_gpu_addr) & 0xff) | (1 << 18));
		radeon_ring_write(ring, next_rptr);
		radeon_ring_write(ring, 0);
	}

	radeon_ring_write(ring, PACKET3(PACKET3_INDIRECT_BUFFER, 2));
	radeon_ring_write(ring,
#ifdef __BIG_ENDIAN
			  (2 << 0) |
#endif
			  (ib->gpu_addr & 0xFFFFFFFC));
	radeon_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFF);
	radeon_ring_write(ring, ib->length_dw);
}
2808 
2809 
/**
 * evergreen_cp_load_microcode - load the CP PFP and ME firmware
 * @rdev: radeon device
 *
 * Stops the CP, then streams the prefetch-parser (PFP) and
 * micro-engine (ME) microcode images into their respective ucode
 * RAMs and resets the RAM address pointers.
 *
 * Returns 0 on success, -EINVAL if the firmware has not been fetched.
 */
static int evergreen_cp_load_microcode(struct radeon_device *rdev)
{
	const __be32 *fw_data;
	int i;

	if (!rdev->me_fw || !rdev->pfp_fw)
		return -EINVAL;

	r700_cp_stop(rdev);
	WREG32(CP_RB_CNTL,
#ifdef __BIG_ENDIAN
	       BUF_SWAP_32BIT |
#endif
	       RB_NO_UPDATE | RB_BLKSZ(15) | RB_BUFSZ(3));

	/* firmware images are stored as big-endian words */
	fw_data = (const __be32 *)rdev->pfp_fw->data;
	WREG32(CP_PFP_UCODE_ADDR, 0);
	for (i = 0; i < EVERGREEN_PFP_UCODE_SIZE; i++)
		WREG32(CP_PFP_UCODE_DATA, be32_to_cpup(fw_data++));
	WREG32(CP_PFP_UCODE_ADDR, 0);

	fw_data = (const __be32 *)rdev->me_fw->data;
	WREG32(CP_ME_RAM_WADDR, 0);
	for (i = 0; i < EVERGREEN_PM4_UCODE_SIZE; i++)
		WREG32(CP_ME_RAM_DATA, be32_to_cpup(fw_data++));

	/* rewind the address pointers for subsequent accesses */
	WREG32(CP_PFP_UCODE_ADDR, 0);
	WREG32(CP_ME_RAM_WADDR, 0);
	WREG32(CP_ME_RAM_RADDR, 0);
	return 0;
}
2841 
/**
 * evergreen_cp_start - initialize the CP micro-engine and clear state
 * @rdev: radeon device
 *
 * Emits the ME_INITIALIZE packet, un-halts the CP, then submits the
 * golden clear-state preamble (evergreen_default_state) plus a few
 * fixed register initializations on the GFX ring.
 *
 * Returns 0 on success, negative error code if the ring could not be
 * locked.
 */
static int evergreen_cp_start(struct radeon_device *rdev)
{
	struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
	int r, i;
	uint32_t cp_me;

	/* 7 dwords: ME_INITIALIZE header + 6 payload dwords below */
	r = radeon_ring_lock(rdev, ring, 7);
	if (r) {
		DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
		return r;
	}
	radeon_ring_write(ring, PACKET3(PACKET3_ME_INITIALIZE, 5));
	radeon_ring_write(ring, 0x1);
	radeon_ring_write(ring, 0x0);
	radeon_ring_write(ring, rdev->config.evergreen.max_hw_contexts - 1);
	radeon_ring_write(ring, PACKET3_ME_INITIALIZE_DEVICE_ID(1));
	radeon_ring_write(ring, 0);
	radeon_ring_write(ring, 0);
	radeon_ring_unlock_commit(rdev, ring, false);

	/* un-halt the micro engine */
	cp_me = 0xff;
	WREG32(CP_ME_CNTL, cp_me);

	/* clear-state table + 19 dwords of fixed packets below */
	r = radeon_ring_lock(rdev, ring, evergreen_default_size + 19);
	if (r) {
		DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
		return r;
	}

	/* setup clear context state */
	radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
	radeon_ring_write(ring, PACKET3_PREAMBLE_BEGIN_CLEAR_STATE);

	for (i = 0; i < evergreen_default_size; i++)
		radeon_ring_write(ring, evergreen_default_state[i]);

	radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
	radeon_ring_write(ring, PACKET3_PREAMBLE_END_CLEAR_STATE);

	/* set clear context state */
	radeon_ring_write(ring, PACKET3(PACKET3_CLEAR_STATE, 0));
	radeon_ring_write(ring, 0);

	/* SQ_VTX_BASE_VTX_LOC */
	radeon_ring_write(ring, 0xc0026f00);
	radeon_ring_write(ring, 0x00000000);
	radeon_ring_write(ring, 0x00000000);
	radeon_ring_write(ring, 0x00000000);

	/* Clear consts */
	radeon_ring_write(ring, 0xc0036f00);
	radeon_ring_write(ring, 0x00000bc4);
	radeon_ring_write(ring, 0xffffffff);
	radeon_ring_write(ring, 0xffffffff);
	radeon_ring_write(ring, 0xffffffff);

	radeon_ring_write(ring, 0xc0026900);
	radeon_ring_write(ring, 0x00000316);
	radeon_ring_write(ring, 0x0000000e); /* VGT_VERTEX_REUSE_BLOCK_CNTL */
	radeon_ring_write(ring, 0x00000010); /*  */

	radeon_ring_unlock_commit(rdev, ring, false);

	return 0;
}
2907 
2908 static int evergreen_cp_resume(struct radeon_device *rdev)
2909 {
2910 	struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
2911 	u32 tmp;
2912 	u32 rb_bufsz;
2913 	int r;
2914 
2915 	/* Reset cp; if cp is reset, then PA, SH, VGT also need to be reset */
2916 	WREG32(GRBM_SOFT_RESET, (SOFT_RESET_CP |
2917 				 SOFT_RESET_PA |
2918 				 SOFT_RESET_SH |
2919 				 SOFT_RESET_VGT |
2920 				 SOFT_RESET_SPI |
2921 				 SOFT_RESET_SX));
2922 	RREG32(GRBM_SOFT_RESET);
2923 	mdelay(15);
2924 	WREG32(GRBM_SOFT_RESET, 0);
2925 	RREG32(GRBM_SOFT_RESET);
2926 
2927 	/* Set ring buffer size */
2928 	rb_bufsz = order_base_2(ring->ring_size / 8);
2929 	tmp = (order_base_2(RADEON_GPU_PAGE_SIZE/8) << 8) | rb_bufsz;
2930 #ifdef __BIG_ENDIAN
2931 	tmp |= BUF_SWAP_32BIT;
2932 #endif
2933 	WREG32(CP_RB_CNTL, tmp);
2934 	WREG32(CP_SEM_WAIT_TIMER, 0x0);
2935 	WREG32(CP_SEM_INCOMPLETE_TIMER_CNTL, 0x0);
2936 
2937 	/* Set the write pointer delay */
2938 	WREG32(CP_RB_WPTR_DELAY, 0);
2939 
2940 	/* Initialize the ring buffer's read and write pointers */
2941 	WREG32(CP_RB_CNTL, tmp | RB_RPTR_WR_ENA);
2942 	WREG32(CP_RB_RPTR_WR, 0);
2943 	ring->wptr = 0;
2944 	WREG32(CP_RB_WPTR, ring->wptr);
2945 
2946 	/* set the wb address whether it's enabled or not */
2947 	WREG32(CP_RB_RPTR_ADDR,
2948 	       ((rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFFFFFFFC));
2949 	WREG32(CP_RB_RPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFF);
2950 	WREG32(SCRATCH_ADDR, ((rdev->wb.gpu_addr + RADEON_WB_SCRATCH_OFFSET) >> 8) & 0xFFFFFFFF);
2951 
2952 	if (rdev->wb.enabled)
2953 		WREG32(SCRATCH_UMSK, 0xff);
2954 	else {
2955 		tmp |= RB_NO_UPDATE;
2956 		WREG32(SCRATCH_UMSK, 0);
2957 	}
2958 
2959 	mdelay(1);
2960 	WREG32(CP_RB_CNTL, tmp);
2961 
2962 	WREG32(CP_RB_BASE, ring->gpu_addr >> 8);
2963 	WREG32(CP_DEBUG, (1 << 27) | (1 << 28));
2964 
2965 	evergreen_cp_start(rdev);
2966 	ring->ready = true;
2967 	r = radeon_ring_test(rdev, RADEON_RING_TYPE_GFX_INDEX, ring);
2968 	if (r) {
2969 		ring->ready = false;
2970 		return r;
2971 	}
2972 	return 0;
2973 }
2974 
2975 /*
2976  * Core functions
2977  */
/**
 * evergreen_gpu_init - program the 3D engine configuration
 * @rdev: radeon device
 *
 * Fills rdev->config.evergreen with the per-family shader/backend
 * limits, derives the tiling configuration dword, maps out disabled
 * render backends and SIMDs, and then writes the SQ/SX/SC/VGT
 * resource-partitioning "golden" register settings.
 */
static void evergreen_gpu_init(struct radeon_device *rdev)
{
	u32 gb_addr_config;
	u32 mc_shared_chmap, mc_arb_ramcfg;
	u32 sx_debug_1;
	u32 smx_dc_ctl0;
	u32 sq_config;
	u32 sq_lds_resource_mgmt;
	u32 sq_gpr_resource_mgmt_1;
	u32 sq_gpr_resource_mgmt_2;
	u32 sq_gpr_resource_mgmt_3;
	u32 sq_thread_resource_mgmt;
	u32 sq_thread_resource_mgmt_2;
	u32 sq_stack_resource_mgmt_1;
	u32 sq_stack_resource_mgmt_2;
	u32 sq_stack_resource_mgmt_3;
	u32 vgt_cache_invalidation;
	u32 hdp_host_path_cntl, tmp;
	u32 disabled_rb_mask;
	int i, j, ps_thread_count;

	/* per-ASIC hardware limits and the golden GB_ADDR_CONFIG value */
	switch (rdev->family) {
	case CHIP_CYPRESS:
	case CHIP_HEMLOCK:
		rdev->config.evergreen.num_ses = 2;
		rdev->config.evergreen.max_pipes = 4;
		rdev->config.evergreen.max_tile_pipes = 8;
		rdev->config.evergreen.max_simds = 10;
		rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
		rdev->config.evergreen.max_gprs = 256;
		rdev->config.evergreen.max_threads = 248;
		rdev->config.evergreen.max_gs_threads = 32;
		rdev->config.evergreen.max_stack_entries = 512;
		rdev->config.evergreen.sx_num_of_sets = 4;
		rdev->config.evergreen.sx_max_export_size = 256;
		rdev->config.evergreen.sx_max_export_pos_size = 64;
		rdev->config.evergreen.sx_max_export_smx_size = 192;
		rdev->config.evergreen.max_hw_contexts = 8;
		rdev->config.evergreen.sq_num_cf_insts = 2;

		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
		gb_addr_config = CYPRESS_GB_ADDR_CONFIG_GOLDEN;
		break;
	case CHIP_JUNIPER:
		rdev->config.evergreen.num_ses = 1;
		rdev->config.evergreen.max_pipes = 4;
		rdev->config.evergreen.max_tile_pipes = 4;
		rdev->config.evergreen.max_simds = 10;
		rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
		rdev->config.evergreen.max_gprs = 256;
		rdev->config.evergreen.max_threads = 248;
		rdev->config.evergreen.max_gs_threads = 32;
		rdev->config.evergreen.max_stack_entries = 512;
		rdev->config.evergreen.sx_num_of_sets = 4;
		rdev->config.evergreen.sx_max_export_size = 256;
		rdev->config.evergreen.sx_max_export_pos_size = 64;
		rdev->config.evergreen.sx_max_export_smx_size = 192;
		rdev->config.evergreen.max_hw_contexts = 8;
		rdev->config.evergreen.sq_num_cf_insts = 2;

		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
		gb_addr_config = JUNIPER_GB_ADDR_CONFIG_GOLDEN;
		break;
	case CHIP_REDWOOD:
		rdev->config.evergreen.num_ses = 1;
		rdev->config.evergreen.max_pipes = 4;
		rdev->config.evergreen.max_tile_pipes = 4;
		rdev->config.evergreen.max_simds = 5;
		rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
		rdev->config.evergreen.max_gprs = 256;
		rdev->config.evergreen.max_threads = 248;
		rdev->config.evergreen.max_gs_threads = 32;
		rdev->config.evergreen.max_stack_entries = 256;
		rdev->config.evergreen.sx_num_of_sets = 4;
		rdev->config.evergreen.sx_max_export_size = 256;
		rdev->config.evergreen.sx_max_export_pos_size = 64;
		rdev->config.evergreen.sx_max_export_smx_size = 192;
		rdev->config.evergreen.max_hw_contexts = 8;
		rdev->config.evergreen.sq_num_cf_insts = 2;

		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
		gb_addr_config = REDWOOD_GB_ADDR_CONFIG_GOLDEN;
		break;
	case CHIP_CEDAR:
	default:
		rdev->config.evergreen.num_ses = 1;
		rdev->config.evergreen.max_pipes = 2;
		rdev->config.evergreen.max_tile_pipes = 2;
		rdev->config.evergreen.max_simds = 2;
		rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
		rdev->config.evergreen.max_gprs = 256;
		rdev->config.evergreen.max_threads = 192;
		rdev->config.evergreen.max_gs_threads = 16;
		rdev->config.evergreen.max_stack_entries = 256;
		rdev->config.evergreen.sx_num_of_sets = 4;
		rdev->config.evergreen.sx_max_export_size = 128;
		rdev->config.evergreen.sx_max_export_pos_size = 32;
		rdev->config.evergreen.sx_max_export_smx_size = 96;
		rdev->config.evergreen.max_hw_contexts = 4;
		rdev->config.evergreen.sq_num_cf_insts = 1;

		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
		gb_addr_config = CEDAR_GB_ADDR_CONFIG_GOLDEN;
		break;
	case CHIP_PALM:
		rdev->config.evergreen.num_ses = 1;
		rdev->config.evergreen.max_pipes = 2;
		rdev->config.evergreen.max_tile_pipes = 2;
		rdev->config.evergreen.max_simds = 2;
		rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
		rdev->config.evergreen.max_gprs = 256;
		rdev->config.evergreen.max_threads = 192;
		rdev->config.evergreen.max_gs_threads = 16;
		rdev->config.evergreen.max_stack_entries = 256;
		rdev->config.evergreen.sx_num_of_sets = 4;
		rdev->config.evergreen.sx_max_export_size = 128;
		rdev->config.evergreen.sx_max_export_pos_size = 32;
		rdev->config.evergreen.sx_max_export_smx_size = 96;
		rdev->config.evergreen.max_hw_contexts = 4;
		rdev->config.evergreen.sq_num_cf_insts = 1;

		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
		gb_addr_config = CEDAR_GB_ADDR_CONFIG_GOLDEN;
		break;
	case CHIP_SUMO:
		rdev->config.evergreen.num_ses = 1;
		rdev->config.evergreen.max_pipes = 4;
		rdev->config.evergreen.max_tile_pipes = 4;
		/* SIMD count varies with the specific SUMO SKU (PCI id) */
		if (rdev->pdev->device == 0x9648)
			rdev->config.evergreen.max_simds = 3;
		else if ((rdev->pdev->device == 0x9647) ||
			 (rdev->pdev->device == 0x964a))
			rdev->config.evergreen.max_simds = 4;
		else
			rdev->config.evergreen.max_simds = 5;
		rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
		rdev->config.evergreen.max_gprs = 256;
		rdev->config.evergreen.max_threads = 248;
		rdev->config.evergreen.max_gs_threads = 32;
		rdev->config.evergreen.max_stack_entries = 256;
		rdev->config.evergreen.sx_num_of_sets = 4;
		rdev->config.evergreen.sx_max_export_size = 256;
		rdev->config.evergreen.sx_max_export_pos_size = 64;
		rdev->config.evergreen.sx_max_export_smx_size = 192;
		rdev->config.evergreen.max_hw_contexts = 8;
		rdev->config.evergreen.sq_num_cf_insts = 2;

		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
		gb_addr_config = SUMO_GB_ADDR_CONFIG_GOLDEN;
		break;
	case CHIP_SUMO2:
		rdev->config.evergreen.num_ses = 1;
		rdev->config.evergreen.max_pipes = 4;
		rdev->config.evergreen.max_tile_pipes = 4;
		rdev->config.evergreen.max_simds = 2;
		rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
		rdev->config.evergreen.max_gprs = 256;
		rdev->config.evergreen.max_threads = 248;
		rdev->config.evergreen.max_gs_threads = 32;
		rdev->config.evergreen.max_stack_entries = 512;
		rdev->config.evergreen.sx_num_of_sets = 4;
		rdev->config.evergreen.sx_max_export_size = 256;
		rdev->config.evergreen.sx_max_export_pos_size = 64;
		rdev->config.evergreen.sx_max_export_smx_size = 192;
		rdev->config.evergreen.max_hw_contexts = 4;
		rdev->config.evergreen.sq_num_cf_insts = 2;

		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
		gb_addr_config = SUMO2_GB_ADDR_CONFIG_GOLDEN;
		break;
	case CHIP_BARTS:
		rdev->config.evergreen.num_ses = 2;
		rdev->config.evergreen.max_pipes = 4;
		rdev->config.evergreen.max_tile_pipes = 8;
		rdev->config.evergreen.max_simds = 7;
		rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
		rdev->config.evergreen.max_gprs = 256;
		rdev->config.evergreen.max_threads = 248;
		rdev->config.evergreen.max_gs_threads = 32;
		rdev->config.evergreen.max_stack_entries = 512;
		rdev->config.evergreen.sx_num_of_sets = 4;
		rdev->config.evergreen.sx_max_export_size = 256;
		rdev->config.evergreen.sx_max_export_pos_size = 64;
		rdev->config.evergreen.sx_max_export_smx_size = 192;
		rdev->config.evergreen.max_hw_contexts = 8;
		rdev->config.evergreen.sq_num_cf_insts = 2;

		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
		gb_addr_config = BARTS_GB_ADDR_CONFIG_GOLDEN;
		break;
	case CHIP_TURKS:
		rdev->config.evergreen.num_ses = 1;
		rdev->config.evergreen.max_pipes = 4;
		rdev->config.evergreen.max_tile_pipes = 4;
		rdev->config.evergreen.max_simds = 6;
		rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
		rdev->config.evergreen.max_gprs = 256;
		rdev->config.evergreen.max_threads = 248;
		rdev->config.evergreen.max_gs_threads = 32;
		rdev->config.evergreen.max_stack_entries = 256;
		rdev->config.evergreen.sx_num_of_sets = 4;
		rdev->config.evergreen.sx_max_export_size = 256;
		rdev->config.evergreen.sx_max_export_pos_size = 64;
		rdev->config.evergreen.sx_max_export_smx_size = 192;
		rdev->config.evergreen.max_hw_contexts = 8;
		rdev->config.evergreen.sq_num_cf_insts = 2;

		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
		gb_addr_config = TURKS_GB_ADDR_CONFIG_GOLDEN;
		break;
	case CHIP_CAICOS:
		rdev->config.evergreen.num_ses = 1;
		rdev->config.evergreen.max_pipes = 2;
		rdev->config.evergreen.max_tile_pipes = 2;
		rdev->config.evergreen.max_simds = 2;
		rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
		rdev->config.evergreen.max_gprs = 256;
		rdev->config.evergreen.max_threads = 192;
		rdev->config.evergreen.max_gs_threads = 16;
		rdev->config.evergreen.max_stack_entries = 256;
		rdev->config.evergreen.sx_num_of_sets = 4;
		rdev->config.evergreen.sx_max_export_size = 128;
		rdev->config.evergreen.sx_max_export_pos_size = 32;
		rdev->config.evergreen.sx_max_export_smx_size = 96;
		rdev->config.evergreen.max_hw_contexts = 4;
		rdev->config.evergreen.sq_num_cf_insts = 1;

		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
		gb_addr_config = CAICOS_GB_ADDR_CONFIG_GOLDEN;
		break;
	}

	/* Initialize HDP */
	for (i = 0, j = 0; i < 32; i++, j += 0x18) {
		WREG32((0x2c14 + j), 0x00000000);
		WREG32((0x2c18 + j), 0x00000000);
		WREG32((0x2c1c + j), 0x00000000);
		WREG32((0x2c20 + j), 0x00000000);
		WREG32((0x2c24 + j), 0x00000000);
	}

	WREG32(GRBM_CNTL, GRBM_READ_TIMEOUT(0xff));

	evergreen_fix_pci_max_read_req_size(rdev);

	/* fusion parts expose the ARB RAMCFG at a different register */
	mc_shared_chmap = RREG32(MC_SHARED_CHMAP);
	if ((rdev->family == CHIP_PALM) ||
	    (rdev->family == CHIP_SUMO) ||
	    (rdev->family == CHIP_SUMO2))
		mc_arb_ramcfg = RREG32(FUS_MC_ARB_RAMCFG);
	else
		mc_arb_ramcfg = RREG32(MC_ARB_RAMCFG);

	/* setup tiling info dword.  gb_addr_config is not adequate since it does
	 * not have bank info, so create a custom tiling dword.
	 * bits 3:0   num_pipes
	 * bits 7:4   num_banks
	 * bits 11:8  group_size
	 * bits 15:12 row_size
	 */
	rdev->config.evergreen.tile_config = 0;
	switch (rdev->config.evergreen.max_tile_pipes) {
	case 1:
	default:
		rdev->config.evergreen.tile_config |= (0 << 0);
		break;
	case 2:
		rdev->config.evergreen.tile_config |= (1 << 0);
		break;
	case 4:
		rdev->config.evergreen.tile_config |= (2 << 0);
		break;
	case 8:
		rdev->config.evergreen.tile_config |= (3 << 0);
		break;
	}
	/* num banks is 8 on all fusion asics. 0 = 4, 1 = 8, 2 = 16 */
	if (rdev->flags & RADEON_IS_IGP)
		rdev->config.evergreen.tile_config |= 1 << 4;
	else {
		switch ((mc_arb_ramcfg & NOOFBANK_MASK) >> NOOFBANK_SHIFT) {
		case 0: /* four banks */
			rdev->config.evergreen.tile_config |= 0 << 4;
			break;
		case 1: /* eight banks */
			rdev->config.evergreen.tile_config |= 1 << 4;
			break;
		case 2: /* sixteen banks */
		default:
			rdev->config.evergreen.tile_config |= 2 << 4;
			break;
		}
	}
	rdev->config.evergreen.tile_config |= 0 << 8;
	rdev->config.evergreen.tile_config |=
		((gb_addr_config & 0x30000000) >> 28) << 12;

	/* determine the disabled render-backend mask */
	if ((rdev->family >= CHIP_CEDAR) && (rdev->family <= CHIP_HEMLOCK)) {
		/* original evergreen: read the RB disable straps from the
		 * RCU efuse words */
		u32 efuse_straps_4;
		u32 efuse_straps_3;

		efuse_straps_4 = RREG32_RCU(0x204);
		efuse_straps_3 = RREG32_RCU(0x203);
		tmp = (((efuse_straps_4 & 0xf) << 4) |
		      ((efuse_straps_3 & 0xf0000000) >> 28));
	} else {
		/* otherwise accumulate CC_RB_BACKEND_DISABLE per SE,
		 * 4 bits each */
		tmp = 0;
		for (i = (rdev->config.evergreen.num_ses - 1); i >= 0; i--) {
			u32 rb_disable_bitmap;

			WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
			WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
			rb_disable_bitmap = (RREG32(CC_RB_BACKEND_DISABLE) & 0x00ff0000) >> 16;
			tmp <<= 4;
			tmp |= rb_disable_bitmap;
		}
	}
	/* enabled rb are just the one not disabled :) */
	disabled_rb_mask = tmp;
	tmp = 0;
	for (i = 0; i < rdev->config.evergreen.max_backends; i++)
		tmp |= (1 << i);
	/* if all the backends are disabled, fix it up here */
	if ((disabled_rb_mask & tmp) == tmp) {
		for (i = 0; i < rdev->config.evergreen.max_backends; i++)
			disabled_rb_mask &= ~(1 << i);
	}

	/* count the active SIMDs across all shader engines */
	for (i = 0; i < rdev->config.evergreen.num_ses; i++) {
		u32 simd_disable_bitmap;

		WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
		WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
		simd_disable_bitmap = (RREG32(CC_GC_SHADER_PIPE_CONFIG) & 0xffff0000) >> 16;
		simd_disable_bitmap |= 0xffffffff << rdev->config.evergreen.max_simds;
		tmp <<= 16;
		tmp |= simd_disable_bitmap;
	}
	rdev->config.evergreen.active_simds = hweight32(~tmp);

	/* restore broadcast addressing for subsequent register writes */
	WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_BROADCAST_WRITES);
	WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_BROADCAST_WRITES);

	WREG32(GB_ADDR_CONFIG, gb_addr_config);
	WREG32(DMIF_ADDR_CONFIG, gb_addr_config);
	WREG32(HDP_ADDR_CONFIG, gb_addr_config);
	WREG32(DMA_TILING_CONFIG, gb_addr_config);
	WREG32(UVD_UDEC_ADDR_CONFIG, gb_addr_config);
	WREG32(UVD_UDEC_DB_ADDR_CONFIG, gb_addr_config);
	WREG32(UVD_UDEC_DBW_ADDR_CONFIG, gb_addr_config);

	if ((rdev->config.evergreen.max_backends == 1) &&
	    (rdev->flags & RADEON_IS_IGP)) {
		if ((disabled_rb_mask & 3) == 1) {
			/* RB0 disabled, RB1 enabled */
			tmp = 0x11111111;
		} else {
			/* RB1 disabled, RB0 enabled */
			tmp = 0x00000000;
		}
	} else {
		tmp = gb_addr_config & NUM_PIPES_MASK;
		tmp = r6xx_remap_render_backend(rdev, tmp, rdev->config.evergreen.max_backends,
						EVERGREEN_MAX_BACKENDS, disabled_rb_mask);
	}
	WREG32(GB_BACKEND_MAP, tmp);

	WREG32(CGTS_SYS_TCC_DISABLE, 0);
	WREG32(CGTS_TCC_DISABLE, 0);
	WREG32(CGTS_USER_SYS_TCC_DISABLE, 0);
	WREG32(CGTS_USER_TCC_DISABLE, 0);

	/* set HW defaults for 3D engine */
	WREG32(CP_QUEUE_THRESHOLDS, (ROQ_IB1_START(0x16) |
				     ROQ_IB2_START(0x2b)));

	WREG32(CP_MEQ_THRESHOLDS, STQ_SPLIT(0x30));

	WREG32(TA_CNTL_AUX, (DISABLE_CUBE_ANISO |
			     SYNC_GRADIENT |
			     SYNC_WALKER |
			     SYNC_ALIGNER));

	sx_debug_1 = RREG32(SX_DEBUG_1);
	sx_debug_1 |= ENABLE_NEW_SMX_ADDRESS;
	WREG32(SX_DEBUG_1, sx_debug_1);


	smx_dc_ctl0 = RREG32(SMX_DC_CTL0);
	smx_dc_ctl0 &= ~NUMBER_OF_SETS(0x1ff);
	smx_dc_ctl0 |= NUMBER_OF_SETS(rdev->config.evergreen.sx_num_of_sets);
	WREG32(SMX_DC_CTL0, smx_dc_ctl0);

	if (rdev->family <= CHIP_SUMO2)
		WREG32(SMX_SAR_CTL0, 0x00010000);

	WREG32(SX_EXPORT_BUFFER_SIZES, (COLOR_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_size / 4) - 1) |
					POSITION_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_pos_size / 4) - 1) |
					SMX_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_smx_size / 4) - 1)));

	WREG32(PA_SC_FIFO_SIZE, (SC_PRIM_FIFO_SIZE(rdev->config.evergreen.sc_prim_fifo_size) |
				 SC_HIZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_hiz_tile_fifo_size) |
				 SC_EARLYZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_earlyz_tile_fifo_size)));

	WREG32(VGT_NUM_INSTANCES, 1);
	WREG32(SPI_CONFIG_CNTL, 0);
	WREG32(SPI_CONFIG_CNTL_1, VTX_DONE_DELAY(4));
	WREG32(CP_PERFMON_CNTL, 0);

	WREG32(SQ_MS_FIFO_SIZES, (CACHE_FIFO_SIZE(16 * rdev->config.evergreen.sq_num_cf_insts) |
				  FETCH_FIFO_HIWATER(0x4) |
				  DONE_FIFO_HIWATER(0xe0) |
				  ALU_UPDATE_FIFO_HIWATER(0x8)));

	/* shader pipeline priorities: PS highest, ES lowest */
	sq_config = RREG32(SQ_CONFIG);
	sq_config &= ~(PS_PRIO(3) |
		       VS_PRIO(3) |
		       GS_PRIO(3) |
		       ES_PRIO(3));
	sq_config |= (VC_ENABLE |
		      EXPORT_SRC_C |
		      PS_PRIO(0) |
		      VS_PRIO(1) |
		      GS_PRIO(2) |
		      ES_PRIO(3));

	switch (rdev->family) {
	case CHIP_CEDAR:
	case CHIP_PALM:
	case CHIP_SUMO:
	case CHIP_SUMO2:
	case CHIP_CAICOS:
		/* no vertex cache */
		sq_config &= ~VC_ENABLE;
		break;
	default:
		break;
	}

	sq_lds_resource_mgmt = RREG32(SQ_LDS_RESOURCE_MGMT);

	/* partition the GPR file among the shader stages,
	 * reserving 4 * 2 clause-temp GPRs */
	sq_gpr_resource_mgmt_1 = NUM_PS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2))* 12 / 32);
	sq_gpr_resource_mgmt_1 |= NUM_VS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 6 / 32);
	sq_gpr_resource_mgmt_1 |= NUM_CLAUSE_TEMP_GPRS(4);
	sq_gpr_resource_mgmt_2 = NUM_GS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
	sq_gpr_resource_mgmt_2 |= NUM_ES_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
	sq_gpr_resource_mgmt_3 = NUM_HS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
	sq_gpr_resource_mgmt_3 |= NUM_LS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);

	switch (rdev->family) {
	case CHIP_CEDAR:
	case CHIP_PALM:
	case CHIP_SUMO:
	case CHIP_SUMO2:
		ps_thread_count = 96;
		break;
	default:
		ps_thread_count = 128;
		break;
	}

	/* remaining threads are split evenly among the other 5 stages,
	 * rounded down to a multiple of 8 */
	sq_thread_resource_mgmt = NUM_PS_THREADS(ps_thread_count);
	sq_thread_resource_mgmt |= NUM_VS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
	sq_thread_resource_mgmt |= NUM_GS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
	sq_thread_resource_mgmt |= NUM_ES_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
	sq_thread_resource_mgmt_2 = NUM_HS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
	sq_thread_resource_mgmt_2 |= NUM_LS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);

	sq_stack_resource_mgmt_1 = NUM_PS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
	sq_stack_resource_mgmt_1 |= NUM_VS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
	sq_stack_resource_mgmt_2 = NUM_GS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
	sq_stack_resource_mgmt_2 |= NUM_ES_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
	sq_stack_resource_mgmt_3 = NUM_HS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
	sq_stack_resource_mgmt_3 |= NUM_LS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);

	WREG32(SQ_CONFIG, sq_config);
	WREG32(SQ_GPR_RESOURCE_MGMT_1, sq_gpr_resource_mgmt_1);
	WREG32(SQ_GPR_RESOURCE_MGMT_2, sq_gpr_resource_mgmt_2);
	WREG32(SQ_GPR_RESOURCE_MGMT_3, sq_gpr_resource_mgmt_3);
	WREG32(SQ_THREAD_RESOURCE_MGMT, sq_thread_resource_mgmt);
	WREG32(SQ_THREAD_RESOURCE_MGMT_2, sq_thread_resource_mgmt_2);
	WREG32(SQ_STACK_RESOURCE_MGMT_1, sq_stack_resource_mgmt_1);
	WREG32(SQ_STACK_RESOURCE_MGMT_2, sq_stack_resource_mgmt_2);
	WREG32(SQ_STACK_RESOURCE_MGMT_3, sq_stack_resource_mgmt_3);
	WREG32(SQ_DYN_GPR_CNTL_PS_FLUSH_REQ, 0);
	WREG32(SQ_LDS_RESOURCE_MGMT, sq_lds_resource_mgmt);

	WREG32(PA_SC_FORCE_EOV_MAX_CNTS, (FORCE_EOV_MAX_CLK_CNT(4095) |
					  FORCE_EOV_MAX_REZ_CNT(255)));

	switch (rdev->family) {
	case CHIP_CEDAR:
	case CHIP_PALM:
	case CHIP_SUMO:
	case CHIP_SUMO2:
	case CHIP_CAICOS:
		vgt_cache_invalidation = CACHE_INVALIDATION(TC_ONLY);
		break;
	default:
		vgt_cache_invalidation = CACHE_INVALIDATION(VC_AND_TC);
		break;
	}
	vgt_cache_invalidation |= AUTO_INVLD_EN(ES_AND_GS_AUTO);
	WREG32(VGT_CACHE_INVALIDATION, vgt_cache_invalidation);

	WREG32(VGT_GS_VERTEX_REUSE, 16);
	WREG32(PA_SU_LINE_STIPPLE_VALUE, 0);
	WREG32(PA_SC_LINE_STIPPLE_STATE, 0);

	WREG32(VGT_VERTEX_REUSE_BLOCK_CNTL, 14);
	WREG32(VGT_OUT_DEALLOC_CNTL, 16);

	WREG32(CB_PERF_CTR0_SEL_0, 0);
	WREG32(CB_PERF_CTR0_SEL_1, 0);
	WREG32(CB_PERF_CTR1_SEL_0, 0);
	WREG32(CB_PERF_CTR1_SEL_1, 0);
	WREG32(CB_PERF_CTR2_SEL_0, 0);
	WREG32(CB_PERF_CTR2_SEL_1, 0);
	WREG32(CB_PERF_CTR3_SEL_0, 0);
	WREG32(CB_PERF_CTR3_SEL_1, 0);

	/* clear render buffer base addresses */
	WREG32(CB_COLOR0_BASE, 0);
	WREG32(CB_COLOR1_BASE, 0);
	WREG32(CB_COLOR2_BASE, 0);
	WREG32(CB_COLOR3_BASE, 0);
	WREG32(CB_COLOR4_BASE, 0);
	WREG32(CB_COLOR5_BASE, 0);
	WREG32(CB_COLOR6_BASE, 0);
	WREG32(CB_COLOR7_BASE, 0);
	WREG32(CB_COLOR8_BASE, 0);
	WREG32(CB_COLOR9_BASE, 0);
	WREG32(CB_COLOR10_BASE, 0);
	WREG32(CB_COLOR11_BASE, 0);

	/* set the shader const cache sizes to 0 */
	for (i = SQ_ALU_CONST_BUFFER_SIZE_PS_0; i < 0x28200; i += 4)
		WREG32(i, 0);
	for (i = SQ_ALU_CONST_BUFFER_SIZE_HS_0; i < 0x29000; i += 4)
		WREG32(i, 0);

	tmp = RREG32(HDP_MISC_CNTL);
	tmp |= HDP_FLUSH_INVALIDATE_CACHE;
	WREG32(HDP_MISC_CNTL, tmp);

	hdp_host_path_cntl = RREG32(HDP_HOST_PATH_CNTL);
	WREG32(HDP_HOST_PATH_CNTL, hdp_host_path_cntl);

	WREG32(PA_CL_ENHANCE, CLIP_VTX_REORDER_ENA | NUM_CLIP_SEQ(3));

	udelay(50);

}
3551 
/**
 * evergreen_mc_init - set up the memory controller parameters
 *
 * @rdev: radeon_device pointer
 *
 * Derives the VRAM bus width from the channel size and channel count,
 * reads the VRAM size and PCI aperture, and programs the VRAM/GTT
 * address space layout.  Always returns 0.
 */
int evergreen_mc_init(struct radeon_device *rdev)
{
	u32 tmp;
	int chansize, numchan;

	/* Get VRAM informations */
	rdev->mc.vram_is_ddr = true;
	/* fusion APUs (PALM/SUMO/SUMO2) expose the RAM config in a
	 * different register than discrete parts */
	if ((rdev->family == CHIP_PALM) ||
	    (rdev->family == CHIP_SUMO) ||
	    (rdev->family == CHIP_SUMO2))
		tmp = RREG32(FUS_MC_ARB_RAMCFG);
	else
		tmp = RREG32(MC_ARB_RAMCFG);
	/* per-channel width in bits */
	if (tmp & CHANSIZE_OVERRIDE) {
		chansize = 16;
	} else if (tmp & CHANSIZE_MASK) {
		chansize = 64;
	} else {
		chansize = 32;
	}
	/* number of memory channels is encoded, not literal */
	tmp = RREG32(MC_SHARED_CHMAP);
	switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
	case 0:
	default:
		numchan = 1;
		break;
	case 1:
		numchan = 2;
		break;
	case 2:
		numchan = 4;
		break;
	case 3:
		numchan = 8;
		break;
	}
	rdev->mc.vram_width = numchan * chansize;
	/* Could aper size report 0 ? */
	rdev->mc.aper_base = pci_resource_start(rdev->pdev, 0);
	rdev->mc.aper_size = pci_resource_len(rdev->pdev, 0);
	/* Setup GPU memory space */
	if ((rdev->family == CHIP_PALM) ||
	    (rdev->family == CHIP_SUMO) ||
	    (rdev->family == CHIP_SUMO2)) {
		/* size in bytes on fusion */
		rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE);
		rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE);
	} else {
		/* size in MB on evergreen/cayman/tn */
		rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL;
		rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL;
	}
	rdev->mc.visible_vram_size = rdev->mc.aper_size;
	r700_vram_gtt_location(rdev, &rdev->mc);
	radeon_update_bandwidth_info(rdev);

	return 0;
}
3610 
/**
 * evergreen_print_gpu_status_regs - dump the main GPU status registers
 *
 * @rdev: radeon_device pointer
 *
 * Logs GRBM/SRBM, CP and DMA status registers to aid debugging of
 * hangs and resets.  Read-only; has no effect on hardware state.
 */
void evergreen_print_gpu_status_regs(struct radeon_device *rdev)
{
	dev_info(rdev->dev, "  GRBM_STATUS               = 0x%08X\n",
		RREG32(GRBM_STATUS));
	dev_info(rdev->dev, "  GRBM_STATUS_SE0           = 0x%08X\n",
		RREG32(GRBM_STATUS_SE0));
	dev_info(rdev->dev, "  GRBM_STATUS_SE1           = 0x%08X\n",
		RREG32(GRBM_STATUS_SE1));
	dev_info(rdev->dev, "  SRBM_STATUS               = 0x%08X\n",
		RREG32(SRBM_STATUS));
	dev_info(rdev->dev, "  SRBM_STATUS2              = 0x%08X\n",
		RREG32(SRBM_STATUS2));
	dev_info(rdev->dev, "  R_008674_CP_STALLED_STAT1 = 0x%08X\n",
		RREG32(CP_STALLED_STAT1));
	dev_info(rdev->dev, "  R_008678_CP_STALLED_STAT2 = 0x%08X\n",
		RREG32(CP_STALLED_STAT2));
	dev_info(rdev->dev, "  R_00867C_CP_BUSY_STAT     = 0x%08X\n",
		RREG32(CP_BUSY_STAT));
	dev_info(rdev->dev, "  R_008680_CP_STAT          = 0x%08X\n",
		RREG32(CP_STAT));
	dev_info(rdev->dev, "  R_00D034_DMA_STATUS_REG   = 0x%08X\n",
		RREG32(DMA_STATUS_REG));
	/* cayman and newer expose a second DMA engine's status at +0x800 */
	if (rdev->family >= CHIP_CAYMAN) {
		dev_info(rdev->dev, "  R_00D834_DMA_STATUS_REG   = 0x%08X\n",
			 RREG32(DMA_STATUS_REG + 0x800));
	}
}
3638 
3639 bool evergreen_is_display_hung(struct radeon_device *rdev)
3640 {
3641 	u32 crtc_hung = 0;
3642 	u32 crtc_status[6];
3643 	u32 i, j, tmp;
3644 
3645 	for (i = 0; i < rdev->num_crtc; i++) {
3646 		if (RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]) & EVERGREEN_CRTC_MASTER_EN) {
3647 			crtc_status[i] = RREG32(EVERGREEN_CRTC_STATUS_HV_COUNT + crtc_offsets[i]);
3648 			crtc_hung |= (1 << i);
3649 		}
3650 	}
3651 
3652 	for (j = 0; j < 10; j++) {
3653 		for (i = 0; i < rdev->num_crtc; i++) {
3654 			if (crtc_hung & (1 << i)) {
3655 				tmp = RREG32(EVERGREEN_CRTC_STATUS_HV_COUNT + crtc_offsets[i]);
3656 				if (tmp != crtc_status[i])
3657 					crtc_hung &= ~(1 << i);
3658 			}
3659 		}
3660 		if (crtc_hung == 0)
3661 			return false;
3662 		udelay(100);
3663 	}
3664 
3665 	return true;
3666 }
3667 
/**
 * evergreen_gpu_check_soft_reset - determine which GPU blocks are hung
 *
 * @rdev: radeon_device pointer
 *
 * Samples the GRBM/SRBM/DMA/VM status registers and builds a mask of
 * RADEON_RESET_* flags for blocks that look busy.  The MC bit is
 * cleared before returning, since a busy MC is most likely just busy,
 * not hung.  Returns 0 when everything looks idle.
 */
u32 evergreen_gpu_check_soft_reset(struct radeon_device *rdev)
{
	u32 reset_mask = 0;
	u32 tmp;

	/* GRBM_STATUS */
	tmp = RREG32(GRBM_STATUS);
	if (tmp & (PA_BUSY | SC_BUSY |
		   SH_BUSY | SX_BUSY |
		   TA_BUSY | VGT_BUSY |
		   DB_BUSY | CB_BUSY |
		   SPI_BUSY | VGT_BUSY_NO_DMA))
		reset_mask |= RADEON_RESET_GFX;

	if (tmp & (CF_RQ_PENDING | PF_RQ_PENDING |
		   CP_BUSY | CP_COHERENCY_BUSY))
		reset_mask |= RADEON_RESET_CP;

	if (tmp & GRBM_EE_BUSY)
		reset_mask |= RADEON_RESET_GRBM | RADEON_RESET_GFX | RADEON_RESET_CP;

	/* DMA_STATUS_REG */
	tmp = RREG32(DMA_STATUS_REG);
	if (!(tmp & DMA_IDLE))
		reset_mask |= RADEON_RESET_DMA;

	/* SRBM_STATUS2 */
	tmp = RREG32(SRBM_STATUS2);
	if (tmp & DMA_BUSY)
		reset_mask |= RADEON_RESET_DMA;

	/* SRBM_STATUS */
	tmp = RREG32(SRBM_STATUS);
	if (tmp & (RLC_RQ_PENDING | RLC_BUSY))
		reset_mask |= RADEON_RESET_RLC;

	if (tmp & IH_BUSY)
		reset_mask |= RADEON_RESET_IH;

	if (tmp & SEM_BUSY)
		reset_mask |= RADEON_RESET_SEM;

	if (tmp & GRBM_RQ_PENDING)
		reset_mask |= RADEON_RESET_GRBM;

	if (tmp & VMC_BUSY)
		reset_mask |= RADEON_RESET_VMC;

	if (tmp & (MCB_BUSY | MCB_NON_DISPLAY_BUSY |
		   MCC_BUSY | MCD_BUSY))
		reset_mask |= RADEON_RESET_MC;

	if (evergreen_is_display_hung(rdev))
		reset_mask |= RADEON_RESET_DISPLAY;

	/* VM_L2_STATUS */
	tmp = RREG32(VM_L2_STATUS);
	if (tmp & L2_BUSY)
		reset_mask |= RADEON_RESET_VMC;

	/* Skip MC reset as it's mostly likely not hung, just busy */
	if (reset_mask & RADEON_RESET_MC) {
		DRM_DEBUG("MC busy: 0x%08X, clearing.\n", reset_mask);
		reset_mask &= ~RADEON_RESET_MC;
	}

	return reset_mask;
}
3736 
/**
 * evergreen_gpu_soft_reset - soft reset the GPU blocks named in reset_mask
 *
 * @rdev: radeon_device pointer
 * @reset_mask: mask of RADEON_RESET_* flags (from
 *              evergreen_gpu_check_soft_reset())
 *
 * Halts the CP (and DMA ring when requested), stops memory-controller
 * client access, then pulses the corresponding bits in
 * GRBM_SOFT_RESET/SRBM_SOFT_RESET and resumes the MC.
 * No-op when reset_mask is 0.
 */
static void evergreen_gpu_soft_reset(struct radeon_device *rdev, u32 reset_mask)
{
	struct evergreen_mc_save save;
	u32 grbm_soft_reset = 0, srbm_soft_reset = 0;
	u32 tmp;

	if (reset_mask == 0)
		return;

	dev_info(rdev->dev, "GPU softreset: 0x%08X\n", reset_mask);

	evergreen_print_gpu_status_regs(rdev);

	/* Disable CP parsing/prefetching */
	WREG32(CP_ME_CNTL, CP_ME_HALT | CP_PFP_HALT);

	if (reset_mask & RADEON_RESET_DMA) {
		/* Disable DMA */
		tmp = RREG32(DMA_RB_CNTL);
		tmp &= ~DMA_RB_ENABLE;
		WREG32(DMA_RB_CNTL, tmp);
	}

	udelay(50);

	evergreen_mc_stop(rdev, &save);
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
	}

	/* translate the reset_mask flags into per-register reset bits */
	if (reset_mask & (RADEON_RESET_GFX | RADEON_RESET_COMPUTE)) {
		grbm_soft_reset |= SOFT_RESET_DB |
			SOFT_RESET_CB |
			SOFT_RESET_PA |
			SOFT_RESET_SC |
			SOFT_RESET_SPI |
			SOFT_RESET_SX |
			SOFT_RESET_SH |
			SOFT_RESET_TC |
			SOFT_RESET_TA |
			SOFT_RESET_VC |
			SOFT_RESET_VGT;
	}

	if (reset_mask & RADEON_RESET_CP) {
		grbm_soft_reset |= SOFT_RESET_CP |
			SOFT_RESET_VGT;

		srbm_soft_reset |= SOFT_RESET_GRBM;
	}

	if (reset_mask & RADEON_RESET_DMA)
		srbm_soft_reset |= SOFT_RESET_DMA;

	if (reset_mask & RADEON_RESET_DISPLAY)
		srbm_soft_reset |= SOFT_RESET_DC;

	if (reset_mask & RADEON_RESET_RLC)
		srbm_soft_reset |= SOFT_RESET_RLC;

	if (reset_mask & RADEON_RESET_SEM)
		srbm_soft_reset |= SOFT_RESET_SEM;

	if (reset_mask & RADEON_RESET_IH)
		srbm_soft_reset |= SOFT_RESET_IH;

	if (reset_mask & RADEON_RESET_GRBM)
		srbm_soft_reset |= SOFT_RESET_GRBM;

	if (reset_mask & RADEON_RESET_VMC)
		srbm_soft_reset |= SOFT_RESET_VMC;

	/* MC reset only on discrete parts; IGPs share memory with the CPU */
	if (!(rdev->flags & RADEON_IS_IGP)) {
		if (reset_mask & RADEON_RESET_MC)
			srbm_soft_reset |= SOFT_RESET_MC;
	}

	if (grbm_soft_reset) {
		tmp = RREG32(GRBM_SOFT_RESET);
		tmp |= grbm_soft_reset;
		dev_info(rdev->dev, "GRBM_SOFT_RESET=0x%08X\n", tmp);
		WREG32(GRBM_SOFT_RESET, tmp);
		/* read back, presumably to post the write before the delay */
		tmp = RREG32(GRBM_SOFT_RESET);

		udelay(50);

		/* release the reset bits again */
		tmp &= ~grbm_soft_reset;
		WREG32(GRBM_SOFT_RESET, tmp);
		tmp = RREG32(GRBM_SOFT_RESET);
	}

	if (srbm_soft_reset) {
		tmp = RREG32(SRBM_SOFT_RESET);
		tmp |= srbm_soft_reset;
		dev_info(rdev->dev, "SRBM_SOFT_RESET=0x%08X\n", tmp);
		WREG32(SRBM_SOFT_RESET, tmp);
		tmp = RREG32(SRBM_SOFT_RESET);

		udelay(50);

		tmp &= ~srbm_soft_reset;
		WREG32(SRBM_SOFT_RESET, tmp);
		tmp = RREG32(SRBM_SOFT_RESET);
	}

	/* Wait a little for things to settle down */
	udelay(50);

	evergreen_mc_resume(rdev, &save);
	udelay(50);

	evergreen_print_gpu_status_regs(rdev);
}
3850 
/**
 * evergreen_gpu_pci_config_reset - full asic reset via PCI config space
 *
 * @rdev: radeon_device pointer
 *
 * Heavier fallback used when a soft reset did not recover the GPU:
 * halts CP/DMA/RLC, switches clocks to bypass, disables bus mastering
 * and memory access, then triggers a PCI config reset and waits for
 * the asic to respond again (CONFIG_MEMSIZE becomes readable).
 */
void evergreen_gpu_pci_config_reset(struct radeon_device *rdev)
{
	struct evergreen_mc_save save;
	u32 tmp, i;

	dev_info(rdev->dev, "GPU pci config reset\n");

	/* disable dpm? */

	/* Disable CP parsing/prefetching */
	WREG32(CP_ME_CNTL, CP_ME_HALT | CP_PFP_HALT);
	udelay(50);
	/* Disable DMA */
	tmp = RREG32(DMA_RB_CNTL);
	tmp &= ~DMA_RB_ENABLE;
	WREG32(DMA_RB_CNTL, tmp);
	/* XXX other engines? */

	/* halt the rlc */
	r600_rlc_stop(rdev);

	udelay(50);

	/* set mclk/sclk to bypass */
	rv770_set_clk_bypass_mode(rdev);
	/* disable BM */
	pci_disable_busmaster(rdev->pdev->dev);
	/* disable mem access */
	evergreen_mc_stop(rdev, &save);
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timed out !\n");
	}
	/* reset */
	radeon_pci_config_reset(rdev);
	/* wait for asic to come out of reset */
	for (i = 0; i < rdev->usec_timeout; i++) {
		/* register bus answers with all-ones while the asic is in reset */
		if (RREG32(CONFIG_MEMSIZE) != 0xffffffff)
			break;
		udelay(1);
	}
}
3892 
3893 int evergreen_asic_reset(struct radeon_device *rdev)
3894 {
3895 	u32 reset_mask;
3896 
3897 	reset_mask = evergreen_gpu_check_soft_reset(rdev);
3898 
3899 	if (reset_mask)
3900 		r600_set_bios_scratch_engine_hung(rdev, true);
3901 
3902 	/* try soft reset */
3903 	evergreen_gpu_soft_reset(rdev, reset_mask);
3904 
3905 	reset_mask = evergreen_gpu_check_soft_reset(rdev);
3906 
3907 	/* try pci config reset */
3908 	if (reset_mask && radeon_hard_reset)
3909 		evergreen_gpu_pci_config_reset(rdev);
3910 
3911 	reset_mask = evergreen_gpu_check_soft_reset(rdev);
3912 
3913 	if (!reset_mask)
3914 		r600_set_bios_scratch_engine_hung(rdev, false);
3915 
3916 	return 0;
3917 }
3918 
3919 /**
3920  * evergreen_gfx_is_lockup - Check if the GFX engine is locked up
3921  *
3922  * @rdev: radeon_device pointer
3923  * @ring: radeon_ring structure holding ring information
3924  *
3925  * Check if the GFX engine is locked up.
3926  * Returns true if the engine appears to be locked up, false if not.
3927  */
3928 bool evergreen_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
3929 {
3930 	u32 reset_mask = evergreen_gpu_check_soft_reset(rdev);
3931 
3932 	if (!(reset_mask & (RADEON_RESET_GFX |
3933 			    RADEON_RESET_COMPUTE |
3934 			    RADEON_RESET_CP))) {
3935 		radeon_ring_lockup_update(rdev, ring);
3936 		return false;
3937 	}
3938 	return radeon_ring_test_lockup(rdev, ring);
3939 }
3940 
3941 /*
3942  * RLC
3943  */
3944 #define RLC_SAVE_RESTORE_LIST_END_MARKER    0x00000000
3945 #define RLC_CLEAR_STATE_END_MARKER          0x00000001
3946 
/**
 * sumo_rlc_fini - tear down the RLC buffer objects
 *
 * @rdev: radeon_device pointer
 *
 * Unpins and frees the save/restore, clear-state and CP table BOs
 * if they exist; counterpart of sumo_rlc_init().  Safe to call on a
 * partially initialized rlc state (NULL objects are skipped).
 */
void sumo_rlc_fini(struct radeon_device *rdev)
{
	int r;

	/* save restore block */
	if (rdev->rlc.save_restore_obj) {
		r = radeon_bo_reserve(rdev->rlc.save_restore_obj, false);
		if (unlikely(r != 0))
			dev_warn(rdev->dev, "(%d) reserve RLC sr bo failed\n", r);
		radeon_bo_unpin(rdev->rlc.save_restore_obj);
		radeon_bo_unreserve(rdev->rlc.save_restore_obj);

		radeon_bo_unref(&rdev->rlc.save_restore_obj);
		rdev->rlc.save_restore_obj = NULL;
	}

	/* clear state block */
	if (rdev->rlc.clear_state_obj) {
		r = radeon_bo_reserve(rdev->rlc.clear_state_obj, false);
		if (unlikely(r != 0))
			dev_warn(rdev->dev, "(%d) reserve RLC c bo failed\n", r);
		radeon_bo_unpin(rdev->rlc.clear_state_obj);
		radeon_bo_unreserve(rdev->rlc.clear_state_obj);

		radeon_bo_unref(&rdev->rlc.clear_state_obj);
		rdev->rlc.clear_state_obj = NULL;
	}

	/* cp table block (original comment said "clear state block") */
	if (rdev->rlc.cp_table_obj) {
		r = radeon_bo_reserve(rdev->rlc.cp_table_obj, false);
		if (unlikely(r != 0))
			dev_warn(rdev->dev, "(%d) reserve RLC cp table bo failed\n", r);
		radeon_bo_unpin(rdev->rlc.cp_table_obj);
		radeon_bo_unreserve(rdev->rlc.cp_table_obj);

		radeon_bo_unref(&rdev->rlc.cp_table_obj);
		rdev->rlc.cp_table_obj = NULL;
	}
}
3987 
3988 #define CP_ME_TABLE_SIZE    96
3989 
/**
 * sumo_rlc_init - allocate and populate the RLC buffer objects
 *
 * @rdev: radeon_device pointer
 *
 * Creates, pins and fills up to three VRAM buffer objects used by the
 * RLC:
 *  - the save/restore buffer (from rdev->rlc.reg_list),
 *  - the clear-state buffer (from rdev->rlc.cs_data),
 *  - the CP power-gating table (when rdev->rlc.cp_table_size is set).
 * The in-buffer layout differs per asic generation (evergreen/NI vs
 * SI/TAHITI vs CIK/BONAIRE).  Returns 0 on success or a negative error
 * code; on any failure everything created so far is torn down via
 * sumo_rlc_fini().
 */
int sumo_rlc_init(struct radeon_device *rdev)
{
	const u32 *src_ptr;
	volatile u32 *dst_ptr;
	u32 dws, data, i, j, k, reg_num;
	u32 reg_list_num, reg_list_hdr_blk_index, reg_list_blk_index = 0;
	u64 reg_list_mc_addr;
	const struct cs_section_def *cs_data;
	int r;
	void *vptr;

	vptr = NULL;
	src_ptr = rdev->rlc.reg_list;
	dws = rdev->rlc.reg_list_size;
	/* CIK needs extra room in the save/restore buffer */
	if (rdev->family >= CHIP_BONAIRE) {
		dws += (5 * 16) + 48 + 48 + 64;
	}
	cs_data = rdev->rlc.cs_data;

	if (src_ptr) {
		/* save restore block */
		if (rdev->rlc.save_restore_obj == NULL) {
			r = radeon_bo_create(rdev, dws * 4, PAGE_SIZE, true,
					     RADEON_GEM_DOMAIN_VRAM, 0, NULL,
					     &rdev->rlc.save_restore_obj);
			if (r) {
				dev_warn(rdev->dev, "(%d) create RLC sr bo failed\n", r);
				return r;
			}
		}

		r = radeon_bo_reserve(rdev->rlc.save_restore_obj, false);
		if (unlikely(r != 0)) {
			sumo_rlc_fini(rdev);
			return r;
		}
		r = radeon_bo_pin(rdev->rlc.save_restore_obj, RADEON_GEM_DOMAIN_VRAM,
				  &rdev->rlc.save_restore_gpu_addr);
		if (r) {
			radeon_bo_unreserve(rdev->rlc.save_restore_obj);
			dev_warn(rdev->dev, "(%d) pin RLC sr bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}

		r = radeon_bo_kmap(rdev->rlc.save_restore_obj, (void**)&vptr);
		if (r) {
			dev_warn(rdev->dev, "(%d) map RLC sr bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}
		rdev->rlc.sr_ptr = vptr;
		/* write the sr buffer */
		dst_ptr = rdev->rlc.sr_ptr;
		if (rdev->family >= CHIP_TAHITI) {
			/* SI */
			for (i = 0; i < rdev->rlc.reg_list_size; i++)
				dst_ptr[i] = cpu_to_le32(src_ptr[i]);
		} else {
			/* ON/LN/TN */
			/* format:
			 * dw0: (reg2 << 16) | reg1
			 * dw1: reg1 save space
			 * dw2: reg2 save space
			 */
			/* pack two dword-offset register addresses per header
			 * word, leaving one save slot per register */
			for (i = 0; i < dws; i++) {
				data = src_ptr[i] >> 2;
				i++;
				if (i < dws)
					data |= (src_ptr[i] >> 2) << 16;
				j = (((i - 1) * 3) / 2);
				dst_ptr[j] = cpu_to_le32(data);
			}
			j = ((i * 3) / 2);
			dst_ptr[j] = cpu_to_le32(RLC_SAVE_RESTORE_LIST_END_MARKER);
		}
		radeon_bo_kunmap(rdev->rlc.save_restore_obj);
		radeon_bo_unreserve(rdev->rlc.save_restore_obj);
	}

	if (cs_data) {
		/* clear state block */
		/* compute the buffer size in dwords, per generation */
		if (rdev->family >= CHIP_BONAIRE) {
			rdev->rlc.clear_state_size = dws = cik_get_csb_size(rdev);
		} else if (rdev->family >= CHIP_TAHITI) {
			rdev->rlc.clear_state_size = si_get_csb_size(rdev);
			/* SI: first 256 bytes hold the indirect buffer header */
			dws = rdev->rlc.clear_state_size + (256 / 4);
		} else {
			/* evergreen/NI: 3 header dwords per register extent,
			 * plus the register payloads and end markers */
			reg_list_num = 0;
			dws = 0;
			for (i = 0; cs_data[i].section != NULL; i++) {
				for (j = 0; cs_data[i].section[j].extent != NULL; j++) {
					reg_list_num++;
					dws += cs_data[i].section[j].reg_count;
				}
			}
			reg_list_blk_index = (3 * reg_list_num + 2);
			dws += reg_list_blk_index;
			rdev->rlc.clear_state_size = dws;
		}

		if (rdev->rlc.clear_state_obj == NULL) {
			r = radeon_bo_create(rdev, dws * 4, PAGE_SIZE, true,
					     RADEON_GEM_DOMAIN_VRAM, 0, NULL,
					     &rdev->rlc.clear_state_obj);
			if (r) {
				dev_warn(rdev->dev, "(%d) create RLC c bo failed\n", r);
				sumo_rlc_fini(rdev);
				return r;
			}
		}
		r = radeon_bo_reserve(rdev->rlc.clear_state_obj, false);
		if (unlikely(r != 0)) {
			sumo_rlc_fini(rdev);
			return r;
		}
		r = radeon_bo_pin(rdev->rlc.clear_state_obj, RADEON_GEM_DOMAIN_VRAM,
				  &rdev->rlc.clear_state_gpu_addr);
		if (r) {
			radeon_bo_unreserve(rdev->rlc.clear_state_obj);
			dev_warn(rdev->dev, "(%d) pin RLC c bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}

		r = radeon_bo_kmap(rdev->rlc.clear_state_obj, (void**)&vptr);
		if (r) {
			dev_warn(rdev->dev, "(%d) map RLC c bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}
		rdev->rlc.cs_ptr = vptr;
		/* set up the cs buffer */
		dst_ptr = rdev->rlc.cs_ptr;
		if (rdev->family >= CHIP_BONAIRE) {
			cik_get_csb_buffer(rdev, dst_ptr);
		} else if (rdev->family >= CHIP_TAHITI) {
			/* header: GPU address + size of the csb that follows
			 * at byte offset 256 */
			reg_list_mc_addr = rdev->rlc.clear_state_gpu_addr + 256;
			dst_ptr[0] = cpu_to_le32(upper_32_bits(reg_list_mc_addr));
			dst_ptr[1] = cpu_to_le32(lower_32_bits(reg_list_mc_addr));
			dst_ptr[2] = cpu_to_le32(rdev->rlc.clear_state_size);
			si_get_csb_buffer(rdev, &dst_ptr[(256/4)]);
		} else {
			/* evergreen/NI: write a header block (3 dwords per
			 * extent: payload address, register offset, flags+len)
			 * followed by the register payload block */
			reg_list_hdr_blk_index = 0;
			reg_list_mc_addr = rdev->rlc.clear_state_gpu_addr + (reg_list_blk_index * 4);
			data = upper_32_bits(reg_list_mc_addr);
			dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
			reg_list_hdr_blk_index++;
			for (i = 0; cs_data[i].section != NULL; i++) {
				for (j = 0; cs_data[i].section[j].extent != NULL; j++) {
					reg_num = cs_data[i].section[j].reg_count;
					data = reg_list_mc_addr & 0xffffffff;
					dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
					reg_list_hdr_blk_index++;

					data = (cs_data[i].section[j].reg_index * 4) & 0xffffffff;
					dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
					reg_list_hdr_blk_index++;

					data = 0x08000000 | (reg_num * 4);
					dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
					reg_list_hdr_blk_index++;

					for (k = 0; k < reg_num; k++) {
						data = cs_data[i].section[j].extent[k];
						dst_ptr[reg_list_blk_index + k] = cpu_to_le32(data);
					}
					reg_list_mc_addr += reg_num * 4;
					reg_list_blk_index += reg_num;
				}
			}
			dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(RLC_CLEAR_STATE_END_MARKER);
		}
		radeon_bo_kunmap(rdev->rlc.clear_state_obj);
		radeon_bo_unreserve(rdev->rlc.clear_state_obj);
	}

	if (rdev->rlc.cp_table_size) {
		/* CP power-gating table (CIK); filled by cik_init_cp_pg_table() */
		if (rdev->rlc.cp_table_obj == NULL) {
			r = radeon_bo_create(rdev, rdev->rlc.cp_table_size,
					     PAGE_SIZE, true,
					     RADEON_GEM_DOMAIN_VRAM, 0, NULL,
					     &rdev->rlc.cp_table_obj);
			if (r) {
				dev_warn(rdev->dev, "(%d) create RLC cp table bo failed\n", r);
				sumo_rlc_fini(rdev);
				return r;
			}
		}

		r = radeon_bo_reserve(rdev->rlc.cp_table_obj, false);
		if (unlikely(r != 0)) {
			dev_warn(rdev->dev, "(%d) reserve RLC cp table bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}
		r = radeon_bo_pin(rdev->rlc.cp_table_obj, RADEON_GEM_DOMAIN_VRAM,
				  &rdev->rlc.cp_table_gpu_addr);
		if (r) {
			radeon_bo_unreserve(rdev->rlc.cp_table_obj);
			dev_warn(rdev->dev, "(%d) pin RLC cp_table bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}
		r = radeon_bo_kmap(rdev->rlc.cp_table_obj, (void**)&vptr);
		if (r) {
			dev_warn(rdev->dev, "(%d) map RLC cp table bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}
		rdev->rlc.cp_table_ptr = vptr;

		cik_init_cp_pg_table(rdev);

		radeon_bo_kunmap(rdev->rlc.cp_table_obj);
		radeon_bo_unreserve(rdev->rlc.cp_table_obj);

	}

	return 0;
}
4211 
4212 static void evergreen_rlc_start(struct radeon_device *rdev)
4213 {
4214 	u32 mask = RLC_ENABLE;
4215 
4216 	if (rdev->flags & RADEON_IS_IGP) {
4217 		mask |= GFX_POWER_GATING_ENABLE | GFX_POWER_GATING_SRC;
4218 	}
4219 
4220 	WREG32(RLC_CNTL, mask);
4221 }
4222 
4223 int evergreen_rlc_resume(struct radeon_device *rdev)
4224 {
4225 	u32 i;
4226 	const __be32 *fw_data;
4227 
4228 	if (!rdev->rlc_fw)
4229 		return -EINVAL;
4230 
4231 	r600_rlc_stop(rdev);
4232 
4233 	WREG32(RLC_HB_CNTL, 0);
4234 
4235 	if (rdev->flags & RADEON_IS_IGP) {
4236 		if (rdev->family == CHIP_ARUBA) {
4237 			u32 always_on_bitmap =
4238 				3 | (3 << (16 * rdev->config.cayman.max_shader_engines));
4239 			/* find out the number of active simds */
4240 			u32 tmp = (RREG32(CC_GC_SHADER_PIPE_CONFIG) & 0xffff0000) >> 16;
4241 			tmp |= 0xffffffff << rdev->config.cayman.max_simds_per_se;
4242 			tmp = hweight32(~tmp);
4243 			if (tmp == rdev->config.cayman.max_simds_per_se) {
4244 				WREG32(TN_RLC_LB_ALWAYS_ACTIVE_SIMD_MASK, always_on_bitmap);
4245 				WREG32(TN_RLC_LB_PARAMS, 0x00601004);
4246 				WREG32(TN_RLC_LB_INIT_SIMD_MASK, 0xffffffff);
4247 				WREG32(TN_RLC_LB_CNTR_INIT, 0x00000000);
4248 				WREG32(TN_RLC_LB_CNTR_MAX, 0x00002000);
4249 			}
4250 		} else {
4251 			WREG32(RLC_HB_WPTR_LSB_ADDR, 0);
4252 			WREG32(RLC_HB_WPTR_MSB_ADDR, 0);
4253 		}
4254 		WREG32(TN_RLC_SAVE_AND_RESTORE_BASE, rdev->rlc.save_restore_gpu_addr >> 8);
4255 		WREG32(TN_RLC_CLEAR_STATE_RESTORE_BASE, rdev->rlc.clear_state_gpu_addr >> 8);
4256 	} else {
4257 		WREG32(RLC_HB_BASE, 0);
4258 		WREG32(RLC_HB_RPTR, 0);
4259 		WREG32(RLC_HB_WPTR, 0);
4260 		WREG32(RLC_HB_WPTR_LSB_ADDR, 0);
4261 		WREG32(RLC_HB_WPTR_MSB_ADDR, 0);
4262 	}
4263 	WREG32(RLC_MC_CNTL, 0);
4264 	WREG32(RLC_UCODE_CNTL, 0);
4265 
4266 	fw_data = (const __be32 *)rdev->rlc_fw->data;
4267 	if (rdev->family >= CHIP_ARUBA) {
4268 		for (i = 0; i < ARUBA_RLC_UCODE_SIZE; i++) {
4269 			WREG32(RLC_UCODE_ADDR, i);
4270 			WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
4271 		}
4272 	} else if (rdev->family >= CHIP_CAYMAN) {
4273 		for (i = 0; i < CAYMAN_RLC_UCODE_SIZE; i++) {
4274 			WREG32(RLC_UCODE_ADDR, i);
4275 			WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
4276 		}
4277 	} else {
4278 		for (i = 0; i < EVERGREEN_RLC_UCODE_SIZE; i++) {
4279 			WREG32(RLC_UCODE_ADDR, i);
4280 			WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
4281 		}
4282 	}
4283 	WREG32(RLC_UCODE_ADDR, 0);
4284 
4285 	evergreen_rlc_start(rdev);
4286 
4287 	return 0;
4288 }
4289 
4290 /* Interrupts */
4291 
4292 u32 evergreen_get_vblank_counter(struct radeon_device *rdev, int crtc)
4293 {
4294 	if (crtc >= rdev->num_crtc)
4295 		return 0;
4296 	else
4297 		return RREG32(CRTC_STATUS_FRAME_COUNT + crtc_offsets[crtc]);
4298 }
4299 
/**
 * evergreen_disable_interrupt_state - force all interrupt sources off
 *
 * @rdev: radeon_device pointer
 *
 * Masks every interrupt source: CP rings, DMA engine(s), GRBM,
 * per-CRTC vblank and pageflip, DAC autodetect and HPD.  HPD polarity
 * bits are preserved while the enable bits are cleared.
 */
void evergreen_disable_interrupt_state(struct radeon_device *rdev)
{
	u32 tmp;

	if (rdev->family >= CHIP_CAYMAN) {
		/* cayman has three CP rings and a second DMA engine */
		cayman_cp_int_cntl_setup(rdev, 0,
					 CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
		cayman_cp_int_cntl_setup(rdev, 1, 0);
		cayman_cp_int_cntl_setup(rdev, 2, 0);
		tmp = RREG32(CAYMAN_DMA1_CNTL) & ~TRAP_ENABLE;
		WREG32(CAYMAN_DMA1_CNTL, tmp);
	} else
		WREG32(CP_INT_CNTL, CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
	tmp = RREG32(DMA_CNTL) & ~TRAP_ENABLE;
	WREG32(DMA_CNTL, tmp);
	WREG32(GRBM_INT_CNTL, 0);
	/* vblank/vline interrupt masks for each populated CRTC pair */
	WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
	WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
	if (rdev->num_crtc >= 4) {
		WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
		WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
	}
	if (rdev->num_crtc >= 6) {
		WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
		WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
	}

	/* pageflip interrupt controls */
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
	if (rdev->num_crtc >= 4) {
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
	}
	if (rdev->num_crtc >= 6) {
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
	}

	/* only one DAC on DCE5 */
	if (!ASIC_IS_DCE5(rdev))
		WREG32(DACA_AUTODETECT_INT_CONTROL, 0);
	WREG32(DACB_AUTODETECT_INT_CONTROL, 0);

	/* clear HPD interrupt enables but keep the polarity bits */
	tmp = RREG32(DC_HPD1_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD1_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD2_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD2_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD3_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD3_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD4_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD4_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD5_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD5_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD6_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD6_INT_CONTROL, tmp);

}
4357 
4358 int evergreen_irq_set(struct radeon_device *rdev)
4359 {
4360 	u32 cp_int_cntl = CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE;
4361 	u32 cp_int_cntl1 = 0, cp_int_cntl2 = 0;
4362 	u32 crtc1 = 0, crtc2 = 0, crtc3 = 0, crtc4 = 0, crtc5 = 0, crtc6 = 0;
4363 	u32 hpd1, hpd2, hpd3, hpd4, hpd5, hpd6;
4364 	u32 grbm_int_cntl = 0;
4365 	u32 afmt1 = 0, afmt2 = 0, afmt3 = 0, afmt4 = 0, afmt5 = 0, afmt6 = 0;
4366 	u32 dma_cntl, dma_cntl1 = 0;
4367 	u32 thermal_int = 0;
4368 
4369 	if (!rdev->irq.installed) {
4370 		WARN(1, "Can't enable IRQ/MSI because no handler is installed\n");
4371 		return -EINVAL;
4372 	}
4373 	/* don't enable anything if the ih is disabled */
4374 	if (!rdev->ih.enabled) {
4375 		r600_disable_interrupts(rdev);
4376 		/* force the active interrupt state to all disabled */
4377 		evergreen_disable_interrupt_state(rdev);
4378 		return 0;
4379 	}
4380 
4381 	hpd1 = RREG32(DC_HPD1_INT_CONTROL) & ~DC_HPDx_INT_EN;
4382 	hpd2 = RREG32(DC_HPD2_INT_CONTROL) & ~DC_HPDx_INT_EN;
4383 	hpd3 = RREG32(DC_HPD3_INT_CONTROL) & ~DC_HPDx_INT_EN;
4384 	hpd4 = RREG32(DC_HPD4_INT_CONTROL) & ~DC_HPDx_INT_EN;
4385 	hpd5 = RREG32(DC_HPD5_INT_CONTROL) & ~DC_HPDx_INT_EN;
4386 	hpd6 = RREG32(DC_HPD6_INT_CONTROL) & ~DC_HPDx_INT_EN;
4387 	if (rdev->family == CHIP_ARUBA)
4388 		thermal_int = RREG32(TN_CG_THERMAL_INT_CTRL) &
4389 			~(THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW);
4390 	else
4391 		thermal_int = RREG32(CG_THERMAL_INT) &
4392 			~(THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW);
4393 
4394 	afmt1 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4395 	afmt2 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4396 	afmt3 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4397 	afmt4 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4398 	afmt5 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4399 	afmt6 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4400 
4401 	dma_cntl = RREG32(DMA_CNTL) & ~TRAP_ENABLE;
4402 
4403 	if (rdev->family >= CHIP_CAYMAN) {
4404 		/* enable CP interrupts on all rings */
4405 		if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
4406 			DRM_DEBUG("evergreen_irq_set: sw int gfx\n");
4407 			cp_int_cntl |= TIME_STAMP_INT_ENABLE;
4408 		}
4409 		if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP1_INDEX])) {
4410 			DRM_DEBUG("evergreen_irq_set: sw int cp1\n");
4411 			cp_int_cntl1 |= TIME_STAMP_INT_ENABLE;
4412 		}
4413 		if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP2_INDEX])) {
4414 			DRM_DEBUG("evergreen_irq_set: sw int cp2\n");
4415 			cp_int_cntl2 |= TIME_STAMP_INT_ENABLE;
4416 		}
4417 	} else {
4418 		if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
4419 			DRM_DEBUG("evergreen_irq_set: sw int gfx\n");
4420 			cp_int_cntl |= RB_INT_ENABLE;
4421 			cp_int_cntl |= TIME_STAMP_INT_ENABLE;
4422 		}
4423 	}
4424 
4425 	if (atomic_read(&rdev->irq.ring_int[R600_RING_TYPE_DMA_INDEX])) {
4426 		DRM_DEBUG("r600_irq_set: sw int dma\n");
4427 		dma_cntl |= TRAP_ENABLE;
4428 	}
4429 
4430 	if (rdev->family >= CHIP_CAYMAN) {
4431 		dma_cntl1 = RREG32(CAYMAN_DMA1_CNTL) & ~TRAP_ENABLE;
4432 		if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_DMA1_INDEX])) {
4433 			DRM_DEBUG("r600_irq_set: sw int dma1\n");
4434 			dma_cntl1 |= TRAP_ENABLE;
4435 		}
4436 	}
4437 
4438 	if (rdev->irq.dpm_thermal) {
4439 		DRM_DEBUG("dpm thermal\n");
4440 		thermal_int |= THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW;
4441 	}
4442 
4443 	if (rdev->irq.crtc_vblank_int[0] ||
4444 	    atomic_read(&rdev->irq.pflip[0])) {
4445 		DRM_DEBUG("evergreen_irq_set: vblank 0\n");
4446 		crtc1 |= VBLANK_INT_MASK;
4447 	}
4448 	if (rdev->irq.crtc_vblank_int[1] ||
4449 	    atomic_read(&rdev->irq.pflip[1])) {
4450 		DRM_DEBUG("evergreen_irq_set: vblank 1\n");
4451 		crtc2 |= VBLANK_INT_MASK;
4452 	}
4453 	if (rdev->irq.crtc_vblank_int[2] ||
4454 	    atomic_read(&rdev->irq.pflip[2])) {
4455 		DRM_DEBUG("evergreen_irq_set: vblank 2\n");
4456 		crtc3 |= VBLANK_INT_MASK;
4457 	}
4458 	if (rdev->irq.crtc_vblank_int[3] ||
4459 	    atomic_read(&rdev->irq.pflip[3])) {
4460 		DRM_DEBUG("evergreen_irq_set: vblank 3\n");
4461 		crtc4 |= VBLANK_INT_MASK;
4462 	}
4463 	if (rdev->irq.crtc_vblank_int[4] ||
4464 	    atomic_read(&rdev->irq.pflip[4])) {
4465 		DRM_DEBUG("evergreen_irq_set: vblank 4\n");
4466 		crtc5 |= VBLANK_INT_MASK;
4467 	}
4468 	if (rdev->irq.crtc_vblank_int[5] ||
4469 	    atomic_read(&rdev->irq.pflip[5])) {
4470 		DRM_DEBUG("evergreen_irq_set: vblank 5\n");
4471 		crtc6 |= VBLANK_INT_MASK;
4472 	}
4473 	if (rdev->irq.hpd[0]) {
4474 		DRM_DEBUG("evergreen_irq_set: hpd 1\n");
4475 		hpd1 |= DC_HPDx_INT_EN;
4476 	}
4477 	if (rdev->irq.hpd[1]) {
4478 		DRM_DEBUG("evergreen_irq_set: hpd 2\n");
4479 		hpd2 |= DC_HPDx_INT_EN;
4480 	}
4481 	if (rdev->irq.hpd[2]) {
4482 		DRM_DEBUG("evergreen_irq_set: hpd 3\n");
4483 		hpd3 |= DC_HPDx_INT_EN;
4484 	}
4485 	if (rdev->irq.hpd[3]) {
4486 		DRM_DEBUG("evergreen_irq_set: hpd 4\n");
4487 		hpd4 |= DC_HPDx_INT_EN;
4488 	}
4489 	if (rdev->irq.hpd[4]) {
4490 		DRM_DEBUG("evergreen_irq_set: hpd 5\n");
4491 		hpd5 |= DC_HPDx_INT_EN;
4492 	}
4493 	if (rdev->irq.hpd[5]) {
4494 		DRM_DEBUG("evergreen_irq_set: hpd 6\n");
4495 		hpd6 |= DC_HPDx_INT_EN;
4496 	}
4497 	if (rdev->irq.afmt[0]) {
4498 		DRM_DEBUG("evergreen_irq_set: hdmi 0\n");
4499 		afmt1 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4500 	}
4501 	if (rdev->irq.afmt[1]) {
4502 		DRM_DEBUG("evergreen_irq_set: hdmi 1\n");
4503 		afmt2 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4504 	}
4505 	if (rdev->irq.afmt[2]) {
4506 		DRM_DEBUG("evergreen_irq_set: hdmi 2\n");
4507 		afmt3 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4508 	}
4509 	if (rdev->irq.afmt[3]) {
4510 		DRM_DEBUG("evergreen_irq_set: hdmi 3\n");
4511 		afmt4 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4512 	}
4513 	if (rdev->irq.afmt[4]) {
4514 		DRM_DEBUG("evergreen_irq_set: hdmi 4\n");
4515 		afmt5 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4516 	}
4517 	if (rdev->irq.afmt[5]) {
4518 		DRM_DEBUG("evergreen_irq_set: hdmi 5\n");
4519 		afmt6 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4520 	}
4521 
4522 	if (rdev->family >= CHIP_CAYMAN) {
4523 		cayman_cp_int_cntl_setup(rdev, 0, cp_int_cntl);
4524 		cayman_cp_int_cntl_setup(rdev, 1, cp_int_cntl1);
4525 		cayman_cp_int_cntl_setup(rdev, 2, cp_int_cntl2);
4526 	} else
4527 		WREG32(CP_INT_CNTL, cp_int_cntl);
4528 
4529 	WREG32(DMA_CNTL, dma_cntl);
4530 
4531 	if (rdev->family >= CHIP_CAYMAN)
4532 		WREG32(CAYMAN_DMA1_CNTL, dma_cntl1);
4533 
4534 	WREG32(GRBM_INT_CNTL, grbm_int_cntl);
4535 
4536 	WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, crtc1);
4537 	WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, crtc2);
4538 	if (rdev->num_crtc >= 4) {
4539 		WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, crtc3);
4540 		WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, crtc4);
4541 	}
4542 	if (rdev->num_crtc >= 6) {
4543 		WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, crtc5);
4544 		WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, crtc6);
4545 	}
4546 
4547 	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET,
4548 	       GRPH_PFLIP_INT_MASK);
4549 	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET,
4550 	       GRPH_PFLIP_INT_MASK);
4551 	if (rdev->num_crtc >= 4) {
4552 		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET,
4553 		       GRPH_PFLIP_INT_MASK);
4554 		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET,
4555 		       GRPH_PFLIP_INT_MASK);
4556 	}
4557 	if (rdev->num_crtc >= 6) {
4558 		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET,
4559 		       GRPH_PFLIP_INT_MASK);
4560 		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET,
4561 		       GRPH_PFLIP_INT_MASK);
4562 	}
4563 
4564 	WREG32(DC_HPD1_INT_CONTROL, hpd1);
4565 	WREG32(DC_HPD2_INT_CONTROL, hpd2);
4566 	WREG32(DC_HPD3_INT_CONTROL, hpd3);
4567 	WREG32(DC_HPD4_INT_CONTROL, hpd4);
4568 	WREG32(DC_HPD5_INT_CONTROL, hpd5);
4569 	WREG32(DC_HPD6_INT_CONTROL, hpd6);
4570 	if (rdev->family == CHIP_ARUBA)
4571 		WREG32(TN_CG_THERMAL_INT_CTRL, thermal_int);
4572 	else
4573 		WREG32(CG_THERMAL_INT, thermal_int);
4574 
4575 	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, afmt1);
4576 	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, afmt2);
4577 	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, afmt3);
4578 	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, afmt4);
4579 	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, afmt5);
4580 	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, afmt6);
4581 
4582 	return 0;
4583 }
4584 
4585 static void evergreen_irq_ack(struct radeon_device *rdev)
4586 {
4587 	u32 tmp;
4588 
4589 	rdev->irq.stat_regs.evergreen.disp_int = RREG32(DISP_INTERRUPT_STATUS);
4590 	rdev->irq.stat_regs.evergreen.disp_int_cont = RREG32(DISP_INTERRUPT_STATUS_CONTINUE);
4591 	rdev->irq.stat_regs.evergreen.disp_int_cont2 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE2);
4592 	rdev->irq.stat_regs.evergreen.disp_int_cont3 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE3);
4593 	rdev->irq.stat_regs.evergreen.disp_int_cont4 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE4);
4594 	rdev->irq.stat_regs.evergreen.disp_int_cont5 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE5);
4595 	rdev->irq.stat_regs.evergreen.d1grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
4596 	rdev->irq.stat_regs.evergreen.d2grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
4597 	if (rdev->num_crtc >= 4) {
4598 		rdev->irq.stat_regs.evergreen.d3grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
4599 		rdev->irq.stat_regs.evergreen.d4grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
4600 	}
4601 	if (rdev->num_crtc >= 6) {
4602 		rdev->irq.stat_regs.evergreen.d5grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
4603 		rdev->irq.stat_regs.evergreen.d6grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);
4604 	}
4605 
4606 	rdev->irq.stat_regs.evergreen.afmt_status1 = RREG32(AFMT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
4607 	rdev->irq.stat_regs.evergreen.afmt_status2 = RREG32(AFMT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
4608 	rdev->irq.stat_regs.evergreen.afmt_status3 = RREG32(AFMT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
4609 	rdev->irq.stat_regs.evergreen.afmt_status4 = RREG32(AFMT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
4610 	rdev->irq.stat_regs.evergreen.afmt_status5 = RREG32(AFMT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
4611 	rdev->irq.stat_regs.evergreen.afmt_status6 = RREG32(AFMT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);
4612 
4613 	if (rdev->irq.stat_regs.evergreen.d1grph_int & GRPH_PFLIP_INT_OCCURRED)
4614 		WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4615 	if (rdev->irq.stat_regs.evergreen.d2grph_int & GRPH_PFLIP_INT_OCCURRED)
4616 		WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4617 	if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT)
4618 		WREG32(VBLANK_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VBLANK_ACK);
4619 	if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT)
4620 		WREG32(VLINE_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VLINE_ACK);
4621 	if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT)
4622 		WREG32(VBLANK_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VBLANK_ACK);
4623 	if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT)
4624 		WREG32(VLINE_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VLINE_ACK);
4625 
4626 	if (rdev->num_crtc >= 4) {
4627 		if (rdev->irq.stat_regs.evergreen.d3grph_int & GRPH_PFLIP_INT_OCCURRED)
4628 			WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4629 		if (rdev->irq.stat_regs.evergreen.d4grph_int & GRPH_PFLIP_INT_OCCURRED)
4630 			WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4631 		if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT)
4632 			WREG32(VBLANK_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VBLANK_ACK);
4633 		if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT)
4634 			WREG32(VLINE_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VLINE_ACK);
4635 		if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT)
4636 			WREG32(VBLANK_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VBLANK_ACK);
4637 		if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT)
4638 			WREG32(VLINE_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VLINE_ACK);
4639 	}
4640 
4641 	if (rdev->num_crtc >= 6) {
4642 		if (rdev->irq.stat_regs.evergreen.d5grph_int & GRPH_PFLIP_INT_OCCURRED)
4643 			WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4644 		if (rdev->irq.stat_regs.evergreen.d6grph_int & GRPH_PFLIP_INT_OCCURRED)
4645 			WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4646 		if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT)
4647 			WREG32(VBLANK_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VBLANK_ACK);
4648 		if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT)
4649 			WREG32(VLINE_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VLINE_ACK);
4650 		if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT)
4651 			WREG32(VBLANK_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VBLANK_ACK);
4652 		if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT)
4653 			WREG32(VLINE_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VLINE_ACK);
4654 	}
4655 
4656 	if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT) {
4657 		tmp = RREG32(DC_HPD1_INT_CONTROL);
4658 		tmp |= DC_HPDx_INT_ACK;
4659 		WREG32(DC_HPD1_INT_CONTROL, tmp);
4660 	}
4661 	if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT) {
4662 		tmp = RREG32(DC_HPD2_INT_CONTROL);
4663 		tmp |= DC_HPDx_INT_ACK;
4664 		WREG32(DC_HPD2_INT_CONTROL, tmp);
4665 	}
4666 	if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT) {
4667 		tmp = RREG32(DC_HPD3_INT_CONTROL);
4668 		tmp |= DC_HPDx_INT_ACK;
4669 		WREG32(DC_HPD3_INT_CONTROL, tmp);
4670 	}
4671 	if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT) {
4672 		tmp = RREG32(DC_HPD4_INT_CONTROL);
4673 		tmp |= DC_HPDx_INT_ACK;
4674 		WREG32(DC_HPD4_INT_CONTROL, tmp);
4675 	}
4676 	if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT) {
4677 		tmp = RREG32(DC_HPD5_INT_CONTROL);
4678 		tmp |= DC_HPDx_INT_ACK;
4679 		WREG32(DC_HPD5_INT_CONTROL, tmp);
4680 	}
4681 	if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT) {
4682 		tmp = RREG32(DC_HPD5_INT_CONTROL);
4683 		tmp |= DC_HPDx_INT_ACK;
4684 		WREG32(DC_HPD6_INT_CONTROL, tmp);
4685 	}
4686 	if (rdev->irq.stat_regs.evergreen.afmt_status1 & AFMT_AZ_FORMAT_WTRIG) {
4687 		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET);
4688 		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4689 		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, tmp);
4690 	}
4691 	if (rdev->irq.stat_regs.evergreen.afmt_status2 & AFMT_AZ_FORMAT_WTRIG) {
4692 		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET);
4693 		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4694 		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, tmp);
4695 	}
4696 	if (rdev->irq.stat_regs.evergreen.afmt_status3 & AFMT_AZ_FORMAT_WTRIG) {
4697 		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET);
4698 		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4699 		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, tmp);
4700 	}
4701 	if (rdev->irq.stat_regs.evergreen.afmt_status4 & AFMT_AZ_FORMAT_WTRIG) {
4702 		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET);
4703 		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4704 		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, tmp);
4705 	}
4706 	if (rdev->irq.stat_regs.evergreen.afmt_status5 & AFMT_AZ_FORMAT_WTRIG) {
4707 		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET);
4708 		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4709 		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, tmp);
4710 	}
4711 	if (rdev->irq.stat_regs.evergreen.afmt_status6 & AFMT_AZ_FORMAT_WTRIG) {
4712 		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET);
4713 		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4714 		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, tmp);
4715 	}
4716 }
4717 
/**
 * evergreen_irq_disable - disable interrupts
 *
 * @rdev: radeon_device pointer
 *
 * Disables interrupt generation, then acks anything still pending and
 * masks all display interrupt sources.  The ordering matters: sources
 * are only acked after the controller has been quiesced.
 */
static void evergreen_irq_disable(struct radeon_device *rdev)
{
	r600_disable_interrupts(rdev);
	/* Wait and acknowledge irq */
	mdelay(1);
	evergreen_irq_ack(rdev);
	evergreen_disable_interrupt_state(rdev);
}
4726 
/**
 * evergreen_irq_suspend - disable interrupts for suspend
 *
 * @rdev: radeon_device pointer
 *
 * Disables all interrupt sources and stops the RLC; called on the
 * suspend path before the GART and write-back buffers are torn down.
 */
void evergreen_irq_suspend(struct radeon_device *rdev)
{
	evergreen_irq_disable(rdev);
	r600_rlc_stop(rdev);
}
4732 
4733 static u32 evergreen_get_ih_wptr(struct radeon_device *rdev)
4734 {
4735 	u32 wptr, tmp;
4736 
4737 	if (rdev->wb.enabled)
4738 		wptr = le32_to_cpu(rdev->wb.wb[R600_WB_IH_WPTR_OFFSET/4]);
4739 	else
4740 		wptr = RREG32(IH_RB_WPTR);
4741 
4742 	if (wptr & RB_OVERFLOW) {
4743 		wptr &= ~RB_OVERFLOW;
4744 		/* When a ring buffer overflow happen start parsing interrupt
4745 		 * from the last not overwritten vector (wptr + 16). Hopefully
4746 		 * this should allow us to catchup.
4747 		 */
4748 		dev_warn(rdev->dev, "IH ring buffer overflow (0x%08X, 0x%08X, 0x%08X)\n",
4749 			 wptr, rdev->ih.rptr, (wptr + 16) & rdev->ih.ptr_mask);
4750 		rdev->ih.rptr = (wptr + 16) & rdev->ih.ptr_mask;
4751 		tmp = RREG32(IH_RB_CNTL);
4752 		tmp |= IH_WPTR_OVERFLOW_CLEAR;
4753 		WREG32(IH_RB_CNTL, tmp);
4754 	}
4755 	return (wptr & rdev->ih.ptr_mask);
4756 }
4757 
4758 irqreturn_t evergreen_irq_process(struct radeon_device *rdev)
4759 {
4760 	u32 wptr;
4761 	u32 rptr;
4762 	u32 src_id, src_data;
4763 	u32 ring_index;
4764 	bool queue_hotplug = false;
4765 	bool queue_hdmi = false;
4766 	bool queue_thermal = false;
4767 	u32 status, addr;
4768 
4769 	if (!rdev->ih.enabled || rdev->shutdown)
4770 		return IRQ_NONE;
4771 
4772 	wptr = evergreen_get_ih_wptr(rdev);
4773 
4774 restart_ih:
4775 	/* is somebody else already processing irqs? */
4776 	if (atomic_xchg(&rdev->ih.lock, 1))
4777 		return IRQ_NONE;
4778 
4779 	rptr = rdev->ih.rptr;
4780 	DRM_DEBUG_VBLANK("evergreen_irq_process start: rptr %d, wptr %d\n", rptr, wptr);
4781 
4782 	/* Order reading of wptr vs. reading of IH ring data */
4783 	rmb();
4784 
4785 	/* display interrupts */
4786 	evergreen_irq_ack(rdev);
4787 
4788 	while (rptr != wptr) {
4789 		/* wptr/rptr are in bytes! */
4790 		ring_index = rptr / 4;
4791 		src_id =  le32_to_cpu(rdev->ih.ring[ring_index]) & 0xff;
4792 		src_data = le32_to_cpu(rdev->ih.ring[ring_index + 1]) & 0xfffffff;
4793 
4794 		switch (src_id) {
4795 		case 1: /* D1 vblank/vline */
4796 			switch (src_data) {
4797 			case 0: /* D1 vblank */
4798 				if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT) {
4799 					if (rdev->irq.crtc_vblank_int[0]) {
4800 						drm_handle_vblank(rdev->ddev, 0);
4801 						rdev->pm.vblank_sync = true;
4802 						wake_up(&rdev->irq.vblank_queue);
4803 					}
4804 					if (atomic_read(&rdev->irq.pflip[0]))
4805 						radeon_crtc_handle_vblank(rdev, 0);
4806 					rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VBLANK_INTERRUPT;
4807 					DRM_DEBUG_VBLANK("IH: D1 vblank\n");
4808 				}
4809 				break;
4810 			case 1: /* D1 vline */
4811 				if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT) {
4812 					rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VLINE_INTERRUPT;
4813 					DRM_DEBUG_VBLANK("IH: D1 vline\n");
4814 				}
4815 				break;
4816 			default:
4817 				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4818 				break;
4819 			}
4820 			break;
4821 		case 2: /* D2 vblank/vline */
4822 			switch (src_data) {
4823 			case 0: /* D2 vblank */
4824 				if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT) {
4825 					if (rdev->irq.crtc_vblank_int[1]) {
4826 						drm_handle_vblank(rdev->ddev, 1);
4827 						rdev->pm.vblank_sync = true;
4828 						wake_up(&rdev->irq.vblank_queue);
4829 					}
4830 					if (atomic_read(&rdev->irq.pflip[1]))
4831 						radeon_crtc_handle_vblank(rdev, 1);
4832 					rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VBLANK_INTERRUPT;
4833 					DRM_DEBUG_VBLANK("IH: D2 vblank\n");
4834 				}
4835 				break;
4836 			case 1: /* D2 vline */
4837 				if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT) {
4838 					rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VLINE_INTERRUPT;
4839 					DRM_DEBUG_VBLANK("IH: D2 vline\n");
4840 				}
4841 				break;
4842 			default:
4843 				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4844 				break;
4845 			}
4846 			break;
4847 		case 3: /* D3 vblank/vline */
4848 			switch (src_data) {
4849 			case 0: /* D3 vblank */
4850 				if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT) {
4851 					if (rdev->irq.crtc_vblank_int[2]) {
4852 						drm_handle_vblank(rdev->ddev, 2);
4853 						rdev->pm.vblank_sync = true;
4854 						wake_up(&rdev->irq.vblank_queue);
4855 					}
4856 					if (atomic_read(&rdev->irq.pflip[2]))
4857 						radeon_crtc_handle_vblank(rdev, 2);
4858 					rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VBLANK_INTERRUPT;
4859 					DRM_DEBUG_VBLANK("IH: D3 vblank\n");
4860 				}
4861 				break;
4862 			case 1: /* D3 vline */
4863 				if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT) {
4864 					rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VLINE_INTERRUPT;
4865 					DRM_DEBUG_VBLANK("IH: D3 vline\n");
4866 				}
4867 				break;
4868 			default:
4869 				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4870 				break;
4871 			}
4872 			break;
4873 		case 4: /* D4 vblank/vline */
4874 			switch (src_data) {
4875 			case 0: /* D4 vblank */
4876 				if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT) {
4877 					if (rdev->irq.crtc_vblank_int[3]) {
4878 						drm_handle_vblank(rdev->ddev, 3);
4879 						rdev->pm.vblank_sync = true;
4880 						wake_up(&rdev->irq.vblank_queue);
4881 					}
4882 					if (atomic_read(&rdev->irq.pflip[3]))
4883 						radeon_crtc_handle_vblank(rdev, 3);
4884 					rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VBLANK_INTERRUPT;
4885 					DRM_DEBUG_VBLANK("IH: D4 vblank\n");
4886 				}
4887 				break;
4888 			case 1: /* D4 vline */
4889 				if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT) {
4890 					rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VLINE_INTERRUPT;
4891 					DRM_DEBUG_VBLANK("IH: D4 vline\n");
4892 				}
4893 				break;
4894 			default:
4895 				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4896 				break;
4897 			}
4898 			break;
4899 		case 5: /* D5 vblank/vline */
4900 			switch (src_data) {
4901 			case 0: /* D5 vblank */
4902 				if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT) {
4903 					if (rdev->irq.crtc_vblank_int[4]) {
4904 						drm_handle_vblank(rdev->ddev, 4);
4905 						rdev->pm.vblank_sync = true;
4906 						wake_up(&rdev->irq.vblank_queue);
4907 					}
4908 					if (atomic_read(&rdev->irq.pflip[4]))
4909 						radeon_crtc_handle_vblank(rdev, 4);
4910 					rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VBLANK_INTERRUPT;
4911 					DRM_DEBUG_VBLANK("IH: D5 vblank\n");
4912 				}
4913 				break;
4914 			case 1: /* D5 vline */
4915 				if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT) {
4916 					rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VLINE_INTERRUPT;
4917 					DRM_DEBUG_VBLANK("IH: D5 vline\n");
4918 				}
4919 				break;
4920 			default:
4921 				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4922 				break;
4923 			}
4924 			break;
4925 		case 6: /* D6 vblank/vline */
4926 			switch (src_data) {
4927 			case 0: /* D6 vblank */
4928 				if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT) {
4929 					if (rdev->irq.crtc_vblank_int[5]) {
4930 						drm_handle_vblank(rdev->ddev, 5);
4931 						rdev->pm.vblank_sync = true;
4932 						wake_up(&rdev->irq.vblank_queue);
4933 					}
4934 					if (atomic_read(&rdev->irq.pflip[5]))
4935 						radeon_crtc_handle_vblank(rdev, 5);
4936 					rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VBLANK_INTERRUPT;
4937 					DRM_DEBUG_VBLANK("IH: D6 vblank\n");
4938 				}
4939 				break;
4940 			case 1: /* D6 vline */
4941 				if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT) {
4942 					rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VLINE_INTERRUPT;
4943 					DRM_DEBUG_VBLANK("IH: D6 vline\n");
4944 				}
4945 				break;
4946 			default:
4947 				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4948 				break;
4949 			}
4950 			break;
4951 		case 8: /* D1 page flip */
4952 		case 10: /* D2 page flip */
4953 		case 12: /* D3 page flip */
4954 		case 14: /* D4 page flip */
4955 		case 16: /* D5 page flip */
4956 		case 18: /* D6 page flip */
4957 			DRM_DEBUG_VBLANK("IH: D%d flip\n", ((src_id - 8) >> 1) + 1);
4958 			if (radeon_use_pflipirq > 0)
4959 				radeon_crtc_handle_flip(rdev, (src_id - 8) >> 1);
4960 			break;
4961 		case 42: /* HPD hotplug */
4962 			switch (src_data) {
4963 			case 0:
4964 				if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT) {
4965 					rdev->irq.stat_regs.evergreen.disp_int &= ~DC_HPD1_INTERRUPT;
4966 					queue_hotplug = true;
4967 					DRM_DEBUG("IH: HPD1\n");
4968 				}
4969 				break;
4970 			case 1:
4971 				if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT) {
4972 					rdev->irq.stat_regs.evergreen.disp_int_cont &= ~DC_HPD2_INTERRUPT;
4973 					queue_hotplug = true;
4974 					DRM_DEBUG("IH: HPD2\n");
4975 				}
4976 				break;
4977 			case 2:
4978 				if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT) {
4979 					rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~DC_HPD3_INTERRUPT;
4980 					queue_hotplug = true;
4981 					DRM_DEBUG("IH: HPD3\n");
4982 				}
4983 				break;
4984 			case 3:
4985 				if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT) {
4986 					rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~DC_HPD4_INTERRUPT;
4987 					queue_hotplug = true;
4988 					DRM_DEBUG("IH: HPD4\n");
4989 				}
4990 				break;
4991 			case 4:
4992 				if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT) {
4993 					rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~DC_HPD5_INTERRUPT;
4994 					queue_hotplug = true;
4995 					DRM_DEBUG("IH: HPD5\n");
4996 				}
4997 				break;
4998 			case 5:
4999 				if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT) {
5000 					rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~DC_HPD6_INTERRUPT;
5001 					queue_hotplug = true;
5002 					DRM_DEBUG("IH: HPD6\n");
5003 				}
5004 				break;
5005 			default:
5006 				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5007 				break;
5008 			}
5009 			break;
5010 		case 44: /* hdmi */
5011 			switch (src_data) {
5012 			case 0:
5013 				if (rdev->irq.stat_regs.evergreen.afmt_status1 & AFMT_AZ_FORMAT_WTRIG) {
5014 					rdev->irq.stat_regs.evergreen.afmt_status1 &= ~AFMT_AZ_FORMAT_WTRIG;
5015 					queue_hdmi = true;
5016 					DRM_DEBUG("IH: HDMI0\n");
5017 				}
5018 				break;
5019 			case 1:
5020 				if (rdev->irq.stat_regs.evergreen.afmt_status2 & AFMT_AZ_FORMAT_WTRIG) {
5021 					rdev->irq.stat_regs.evergreen.afmt_status2 &= ~AFMT_AZ_FORMAT_WTRIG;
5022 					queue_hdmi = true;
5023 					DRM_DEBUG("IH: HDMI1\n");
5024 				}
5025 				break;
5026 			case 2:
5027 				if (rdev->irq.stat_regs.evergreen.afmt_status3 & AFMT_AZ_FORMAT_WTRIG) {
5028 					rdev->irq.stat_regs.evergreen.afmt_status3 &= ~AFMT_AZ_FORMAT_WTRIG;
5029 					queue_hdmi = true;
5030 					DRM_DEBUG("IH: HDMI2\n");
5031 				}
5032 				break;
5033 			case 3:
5034 				if (rdev->irq.stat_regs.evergreen.afmt_status4 & AFMT_AZ_FORMAT_WTRIG) {
5035 					rdev->irq.stat_regs.evergreen.afmt_status4 &= ~AFMT_AZ_FORMAT_WTRIG;
5036 					queue_hdmi = true;
5037 					DRM_DEBUG("IH: HDMI3\n");
5038 				}
5039 				break;
5040 			case 4:
5041 				if (rdev->irq.stat_regs.evergreen.afmt_status5 & AFMT_AZ_FORMAT_WTRIG) {
5042 					rdev->irq.stat_regs.evergreen.afmt_status5 &= ~AFMT_AZ_FORMAT_WTRIG;
5043 					queue_hdmi = true;
5044 					DRM_DEBUG("IH: HDMI4\n");
5045 				}
5046 				break;
5047 			case 5:
5048 				if (rdev->irq.stat_regs.evergreen.afmt_status6 & AFMT_AZ_FORMAT_WTRIG) {
5049 					rdev->irq.stat_regs.evergreen.afmt_status6 &= ~AFMT_AZ_FORMAT_WTRIG;
5050 					queue_hdmi = true;
5051 					DRM_DEBUG("IH: HDMI5\n");
5052 				}
5053 				break;
5054 			default:
5055 				DRM_ERROR("Unhandled interrupt: %d %d\n", src_id, src_data);
5056 				break;
5057 			}
5058 		case 124: /* UVD */
5059 			DRM_DEBUG("IH: UVD int: 0x%08x\n", src_data);
5060 			radeon_fence_process(rdev, R600_RING_TYPE_UVD_INDEX);
5061 			break;
5062 		case 146:
5063 		case 147:
5064 			addr = RREG32(VM_CONTEXT1_PROTECTION_FAULT_ADDR);
5065 			status = RREG32(VM_CONTEXT1_PROTECTION_FAULT_STATUS);
5066 			/* reset addr and status */
5067 			WREG32_P(VM_CONTEXT1_CNTL2, 1, ~1);
5068 			if (addr == 0x0 && status == 0x0)
5069 				break;
5070 			dev_err(rdev->dev, "GPU fault detected: %d 0x%08x\n", src_id, src_data);
5071 			dev_err(rdev->dev, "  VM_CONTEXT1_PROTECTION_FAULT_ADDR   0x%08X\n",
5072 				addr);
5073 			dev_err(rdev->dev, "  VM_CONTEXT1_PROTECTION_FAULT_STATUS 0x%08X\n",
5074 				status);
5075 			cayman_vm_decode_fault(rdev, status, addr);
5076 			break;
5077 		case 176: /* CP_INT in ring buffer */
5078 		case 177: /* CP_INT in IB1 */
5079 		case 178: /* CP_INT in IB2 */
5080 			DRM_DEBUG("IH: CP int: 0x%08x\n", src_data);
5081 			radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
5082 			break;
5083 		case 181: /* CP EOP event */
5084 			DRM_DEBUG("IH: CP EOP\n");
5085 			if (rdev->family >= CHIP_CAYMAN) {
5086 				switch (src_data) {
5087 				case 0:
5088 					radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
5089 					break;
5090 				case 1:
5091 					radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP1_INDEX);
5092 					break;
5093 				case 2:
5094 					radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP2_INDEX);
5095 					break;
5096 				}
5097 			} else
5098 				radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
5099 			break;
5100 		case 224: /* DMA trap event */
5101 			DRM_DEBUG("IH: DMA trap\n");
5102 			radeon_fence_process(rdev, R600_RING_TYPE_DMA_INDEX);
5103 			break;
5104 		case 230: /* thermal low to high */
5105 			DRM_DEBUG("IH: thermal low to high\n");
5106 			rdev->pm.dpm.thermal.high_to_low = false;
5107 			queue_thermal = true;
5108 			break;
5109 		case 231: /* thermal high to low */
5110 			DRM_DEBUG("IH: thermal high to low\n");
5111 			rdev->pm.dpm.thermal.high_to_low = true;
5112 			queue_thermal = true;
5113 			break;
5114 		case 233: /* GUI IDLE */
5115 			DRM_DEBUG("IH: GUI idle\n");
5116 			break;
5117 		case 244: /* DMA trap event */
5118 			if (rdev->family >= CHIP_CAYMAN) {
5119 				DRM_DEBUG("IH: DMA1 trap\n");
5120 				radeon_fence_process(rdev, CAYMAN_RING_TYPE_DMA1_INDEX);
5121 			}
5122 			break;
5123 		default:
5124 			DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5125 			break;
5126 		}
5127 
5128 		/* wptr/rptr are in bytes! */
5129 		rptr += 16;
5130 		rptr &= rdev->ih.ptr_mask;
5131 		WREG32(IH_RB_RPTR, rptr);
5132 	}
5133 	if (queue_hotplug)
5134 		taskqueue_enqueue(rdev->tq, &rdev->hotplug_work);
5135 	if (queue_hdmi)
5136 		taskqueue_enqueue(rdev->tq, &rdev->audio_work);
5137 	if (queue_thermal && rdev->pm.dpm_enabled)
5138 		taskqueue_enqueue(rdev->tq, &rdev->pm.dpm.thermal.work);
5139 	rdev->ih.rptr = rptr;
5140 	atomic_set(&rdev->ih.lock, 0);
5141 
5142 	/* make sure wptr hasn't changed while processing */
5143 	wptr = evergreen_get_ih_wptr(rdev);
5144 	if (wptr != rptr)
5145 		goto restart_ih;
5146 
5147 	return IRQ_HANDLED;
5148 }
5149 
/**
 * evergreen_startup - bring the GPU up to an operational state
 *
 * @rdev: radeon_device pointer
 *
 * Common startup path shared by init and resume: programs the PCIe
 * link/ASPM, MC and GART, loads microcode, sets up write-back and
 * fence rings, installs the interrupt handler, and starts the CP,
 * DMA and (optionally) UVD rings plus the IB pool and audio.
 * The sequence is strictly ordered; each step depends on the previous
 * ones having completed.  Returns 0 on success or a negative errno.
 */
static int evergreen_startup(struct radeon_device *rdev)
{
	struct radeon_ring *ring;
	int r;

	/* enable pcie gen2 link */
	evergreen_pcie_gen2_enable(rdev);
	/* enable aspm */
	evergreen_program_aspm(rdev);

	/* scratch needs to be initialized before MC */
	r = r600_vram_scratch_init(rdev);
	if (r)
		return r;

	evergreen_mc_program(rdev);

	/* DCE5 parts need MC microcode; skip when DPM already loaded it */
	if (ASIC_IS_DCE5(rdev) && !rdev->pm.dpm_enabled) {
		r = ni_mc_load_microcode(rdev);
		if (r) {
			DRM_ERROR("Failed to load MC firmware!\n");
			return r;
		}
	}

	if (rdev->flags & RADEON_IS_AGP) {
		evergreen_agp_enable(rdev);
	} else {
		r = evergreen_pcie_gart_enable(rdev);
		if (r)
			return r;
	}
	evergreen_gpu_init(rdev);

	/* allocate rlc buffers */
	if (rdev->flags & RADEON_IS_IGP) {
		rdev->rlc.reg_list = sumo_rlc_save_restore_register_list;
		rdev->rlc.reg_list_size =
			(u32)ARRAY_SIZE(sumo_rlc_save_restore_register_list);
		rdev->rlc.cs_data = evergreen_cs_data;
		r = sumo_rlc_init(rdev);
		if (r) {
			DRM_ERROR("Failed to init rlc BOs!\n");
			return r;
		}
	}

	/* allocate wb buffer */
	r = radeon_wb_init(rdev);
	if (r)
		return r;

	r = radeon_fence_driver_start_ring(rdev, RADEON_RING_TYPE_GFX_INDEX);
	if (r) {
		dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r);
		return r;
	}

	r = radeon_fence_driver_start_ring(rdev, R600_RING_TYPE_DMA_INDEX);
	if (r) {
		dev_err(rdev->dev, "failed initializing DMA fences (%d).\n", r);
		return r;
	}

	/* UVD failure is non-fatal: the ring is simply disabled below */
	r = uvd_v2_2_resume(rdev);
	if (!r) {
		r = radeon_fence_driver_start_ring(rdev,
						   R600_RING_TYPE_UVD_INDEX);
		if (r)
			dev_err(rdev->dev, "UVD fences init error (%d).\n", r);
	}

	if (r)
		rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size = 0;

	/* Enable IRQ */
	if (!rdev->irq.installed) {
		r = radeon_irq_kms_init(rdev);
		if (r)
			return r;
	}

	r = r600_irq_init(rdev);
	if (r) {
		DRM_ERROR("radeon: IH init failed (%d).\n", r);
		radeon_irq_kms_fini(rdev);
		return r;
	}
	evergreen_irq_set(rdev);

	ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
	r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP_RPTR_OFFSET,
			     RADEON_CP_PACKET2);
	if (r)
		return r;

	ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX];
	r = radeon_ring_init(rdev, ring, ring->ring_size, R600_WB_DMA_RPTR_OFFSET,
			     DMA_PACKET(DMA_PACKET_NOP, 0, 0));
	if (r)
		return r;

	r = evergreen_cp_load_microcode(rdev);
	if (r)
		return r;
	r = evergreen_cp_resume(rdev);
	if (r)
		return r;
	r = r600_dma_resume(rdev);
	if (r)
		return r;

	/* ring_size == 0 means UVD was disabled above */
	ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX];
	if (ring->ring_size) {
		r = radeon_ring_init(rdev, ring, ring->ring_size, 0,
				     RADEON_CP_PACKET2);
		if (!r)
			r = uvd_v1_0_init(rdev);

		if (r)
			DRM_ERROR("radeon: error initializing UVD (%d).\n", r);
	}

	r = radeon_ib_pool_init(rdev);
	if (r) {
		dev_err(rdev->dev, "IB initialization failed (%d).\n", r);
		return r;
	}

	r = r600_audio_init(rdev);
	if (r) {
		DRM_ERROR("radeon: audio init failed\n");
		return r;
	}

	return 0;
}
5287 
5288 int evergreen_resume(struct radeon_device *rdev)
5289 {
5290 	int r;
5291 
5292 	/* reset the asic, the gfx blocks are often in a bad state
5293 	 * after the driver is unloaded or after a resume
5294 	 */
5295 	if (radeon_asic_reset(rdev))
5296 		dev_warn(rdev->dev, "GPU reset failed !\n");
5297 	/* Do not reset GPU before posting, on rv770 hw unlike on r500 hw,
5298 	 * posting will perform necessary task to bring back GPU into good
5299 	 * shape.
5300 	 */
5301 	/* post card */
5302 	atom_asic_init(rdev->mode_info.atom_context);
5303 
5304 	/* init golden registers */
5305 	evergreen_init_golden_registers(rdev);
5306 
5307 	if (rdev->pm.pm_method == PM_METHOD_DPM)
5308 		radeon_pm_resume(rdev);
5309 
5310 	rdev->accel_working = true;
5311 	r = evergreen_startup(rdev);
5312 	if (r) {
5313 		DRM_ERROR("evergreen startup failed on resume\n");
5314 		rdev->accel_working = false;
5315 		return r;
5316 	}
5317 
5318 	return r;
5319 
5320 }
5321 
/**
 * evergreen_suspend - quiesce the GPU for suspend
 *
 * @rdev: radeon_device pointer
 *
 * Tears down the active engines in dependency order: power management
 * and audio first, then UVD, the CP and DMA rings, interrupts,
 * write-back and finally the GART.  Always returns 0.
 */
int evergreen_suspend(struct radeon_device *rdev)
{
	radeon_pm_suspend(rdev);
	r600_audio_fini(rdev);
	uvd_v1_0_fini(rdev);
	radeon_uvd_suspend(rdev);
	r700_cp_stop(rdev);
	r600_dma_stop(rdev);
	evergreen_irq_suspend(rdev);
	radeon_wb_disable(rdev);
	evergreen_pcie_gart_disable(rdev);

	return 0;
}
5336 
/* The plan is to move initialization into this function and use
 * helper functions so that radeon_device_init does pretty much
 * nothing more than call asic-specific functions. This should
 * also allow us to remove a bunch of callback functions, like
 * vram_info.
 */
/**
 * evergreen_init - asic specific driver and hw init
 *
 * @rdev: radeon_device pointer
 *
 * One-time setup for evergreen-class parts: fetch and validate the
 * ATOM BIOS, reset and post the card if needed, initialize clocks,
 * fences, AGP, the memory controller and the buffer-object manager,
 * load microcode, set up the GFX/DMA/UVD rings and the IH ring,
 * then attempt a first startup.  A startup failure disables
 * acceleration but is not fatal (modesetting can still work).
 *
 * Returns 0 on success, negative error code on fatal init failure.
 */
int evergreen_init(struct radeon_device *rdev)
{
	int r;

	/* Read BIOS */
	if (!radeon_get_bios(rdev)) {
		if (ASIC_IS_AVIVO(rdev))
			return -EINVAL;
	}
	/* Must be an ATOMBIOS */
	if (!rdev->is_atom_bios) {
		dev_err(rdev->dev, "Expecting atombios for evergreen GPU\n");
		return -EINVAL;
	}
	r = radeon_atombios_init(rdev);
	if (r)
		return r;
	/* reset the asic, the gfx blocks are often in a bad state
	 * after the driver is unloaded or after a resume
	 */
	if (radeon_asic_reset(rdev))
		dev_warn(rdev->dev, "GPU reset failed !\n");
	/* Post card if necessary */
	if (!radeon_card_posted(rdev)) {
		if (!rdev->bios) {
			dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n");
			return -EINVAL;
		}
		DRM_INFO("GPU not posted. posting now...\n");
		atom_asic_init(rdev->mode_info.atom_context);
	}
	/* init golden registers */
	evergreen_init_golden_registers(rdev);
	/* Initialize scratch registers */
	r600_scratch_init(rdev);
	/* Initialize surface registers */
	radeon_surface_init(rdev);
	/* Initialize clocks */
	radeon_get_clock_info(rdev->ddev);
	/* Fence driver */
	r = radeon_fence_driver_init(rdev);
	if (r)
		return r;
	/* initialize AGP */
	if (rdev->flags & RADEON_IS_AGP) {
		r = radeon_agp_init(rdev);
		if (r)
			radeon_agp_disable(rdev);
	}
	/* initialize memory controller */
	r = evergreen_mc_init(rdev);
	if (r)
		return r;
	/* Memory manager */
	r = radeon_bo_init(rdev);
	if (r)
		return r;

	/* Load microcode if not already present; DCE5 (NI) parts
	 * additionally require MC firmware.
	 */
	if (ASIC_IS_DCE5(rdev)) {
		if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw || !rdev->mc_fw) {
			r = ni_init_microcode(rdev);
			if (r) {
				DRM_ERROR("Failed to load firmware!\n");
				return r;
			}
		}
	} else {
		if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw) {
			r = r600_init_microcode(rdev);
			if (r) {
				DRM_ERROR("Failed to load firmware!\n");
				return r;
			}
		}
	}

	/* Initialize power management */
	radeon_pm_init(rdev);

	rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ring_obj = NULL;
	r600_ring_init(rdev, &rdev->ring[RADEON_RING_TYPE_GFX_INDEX], 1024 * 1024);

	rdev->ring[R600_RING_TYPE_DMA_INDEX].ring_obj = NULL;
	r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_DMA_INDEX], 64 * 1024);

	/* UVD is optional: a failure here just leaves the UVD ring
	 * uninitialized rather than failing the whole init.
	 */
	r = radeon_uvd_init(rdev);
	if (!r) {
		rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_obj = NULL;
		r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_UVD_INDEX],
			       4096);
	}

	rdev->ih.ring_obj = NULL;
	r600_ih_ring_init(rdev, 64 * 1024);

	r = r600_pcie_gart_init(rdev);
	if (r)
		return r;

	/* First startup attempt; on failure tear down the accel
	 * engines and continue without acceleration.
	 */
	rdev->accel_working = true;
	r = evergreen_startup(rdev);
	if (r) {
		dev_err(rdev->dev, "disabling GPU acceleration\n");
		r700_cp_fini(rdev);
		r600_dma_fini(rdev);
		r600_irq_fini(rdev);
		if (rdev->flags & RADEON_IS_IGP)
			sumo_rlc_fini(rdev);
		radeon_wb_fini(rdev);
		radeon_ib_pool_fini(rdev);
		radeon_irq_kms_fini(rdev);
		evergreen_pcie_gart_fini(rdev);
		rdev->accel_working = false;
	}

	/* Don't start up if the MC ucode is missing on BTC parts.
	 * The default clocks and voltages before the MC ucode
	 * is loaded are not sufficient for advanced operations.
	 * (This check deliberately follows the startup attempt.)
	 */
	if (ASIC_IS_DCE5(rdev)) {
		if (!rdev->mc_fw && !(rdev->flags & RADEON_IS_IGP)) {
			DRM_ERROR("radeon: MC ucode required for NI+.\n");
			return -EINVAL;
		}
	}

	return 0;
}
5471 
/**
 * evergreen_fini - asic specific driver and hw teardown
 *
 * @rdev: radeon_device pointer
 *
 * Tears down everything set up by evergreen_init(): power
 * management, audio, the CP/DMA engines, interrupts, RLC (IGP
 * only), writeback, the IB pool, UVD, the GART, VRAM scratch,
 * GEM, fences, AGP, the buffer-object manager, atombios state,
 * microcode, and finally the cached BIOS image.
 */
void evergreen_fini(struct radeon_device *rdev)
{
	radeon_pm_fini(rdev);
	r600_audio_fini(rdev);
	r700_cp_fini(rdev);
	r600_dma_fini(rdev);
	r600_irq_fini(rdev);
	if (rdev->flags & RADEON_IS_IGP)
		sumo_rlc_fini(rdev);
	radeon_wb_fini(rdev);
	radeon_ib_pool_fini(rdev);
	radeon_irq_kms_fini(rdev);
	uvd_v1_0_fini(rdev);
	radeon_uvd_fini(rdev);
	evergreen_pcie_gart_fini(rdev);
	r600_vram_scratch_fini(rdev);
	radeon_gem_fini(rdev);
	radeon_fence_driver_fini(rdev);
	radeon_agp_fini(rdev);
	radeon_bo_fini(rdev);
	radeon_atombios_fini(rdev);
	if (ASIC_IS_DCE5(rdev))
		ni_fini_microcode(rdev);
	else
		r600_fini_microcode(rdev);
	/* kfree(NULL) is a no-op, so this is safe even if the BIOS
	 * was never read.
	 */
	kfree(rdev->bios);
	rdev->bios = NULL;
}
5500 
/**
 * evergreen_pcie_gen2_enable - try to switch the PCIe link to gen2 speed
 *
 * @rdev: radeon_device pointer
 *
 * Bails out early when gen2 is disabled via the radeon.pcie_gen2
 * module parameter, on IGP or non-PCIE parts, on X2 (dual-GPU)
 * boards, when the platform does not advertise 5.0 GT/s support,
 * or when the link is already running at gen2 speed.  Otherwise
 * programs PCIE_LC_SPEED_CNTL / PCIE_LC_LINK_WIDTH_CNTL through a
 * fixed read-modify-write sequence to request the speed change.
 */
void evergreen_pcie_gen2_enable(struct radeon_device *rdev)
{
	u32 link_width_cntl, speed_cntl, mask;
	int ret;

	if (radeon_pcie_gen2 == 0)
		return;

	if (rdev->flags & RADEON_IS_IGP)
		return;

	if (!(rdev->flags & RADEON_IS_PCIE))
		return;

	/* x2 cards have a special sequence */
	if (ASIC_IS_X2(rdev))
		return;

	ret = drm_pcie_get_speed_cap_mask(rdev->ddev, &mask);
	if (ret != 0)
		return;

	if (!(mask & DRM_PCIE_SPEED_50))
		return;

	speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
	if (speed_cntl & LC_CURRENT_DATA_RATE) {
		DRM_INFO("PCIE gen 2 link speeds already enabled\n");
		return;
	}

	DRM_INFO("enabling PCIE gen 2 link speeds, disable with radeon.pcie_gen2=0\n");

	if ((speed_cntl & LC_OTHER_SIDE_EVER_SENT_GEN2) ||
	    (speed_cntl & LC_OTHER_SIDE_SUPPORTS_GEN2)) {

		/* The link partner supports gen2: allow upconfigure,
		 * clear the target-speed override, pulse the failed
		 * speed-change counter clear, then set the gen2 strap.
		 * Each register is re-read before modification.
		 */
		link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
		link_width_cntl &= ~LC_UPCONFIGURE_DIS;
		WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);

		speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
		speed_cntl &= ~LC_TARGET_LINK_SPEED_OVERRIDE_EN;
		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);

		speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
		speed_cntl |= LC_CLR_FAILED_SPD_CHANGE_CNT;
		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);

		speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
		speed_cntl &= ~LC_CLR_FAILED_SPD_CHANGE_CNT;
		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);

		speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
		speed_cntl |= LC_GEN2_EN_STRAP;
		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);

	} else {
		link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
		/* XXX: only disable it if gen1 bridge vendor == 0x111d or 0x1106 */
		/* The `if (1)` deliberately keeps the disable path
		 * unconditional until the bridge-vendor check above
		 * is implemented; the else branch is intentionally
		 * dead code kept for that future change.
		 */
		if (1)
			link_width_cntl |= LC_UPCONFIGURE_DIS;
		else
			link_width_cntl &= ~LC_UPCONFIGURE_DIS;
		WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
	}
}
5567 
/**
 * evergreen_program_aspm - program PCIe Active State Power Management
 *
 * @rdev: radeon_device pointer
 *
 * Configures ASPM (L0s/L1 link power states) for evergreen and
 * newer parts: sets up PIF pairing based on platform type,
 * programs L0s/L1 inactivity timers into PCIE_LC_CNTL, and, when
 * PLL-off-in-L1 is allowed, programs PLL power states and ramp-up
 * times in the PB0/PB1 PIF PHY registers.  Does nothing when ASPM
 * is disabled via the radeon.aspm module parameter or on non-PCIE
 * parts.  Registers are only written when the value changed.
 */
void evergreen_program_aspm(struct radeon_device *rdev)
{
	u32 data, orig;
	u32 pcie_lc_cntl, pcie_lc_cntl_old;
	bool disable_l0s, disable_l1 = false, disable_plloff_in_l1 = false;
	/* fusion_platform = true
	 * if the system is a fusion system
	 * (APU or DGPU in a fusion system).
	 * todo: check if the system is a fusion platform.
	 */
	bool fusion_platform = false;

	if (radeon_aspm == 0)
		return;

	if (!(rdev->flags & RADEON_IS_PCIE))
		return;

	/* These families keep L0s disabled; others may enable it. */
	switch (rdev->family) {
	case CHIP_CYPRESS:
	case CHIP_HEMLOCK:
	case CHIP_JUNIPER:
	case CHIP_REDWOOD:
	case CHIP_CEDAR:
	case CHIP_SUMO:
	case CHIP_SUMO2:
	case CHIP_PALM:
	case CHIP_ARUBA:
		disable_l0s = true;
		break;
	default:
		disable_l0s = false;
		break;
	}

	if (rdev->flags & RADEON_IS_IGP)
		fusion_platform = true; /* XXX also dGPUs in a fusion system */

	/* PIF pairing: fusion platforms clear MULTI_PIF, discrete set it */
	data = orig = RREG32_PIF_PHY0(PB0_PIF_PAIRING);
	if (fusion_platform)
		data &= ~MULTI_PIF;
	else
		data |= MULTI_PIF;
	if (data != orig)
		WREG32_PIF_PHY0(PB0_PIF_PAIRING, data);

	data = orig = RREG32_PIF_PHY1(PB1_PIF_PAIRING);
	if (fusion_platform)
		data &= ~MULTI_PIF;
	else
		data |= MULTI_PIF;
	if (data != orig)
		WREG32_PIF_PHY1(PB1_PIF_PAIRING, data);

	/* L0s/L1 inactivity timers; BARTS+ (NI) use different values
	 * than the evergreen parts.
	 */
	pcie_lc_cntl = pcie_lc_cntl_old = RREG32_PCIE_PORT(PCIE_LC_CNTL);
	pcie_lc_cntl &= ~(LC_L0S_INACTIVITY_MASK | LC_L1_INACTIVITY_MASK);
	if (!disable_l0s) {
		if (rdev->family >= CHIP_BARTS)
			pcie_lc_cntl |= LC_L0S_INACTIVITY(7);
		else
			pcie_lc_cntl |= LC_L0S_INACTIVITY(3);
	}

	if (!disable_l1) {
		if (rdev->family >= CHIP_BARTS)
			pcie_lc_cntl |= LC_L1_INACTIVITY(7);
		else
			pcie_lc_cntl |= LC_L1_INACTIVITY(8);

		/* Allow the PLLs to power down while in L1 */
		if (!disable_plloff_in_l1) {
			data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0);
			data &= ~(PLL_POWER_STATE_IN_OFF_0_MASK | PLL_POWER_STATE_IN_TXS2_0_MASK);
			data |= PLL_POWER_STATE_IN_OFF_0(7) | PLL_POWER_STATE_IN_TXS2_0(7);
			if (data != orig)
				WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0, data);

			data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1);
			data &= ~(PLL_POWER_STATE_IN_OFF_1_MASK | PLL_POWER_STATE_IN_TXS2_1_MASK);
			data |= PLL_POWER_STATE_IN_OFF_1(7) | PLL_POWER_STATE_IN_TXS2_1(7);
			if (data != orig)
				WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1, data);

			data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0);
			data &= ~(PLL_POWER_STATE_IN_OFF_0_MASK | PLL_POWER_STATE_IN_TXS2_0_MASK);
			data |= PLL_POWER_STATE_IN_OFF_0(7) | PLL_POWER_STATE_IN_TXS2_0(7);
			if (data != orig)
				WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0, data);

			data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1);
			data &= ~(PLL_POWER_STATE_IN_OFF_1_MASK | PLL_POWER_STATE_IN_TXS2_1_MASK);
			data |= PLL_POWER_STATE_IN_OFF_1(7) | PLL_POWER_STATE_IN_TXS2_1(7);
			if (data != orig)
				WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1, data);

			/* BARTS+ additionally program PLL ramp-up times */
			if (rdev->family >= CHIP_BARTS) {
				data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0);
				data &= ~PLL_RAMP_UP_TIME_0_MASK;
				data |= PLL_RAMP_UP_TIME_0(4);
				if (data != orig)
					WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0, data);

				data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1);
				data &= ~PLL_RAMP_UP_TIME_1_MASK;
				data |= PLL_RAMP_UP_TIME_1(4);
				if (data != orig)
					WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1, data);

				data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0);
				data &= ~PLL_RAMP_UP_TIME_0_MASK;
				data |= PLL_RAMP_UP_TIME_0(4);
				if (data != orig)
					WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0, data);

				data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1);
				data &= ~PLL_RAMP_UP_TIME_1_MASK;
				data |= PLL_RAMP_UP_TIME_1(4);
				if (data != orig)
					WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1, data);
			}

			data = orig = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
			data &= ~LC_DYN_LANES_PWR_STATE_MASK;
			data |= LC_DYN_LANES_PWR_STATE(3);
			if (data != orig)
				WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, data);

			/* BARTS+ also set the LS2 exit time */
			if (rdev->family >= CHIP_BARTS) {
				data = orig = RREG32_PIF_PHY0(PB0_PIF_CNTL);
				data &= ~LS2_EXIT_TIME_MASK;
				data |= LS2_EXIT_TIME(1);
				if (data != orig)
					WREG32_PIF_PHY0(PB0_PIF_CNTL, data);

				data = orig = RREG32_PIF_PHY1(PB1_PIF_CNTL);
				data &= ~LS2_EXIT_TIME_MASK;
				data |= LS2_EXIT_TIME(1);
				if (data != orig)
					WREG32_PIF_PHY1(PB1_PIF_CNTL, data);
			}
		}
	}

	/* evergreen parts only */
	if (rdev->family < CHIP_BARTS)
		pcie_lc_cntl |= LC_PMI_TO_L1_DIS;

	if (pcie_lc_cntl != pcie_lc_cntl_old)
		WREG32_PCIE_PORT(PCIE_LC_CNTL, pcie_lc_cntl);
}
5717