/* xref: /dragonfly/sys/dev/drm/radeon/evergreen.c (revision b5302a4e) */
/*
 * Copyright 2010 Advanced Micro Devices, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Alex Deucher
 */
24 #include <linux/firmware.h>
25 #include <drm/drmP.h>
26 #include "radeon.h"
27 #include "radeon_asic.h"
28 #include <uapi_drm/radeon_drm.h>
29 #include "evergreend.h"
30 #include "atom.h"
31 #include "avivod.h"
32 #include "evergreen_reg.h"
33 #include "evergreen_blit_shaders.h"
34 #include "radeon_ucode.h"
35 
36 static const u32 crtc_offsets[6] =
37 {
38 	EVERGREEN_CRTC0_REGISTER_OFFSET,
39 	EVERGREEN_CRTC1_REGISTER_OFFSET,
40 	EVERGREEN_CRTC2_REGISTER_OFFSET,
41 	EVERGREEN_CRTC3_REGISTER_OFFSET,
42 	EVERGREEN_CRTC4_REGISTER_OFFSET,
43 	EVERGREEN_CRTC5_REGISTER_OFFSET
44 };
45 
46 #include "clearstate_evergreen.h"
47 
48 static const u32 sumo_rlc_save_restore_register_list[] =
49 {
50 	0x98fc,
51 	0x9830,
52 	0x9834,
53 	0x9838,
54 	0x9870,
55 	0x9874,
56 	0x8a14,
57 	0x8b24,
58 	0x8bcc,
59 	0x8b10,
60 	0x8d00,
61 	0x8d04,
62 	0x8c00,
63 	0x8c04,
64 	0x8c08,
65 	0x8c0c,
66 	0x8d8c,
67 	0x8c20,
68 	0x8c24,
69 	0x8c28,
70 	0x8c18,
71 	0x8c1c,
72 	0x8cf0,
73 	0x8e2c,
74 	0x8e38,
75 	0x8c30,
76 	0x9508,
77 	0x9688,
78 	0x9608,
79 	0x960c,
80 	0x9610,
81 	0x9614,
82 	0x88c4,
83 	0x88d4,
84 	0xa008,
85 	0x900c,
86 	0x9100,
87 	0x913c,
88 	0x98f8,
89 	0x98f4,
90 	0x9b7c,
91 	0x3f8c,
92 	0x8950,
93 	0x8954,
94 	0x8a18,
95 	0x8b28,
96 	0x9144,
97 	0x9148,
98 	0x914c,
99 	0x3f90,
100 	0x3f94,
101 	0x915c,
102 	0x9160,
103 	0x9178,
104 	0x917c,
105 	0x9180,
106 	0x918c,
107 	0x9190,
108 	0x9194,
109 	0x9198,
110 	0x919c,
111 	0x91a8,
112 	0x91ac,
113 	0x91b0,
114 	0x91b4,
115 	0x91b8,
116 	0x91c4,
117 	0x91c8,
118 	0x91cc,
119 	0x91d0,
120 	0x91d4,
121 	0x91e0,
122 	0x91e4,
123 	0x91ec,
124 	0x91f0,
125 	0x91f4,
126 	0x9200,
127 	0x9204,
128 	0x929c,
129 	0x9150,
130 	0x802c,
131 };
132 
133 static void evergreen_gpu_init(struct radeon_device *rdev);
134 
135 static const u32 evergreen_golden_registers[] =
136 {
137 	0x3f90, 0xffff0000, 0xff000000,
138 	0x9148, 0xffff0000, 0xff000000,
139 	0x3f94, 0xffff0000, 0xff000000,
140 	0x914c, 0xffff0000, 0xff000000,
141 	0x9b7c, 0xffffffff, 0x00000000,
142 	0x8a14, 0xffffffff, 0x00000007,
143 	0x8b10, 0xffffffff, 0x00000000,
144 	0x960c, 0xffffffff, 0x54763210,
145 	0x88c4, 0xffffffff, 0x000000c2,
146 	0x88d4, 0xffffffff, 0x00000010,
147 	0x8974, 0xffffffff, 0x00000000,
148 	0xc78, 0x00000080, 0x00000080,
149 	0x5eb4, 0xffffffff, 0x00000002,
150 	0x5e78, 0xffffffff, 0x001000f0,
151 	0x6104, 0x01000300, 0x00000000,
152 	0x5bc0, 0x00300000, 0x00000000,
153 	0x7030, 0xffffffff, 0x00000011,
154 	0x7c30, 0xffffffff, 0x00000011,
155 	0x10830, 0xffffffff, 0x00000011,
156 	0x11430, 0xffffffff, 0x00000011,
157 	0x12030, 0xffffffff, 0x00000011,
158 	0x12c30, 0xffffffff, 0x00000011,
159 	0xd02c, 0xffffffff, 0x08421000,
160 	0x240c, 0xffffffff, 0x00000380,
161 	0x8b24, 0xffffffff, 0x00ff0fff,
162 	0x28a4c, 0x06000000, 0x06000000,
163 	0x10c, 0x00000001, 0x00000001,
164 	0x8d00, 0xffffffff, 0x100e4848,
165 	0x8d04, 0xffffffff, 0x00164745,
166 	0x8c00, 0xffffffff, 0xe4000003,
167 	0x8c04, 0xffffffff, 0x40600060,
168 	0x8c08, 0xffffffff, 0x001c001c,
169 	0x8cf0, 0xffffffff, 0x08e00620,
170 	0x8c20, 0xffffffff, 0x00800080,
171 	0x8c24, 0xffffffff, 0x00800080,
172 	0x8c18, 0xffffffff, 0x20202078,
173 	0x8c1c, 0xffffffff, 0x00001010,
174 	0x28350, 0xffffffff, 0x00000000,
175 	0xa008, 0xffffffff, 0x00010000,
176 	0x5c4, 0xffffffff, 0x00000001,
177 	0x9508, 0xffffffff, 0x00000002,
178 	0x913c, 0x0000000f, 0x0000000a
179 };
180 
181 static const u32 evergreen_golden_registers2[] =
182 {
183 	0x2f4c, 0xffffffff, 0x00000000,
184 	0x54f4, 0xffffffff, 0x00000000,
185 	0x54f0, 0xffffffff, 0x00000000,
186 	0x5498, 0xffffffff, 0x00000000,
187 	0x549c, 0xffffffff, 0x00000000,
188 	0x5494, 0xffffffff, 0x00000000,
189 	0x53cc, 0xffffffff, 0x00000000,
190 	0x53c8, 0xffffffff, 0x00000000,
191 	0x53c4, 0xffffffff, 0x00000000,
192 	0x53c0, 0xffffffff, 0x00000000,
193 	0x53bc, 0xffffffff, 0x00000000,
194 	0x53b8, 0xffffffff, 0x00000000,
195 	0x53b4, 0xffffffff, 0x00000000,
196 	0x53b0, 0xffffffff, 0x00000000
197 };
198 
199 static const u32 cypress_mgcg_init[] =
200 {
201 	0x802c, 0xffffffff, 0xc0000000,
202 	0x5448, 0xffffffff, 0x00000100,
203 	0x55e4, 0xffffffff, 0x00000100,
204 	0x160c, 0xffffffff, 0x00000100,
205 	0x5644, 0xffffffff, 0x00000100,
206 	0xc164, 0xffffffff, 0x00000100,
207 	0x8a18, 0xffffffff, 0x00000100,
208 	0x897c, 0xffffffff, 0x06000100,
209 	0x8b28, 0xffffffff, 0x00000100,
210 	0x9144, 0xffffffff, 0x00000100,
211 	0x9a60, 0xffffffff, 0x00000100,
212 	0x9868, 0xffffffff, 0x00000100,
213 	0x8d58, 0xffffffff, 0x00000100,
214 	0x9510, 0xffffffff, 0x00000100,
215 	0x949c, 0xffffffff, 0x00000100,
216 	0x9654, 0xffffffff, 0x00000100,
217 	0x9030, 0xffffffff, 0x00000100,
218 	0x9034, 0xffffffff, 0x00000100,
219 	0x9038, 0xffffffff, 0x00000100,
220 	0x903c, 0xffffffff, 0x00000100,
221 	0x9040, 0xffffffff, 0x00000100,
222 	0xa200, 0xffffffff, 0x00000100,
223 	0xa204, 0xffffffff, 0x00000100,
224 	0xa208, 0xffffffff, 0x00000100,
225 	0xa20c, 0xffffffff, 0x00000100,
226 	0x971c, 0xffffffff, 0x00000100,
227 	0x977c, 0xffffffff, 0x00000100,
228 	0x3f80, 0xffffffff, 0x00000100,
229 	0xa210, 0xffffffff, 0x00000100,
230 	0xa214, 0xffffffff, 0x00000100,
231 	0x4d8, 0xffffffff, 0x00000100,
232 	0x9784, 0xffffffff, 0x00000100,
233 	0x9698, 0xffffffff, 0x00000100,
234 	0x4d4, 0xffffffff, 0x00000200,
235 	0x30cc, 0xffffffff, 0x00000100,
236 	0xd0c0, 0xffffffff, 0xff000100,
237 	0x802c, 0xffffffff, 0x40000000,
238 	0x915c, 0xffffffff, 0x00010000,
239 	0x9160, 0xffffffff, 0x00030002,
240 	0x9178, 0xffffffff, 0x00070000,
241 	0x917c, 0xffffffff, 0x00030002,
242 	0x9180, 0xffffffff, 0x00050004,
243 	0x918c, 0xffffffff, 0x00010006,
244 	0x9190, 0xffffffff, 0x00090008,
245 	0x9194, 0xffffffff, 0x00070000,
246 	0x9198, 0xffffffff, 0x00030002,
247 	0x919c, 0xffffffff, 0x00050004,
248 	0x91a8, 0xffffffff, 0x00010006,
249 	0x91ac, 0xffffffff, 0x00090008,
250 	0x91b0, 0xffffffff, 0x00070000,
251 	0x91b4, 0xffffffff, 0x00030002,
252 	0x91b8, 0xffffffff, 0x00050004,
253 	0x91c4, 0xffffffff, 0x00010006,
254 	0x91c8, 0xffffffff, 0x00090008,
255 	0x91cc, 0xffffffff, 0x00070000,
256 	0x91d0, 0xffffffff, 0x00030002,
257 	0x91d4, 0xffffffff, 0x00050004,
258 	0x91e0, 0xffffffff, 0x00010006,
259 	0x91e4, 0xffffffff, 0x00090008,
260 	0x91e8, 0xffffffff, 0x00000000,
261 	0x91ec, 0xffffffff, 0x00070000,
262 	0x91f0, 0xffffffff, 0x00030002,
263 	0x91f4, 0xffffffff, 0x00050004,
264 	0x9200, 0xffffffff, 0x00010006,
265 	0x9204, 0xffffffff, 0x00090008,
266 	0x9208, 0xffffffff, 0x00070000,
267 	0x920c, 0xffffffff, 0x00030002,
268 	0x9210, 0xffffffff, 0x00050004,
269 	0x921c, 0xffffffff, 0x00010006,
270 	0x9220, 0xffffffff, 0x00090008,
271 	0x9224, 0xffffffff, 0x00070000,
272 	0x9228, 0xffffffff, 0x00030002,
273 	0x922c, 0xffffffff, 0x00050004,
274 	0x9238, 0xffffffff, 0x00010006,
275 	0x923c, 0xffffffff, 0x00090008,
276 	0x9240, 0xffffffff, 0x00070000,
277 	0x9244, 0xffffffff, 0x00030002,
278 	0x9248, 0xffffffff, 0x00050004,
279 	0x9254, 0xffffffff, 0x00010006,
280 	0x9258, 0xffffffff, 0x00090008,
281 	0x925c, 0xffffffff, 0x00070000,
282 	0x9260, 0xffffffff, 0x00030002,
283 	0x9264, 0xffffffff, 0x00050004,
284 	0x9270, 0xffffffff, 0x00010006,
285 	0x9274, 0xffffffff, 0x00090008,
286 	0x9278, 0xffffffff, 0x00070000,
287 	0x927c, 0xffffffff, 0x00030002,
288 	0x9280, 0xffffffff, 0x00050004,
289 	0x928c, 0xffffffff, 0x00010006,
290 	0x9290, 0xffffffff, 0x00090008,
291 	0x9294, 0xffffffff, 0x00000000,
292 	0x929c, 0xffffffff, 0x00000001,
293 	0x802c, 0xffffffff, 0x40010000,
294 	0x915c, 0xffffffff, 0x00010000,
295 	0x9160, 0xffffffff, 0x00030002,
296 	0x9178, 0xffffffff, 0x00070000,
297 	0x917c, 0xffffffff, 0x00030002,
298 	0x9180, 0xffffffff, 0x00050004,
299 	0x918c, 0xffffffff, 0x00010006,
300 	0x9190, 0xffffffff, 0x00090008,
301 	0x9194, 0xffffffff, 0x00070000,
302 	0x9198, 0xffffffff, 0x00030002,
303 	0x919c, 0xffffffff, 0x00050004,
304 	0x91a8, 0xffffffff, 0x00010006,
305 	0x91ac, 0xffffffff, 0x00090008,
306 	0x91b0, 0xffffffff, 0x00070000,
307 	0x91b4, 0xffffffff, 0x00030002,
308 	0x91b8, 0xffffffff, 0x00050004,
309 	0x91c4, 0xffffffff, 0x00010006,
310 	0x91c8, 0xffffffff, 0x00090008,
311 	0x91cc, 0xffffffff, 0x00070000,
312 	0x91d0, 0xffffffff, 0x00030002,
313 	0x91d4, 0xffffffff, 0x00050004,
314 	0x91e0, 0xffffffff, 0x00010006,
315 	0x91e4, 0xffffffff, 0x00090008,
316 	0x91e8, 0xffffffff, 0x00000000,
317 	0x91ec, 0xffffffff, 0x00070000,
318 	0x91f0, 0xffffffff, 0x00030002,
319 	0x91f4, 0xffffffff, 0x00050004,
320 	0x9200, 0xffffffff, 0x00010006,
321 	0x9204, 0xffffffff, 0x00090008,
322 	0x9208, 0xffffffff, 0x00070000,
323 	0x920c, 0xffffffff, 0x00030002,
324 	0x9210, 0xffffffff, 0x00050004,
325 	0x921c, 0xffffffff, 0x00010006,
326 	0x9220, 0xffffffff, 0x00090008,
327 	0x9224, 0xffffffff, 0x00070000,
328 	0x9228, 0xffffffff, 0x00030002,
329 	0x922c, 0xffffffff, 0x00050004,
330 	0x9238, 0xffffffff, 0x00010006,
331 	0x923c, 0xffffffff, 0x00090008,
332 	0x9240, 0xffffffff, 0x00070000,
333 	0x9244, 0xffffffff, 0x00030002,
334 	0x9248, 0xffffffff, 0x00050004,
335 	0x9254, 0xffffffff, 0x00010006,
336 	0x9258, 0xffffffff, 0x00090008,
337 	0x925c, 0xffffffff, 0x00070000,
338 	0x9260, 0xffffffff, 0x00030002,
339 	0x9264, 0xffffffff, 0x00050004,
340 	0x9270, 0xffffffff, 0x00010006,
341 	0x9274, 0xffffffff, 0x00090008,
342 	0x9278, 0xffffffff, 0x00070000,
343 	0x927c, 0xffffffff, 0x00030002,
344 	0x9280, 0xffffffff, 0x00050004,
345 	0x928c, 0xffffffff, 0x00010006,
346 	0x9290, 0xffffffff, 0x00090008,
347 	0x9294, 0xffffffff, 0x00000000,
348 	0x929c, 0xffffffff, 0x00000001,
349 	0x802c, 0xffffffff, 0xc0000000
350 };
351 
352 static const u32 redwood_mgcg_init[] =
353 {
354 	0x802c, 0xffffffff, 0xc0000000,
355 	0x5448, 0xffffffff, 0x00000100,
356 	0x55e4, 0xffffffff, 0x00000100,
357 	0x160c, 0xffffffff, 0x00000100,
358 	0x5644, 0xffffffff, 0x00000100,
359 	0xc164, 0xffffffff, 0x00000100,
360 	0x8a18, 0xffffffff, 0x00000100,
361 	0x897c, 0xffffffff, 0x06000100,
362 	0x8b28, 0xffffffff, 0x00000100,
363 	0x9144, 0xffffffff, 0x00000100,
364 	0x9a60, 0xffffffff, 0x00000100,
365 	0x9868, 0xffffffff, 0x00000100,
366 	0x8d58, 0xffffffff, 0x00000100,
367 	0x9510, 0xffffffff, 0x00000100,
368 	0x949c, 0xffffffff, 0x00000100,
369 	0x9654, 0xffffffff, 0x00000100,
370 	0x9030, 0xffffffff, 0x00000100,
371 	0x9034, 0xffffffff, 0x00000100,
372 	0x9038, 0xffffffff, 0x00000100,
373 	0x903c, 0xffffffff, 0x00000100,
374 	0x9040, 0xffffffff, 0x00000100,
375 	0xa200, 0xffffffff, 0x00000100,
376 	0xa204, 0xffffffff, 0x00000100,
377 	0xa208, 0xffffffff, 0x00000100,
378 	0xa20c, 0xffffffff, 0x00000100,
379 	0x971c, 0xffffffff, 0x00000100,
380 	0x977c, 0xffffffff, 0x00000100,
381 	0x3f80, 0xffffffff, 0x00000100,
382 	0xa210, 0xffffffff, 0x00000100,
383 	0xa214, 0xffffffff, 0x00000100,
384 	0x4d8, 0xffffffff, 0x00000100,
385 	0x9784, 0xffffffff, 0x00000100,
386 	0x9698, 0xffffffff, 0x00000100,
387 	0x4d4, 0xffffffff, 0x00000200,
388 	0x30cc, 0xffffffff, 0x00000100,
389 	0xd0c0, 0xffffffff, 0xff000100,
390 	0x802c, 0xffffffff, 0x40000000,
391 	0x915c, 0xffffffff, 0x00010000,
392 	0x9160, 0xffffffff, 0x00030002,
393 	0x9178, 0xffffffff, 0x00070000,
394 	0x917c, 0xffffffff, 0x00030002,
395 	0x9180, 0xffffffff, 0x00050004,
396 	0x918c, 0xffffffff, 0x00010006,
397 	0x9190, 0xffffffff, 0x00090008,
398 	0x9194, 0xffffffff, 0x00070000,
399 	0x9198, 0xffffffff, 0x00030002,
400 	0x919c, 0xffffffff, 0x00050004,
401 	0x91a8, 0xffffffff, 0x00010006,
402 	0x91ac, 0xffffffff, 0x00090008,
403 	0x91b0, 0xffffffff, 0x00070000,
404 	0x91b4, 0xffffffff, 0x00030002,
405 	0x91b8, 0xffffffff, 0x00050004,
406 	0x91c4, 0xffffffff, 0x00010006,
407 	0x91c8, 0xffffffff, 0x00090008,
408 	0x91cc, 0xffffffff, 0x00070000,
409 	0x91d0, 0xffffffff, 0x00030002,
410 	0x91d4, 0xffffffff, 0x00050004,
411 	0x91e0, 0xffffffff, 0x00010006,
412 	0x91e4, 0xffffffff, 0x00090008,
413 	0x91e8, 0xffffffff, 0x00000000,
414 	0x91ec, 0xffffffff, 0x00070000,
415 	0x91f0, 0xffffffff, 0x00030002,
416 	0x91f4, 0xffffffff, 0x00050004,
417 	0x9200, 0xffffffff, 0x00010006,
418 	0x9204, 0xffffffff, 0x00090008,
419 	0x9294, 0xffffffff, 0x00000000,
420 	0x929c, 0xffffffff, 0x00000001,
421 	0x802c, 0xffffffff, 0xc0000000
422 };
423 
424 static const u32 cedar_golden_registers[] =
425 {
426 	0x3f90, 0xffff0000, 0xff000000,
427 	0x9148, 0xffff0000, 0xff000000,
428 	0x3f94, 0xffff0000, 0xff000000,
429 	0x914c, 0xffff0000, 0xff000000,
430 	0x9b7c, 0xffffffff, 0x00000000,
431 	0x8a14, 0xffffffff, 0x00000007,
432 	0x8b10, 0xffffffff, 0x00000000,
433 	0x960c, 0xffffffff, 0x54763210,
434 	0x88c4, 0xffffffff, 0x000000c2,
435 	0x88d4, 0xffffffff, 0x00000000,
436 	0x8974, 0xffffffff, 0x00000000,
437 	0xc78, 0x00000080, 0x00000080,
438 	0x5eb4, 0xffffffff, 0x00000002,
439 	0x5e78, 0xffffffff, 0x001000f0,
440 	0x6104, 0x01000300, 0x00000000,
441 	0x5bc0, 0x00300000, 0x00000000,
442 	0x7030, 0xffffffff, 0x00000011,
443 	0x7c30, 0xffffffff, 0x00000011,
444 	0x10830, 0xffffffff, 0x00000011,
445 	0x11430, 0xffffffff, 0x00000011,
446 	0xd02c, 0xffffffff, 0x08421000,
447 	0x240c, 0xffffffff, 0x00000380,
448 	0x8b24, 0xffffffff, 0x00ff0fff,
449 	0x28a4c, 0x06000000, 0x06000000,
450 	0x10c, 0x00000001, 0x00000001,
451 	0x8d00, 0xffffffff, 0x100e4848,
452 	0x8d04, 0xffffffff, 0x00164745,
453 	0x8c00, 0xffffffff, 0xe4000003,
454 	0x8c04, 0xffffffff, 0x40600060,
455 	0x8c08, 0xffffffff, 0x001c001c,
456 	0x8cf0, 0xffffffff, 0x08e00410,
457 	0x8c20, 0xffffffff, 0x00800080,
458 	0x8c24, 0xffffffff, 0x00800080,
459 	0x8c18, 0xffffffff, 0x20202078,
460 	0x8c1c, 0xffffffff, 0x00001010,
461 	0x28350, 0xffffffff, 0x00000000,
462 	0xa008, 0xffffffff, 0x00010000,
463 	0x5c4, 0xffffffff, 0x00000001,
464 	0x9508, 0xffffffff, 0x00000002
465 };
466 
467 static const u32 cedar_mgcg_init[] =
468 {
469 	0x802c, 0xffffffff, 0xc0000000,
470 	0x5448, 0xffffffff, 0x00000100,
471 	0x55e4, 0xffffffff, 0x00000100,
472 	0x160c, 0xffffffff, 0x00000100,
473 	0x5644, 0xffffffff, 0x00000100,
474 	0xc164, 0xffffffff, 0x00000100,
475 	0x8a18, 0xffffffff, 0x00000100,
476 	0x897c, 0xffffffff, 0x06000100,
477 	0x8b28, 0xffffffff, 0x00000100,
478 	0x9144, 0xffffffff, 0x00000100,
479 	0x9a60, 0xffffffff, 0x00000100,
480 	0x9868, 0xffffffff, 0x00000100,
481 	0x8d58, 0xffffffff, 0x00000100,
482 	0x9510, 0xffffffff, 0x00000100,
483 	0x949c, 0xffffffff, 0x00000100,
484 	0x9654, 0xffffffff, 0x00000100,
485 	0x9030, 0xffffffff, 0x00000100,
486 	0x9034, 0xffffffff, 0x00000100,
487 	0x9038, 0xffffffff, 0x00000100,
488 	0x903c, 0xffffffff, 0x00000100,
489 	0x9040, 0xffffffff, 0x00000100,
490 	0xa200, 0xffffffff, 0x00000100,
491 	0xa204, 0xffffffff, 0x00000100,
492 	0xa208, 0xffffffff, 0x00000100,
493 	0xa20c, 0xffffffff, 0x00000100,
494 	0x971c, 0xffffffff, 0x00000100,
495 	0x977c, 0xffffffff, 0x00000100,
496 	0x3f80, 0xffffffff, 0x00000100,
497 	0xa210, 0xffffffff, 0x00000100,
498 	0xa214, 0xffffffff, 0x00000100,
499 	0x4d8, 0xffffffff, 0x00000100,
500 	0x9784, 0xffffffff, 0x00000100,
501 	0x9698, 0xffffffff, 0x00000100,
502 	0x4d4, 0xffffffff, 0x00000200,
503 	0x30cc, 0xffffffff, 0x00000100,
504 	0xd0c0, 0xffffffff, 0xff000100,
505 	0x802c, 0xffffffff, 0x40000000,
506 	0x915c, 0xffffffff, 0x00010000,
507 	0x9178, 0xffffffff, 0x00050000,
508 	0x917c, 0xffffffff, 0x00030002,
509 	0x918c, 0xffffffff, 0x00010004,
510 	0x9190, 0xffffffff, 0x00070006,
511 	0x9194, 0xffffffff, 0x00050000,
512 	0x9198, 0xffffffff, 0x00030002,
513 	0x91a8, 0xffffffff, 0x00010004,
514 	0x91ac, 0xffffffff, 0x00070006,
515 	0x91e8, 0xffffffff, 0x00000000,
516 	0x9294, 0xffffffff, 0x00000000,
517 	0x929c, 0xffffffff, 0x00000001,
518 	0x802c, 0xffffffff, 0xc0000000
519 };
520 
521 static const u32 juniper_mgcg_init[] =
522 {
523 	0x802c, 0xffffffff, 0xc0000000,
524 	0x5448, 0xffffffff, 0x00000100,
525 	0x55e4, 0xffffffff, 0x00000100,
526 	0x160c, 0xffffffff, 0x00000100,
527 	0x5644, 0xffffffff, 0x00000100,
528 	0xc164, 0xffffffff, 0x00000100,
529 	0x8a18, 0xffffffff, 0x00000100,
530 	0x897c, 0xffffffff, 0x06000100,
531 	0x8b28, 0xffffffff, 0x00000100,
532 	0x9144, 0xffffffff, 0x00000100,
533 	0x9a60, 0xffffffff, 0x00000100,
534 	0x9868, 0xffffffff, 0x00000100,
535 	0x8d58, 0xffffffff, 0x00000100,
536 	0x9510, 0xffffffff, 0x00000100,
537 	0x949c, 0xffffffff, 0x00000100,
538 	0x9654, 0xffffffff, 0x00000100,
539 	0x9030, 0xffffffff, 0x00000100,
540 	0x9034, 0xffffffff, 0x00000100,
541 	0x9038, 0xffffffff, 0x00000100,
542 	0x903c, 0xffffffff, 0x00000100,
543 	0x9040, 0xffffffff, 0x00000100,
544 	0xa200, 0xffffffff, 0x00000100,
545 	0xa204, 0xffffffff, 0x00000100,
546 	0xa208, 0xffffffff, 0x00000100,
547 	0xa20c, 0xffffffff, 0x00000100,
548 	0x971c, 0xffffffff, 0x00000100,
549 	0xd0c0, 0xffffffff, 0xff000100,
550 	0x802c, 0xffffffff, 0x40000000,
551 	0x915c, 0xffffffff, 0x00010000,
552 	0x9160, 0xffffffff, 0x00030002,
553 	0x9178, 0xffffffff, 0x00070000,
554 	0x917c, 0xffffffff, 0x00030002,
555 	0x9180, 0xffffffff, 0x00050004,
556 	0x918c, 0xffffffff, 0x00010006,
557 	0x9190, 0xffffffff, 0x00090008,
558 	0x9194, 0xffffffff, 0x00070000,
559 	0x9198, 0xffffffff, 0x00030002,
560 	0x919c, 0xffffffff, 0x00050004,
561 	0x91a8, 0xffffffff, 0x00010006,
562 	0x91ac, 0xffffffff, 0x00090008,
563 	0x91b0, 0xffffffff, 0x00070000,
564 	0x91b4, 0xffffffff, 0x00030002,
565 	0x91b8, 0xffffffff, 0x00050004,
566 	0x91c4, 0xffffffff, 0x00010006,
567 	0x91c8, 0xffffffff, 0x00090008,
568 	0x91cc, 0xffffffff, 0x00070000,
569 	0x91d0, 0xffffffff, 0x00030002,
570 	0x91d4, 0xffffffff, 0x00050004,
571 	0x91e0, 0xffffffff, 0x00010006,
572 	0x91e4, 0xffffffff, 0x00090008,
573 	0x91e8, 0xffffffff, 0x00000000,
574 	0x91ec, 0xffffffff, 0x00070000,
575 	0x91f0, 0xffffffff, 0x00030002,
576 	0x91f4, 0xffffffff, 0x00050004,
577 	0x9200, 0xffffffff, 0x00010006,
578 	0x9204, 0xffffffff, 0x00090008,
579 	0x9208, 0xffffffff, 0x00070000,
580 	0x920c, 0xffffffff, 0x00030002,
581 	0x9210, 0xffffffff, 0x00050004,
582 	0x921c, 0xffffffff, 0x00010006,
583 	0x9220, 0xffffffff, 0x00090008,
584 	0x9224, 0xffffffff, 0x00070000,
585 	0x9228, 0xffffffff, 0x00030002,
586 	0x922c, 0xffffffff, 0x00050004,
587 	0x9238, 0xffffffff, 0x00010006,
588 	0x923c, 0xffffffff, 0x00090008,
589 	0x9240, 0xffffffff, 0x00070000,
590 	0x9244, 0xffffffff, 0x00030002,
591 	0x9248, 0xffffffff, 0x00050004,
592 	0x9254, 0xffffffff, 0x00010006,
593 	0x9258, 0xffffffff, 0x00090008,
594 	0x925c, 0xffffffff, 0x00070000,
595 	0x9260, 0xffffffff, 0x00030002,
596 	0x9264, 0xffffffff, 0x00050004,
597 	0x9270, 0xffffffff, 0x00010006,
598 	0x9274, 0xffffffff, 0x00090008,
599 	0x9278, 0xffffffff, 0x00070000,
600 	0x927c, 0xffffffff, 0x00030002,
601 	0x9280, 0xffffffff, 0x00050004,
602 	0x928c, 0xffffffff, 0x00010006,
603 	0x9290, 0xffffffff, 0x00090008,
604 	0x9294, 0xffffffff, 0x00000000,
605 	0x929c, 0xffffffff, 0x00000001,
606 	0x802c, 0xffffffff, 0xc0000000,
607 	0x977c, 0xffffffff, 0x00000100,
608 	0x3f80, 0xffffffff, 0x00000100,
609 	0xa210, 0xffffffff, 0x00000100,
610 	0xa214, 0xffffffff, 0x00000100,
611 	0x4d8, 0xffffffff, 0x00000100,
612 	0x9784, 0xffffffff, 0x00000100,
613 	0x9698, 0xffffffff, 0x00000100,
614 	0x4d4, 0xffffffff, 0x00000200,
615 	0x30cc, 0xffffffff, 0x00000100,
616 	0x802c, 0xffffffff, 0xc0000000
617 };
618 
619 static const u32 supersumo_golden_registers[] =
620 {
621 	0x5eb4, 0xffffffff, 0x00000002,
622 	0x5c4, 0xffffffff, 0x00000001,
623 	0x7030, 0xffffffff, 0x00000011,
624 	0x7c30, 0xffffffff, 0x00000011,
625 	0x6104, 0x01000300, 0x00000000,
626 	0x5bc0, 0x00300000, 0x00000000,
627 	0x8c04, 0xffffffff, 0x40600060,
628 	0x8c08, 0xffffffff, 0x001c001c,
629 	0x8c20, 0xffffffff, 0x00800080,
630 	0x8c24, 0xffffffff, 0x00800080,
631 	0x8c18, 0xffffffff, 0x20202078,
632 	0x8c1c, 0xffffffff, 0x00001010,
633 	0x918c, 0xffffffff, 0x00010006,
634 	0x91a8, 0xffffffff, 0x00010006,
635 	0x91c4, 0xffffffff, 0x00010006,
636 	0x91e0, 0xffffffff, 0x00010006,
637 	0x9200, 0xffffffff, 0x00010006,
638 	0x9150, 0xffffffff, 0x6e944040,
639 	0x917c, 0xffffffff, 0x00030002,
640 	0x9180, 0xffffffff, 0x00050004,
641 	0x9198, 0xffffffff, 0x00030002,
642 	0x919c, 0xffffffff, 0x00050004,
643 	0x91b4, 0xffffffff, 0x00030002,
644 	0x91b8, 0xffffffff, 0x00050004,
645 	0x91d0, 0xffffffff, 0x00030002,
646 	0x91d4, 0xffffffff, 0x00050004,
647 	0x91f0, 0xffffffff, 0x00030002,
648 	0x91f4, 0xffffffff, 0x00050004,
649 	0x915c, 0xffffffff, 0x00010000,
650 	0x9160, 0xffffffff, 0x00030002,
651 	0x3f90, 0xffff0000, 0xff000000,
652 	0x9178, 0xffffffff, 0x00070000,
653 	0x9194, 0xffffffff, 0x00070000,
654 	0x91b0, 0xffffffff, 0x00070000,
655 	0x91cc, 0xffffffff, 0x00070000,
656 	0x91ec, 0xffffffff, 0x00070000,
657 	0x9148, 0xffff0000, 0xff000000,
658 	0x9190, 0xffffffff, 0x00090008,
659 	0x91ac, 0xffffffff, 0x00090008,
660 	0x91c8, 0xffffffff, 0x00090008,
661 	0x91e4, 0xffffffff, 0x00090008,
662 	0x9204, 0xffffffff, 0x00090008,
663 	0x3f94, 0xffff0000, 0xff000000,
664 	0x914c, 0xffff0000, 0xff000000,
665 	0x929c, 0xffffffff, 0x00000001,
666 	0x8a18, 0xffffffff, 0x00000100,
667 	0x8b28, 0xffffffff, 0x00000100,
668 	0x9144, 0xffffffff, 0x00000100,
669 	0x5644, 0xffffffff, 0x00000100,
670 	0x9b7c, 0xffffffff, 0x00000000,
671 	0x8030, 0xffffffff, 0x0000100a,
672 	0x8a14, 0xffffffff, 0x00000007,
673 	0x8b24, 0xffffffff, 0x00ff0fff,
674 	0x8b10, 0xffffffff, 0x00000000,
675 	0x28a4c, 0x06000000, 0x06000000,
676 	0x4d8, 0xffffffff, 0x00000100,
677 	0x913c, 0xffff000f, 0x0100000a,
678 	0x960c, 0xffffffff, 0x54763210,
679 	0x88c4, 0xffffffff, 0x000000c2,
680 	0x88d4, 0xffffffff, 0x00000010,
681 	0x8974, 0xffffffff, 0x00000000,
682 	0xc78, 0x00000080, 0x00000080,
683 	0x5e78, 0xffffffff, 0x001000f0,
684 	0xd02c, 0xffffffff, 0x08421000,
685 	0xa008, 0xffffffff, 0x00010000,
686 	0x8d00, 0xffffffff, 0x100e4848,
687 	0x8d04, 0xffffffff, 0x00164745,
688 	0x8c00, 0xffffffff, 0xe4000003,
689 	0x8cf0, 0x1fffffff, 0x08e00620,
690 	0x28350, 0xffffffff, 0x00000000,
691 	0x9508, 0xffffffff, 0x00000002
692 };
693 
694 static const u32 sumo_golden_registers[] =
695 {
696 	0x900c, 0x00ffffff, 0x0017071f,
697 	0x8c18, 0xffffffff, 0x10101060,
698 	0x8c1c, 0xffffffff, 0x00001010,
699 	0x8c30, 0x0000000f, 0x00000005,
700 	0x9688, 0x0000000f, 0x00000007
701 };
702 
703 static const u32 wrestler_golden_registers[] =
704 {
705 	0x5eb4, 0xffffffff, 0x00000002,
706 	0x5c4, 0xffffffff, 0x00000001,
707 	0x7030, 0xffffffff, 0x00000011,
708 	0x7c30, 0xffffffff, 0x00000011,
709 	0x6104, 0x01000300, 0x00000000,
710 	0x5bc0, 0x00300000, 0x00000000,
711 	0x918c, 0xffffffff, 0x00010006,
712 	0x91a8, 0xffffffff, 0x00010006,
713 	0x9150, 0xffffffff, 0x6e944040,
714 	0x917c, 0xffffffff, 0x00030002,
715 	0x9198, 0xffffffff, 0x00030002,
716 	0x915c, 0xffffffff, 0x00010000,
717 	0x3f90, 0xffff0000, 0xff000000,
718 	0x9178, 0xffffffff, 0x00070000,
719 	0x9194, 0xffffffff, 0x00070000,
720 	0x9148, 0xffff0000, 0xff000000,
721 	0x9190, 0xffffffff, 0x00090008,
722 	0x91ac, 0xffffffff, 0x00090008,
723 	0x3f94, 0xffff0000, 0xff000000,
724 	0x914c, 0xffff0000, 0xff000000,
725 	0x929c, 0xffffffff, 0x00000001,
726 	0x8a18, 0xffffffff, 0x00000100,
727 	0x8b28, 0xffffffff, 0x00000100,
728 	0x9144, 0xffffffff, 0x00000100,
729 	0x9b7c, 0xffffffff, 0x00000000,
730 	0x8030, 0xffffffff, 0x0000100a,
731 	0x8a14, 0xffffffff, 0x00000001,
732 	0x8b24, 0xffffffff, 0x00ff0fff,
733 	0x8b10, 0xffffffff, 0x00000000,
734 	0x28a4c, 0x06000000, 0x06000000,
735 	0x4d8, 0xffffffff, 0x00000100,
736 	0x913c, 0xffff000f, 0x0100000a,
737 	0x960c, 0xffffffff, 0x54763210,
738 	0x88c4, 0xffffffff, 0x000000c2,
739 	0x88d4, 0xffffffff, 0x00000010,
740 	0x8974, 0xffffffff, 0x00000000,
741 	0xc78, 0x00000080, 0x00000080,
742 	0x5e78, 0xffffffff, 0x001000f0,
743 	0xd02c, 0xffffffff, 0x08421000,
744 	0xa008, 0xffffffff, 0x00010000,
745 	0x8d00, 0xffffffff, 0x100e4848,
746 	0x8d04, 0xffffffff, 0x00164745,
747 	0x8c00, 0xffffffff, 0xe4000003,
748 	0x8cf0, 0x1fffffff, 0x08e00410,
749 	0x28350, 0xffffffff, 0x00000000,
750 	0x9508, 0xffffffff, 0x00000002,
751 	0x900c, 0xffffffff, 0x0017071f,
752 	0x8c18, 0xffffffff, 0x10101060,
753 	0x8c1c, 0xffffffff, 0x00001010
754 };
755 
756 static const u32 barts_golden_registers[] =
757 {
758 	0x5eb4, 0xffffffff, 0x00000002,
759 	0x5e78, 0x8f311ff1, 0x001000f0,
760 	0x3f90, 0xffff0000, 0xff000000,
761 	0x9148, 0xffff0000, 0xff000000,
762 	0x3f94, 0xffff0000, 0xff000000,
763 	0x914c, 0xffff0000, 0xff000000,
764 	0xc78, 0x00000080, 0x00000080,
765 	0xbd4, 0x70073777, 0x00010001,
766 	0xd02c, 0xbfffff1f, 0x08421000,
767 	0xd0b8, 0x03773777, 0x02011003,
768 	0x5bc0, 0x00200000, 0x50100000,
769 	0x98f8, 0x33773777, 0x02011003,
770 	0x98fc, 0xffffffff, 0x76543210,
771 	0x7030, 0x31000311, 0x00000011,
772 	0x2f48, 0x00000007, 0x02011003,
773 	0x6b28, 0x00000010, 0x00000012,
774 	0x7728, 0x00000010, 0x00000012,
775 	0x10328, 0x00000010, 0x00000012,
776 	0x10f28, 0x00000010, 0x00000012,
777 	0x11b28, 0x00000010, 0x00000012,
778 	0x12728, 0x00000010, 0x00000012,
779 	0x240c, 0x000007ff, 0x00000380,
780 	0x8a14, 0xf000001f, 0x00000007,
781 	0x8b24, 0x3fff3fff, 0x00ff0fff,
782 	0x8b10, 0x0000ff0f, 0x00000000,
783 	0x28a4c, 0x07ffffff, 0x06000000,
784 	0x10c, 0x00000001, 0x00010003,
785 	0xa02c, 0xffffffff, 0x0000009b,
786 	0x913c, 0x0000000f, 0x0100000a,
787 	0x8d00, 0xffff7f7f, 0x100e4848,
788 	0x8d04, 0x00ffffff, 0x00164745,
789 	0x8c00, 0xfffc0003, 0xe4000003,
790 	0x8c04, 0xf8ff00ff, 0x40600060,
791 	0x8c08, 0x00ff00ff, 0x001c001c,
792 	0x8cf0, 0x1fff1fff, 0x08e00620,
793 	0x8c20, 0x0fff0fff, 0x00800080,
794 	0x8c24, 0x0fff0fff, 0x00800080,
795 	0x8c18, 0xffffffff, 0x20202078,
796 	0x8c1c, 0x0000ffff, 0x00001010,
797 	0x28350, 0x00000f01, 0x00000000,
798 	0x9508, 0x3700001f, 0x00000002,
799 	0x960c, 0xffffffff, 0x54763210,
800 	0x88c4, 0x001f3ae3, 0x000000c2,
801 	0x88d4, 0x0000001f, 0x00000010,
802 	0x8974, 0xffffffff, 0x00000000
803 };
804 
805 static const u32 turks_golden_registers[] =
806 {
807 	0x5eb4, 0xffffffff, 0x00000002,
808 	0x5e78, 0x8f311ff1, 0x001000f0,
809 	0x8c8, 0x00003000, 0x00001070,
810 	0x8cc, 0x000fffff, 0x00040035,
811 	0x3f90, 0xffff0000, 0xfff00000,
812 	0x9148, 0xffff0000, 0xfff00000,
813 	0x3f94, 0xffff0000, 0xfff00000,
814 	0x914c, 0xffff0000, 0xfff00000,
815 	0xc78, 0x00000080, 0x00000080,
816 	0xbd4, 0x00073007, 0x00010002,
817 	0xd02c, 0xbfffff1f, 0x08421000,
818 	0xd0b8, 0x03773777, 0x02010002,
819 	0x5bc0, 0x00200000, 0x50100000,
820 	0x98f8, 0x33773777, 0x00010002,
821 	0x98fc, 0xffffffff, 0x33221100,
822 	0x7030, 0x31000311, 0x00000011,
823 	0x2f48, 0x33773777, 0x00010002,
824 	0x6b28, 0x00000010, 0x00000012,
825 	0x7728, 0x00000010, 0x00000012,
826 	0x10328, 0x00000010, 0x00000012,
827 	0x10f28, 0x00000010, 0x00000012,
828 	0x11b28, 0x00000010, 0x00000012,
829 	0x12728, 0x00000010, 0x00000012,
830 	0x240c, 0x000007ff, 0x00000380,
831 	0x8a14, 0xf000001f, 0x00000007,
832 	0x8b24, 0x3fff3fff, 0x00ff0fff,
833 	0x8b10, 0x0000ff0f, 0x00000000,
834 	0x28a4c, 0x07ffffff, 0x06000000,
835 	0x10c, 0x00000001, 0x00010003,
836 	0xa02c, 0xffffffff, 0x0000009b,
837 	0x913c, 0x0000000f, 0x0100000a,
838 	0x8d00, 0xffff7f7f, 0x100e4848,
839 	0x8d04, 0x00ffffff, 0x00164745,
840 	0x8c00, 0xfffc0003, 0xe4000003,
841 	0x8c04, 0xf8ff00ff, 0x40600060,
842 	0x8c08, 0x00ff00ff, 0x001c001c,
843 	0x8cf0, 0x1fff1fff, 0x08e00410,
844 	0x8c20, 0x0fff0fff, 0x00800080,
845 	0x8c24, 0x0fff0fff, 0x00800080,
846 	0x8c18, 0xffffffff, 0x20202078,
847 	0x8c1c, 0x0000ffff, 0x00001010,
848 	0x28350, 0x00000f01, 0x00000000,
849 	0x9508, 0x3700001f, 0x00000002,
850 	0x960c, 0xffffffff, 0x54763210,
851 	0x88c4, 0x001f3ae3, 0x000000c2,
852 	0x88d4, 0x0000001f, 0x00000010,
853 	0x8974, 0xffffffff, 0x00000000
854 };
855 
856 static const u32 caicos_golden_registers[] =
857 {
858 	0x5eb4, 0xffffffff, 0x00000002,
859 	0x5e78, 0x8f311ff1, 0x001000f0,
860 	0x8c8, 0x00003420, 0x00001450,
861 	0x8cc, 0x000fffff, 0x00040035,
862 	0x3f90, 0xffff0000, 0xfffc0000,
863 	0x9148, 0xffff0000, 0xfffc0000,
864 	0x3f94, 0xffff0000, 0xfffc0000,
865 	0x914c, 0xffff0000, 0xfffc0000,
866 	0xc78, 0x00000080, 0x00000080,
867 	0xbd4, 0x00073007, 0x00010001,
868 	0xd02c, 0xbfffff1f, 0x08421000,
869 	0xd0b8, 0x03773777, 0x02010001,
870 	0x5bc0, 0x00200000, 0x50100000,
871 	0x98f8, 0x33773777, 0x02010001,
872 	0x98fc, 0xffffffff, 0x33221100,
873 	0x7030, 0x31000311, 0x00000011,
874 	0x2f48, 0x33773777, 0x02010001,
875 	0x6b28, 0x00000010, 0x00000012,
876 	0x7728, 0x00000010, 0x00000012,
877 	0x10328, 0x00000010, 0x00000012,
878 	0x10f28, 0x00000010, 0x00000012,
879 	0x11b28, 0x00000010, 0x00000012,
880 	0x12728, 0x00000010, 0x00000012,
881 	0x240c, 0x000007ff, 0x00000380,
882 	0x8a14, 0xf000001f, 0x00000001,
883 	0x8b24, 0x3fff3fff, 0x00ff0fff,
884 	0x8b10, 0x0000ff0f, 0x00000000,
885 	0x28a4c, 0x07ffffff, 0x06000000,
886 	0x10c, 0x00000001, 0x00010003,
887 	0xa02c, 0xffffffff, 0x0000009b,
888 	0x913c, 0x0000000f, 0x0100000a,
889 	0x8d00, 0xffff7f7f, 0x100e4848,
890 	0x8d04, 0x00ffffff, 0x00164745,
891 	0x8c00, 0xfffc0003, 0xe4000003,
892 	0x8c04, 0xf8ff00ff, 0x40600060,
893 	0x8c08, 0x00ff00ff, 0x001c001c,
894 	0x8cf0, 0x1fff1fff, 0x08e00410,
895 	0x8c20, 0x0fff0fff, 0x00800080,
896 	0x8c24, 0x0fff0fff, 0x00800080,
897 	0x8c18, 0xffffffff, 0x20202078,
898 	0x8c1c, 0x0000ffff, 0x00001010,
899 	0x28350, 0x00000f01, 0x00000000,
900 	0x9508, 0x3700001f, 0x00000002,
901 	0x960c, 0xffffffff, 0x54763210,
902 	0x88c4, 0x001f3ae3, 0x000000c2,
903 	0x88d4, 0x0000001f, 0x00000010,
904 	0x8974, 0xffffffff, 0x00000000
905 };
906 
907 static void evergreen_init_golden_registers(struct radeon_device *rdev)
908 {
909 	switch (rdev->family) {
910 	case CHIP_CYPRESS:
911 	case CHIP_HEMLOCK:
912 		radeon_program_register_sequence(rdev,
913 						 evergreen_golden_registers,
914 						 (const u32)ARRAY_SIZE(evergreen_golden_registers));
915 		radeon_program_register_sequence(rdev,
916 						 evergreen_golden_registers2,
917 						 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
918 		radeon_program_register_sequence(rdev,
919 						 cypress_mgcg_init,
920 						 (const u32)ARRAY_SIZE(cypress_mgcg_init));
921 		break;
922 	case CHIP_JUNIPER:
923 		radeon_program_register_sequence(rdev,
924 						 evergreen_golden_registers,
925 						 (const u32)ARRAY_SIZE(evergreen_golden_registers));
926 		radeon_program_register_sequence(rdev,
927 						 evergreen_golden_registers2,
928 						 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
929 		radeon_program_register_sequence(rdev,
930 						 juniper_mgcg_init,
931 						 (const u32)ARRAY_SIZE(juniper_mgcg_init));
932 		break;
933 	case CHIP_REDWOOD:
934 		radeon_program_register_sequence(rdev,
935 						 evergreen_golden_registers,
936 						 (const u32)ARRAY_SIZE(evergreen_golden_registers));
937 		radeon_program_register_sequence(rdev,
938 						 evergreen_golden_registers2,
939 						 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
940 		radeon_program_register_sequence(rdev,
941 						 redwood_mgcg_init,
942 						 (const u32)ARRAY_SIZE(redwood_mgcg_init));
943 		break;
944 	case CHIP_CEDAR:
945 		radeon_program_register_sequence(rdev,
946 						 cedar_golden_registers,
947 						 (const u32)ARRAY_SIZE(cedar_golden_registers));
948 		radeon_program_register_sequence(rdev,
949 						 evergreen_golden_registers2,
950 						 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
951 		radeon_program_register_sequence(rdev,
952 						 cedar_mgcg_init,
953 						 (const u32)ARRAY_SIZE(cedar_mgcg_init));
954 		break;
955 	case CHIP_PALM:
956 		radeon_program_register_sequence(rdev,
957 						 wrestler_golden_registers,
958 						 (const u32)ARRAY_SIZE(wrestler_golden_registers));
959 		break;
960 	case CHIP_SUMO:
961 		radeon_program_register_sequence(rdev,
962 						 supersumo_golden_registers,
963 						 (const u32)ARRAY_SIZE(supersumo_golden_registers));
964 		break;
965 	case CHIP_SUMO2:
966 		radeon_program_register_sequence(rdev,
967 						 supersumo_golden_registers,
968 						 (const u32)ARRAY_SIZE(supersumo_golden_registers));
969 		radeon_program_register_sequence(rdev,
970 						 sumo_golden_registers,
971 						 (const u32)ARRAY_SIZE(sumo_golden_registers));
972 		break;
973 	case CHIP_BARTS:
974 		radeon_program_register_sequence(rdev,
975 						 barts_golden_registers,
976 						 (const u32)ARRAY_SIZE(barts_golden_registers));
977 		break;
978 	case CHIP_TURKS:
979 		radeon_program_register_sequence(rdev,
980 						 turks_golden_registers,
981 						 (const u32)ARRAY_SIZE(turks_golden_registers));
982 		break;
983 	case CHIP_CAICOS:
984 		radeon_program_register_sequence(rdev,
985 						 caicos_golden_registers,
986 						 (const u32)ARRAY_SIZE(caicos_golden_registers));
987 		break;
988 	default:
989 		break;
990 	}
991 }
992 
993 void evergreen_tiling_fields(unsigned tiling_flags, unsigned *bankw,
994 			     unsigned *bankh, unsigned *mtaspect,
995 			     unsigned *tile_split)
996 {
997 	*bankw = (tiling_flags >> RADEON_TILING_EG_BANKW_SHIFT) & RADEON_TILING_EG_BANKW_MASK;
998 	*bankh = (tiling_flags >> RADEON_TILING_EG_BANKH_SHIFT) & RADEON_TILING_EG_BANKH_MASK;
999 	*mtaspect = (tiling_flags >> RADEON_TILING_EG_MACRO_TILE_ASPECT_SHIFT) & RADEON_TILING_EG_MACRO_TILE_ASPECT_MASK;
1000 	*tile_split = (tiling_flags >> RADEON_TILING_EG_TILE_SPLIT_SHIFT) & RADEON_TILING_EG_TILE_SPLIT_MASK;
1001 	switch (*bankw) {
1002 	default:
1003 	case 1: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_1; break;
1004 	case 2: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_2; break;
1005 	case 4: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_4; break;
1006 	case 8: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_8; break;
1007 	}
1008 	switch (*bankh) {
1009 	default:
1010 	case 1: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_1; break;
1011 	case 2: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_2; break;
1012 	case 4: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_4; break;
1013 	case 8: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_8; break;
1014 	}
1015 	switch (*mtaspect) {
1016 	default:
1017 	case 1: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_1; break;
1018 	case 2: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_2; break;
1019 	case 4: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_4; break;
1020 	case 8: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_8; break;
1021 	}
1022 }
1023 
1024 static int sumo_set_uvd_clock(struct radeon_device *rdev, u32 clock,
1025 			      u32 cntl_reg, u32 status_reg)
1026 {
1027 	int r, i;
1028 	struct atom_clock_dividers dividers;
1029 
1030         r = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
1031 					   clock, false, &dividers);
1032 	if (r)
1033 		return r;
1034 
1035 	WREG32_P(cntl_reg, dividers.post_div, ~(DCLK_DIR_CNTL_EN|DCLK_DIVIDER_MASK));
1036 
1037 	for (i = 0; i < 100; i++) {
1038 		if (RREG32(status_reg) & DCLK_STATUS)
1039 			break;
1040 		mdelay(10);
1041 	}
1042 	if (i == 100)
1043 		return -ETIMEDOUT;
1044 
1045 	return 0;
1046 }
1047 
1048 int sumo_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
1049 {
1050 	int r = 0;
1051 	u32 cg_scratch = RREG32(CG_SCRATCH1);
1052 
1053 	r = sumo_set_uvd_clock(rdev, vclk, CG_VCLK_CNTL, CG_VCLK_STATUS);
1054 	if (r)
1055 		goto done;
1056 	cg_scratch &= 0xffff0000;
1057 	cg_scratch |= vclk / 100; /* Mhz */
1058 
1059 	r = sumo_set_uvd_clock(rdev, dclk, CG_DCLK_CNTL, CG_DCLK_STATUS);
1060 	if (r)
1061 		goto done;
1062 	cg_scratch &= 0x0000ffff;
1063 	cg_scratch |= (dclk / 100) << 16; /* Mhz */
1064 
1065 done:
1066 	WREG32(CG_SCRATCH1, cg_scratch);
1067 
1068 	return r;
1069 }
1070 
/**
 * evergreen_set_uvd_clocks - reprogram the UPLL for the UVD clocks
 *
 * @rdev: radeon_device pointer
 * @vclk: requested UVD video clock (0 = put the PLL to sleep)
 * @dclk: requested UVD decode clock (0 = put the PLL to sleep)
 *
 * Bypasses the UPLL, computes new feedback/post dividers, programs
 * them while the PLL is in reset, then switches back to normal mode.
 * The register write order below is deliberate; do not reorder.
 * Returns 0 on success, negative error code on failure.
 */
int evergreen_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
{
	/* divider outputs, filled in by the calculator below */
	unsigned fb_div = 0, vclk_div = 0, dclk_div = 0;
	int r;

	/* bypass vclk and dclk with bclk */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		VCLK_SRC_SEL(1) | DCLK_SRC_SEL(1),
		~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));

	/* put PLL in bypass mode */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_BYPASS_EN_MASK, ~UPLL_BYPASS_EN_MASK);

	if (!vclk || !dclk) {
		/* keep the Bypass mode, put PLL to sleep */
		WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
		return 0;
	}

	r = radeon_uvd_calc_upll_dividers(rdev, vclk, dclk, 125000, 250000,
					  16384, 0x03FFFFFF, 0, 128, 5,
					  &fb_div, &vclk_div, &dclk_div);
	if (r)
		return r;

	/* set VCO_MODE to 1 */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_VCO_MODE_MASK, ~UPLL_VCO_MODE_MASK);

	/* toggle UPLL_SLEEP to 1 then back to 0 */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_SLEEP_MASK);

	/* deassert UPLL_RESET */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);

	mdelay(1);

	r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
	if (r)
		return r;

	/* assert UPLL_RESET again */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_RESET_MASK, ~UPLL_RESET_MASK);

	/* disable spread spectrum. */
	WREG32_P(CG_UPLL_SPREAD_SPECTRUM, 0, ~SSEN_MASK);

	/* set feedback divider */
	WREG32_P(CG_UPLL_FUNC_CNTL_3, UPLL_FB_DIV(fb_div), ~UPLL_FB_DIV_MASK);

	/* set ref divider to 0 */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_REF_DIV_MASK);

	/* NOTE(review): the 307200 threshold presumably selects a VCO
	 * range via the spare bit — confirm against the UPLL docs */
	if (fb_div < 307200)
		WREG32_P(CG_UPLL_FUNC_CNTL_4, 0, ~UPLL_SPARE_ISPARE9);
	else
		WREG32_P(CG_UPLL_FUNC_CNTL_4, UPLL_SPARE_ISPARE9, ~UPLL_SPARE_ISPARE9);

	/* set PDIV_A and PDIV_B */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		UPLL_PDIV_A(vclk_div) | UPLL_PDIV_B(dclk_div),
		~(UPLL_PDIV_A_MASK | UPLL_PDIV_B_MASK));

	/* give the PLL some time to settle */
	mdelay(15);

	/* deassert PLL_RESET */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);

	mdelay(15);

	/* switch from bypass mode to normal mode */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_BYPASS_EN_MASK);

	r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
	if (r)
		return r;

	/* switch VCLK and DCLK selection */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		VCLK_SRC_SEL(2) | DCLK_SRC_SEL(2),
		~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));

	mdelay(100);

	return 0;
}
1159 
1160 void evergreen_fix_pci_max_read_req_size(struct radeon_device *rdev)
1161 {
1162 	int readrq;
1163 	u16 v;
1164 
1165 	readrq = pcie_get_readrq(rdev->pdev);
1166 	v = ffs(readrq) - 8;
1167 	/* if bios or OS sets MAX_READ_REQUEST_SIZE to an invalid value, fix it
1168 	 * to avoid hangs or perfomance issues
1169 	 */
1170 	if ((v == 0) || (v == 6) || (v == 7))
1171 		pcie_set_readrq(rdev->pdev, 512);
1172 }
1173 
1174 void dce4_program_fmt(struct drm_encoder *encoder)
1175 {
1176 	struct drm_device *dev = encoder->dev;
1177 	struct radeon_device *rdev = dev->dev_private;
1178 	struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
1179 	struct radeon_crtc *radeon_crtc = to_radeon_crtc(encoder->crtc);
1180 	struct drm_connector *connector = radeon_get_connector_for_encoder(encoder);
1181 	int bpc = 0;
1182 	u32 tmp = 0;
1183 	enum radeon_connector_dither dither = RADEON_FMT_DITHER_DISABLE;
1184 
1185 	if (connector) {
1186 		struct radeon_connector *radeon_connector = to_radeon_connector(connector);
1187 		bpc = radeon_get_monitor_bpc(connector);
1188 		dither = radeon_connector->dither;
1189 	}
1190 
1191 	/* LVDS/eDP FMT is set up by atom */
1192 	if (radeon_encoder->devices & ATOM_DEVICE_LCD_SUPPORT)
1193 		return;
1194 
1195 	/* not needed for analog */
1196 	if ((radeon_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1) ||
1197 	    (radeon_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2))
1198 		return;
1199 
1200 	if (bpc == 0)
1201 		return;
1202 
1203 	switch (bpc) {
1204 	case 6:
1205 		if (dither == RADEON_FMT_DITHER_ENABLE)
1206 			/* XXX sort out optimal dither settings */
1207 			tmp |= (FMT_FRAME_RANDOM_ENABLE | FMT_HIGHPASS_RANDOM_ENABLE |
1208 				FMT_SPATIAL_DITHER_EN);
1209 		else
1210 			tmp |= FMT_TRUNCATE_EN;
1211 		break;
1212 	case 8:
1213 		if (dither == RADEON_FMT_DITHER_ENABLE)
1214 			/* XXX sort out optimal dither settings */
1215 			tmp |= (FMT_FRAME_RANDOM_ENABLE | FMT_HIGHPASS_RANDOM_ENABLE |
1216 				FMT_RGB_RANDOM_ENABLE |
1217 				FMT_SPATIAL_DITHER_EN | FMT_SPATIAL_DITHER_DEPTH);
1218 		else
1219 			tmp |= (FMT_TRUNCATE_EN | FMT_TRUNCATE_DEPTH);
1220 		break;
1221 	case 10:
1222 	default:
1223 		/* not needed */
1224 		break;
1225 	}
1226 
1227 	WREG32(FMT_BIT_DEPTH_CONTROL + radeon_crtc->crtc_offset, tmp);
1228 }
1229 
1230 static bool dce4_is_in_vblank(struct radeon_device *rdev, int crtc)
1231 {
1232 	if (RREG32(EVERGREEN_CRTC_STATUS + crtc_offsets[crtc]) & EVERGREEN_CRTC_V_BLANK)
1233 		return true;
1234 	else
1235 		return false;
1236 }
1237 
1238 static bool dce4_is_counter_moving(struct radeon_device *rdev, int crtc)
1239 {
1240 	u32 pos1, pos2;
1241 
1242 	pos1 = RREG32(EVERGREEN_CRTC_STATUS_POSITION + crtc_offsets[crtc]);
1243 	pos2 = RREG32(EVERGREEN_CRTC_STATUS_POSITION + crtc_offsets[crtc]);
1244 
1245 	if (pos1 != pos2)
1246 		return true;
1247 	else
1248 		return false;
1249 }
1250 
1251 /**
1252  * dce4_wait_for_vblank - vblank wait asic callback.
1253  *
1254  * @rdev: radeon_device pointer
1255  * @crtc: crtc to wait for vblank on
1256  *
1257  * Wait for vblank on the requested crtc (evergreen+).
1258  */
1259 void dce4_wait_for_vblank(struct radeon_device *rdev, int crtc)
1260 {
1261 	unsigned i = 0;
1262 
1263 	if (crtc >= rdev->num_crtc)
1264 		return;
1265 
1266 	if (!(RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[crtc]) & EVERGREEN_CRTC_MASTER_EN))
1267 		return;
1268 
1269 	/* depending on when we hit vblank, we may be close to active; if so,
1270 	 * wait for another frame.
1271 	 */
1272 	while (dce4_is_in_vblank(rdev, crtc)) {
1273 		if (i++ % 100 == 0) {
1274 			if (!dce4_is_counter_moving(rdev, crtc))
1275 				break;
1276 		}
1277 	}
1278 
1279 	while (!dce4_is_in_vblank(rdev, crtc)) {
1280 		if (i++ % 100 == 0) {
1281 			if (!dce4_is_counter_moving(rdev, crtc))
1282 				break;
1283 		}
1284 	}
1285 }
1286 
1287 /**
1288  * evergreen_page_flip - pageflip callback.
1289  *
1290  * @rdev: radeon_device pointer
1291  * @crtc_id: crtc to cleanup pageflip on
1292  * @crtc_base: new address of the crtc (GPU MC address)
1293  *
1294  * Does the actual pageflip (evergreen+).
1295  * During vblank we take the crtc lock and wait for the update_pending
1296  * bit to go high, when it does, we release the lock, and allow the
1297  * double buffered update to take place.
1298  * Returns the current update pending status.
1299  */
1300 void evergreen_page_flip(struct radeon_device *rdev, int crtc_id, u64 crtc_base)
1301 {
1302 	struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id];
1303 	u32 tmp = RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset);
1304 	int i;
1305 
1306 	/* Lock the graphics update lock */
1307 	tmp |= EVERGREEN_GRPH_UPDATE_LOCK;
1308 	WREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset, tmp);
1309 
1310 	/* update the scanout addresses */
1311 	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
1312 	       upper_32_bits(crtc_base));
1313 	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
1314 	       (u32)crtc_base);
1315 
1316 	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
1317 	       upper_32_bits(crtc_base));
1318 	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
1319 	       (u32)crtc_base);
1320 
1321 	/* Wait for update_pending to go high. */
1322 	for (i = 0; i < rdev->usec_timeout; i++) {
1323 		if (RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset) & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING)
1324 			break;
1325 		udelay(1);
1326 	}
1327 	DRM_DEBUG("Update pending now high. Unlocking vupdate_lock.\n");
1328 
1329 	/* Unlock the lock, so double-buffering can take place inside vblank */
1330 	tmp &= ~EVERGREEN_GRPH_UPDATE_LOCK;
1331 	WREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset, tmp);
1332 }
1333 
1334 /**
1335  * evergreen_page_flip_pending - check if page flip is still pending
1336  *
1337  * @rdev: radeon_device pointer
1338  * @crtc_id: crtc to check
1339  *
1340  * Returns the current update pending status.
1341  */
1342 bool evergreen_page_flip_pending(struct radeon_device *rdev, int crtc_id)
1343 {
1344 	struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id];
1345 
1346 	/* Return current update_pending status: */
1347 	return !!(RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset) &
1348 		EVERGREEN_GRPH_SURFACE_UPDATE_PENDING);
1349 }
1350 
/**
 * evergreen_get_temp - read the on-die thermal sensor
 *
 * @rdev: radeon_device pointer
 *
 * Returns the current GPU temperature in millidegrees Celsius.
 * Juniper reads the TS0 sensor and applies a fused trim offset;
 * all other evergreen parts read the combined ASIC_T field.
 */
int evergreen_get_temp(struct radeon_device *rdev)
{
	u32 temp, toffset;
	int actual_temp = 0;

	if (rdev->family == CHIP_JUNIPER) {
		/* trim offset fused into CG_THERMAL_CTRL */
		toffset = (RREG32(CG_THERMAL_CTRL) & TOFFSET_MASK) >>
			TOFFSET_SHIFT;
		temp = (RREG32(CG_TS0_STATUS) & TS0_ADC_DOUT_MASK) >>
			TS0_ADC_DOUT_SHIFT;

		/* toffset looks like a 9-bit two's-complement value:
		 * bit 8 set means a negative offset of (0x200 - toffset) */
		if (toffset & 0x100)
			actual_temp = temp / 2 - (0x200 - toffset);
		else
			actual_temp = temp / 2 + toffset;

		/* degrees -> millidegrees (the /2 above suggests the ADC
		 * reports half-degree steps — confirm against hw docs) */
		actual_temp = actual_temp * 1000;

	} else {
		temp = (RREG32(CG_MULT_THERMAL_STATUS) & ASIC_T_MASK) >>
			ASIC_T_SHIFT;

		/* 0x400/0x200 appear to be under/overflow indicators;
		 * clamp to the sensor's limits in those cases */
		if (temp & 0x400)
			actual_temp = -256;
		else if (temp & 0x200)
			actual_temp = 255;
		else if (temp & 0x100) {
			/* sign-extend the 9-bit reading to a full int */
			actual_temp = temp & 0x1ff;
			actual_temp |= ~0x1ff;
		} else
			actual_temp = temp & 0xff;

		/* half-degree steps -> millidegrees */
		actual_temp = (actual_temp * 1000) / 2;
	}

	return actual_temp;
}
1389 
1390 int sumo_get_temp(struct radeon_device *rdev)
1391 {
1392 	u32 temp = RREG32(CG_THERMAL_STATUS) & 0xff;
1393 	int actual_temp = temp - 49;
1394 
1395 	return actual_temp * 1000;
1396 }
1397 
1398 /**
1399  * sumo_pm_init_profile - Initialize power profiles callback.
1400  *
1401  * @rdev: radeon_device pointer
1402  *
1403  * Initialize the power states used in profile mode
1404  * (sumo, trinity, SI).
1405  * Used for profile mode only.
1406  */
1407 void sumo_pm_init_profile(struct radeon_device *rdev)
1408 {
1409 	int idx;
1410 
1411 	/* default */
1412 	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
1413 	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
1414 	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
1415 	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 0;
1416 
1417 	/* low,mid sh/mh */
1418 	if (rdev->flags & RADEON_IS_MOBILITY)
1419 		idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
1420 	else
1421 		idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1422 
1423 	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
1424 	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
1425 	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
1426 	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
1427 
1428 	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
1429 	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
1430 	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
1431 	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
1432 
1433 	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
1434 	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
1435 	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
1436 	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 0;
1437 
1438 	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
1439 	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
1440 	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
1441 	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 0;
1442 
1443 	/* high sh/mh */
1444 	idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1445 	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
1446 	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
1447 	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
1448 	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx =
1449 		rdev->pm.power_state[idx].num_clock_modes - 1;
1450 
1451 	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
1452 	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
1453 	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
1454 	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx =
1455 		rdev->pm.power_state[idx].num_clock_modes - 1;
1456 }
1457 
1458 /**
1459  * btc_pm_init_profile - Initialize power profiles callback.
1460  *
1461  * @rdev: radeon_device pointer
1462  *
1463  * Initialize the power states used in profile mode
1464  * (BTC, cayman).
1465  * Used for profile mode only.
1466  */
1467 void btc_pm_init_profile(struct radeon_device *rdev)
1468 {
1469 	int idx;
1470 
1471 	/* default */
1472 	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
1473 	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
1474 	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
1475 	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 2;
1476 	/* starting with BTC, there is one state that is used for both
1477 	 * MH and SH.  Difference is that we always use the high clock index for
1478 	 * mclk.
1479 	 */
1480 	if (rdev->flags & RADEON_IS_MOBILITY)
1481 		idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
1482 	else
1483 		idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1484 	/* low sh */
1485 	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
1486 	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
1487 	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
1488 	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
1489 	/* mid sh */
1490 	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
1491 	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
1492 	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
1493 	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 1;
1494 	/* high sh */
1495 	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
1496 	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
1497 	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
1498 	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx = 2;
1499 	/* low mh */
1500 	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
1501 	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
1502 	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
1503 	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
1504 	/* mid mh */
1505 	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
1506 	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
1507 	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
1508 	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 1;
1509 	/* high mh */
1510 	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
1511 	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
1512 	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
1513 	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx = 2;
1514 }
1515 
1516 /**
1517  * evergreen_pm_misc - set additional pm hw parameters callback.
1518  *
1519  * @rdev: radeon_device pointer
1520  *
1521  * Set non-clock parameters associated with a power state
1522  * (voltage, etc.) (evergreen+).
1523  */
1524 void evergreen_pm_misc(struct radeon_device *rdev)
1525 {
1526 	int req_ps_idx = rdev->pm.requested_power_state_index;
1527 	int req_cm_idx = rdev->pm.requested_clock_mode_index;
1528 	struct radeon_power_state *ps = &rdev->pm.power_state[req_ps_idx];
1529 	struct radeon_voltage *voltage = &ps->clock_info[req_cm_idx].voltage;
1530 
1531 	if (voltage->type == VOLTAGE_SW) {
1532 		/* 0xff0x are flags rather then an actual voltage */
1533 		if ((voltage->voltage & 0xff00) == 0xff00)
1534 			return;
1535 		if (voltage->voltage && (voltage->voltage != rdev->pm.current_vddc)) {
1536 			radeon_atom_set_voltage(rdev, voltage->voltage, SET_VOLTAGE_TYPE_ASIC_VDDC);
1537 			rdev->pm.current_vddc = voltage->voltage;
1538 			DRM_DEBUG("Setting: vddc: %d\n", voltage->voltage);
1539 		}
1540 
1541 		/* starting with BTC, there is one state that is used for both
1542 		 * MH and SH.  Difference is that we always use the high clock index for
1543 		 * mclk and vddci.
1544 		 */
1545 		if ((rdev->pm.pm_method == PM_METHOD_PROFILE) &&
1546 		    (rdev->family >= CHIP_BARTS) &&
1547 		    rdev->pm.active_crtc_count &&
1548 		    ((rdev->pm.profile_index == PM_PROFILE_MID_MH_IDX) ||
1549 		     (rdev->pm.profile_index == PM_PROFILE_LOW_MH_IDX)))
1550 			voltage = &rdev->pm.power_state[req_ps_idx].
1551 				clock_info[rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx].voltage;
1552 
1553 		/* 0xff0x are flags rather then an actual voltage */
1554 		if ((voltage->vddci & 0xff00) == 0xff00)
1555 			return;
1556 		if (voltage->vddci && (voltage->vddci != rdev->pm.current_vddci)) {
1557 			radeon_atom_set_voltage(rdev, voltage->vddci, SET_VOLTAGE_TYPE_ASIC_VDDCI);
1558 			rdev->pm.current_vddci = voltage->vddci;
1559 			DRM_DEBUG("Setting: vddci: %d\n", voltage->vddci);
1560 		}
1561 	}
1562 }
1563 
1564 /**
1565  * evergreen_pm_prepare - pre-power state change callback.
1566  *
1567  * @rdev: radeon_device pointer
1568  *
1569  * Prepare for a power state change (evergreen+).
1570  */
1571 void evergreen_pm_prepare(struct radeon_device *rdev)
1572 {
1573 	struct drm_device *ddev = rdev->ddev;
1574 	struct drm_crtc *crtc;
1575 	struct radeon_crtc *radeon_crtc;
1576 	u32 tmp;
1577 
1578 	/* disable any active CRTCs */
1579 	list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
1580 		radeon_crtc = to_radeon_crtc(crtc);
1581 		if (radeon_crtc->enabled) {
1582 			tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
1583 			tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
1584 			WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
1585 		}
1586 	}
1587 }
1588 
1589 /**
1590  * evergreen_pm_finish - post-power state change callback.
1591  *
1592  * @rdev: radeon_device pointer
1593  *
1594  * Clean up after a power state change (evergreen+).
1595  */
1596 void evergreen_pm_finish(struct radeon_device *rdev)
1597 {
1598 	struct drm_device *ddev = rdev->ddev;
1599 	struct drm_crtc *crtc;
1600 	struct radeon_crtc *radeon_crtc;
1601 	u32 tmp;
1602 
1603 	/* enable any active CRTCs */
1604 	list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
1605 		radeon_crtc = to_radeon_crtc(crtc);
1606 		if (radeon_crtc->enabled) {
1607 			tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
1608 			tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
1609 			WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
1610 		}
1611 	}
1612 }
1613 
1614 /**
1615  * evergreen_hpd_sense - hpd sense callback.
1616  *
1617  * @rdev: radeon_device pointer
1618  * @hpd: hpd (hotplug detect) pin
1619  *
1620  * Checks if a digital monitor is connected (evergreen+).
1621  * Returns true if connected, false if not connected.
1622  */
1623 bool evergreen_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd)
1624 {
1625 	bool connected = false;
1626 
1627 	switch (hpd) {
1628 	case RADEON_HPD_1:
1629 		if (RREG32(DC_HPD1_INT_STATUS) & DC_HPDx_SENSE)
1630 			connected = true;
1631 		break;
1632 	case RADEON_HPD_2:
1633 		if (RREG32(DC_HPD2_INT_STATUS) & DC_HPDx_SENSE)
1634 			connected = true;
1635 		break;
1636 	case RADEON_HPD_3:
1637 		if (RREG32(DC_HPD3_INT_STATUS) & DC_HPDx_SENSE)
1638 			connected = true;
1639 		break;
1640 	case RADEON_HPD_4:
1641 		if (RREG32(DC_HPD4_INT_STATUS) & DC_HPDx_SENSE)
1642 			connected = true;
1643 		break;
1644 	case RADEON_HPD_5:
1645 		if (RREG32(DC_HPD5_INT_STATUS) & DC_HPDx_SENSE)
1646 			connected = true;
1647 		break;
1648 	case RADEON_HPD_6:
1649 		if (RREG32(DC_HPD6_INT_STATUS) & DC_HPDx_SENSE)
1650 			connected = true;
1651 		break;
1652 	default:
1653 		break;
1654 	}
1655 
1656 	return connected;
1657 }
1658 
1659 /**
1660  * evergreen_hpd_set_polarity - hpd set polarity callback.
1661  *
1662  * @rdev: radeon_device pointer
1663  * @hpd: hpd (hotplug detect) pin
1664  *
1665  * Set the polarity of the hpd pin (evergreen+).
1666  */
1667 void evergreen_hpd_set_polarity(struct radeon_device *rdev,
1668 				enum radeon_hpd_id hpd)
1669 {
1670 	u32 tmp;
1671 	bool connected = evergreen_hpd_sense(rdev, hpd);
1672 
1673 	switch (hpd) {
1674 	case RADEON_HPD_1:
1675 		tmp = RREG32(DC_HPD1_INT_CONTROL);
1676 		if (connected)
1677 			tmp &= ~DC_HPDx_INT_POLARITY;
1678 		else
1679 			tmp |= DC_HPDx_INT_POLARITY;
1680 		WREG32(DC_HPD1_INT_CONTROL, tmp);
1681 		break;
1682 	case RADEON_HPD_2:
1683 		tmp = RREG32(DC_HPD2_INT_CONTROL);
1684 		if (connected)
1685 			tmp &= ~DC_HPDx_INT_POLARITY;
1686 		else
1687 			tmp |= DC_HPDx_INT_POLARITY;
1688 		WREG32(DC_HPD2_INT_CONTROL, tmp);
1689 		break;
1690 	case RADEON_HPD_3:
1691 		tmp = RREG32(DC_HPD3_INT_CONTROL);
1692 		if (connected)
1693 			tmp &= ~DC_HPDx_INT_POLARITY;
1694 		else
1695 			tmp |= DC_HPDx_INT_POLARITY;
1696 		WREG32(DC_HPD3_INT_CONTROL, tmp);
1697 		break;
1698 	case RADEON_HPD_4:
1699 		tmp = RREG32(DC_HPD4_INT_CONTROL);
1700 		if (connected)
1701 			tmp &= ~DC_HPDx_INT_POLARITY;
1702 		else
1703 			tmp |= DC_HPDx_INT_POLARITY;
1704 		WREG32(DC_HPD4_INT_CONTROL, tmp);
1705 		break;
1706 	case RADEON_HPD_5:
1707 		tmp = RREG32(DC_HPD5_INT_CONTROL);
1708 		if (connected)
1709 			tmp &= ~DC_HPDx_INT_POLARITY;
1710 		else
1711 			tmp |= DC_HPDx_INT_POLARITY;
1712 		WREG32(DC_HPD5_INT_CONTROL, tmp);
1713 			break;
1714 	case RADEON_HPD_6:
1715 		tmp = RREG32(DC_HPD6_INT_CONTROL);
1716 		if (connected)
1717 			tmp &= ~DC_HPDx_INT_POLARITY;
1718 		else
1719 			tmp |= DC_HPDx_INT_POLARITY;
1720 		WREG32(DC_HPD6_INT_CONTROL, tmp);
1721 		break;
1722 	default:
1723 		break;
1724 	}
1725 }
1726 
1727 /**
1728  * evergreen_hpd_init - hpd setup callback.
1729  *
1730  * @rdev: radeon_device pointer
1731  *
1732  * Setup the hpd pins used by the card (evergreen+).
1733  * Enable the pin, set the polarity, and enable the hpd interrupts.
1734  */
1735 void evergreen_hpd_init(struct radeon_device *rdev)
1736 {
1737 	struct drm_device *dev = rdev->ddev;
1738 	struct drm_connector *connector;
1739 	unsigned enabled = 0;
1740 	u32 tmp = DC_HPDx_CONNECTION_TIMER(0x9c4) |
1741 		DC_HPDx_RX_INT_TIMER(0xfa) | DC_HPDx_EN;
1742 
1743 	list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
1744 		struct radeon_connector *radeon_connector = to_radeon_connector(connector);
1745 
1746 		if (connector->connector_type == DRM_MODE_CONNECTOR_eDP ||
1747 		    connector->connector_type == DRM_MODE_CONNECTOR_LVDS) {
1748 			/* don't try to enable hpd on eDP or LVDS avoid breaking the
1749 			 * aux dp channel on imac and help (but not completely fix)
1750 			 * https://bugzilla.redhat.com/show_bug.cgi?id=726143
1751 			 * also avoid interrupt storms during dpms.
1752 			 */
1753 			continue;
1754 		}
1755 		switch (radeon_connector->hpd.hpd) {
1756 		case RADEON_HPD_1:
1757 			WREG32(DC_HPD1_CONTROL, tmp);
1758 			break;
1759 		case RADEON_HPD_2:
1760 			WREG32(DC_HPD2_CONTROL, tmp);
1761 			break;
1762 		case RADEON_HPD_3:
1763 			WREG32(DC_HPD3_CONTROL, tmp);
1764 			break;
1765 		case RADEON_HPD_4:
1766 			WREG32(DC_HPD4_CONTROL, tmp);
1767 			break;
1768 		case RADEON_HPD_5:
1769 			WREG32(DC_HPD5_CONTROL, tmp);
1770 			break;
1771 		case RADEON_HPD_6:
1772 			WREG32(DC_HPD6_CONTROL, tmp);
1773 			break;
1774 		default:
1775 			break;
1776 		}
1777 		radeon_hpd_set_polarity(rdev, radeon_connector->hpd.hpd);
1778 		enabled |= 1 << radeon_connector->hpd.hpd;
1779 	}
1780 	radeon_irq_kms_enable_hpd(rdev, enabled);
1781 }
1782 
1783 /**
1784  * evergreen_hpd_fini - hpd tear down callback.
1785  *
1786  * @rdev: radeon_device pointer
1787  *
1788  * Tear down the hpd pins used by the card (evergreen+).
1789  * Disable the hpd interrupts.
1790  */
1791 void evergreen_hpd_fini(struct radeon_device *rdev)
1792 {
1793 	struct drm_device *dev = rdev->ddev;
1794 	struct drm_connector *connector;
1795 	unsigned disabled = 0;
1796 
1797 	list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
1798 		struct radeon_connector *radeon_connector = to_radeon_connector(connector);
1799 		switch (radeon_connector->hpd.hpd) {
1800 		case RADEON_HPD_1:
1801 			WREG32(DC_HPD1_CONTROL, 0);
1802 			break;
1803 		case RADEON_HPD_2:
1804 			WREG32(DC_HPD2_CONTROL, 0);
1805 			break;
1806 		case RADEON_HPD_3:
1807 			WREG32(DC_HPD3_CONTROL, 0);
1808 			break;
1809 		case RADEON_HPD_4:
1810 			WREG32(DC_HPD4_CONTROL, 0);
1811 			break;
1812 		case RADEON_HPD_5:
1813 			WREG32(DC_HPD5_CONTROL, 0);
1814 			break;
1815 		case RADEON_HPD_6:
1816 			WREG32(DC_HPD6_CONTROL, 0);
1817 			break;
1818 		default:
1819 			break;
1820 		}
1821 		disabled |= 1 << radeon_connector->hpd.hpd;
1822 	}
1823 	radeon_irq_kms_disable_hpd(rdev, disabled);
1824 }
1825 
1826 /* watermark setup */
1827 
/* evergreen_line_buffer_adjust - configure the line buffer split for a crtc
 *
 * Chooses a DC_LB_MEMORY_SPLIT allocation for @radeon_crtc depending on
 * whether the other crtc of the pair (@other_mode) is also active,
 * programs it, and returns the line buffer size granted to this crtc.
 * Returns 0 when the crtc is disabled (no lb used).
 */
static u32 evergreen_line_buffer_adjust(struct radeon_device *rdev,
					struct radeon_crtc *radeon_crtc,
					struct drm_display_mode *mode,
					struct drm_display_mode *other_mode)
{
	u32 tmp, buffer_alloc, i;
	u32 pipe_offset = radeon_crtc->crtc_id * 0x20;
	/*
	 * Line Buffer Setup
	 * There are 3 line buffers, each one shared by 2 display controllers.
	 * DC_LB_MEMORY_SPLIT controls how that line buffer is shared between
	 * the display controllers.  The partitioning is done via one of four
	 * preset allocations specified in bits 2:0:
	 * first display controller
	 *  0 - first half of lb (3840 * 2)
	 *  1 - first 3/4 of lb (5760 * 2)
	 *  2 - whole lb (7680 * 2), other crtc must be disabled
	 *  3 - first 1/4 of lb (1920 * 2)
	 * second display controller
	 *  4 - second half of lb (3840 * 2)
	 *  5 - second 3/4 of lb (5760 * 2)
	 *  6 - whole lb (7680 * 2), other crtc must be disabled
	 *  7 - last 1/4 of lb (1920 * 2)
	 */
	/* this can get tricky if we have two large displays on a paired group
	 * of crtcs.  Ideally for multiple large displays we'd assign them to
	 * non-linked crtcs for maximum line buffer allocation.
	 */
	if (radeon_crtc->base.enabled && mode) {
		if (other_mode) {
			/* both crtcs of the pair are active: split evenly */
			tmp = 0; /* 1/2 */
			buffer_alloc = 1;
		} else {
			tmp = 2; /* whole */
			buffer_alloc = 2;
		}
	} else {
		tmp = 0;
		buffer_alloc = 0;
	}

	/* second controller of the pair uses second half of the lb */
	if (radeon_crtc->crtc_id % 2)
		tmp += 4;
	WREG32(DC_LB_MEMORY_SPLIT + radeon_crtc->crtc_offset, tmp);

	/* DCE4.1/DCE5 also allocate DMIF buffers per pipe; poll until the
	 * hardware acknowledges the allocation (bounded by usec_timeout). */
	if (ASIC_IS_DCE41(rdev) || ASIC_IS_DCE5(rdev)) {
		WREG32(PIPE0_DMIF_BUFFER_CONTROL + pipe_offset,
		       DMIF_BUFFERS_ALLOCATED(buffer_alloc));
		for (i = 0; i < rdev->usec_timeout; i++) {
			if (RREG32(PIPE0_DMIF_BUFFER_CONTROL + pipe_offset) &
			    DMIF_BUFFERS_ALLOCATED_COMPLETED)
				break;
			udelay(1);
		}
	}

	/* translate the programmed split into the lb size for this crtc;
	 * DCE5 parts have a larger line buffer than DCE4 */
	if (radeon_crtc->base.enabled && mode) {
		switch (tmp) {
		case 0:
		case 4:
		default:
			if (ASIC_IS_DCE5(rdev))
				return 4096 * 2;
			else
				return 3840 * 2;
		case 1:
		case 5:
			if (ASIC_IS_DCE5(rdev))
				return 6144 * 2;
			else
				return 5760 * 2;
		case 2:
		case 6:
			if (ASIC_IS_DCE5(rdev))
				return 8192 * 2;
			else
				return 7680 * 2;
		case 3:
		case 7:
			if (ASIC_IS_DCE5(rdev))
				return 2048 * 2;
			else
				return 1920 * 2;
		}
	}

	/* controller not enabled, so no lb used */
	return 0;
}
1918 
1919 u32 evergreen_get_number_of_dram_channels(struct radeon_device *rdev)
1920 {
1921 	u32 tmp = RREG32(MC_SHARED_CHMAP);
1922 
1923 	switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
1924 	case 0:
1925 	default:
1926 		return 1;
1927 	case 1:
1928 		return 2;
1929 	case 2:
1930 		return 4;
1931 	case 3:
1932 		return 8;
1933 	}
1934 }
1935 
/* Inputs to the evergreen display watermark calculations.  One instance is
 * filled in per crtc (once for high clocks, once for low clocks) by
 * evergreen_program_watermarks().
 */
struct evergreen_wm_params {
	u32 dram_channels; /* number of dram channels */
	u32 yclk;          /* bandwidth per dram data pin in kHz */
	u32 sclk;          /* engine clock in kHz */
	u32 disp_clk;      /* display clock in kHz */
	u32 src_width;     /* viewport width */
	u32 active_time;   /* active display time in ns */
	u32 blank_time;    /* blank time in ns */
	bool interlaced;    /* mode is interlaced */
	fixed20_12 vsc;    /* vertical scale ratio */
	u32 num_heads;     /* number of active crtcs */
	u32 bytes_per_pixel; /* bytes per pixel display + overlay */
	u32 lb_size;       /* line buffer allocated to pipe */
	u32 vtaps;         /* vertical scaler taps */
};
1951 
1952 static u32 evergreen_dram_bandwidth(struct evergreen_wm_params *wm)
1953 {
1954 	/* Calculate DRAM Bandwidth and the part allocated to display. */
1955 	fixed20_12 dram_efficiency; /* 0.7 */
1956 	fixed20_12 yclk, dram_channels, bandwidth;
1957 	fixed20_12 a;
1958 
1959 	a.full = dfixed_const(1000);
1960 	yclk.full = dfixed_const(wm->yclk);
1961 	yclk.full = dfixed_div(yclk, a);
1962 	dram_channels.full = dfixed_const(wm->dram_channels * 4);
1963 	a.full = dfixed_const(10);
1964 	dram_efficiency.full = dfixed_const(7);
1965 	dram_efficiency.full = dfixed_div(dram_efficiency, a);
1966 	bandwidth.full = dfixed_mul(dram_channels, yclk);
1967 	bandwidth.full = dfixed_mul(bandwidth, dram_efficiency);
1968 
1969 	return dfixed_trunc(bandwidth);
1970 }
1971 
1972 static u32 evergreen_dram_bandwidth_for_display(struct evergreen_wm_params *wm)
1973 {
1974 	/* Calculate DRAM Bandwidth and the part allocated to display. */
1975 	fixed20_12 disp_dram_allocation; /* 0.3 to 0.7 */
1976 	fixed20_12 yclk, dram_channels, bandwidth;
1977 	fixed20_12 a;
1978 
1979 	a.full = dfixed_const(1000);
1980 	yclk.full = dfixed_const(wm->yclk);
1981 	yclk.full = dfixed_div(yclk, a);
1982 	dram_channels.full = dfixed_const(wm->dram_channels * 4);
1983 	a.full = dfixed_const(10);
1984 	disp_dram_allocation.full = dfixed_const(3); /* XXX worse case value 0.3 */
1985 	disp_dram_allocation.full = dfixed_div(disp_dram_allocation, a);
1986 	bandwidth.full = dfixed_mul(dram_channels, yclk);
1987 	bandwidth.full = dfixed_mul(bandwidth, disp_dram_allocation);
1988 
1989 	return dfixed_trunc(bandwidth);
1990 }
1991 
1992 static u32 evergreen_data_return_bandwidth(struct evergreen_wm_params *wm)
1993 {
1994 	/* Calculate the display Data return Bandwidth */
1995 	fixed20_12 return_efficiency; /* 0.8 */
1996 	fixed20_12 sclk, bandwidth;
1997 	fixed20_12 a;
1998 
1999 	a.full = dfixed_const(1000);
2000 	sclk.full = dfixed_const(wm->sclk);
2001 	sclk.full = dfixed_div(sclk, a);
2002 	a.full = dfixed_const(10);
2003 	return_efficiency.full = dfixed_const(8);
2004 	return_efficiency.full = dfixed_div(return_efficiency, a);
2005 	a.full = dfixed_const(32);
2006 	bandwidth.full = dfixed_mul(a, sclk);
2007 	bandwidth.full = dfixed_mul(bandwidth, return_efficiency);
2008 
2009 	return dfixed_trunc(bandwidth);
2010 }
2011 
2012 static u32 evergreen_dmif_request_bandwidth(struct evergreen_wm_params *wm)
2013 {
2014 	/* Calculate the DMIF Request Bandwidth */
2015 	fixed20_12 disp_clk_request_efficiency; /* 0.8 */
2016 	fixed20_12 disp_clk, bandwidth;
2017 	fixed20_12 a;
2018 
2019 	a.full = dfixed_const(1000);
2020 	disp_clk.full = dfixed_const(wm->disp_clk);
2021 	disp_clk.full = dfixed_div(disp_clk, a);
2022 	a.full = dfixed_const(10);
2023 	disp_clk_request_efficiency.full = dfixed_const(8);
2024 	disp_clk_request_efficiency.full = dfixed_div(disp_clk_request_efficiency, a);
2025 	a.full = dfixed_const(32);
2026 	bandwidth.full = dfixed_mul(a, disp_clk);
2027 	bandwidth.full = dfixed_mul(bandwidth, disp_clk_request_efficiency);
2028 
2029 	return dfixed_trunc(bandwidth);
2030 }
2031 
2032 static u32 evergreen_available_bandwidth(struct evergreen_wm_params *wm)
2033 {
2034 	/* Calculate the Available bandwidth. Display can use this temporarily but not in average. */
2035 	u32 dram_bandwidth = evergreen_dram_bandwidth(wm);
2036 	u32 data_return_bandwidth = evergreen_data_return_bandwidth(wm);
2037 	u32 dmif_req_bandwidth = evergreen_dmif_request_bandwidth(wm);
2038 
2039 	return min(dram_bandwidth, min(data_return_bandwidth, dmif_req_bandwidth));
2040 }
2041 
2042 static u32 evergreen_average_bandwidth(struct evergreen_wm_params *wm)
2043 {
2044 	/* Calculate the display mode Average Bandwidth
2045 	 * DisplayMode should contain the source and destination dimensions,
2046 	 * timing, etc.
2047 	 */
2048 	fixed20_12 bpp;
2049 	fixed20_12 line_time;
2050 	fixed20_12 src_width;
2051 	fixed20_12 bandwidth;
2052 	fixed20_12 a;
2053 
2054 	a.full = dfixed_const(1000);
2055 	line_time.full = dfixed_const(wm->active_time + wm->blank_time);
2056 	line_time.full = dfixed_div(line_time, a);
2057 	bpp.full = dfixed_const(wm->bytes_per_pixel);
2058 	src_width.full = dfixed_const(wm->src_width);
2059 	bandwidth.full = dfixed_mul(src_width, bpp);
2060 	bandwidth.full = dfixed_mul(bandwidth, wm->vsc);
2061 	bandwidth.full = dfixed_div(bandwidth, line_time);
2062 
2063 	return dfixed_trunc(bandwidth);
2064 }
2065 
/* evergreen_latency_watermark - compute the latency watermark in ns
 *
 * Sums the MC latency, the time other heads can hold the data-return path,
 * and the dc pipe latency; if the line buffer cannot be filled within the
 * active time, the shortfall is added on top.  Returns 0 when no heads are
 * active.
 */
static u32 evergreen_latency_watermark(struct evergreen_wm_params *wm)
{
	/* First calculate the latency in ns */
	u32 mc_latency = 2000; /* 2000 ns. */
	/* NOTE(review): the divisions below happen before the num_heads == 0
	 * guard; they rely on available_bandwidth/disp_clk being non-zero for
	 * any populated wm — confirm callers always fill those in. */
	u32 available_bandwidth = evergreen_available_bandwidth(wm);
	u32 worst_chunk_return_time = (512 * 8 * 1000) / available_bandwidth;
	u32 cursor_line_pair_return_time = (128 * 4 * 1000) / available_bandwidth;
	u32 dc_latency = 40000000 / wm->disp_clk; /* dc pipe latency */
	u32 other_heads_data_return_time = ((wm->num_heads + 1) * worst_chunk_return_time) +
		(wm->num_heads * cursor_line_pair_return_time);
	u32 latency = mc_latency + other_heads_data_return_time + dc_latency;
	u32 max_src_lines_per_dst_line, lb_fill_bw, line_fill_time;
	fixed20_12 a, b, c;

	if (wm->num_heads == 0)
		return 0;

	/* heavy downscaling, many vtaps or interlaced scaling needs up to
	 * 4 source lines per destination line; otherwise 2 suffice */
	a.full = dfixed_const(2);
	b.full = dfixed_const(1);
	if ((wm->vsc.full > a.full) ||
	    ((wm->vsc.full > b.full) && (wm->vtaps >= 3)) ||
	    (wm->vtaps >= 5) ||
	    ((wm->vsc.full >= a.full) && wm->interlaced))
		max_src_lines_per_dst_line = 4;
	else
		max_src_lines_per_dst_line = 2;

	/* lb fill rate: the lesser of this head's bandwidth share and
	 * disp_clk (MHz) * bytes_per_pixel */
	a.full = dfixed_const(available_bandwidth);
	b.full = dfixed_const(wm->num_heads);
	a.full = dfixed_div(a, b);

	b.full = dfixed_const(1000);
	c.full = dfixed_const(wm->disp_clk);
	b.full = dfixed_div(c, b);
	c.full = dfixed_const(wm->bytes_per_pixel);
	b.full = dfixed_mul(b, c);

	lb_fill_bw = min(dfixed_trunc(a), dfixed_trunc(b));

	/* time to fill the worst-case number of source lines at that rate */
	a.full = dfixed_const(max_src_lines_per_dst_line * wm->src_width * wm->bytes_per_pixel);
	b.full = dfixed_const(1000);
	c.full = dfixed_const(lb_fill_bw);
	b.full = dfixed_div(c, b);
	a.full = dfixed_div(a, b);
	line_fill_time = dfixed_trunc(a);

	if (line_fill_time < wm->active_time)
		return latency;
	else
		return latency + (line_fill_time - wm->active_time);

}
2118 
2119 static bool evergreen_average_bandwidth_vs_dram_bandwidth_for_display(struct evergreen_wm_params *wm)
2120 {
2121 	if (evergreen_average_bandwidth(wm) <=
2122 	    (evergreen_dram_bandwidth_for_display(wm) / wm->num_heads))
2123 		return true;
2124 	else
2125 		return false;
2126 };
2127 
2128 static bool evergreen_average_bandwidth_vs_available_bandwidth(struct evergreen_wm_params *wm)
2129 {
2130 	if (evergreen_average_bandwidth(wm) <=
2131 	    (evergreen_available_bandwidth(wm) / wm->num_heads))
2132 		return true;
2133 	else
2134 		return false;
2135 };
2136 
2137 static bool evergreen_check_latency_hiding(struct evergreen_wm_params *wm)
2138 {
2139 	u32 lb_partitions = wm->lb_size / wm->src_width;
2140 	u32 line_time = wm->active_time + wm->blank_time;
2141 	u32 latency_tolerant_lines;
2142 	u32 latency_hiding;
2143 	fixed20_12 a;
2144 
2145 	a.full = dfixed_const(1);
2146 	if (wm->vsc.full > a.full)
2147 		latency_tolerant_lines = 1;
2148 	else {
2149 		if (lb_partitions <= (wm->vtaps + 1))
2150 			latency_tolerant_lines = 1;
2151 		else
2152 			latency_tolerant_lines = 2;
2153 	}
2154 
2155 	latency_hiding = (latency_tolerant_lines * line_time + wm->blank_time);
2156 
2157 	if (evergreen_latency_watermark(wm) <= latency_hiding)
2158 		return true;
2159 	else
2160 		return false;
2161 }
2162 
/* evergreen_program_watermarks - program display watermarks for one crtc
 *
 * Builds high- and low-clock watermark parameter sets for @radeon_crtc,
 * derives latency watermarks A/B and priority marks from them, then
 * programs the pipe arbitration/latency registers and the crtc priority
 * registers.  The computed line_time and watermarks are cached on the crtc
 * for later use by DPM.
 */
static void evergreen_program_watermarks(struct radeon_device *rdev,
					 struct radeon_crtc *radeon_crtc,
					 u32 lb_size, u32 num_heads)
{
	struct drm_display_mode *mode = &radeon_crtc->base.mode;
	struct evergreen_wm_params wm_low, wm_high;
	u32 dram_channels;
	u32 pixel_period;
	u32 line_time = 0;
	u32 latency_watermark_a = 0, latency_watermark_b = 0;
	u32 priority_a_mark = 0, priority_b_mark = 0;
	u32 priority_a_cnt = PRIORITY_OFF;
	u32 priority_b_cnt = PRIORITY_OFF;
	u32 pipe_offset = radeon_crtc->crtc_id * 16;
	u32 tmp, arb_control3;
	fixed20_12 a, b, c;

	if (radeon_crtc->base.enabled && num_heads && mode) {
		/* mode->clock is in kHz, so pixel_period is in ns */
		pixel_period = 1000000 / (u32)mode->clock;
		line_time = min((u32)mode->crtc_htotal * pixel_period, (u32)65535);
		priority_a_cnt = 0;
		priority_b_cnt = 0;
		dram_channels = evergreen_get_number_of_dram_channels(rdev);

		/* watermark for high clocks */
		if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) {
			wm_high.yclk =
				radeon_dpm_get_mclk(rdev, false) * 10;
			wm_high.sclk =
				radeon_dpm_get_sclk(rdev, false) * 10;
		} else {
			wm_high.yclk = rdev->pm.current_mclk * 10;
			wm_high.sclk = rdev->pm.current_sclk * 10;
		}

		wm_high.disp_clk = mode->clock;
		wm_high.src_width = mode->crtc_hdisplay;
		wm_high.active_time = mode->crtc_hdisplay * pixel_period;
		wm_high.blank_time = line_time - wm_high.active_time;
		wm_high.interlaced = false;
		if (mode->flags & DRM_MODE_FLAG_INTERLACE)
			wm_high.interlaced = true;
		wm_high.vsc = radeon_crtc->vsc;
		wm_high.vtaps = 1;
		if (radeon_crtc->rmx_type != RMX_OFF)
			wm_high.vtaps = 2;
		wm_high.bytes_per_pixel = 4; /* XXX: get this from fb config */
		wm_high.lb_size = lb_size;
		wm_high.dram_channels = dram_channels;
		wm_high.num_heads = num_heads;

		/* watermark for low clocks */
		if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) {
			wm_low.yclk =
				radeon_dpm_get_mclk(rdev, true) * 10;
			wm_low.sclk =
				radeon_dpm_get_sclk(rdev, true) * 10;
		} else {
			wm_low.yclk = rdev->pm.current_mclk * 10;
			wm_low.sclk = rdev->pm.current_sclk * 10;
		}

		wm_low.disp_clk = mode->clock;
		wm_low.src_width = mode->crtc_hdisplay;
		wm_low.active_time = mode->crtc_hdisplay * pixel_period;
		wm_low.blank_time = line_time - wm_low.active_time;
		wm_low.interlaced = false;
		if (mode->flags & DRM_MODE_FLAG_INTERLACE)
			wm_low.interlaced = true;
		wm_low.vsc = radeon_crtc->vsc;
		wm_low.vtaps = 1;
		if (radeon_crtc->rmx_type != RMX_OFF)
			wm_low.vtaps = 2;
		wm_low.bytes_per_pixel = 4; /* XXX: get this from fb config */
		wm_low.lb_size = lb_size;
		wm_low.dram_channels = dram_channels;
		wm_low.num_heads = num_heads;

		/* set for high clocks */
		latency_watermark_a = min(evergreen_latency_watermark(&wm_high), (u32)65535);
		/* set for low clocks */
		latency_watermark_b = min(evergreen_latency_watermark(&wm_low), (u32)65535);

		/* possibly force display priority to high */
		/* should really do this at mode validation time... */
		if (!evergreen_average_bandwidth_vs_dram_bandwidth_for_display(&wm_high) ||
		    !evergreen_average_bandwidth_vs_available_bandwidth(&wm_high) ||
		    !evergreen_check_latency_hiding(&wm_high) ||
		    (rdev->disp_priority == 2)) {
			DRM_DEBUG_KMS("force priority a to high\n");
			priority_a_cnt |= PRIORITY_ALWAYS_ON;
		}
		if (!evergreen_average_bandwidth_vs_dram_bandwidth_for_display(&wm_low) ||
		    !evergreen_average_bandwidth_vs_available_bandwidth(&wm_low) ||
		    !evergreen_check_latency_hiding(&wm_low) ||
		    (rdev->disp_priority == 2)) {
			DRM_DEBUG_KMS("force priority b to high\n");
			priority_b_cnt |= PRIORITY_ALWAYS_ON;
		}

		/* scale watermark A by pixel clock and horizontal scale,
		 * final divide by 16 yields the priority mark units */
		a.full = dfixed_const(1000);
		b.full = dfixed_const(mode->clock);
		b.full = dfixed_div(b, a);
		c.full = dfixed_const(latency_watermark_a);
		c.full = dfixed_mul(c, b);
		c.full = dfixed_mul(c, radeon_crtc->hsc);
		c.full = dfixed_div(c, a);
		a.full = dfixed_const(16);
		c.full = dfixed_div(c, a);
		priority_a_mark = dfixed_trunc(c);
		priority_a_cnt |= priority_a_mark & PRIORITY_MARK_MASK;

		/* same computation for watermark B */
		a.full = dfixed_const(1000);
		b.full = dfixed_const(mode->clock);
		b.full = dfixed_div(b, a);
		c.full = dfixed_const(latency_watermark_b);
		c.full = dfixed_mul(c, b);
		c.full = dfixed_mul(c, radeon_crtc->hsc);
		c.full = dfixed_div(c, a);
		a.full = dfixed_const(16);
		c.full = dfixed_div(c, a);
		priority_b_mark = dfixed_trunc(c);
		priority_b_cnt |= priority_b_mark & PRIORITY_MARK_MASK;
	}

	/* select wm A */
	arb_control3 = RREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset);
	tmp = arb_control3;
	tmp &= ~LATENCY_WATERMARK_MASK(3);
	tmp |= LATENCY_WATERMARK_MASK(1);
	WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, tmp);
	WREG32(PIPE0_LATENCY_CONTROL + pipe_offset,
	       (LATENCY_LOW_WATERMARK(latency_watermark_a) |
		LATENCY_HIGH_WATERMARK(line_time)));
	/* select wm B */
	tmp = RREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset);
	tmp &= ~LATENCY_WATERMARK_MASK(3);
	tmp |= LATENCY_WATERMARK_MASK(2);
	WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, tmp);
	WREG32(PIPE0_LATENCY_CONTROL + pipe_offset,
	       (LATENCY_LOW_WATERMARK(latency_watermark_b) |
		LATENCY_HIGH_WATERMARK(line_time)));
	/* restore original selection */
	WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, arb_control3);

	/* write the priority marks */
	WREG32(PRIORITY_A_CNT + radeon_crtc->crtc_offset, priority_a_cnt);
	WREG32(PRIORITY_B_CNT + radeon_crtc->crtc_offset, priority_b_cnt);

	/* save values for DPM */
	radeon_crtc->line_time = line_time;
	radeon_crtc->wm_high = latency_watermark_a;
	radeon_crtc->wm_low = latency_watermark_b;
}
2317 
2318 /**
2319  * evergreen_bandwidth_update - update display watermarks callback.
2320  *
2321  * @rdev: radeon_device pointer
2322  *
2323  * Update the display watermarks based on the requested mode(s)
2324  * (evergreen+).
2325  */
2326 void evergreen_bandwidth_update(struct radeon_device *rdev)
2327 {
2328 	struct drm_display_mode *mode0 = NULL;
2329 	struct drm_display_mode *mode1 = NULL;
2330 	u32 num_heads = 0, lb_size;
2331 	int i;
2332 
2333 	if (!rdev->mode_info.mode_config_initialized)
2334 		return;
2335 
2336 	radeon_update_display_priority(rdev);
2337 
2338 	for (i = 0; i < rdev->num_crtc; i++) {
2339 		if (rdev->mode_info.crtcs[i]->base.enabled)
2340 			num_heads++;
2341 	}
2342 	for (i = 0; i < rdev->num_crtc; i += 2) {
2343 		mode0 = &rdev->mode_info.crtcs[i]->base.mode;
2344 		mode1 = &rdev->mode_info.crtcs[i+1]->base.mode;
2345 		lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i], mode0, mode1);
2346 		evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i], lb_size, num_heads);
2347 		lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i+1], mode1, mode0);
2348 		evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i+1], lb_size, num_heads);
2349 	}
2350 }
2351 
2352 /**
2353  * evergreen_mc_wait_for_idle - wait for MC idle callback.
2354  *
2355  * @rdev: radeon_device pointer
2356  *
2357  * Wait for the MC (memory controller) to be idle.
2358  * (evergreen+).
2359  * Returns 0 if the MC is idle, -1 if not.
2360  */
2361 int evergreen_mc_wait_for_idle(struct radeon_device *rdev)
2362 {
2363 	unsigned i;
2364 	u32 tmp;
2365 
2366 	for (i = 0; i < rdev->usec_timeout; i++) {
2367 		/* read MC_STATUS */
2368 		tmp = RREG32(SRBM_STATUS) & 0x1F00;
2369 		if (!tmp)
2370 			return 0;
2371 		udelay(1);
2372 	}
2373 	return -1;
2374 }
2375 
2376 /*
2377  * GART
2378  */
/* evergreen_pcie_gart_tlb_flush - flush the PCIE GART TLB
 *
 * Flushes HDP write combining, then issues a VM context0 request and polls
 * the response field until the hardware answers (response type 2 logs a
 * warning) or rdev->usec_timeout microseconds elapse.
 */
void evergreen_pcie_gart_tlb_flush(struct radeon_device *rdev)
{
	unsigned i;
	u32 tmp;

	/* make HDP-combined writes visible before kicking the VM request */
	WREG32(HDP_MEM_COHERENCY_FLUSH_CNTL, 0x1);

	WREG32(VM_CONTEXT0_REQUEST_RESPONSE, REQUEST_TYPE(1));
	for (i = 0; i < rdev->usec_timeout; i++) {
		/* poll the response type field */
		tmp = RREG32(VM_CONTEXT0_REQUEST_RESPONSE);
		tmp = (tmp & RESPONSE_TYPE_MASK) >> RESPONSE_TYPE_SHIFT;
		if (tmp == 2) {
			/* NOTE(review): message still says "r600" although
			 * this is the evergreen path; text inherited from the
			 * r600 code this was derived from */
			printk(KERN_WARNING "[drm] r600 flush TLB failed\n");
			return;
		}
		if (tmp) {
			/* any other non-zero response: flush completed */
			return;
		}
		udelay(1);
	}
}
2401 
/* evergreen_pcie_gart_enable - bring up the PCIE GART
 *
 * Pins the GART page table in VRAM, programs the VM L2 cache and the
 * MD/MB L1 TLB controls, sets up the VM context0 aperture over the GTT
 * range, then flushes the TLB.  Returns 0 on success, -EINVAL if the page
 * table object is missing, or the error from pinning the table.
 */
static int evergreen_pcie_gart_enable(struct radeon_device *rdev)
{
	u32 tmp;
	int r;

	if (rdev->gart.robj == NULL) {
		dev_err(rdev->dev, "No VRAM object for PCIE GART.\n");
		return -EINVAL;
	}
	r = radeon_gart_table_vram_pin(rdev);
	if (r)
		return r;
	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
				ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control */
	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
		SYSTEM_ACCESS_MODE_NOT_IN_SYS |
		SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
		EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	/* IGP parts use the FUS_ variants of the MD TLB registers */
	if (rdev->flags & RADEON_IS_IGP) {
		WREG32(FUS_MC_VM_MD_L1_TLB0_CNTL, tmp);
		WREG32(FUS_MC_VM_MD_L1_TLB1_CNTL, tmp);
		WREG32(FUS_MC_VM_MD_L1_TLB2_CNTL, tmp);
	} else {
		WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
		WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
		WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
		/* only these families have a fourth MD TLB */
		if ((rdev->family == CHIP_JUNIPER) ||
		    (rdev->family == CHIP_CYPRESS) ||
		    (rdev->family == CHIP_HEMLOCK) ||
		    (rdev->family == CHIP_BARTS))
			WREG32(MC_VM_MD_L1_TLB3_CNTL, tmp);
	}
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	/* context0 covers the whole GTT range, backed by the pinned table */
	WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12);
	WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12);
	WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12);
	WREG32(VM_CONTEXT0_CNTL, ENABLE_CONTEXT | PAGE_TABLE_DEPTH(0) |
				RANGE_PROTECTION_FAULT_ENABLE_DEFAULT);
	/* out-of-range accesses land on the dummy page */
	WREG32(VM_CONTEXT0_PROTECTION_FAULT_DEFAULT_ADDR,
			(u32)(rdev->dummy_page.addr >> 12));
	WREG32(VM_CONTEXT1_CNTL, 0);

	evergreen_pcie_gart_tlb_flush(rdev);
	DRM_INFO("PCIE GART of %uM enabled (table at 0x%016llX).\n",
		 (unsigned)(rdev->mc.gtt_size >> 20),
		 (unsigned long long)rdev->gart.table_addr);
	rdev->gart.ready = true;
	return 0;
}
2459 
/* evergreen_pcie_gart_disable - turn off the PCIE GART
 *
 * Disables both VM contexts, reprograms the L2/L1 TLB controls without
 * their cache-enable bits, and unpins the page table.
 */
static void evergreen_pcie_gart_disable(struct radeon_device *rdev)
{
	u32 tmp;

	/* Disable all tables */
	WREG32(VM_CONTEXT0_CNTL, 0);
	WREG32(VM_CONTEXT1_CNTL, 0);

	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_FRAGMENT_PROCESSING |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control (no ENABLE_L1_TLB: L1 TLBs off) */
	tmp = EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	radeon_gart_table_vram_unpin(rdev);
}
2484 
/* evergreen_pcie_gart_fini - tear down the PCIE GART
 *
 * Disables the GART, frees its VRAM page table, then releases the common
 * GART state; order matters (hardware off before freeing the table).
 */
static void evergreen_pcie_gart_fini(struct radeon_device *rdev)
{
	evergreen_pcie_gart_disable(rdev);
	radeon_gart_table_vram_free(rdev);
	radeon_gart_fini(rdev);
}
2491 
2492 
/* evergreen_agp_enable - configure the VM block for AGP operation
 *
 * Programs the same L2/L1 TLB controls as the GART path but leaves both
 * VM contexts disabled (no page-table translation is set up).
 */
static void evergreen_agp_enable(struct radeon_device *rdev)
{
	u32 tmp;

	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
				ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control */
	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
		SYSTEM_ACCESS_MODE_NOT_IN_SYS |
		SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
		EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	/* both contexts off: no translation */
	WREG32(VM_CONTEXT0_CNTL, 0);
	WREG32(VM_CONTEXT1_CNTL, 0);
}
2518 
/* evergreen_mc_stop - quiesce the display side before MC reprogramming
 *
 * Saves VGA state, blanks/disables all active crtcs, blacks out the MC and
 * locks the double-buffered crtc registers.  @save records what was
 * enabled so evergreen_mc_resume() can restore it.  Note that the EFI
 * workaround below forces crtc_enabled[i] to false after fully disabling
 * the crtc, so the resume path skips those crtcs.
 */
void evergreen_mc_stop(struct radeon_device *rdev, struct evergreen_mc_save *save)
{
	u32 crtc_enabled, tmp, frame_count, blackout;
	int i, j;

	bzero(save, sizeof(*save));	/* avoid gcc warning */
	if (!ASIC_IS_NODCE(rdev)) {
		save->vga_render_control = RREG32(VGA_RENDER_CONTROL);
		save->vga_hdp_control = RREG32(VGA_HDP_CONTROL);

		/* disable VGA render */
		WREG32(VGA_RENDER_CONTROL, 0);
	}
	/* blank the display controllers */
	for (i = 0; i < rdev->num_crtc; i++) {
		crtc_enabled = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]) & EVERGREEN_CRTC_MASTER_EN;
		if (crtc_enabled) {
			save->crtc_enabled[i] = true;
			if (ASIC_IS_DCE6(rdev)) {
				/* DCE6 blanks via CRTC_BLANK_CONTROL */
				tmp = RREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i]);
				if (!(tmp & EVERGREEN_CRTC_BLANK_DATA_EN)) {
					radeon_wait_for_vblank(rdev, i);
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
					tmp |= EVERGREEN_CRTC_BLANK_DATA_EN;
					WREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i], tmp);
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
				}
			} else {
				/* pre-DCE6 disables display read requests instead */
				tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
				if (!(tmp & EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE)) {
					radeon_wait_for_vblank(rdev, i);
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
					tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
					WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
				}
			}
			/* wait for the next frame */
			frame_count = radeon_get_vblank_counter(rdev, i);
			for (j = 0; j < rdev->usec_timeout; j++) {
				if (radeon_get_vblank_counter(rdev, i) != frame_count)
					break;
				udelay(1);
			}

			/* XXX this is a hack to avoid strange behavior with EFI on certain systems */
			WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
			tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
			tmp &= ~EVERGREEN_CRTC_MASTER_EN;
			WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
			WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
			save->crtc_enabled[i] = false;
			/* ***** */
		} else {
			save->crtc_enabled[i] = false;
		}
	}

	radeon_mc_wait_for_idle(rdev);

	blackout = RREG32(MC_SHARED_BLACKOUT_CNTL);
	if ((blackout & BLACKOUT_MODE_MASK) != 1) {
		/* Block CPU access */
		WREG32(BIF_FB_EN, 0);
		/* blackout the MC */
		blackout &= ~BLACKOUT_MODE_MASK;
		WREG32(MC_SHARED_BLACKOUT_CNTL, blackout | 1);
	}
	/* wait for the MC to settle */
	udelay(100);

	/* lock double buffered regs */
	for (i = 0; i < rdev->num_crtc; i++) {
		if (save->crtc_enabled[i]) {
			tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
			if (!(tmp & EVERGREEN_GRPH_UPDATE_LOCK)) {
				tmp |= EVERGREEN_GRPH_UPDATE_LOCK;
				WREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i], tmp);
			}
			tmp = RREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i]);
			if (!(tmp & 1)) {
				tmp |= 1;
				WREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i], tmp);
			}
		}
	}
}
2606 
/* evergreen_mc_resume - restore the display side after MC reprogramming
 *
 * Repoints all crtc scanout bases at the (possibly relocated) start of
 * VRAM, unlocks the double-buffered registers, lifts the MC blackout,
 * re-enables CPU access and unblanks any crtc recorded as enabled in
 * @save by evergreen_mc_stop().
 */
void evergreen_mc_resume(struct radeon_device *rdev, struct evergreen_mc_save *save)
{
	u32 tmp, frame_count;
	int i, j;

	/* update crtc base addresses */
	for (i = 0; i < rdev->num_crtc; i++) {
		WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + crtc_offsets[i],
		       upper_32_bits(rdev->mc.vram_start));
		WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + crtc_offsets[i],
		       upper_32_bits(rdev->mc.vram_start));
		WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + crtc_offsets[i],
		       (u32)rdev->mc.vram_start);
		WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + crtc_offsets[i],
		       (u32)rdev->mc.vram_start);
	}

	if (!ASIC_IS_NODCE(rdev)) {
		WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS_HIGH, upper_32_bits(rdev->mc.vram_start));
		WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS, (u32)rdev->mc.vram_start);
	}

	/* unlock regs and wait for update */
	for (i = 0; i < rdev->num_crtc; i++) {
		if (save->crtc_enabled[i]) {
			tmp = RREG32(EVERGREEN_MASTER_UPDATE_MODE + crtc_offsets[i]);
			if ((tmp & 0x7) != 3) {
				tmp &= ~0x7;
				tmp |= 0x3;
				WREG32(EVERGREEN_MASTER_UPDATE_MODE + crtc_offsets[i], tmp);
			}
			tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
			if (tmp & EVERGREEN_GRPH_UPDATE_LOCK) {
				tmp &= ~EVERGREEN_GRPH_UPDATE_LOCK;
				WREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i], tmp);
			}
			tmp = RREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i]);
			if (tmp & 1) {
				tmp &= ~1;
				WREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i], tmp);
			}
			/* wait until the pending surface update has latched */
			for (j = 0; j < rdev->usec_timeout; j++) {
				tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
				if ((tmp & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING) == 0)
					break;
				udelay(1);
			}
		}
	}

	/* unblackout the MC */
	tmp = RREG32(MC_SHARED_BLACKOUT_CNTL);
	tmp &= ~BLACKOUT_MODE_MASK;
	WREG32(MC_SHARED_BLACKOUT_CNTL, tmp);
	/* allow CPU access */
	WREG32(BIF_FB_EN, FB_READ_EN | FB_WRITE_EN);

	for (i = 0; i < rdev->num_crtc; i++) {
		if (save->crtc_enabled[i]) {
			if (ASIC_IS_DCE6(rdev)) {
				tmp = RREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i]);
				tmp &= ~EVERGREEN_CRTC_BLANK_DATA_EN;
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
				WREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i], tmp);
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
			} else {
				tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
				tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
				WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
			}
			/* wait for the next frame */
			frame_count = radeon_get_vblank_counter(rdev, i);
			for (j = 0; j < rdev->usec_timeout; j++) {
				if (radeon_get_vblank_counter(rdev, i) != frame_count)
					break;
				udelay(1);
			}
		}
	}
	if (!ASIC_IS_NODCE(rdev)) {
		/* Unlock vga access */
		WREG32(VGA_HDP_CONTROL, save->vga_hdp_control);
		mdelay(1);
		WREG32(VGA_RENDER_CONTROL, save->vga_render_control);
	}
}
2695 
/* evergreen_mc_program - program the memory controller address map
 *
 * Stops the display (evergreen_mc_stop), programs the system aperture,
 * FB location, HDP non-surface range and AGP window, then resumes the
 * display (evergreen_mc_resume) and disables the VGA renderer so it
 * cannot overwrite driver-owned VRAM.
 */
void evergreen_mc_program(struct radeon_device *rdev)
{
	struct evergreen_mc_save save;
	u32 tmp;
	int i, j;

	/* Initialize HDP */
	for (i = 0, j = 0; i < 32; i++, j += 0x18) {
		WREG32((0x2c14 + j), 0x00000000);
		WREG32((0x2c18 + j), 0x00000000);
		WREG32((0x2c1c + j), 0x00000000);
		WREG32((0x2c20 + j), 0x00000000);
		WREG32((0x2c24 + j), 0x00000000);
	}
	WREG32(HDP_REG_COHERENCY_FLUSH_CNTL, 0);

	evergreen_mc_stop(rdev, &save);
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
	}
	/* Lockout access through VGA aperture*/
	WREG32(VGA_HDP_CONTROL, VGA_MEMORY_DISABLE);
	/* Update configuration */
	if (rdev->flags & RADEON_IS_AGP) {
		/* the system aperture must span both VRAM and the AGP window */
		if (rdev->mc.vram_start < rdev->mc.gtt_start) {
			/* VRAM before AGP */
			WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
				rdev->mc.vram_start >> 12);
			WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
				rdev->mc.gtt_end >> 12);
		} else {
			/* VRAM after AGP */
			WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
				rdev->mc.gtt_start >> 12);
			WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
				rdev->mc.vram_end >> 12);
		}
	} else {
		WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
			rdev->mc.vram_start >> 12);
		WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
			rdev->mc.vram_end >> 12);
	}
	WREG32(MC_VM_SYSTEM_APERTURE_DEFAULT_ADDR, rdev->vram_scratch.gpu_addr >> 12);
	/* llano/ontario only */
	if ((rdev->family == CHIP_PALM) ||
	    (rdev->family == CHIP_SUMO) ||
	    (rdev->family == CHIP_SUMO2)) {
		tmp = RREG32(MC_FUS_VM_FB_OFFSET) & 0x000FFFFF;
		tmp |= ((rdev->mc.vram_end >> 20) & 0xF) << 24;
		tmp |= ((rdev->mc.vram_start >> 20) & 0xF) << 20;
		WREG32(MC_FUS_VM_FB_OFFSET, tmp);
	}
	/* FB_LOCATION packs the 16-bit start/end (in 16MB units) */
	tmp = ((rdev->mc.vram_end >> 24) & 0xFFFF) << 16;
	tmp |= ((rdev->mc.vram_start >> 24) & 0xFFFF);
	WREG32(MC_VM_FB_LOCATION, tmp);
	WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8));
	WREG32(HDP_NONSURFACE_INFO, (2 << 7) | (1 << 30));
	WREG32(HDP_NONSURFACE_SIZE, 0x3FFFFFFF);
	if (rdev->flags & RADEON_IS_AGP) {
		WREG32(MC_VM_AGP_TOP, rdev->mc.gtt_end >> 16);
		WREG32(MC_VM_AGP_BOT, rdev->mc.gtt_start >> 16);
		WREG32(MC_VM_AGP_BASE, rdev->mc.agp_base >> 22);
	} else {
		/* no AGP: program an empty window (BOT > TOP) */
		WREG32(MC_VM_AGP_BASE, 0);
		WREG32(MC_VM_AGP_TOP, 0x0FFFFFFF);
		WREG32(MC_VM_AGP_BOT, 0x0FFFFFFF);
	}
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
	}
	evergreen_mc_resume(rdev, &save);
	/* we need to own VRAM, so turn off the VGA renderer here
	 * to stop it overwriting our objects */
	rv515_vga_render_disable(rdev);
}
2772 
2773 /*
2774  * CP.
2775  */
/**
 * evergreen_ring_ib_execute - emit an indirect buffer on the gfx ring
 *
 * @rdev: radeon_device pointer
 * @ib: the IB to schedule
 *
 * Emits an INDIRECT_BUFFER packet on the gfx ring so the CP fetches and
 * executes the IB.  Before that it publishes the ring read pointer the CP
 * will have reached after this submission, either via the rptr save
 * register or via a MEM_WRITE to the writeback slot.
 */
void evergreen_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib)
{
	struct radeon_ring *ring = &rdev->ring[ib->ring];
	u32 next_rptr;

	/* set to DX10/11 mode */
	radeon_ring_write(ring, PACKET3(PACKET3_MODE_CONTROL, 0));
	radeon_ring_write(ring, 1);

	if (ring->rptr_save_reg) {
		/* 3 dwords for this SET_CONFIG_REG + 4 for the IB packet below */
		next_rptr = ring->wptr + 3 + 4;
		radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
		radeon_ring_write(ring, ((ring->rptr_save_reg -
					  PACKET3_SET_CONFIG_REG_START) >> 2));
		radeon_ring_write(ring, next_rptr);
	} else if (rdev->wb.enabled) {
		/* 5 dwords for this MEM_WRITE + 4 for the IB packet below */
		next_rptr = ring->wptr + 5 + 4;
		radeon_ring_write(ring, PACKET3(PACKET3_MEM_WRITE, 3));
		radeon_ring_write(ring, ring->next_rptr_gpu_addr & 0xfffffffc);
		radeon_ring_write(ring, (upper_32_bits(ring->next_rptr_gpu_addr) & 0xff) | (1 << 18));
		radeon_ring_write(ring, next_rptr);
		radeon_ring_write(ring, 0);
	}

	radeon_ring_write(ring, PACKET3(PACKET3_INDIRECT_BUFFER, 2));
	radeon_ring_write(ring,
#ifdef __BIG_ENDIAN
			  (2 << 0) |
#endif
			  (ib->gpu_addr & 0xFFFFFFFC));
	radeon_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFF);
	radeon_ring_write(ring, ib->length_dw);
}
2809 
2810 
/**
 * evergreen_cp_load_microcode - load the CP firmware into the GPU
 *
 * @rdev: radeon_device pointer
 *
 * Stops the CP, then streams the big-endian PFP (prefetch parser) and
 * ME (micro engine) firmware images into their respective ucode RAMs
 * and resets the ucode address registers.
 * Returns 0 on success, -EINVAL if the firmware has not been loaded.
 */
static int evergreen_cp_load_microcode(struct radeon_device *rdev)
{
	const __be32 *fw_data;
	int i;

	if (!rdev->me_fw || !rdev->pfp_fw)
		return -EINVAL;

	r700_cp_stop(rdev);
	/* no ring-buffer rptr updates while the ucode is being loaded */
	WREG32(CP_RB_CNTL,
#ifdef __BIG_ENDIAN
	       BUF_SWAP_32BIT |
#endif
	       RB_NO_UPDATE | RB_BLKSZ(15) | RB_BUFSZ(3));

	/* PFP ucode */
	fw_data = (const __be32 *)rdev->pfp_fw->data;
	WREG32(CP_PFP_UCODE_ADDR, 0);
	for (i = 0; i < EVERGREEN_PFP_UCODE_SIZE; i++)
		WREG32(CP_PFP_UCODE_DATA, be32_to_cpup(fw_data++));
	WREG32(CP_PFP_UCODE_ADDR, 0);

	/* ME ucode */
	fw_data = (const __be32 *)rdev->me_fw->data;
	WREG32(CP_ME_RAM_WADDR, 0);
	for (i = 0; i < EVERGREEN_PM4_UCODE_SIZE; i++)
		WREG32(CP_ME_RAM_DATA, be32_to_cpup(fw_data++));

	/* rewind the ucode address registers */
	WREG32(CP_PFP_UCODE_ADDR, 0);
	WREG32(CP_ME_RAM_WADDR, 0);
	WREG32(CP_ME_RAM_RADDR, 0);
	return 0;
}
2842 
/**
 * evergreen_cp_start - initialize the CP and emit the initial state
 *
 * @rdev: radeon_device pointer
 *
 * Emits the ME_INITIALIZE packet, un-halts the CP micro engines, then
 * emits the default clear-state context and a few initial register
 * writes on the gfx ring.
 * Returns 0 on success, negative error code if the ring cannot be locked.
 */
static int evergreen_cp_start(struct radeon_device *rdev)
{
	struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
	int r, i;
	uint32_t cp_me;

	r = radeon_ring_lock(rdev, ring, 7);
	if (r) {
		DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
		return r;
	}
	radeon_ring_write(ring, PACKET3(PACKET3_ME_INITIALIZE, 5));
	radeon_ring_write(ring, 0x1);
	radeon_ring_write(ring, 0x0);
	radeon_ring_write(ring, rdev->config.evergreen.max_hw_contexts - 1);
	radeon_ring_write(ring, PACKET3_ME_INITIALIZE_DEVICE_ID(1));
	radeon_ring_write(ring, 0);
	radeon_ring_write(ring, 0);
	radeon_ring_unlock_commit(rdev, ring, false);

	/* un-halt the CP micro engines */
	cp_me = 0xff;
	WREG32(CP_ME_CNTL, cp_me);

	/* default state dwords plus the fixed packets emitted below */
	r = radeon_ring_lock(rdev, ring, evergreen_default_size + 19);
	if (r) {
		DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
		return r;
	}

	/* setup clear context state */
	radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
	radeon_ring_write(ring, PACKET3_PREAMBLE_BEGIN_CLEAR_STATE);

	for (i = 0; i < evergreen_default_size; i++)
		radeon_ring_write(ring, evergreen_default_state[i]);

	radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
	radeon_ring_write(ring, PACKET3_PREAMBLE_END_CLEAR_STATE);

	/* set clear context state */
	radeon_ring_write(ring, PACKET3(PACKET3_CLEAR_STATE, 0));
	radeon_ring_write(ring, 0);

	/* SQ_VTX_BASE_VTX_LOC */
	radeon_ring_write(ring, 0xc0026f00);
	radeon_ring_write(ring, 0x00000000);
	radeon_ring_write(ring, 0x00000000);
	radeon_ring_write(ring, 0x00000000);

	/* Clear consts */
	radeon_ring_write(ring, 0xc0036f00);
	radeon_ring_write(ring, 0x00000bc4);
	radeon_ring_write(ring, 0xffffffff);
	radeon_ring_write(ring, 0xffffffff);
	radeon_ring_write(ring, 0xffffffff);

	radeon_ring_write(ring, 0xc0026900);
	radeon_ring_write(ring, 0x00000316);
	radeon_ring_write(ring, 0x0000000e); /* VGT_VERTEX_REUSE_BLOCK_CNTL */
	radeon_ring_write(ring, 0x00000010); /*  */

	radeon_ring_unlock_commit(rdev, ring, false);

	return 0;
}
2908 
2909 static int evergreen_cp_resume(struct radeon_device *rdev)
2910 {
2911 	struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
2912 	u32 tmp;
2913 	u32 rb_bufsz;
2914 	int r;
2915 
2916 	/* Reset cp; if cp is reset, then PA, SH, VGT also need to be reset */
2917 	WREG32(GRBM_SOFT_RESET, (SOFT_RESET_CP |
2918 				 SOFT_RESET_PA |
2919 				 SOFT_RESET_SH |
2920 				 SOFT_RESET_VGT |
2921 				 SOFT_RESET_SPI |
2922 				 SOFT_RESET_SX));
2923 	RREG32(GRBM_SOFT_RESET);
2924 	mdelay(15);
2925 	WREG32(GRBM_SOFT_RESET, 0);
2926 	RREG32(GRBM_SOFT_RESET);
2927 
2928 	/* Set ring buffer size */
2929 	rb_bufsz = order_base_2(ring->ring_size / 8);
2930 	tmp = (order_base_2(RADEON_GPU_PAGE_SIZE/8) << 8) | rb_bufsz;
2931 #ifdef __BIG_ENDIAN
2932 	tmp |= BUF_SWAP_32BIT;
2933 #endif
2934 	WREG32(CP_RB_CNTL, tmp);
2935 	WREG32(CP_SEM_WAIT_TIMER, 0x0);
2936 	WREG32(CP_SEM_INCOMPLETE_TIMER_CNTL, 0x0);
2937 
2938 	/* Set the write pointer delay */
2939 	WREG32(CP_RB_WPTR_DELAY, 0);
2940 
2941 	/* Initialize the ring buffer's read and write pointers */
2942 	WREG32(CP_RB_CNTL, tmp | RB_RPTR_WR_ENA);
2943 	WREG32(CP_RB_RPTR_WR, 0);
2944 	ring->wptr = 0;
2945 	WREG32(CP_RB_WPTR, ring->wptr);
2946 
2947 	/* set the wb address whether it's enabled or not */
2948 	WREG32(CP_RB_RPTR_ADDR,
2949 	       ((rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFFFFFFFC));
2950 	WREG32(CP_RB_RPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFF);
2951 	WREG32(SCRATCH_ADDR, ((rdev->wb.gpu_addr + RADEON_WB_SCRATCH_OFFSET) >> 8) & 0xFFFFFFFF);
2952 
2953 	if (rdev->wb.enabled)
2954 		WREG32(SCRATCH_UMSK, 0xff);
2955 	else {
2956 		tmp |= RB_NO_UPDATE;
2957 		WREG32(SCRATCH_UMSK, 0);
2958 	}
2959 
2960 	mdelay(1);
2961 	WREG32(CP_RB_CNTL, tmp);
2962 
2963 	WREG32(CP_RB_BASE, ring->gpu_addr >> 8);
2964 	WREG32(CP_DEBUG, (1 << 27) | (1 << 28));
2965 
2966 	evergreen_cp_start(rdev);
2967 	ring->ready = true;
2968 	r = radeon_ring_test(rdev, RADEON_RING_TYPE_GFX_INDEX, ring);
2969 	if (r) {
2970 		ring->ready = false;
2971 		return r;
2972 	}
2973 	return 0;
2974 }
2975 
2976 /*
2977  * Core functions
2978  */
/**
 * evergreen_gpu_init - configure the gfx engine
 *
 * @rdev: radeon_device pointer
 *
 * Sets the per-family gfx limits (pipes, SIMDs, render backends, FIFO
 * sizes, golden address config), derives the tiling config dword, works
 * out the disabled render-backend and SIMD bitmaps, and programs the
 * hardware defaults for the 3D engine (SQ/SX/PA/VGT/CB blocks).
 */
static void evergreen_gpu_init(struct radeon_device *rdev)
{
	u32 gb_addr_config;
	u32 mc_shared_chmap, mc_arb_ramcfg;
	u32 sx_debug_1;
	u32 smx_dc_ctl0;
	u32 sq_config;
	u32 sq_lds_resource_mgmt;
	u32 sq_gpr_resource_mgmt_1;
	u32 sq_gpr_resource_mgmt_2;
	u32 sq_gpr_resource_mgmt_3;
	u32 sq_thread_resource_mgmt;
	u32 sq_thread_resource_mgmt_2;
	u32 sq_stack_resource_mgmt_1;
	u32 sq_stack_resource_mgmt_2;
	u32 sq_stack_resource_mgmt_3;
	u32 vgt_cache_invalidation;
	u32 hdp_host_path_cntl, tmp;
	u32 disabled_rb_mask;
	int i, j, ps_thread_count;

	/* per-family gfx limits and golden GB_ADDR_CONFIG values */
	switch (rdev->family) {
	case CHIP_CYPRESS:
	case CHIP_HEMLOCK:
		rdev->config.evergreen.num_ses = 2;
		rdev->config.evergreen.max_pipes = 4;
		rdev->config.evergreen.max_tile_pipes = 8;
		rdev->config.evergreen.max_simds = 10;
		rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
		rdev->config.evergreen.max_gprs = 256;
		rdev->config.evergreen.max_threads = 248;
		rdev->config.evergreen.max_gs_threads = 32;
		rdev->config.evergreen.max_stack_entries = 512;
		rdev->config.evergreen.sx_num_of_sets = 4;
		rdev->config.evergreen.sx_max_export_size = 256;
		rdev->config.evergreen.sx_max_export_pos_size = 64;
		rdev->config.evergreen.sx_max_export_smx_size = 192;
		rdev->config.evergreen.max_hw_contexts = 8;
		rdev->config.evergreen.sq_num_cf_insts = 2;

		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
		gb_addr_config = CYPRESS_GB_ADDR_CONFIG_GOLDEN;
		break;
	case CHIP_JUNIPER:
		rdev->config.evergreen.num_ses = 1;
		rdev->config.evergreen.max_pipes = 4;
		rdev->config.evergreen.max_tile_pipes = 4;
		rdev->config.evergreen.max_simds = 10;
		rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
		rdev->config.evergreen.max_gprs = 256;
		rdev->config.evergreen.max_threads = 248;
		rdev->config.evergreen.max_gs_threads = 32;
		rdev->config.evergreen.max_stack_entries = 512;
		rdev->config.evergreen.sx_num_of_sets = 4;
		rdev->config.evergreen.sx_max_export_size = 256;
		rdev->config.evergreen.sx_max_export_pos_size = 64;
		rdev->config.evergreen.sx_max_export_smx_size = 192;
		rdev->config.evergreen.max_hw_contexts = 8;
		rdev->config.evergreen.sq_num_cf_insts = 2;

		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
		gb_addr_config = JUNIPER_GB_ADDR_CONFIG_GOLDEN;
		break;
	case CHIP_REDWOOD:
		rdev->config.evergreen.num_ses = 1;
		rdev->config.evergreen.max_pipes = 4;
		rdev->config.evergreen.max_tile_pipes = 4;
		rdev->config.evergreen.max_simds = 5;
		rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
		rdev->config.evergreen.max_gprs = 256;
		rdev->config.evergreen.max_threads = 248;
		rdev->config.evergreen.max_gs_threads = 32;
		rdev->config.evergreen.max_stack_entries = 256;
		rdev->config.evergreen.sx_num_of_sets = 4;
		rdev->config.evergreen.sx_max_export_size = 256;
		rdev->config.evergreen.sx_max_export_pos_size = 64;
		rdev->config.evergreen.sx_max_export_smx_size = 192;
		rdev->config.evergreen.max_hw_contexts = 8;
		rdev->config.evergreen.sq_num_cf_insts = 2;

		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
		gb_addr_config = REDWOOD_GB_ADDR_CONFIG_GOLDEN;
		break;
	case CHIP_CEDAR:
	default:
		rdev->config.evergreen.num_ses = 1;
		rdev->config.evergreen.max_pipes = 2;
		rdev->config.evergreen.max_tile_pipes = 2;
		rdev->config.evergreen.max_simds = 2;
		rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
		rdev->config.evergreen.max_gprs = 256;
		rdev->config.evergreen.max_threads = 192;
		rdev->config.evergreen.max_gs_threads = 16;
		rdev->config.evergreen.max_stack_entries = 256;
		rdev->config.evergreen.sx_num_of_sets = 4;
		rdev->config.evergreen.sx_max_export_size = 128;
		rdev->config.evergreen.sx_max_export_pos_size = 32;
		rdev->config.evergreen.sx_max_export_smx_size = 96;
		rdev->config.evergreen.max_hw_contexts = 4;
		rdev->config.evergreen.sq_num_cf_insts = 1;

		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
		gb_addr_config = CEDAR_GB_ADDR_CONFIG_GOLDEN;
		break;
	case CHIP_PALM:
		rdev->config.evergreen.num_ses = 1;
		rdev->config.evergreen.max_pipes = 2;
		rdev->config.evergreen.max_tile_pipes = 2;
		rdev->config.evergreen.max_simds = 2;
		rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
		rdev->config.evergreen.max_gprs = 256;
		rdev->config.evergreen.max_threads = 192;
		rdev->config.evergreen.max_gs_threads = 16;
		rdev->config.evergreen.max_stack_entries = 256;
		rdev->config.evergreen.sx_num_of_sets = 4;
		rdev->config.evergreen.sx_max_export_size = 128;
		rdev->config.evergreen.sx_max_export_pos_size = 32;
		rdev->config.evergreen.sx_max_export_smx_size = 96;
		rdev->config.evergreen.max_hw_contexts = 4;
		rdev->config.evergreen.sq_num_cf_insts = 1;

		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
		gb_addr_config = CEDAR_GB_ADDR_CONFIG_GOLDEN;
		break;
	case CHIP_SUMO:
		rdev->config.evergreen.num_ses = 1;
		rdev->config.evergreen.max_pipes = 4;
		rdev->config.evergreen.max_tile_pipes = 4;
		/* SIMD count differs between sumo variants (by PCI device id) */
		if (rdev->pdev->device == 0x9648)
			rdev->config.evergreen.max_simds = 3;
		else if ((rdev->pdev->device == 0x9647) ||
			 (rdev->pdev->device == 0x964a))
			rdev->config.evergreen.max_simds = 4;
		else
			rdev->config.evergreen.max_simds = 5;
		rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
		rdev->config.evergreen.max_gprs = 256;
		rdev->config.evergreen.max_threads = 248;
		rdev->config.evergreen.max_gs_threads = 32;
		rdev->config.evergreen.max_stack_entries = 256;
		rdev->config.evergreen.sx_num_of_sets = 4;
		rdev->config.evergreen.sx_max_export_size = 256;
		rdev->config.evergreen.sx_max_export_pos_size = 64;
		rdev->config.evergreen.sx_max_export_smx_size = 192;
		rdev->config.evergreen.max_hw_contexts = 8;
		rdev->config.evergreen.sq_num_cf_insts = 2;

		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
		gb_addr_config = SUMO_GB_ADDR_CONFIG_GOLDEN;
		break;
	case CHIP_SUMO2:
		rdev->config.evergreen.num_ses = 1;
		rdev->config.evergreen.max_pipes = 4;
		rdev->config.evergreen.max_tile_pipes = 4;
		rdev->config.evergreen.max_simds = 2;
		rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
		rdev->config.evergreen.max_gprs = 256;
		rdev->config.evergreen.max_threads = 248;
		rdev->config.evergreen.max_gs_threads = 32;
		rdev->config.evergreen.max_stack_entries = 512;
		rdev->config.evergreen.sx_num_of_sets = 4;
		rdev->config.evergreen.sx_max_export_size = 256;
		rdev->config.evergreen.sx_max_export_pos_size = 64;
		rdev->config.evergreen.sx_max_export_smx_size = 192;
		rdev->config.evergreen.max_hw_contexts = 4;
		rdev->config.evergreen.sq_num_cf_insts = 2;

		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
		gb_addr_config = SUMO2_GB_ADDR_CONFIG_GOLDEN;
		break;
	case CHIP_BARTS:
		rdev->config.evergreen.num_ses = 2;
		rdev->config.evergreen.max_pipes = 4;
		rdev->config.evergreen.max_tile_pipes = 8;
		rdev->config.evergreen.max_simds = 7;
		rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
		rdev->config.evergreen.max_gprs = 256;
		rdev->config.evergreen.max_threads = 248;
		rdev->config.evergreen.max_gs_threads = 32;
		rdev->config.evergreen.max_stack_entries = 512;
		rdev->config.evergreen.sx_num_of_sets = 4;
		rdev->config.evergreen.sx_max_export_size = 256;
		rdev->config.evergreen.sx_max_export_pos_size = 64;
		rdev->config.evergreen.sx_max_export_smx_size = 192;
		rdev->config.evergreen.max_hw_contexts = 8;
		rdev->config.evergreen.sq_num_cf_insts = 2;

		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
		gb_addr_config = BARTS_GB_ADDR_CONFIG_GOLDEN;
		break;
	case CHIP_TURKS:
		rdev->config.evergreen.num_ses = 1;
		rdev->config.evergreen.max_pipes = 4;
		rdev->config.evergreen.max_tile_pipes = 4;
		rdev->config.evergreen.max_simds = 6;
		rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
		rdev->config.evergreen.max_gprs = 256;
		rdev->config.evergreen.max_threads = 248;
		rdev->config.evergreen.max_gs_threads = 32;
		rdev->config.evergreen.max_stack_entries = 256;
		rdev->config.evergreen.sx_num_of_sets = 4;
		rdev->config.evergreen.sx_max_export_size = 256;
		rdev->config.evergreen.sx_max_export_pos_size = 64;
		rdev->config.evergreen.sx_max_export_smx_size = 192;
		rdev->config.evergreen.max_hw_contexts = 8;
		rdev->config.evergreen.sq_num_cf_insts = 2;

		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
		gb_addr_config = TURKS_GB_ADDR_CONFIG_GOLDEN;
		break;
	case CHIP_CAICOS:
		rdev->config.evergreen.num_ses = 1;
		rdev->config.evergreen.max_pipes = 2;
		rdev->config.evergreen.max_tile_pipes = 2;
		rdev->config.evergreen.max_simds = 2;
		rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
		rdev->config.evergreen.max_gprs = 256;
		rdev->config.evergreen.max_threads = 192;
		rdev->config.evergreen.max_gs_threads = 16;
		rdev->config.evergreen.max_stack_entries = 256;
		rdev->config.evergreen.sx_num_of_sets = 4;
		rdev->config.evergreen.sx_max_export_size = 128;
		rdev->config.evergreen.sx_max_export_pos_size = 32;
		rdev->config.evergreen.sx_max_export_smx_size = 96;
		rdev->config.evergreen.max_hw_contexts = 4;
		rdev->config.evergreen.sq_num_cf_insts = 1;

		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
		gb_addr_config = CAICOS_GB_ADDR_CONFIG_GOLDEN;
		break;
	}

	/* Initialize HDP */
	for (i = 0, j = 0; i < 32; i++, j += 0x18) {
		WREG32((0x2c14 + j), 0x00000000);
		WREG32((0x2c18 + j), 0x00000000);
		WREG32((0x2c1c + j), 0x00000000);
		WREG32((0x2c20 + j), 0x00000000);
		WREG32((0x2c24 + j), 0x00000000);
	}

	WREG32(GRBM_CNTL, GRBM_READ_TIMEOUT(0xff));

	evergreen_fix_pci_max_read_req_size(rdev);

	/* NOTE(review): mc_shared_chmap is read here but never used below */
	mc_shared_chmap = RREG32(MC_SHARED_CHMAP);
	/* fusion parts expose the RAM config via a different register */
	if ((rdev->family == CHIP_PALM) ||
	    (rdev->family == CHIP_SUMO) ||
	    (rdev->family == CHIP_SUMO2))
		mc_arb_ramcfg = RREG32(FUS_MC_ARB_RAMCFG);
	else
		mc_arb_ramcfg = RREG32(MC_ARB_RAMCFG);

	/* setup tiling info dword.  gb_addr_config is not adequate since it does
	 * not have bank info, so create a custom tiling dword.
	 * bits 3:0   num_pipes
	 * bits 7:4   num_banks
	 * bits 11:8  group_size
	 * bits 15:12 row_size
	 */
	rdev->config.evergreen.tile_config = 0;
	switch (rdev->config.evergreen.max_tile_pipes) {
	case 1:
	default:
		rdev->config.evergreen.tile_config |= (0 << 0);
		break;
	case 2:
		rdev->config.evergreen.tile_config |= (1 << 0);
		break;
	case 4:
		rdev->config.evergreen.tile_config |= (2 << 0);
		break;
	case 8:
		rdev->config.evergreen.tile_config |= (3 << 0);
		break;
	}
	/* num banks is 8 on all fusion asics. 0 = 4, 1 = 8, 2 = 16 */
	if (rdev->flags & RADEON_IS_IGP)
		rdev->config.evergreen.tile_config |= 1 << 4;
	else {
		switch ((mc_arb_ramcfg & NOOFBANK_MASK) >> NOOFBANK_SHIFT) {
		case 0: /* four banks */
			rdev->config.evergreen.tile_config |= 0 << 4;
			break;
		case 1: /* eight banks */
			rdev->config.evergreen.tile_config |= 1 << 4;
			break;
		case 2: /* sixteen banks */
		default:
			rdev->config.evergreen.tile_config |= 2 << 4;
			break;
		}
	}
	rdev->config.evergreen.tile_config |= 0 << 8;
	/* row size comes from GB_ADDR_CONFIG bits 29:28 */
	rdev->config.evergreen.tile_config |=
		((gb_addr_config & 0x30000000) >> 28) << 12;

	/* determine the disabled render-backend bitmap: from the eFuse
	 * straps on these families, else from CC_RB_BACKEND_DISABLE
	 * per shader engine */
	if ((rdev->family >= CHIP_CEDAR) && (rdev->family <= CHIP_HEMLOCK)) {
		u32 efuse_straps_4;
		u32 efuse_straps_3;

		efuse_straps_4 = RREG32_RCU(0x204);
		efuse_straps_3 = RREG32_RCU(0x203);
		tmp = (((efuse_straps_4 & 0xf) << 4) |
		      ((efuse_straps_3 & 0xf0000000) >> 28));
	} else {
		tmp = 0;
		for (i = (rdev->config.evergreen.num_ses - 1); i >= 0; i--) {
			u32 rb_disable_bitmap;

			WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
			WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
			rb_disable_bitmap = (RREG32(CC_RB_BACKEND_DISABLE) & 0x00ff0000) >> 16;
			tmp <<= 4;
			tmp |= rb_disable_bitmap;
		}
	}
	/* enabled rb are just the one not disabled :) */
	disabled_rb_mask = tmp;
	tmp = 0;
	for (i = 0; i < rdev->config.evergreen.max_backends; i++)
		tmp |= (1 << i);
	/* if all the backends are disabled, fix it up here */
	if ((disabled_rb_mask & tmp) == tmp) {
		for (i = 0; i < rdev->config.evergreen.max_backends; i++)
			disabled_rb_mask &= ~(1 << i);
	}

	/* gather the per-SE SIMD disable bitmaps to count active SIMDs */
	for (i = 0; i < rdev->config.evergreen.num_ses; i++) {
		u32 simd_disable_bitmap;

		WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
		WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
		simd_disable_bitmap = (RREG32(CC_GC_SHADER_PIPE_CONFIG) & 0xffff0000) >> 16;
		simd_disable_bitmap |= 0xffffffff << rdev->config.evergreen.max_simds;
		tmp <<= 16;
		tmp |= simd_disable_bitmap;
	}
	rdev->config.evergreen.active_simds = hweight32(~tmp);

	/* back to broadcast so subsequent writes hit all SEs/instances */
	WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_BROADCAST_WRITES);
	WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_BROADCAST_WRITES);

	WREG32(GB_ADDR_CONFIG, gb_addr_config);
	WREG32(DMIF_ADDR_CONFIG, gb_addr_config);
	WREG32(HDP_ADDR_CONFIG, gb_addr_config);
	WREG32(DMA_TILING_CONFIG, gb_addr_config);
	WREG32(UVD_UDEC_ADDR_CONFIG, gb_addr_config);
	WREG32(UVD_UDEC_DB_ADDR_CONFIG, gb_addr_config);
	WREG32(UVD_UDEC_DBW_ADDR_CONFIG, gb_addr_config);

	if ((rdev->config.evergreen.max_backends == 1) &&
	    (rdev->flags & RADEON_IS_IGP)) {
		if ((disabled_rb_mask & 3) == 1) {
			/* RB0 disabled, RB1 enabled */
			tmp = 0x11111111;
		} else {
			/* RB1 disabled, RB0 enabled */
			tmp = 0x00000000;
		}
	} else {
		tmp = gb_addr_config & NUM_PIPES_MASK;
		tmp = r6xx_remap_render_backend(rdev, tmp, rdev->config.evergreen.max_backends,
						EVERGREEN_MAX_BACKENDS, disabled_rb_mask);
	}
	WREG32(GB_BACKEND_MAP, tmp);

	WREG32(CGTS_SYS_TCC_DISABLE, 0);
	WREG32(CGTS_TCC_DISABLE, 0);
	WREG32(CGTS_USER_SYS_TCC_DISABLE, 0);
	WREG32(CGTS_USER_TCC_DISABLE, 0);

	/* set HW defaults for 3D engine */
	WREG32(CP_QUEUE_THRESHOLDS, (ROQ_IB1_START(0x16) |
				     ROQ_IB2_START(0x2b)));

	WREG32(CP_MEQ_THRESHOLDS, STQ_SPLIT(0x30));

	WREG32(TA_CNTL_AUX, (DISABLE_CUBE_ANISO |
			     SYNC_GRADIENT |
			     SYNC_WALKER |
			     SYNC_ALIGNER));

	sx_debug_1 = RREG32(SX_DEBUG_1);
	sx_debug_1 |= ENABLE_NEW_SMX_ADDRESS;
	WREG32(SX_DEBUG_1, sx_debug_1);


	smx_dc_ctl0 = RREG32(SMX_DC_CTL0);
	smx_dc_ctl0 &= ~NUMBER_OF_SETS(0x1ff);
	smx_dc_ctl0 |= NUMBER_OF_SETS(rdev->config.evergreen.sx_num_of_sets);
	WREG32(SMX_DC_CTL0, smx_dc_ctl0);

	if (rdev->family <= CHIP_SUMO2)
		WREG32(SMX_SAR_CTL0, 0x00010000);

	WREG32(SX_EXPORT_BUFFER_SIZES, (COLOR_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_size / 4) - 1) |
					POSITION_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_pos_size / 4) - 1) |
					SMX_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_smx_size / 4) - 1)));

	WREG32(PA_SC_FIFO_SIZE, (SC_PRIM_FIFO_SIZE(rdev->config.evergreen.sc_prim_fifo_size) |
				 SC_HIZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_hiz_tile_fifo_size) |
				 SC_EARLYZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_earlyz_tile_fifo_size)));

	WREG32(VGT_NUM_INSTANCES, 1);
	WREG32(SPI_CONFIG_CNTL, 0);
	WREG32(SPI_CONFIG_CNTL_1, VTX_DONE_DELAY(4));
	WREG32(CP_PERFMON_CNTL, 0);

	WREG32(SQ_MS_FIFO_SIZES, (CACHE_FIFO_SIZE(16 * rdev->config.evergreen.sq_num_cf_insts) |
				  FETCH_FIFO_HIWATER(0x4) |
				  DONE_FIFO_HIWATER(0xe0) |
				  ALU_UPDATE_FIFO_HIWATER(0x8)));

	/* shader stage priorities: PS highest, ES lowest */
	sq_config = RREG32(SQ_CONFIG);
	sq_config &= ~(PS_PRIO(3) |
		       VS_PRIO(3) |
		       GS_PRIO(3) |
		       ES_PRIO(3));
	sq_config |= (VC_ENABLE |
		      EXPORT_SRC_C |
		      PS_PRIO(0) |
		      VS_PRIO(1) |
		      GS_PRIO(2) |
		      ES_PRIO(3));

	switch (rdev->family) {
	case CHIP_CEDAR:
	case CHIP_PALM:
	case CHIP_SUMO:
	case CHIP_SUMO2:
	case CHIP_CAICOS:
		/* no vertex cache */
		sq_config &= ~VC_ENABLE;
		break;
	default:
		break;
	}

	sq_lds_resource_mgmt = RREG32(SQ_LDS_RESOURCE_MGMT);

	/* partition the GPRs (minus the clause temps) among the stages */
	sq_gpr_resource_mgmt_1 = NUM_PS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2))* 12 / 32);
	sq_gpr_resource_mgmt_1 |= NUM_VS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 6 / 32);
	sq_gpr_resource_mgmt_1 |= NUM_CLAUSE_TEMP_GPRS(4);
	sq_gpr_resource_mgmt_2 = NUM_GS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
	sq_gpr_resource_mgmt_2 |= NUM_ES_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
	sq_gpr_resource_mgmt_3 = NUM_HS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
	sq_gpr_resource_mgmt_3 |= NUM_LS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);

	switch (rdev->family) {
	case CHIP_CEDAR:
	case CHIP_PALM:
	case CHIP_SUMO:
	case CHIP_SUMO2:
		ps_thread_count = 96;
		break;
	default:
		ps_thread_count = 128;
		break;
	}

	/* remaining threads split evenly (in multiples of 8) across the
	 * other five stages */
	sq_thread_resource_mgmt = NUM_PS_THREADS(ps_thread_count);
	sq_thread_resource_mgmt |= NUM_VS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
	sq_thread_resource_mgmt |= NUM_GS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
	sq_thread_resource_mgmt |= NUM_ES_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
	sq_thread_resource_mgmt_2 = NUM_HS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
	sq_thread_resource_mgmt_2 |= NUM_LS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);

	/* stack entries split evenly across the six stages */
	sq_stack_resource_mgmt_1 = NUM_PS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
	sq_stack_resource_mgmt_1 |= NUM_VS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
	sq_stack_resource_mgmt_2 = NUM_GS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
	sq_stack_resource_mgmt_2 |= NUM_ES_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
	sq_stack_resource_mgmt_3 = NUM_HS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
	sq_stack_resource_mgmt_3 |= NUM_LS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);

	WREG32(SQ_CONFIG, sq_config);
	WREG32(SQ_GPR_RESOURCE_MGMT_1, sq_gpr_resource_mgmt_1);
	WREG32(SQ_GPR_RESOURCE_MGMT_2, sq_gpr_resource_mgmt_2);
	WREG32(SQ_GPR_RESOURCE_MGMT_3, sq_gpr_resource_mgmt_3);
	WREG32(SQ_THREAD_RESOURCE_MGMT, sq_thread_resource_mgmt);
	WREG32(SQ_THREAD_RESOURCE_MGMT_2, sq_thread_resource_mgmt_2);
	WREG32(SQ_STACK_RESOURCE_MGMT_1, sq_stack_resource_mgmt_1);
	WREG32(SQ_STACK_RESOURCE_MGMT_2, sq_stack_resource_mgmt_2);
	WREG32(SQ_STACK_RESOURCE_MGMT_3, sq_stack_resource_mgmt_3);
	WREG32(SQ_DYN_GPR_CNTL_PS_FLUSH_REQ, 0);
	WREG32(SQ_LDS_RESOURCE_MGMT, sq_lds_resource_mgmt);

	WREG32(PA_SC_FORCE_EOV_MAX_CNTS, (FORCE_EOV_MAX_CLK_CNT(4095) |
					  FORCE_EOV_MAX_REZ_CNT(255)));

	switch (rdev->family) {
	case CHIP_CEDAR:
	case CHIP_PALM:
	case CHIP_SUMO:
	case CHIP_SUMO2:
	case CHIP_CAICOS:
		vgt_cache_invalidation = CACHE_INVALIDATION(TC_ONLY);
		break;
	default:
		vgt_cache_invalidation = CACHE_INVALIDATION(VC_AND_TC);
		break;
	}
	vgt_cache_invalidation |= AUTO_INVLD_EN(ES_AND_GS_AUTO);
	WREG32(VGT_CACHE_INVALIDATION, vgt_cache_invalidation);

	WREG32(VGT_GS_VERTEX_REUSE, 16);
	WREG32(PA_SU_LINE_STIPPLE_VALUE, 0);
	WREG32(PA_SC_LINE_STIPPLE_STATE, 0);

	WREG32(VGT_VERTEX_REUSE_BLOCK_CNTL, 14);
	WREG32(VGT_OUT_DEALLOC_CNTL, 16);

	WREG32(CB_PERF_CTR0_SEL_0, 0);
	WREG32(CB_PERF_CTR0_SEL_1, 0);
	WREG32(CB_PERF_CTR1_SEL_0, 0);
	WREG32(CB_PERF_CTR1_SEL_1, 0);
	WREG32(CB_PERF_CTR2_SEL_0, 0);
	WREG32(CB_PERF_CTR2_SEL_1, 0);
	WREG32(CB_PERF_CTR3_SEL_0, 0);
	WREG32(CB_PERF_CTR3_SEL_1, 0);

	/* clear render buffer base addresses */
	WREG32(CB_COLOR0_BASE, 0);
	WREG32(CB_COLOR1_BASE, 0);
	WREG32(CB_COLOR2_BASE, 0);
	WREG32(CB_COLOR3_BASE, 0);
	WREG32(CB_COLOR4_BASE, 0);
	WREG32(CB_COLOR5_BASE, 0);
	WREG32(CB_COLOR6_BASE, 0);
	WREG32(CB_COLOR7_BASE, 0);
	WREG32(CB_COLOR8_BASE, 0);
	WREG32(CB_COLOR9_BASE, 0);
	WREG32(CB_COLOR10_BASE, 0);
	WREG32(CB_COLOR11_BASE, 0);

	/* set the shader const cache sizes to 0 */
	for (i = SQ_ALU_CONST_BUFFER_SIZE_PS_0; i < 0x28200; i += 4)
		WREG32(i, 0);
	for (i = SQ_ALU_CONST_BUFFER_SIZE_HS_0; i < 0x29000; i += 4)
		WREG32(i, 0);

	tmp = RREG32(HDP_MISC_CNTL);
	tmp |= HDP_FLUSH_INVALIDATE_CACHE;
	WREG32(HDP_MISC_CNTL, tmp);

	hdp_host_path_cntl = RREG32(HDP_HOST_PATH_CNTL);
	WREG32(HDP_HOST_PATH_CNTL, hdp_host_path_cntl);

	WREG32(PA_CL_ENHANCE, CLIP_VTX_REORDER_ENA | NUM_CLIP_SEQ(3));

	udelay(50);

}
3552 
/**
 * evergreen_mc_init - set up the memory controller driver state
 *
 * @rdev: radeon_device pointer
 *
 * Reads the memory configuration registers to determine VRAM width and
 * size, records the PCI aperture, and then lets the r700 helper place
 * the VRAM and GTT ranges in the GPU address space.
 * Returns 0 (cannot fail).
 */
int evergreen_mc_init(struct radeon_device *rdev)
{
	u32 tmp;
	int chansize, numchan;

	/* Get VRAM information */
	rdev->mc.vram_is_ddr = true;
	/* Fusion IGPs (PALM/SUMO/SUMO2) expose the RAM config in a
	 * different register than discrete parts.
	 */
	if ((rdev->family == CHIP_PALM) ||
	    (rdev->family == CHIP_SUMO) ||
	    (rdev->family == CHIP_SUMO2))
		tmp = RREG32(FUS_MC_ARB_RAMCFG);
	else
		tmp = RREG32(MC_ARB_RAMCFG);
	/* channel width in bits */
	if (tmp & CHANSIZE_OVERRIDE) {
		chansize = 16;
	} else if (tmp & CHANSIZE_MASK) {
		chansize = 64;
	} else {
		chansize = 32;
	}
	tmp = RREG32(MC_SHARED_CHMAP);
	/* decode the number of populated memory channels */
	switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
	case 0:
	default:
		numchan = 1;
		break;
	case 1:
		numchan = 2;
		break;
	case 2:
		numchan = 4;
		break;
	case 3:
		numchan = 8;
		break;
	}
	rdev->mc.vram_width = numchan * chansize;
	/* Could aper size report 0 ? */
	rdev->mc.aper_base = pci_resource_start(rdev->pdev, 0);
	rdev->mc.aper_size = pci_resource_len(rdev->pdev, 0);
	/* Setup GPU memory space */
	if ((rdev->family == CHIP_PALM) ||
	    (rdev->family == CHIP_SUMO) ||
	    (rdev->family == CHIP_SUMO2)) {
		/* size in bytes on fusion */
		rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE);
		rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE);
	} else {
		/* size in MB on evergreen/cayman/tn */
		rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL;
		rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL;
	}
	rdev->mc.visible_vram_size = rdev->mc.aper_size;
	r700_vram_gtt_location(rdev, &rdev->mc);
	radeon_update_bandwidth_info(rdev);

	return 0;
}
3611 
/**
 * evergreen_print_gpu_status_regs - dump GPU status registers to the log
 *
 * @rdev: radeon_device pointer
 *
 * Logs the GRBM/SRBM/CP/DMA status registers, used when diagnosing
 * hangs around a soft reset.  Cayman and newer have a second DMA
 * engine whose status register sits at a +0x800 offset.
 */
void evergreen_print_gpu_status_regs(struct radeon_device *rdev)
{
	dev_info(rdev->dev, "  GRBM_STATUS               = 0x%08X\n",
		RREG32(GRBM_STATUS));
	dev_info(rdev->dev, "  GRBM_STATUS_SE0           = 0x%08X\n",
		RREG32(GRBM_STATUS_SE0));
	dev_info(rdev->dev, "  GRBM_STATUS_SE1           = 0x%08X\n",
		RREG32(GRBM_STATUS_SE1));
	dev_info(rdev->dev, "  SRBM_STATUS               = 0x%08X\n",
		RREG32(SRBM_STATUS));
	dev_info(rdev->dev, "  SRBM_STATUS2              = 0x%08X\n",
		RREG32(SRBM_STATUS2));
	dev_info(rdev->dev, "  R_008674_CP_STALLED_STAT1 = 0x%08X\n",
		RREG32(CP_STALLED_STAT1));
	dev_info(rdev->dev, "  R_008678_CP_STALLED_STAT2 = 0x%08X\n",
		RREG32(CP_STALLED_STAT2));
	dev_info(rdev->dev, "  R_00867C_CP_BUSY_STAT     = 0x%08X\n",
		RREG32(CP_BUSY_STAT));
	dev_info(rdev->dev, "  R_008680_CP_STAT          = 0x%08X\n",
		RREG32(CP_STAT));
	dev_info(rdev->dev, "  R_00D034_DMA_STATUS_REG   = 0x%08X\n",
		RREG32(DMA_STATUS_REG));
	if (rdev->family >= CHIP_CAYMAN) {
		/* second DMA engine on cayman+ */
		dev_info(rdev->dev, "  R_00D834_DMA_STATUS_REG   = 0x%08X\n",
			 RREG32(DMA_STATUS_REG + 0x800));
	}
}
3639 
3640 bool evergreen_is_display_hung(struct radeon_device *rdev)
3641 {
3642 	u32 crtc_hung = 0;
3643 	u32 crtc_status[6];
3644 	u32 i, j, tmp;
3645 
3646 	for (i = 0; i < rdev->num_crtc; i++) {
3647 		if (RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]) & EVERGREEN_CRTC_MASTER_EN) {
3648 			crtc_status[i] = RREG32(EVERGREEN_CRTC_STATUS_HV_COUNT + crtc_offsets[i]);
3649 			crtc_hung |= (1 << i);
3650 		}
3651 	}
3652 
3653 	for (j = 0; j < 10; j++) {
3654 		for (i = 0; i < rdev->num_crtc; i++) {
3655 			if (crtc_hung & (1 << i)) {
3656 				tmp = RREG32(EVERGREEN_CRTC_STATUS_HV_COUNT + crtc_offsets[i]);
3657 				if (tmp != crtc_status[i])
3658 					crtc_hung &= ~(1 << i);
3659 			}
3660 		}
3661 		if (crtc_hung == 0)
3662 			return false;
3663 		udelay(100);
3664 	}
3665 
3666 	return true;
3667 }
3668 
/**
 * evergreen_gpu_check_soft_reset - decode which GPU engines look hung
 *
 * @rdev: radeon_device pointer
 *
 * Inspects the GRBM/SRBM/DMA/VM status registers and translates the
 * busy/pending bits into a RADEON_RESET_* mask describing which
 * engines a soft reset should target.  A busy MC is deliberately
 * dropped from the mask since it is usually just busy, not hung.
 * Returns the accumulated reset mask (0 means nothing looks hung).
 */
u32 evergreen_gpu_check_soft_reset(struct radeon_device *rdev)
{
	u32 reset_mask = 0;
	u32 tmp;

	/* GRBM_STATUS */
	tmp = RREG32(GRBM_STATUS);
	if (tmp & (PA_BUSY | SC_BUSY |
		   SH_BUSY | SX_BUSY |
		   TA_BUSY | VGT_BUSY |
		   DB_BUSY | CB_BUSY |
		   SPI_BUSY | VGT_BUSY_NO_DMA))
		reset_mask |= RADEON_RESET_GFX;

	if (tmp & (CF_RQ_PENDING | PF_RQ_PENDING |
		   CP_BUSY | CP_COHERENCY_BUSY))
		reset_mask |= RADEON_RESET_CP;

	if (tmp & GRBM_EE_BUSY)
		reset_mask |= RADEON_RESET_GRBM | RADEON_RESET_GFX | RADEON_RESET_CP;

	/* DMA_STATUS_REG */
	tmp = RREG32(DMA_STATUS_REG);
	if (!(tmp & DMA_IDLE))
		reset_mask |= RADEON_RESET_DMA;

	/* SRBM_STATUS2 */
	tmp = RREG32(SRBM_STATUS2);
	if (tmp & DMA_BUSY)
		reset_mask |= RADEON_RESET_DMA;

	/* SRBM_STATUS */
	tmp = RREG32(SRBM_STATUS);
	if (tmp & (RLC_RQ_PENDING | RLC_BUSY))
		reset_mask |= RADEON_RESET_RLC;

	if (tmp & IH_BUSY)
		reset_mask |= RADEON_RESET_IH;

	if (tmp & SEM_BUSY)
		reset_mask |= RADEON_RESET_SEM;

	if (tmp & GRBM_RQ_PENDING)
		reset_mask |= RADEON_RESET_GRBM;

	if (tmp & VMC_BUSY)
		reset_mask |= RADEON_RESET_VMC;

	if (tmp & (MCB_BUSY | MCB_NON_DISPLAY_BUSY |
		   MCC_BUSY | MCD_BUSY))
		reset_mask |= RADEON_RESET_MC;

	if (evergreen_is_display_hung(rdev))
		reset_mask |= RADEON_RESET_DISPLAY;

	/* VM_L2_STATUS */
	tmp = RREG32(VM_L2_STATUS);
	if (tmp & L2_BUSY)
		reset_mask |= RADEON_RESET_VMC;

	/* Skip MC reset as it's mostly likely not hung, just busy */
	if (reset_mask & RADEON_RESET_MC) {
		DRM_DEBUG("MC busy: 0x%08X, clearing.\n", reset_mask);
		reset_mask &= ~RADEON_RESET_MC;
	}

	return reset_mask;
}
3737 
/**
 * evergreen_gpu_soft_reset - soft-reset the engines named in the mask
 *
 * @rdev: radeon_device pointer
 * @reset_mask: RADEON_RESET_* flags selecting which engines to reset
 *
 * Halts the CP (and, if requested, the DMA ring), stops memory client
 * traffic, then translates the reset mask into GRBM/SRBM soft-reset
 * bits and pulses them: set, read back to post the write, delay, then
 * clear.  Finally restores MC state and dumps status registers again
 * so the before/after picture shows up in the log.
 */
static void evergreen_gpu_soft_reset(struct radeon_device *rdev, u32 reset_mask)
{
	struct evergreen_mc_save save;
	u32 grbm_soft_reset = 0, srbm_soft_reset = 0;
	u32 tmp;

	if (reset_mask == 0)
		return;

	dev_info(rdev->dev, "GPU softreset: 0x%08X\n", reset_mask);

	evergreen_print_gpu_status_regs(rdev);

	/* Disable CP parsing/prefetching */
	WREG32(CP_ME_CNTL, CP_ME_HALT | CP_PFP_HALT);

	if (reset_mask & RADEON_RESET_DMA) {
		/* Disable DMA */
		tmp = RREG32(DMA_RB_CNTL);
		tmp &= ~DMA_RB_ENABLE;
		WREG32(DMA_RB_CNTL, tmp);
	}

	udelay(50);

	/* quiesce memory traffic before touching the reset registers */
	evergreen_mc_stop(rdev, &save);
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
	}

	/* translate the reset mask into GRBM soft-reset bits */
	if (reset_mask & (RADEON_RESET_GFX | RADEON_RESET_COMPUTE)) {
		grbm_soft_reset |= SOFT_RESET_DB |
			SOFT_RESET_CB |
			SOFT_RESET_PA |
			SOFT_RESET_SC |
			SOFT_RESET_SPI |
			SOFT_RESET_SX |
			SOFT_RESET_SH |
			SOFT_RESET_TC |
			SOFT_RESET_TA |
			SOFT_RESET_VC |
			SOFT_RESET_VGT;
	}

	if (reset_mask & RADEON_RESET_CP) {
		grbm_soft_reset |= SOFT_RESET_CP |
			SOFT_RESET_VGT;

		srbm_soft_reset |= SOFT_RESET_GRBM;
	}

	/* translate the reset mask into SRBM soft-reset bits */
	if (reset_mask & RADEON_RESET_DMA)
		srbm_soft_reset |= SOFT_RESET_DMA;

	if (reset_mask & RADEON_RESET_DISPLAY)
		srbm_soft_reset |= SOFT_RESET_DC;

	if (reset_mask & RADEON_RESET_RLC)
		srbm_soft_reset |= SOFT_RESET_RLC;

	if (reset_mask & RADEON_RESET_SEM)
		srbm_soft_reset |= SOFT_RESET_SEM;

	if (reset_mask & RADEON_RESET_IH)
		srbm_soft_reset |= SOFT_RESET_IH;

	if (reset_mask & RADEON_RESET_GRBM)
		srbm_soft_reset |= SOFT_RESET_GRBM;

	if (reset_mask & RADEON_RESET_VMC)
		srbm_soft_reset |= SOFT_RESET_VMC;

	/* IGPs have no discrete memory controller to reset */
	if (!(rdev->flags & RADEON_IS_IGP)) {
		if (reset_mask & RADEON_RESET_MC)
			srbm_soft_reset |= SOFT_RESET_MC;
	}

	if (grbm_soft_reset) {
		tmp = RREG32(GRBM_SOFT_RESET);
		tmp |= grbm_soft_reset;
		dev_info(rdev->dev, "GRBM_SOFT_RESET=0x%08X\n", tmp);
		WREG32(GRBM_SOFT_RESET, tmp);
		/* read back to post the write before the delay */
		tmp = RREG32(GRBM_SOFT_RESET);

		udelay(50);

		tmp &= ~grbm_soft_reset;
		WREG32(GRBM_SOFT_RESET, tmp);
		tmp = RREG32(GRBM_SOFT_RESET);
	}

	if (srbm_soft_reset) {
		tmp = RREG32(SRBM_SOFT_RESET);
		tmp |= srbm_soft_reset;
		dev_info(rdev->dev, "SRBM_SOFT_RESET=0x%08X\n", tmp);
		WREG32(SRBM_SOFT_RESET, tmp);
		/* read back to post the write before the delay */
		tmp = RREG32(SRBM_SOFT_RESET);

		udelay(50);

		tmp &= ~srbm_soft_reset;
		WREG32(SRBM_SOFT_RESET, tmp);
		tmp = RREG32(SRBM_SOFT_RESET);
	}

	/* Wait a little for things to settle down */
	udelay(50);

	evergreen_mc_resume(rdev, &save);
	udelay(50);

	evergreen_print_gpu_status_regs(rdev);
}
3851 
/**
 * evergreen_gpu_pci_config_reset - reset the GPU via PCI config space
 *
 * @rdev: radeon_device pointer
 *
 * Heavier fallback used when a soft reset did not clear the hang.
 * Halts the CP, DMA and RLC, switches clocks to bypass mode, disables
 * bus mastering and MC access, then triggers a PCI config reset and
 * polls CONFIG_MEMSIZE until the ASIC responds again (all-ones reads
 * mean the device is still in reset).
 */
void evergreen_gpu_pci_config_reset(struct radeon_device *rdev)
{
	struct evergreen_mc_save save;
	u32 tmp, i;

	dev_info(rdev->dev, "GPU pci config reset\n");

	/* disable dpm? */

	/* Disable CP parsing/prefetching */
	WREG32(CP_ME_CNTL, CP_ME_HALT | CP_PFP_HALT);
	udelay(50);
	/* Disable DMA */
	tmp = RREG32(DMA_RB_CNTL);
	tmp &= ~DMA_RB_ENABLE;
	WREG32(DMA_RB_CNTL, tmp);
	/* XXX other engines? */

	/* halt the rlc */
	r600_rlc_stop(rdev);

	udelay(50);

	/* set mclk/sclk to bypass */
	rv770_set_clk_bypass_mode(rdev);
	/* disable BM */
	pci_disable_busmaster(rdev->pdev->dev);
	/* disable mem access */
	evergreen_mc_stop(rdev, &save);
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timed out !\n");
	}
	/* reset */
	radeon_pci_config_reset(rdev);
	/* wait for asic to come out of reset */
	for (i = 0; i < rdev->usec_timeout; i++) {
		if (RREG32(CONFIG_MEMSIZE) != 0xffffffff)
			break;
		udelay(1);
	}
}
3893 
/**
 * evergreen_asic_reset - top-level GPU reset entry point
 *
 * @rdev: radeon_device pointer
 *
 * Escalating reset strategy: try a targeted soft reset first and, if
 * engines still look hung afterwards (and hard resets are allowed via
 * the radeon_hard_reset module knob), fall back to a PCI config reset.
 * The BIOS scratch "engine hung" flag is raised while a hang is
 * detected and cleared once the GPU checks out clean.
 * Returns 0 (the final check determines the scratch flag, not rc).
 */
int evergreen_asic_reset(struct radeon_device *rdev)
{
	u32 reset_mask;

	reset_mask = evergreen_gpu_check_soft_reset(rdev);

	if (reset_mask)
		r600_set_bios_scratch_engine_hung(rdev, true);

	/* try soft reset */
	evergreen_gpu_soft_reset(rdev, reset_mask);

	reset_mask = evergreen_gpu_check_soft_reset(rdev);

	/* try pci config reset */
	if (reset_mask && radeon_hard_reset)
		evergreen_gpu_pci_config_reset(rdev);

	reset_mask = evergreen_gpu_check_soft_reset(rdev);

	if (!reset_mask)
		r600_set_bios_scratch_engine_hung(rdev, false);

	return 0;
}
3919 
3920 /**
3921  * evergreen_gfx_is_lockup - Check if the GFX engine is locked up
3922  *
3923  * @rdev: radeon_device pointer
3924  * @ring: radeon_ring structure holding ring information
3925  *
3926  * Check if the GFX engine is locked up.
3927  * Returns true if the engine appears to be locked up, false if not.
3928  */
3929 bool evergreen_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
3930 {
3931 	u32 reset_mask = evergreen_gpu_check_soft_reset(rdev);
3932 
3933 	if (!(reset_mask & (RADEON_RESET_GFX |
3934 			    RADEON_RESET_COMPUTE |
3935 			    RADEON_RESET_CP))) {
3936 		radeon_ring_lockup_update(rdev, ring);
3937 		return false;
3938 	}
3939 	return radeon_ring_test_lockup(rdev, ring);
3940 }
3941 
3942 /*
3943  * RLC
3944  */
3945 #define RLC_SAVE_RESTORE_LIST_END_MARKER    0x00000000
3946 #define RLC_CLEAR_STATE_END_MARKER          0x00000001
3947 
/**
 * sumo_rlc_fini - tear down the RLC buffer objects
 *
 * @rdev: radeon_device pointer
 *
 * Unpins and frees the save/restore, clear-state and CP-table buffer
 * objects allocated by sumo_rlc_init().  Each is handled independently
 * so a partial init can still be cleaned up; reserve failures are
 * only warned about since teardown must proceed regardless.
 */
void sumo_rlc_fini(struct radeon_device *rdev)
{
	int r;

	/* save restore block */
	if (rdev->rlc.save_restore_obj) {
		r = radeon_bo_reserve(rdev->rlc.save_restore_obj, false);
		if (unlikely(r != 0))
			dev_warn(rdev->dev, "(%d) reserve RLC sr bo failed\n", r);
		radeon_bo_unpin(rdev->rlc.save_restore_obj);
		radeon_bo_unreserve(rdev->rlc.save_restore_obj);

		radeon_bo_unref(&rdev->rlc.save_restore_obj);
		rdev->rlc.save_restore_obj = NULL;
	}

	/* clear state block */
	if (rdev->rlc.clear_state_obj) {
		r = radeon_bo_reserve(rdev->rlc.clear_state_obj, false);
		if (unlikely(r != 0))
			dev_warn(rdev->dev, "(%d) reserve RLC c bo failed\n", r);
		radeon_bo_unpin(rdev->rlc.clear_state_obj);
		radeon_bo_unreserve(rdev->rlc.clear_state_obj);

		radeon_bo_unref(&rdev->rlc.clear_state_obj);
		rdev->rlc.clear_state_obj = NULL;
	}

	/* cp table block (comment previously said "clear state block") */
	if (rdev->rlc.cp_table_obj) {
		r = radeon_bo_reserve(rdev->rlc.cp_table_obj, false);
		if (unlikely(r != 0))
			dev_warn(rdev->dev, "(%d) reserve RLC cp table bo failed\n", r);
		radeon_bo_unpin(rdev->rlc.cp_table_obj);
		radeon_bo_unreserve(rdev->rlc.cp_table_obj);

		radeon_bo_unref(&rdev->rlc.cp_table_obj);
		rdev->rlc.cp_table_obj = NULL;
	}
}
3988 
3989 #define CP_ME_TABLE_SIZE    96
3990 
/**
 * sumo_rlc_init - allocate and fill the RLC buffer objects
 *
 * @rdev: radeon_device pointer
 *
 * Creates, pins and populates up to three VRAM buffer objects for the
 * RLC, depending on what the ASIC-specific setup provided:
 *  - a save/restore register list (rdev->rlc.reg_list),
 *  - a clear-state buffer (rdev->rlc.cs_data),
 *  - a CP page-table buffer (rdev->rlc.cp_table_size, CIK only).
 * The packed buffer layouts differ per generation (SI vs ON/LN/TN vs
 * CIK); see the inline comments.  On any failure the partially built
 * state is torn down via sumo_rlc_fini() and the error is returned.
 * Returns 0 on success, negative error code on failure.
 */
int sumo_rlc_init(struct radeon_device *rdev)
{
	const u32 *src_ptr;
	volatile u32 *dst_ptr;
	u32 dws, data, i, j, k, reg_num;
	u32 reg_list_num, reg_list_hdr_blk_index, reg_list_blk_index = 0;
	u64 reg_list_mc_addr;
	const struct cs_section_def *cs_data;
	int r;
	void *vptr;

	vptr = NULL;
	src_ptr = rdev->rlc.reg_list;
	dws = rdev->rlc.reg_list_size;
	if (rdev->family >= CHIP_BONAIRE) {
		/* CIK needs extra room in the sr buffer beyond the reg list */
		dws += (5 * 16) + 48 + 48 + 64;
	}
	cs_data = rdev->rlc.cs_data;

	if (src_ptr) {
		/* save restore block */
		if (rdev->rlc.save_restore_obj == NULL) {
			r = radeon_bo_create(rdev, dws * 4, PAGE_SIZE, true,
					     RADEON_GEM_DOMAIN_VRAM, 0, NULL,
					     &rdev->rlc.save_restore_obj);
			if (r) {
				dev_warn(rdev->dev, "(%d) create RLC sr bo failed\n", r);
				return r;
			}
		}

		r = radeon_bo_reserve(rdev->rlc.save_restore_obj, false);
		if (unlikely(r != 0)) {
			sumo_rlc_fini(rdev);
			return r;
		}
		r = radeon_bo_pin(rdev->rlc.save_restore_obj, RADEON_GEM_DOMAIN_VRAM,
				  &rdev->rlc.save_restore_gpu_addr);
		if (r) {
			radeon_bo_unreserve(rdev->rlc.save_restore_obj);
			dev_warn(rdev->dev, "(%d) pin RLC sr bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}

		r = radeon_bo_kmap(rdev->rlc.save_restore_obj, (void**)&vptr);
		if (r) {
			dev_warn(rdev->dev, "(%d) map RLC sr bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}
		rdev->rlc.sr_ptr = vptr;
		/* write the sr buffer */
		dst_ptr = rdev->rlc.sr_ptr;
		if (rdev->family >= CHIP_TAHITI) {
			/* SI: plain copy of the register list */
			for (i = 0; i < rdev->rlc.reg_list_size; i++)
				dst_ptr[i] = cpu_to_le32(src_ptr[i]);
		} else {
			/* ON/LN/TN */
			/* format:
			 * dw0: (reg2 << 16) | reg1
			 * dw1: reg1 save space
			 * dw2: reg2 save space
			 */
			for (i = 0; i < dws; i++) {
				data = src_ptr[i] >> 2;
				i++;
				if (i < dws)
					data |= (src_ptr[i] >> 2) << 16;
				j = (((i - 1) * 3) / 2);
				dst_ptr[j] = cpu_to_le32(data);
			}
			/* terminate the packed list */
			j = ((i * 3) / 2);
			dst_ptr[j] = cpu_to_le32(RLC_SAVE_RESTORE_LIST_END_MARKER);
		}
		radeon_bo_kunmap(rdev->rlc.save_restore_obj);
		radeon_bo_unreserve(rdev->rlc.save_restore_obj);
	}

	if (cs_data) {
		/* clear state block: size computation is per-generation */
		if (rdev->family >= CHIP_BONAIRE) {
			rdev->rlc.clear_state_size = dws = cik_get_csb_size(rdev);
		} else if (rdev->family >= CHIP_TAHITI) {
			/* SI prepends a 256-byte header before the csb data */
			rdev->rlc.clear_state_size = si_get_csb_size(rdev);
			dws = rdev->rlc.clear_state_size + (256 / 4);
		} else {
			/* ON/LN/TN: 3 header dwords per extent, +2 for the
			 * leading upper-address dword and end marker */
			reg_list_num = 0;
			dws = 0;
			for (i = 0; cs_data[i].section != NULL; i++) {
				for (j = 0; cs_data[i].section[j].extent != NULL; j++) {
					reg_list_num++;
					dws += cs_data[i].section[j].reg_count;
				}
			}
			reg_list_blk_index = (3 * reg_list_num + 2);
			dws += reg_list_blk_index;
			rdev->rlc.clear_state_size = dws;
		}

		if (rdev->rlc.clear_state_obj == NULL) {
			r = radeon_bo_create(rdev, dws * 4, PAGE_SIZE, true,
					     RADEON_GEM_DOMAIN_VRAM, 0, NULL,
					     &rdev->rlc.clear_state_obj);
			if (r) {
				dev_warn(rdev->dev, "(%d) create RLC c bo failed\n", r);
				sumo_rlc_fini(rdev);
				return r;
			}
		}
		r = radeon_bo_reserve(rdev->rlc.clear_state_obj, false);
		if (unlikely(r != 0)) {
			sumo_rlc_fini(rdev);
			return r;
		}
		r = radeon_bo_pin(rdev->rlc.clear_state_obj, RADEON_GEM_DOMAIN_VRAM,
				  &rdev->rlc.clear_state_gpu_addr);
		if (r) {
			radeon_bo_unreserve(rdev->rlc.clear_state_obj);
			dev_warn(rdev->dev, "(%d) pin RLC c bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}

		r = radeon_bo_kmap(rdev->rlc.clear_state_obj, (void**)&vptr);
		if (r) {
			dev_warn(rdev->dev, "(%d) map RLC c bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}
		rdev->rlc.cs_ptr = vptr;
		/* set up the cs buffer */
		dst_ptr = rdev->rlc.cs_ptr;
		if (rdev->family >= CHIP_BONAIRE) {
			cik_get_csb_buffer(rdev, dst_ptr);
		} else if (rdev->family >= CHIP_TAHITI) {
			/* SI header: csb GPU address (hi, lo) and size */
			reg_list_mc_addr = rdev->rlc.clear_state_gpu_addr + 256;
			dst_ptr[0] = cpu_to_le32(upper_32_bits(reg_list_mc_addr));
			dst_ptr[1] = cpu_to_le32(lower_32_bits(reg_list_mc_addr));
			dst_ptr[2] = cpu_to_le32(rdev->rlc.clear_state_size);
			si_get_csb_buffer(rdev, &dst_ptr[(256/4)]);
		} else {
			/* ON/LN/TN: header entries point at the register
			 * data packed after the header block */
			reg_list_hdr_blk_index = 0;
			reg_list_mc_addr = rdev->rlc.clear_state_gpu_addr + (reg_list_blk_index * 4);
			data = upper_32_bits(reg_list_mc_addr);
			dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
			reg_list_hdr_blk_index++;
			for (i = 0; cs_data[i].section != NULL; i++) {
				for (j = 0; cs_data[i].section[j].extent != NULL; j++) {
					reg_num = cs_data[i].section[j].reg_count;
					/* low 32 bits of this extent's data address */
					data = reg_list_mc_addr & 0xffffffff;
					dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
					reg_list_hdr_blk_index++;

					/* byte offset of the first register */
					data = (cs_data[i].section[j].reg_index * 4) & 0xffffffff;
					dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
					reg_list_hdr_blk_index++;

					/* flag + extent length in bytes */
					data = 0x08000000 | (reg_num * 4);
					dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
					reg_list_hdr_blk_index++;

					for (k = 0; k < reg_num; k++) {
						data = cs_data[i].section[j].extent[k];
						dst_ptr[reg_list_blk_index + k] = cpu_to_le32(data);
					}
					reg_list_mc_addr += reg_num * 4;
					reg_list_blk_index += reg_num;
				}
			}
			dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(RLC_CLEAR_STATE_END_MARKER);
		}
		radeon_bo_kunmap(rdev->rlc.clear_state_obj);
		radeon_bo_unreserve(rdev->rlc.clear_state_obj);
	}

	if (rdev->rlc.cp_table_size) {
		/* cp table block (CIK power gating) */
		if (rdev->rlc.cp_table_obj == NULL) {
			r = radeon_bo_create(rdev, rdev->rlc.cp_table_size,
					     PAGE_SIZE, true,
					     RADEON_GEM_DOMAIN_VRAM, 0, NULL,
					     &rdev->rlc.cp_table_obj);
			if (r) {
				dev_warn(rdev->dev, "(%d) create RLC cp table bo failed\n", r);
				sumo_rlc_fini(rdev);
				return r;
			}
		}

		r = radeon_bo_reserve(rdev->rlc.cp_table_obj, false);
		if (unlikely(r != 0)) {
			dev_warn(rdev->dev, "(%d) reserve RLC cp table bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}
		r = radeon_bo_pin(rdev->rlc.cp_table_obj, RADEON_GEM_DOMAIN_VRAM,
				  &rdev->rlc.cp_table_gpu_addr);
		if (r) {
			radeon_bo_unreserve(rdev->rlc.cp_table_obj);
			dev_warn(rdev->dev, "(%d) pin RLC cp_table bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}
		r = radeon_bo_kmap(rdev->rlc.cp_table_obj, (void**)&vptr);
		if (r) {
			dev_warn(rdev->dev, "(%d) map RLC cp table bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}
		rdev->rlc.cp_table_ptr = vptr;

		cik_init_cp_pg_table(rdev);

		radeon_bo_kunmap(rdev->rlc.cp_table_obj);
		radeon_bo_unreserve(rdev->rlc.cp_table_obj);

	}

	return 0;
}
4212 
4213 static void evergreen_rlc_start(struct radeon_device *rdev)
4214 {
4215 	u32 mask = RLC_ENABLE;
4216 
4217 	if (rdev->flags & RADEON_IS_IGP) {
4218 		mask |= GFX_POWER_GATING_ENABLE | GFX_POWER_GATING_SRC;
4219 	}
4220 
4221 	WREG32(RLC_CNTL, mask);
4222 }
4223 
4224 int evergreen_rlc_resume(struct radeon_device *rdev)
4225 {
4226 	u32 i;
4227 	const __be32 *fw_data;
4228 
4229 	if (!rdev->rlc_fw)
4230 		return -EINVAL;
4231 
4232 	r600_rlc_stop(rdev);
4233 
4234 	WREG32(RLC_HB_CNTL, 0);
4235 
4236 	if (rdev->flags & RADEON_IS_IGP) {
4237 		if (rdev->family == CHIP_ARUBA) {
4238 			u32 always_on_bitmap =
4239 				3 | (3 << (16 * rdev->config.cayman.max_shader_engines));
4240 			/* find out the number of active simds */
4241 			u32 tmp = (RREG32(CC_GC_SHADER_PIPE_CONFIG) & 0xffff0000) >> 16;
4242 			tmp |= 0xffffffff << rdev->config.cayman.max_simds_per_se;
4243 			tmp = hweight32(~tmp);
4244 			if (tmp == rdev->config.cayman.max_simds_per_se) {
4245 				WREG32(TN_RLC_LB_ALWAYS_ACTIVE_SIMD_MASK, always_on_bitmap);
4246 				WREG32(TN_RLC_LB_PARAMS, 0x00601004);
4247 				WREG32(TN_RLC_LB_INIT_SIMD_MASK, 0xffffffff);
4248 				WREG32(TN_RLC_LB_CNTR_INIT, 0x00000000);
4249 				WREG32(TN_RLC_LB_CNTR_MAX, 0x00002000);
4250 			}
4251 		} else {
4252 			WREG32(RLC_HB_WPTR_LSB_ADDR, 0);
4253 			WREG32(RLC_HB_WPTR_MSB_ADDR, 0);
4254 		}
4255 		WREG32(TN_RLC_SAVE_AND_RESTORE_BASE, rdev->rlc.save_restore_gpu_addr >> 8);
4256 		WREG32(TN_RLC_CLEAR_STATE_RESTORE_BASE, rdev->rlc.clear_state_gpu_addr >> 8);
4257 	} else {
4258 		WREG32(RLC_HB_BASE, 0);
4259 		WREG32(RLC_HB_RPTR, 0);
4260 		WREG32(RLC_HB_WPTR, 0);
4261 		WREG32(RLC_HB_WPTR_LSB_ADDR, 0);
4262 		WREG32(RLC_HB_WPTR_MSB_ADDR, 0);
4263 	}
4264 	WREG32(RLC_MC_CNTL, 0);
4265 	WREG32(RLC_UCODE_CNTL, 0);
4266 
4267 	fw_data = (const __be32 *)rdev->rlc_fw->data;
4268 	if (rdev->family >= CHIP_ARUBA) {
4269 		for (i = 0; i < ARUBA_RLC_UCODE_SIZE; i++) {
4270 			WREG32(RLC_UCODE_ADDR, i);
4271 			WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
4272 		}
4273 	} else if (rdev->family >= CHIP_CAYMAN) {
4274 		for (i = 0; i < CAYMAN_RLC_UCODE_SIZE; i++) {
4275 			WREG32(RLC_UCODE_ADDR, i);
4276 			WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
4277 		}
4278 	} else {
4279 		for (i = 0; i < EVERGREEN_RLC_UCODE_SIZE; i++) {
4280 			WREG32(RLC_UCODE_ADDR, i);
4281 			WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
4282 		}
4283 	}
4284 	WREG32(RLC_UCODE_ADDR, 0);
4285 
4286 	evergreen_rlc_start(rdev);
4287 
4288 	return 0;
4289 }
4290 
4291 /* Interrupts */
4292 
4293 u32 evergreen_get_vblank_counter(struct radeon_device *rdev, int crtc)
4294 {
4295 	if (crtc >= rdev->num_crtc)
4296 		return 0;
4297 	else
4298 		return RREG32(CRTC_STATUS_FRAME_COUNT + crtc_offsets[crtc]);
4299 }
4300 
/**
 * evergreen_disable_interrupt_state - force all interrupt sources off
 *
 * @rdev: radeon_device pointer
 *
 * Clears every interrupt enable the driver uses: CP (per-ring on
 * cayman+), DMA trap, GRBM, per-CRTC vblank and pageflip masks, DAC
 * auto-detect and HPD.  HPD polarity bits are preserved while the
 * enable bits are cleared.
 */
void evergreen_disable_interrupt_state(struct radeon_device *rdev)
{
	u32 tmp;

	if (rdev->family >= CHIP_CAYMAN) {
		/* cayman+ has three CP rings and a second DMA engine */
		cayman_cp_int_cntl_setup(rdev, 0,
					 CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
		cayman_cp_int_cntl_setup(rdev, 1, 0);
		cayman_cp_int_cntl_setup(rdev, 2, 0);
		tmp = RREG32(CAYMAN_DMA1_CNTL) & ~TRAP_ENABLE;
		WREG32(CAYMAN_DMA1_CNTL, tmp);
	} else
		WREG32(CP_INT_CNTL, CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
	tmp = RREG32(DMA_CNTL) & ~TRAP_ENABLE;
	WREG32(DMA_CNTL, tmp);
	WREG32(GRBM_INT_CNTL, 0);
	/* per-CRTC interrupt masks; only program CRTCs that exist */
	WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
	WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
	if (rdev->num_crtc >= 4) {
		WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
		WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
	}
	if (rdev->num_crtc >= 6) {
		WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
		WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
	}

	/* pageflip interrupt controls */
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
	if (rdev->num_crtc >= 4) {
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
	}
	if (rdev->num_crtc >= 6) {
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
	}

	/* only one DAC on DCE5 */
	if (!ASIC_IS_DCE5(rdev))
		WREG32(DACA_AUTODETECT_INT_CONTROL, 0);
	WREG32(DACB_AUTODETECT_INT_CONTROL, 0);

	/* keep the HPD polarity bits, drop the enables */
	tmp = RREG32(DC_HPD1_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD1_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD2_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD2_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD3_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD3_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD4_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD4_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD5_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD5_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD6_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD6_INT_CONTROL, tmp);

}
4358 
4359 int evergreen_irq_set(struct radeon_device *rdev)
4360 {
4361 	u32 cp_int_cntl = CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE;
4362 	u32 cp_int_cntl1 = 0, cp_int_cntl2 = 0;
4363 	u32 crtc1 = 0, crtc2 = 0, crtc3 = 0, crtc4 = 0, crtc5 = 0, crtc6 = 0;
4364 	u32 hpd1, hpd2, hpd3, hpd4, hpd5, hpd6;
4365 	u32 grbm_int_cntl = 0;
4366 	u32 afmt1 = 0, afmt2 = 0, afmt3 = 0, afmt4 = 0, afmt5 = 0, afmt6 = 0;
4367 	u32 dma_cntl, dma_cntl1 = 0;
4368 	u32 thermal_int = 0;
4369 
4370 	if (!rdev->irq.installed) {
4371 		WARN(1, "Can't enable IRQ/MSI because no handler is installed\n");
4372 		return -EINVAL;
4373 	}
4374 	/* don't enable anything if the ih is disabled */
4375 	if (!rdev->ih.enabled) {
4376 		r600_disable_interrupts(rdev);
4377 		/* force the active interrupt state to all disabled */
4378 		evergreen_disable_interrupt_state(rdev);
4379 		return 0;
4380 	}
4381 
4382 	hpd1 = RREG32(DC_HPD1_INT_CONTROL) & ~DC_HPDx_INT_EN;
4383 	hpd2 = RREG32(DC_HPD2_INT_CONTROL) & ~DC_HPDx_INT_EN;
4384 	hpd3 = RREG32(DC_HPD3_INT_CONTROL) & ~DC_HPDx_INT_EN;
4385 	hpd4 = RREG32(DC_HPD4_INT_CONTROL) & ~DC_HPDx_INT_EN;
4386 	hpd5 = RREG32(DC_HPD5_INT_CONTROL) & ~DC_HPDx_INT_EN;
4387 	hpd6 = RREG32(DC_HPD6_INT_CONTROL) & ~DC_HPDx_INT_EN;
4388 	if (rdev->family == CHIP_ARUBA)
4389 		thermal_int = RREG32(TN_CG_THERMAL_INT_CTRL) &
4390 			~(THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW);
4391 	else
4392 		thermal_int = RREG32(CG_THERMAL_INT) &
4393 			~(THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW);
4394 
4395 	afmt1 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4396 	afmt2 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4397 	afmt3 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4398 	afmt4 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4399 	afmt5 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4400 	afmt6 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4401 
4402 	dma_cntl = RREG32(DMA_CNTL) & ~TRAP_ENABLE;
4403 
4404 	if (rdev->family >= CHIP_CAYMAN) {
4405 		/* enable CP interrupts on all rings */
4406 		if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
4407 			DRM_DEBUG("evergreen_irq_set: sw int gfx\n");
4408 			cp_int_cntl |= TIME_STAMP_INT_ENABLE;
4409 		}
4410 		if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP1_INDEX])) {
4411 			DRM_DEBUG("evergreen_irq_set: sw int cp1\n");
4412 			cp_int_cntl1 |= TIME_STAMP_INT_ENABLE;
4413 		}
4414 		if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP2_INDEX])) {
4415 			DRM_DEBUG("evergreen_irq_set: sw int cp2\n");
4416 			cp_int_cntl2 |= TIME_STAMP_INT_ENABLE;
4417 		}
4418 	} else {
4419 		if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
4420 			DRM_DEBUG("evergreen_irq_set: sw int gfx\n");
4421 			cp_int_cntl |= RB_INT_ENABLE;
4422 			cp_int_cntl |= TIME_STAMP_INT_ENABLE;
4423 		}
4424 	}
4425 
4426 	if (atomic_read(&rdev->irq.ring_int[R600_RING_TYPE_DMA_INDEX])) {
4427 		DRM_DEBUG("r600_irq_set: sw int dma\n");
4428 		dma_cntl |= TRAP_ENABLE;
4429 	}
4430 
4431 	if (rdev->family >= CHIP_CAYMAN) {
4432 		dma_cntl1 = RREG32(CAYMAN_DMA1_CNTL) & ~TRAP_ENABLE;
4433 		if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_DMA1_INDEX])) {
4434 			DRM_DEBUG("r600_irq_set: sw int dma1\n");
4435 			dma_cntl1 |= TRAP_ENABLE;
4436 		}
4437 	}
4438 
4439 	if (rdev->irq.dpm_thermal) {
4440 		DRM_DEBUG("dpm thermal\n");
4441 		thermal_int |= THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW;
4442 	}
4443 
4444 	if (rdev->irq.crtc_vblank_int[0] ||
4445 	    atomic_read(&rdev->irq.pflip[0])) {
4446 		DRM_DEBUG("evergreen_irq_set: vblank 0\n");
4447 		crtc1 |= VBLANK_INT_MASK;
4448 	}
4449 	if (rdev->irq.crtc_vblank_int[1] ||
4450 	    atomic_read(&rdev->irq.pflip[1])) {
4451 		DRM_DEBUG("evergreen_irq_set: vblank 1\n");
4452 		crtc2 |= VBLANK_INT_MASK;
4453 	}
4454 	if (rdev->irq.crtc_vblank_int[2] ||
4455 	    atomic_read(&rdev->irq.pflip[2])) {
4456 		DRM_DEBUG("evergreen_irq_set: vblank 2\n");
4457 		crtc3 |= VBLANK_INT_MASK;
4458 	}
4459 	if (rdev->irq.crtc_vblank_int[3] ||
4460 	    atomic_read(&rdev->irq.pflip[3])) {
4461 		DRM_DEBUG("evergreen_irq_set: vblank 3\n");
4462 		crtc4 |= VBLANK_INT_MASK;
4463 	}
4464 	if (rdev->irq.crtc_vblank_int[4] ||
4465 	    atomic_read(&rdev->irq.pflip[4])) {
4466 		DRM_DEBUG("evergreen_irq_set: vblank 4\n");
4467 		crtc5 |= VBLANK_INT_MASK;
4468 	}
4469 	if (rdev->irq.crtc_vblank_int[5] ||
4470 	    atomic_read(&rdev->irq.pflip[5])) {
4471 		DRM_DEBUG("evergreen_irq_set: vblank 5\n");
4472 		crtc6 |= VBLANK_INT_MASK;
4473 	}
4474 	if (rdev->irq.hpd[0]) {
4475 		DRM_DEBUG("evergreen_irq_set: hpd 1\n");
4476 		hpd1 |= DC_HPDx_INT_EN;
4477 	}
4478 	if (rdev->irq.hpd[1]) {
4479 		DRM_DEBUG("evergreen_irq_set: hpd 2\n");
4480 		hpd2 |= DC_HPDx_INT_EN;
4481 	}
4482 	if (rdev->irq.hpd[2]) {
4483 		DRM_DEBUG("evergreen_irq_set: hpd 3\n");
4484 		hpd3 |= DC_HPDx_INT_EN;
4485 	}
4486 	if (rdev->irq.hpd[3]) {
4487 		DRM_DEBUG("evergreen_irq_set: hpd 4\n");
4488 		hpd4 |= DC_HPDx_INT_EN;
4489 	}
4490 	if (rdev->irq.hpd[4]) {
4491 		DRM_DEBUG("evergreen_irq_set: hpd 5\n");
4492 		hpd5 |= DC_HPDx_INT_EN;
4493 	}
4494 	if (rdev->irq.hpd[5]) {
4495 		DRM_DEBUG("evergreen_irq_set: hpd 6\n");
4496 		hpd6 |= DC_HPDx_INT_EN;
4497 	}
4498 	if (rdev->irq.afmt[0]) {
4499 		DRM_DEBUG("evergreen_irq_set: hdmi 0\n");
4500 		afmt1 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4501 	}
4502 	if (rdev->irq.afmt[1]) {
4503 		DRM_DEBUG("evergreen_irq_set: hdmi 1\n");
4504 		afmt2 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4505 	}
4506 	if (rdev->irq.afmt[2]) {
4507 		DRM_DEBUG("evergreen_irq_set: hdmi 2\n");
4508 		afmt3 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4509 	}
4510 	if (rdev->irq.afmt[3]) {
4511 		DRM_DEBUG("evergreen_irq_set: hdmi 3\n");
4512 		afmt4 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4513 	}
4514 	if (rdev->irq.afmt[4]) {
4515 		DRM_DEBUG("evergreen_irq_set: hdmi 4\n");
4516 		afmt5 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4517 	}
4518 	if (rdev->irq.afmt[5]) {
4519 		DRM_DEBUG("evergreen_irq_set: hdmi 5\n");
4520 		afmt6 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4521 	}
4522 
4523 	if (rdev->family >= CHIP_CAYMAN) {
4524 		cayman_cp_int_cntl_setup(rdev, 0, cp_int_cntl);
4525 		cayman_cp_int_cntl_setup(rdev, 1, cp_int_cntl1);
4526 		cayman_cp_int_cntl_setup(rdev, 2, cp_int_cntl2);
4527 	} else
4528 		WREG32(CP_INT_CNTL, cp_int_cntl);
4529 
4530 	WREG32(DMA_CNTL, dma_cntl);
4531 
4532 	if (rdev->family >= CHIP_CAYMAN)
4533 		WREG32(CAYMAN_DMA1_CNTL, dma_cntl1);
4534 
4535 	WREG32(GRBM_INT_CNTL, grbm_int_cntl);
4536 
4537 	WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, crtc1);
4538 	WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, crtc2);
4539 	if (rdev->num_crtc >= 4) {
4540 		WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, crtc3);
4541 		WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, crtc4);
4542 	}
4543 	if (rdev->num_crtc >= 6) {
4544 		WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, crtc5);
4545 		WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, crtc6);
4546 	}
4547 
4548 	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET,
4549 	       GRPH_PFLIP_INT_MASK);
4550 	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET,
4551 	       GRPH_PFLIP_INT_MASK);
4552 	if (rdev->num_crtc >= 4) {
4553 		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET,
4554 		       GRPH_PFLIP_INT_MASK);
4555 		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET,
4556 		       GRPH_PFLIP_INT_MASK);
4557 	}
4558 	if (rdev->num_crtc >= 6) {
4559 		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET,
4560 		       GRPH_PFLIP_INT_MASK);
4561 		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET,
4562 		       GRPH_PFLIP_INT_MASK);
4563 	}
4564 
4565 	WREG32(DC_HPD1_INT_CONTROL, hpd1);
4566 	WREG32(DC_HPD2_INT_CONTROL, hpd2);
4567 	WREG32(DC_HPD3_INT_CONTROL, hpd3);
4568 	WREG32(DC_HPD4_INT_CONTROL, hpd4);
4569 	WREG32(DC_HPD5_INT_CONTROL, hpd5);
4570 	WREG32(DC_HPD6_INT_CONTROL, hpd6);
4571 	if (rdev->family == CHIP_ARUBA)
4572 		WREG32(TN_CG_THERMAL_INT_CTRL, thermal_int);
4573 	else
4574 		WREG32(CG_THERMAL_INT, thermal_int);
4575 
4576 	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, afmt1);
4577 	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, afmt2);
4578 	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, afmt3);
4579 	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, afmt4);
4580 	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, afmt5);
4581 	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, afmt6);
4582 
4583 	return 0;
4584 }
4585 
4586 static void evergreen_irq_ack(struct radeon_device *rdev)
4587 {
4588 	u32 tmp;
4589 
4590 	rdev->irq.stat_regs.evergreen.disp_int = RREG32(DISP_INTERRUPT_STATUS);
4591 	rdev->irq.stat_regs.evergreen.disp_int_cont = RREG32(DISP_INTERRUPT_STATUS_CONTINUE);
4592 	rdev->irq.stat_regs.evergreen.disp_int_cont2 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE2);
4593 	rdev->irq.stat_regs.evergreen.disp_int_cont3 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE3);
4594 	rdev->irq.stat_regs.evergreen.disp_int_cont4 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE4);
4595 	rdev->irq.stat_regs.evergreen.disp_int_cont5 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE5);
4596 	rdev->irq.stat_regs.evergreen.d1grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
4597 	rdev->irq.stat_regs.evergreen.d2grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
4598 	if (rdev->num_crtc >= 4) {
4599 		rdev->irq.stat_regs.evergreen.d3grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
4600 		rdev->irq.stat_regs.evergreen.d4grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
4601 	}
4602 	if (rdev->num_crtc >= 6) {
4603 		rdev->irq.stat_regs.evergreen.d5grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
4604 		rdev->irq.stat_regs.evergreen.d6grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);
4605 	}
4606 
4607 	rdev->irq.stat_regs.evergreen.afmt_status1 = RREG32(AFMT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
4608 	rdev->irq.stat_regs.evergreen.afmt_status2 = RREG32(AFMT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
4609 	rdev->irq.stat_regs.evergreen.afmt_status3 = RREG32(AFMT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
4610 	rdev->irq.stat_regs.evergreen.afmt_status4 = RREG32(AFMT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
4611 	rdev->irq.stat_regs.evergreen.afmt_status5 = RREG32(AFMT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
4612 	rdev->irq.stat_regs.evergreen.afmt_status6 = RREG32(AFMT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);
4613 
4614 	if (rdev->irq.stat_regs.evergreen.d1grph_int & GRPH_PFLIP_INT_OCCURRED)
4615 		WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4616 	if (rdev->irq.stat_regs.evergreen.d2grph_int & GRPH_PFLIP_INT_OCCURRED)
4617 		WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4618 	if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT)
4619 		WREG32(VBLANK_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VBLANK_ACK);
4620 	if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT)
4621 		WREG32(VLINE_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VLINE_ACK);
4622 	if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT)
4623 		WREG32(VBLANK_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VBLANK_ACK);
4624 	if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT)
4625 		WREG32(VLINE_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VLINE_ACK);
4626 
4627 	if (rdev->num_crtc >= 4) {
4628 		if (rdev->irq.stat_regs.evergreen.d3grph_int & GRPH_PFLIP_INT_OCCURRED)
4629 			WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4630 		if (rdev->irq.stat_regs.evergreen.d4grph_int & GRPH_PFLIP_INT_OCCURRED)
4631 			WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4632 		if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT)
4633 			WREG32(VBLANK_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VBLANK_ACK);
4634 		if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT)
4635 			WREG32(VLINE_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VLINE_ACK);
4636 		if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT)
4637 			WREG32(VBLANK_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VBLANK_ACK);
4638 		if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT)
4639 			WREG32(VLINE_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VLINE_ACK);
4640 	}
4641 
4642 	if (rdev->num_crtc >= 6) {
4643 		if (rdev->irq.stat_regs.evergreen.d5grph_int & GRPH_PFLIP_INT_OCCURRED)
4644 			WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4645 		if (rdev->irq.stat_regs.evergreen.d6grph_int & GRPH_PFLIP_INT_OCCURRED)
4646 			WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4647 		if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT)
4648 			WREG32(VBLANK_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VBLANK_ACK);
4649 		if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT)
4650 			WREG32(VLINE_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VLINE_ACK);
4651 		if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT)
4652 			WREG32(VBLANK_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VBLANK_ACK);
4653 		if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT)
4654 			WREG32(VLINE_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VLINE_ACK);
4655 	}
4656 
4657 	if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT) {
4658 		tmp = RREG32(DC_HPD1_INT_CONTROL);
4659 		tmp |= DC_HPDx_INT_ACK;
4660 		WREG32(DC_HPD1_INT_CONTROL, tmp);
4661 	}
4662 	if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT) {
4663 		tmp = RREG32(DC_HPD2_INT_CONTROL);
4664 		tmp |= DC_HPDx_INT_ACK;
4665 		WREG32(DC_HPD2_INT_CONTROL, tmp);
4666 	}
4667 	if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT) {
4668 		tmp = RREG32(DC_HPD3_INT_CONTROL);
4669 		tmp |= DC_HPDx_INT_ACK;
4670 		WREG32(DC_HPD3_INT_CONTROL, tmp);
4671 	}
4672 	if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT) {
4673 		tmp = RREG32(DC_HPD4_INT_CONTROL);
4674 		tmp |= DC_HPDx_INT_ACK;
4675 		WREG32(DC_HPD4_INT_CONTROL, tmp);
4676 	}
4677 	if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT) {
4678 		tmp = RREG32(DC_HPD5_INT_CONTROL);
4679 		tmp |= DC_HPDx_INT_ACK;
4680 		WREG32(DC_HPD5_INT_CONTROL, tmp);
4681 	}
4682 	if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT) {
4683 		tmp = RREG32(DC_HPD5_INT_CONTROL);
4684 		tmp |= DC_HPDx_INT_ACK;
4685 		WREG32(DC_HPD6_INT_CONTROL, tmp);
4686 	}
4687 	if (rdev->irq.stat_regs.evergreen.afmt_status1 & AFMT_AZ_FORMAT_WTRIG) {
4688 		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET);
4689 		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4690 		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, tmp);
4691 	}
4692 	if (rdev->irq.stat_regs.evergreen.afmt_status2 & AFMT_AZ_FORMAT_WTRIG) {
4693 		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET);
4694 		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4695 		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, tmp);
4696 	}
4697 	if (rdev->irq.stat_regs.evergreen.afmt_status3 & AFMT_AZ_FORMAT_WTRIG) {
4698 		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET);
4699 		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4700 		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, tmp);
4701 	}
4702 	if (rdev->irq.stat_regs.evergreen.afmt_status4 & AFMT_AZ_FORMAT_WTRIG) {
4703 		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET);
4704 		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4705 		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, tmp);
4706 	}
4707 	if (rdev->irq.stat_regs.evergreen.afmt_status5 & AFMT_AZ_FORMAT_WTRIG) {
4708 		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET);
4709 		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4710 		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, tmp);
4711 	}
4712 	if (rdev->irq.stat_regs.evergreen.afmt_status6 & AFMT_AZ_FORMAT_WTRIG) {
4713 		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET);
4714 		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4715 		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, tmp);
4716 	}
4717 }
4718 
/**
 * evergreen_irq_disable - disable interrupts
 *
 * @rdev: radeon_device pointer
 *
 * Disable interrupts on the hw, then ack anything still pending and
 * clear the interrupt enable state.
 */
static void evergreen_irq_disable(struct radeon_device *rdev)
{
	r600_disable_interrupts(rdev);
	/* Wait and acknowledge irq */
	mdelay(1);
	evergreen_irq_ack(rdev);
	evergreen_disable_interrupt_state(rdev);
}
4727 
/**
 * evergreen_irq_suspend - disable interrupts for suspend
 *
 * @rdev: radeon_device pointer
 *
 * Disable interrupts and stop the RLC.  Used when suspending the asic.
 */
void evergreen_irq_suspend(struct radeon_device *rdev)
{
	evergreen_irq_disable(rdev);
	/* stop the RLC */
	r600_rlc_stop(rdev);
}
4733 
4734 static u32 evergreen_get_ih_wptr(struct radeon_device *rdev)
4735 {
4736 	u32 wptr, tmp;
4737 
4738 	if (rdev->wb.enabled)
4739 		wptr = le32_to_cpu(rdev->wb.wb[R600_WB_IH_WPTR_OFFSET/4]);
4740 	else
4741 		wptr = RREG32(IH_RB_WPTR);
4742 
4743 	if (wptr & RB_OVERFLOW) {
4744 		wptr &= ~RB_OVERFLOW;
4745 		/* When a ring buffer overflow happen start parsing interrupt
4746 		 * from the last not overwritten vector (wptr + 16). Hopefully
4747 		 * this should allow us to catchup.
4748 		 */
4749 		dev_warn(rdev->dev, "IH ring buffer overflow (0x%08X, 0x%08X, 0x%08X)\n",
4750 			 wptr, rdev->ih.rptr, (wptr + 16) & rdev->ih.ptr_mask);
4751 		rdev->ih.rptr = (wptr + 16) & rdev->ih.ptr_mask;
4752 		tmp = RREG32(IH_RB_CNTL);
4753 		tmp |= IH_WPTR_OVERFLOW_CLEAR;
4754 		WREG32(IH_RB_CNTL, tmp);
4755 	}
4756 	return (wptr & rdev->ih.ptr_mask);
4757 }
4758 
4759 irqreturn_t evergreen_irq_process(struct radeon_device *rdev)
4760 {
4761 	u32 wptr;
4762 	u32 rptr;
4763 	u32 src_id, src_data;
4764 	u32 ring_index;
4765 	bool queue_hotplug = false;
4766 	bool queue_hdmi = false;
4767 	bool queue_thermal = false;
4768 	u32 status, addr;
4769 
4770 	if (!rdev->ih.enabled || rdev->shutdown)
4771 		return IRQ_NONE;
4772 
4773 	wptr = evergreen_get_ih_wptr(rdev);
4774 
4775 restart_ih:
4776 	/* is somebody else already processing irqs? */
4777 	if (atomic_xchg(&rdev->ih.lock, 1))
4778 		return IRQ_NONE;
4779 
4780 	rptr = rdev->ih.rptr;
4781 	DRM_DEBUG_VBLANK("evergreen_irq_process start: rptr %d, wptr %d\n", rptr, wptr);
4782 
4783 	/* Order reading of wptr vs. reading of IH ring data */
4784 	rmb();
4785 
4786 	/* display interrupts */
4787 	evergreen_irq_ack(rdev);
4788 
4789 	while (rptr != wptr) {
4790 		/* wptr/rptr are in bytes! */
4791 		ring_index = rptr / 4;
4792 		src_id =  le32_to_cpu(rdev->ih.ring[ring_index]) & 0xff;
4793 		src_data = le32_to_cpu(rdev->ih.ring[ring_index + 1]) & 0xfffffff;
4794 
4795 		switch (src_id) {
4796 		case 1: /* D1 vblank/vline */
4797 			switch (src_data) {
4798 			case 0: /* D1 vblank */
4799 				if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT) {
4800 					if (rdev->irq.crtc_vblank_int[0]) {
4801 						drm_handle_vblank(rdev->ddev, 0);
4802 						rdev->pm.vblank_sync = true;
4803 						wake_up(&rdev->irq.vblank_queue);
4804 					}
4805 					if (atomic_read(&rdev->irq.pflip[0]))
4806 						radeon_crtc_handle_vblank(rdev, 0);
4807 					rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VBLANK_INTERRUPT;
4808 					DRM_DEBUG_VBLANK("IH: D1 vblank\n");
4809 				}
4810 				break;
4811 			case 1: /* D1 vline */
4812 				if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT) {
4813 					rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VLINE_INTERRUPT;
4814 					DRM_DEBUG_VBLANK("IH: D1 vline\n");
4815 				}
4816 				break;
4817 			default:
4818 				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4819 				break;
4820 			}
4821 			break;
4822 		case 2: /* D2 vblank/vline */
4823 			switch (src_data) {
4824 			case 0: /* D2 vblank */
4825 				if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT) {
4826 					if (rdev->irq.crtc_vblank_int[1]) {
4827 						drm_handle_vblank(rdev->ddev, 1);
4828 						rdev->pm.vblank_sync = true;
4829 						wake_up(&rdev->irq.vblank_queue);
4830 					}
4831 					if (atomic_read(&rdev->irq.pflip[1]))
4832 						radeon_crtc_handle_vblank(rdev, 1);
4833 					rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VBLANK_INTERRUPT;
4834 					DRM_DEBUG_VBLANK("IH: D2 vblank\n");
4835 				}
4836 				break;
4837 			case 1: /* D2 vline */
4838 				if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT) {
4839 					rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VLINE_INTERRUPT;
4840 					DRM_DEBUG_VBLANK("IH: D2 vline\n");
4841 				}
4842 				break;
4843 			default:
4844 				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4845 				break;
4846 			}
4847 			break;
4848 		case 3: /* D3 vblank/vline */
4849 			switch (src_data) {
4850 			case 0: /* D3 vblank */
4851 				if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT) {
4852 					if (rdev->irq.crtc_vblank_int[2]) {
4853 						drm_handle_vblank(rdev->ddev, 2);
4854 						rdev->pm.vblank_sync = true;
4855 						wake_up(&rdev->irq.vblank_queue);
4856 					}
4857 					if (atomic_read(&rdev->irq.pflip[2]))
4858 						radeon_crtc_handle_vblank(rdev, 2);
4859 					rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VBLANK_INTERRUPT;
4860 					DRM_DEBUG_VBLANK("IH: D3 vblank\n");
4861 				}
4862 				break;
4863 			case 1: /* D3 vline */
4864 				if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT) {
4865 					rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VLINE_INTERRUPT;
4866 					DRM_DEBUG_VBLANK("IH: D3 vline\n");
4867 				}
4868 				break;
4869 			default:
4870 				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4871 				break;
4872 			}
4873 			break;
4874 		case 4: /* D4 vblank/vline */
4875 			switch (src_data) {
4876 			case 0: /* D4 vblank */
4877 				if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT) {
4878 					if (rdev->irq.crtc_vblank_int[3]) {
4879 						drm_handle_vblank(rdev->ddev, 3);
4880 						rdev->pm.vblank_sync = true;
4881 						wake_up(&rdev->irq.vblank_queue);
4882 					}
4883 					if (atomic_read(&rdev->irq.pflip[3]))
4884 						radeon_crtc_handle_vblank(rdev, 3);
4885 					rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VBLANK_INTERRUPT;
4886 					DRM_DEBUG_VBLANK("IH: D4 vblank\n");
4887 				}
4888 				break;
4889 			case 1: /* D4 vline */
4890 				if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT) {
4891 					rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VLINE_INTERRUPT;
4892 					DRM_DEBUG_VBLANK("IH: D4 vline\n");
4893 				}
4894 				break;
4895 			default:
4896 				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4897 				break;
4898 			}
4899 			break;
4900 		case 5: /* D5 vblank/vline */
4901 			switch (src_data) {
4902 			case 0: /* D5 vblank */
4903 				if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT) {
4904 					if (rdev->irq.crtc_vblank_int[4]) {
4905 						drm_handle_vblank(rdev->ddev, 4);
4906 						rdev->pm.vblank_sync = true;
4907 						wake_up(&rdev->irq.vblank_queue);
4908 					}
4909 					if (atomic_read(&rdev->irq.pflip[4]))
4910 						radeon_crtc_handle_vblank(rdev, 4);
4911 					rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VBLANK_INTERRUPT;
4912 					DRM_DEBUG_VBLANK("IH: D5 vblank\n");
4913 				}
4914 				break;
4915 			case 1: /* D5 vline */
4916 				if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT) {
4917 					rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VLINE_INTERRUPT;
4918 					DRM_DEBUG_VBLANK("IH: D5 vline\n");
4919 				}
4920 				break;
4921 			default:
4922 				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4923 				break;
4924 			}
4925 			break;
4926 		case 6: /* D6 vblank/vline */
4927 			switch (src_data) {
4928 			case 0: /* D6 vblank */
4929 				if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT) {
4930 					if (rdev->irq.crtc_vblank_int[5]) {
4931 						drm_handle_vblank(rdev->ddev, 5);
4932 						rdev->pm.vblank_sync = true;
4933 						wake_up(&rdev->irq.vblank_queue);
4934 					}
4935 					if (atomic_read(&rdev->irq.pflip[5]))
4936 						radeon_crtc_handle_vblank(rdev, 5);
4937 					rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VBLANK_INTERRUPT;
4938 					DRM_DEBUG_VBLANK("IH: D6 vblank\n");
4939 				}
4940 				break;
4941 			case 1: /* D6 vline */
4942 				if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT) {
4943 					rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VLINE_INTERRUPT;
4944 					DRM_DEBUG_VBLANK("IH: D6 vline\n");
4945 				}
4946 				break;
4947 			default:
4948 				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4949 				break;
4950 			}
4951 			break;
4952 		case 8: /* D1 page flip */
4953 		case 10: /* D2 page flip */
4954 		case 12: /* D3 page flip */
4955 		case 14: /* D4 page flip */
4956 		case 16: /* D5 page flip */
4957 		case 18: /* D6 page flip */
4958 			DRM_DEBUG_VBLANK("IH: D%d flip\n", ((src_id - 8) >> 1) + 1);
4959 			if (radeon_use_pflipirq > 0)
4960 				radeon_crtc_handle_flip(rdev, (src_id - 8) >> 1);
4961 			break;
4962 		case 42: /* HPD hotplug */
4963 			switch (src_data) {
4964 			case 0:
4965 				if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT) {
4966 					rdev->irq.stat_regs.evergreen.disp_int &= ~DC_HPD1_INTERRUPT;
4967 					queue_hotplug = true;
4968 					DRM_DEBUG("IH: HPD1\n");
4969 				}
4970 				break;
4971 			case 1:
4972 				if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT) {
4973 					rdev->irq.stat_regs.evergreen.disp_int_cont &= ~DC_HPD2_INTERRUPT;
4974 					queue_hotplug = true;
4975 					DRM_DEBUG("IH: HPD2\n");
4976 				}
4977 				break;
4978 			case 2:
4979 				if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT) {
4980 					rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~DC_HPD3_INTERRUPT;
4981 					queue_hotplug = true;
4982 					DRM_DEBUG("IH: HPD3\n");
4983 				}
4984 				break;
4985 			case 3:
4986 				if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT) {
4987 					rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~DC_HPD4_INTERRUPT;
4988 					queue_hotplug = true;
4989 					DRM_DEBUG("IH: HPD4\n");
4990 				}
4991 				break;
4992 			case 4:
4993 				if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT) {
4994 					rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~DC_HPD5_INTERRUPT;
4995 					queue_hotplug = true;
4996 					DRM_DEBUG("IH: HPD5\n");
4997 				}
4998 				break;
4999 			case 5:
5000 				if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT) {
5001 					rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~DC_HPD6_INTERRUPT;
5002 					queue_hotplug = true;
5003 					DRM_DEBUG("IH: HPD6\n");
5004 				}
5005 				break;
5006 			default:
5007 				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5008 				break;
5009 			}
5010 			break;
5011 		case 44: /* hdmi */
5012 			switch (src_data) {
5013 			case 0:
5014 				if (rdev->irq.stat_regs.evergreen.afmt_status1 & AFMT_AZ_FORMAT_WTRIG) {
5015 					rdev->irq.stat_regs.evergreen.afmt_status1 &= ~AFMT_AZ_FORMAT_WTRIG;
5016 					queue_hdmi = true;
5017 					DRM_DEBUG("IH: HDMI0\n");
5018 				}
5019 				break;
5020 			case 1:
5021 				if (rdev->irq.stat_regs.evergreen.afmt_status2 & AFMT_AZ_FORMAT_WTRIG) {
5022 					rdev->irq.stat_regs.evergreen.afmt_status2 &= ~AFMT_AZ_FORMAT_WTRIG;
5023 					queue_hdmi = true;
5024 					DRM_DEBUG("IH: HDMI1\n");
5025 				}
5026 				break;
5027 			case 2:
5028 				if (rdev->irq.stat_regs.evergreen.afmt_status3 & AFMT_AZ_FORMAT_WTRIG) {
5029 					rdev->irq.stat_regs.evergreen.afmt_status3 &= ~AFMT_AZ_FORMAT_WTRIG;
5030 					queue_hdmi = true;
5031 					DRM_DEBUG("IH: HDMI2\n");
5032 				}
5033 				break;
5034 			case 3:
5035 				if (rdev->irq.stat_regs.evergreen.afmt_status4 & AFMT_AZ_FORMAT_WTRIG) {
5036 					rdev->irq.stat_regs.evergreen.afmt_status4 &= ~AFMT_AZ_FORMAT_WTRIG;
5037 					queue_hdmi = true;
5038 					DRM_DEBUG("IH: HDMI3\n");
5039 				}
5040 				break;
5041 			case 4:
5042 				if (rdev->irq.stat_regs.evergreen.afmt_status5 & AFMT_AZ_FORMAT_WTRIG) {
5043 					rdev->irq.stat_regs.evergreen.afmt_status5 &= ~AFMT_AZ_FORMAT_WTRIG;
5044 					queue_hdmi = true;
5045 					DRM_DEBUG("IH: HDMI4\n");
5046 				}
5047 				break;
5048 			case 5:
5049 				if (rdev->irq.stat_regs.evergreen.afmt_status6 & AFMT_AZ_FORMAT_WTRIG) {
5050 					rdev->irq.stat_regs.evergreen.afmt_status6 &= ~AFMT_AZ_FORMAT_WTRIG;
5051 					queue_hdmi = true;
5052 					DRM_DEBUG("IH: HDMI5\n");
5053 				}
5054 				break;
5055 			default:
5056 				DRM_ERROR("Unhandled interrupt: %d %d\n", src_id, src_data);
5057 				break;
5058 			}
5059 		case 124: /* UVD */
5060 			DRM_DEBUG("IH: UVD int: 0x%08x\n", src_data);
5061 			radeon_fence_process(rdev, R600_RING_TYPE_UVD_INDEX);
5062 			break;
5063 		case 146:
5064 		case 147:
5065 			addr = RREG32(VM_CONTEXT1_PROTECTION_FAULT_ADDR);
5066 			status = RREG32(VM_CONTEXT1_PROTECTION_FAULT_STATUS);
5067 			/* reset addr and status */
5068 			WREG32_P(VM_CONTEXT1_CNTL2, 1, ~1);
5069 			if (addr == 0x0 && status == 0x0)
5070 				break;
5071 			dev_err(rdev->dev, "GPU fault detected: %d 0x%08x\n", src_id, src_data);
5072 			dev_err(rdev->dev, "  VM_CONTEXT1_PROTECTION_FAULT_ADDR   0x%08X\n",
5073 				addr);
5074 			dev_err(rdev->dev, "  VM_CONTEXT1_PROTECTION_FAULT_STATUS 0x%08X\n",
5075 				status);
5076 			cayman_vm_decode_fault(rdev, status, addr);
5077 			break;
5078 		case 176: /* CP_INT in ring buffer */
5079 		case 177: /* CP_INT in IB1 */
5080 		case 178: /* CP_INT in IB2 */
5081 			DRM_DEBUG("IH: CP int: 0x%08x\n", src_data);
5082 			radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
5083 			break;
5084 		case 181: /* CP EOP event */
5085 			DRM_DEBUG("IH: CP EOP\n");
5086 			if (rdev->family >= CHIP_CAYMAN) {
5087 				switch (src_data) {
5088 				case 0:
5089 					radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
5090 					break;
5091 				case 1:
5092 					radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP1_INDEX);
5093 					break;
5094 				case 2:
5095 					radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP2_INDEX);
5096 					break;
5097 				}
5098 			} else
5099 				radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
5100 			break;
5101 		case 224: /* DMA trap event */
5102 			DRM_DEBUG("IH: DMA trap\n");
5103 			radeon_fence_process(rdev, R600_RING_TYPE_DMA_INDEX);
5104 			break;
5105 		case 230: /* thermal low to high */
5106 			DRM_DEBUG("IH: thermal low to high\n");
5107 			rdev->pm.dpm.thermal.high_to_low = false;
5108 			queue_thermal = true;
5109 			break;
5110 		case 231: /* thermal high to low */
5111 			DRM_DEBUG("IH: thermal high to low\n");
5112 			rdev->pm.dpm.thermal.high_to_low = true;
5113 			queue_thermal = true;
5114 			break;
5115 		case 233: /* GUI IDLE */
5116 			DRM_DEBUG("IH: GUI idle\n");
5117 			break;
5118 		case 244: /* DMA trap event */
5119 			if (rdev->family >= CHIP_CAYMAN) {
5120 				DRM_DEBUG("IH: DMA1 trap\n");
5121 				radeon_fence_process(rdev, CAYMAN_RING_TYPE_DMA1_INDEX);
5122 			}
5123 			break;
5124 		default:
5125 			DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5126 			break;
5127 		}
5128 
5129 		/* wptr/rptr are in bytes! */
5130 		rptr += 16;
5131 		rptr &= rdev->ih.ptr_mask;
5132 		WREG32(IH_RB_RPTR, rptr);
5133 	}
5134 	if (queue_hotplug)
5135 		taskqueue_enqueue(rdev->tq, &rdev->hotplug_work);
5136 	if (queue_hdmi)
5137 		taskqueue_enqueue(rdev->tq, &rdev->audio_work);
5138 	if (queue_thermal && rdev->pm.dpm_enabled)
5139 		taskqueue_enqueue(rdev->tq, &rdev->pm.dpm.thermal.work);
5140 	rdev->ih.rptr = rptr;
5141 	atomic_set(&rdev->ih.lock, 0);
5142 
5143 	/* make sure wptr hasn't changed while processing */
5144 	wptr = evergreen_get_ih_wptr(rdev);
5145 	if (wptr != rptr)
5146 		goto restart_ih;
5147 
5148 	return IRQ_HANDLED;
5149 }
5150 
/**
 * evergreen_startup - program the asic and bring up the rings
 *
 * @rdev: radeon_device pointer
 *
 * Common hw bring-up used by both init and resume: program the MC,
 * load microcode, enable GART/AGP, set up RLC/write-back/fence
 * infrastructure, enable interrupts and start the CP, DMA and UVD
 * rings.  The order of the steps is significant.
 * Returns 0 on success, negative error code on failure.
 */
static int evergreen_startup(struct radeon_device *rdev)
{
	struct radeon_ring *ring;
	int r;

	/* enable pcie gen2 link */
	evergreen_pcie_gen2_enable(rdev);
	/* enable aspm */
	evergreen_program_aspm(rdev);

	/* scratch needs to be initialized before MC */
	r = r600_vram_scratch_init(rdev);
	if (r)
		return r;

	evergreen_mc_program(rdev);

	/* DCE5 parts need MC ucode loaded here unless dpm is active */
	if (ASIC_IS_DCE5(rdev) && !rdev->pm.dpm_enabled) {
		r = ni_mc_load_microcode(rdev);
		if (r) {
			DRM_ERROR("Failed to load MC firmware!\n");
			return r;
		}
	}

	/* set up GPU address translation (AGP aperture or PCIe GART) */
	if (rdev->flags & RADEON_IS_AGP) {
		evergreen_agp_enable(rdev);
	} else {
		r = evergreen_pcie_gart_enable(rdev);
		if (r)
			return r;
	}
	evergreen_gpu_init(rdev);

	/* allocate rlc buffers */
	if (rdev->flags & RADEON_IS_IGP) {
		rdev->rlc.reg_list = sumo_rlc_save_restore_register_list;
		rdev->rlc.reg_list_size =
			(u32)ARRAY_SIZE(sumo_rlc_save_restore_register_list);
		rdev->rlc.cs_data = evergreen_cs_data;
		r = sumo_rlc_init(rdev);
		if (r) {
			DRM_ERROR("Failed to init rlc BOs!\n");
			return r;
		}
	}

	/* allocate wb buffer */
	r = radeon_wb_init(rdev);
	if (r)
		return r;

	r = radeon_fence_driver_start_ring(rdev, RADEON_RING_TYPE_GFX_INDEX);
	if (r) {
		dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r);
		return r;
	}

	r = radeon_fence_driver_start_ring(rdev, R600_RING_TYPE_DMA_INDEX);
	if (r) {
		dev_err(rdev->dev, "failed initializing DMA fences (%d).\n", r);
		return r;
	}

	/* bring up UVD; on failure the UVD ring is simply disabled below */
	r = uvd_v2_2_resume(rdev);
	if (!r) {
		r = radeon_fence_driver_start_ring(rdev,
						   R600_RING_TYPE_UVD_INDEX);
		if (r)
			dev_err(rdev->dev, "UVD fences init error (%d).\n", r);
	}

	if (r)
		rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size = 0;

	/* Enable IRQ */
	if (!rdev->irq.installed) {
		r = radeon_irq_kms_init(rdev);
		if (r)
			return r;
	}

	r = r600_irq_init(rdev);
	if (r) {
		DRM_ERROR("radeon: IH init failed (%d).\n", r);
		radeon_irq_kms_fini(rdev);
		return r;
	}
	evergreen_irq_set(rdev);

	/* init and start the GFX and DMA rings */
	ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
	r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP_RPTR_OFFSET,
			     RADEON_CP_PACKET2);
	if (r)
		return r;

	ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX];
	r = radeon_ring_init(rdev, ring, ring->ring_size, R600_WB_DMA_RPTR_OFFSET,
			     DMA_PACKET(DMA_PACKET_NOP, 0, 0));
	if (r)
		return r;

	r = evergreen_cp_load_microcode(rdev);
	if (r)
		return r;
	r = evergreen_cp_resume(rdev);
	if (r)
		return r;
	r = r600_dma_resume(rdev);
	if (r)
		return r;

	/* UVD ring is only started if uvd_v2_2_resume() succeeded above */
	ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX];
	if (ring->ring_size) {
		r = radeon_ring_init(rdev, ring, ring->ring_size, 0,
				     RADEON_CP_PACKET2);
		if (!r)
			r = uvd_v1_0_init(rdev);

		if (r)
			DRM_ERROR("radeon: error initializing UVD (%d).\n", r);
	}

	r = radeon_ib_pool_init(rdev);
	if (r) {
		dev_err(rdev->dev, "IB initialization failed (%d).\n", r);
		return r;
	}

	r = r600_audio_init(rdev);
	if (r) {
		DRM_ERROR("radeon: audio init failed\n");
		return r;
	}

	return 0;
}
5288 
/**
 * evergreen_resume - resume the asic
 *
 * @rdev: radeon_device pointer
 *
 * Reset and re-post the asic, restore the golden registers, resume
 * power management and re-run the common startup sequence.
 * Returns 0 on success, negative error code on failure.
 */
int evergreen_resume(struct radeon_device *rdev)
{
	int r;

	/* reset the asic, the gfx blocks are often in a bad state
	 * after the driver is unloaded or after a resume
	 */
	if (radeon_asic_reset(rdev))
		dev_warn(rdev->dev, "GPU reset failed !\n");
	/* Do not reset GPU before posting, on rv770 hw unlike on r500 hw,
	 * posting will perform necessary task to bring back GPU into good
	 * shape.
	 */
	/* post card */
	atom_asic_init(rdev->mode_info.atom_context);

	/* init golden registers */
	evergreen_init_golden_registers(rdev);

	if (rdev->pm.pm_method == PM_METHOD_DPM)
		radeon_pm_resume(rdev);

	rdev->accel_working = true;
	r = evergreen_startup(rdev);
	if (r) {
		DRM_ERROR("evergreen startup failed on resume\n");
		rdev->accel_working = false;
		return r;
	}

	return r;

}
5322 
/**
 * evergreen_suspend - suspend the asic
 *
 * @rdev: radeon_device pointer
 *
 * Tear down in reverse bring-up order: suspend power management and
 * audio, stop UVD, the CP and DMA engines, disable interrupts,
 * write-back and the GART.  Always returns 0.
 */
int evergreen_suspend(struct radeon_device *rdev)
{
	radeon_pm_suspend(rdev);
	r600_audio_fini(rdev);
	uvd_v1_0_fini(rdev);
	radeon_uvd_suspend(rdev);
	r700_cp_stop(rdev);
	r600_dma_stop(rdev);
	evergreen_irq_suspend(rdev);
	radeon_wb_disable(rdev);
	evergreen_pcie_gart_disable(rdev);

	return 0;
}
5337 
5338 /* Plan is to move initialization in that function and use
5339  * helper function so that radeon_device_init pretty much
5340  * do nothing more than calling asic specific function. This
5341  * should also allow to remove a bunch of callback function
5342  * like vram_info.
5343  */
/**
 * evergreen_init - asic-specific driver and hw init
 *
 * @rdev: radeon_device pointer
 *
 * One-time initialization for evergreen-class GPUs: reads and validates
 * the (ATOM) BIOS, resets and posts the card if needed, sets up clocks,
 * fences, AGP, the memory controller, buffer objects, microcode, power
 * management, the GFX/DMA/UVD rings, the IH ring and the GART, then
 * attempts first startup.  A startup failure tears acceleration back
 * down but is not fatal to init itself (accel_working is cleared).
 *
 * Returns 0 on success, negative error code on failure.
 */
int evergreen_init(struct radeon_device *rdev)
{
	int r;

	/* Read BIOS */
	if (!radeon_get_bios(rdev)) {
		if (ASIC_IS_AVIVO(rdev))
			return -EINVAL;
	}
	/* Must be an ATOMBIOS */
	if (!rdev->is_atom_bios) {
		dev_err(rdev->dev, "Expecting atombios for evergreen GPU\n");
		return -EINVAL;
	}
	r = radeon_atombios_init(rdev);
	if (r)
		return r;
	/* reset the asic, the gfx blocks are often in a bad state
	 * after the driver is unloaded or after a resume
	 */
	if (radeon_asic_reset(rdev))
		dev_warn(rdev->dev, "GPU reset failed !\n");
	/* Post card if necessary */
	if (!radeon_card_posted(rdev)) {
		if (!rdev->bios) {
			dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n");
			return -EINVAL;
		}
		DRM_INFO("GPU not posted. posting now...\n");
		atom_asic_init(rdev->mode_info.atom_context);
	}
	/* init golden registers */
	evergreen_init_golden_registers(rdev);
	/* Initialize scratch registers */
	r600_scratch_init(rdev);
	/* Initialize surface registers */
	radeon_surface_init(rdev);
	/* Initialize clocks */
	radeon_get_clock_info(rdev->ddev);
	/* Fence driver */
	r = radeon_fence_driver_init(rdev);
	if (r)
		return r;
	/* initialize AGP; failure is non-fatal, fall back to PCIE GART */
	if (rdev->flags & RADEON_IS_AGP) {
		r = radeon_agp_init(rdev);
		if (r)
			radeon_agp_disable(rdev);
	}
	/* initialize memory controller */
	r = evergreen_mc_init(rdev);
	if (r)
		return r;
	/* Memory manager */
	r = radeon_bo_init(rdev);
	if (r)
		return r;

	/* Load microcode: DCE5 (NI) parts additionally require MC ucode */
	if (ASIC_IS_DCE5(rdev)) {
		if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw || !rdev->mc_fw) {
			r = ni_init_microcode(rdev);
			if (r) {
				DRM_ERROR("Failed to load firmware!\n");
				return r;
			}
		}
	} else {
		if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw) {
			r = r600_init_microcode(rdev);
			if (r) {
				DRM_ERROR("Failed to load firmware!\n");
				return r;
			}
		}
	}

	/* Initialize power management */
	radeon_pm_init(rdev);

	/* GFX ring: 1MB */
	rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ring_obj = NULL;
	r600_ring_init(rdev, &rdev->ring[RADEON_RING_TYPE_GFX_INDEX], 1024 * 1024);

	/* DMA ring: 64KB */
	rdev->ring[R600_RING_TYPE_DMA_INDEX].ring_obj = NULL;
	r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_DMA_INDEX], 64 * 1024);

	/* UVD is optional; only set up its ring if UVD init succeeded */
	r = radeon_uvd_init(rdev);
	if (!r) {
		rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_obj = NULL;
		r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_UVD_INDEX],
			       4096);
	}

	/* interrupt handler ring: 64KB */
	rdev->ih.ring_obj = NULL;
	r600_ih_ring_init(rdev, 64 * 1024);

	r = r600_pcie_gart_init(rdev);
	if (r)
		return r;

	/* First startup attempt; on failure disable acceleration but
	 * keep the driver loaded (modesetting still works).
	 */
	rdev->accel_working = true;
	r = evergreen_startup(rdev);
	if (r) {
		dev_err(rdev->dev, "disabling GPU acceleration\n");
		r700_cp_fini(rdev);
		r600_dma_fini(rdev);
		r600_irq_fini(rdev);
		if (rdev->flags & RADEON_IS_IGP)
			sumo_rlc_fini(rdev);
		radeon_wb_fini(rdev);
		radeon_ib_pool_fini(rdev);
		radeon_irq_kms_fini(rdev);
		evergreen_pcie_gart_fini(rdev);
		rdev->accel_working = false;
	}

	/* Don't start up if the MC ucode is missing on BTC parts.
	 * The default clocks and voltages before the MC ucode
	 * is loaded are not sufficient for advanced operations.
	 */
	if (ASIC_IS_DCE5(rdev)) {
		if (!rdev->mc_fw && !(rdev->flags & RADEON_IS_IGP)) {
			DRM_ERROR("radeon: MC ucode required for NI+.\n");
			return -EINVAL;
		}
	}

	return 0;
}
5472 
/**
 * evergreen_fini - asic-specific driver and hw teardown
 *
 * @rdev: radeon_device pointer
 *
 * Tears down everything evergreen_init() set up, in reverse dependency
 * order: engines and interrupts first, then UVD, GART, VRAM scratch,
 * GEM, fences, AGP, buffer objects, atombios state and microcode, and
 * finally frees the BIOS copy.
 */
void evergreen_fini(struct radeon_device *rdev)
{
	radeon_pm_fini(rdev);
	r600_audio_fini(rdev);
	r700_cp_fini(rdev);
	r600_dma_fini(rdev);
	r600_irq_fini(rdev);
	if (rdev->flags & RADEON_IS_IGP)
		sumo_rlc_fini(rdev);
	radeon_wb_fini(rdev);
	radeon_ib_pool_fini(rdev);
	radeon_irq_kms_fini(rdev);
	uvd_v1_0_fini(rdev);
	radeon_uvd_fini(rdev);
	evergreen_pcie_gart_fini(rdev);
	r600_vram_scratch_fini(rdev);
	radeon_gem_fini(rdev);
	radeon_fence_driver_fini(rdev);
	radeon_agp_fini(rdev);
	radeon_bo_fini(rdev);
	radeon_atombios_fini(rdev);
	if (ASIC_IS_DCE5(rdev))
		ni_fini_microcode(rdev);
	else
		r600_fini_microcode(rdev);
	kfree(rdev->bios);
	rdev->bios = NULL;
}
5501 
/**
 * evergreen_pcie_gen2_enable - try to switch the PCIE link to gen2 speed
 *
 * @rdev: radeon_device pointer
 *
 * No-op when gen2 is disabled via the radeon.pcie_gen2 module option,
 * on IGP/non-PCIE/x2 parts, when the platform doesn't advertise 5.0GT/s
 * support, or when gen2 is already active.  Otherwise programs
 * PCIE_LC_SPEED_CNTL (and LC_LINK_WIDTH_CNTL) to request the speed
 * change if the other side of the link supports gen2.
 */
void evergreen_pcie_gen2_enable(struct radeon_device *rdev)
{
	u32 link_width_cntl, speed_cntl, mask;
	int ret;

	/* user opted out via the radeon.pcie_gen2 parameter */
	if (radeon_pcie_gen2 == 0)
		return;

	if (rdev->flags & RADEON_IS_IGP)
		return;

	if (!(rdev->flags & RADEON_IS_PCIE))
		return;

	/* x2 cards have a special sequence */
	if (ASIC_IS_X2(rdev))
		return;

	ret = drm_pcie_get_speed_cap_mask(rdev->ddev, &mask);
	if (ret != 0)
		return;

	/* platform must support 5.0GT/s for gen2 to make sense */
	if (!(mask & DRM_PCIE_SPEED_50))
		return;

	speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
	if (speed_cntl & LC_CURRENT_DATA_RATE) {
		DRM_INFO("PCIE gen 2 link speeds already enabled\n");
		return;
	}

	DRM_INFO("enabling PCIE gen 2 link speeds, disable with radeon.pcie_gen2=0\n");

	/* only attempt the switch if the link partner supports gen2 */
	if ((speed_cntl & LC_OTHER_SIDE_EVER_SENT_GEN2) ||
	    (speed_cntl & LC_OTHER_SIDE_SUPPORTS_GEN2)) {

		/* allow upconfigure so the link can renegotiate width */
		link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
		link_width_cntl &= ~LC_UPCONFIGURE_DIS;
		WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);

		speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
		speed_cntl &= ~LC_TARGET_LINK_SPEED_OVERRIDE_EN;
		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);

		/* pulse LC_CLR_FAILED_SPD_CHANGE_CNT to clear the
		 * failed-speed-change counter (set, then clear)
		 */
		speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
		speed_cntl |= LC_CLR_FAILED_SPD_CHANGE_CNT;
		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);

		speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
		speed_cntl &= ~LC_CLR_FAILED_SPD_CHANGE_CNT;
		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);

		/* finally strap the link to gen2 */
		speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
		speed_cntl |= LC_GEN2_EN_STRAP;
		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);

	} else {
		link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
		/* XXX: only disable it if gen1 bridge vendor == 0x111d or 0x1106 */
		if (1)
			link_width_cntl |= LC_UPCONFIGURE_DIS;
		else
			link_width_cntl &= ~LC_UPCONFIGURE_DIS;
		WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
	}
}
5568 
/**
 * evergreen_program_aspm - configure PCIE Active State Power Management
 *
 * @rdev: radeon_device pointer
 *
 * No-op when ASPM is disabled via the radeon.aspm module option or on
 * non-PCIE parts.  Otherwise programs the PIF pairing registers, the
 * L0s/L1 inactivity timers in PCIE_LC_CNTL, and (unless PLL-off-in-L1
 * is disabled) the per-PHY PLL power-down state, ramp-up time and L2
 * exit time registers.  L0s is disabled entirely on a fixed list of
 * families; L1 and PLL-off-in-L1 are currently always enabled (the
 * disable_* flags are hardwired false).
 */
void evergreen_program_aspm(struct radeon_device *rdev)
{
	u32 data, orig;
	u32 pcie_lc_cntl, pcie_lc_cntl_old;
	bool disable_l0s, disable_l1 = false, disable_plloff_in_l1 = false;
	/* fusion_platform = true
	 * if the system is a fusion system
	 * (APU or DGPU in a fusion system).
	 * todo: check if the system is a fusion platform.
	 */
	bool fusion_platform = false;

	/* user opted out via the radeon.aspm parameter */
	if (radeon_aspm == 0)
		return;

	if (!(rdev->flags & RADEON_IS_PCIE))
		return;

	/* these families don't get L0s; everything else does */
	switch (rdev->family) {
	case CHIP_CYPRESS:
	case CHIP_HEMLOCK:
	case CHIP_JUNIPER:
	case CHIP_REDWOOD:
	case CHIP_CEDAR:
	case CHIP_SUMO:
	case CHIP_SUMO2:
	case CHIP_PALM:
	case CHIP_ARUBA:
		disable_l0s = true;
		break;
	default:
		disable_l0s = false;
		break;
	}

	if (rdev->flags & RADEON_IS_IGP)
		fusion_platform = true; /* XXX also dGPUs in a fusion system */

	/* fusion parts clear MULTI_PIF, discrete parts set it */
	data = orig = RREG32_PIF_PHY0(PB0_PIF_PAIRING);
	if (fusion_platform)
		data &= ~MULTI_PIF;
	else
		data |= MULTI_PIF;
	if (data != orig)
		WREG32_PIF_PHY0(PB0_PIF_PAIRING, data);

	data = orig = RREG32_PIF_PHY1(PB1_PIF_PAIRING);
	if (fusion_platform)
		data &= ~MULTI_PIF;
	else
		data |= MULTI_PIF;
	if (data != orig)
		WREG32_PIF_PHY1(PB1_PIF_PAIRING, data);

	/* program the L0s/L1 inactivity timers; values differ for
	 * BARTS-and-newer vs older evergreen parts
	 */
	pcie_lc_cntl = pcie_lc_cntl_old = RREG32_PCIE_PORT(PCIE_LC_CNTL);
	pcie_lc_cntl &= ~(LC_L0S_INACTIVITY_MASK | LC_L1_INACTIVITY_MASK);
	if (!disable_l0s) {
		if (rdev->family >= CHIP_BARTS)
			pcie_lc_cntl |= LC_L0S_INACTIVITY(7);
		else
			pcie_lc_cntl |= LC_L0S_INACTIVITY(3);
	}

	if (!disable_l1) {
		if (rdev->family >= CHIP_BARTS)
			pcie_lc_cntl |= LC_L1_INACTIVITY(7);
		else
			pcie_lc_cntl |= LC_L1_INACTIVITY(8);

		/* allow the PLLs to power down while in L1 */
		if (!disable_plloff_in_l1) {
			data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0);
			data &= ~(PLL_POWER_STATE_IN_OFF_0_MASK | PLL_POWER_STATE_IN_TXS2_0_MASK);
			data |= PLL_POWER_STATE_IN_OFF_0(7) | PLL_POWER_STATE_IN_TXS2_0(7);
			if (data != orig)
				WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0, data);

			data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1);
			data &= ~(PLL_POWER_STATE_IN_OFF_1_MASK | PLL_POWER_STATE_IN_TXS2_1_MASK);
			data |= PLL_POWER_STATE_IN_OFF_1(7) | PLL_POWER_STATE_IN_TXS2_1(7);
			if (data != orig)
				WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1, data);

			data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0);
			data &= ~(PLL_POWER_STATE_IN_OFF_0_MASK | PLL_POWER_STATE_IN_TXS2_0_MASK);
			data |= PLL_POWER_STATE_IN_OFF_0(7) | PLL_POWER_STATE_IN_TXS2_0(7);
			if (data != orig)
				WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0, data);

			data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1);
			data &= ~(PLL_POWER_STATE_IN_OFF_1_MASK | PLL_POWER_STATE_IN_TXS2_1_MASK);
			data |= PLL_POWER_STATE_IN_OFF_1(7) | PLL_POWER_STATE_IN_TXS2_1(7);
			if (data != orig)
				WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1, data);

			/* BARTS+ additionally tune the PLL ramp-up time */
			if (rdev->family >= CHIP_BARTS) {
				data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0);
				data &= ~PLL_RAMP_UP_TIME_0_MASK;
				data |= PLL_RAMP_UP_TIME_0(4);
				if (data != orig)
					WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0, data);

				data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1);
				data &= ~PLL_RAMP_UP_TIME_1_MASK;
				data |= PLL_RAMP_UP_TIME_1(4);
				if (data != orig)
					WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1, data);

				data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0);
				data &= ~PLL_RAMP_UP_TIME_0_MASK;
				data |= PLL_RAMP_UP_TIME_0(4);
				if (data != orig)
					WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0, data);

				data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1);
				data &= ~PLL_RAMP_UP_TIME_1_MASK;
				data |= PLL_RAMP_UP_TIME_1(4);
				if (data != orig)
					WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1, data);
			}

			data = orig = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
			data &= ~LC_DYN_LANES_PWR_STATE_MASK;
			data |= LC_DYN_LANES_PWR_STATE(3);
			if (data != orig)
				WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, data);

			/* BARTS+ also set the LS2 exit time */
			if (rdev->family >= CHIP_BARTS) {
				data = orig = RREG32_PIF_PHY0(PB0_PIF_CNTL);
				data &= ~LS2_EXIT_TIME_MASK;
				data |= LS2_EXIT_TIME(1);
				if (data != orig)
					WREG32_PIF_PHY0(PB0_PIF_CNTL, data);

				data = orig = RREG32_PIF_PHY1(PB1_PIF_CNTL);
				data &= ~LS2_EXIT_TIME_MASK;
				data |= LS2_EXIT_TIME(1);
				if (data != orig)
					WREG32_PIF_PHY1(PB1_PIF_CNTL, data);
			}
		}
	}

	/* evergreen parts only */
	if (rdev->family < CHIP_BARTS)
		pcie_lc_cntl |= LC_PMI_TO_L1_DIS;

	/* avoid the register write if nothing changed */
	if (pcie_lc_cntl != pcie_lc_cntl_old)
		WREG32_PCIE_PORT(PCIE_LC_CNTL, pcie_lc_cntl);
}
5718