xref: /dragonfly/sys/dev/drm/radeon/ni_dpm.c (revision e98bdfd3)
1 /*
2  * Copyright 2012 Advanced Micro Devices, Inc.
3  *
4  * Permission is hereby granted, free of charge, to any person obtaining a
5  * copy of this software and associated documentation files (the "Software"),
6  * to deal in the Software without restriction, including without limitation
7  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8  * and/or sell copies of the Software, and to permit persons to whom the
9  * Software is furnished to do so, subject to the following conditions:
10  *
11  * The above copyright notice and this permission notice shall be included in
12  * all copies or substantial portions of the Software.
13  *
14  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
17  * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18  * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19  * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20  * OTHER DEALINGS IN THE SOFTWARE.
21  *
22  */
23 
24 #include <drm/drmP.h>
25 #include "radeon.h"
26 #include "radeon_asic.h"
27 #include "nid.h"
28 #include "r600_dpm.h"
29 #include "ni_dpm.h"
30 #include "atom.h"
31 #include <linux/math64.h>
32 #include <linux/seq_file.h>
33 
34 #define MC_CG_ARB_FREQ_F0           0x0a
35 #define MC_CG_ARB_FREQ_F1           0x0b
36 #define MC_CG_ARB_FREQ_F2           0x0c
37 #define MC_CG_ARB_FREQ_F3           0x0d
38 
39 #define SMC_RAM_END 0xC000
40 
/*
 * CAC (capacitance/activity-based leakage estimation) weight table for
 * Cayman XT boards.  Positional initializers for struct ni_cac_weights
 * (presumably declared in ni_dpm.h — field order must match that
 * declaration exactly; do not reorder entries).
 */
static const struct ni_cac_weights cac_weights_cayman_xt =
{
	0x15,
	0x2,
	0x19,
	0x2,
	0x8,
	0x14,
	0x2,
	0x16,
	0xE,
	0x17,
	0x13,
	0x2B,
	0x10,
	0x7,
	0x5,
	0x5,
	0x5,
	0x2,
	0x3,
	0x9,
	0x10,
	0x10,
	0x2B,
	0xA,
	0x9,
	0x4,
	0xD,
	0xD,
	0x3E,
	0x18,
	0x14,
	0,
	0x3,
	0x3,
	0x5,
	0,
	0x2,
	0,
	0,
	0,
	0,
	0,
	0,
	0,
	0,
	0,
	0x1CC,
	0,
	0x164,
	1,
	1,
	1,
	1,
	12,
	12,
	12,
	0x12,
	0x1F,
	132,
	5,
	7,
	0,
	{ 0, 0, 0, 0, 0, 0, 0, 0 },
	{ 0, 0, 0, 0 },
	true
};
109 
/*
 * CAC weight table for Cayman PRO boards.  Same positional layout as
 * cac_weights_cayman_xt above (struct ni_cac_weights) — do not reorder.
 */
static const struct ni_cac_weights cac_weights_cayman_pro =
{
	0x16,
	0x4,
	0x10,
	0x2,
	0xA,
	0x16,
	0x2,
	0x18,
	0x10,
	0x1A,
	0x16,
	0x2D,
	0x12,
	0xA,
	0x6,
	0x6,
	0x6,
	0x2,
	0x4,
	0xB,
	0x11,
	0x11,
	0x2D,
	0xC,
	0xC,
	0x7,
	0x10,
	0x10,
	0x3F,
	0x1A,
	0x16,
	0,
	0x7,
	0x4,
	0x6,
	1,
	0x2,
	0x1,
	0,
	0,
	0,
	0,
	0,
	0,
	0x30,
	0,
	0x1CF,
	0,
	0x166,
	1,
	1,
	1,
	1,
	12,
	12,
	12,
	0x15,
	0x1F,
	132,
	6,
	6,
	0,
	{ 0, 0, 0, 0, 0, 0, 0, 0 },
	{ 0, 0, 0, 0 },
	true
};
178 
/*
 * CAC weight table for Cayman LE boards.  Same positional layout as
 * cac_weights_cayman_xt above (struct ni_cac_weights) — do not reorder.
 */
static const struct ni_cac_weights cac_weights_cayman_le =
{
	0x7,
	0xE,
	0x1,
	0xA,
	0x1,
	0x3F,
	0x2,
	0x18,
	0x10,
	0x1A,
	0x1,
	0x3F,
	0x1,
	0xE,
	0x6,
	0x6,
	0x6,
	0x2,
	0x4,
	0x9,
	0x1A,
	0x1A,
	0x2C,
	0xA,
	0x11,
	0x8,
	0x19,
	0x19,
	0x1,
	0x1,
	0x1A,
	0,
	0x8,
	0x5,
	0x8,
	0x1,
	0x3,
	0x1,
	0,
	0,
	0,
	0,
	0,
	0,
	0x38,
	0x38,
	0x239,
	0x3,
	0x18A,
	1,
	1,
	1,
	1,
	12,
	12,
	12,
	0x15,
	0x22,
	132,
	6,
	6,
	0,
	{ 0, 0, 0, 0, 0, 0, 0, 0 },
	{ 0, 0, 0, 0 },
	true
};
247 
248 #define NISLANDS_MGCG_SEQUENCE  300
249 
/*
 * { register offset, value, mask } triples, consumed by
 * btc_program_mgcg_hw_sequence() (see ni_cg_clockgating_default() below).
 * Default/power-on coarse clock gating (CGCG) / light sleep (CGLS)
 * settings.  NOTE(review): the 0x8f8/0x8fc write pairs look like an
 * index/data register pair — confirm against the register spec.
 */
static const u32 cayman_cgcg_cgls_default[] =
{
	0x000008f8, 0x00000010, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000011, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000012, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000013, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000014, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000015, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000016, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000017, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000018, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000019, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x0000001a, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x0000001b, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000020, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000021, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000022, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000023, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000024, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000025, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000026, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000027, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000028, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000029, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x0000002a, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x0000002b, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff
};
/* Number of {reg, val, mask} triples.  NOTE(review): the expansion is
 * unparenthesized; fine for the simple "count = X" uses in this file,
 * but wrap in parentheses if ever reused inside a larger expression. */
#define CAYMAN_CGCG_CGLS_DEFAULT_LENGTH sizeof(cayman_cgcg_cgls_default) / (3 * sizeof(u32))
302 
/*
 * { register offset, value, mask } triples: CGCG/CGLS disable sequence,
 * applied by ni_gfx_clockgating_enable(rdev, false).
 */
static const u32 cayman_cgcg_cgls_disable[] =
{
	0x000008f8, 0x00000010, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x00000011, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x00000012, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x00000013, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x00000014, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x00000015, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x00000016, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x00000017, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x00000018, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x00000019, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x0000001a, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x0000001b, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x00000020, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000021, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000022, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000023, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000024, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000025, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000026, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000027, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000028, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000029, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x0000002a, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x0000002b, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x00000644, 0x000f7902, 0x001f4180,
	0x00000644, 0x000f3802, 0x001f4180
};
/* Number of {reg, val, mask} triples in the sequence above. */
#define CAYMAN_CGCG_CGLS_DISABLE_LENGTH sizeof(cayman_cgcg_cgls_disable) / (3 * sizeof(u32))
357 
/*
 * { register offset, value, mask } triples: CGCG/CGLS enable sequence,
 * applied by ni_gfx_clockgating_enable(rdev, true).
 */
static const u32 cayman_cgcg_cgls_enable[] =
{
	0x00000644, 0x000f7882, 0x001f4080,
	0x000008f8, 0x00000010, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000011, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000012, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000013, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000014, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000015, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000016, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000017, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000018, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000019, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x0000001a, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x0000001b, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000020, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x00000021, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x00000022, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x00000023, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x00000024, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x00000025, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x00000026, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x00000027, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x00000028, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x00000029, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x0000002a, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x0000002b, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff
};
/* Number of {reg, val, mask} triples in the sequence above. */
#define CAYMAN_CGCG_CGLS_ENABLE_LENGTH  sizeof(cayman_cgcg_cgls_enable) / (3 * sizeof(u32))
411 
/*
 * { register offset, value, mask } triples: default/power-on medium
 * grain clock gating (MGCG) settings, applied by
 * ni_mg_clockgating_default() via btc_program_mgcg_hw_sequence().
 * NOTE(review): the 0x802c/0x3fc4 writes appear to switch a register
 * bank before each group of 0x91xx writes — confirm against the spec.
 */
static const u32 cayman_mgcg_default[] =
{
	0x0000802c, 0xc0000000, 0xffffffff,
	0x00003fc4, 0xc0000000, 0xffffffff,
	0x00005448, 0x00000100, 0xffffffff,
	0x000055e4, 0x00000100, 0xffffffff,
	0x0000160c, 0x00000100, 0xffffffff,
	0x00008984, 0x06000100, 0xffffffff,
	0x0000c164, 0x00000100, 0xffffffff,
	0x00008a18, 0x00000100, 0xffffffff,
	0x0000897c, 0x06000100, 0xffffffff,
	0x00008b28, 0x00000100, 0xffffffff,
	0x00009144, 0x00800200, 0xffffffff,
	0x00009a60, 0x00000100, 0xffffffff,
	0x00009868, 0x00000100, 0xffffffff,
	0x00008d58, 0x00000100, 0xffffffff,
	0x00009510, 0x00000100, 0xffffffff,
	0x0000949c, 0x00000100, 0xffffffff,
	0x00009654, 0x00000100, 0xffffffff,
	0x00009030, 0x00000100, 0xffffffff,
	0x00009034, 0x00000100, 0xffffffff,
	0x00009038, 0x00000100, 0xffffffff,
	0x0000903c, 0x00000100, 0xffffffff,
	0x00009040, 0x00000100, 0xffffffff,
	0x0000a200, 0x00000100, 0xffffffff,
	0x0000a204, 0x00000100, 0xffffffff,
	0x0000a208, 0x00000100, 0xffffffff,
	0x0000a20c, 0x00000100, 0xffffffff,
	0x00009744, 0x00000100, 0xffffffff,
	0x00003f80, 0x00000100, 0xffffffff,
	0x0000a210, 0x00000100, 0xffffffff,
	0x0000a214, 0x00000100, 0xffffffff,
	0x000004d8, 0x00000100, 0xffffffff,
	0x00009664, 0x00000100, 0xffffffff,
	0x00009698, 0x00000100, 0xffffffff,
	0x000004d4, 0x00000200, 0xffffffff,
	0x000004d0, 0x00000000, 0xffffffff,
	0x000030cc, 0x00000104, 0xffffffff,
	0x0000d0c0, 0x00000100, 0xffffffff,
	0x0000d8c0, 0x00000100, 0xffffffff,
	0x0000802c, 0x40000000, 0xffffffff,
	0x00003fc4, 0x40000000, 0xffffffff,
	0x0000915c, 0x00010000, 0xffffffff,
	0x00009160, 0x00030002, 0xffffffff,
	0x00009164, 0x00050004, 0xffffffff,
	0x00009168, 0x00070006, 0xffffffff,
	0x00009178, 0x00070000, 0xffffffff,
	0x0000917c, 0x00030002, 0xffffffff,
	0x00009180, 0x00050004, 0xffffffff,
	0x0000918c, 0x00010006, 0xffffffff,
	0x00009190, 0x00090008, 0xffffffff,
	0x00009194, 0x00070000, 0xffffffff,
	0x00009198, 0x00030002, 0xffffffff,
	0x0000919c, 0x00050004, 0xffffffff,
	0x000091a8, 0x00010006, 0xffffffff,
	0x000091ac, 0x00090008, 0xffffffff,
	0x000091b0, 0x00070000, 0xffffffff,
	0x000091b4, 0x00030002, 0xffffffff,
	0x000091b8, 0x00050004, 0xffffffff,
	0x000091c4, 0x00010006, 0xffffffff,
	0x000091c8, 0x00090008, 0xffffffff,
	0x000091cc, 0x00070000, 0xffffffff,
	0x000091d0, 0x00030002, 0xffffffff,
	0x000091d4, 0x00050004, 0xffffffff,
	0x000091e0, 0x00010006, 0xffffffff,
	0x000091e4, 0x00090008, 0xffffffff,
	0x000091e8, 0x00000000, 0xffffffff,
	0x000091ec, 0x00070000, 0xffffffff,
	0x000091f0, 0x00030002, 0xffffffff,
	0x000091f4, 0x00050004, 0xffffffff,
	0x00009200, 0x00010006, 0xffffffff,
	0x00009204, 0x00090008, 0xffffffff,
	0x00009208, 0x00070000, 0xffffffff,
	0x0000920c, 0x00030002, 0xffffffff,
	0x00009210, 0x00050004, 0xffffffff,
	0x0000921c, 0x00010006, 0xffffffff,
	0x00009220, 0x00090008, 0xffffffff,
	0x00009224, 0x00070000, 0xffffffff,
	0x00009228, 0x00030002, 0xffffffff,
	0x0000922c, 0x00050004, 0xffffffff,
	0x00009238, 0x00010006, 0xffffffff,
	0x0000923c, 0x00090008, 0xffffffff,
	0x00009240, 0x00070000, 0xffffffff,
	0x00009244, 0x00030002, 0xffffffff,
	0x00009248, 0x00050004, 0xffffffff,
	0x00009254, 0x00010006, 0xffffffff,
	0x00009258, 0x00090008, 0xffffffff,
	0x0000925c, 0x00070000, 0xffffffff,
	0x00009260, 0x00030002, 0xffffffff,
	0x00009264, 0x00050004, 0xffffffff,
	0x00009270, 0x00010006, 0xffffffff,
	0x00009274, 0x00090008, 0xffffffff,
	0x00009278, 0x00070000, 0xffffffff,
	0x0000927c, 0x00030002, 0xffffffff,
	0x00009280, 0x00050004, 0xffffffff,
	0x0000928c, 0x00010006, 0xffffffff,
	0x00009290, 0x00090008, 0xffffffff,
	0x000092a8, 0x00070000, 0xffffffff,
	0x000092ac, 0x00030002, 0xffffffff,
	0x000092b0, 0x00050004, 0xffffffff,
	0x000092bc, 0x00010006, 0xffffffff,
	0x000092c0, 0x00090008, 0xffffffff,
	0x000092c4, 0x00070000, 0xffffffff,
	0x000092c8, 0x00030002, 0xffffffff,
	0x000092cc, 0x00050004, 0xffffffff,
	0x000092d8, 0x00010006, 0xffffffff,
	0x000092dc, 0x00090008, 0xffffffff,
	0x00009294, 0x00000000, 0xffffffff,
	0x0000802c, 0x40010000, 0xffffffff,
	0x00003fc4, 0x40010000, 0xffffffff,
	0x0000915c, 0x00010000, 0xffffffff,
	0x00009160, 0x00030002, 0xffffffff,
	0x00009164, 0x00050004, 0xffffffff,
	0x00009168, 0x00070006, 0xffffffff,
	0x00009178, 0x00070000, 0xffffffff,
	0x0000917c, 0x00030002, 0xffffffff,
	0x00009180, 0x00050004, 0xffffffff,
	0x0000918c, 0x00010006, 0xffffffff,
	0x00009190, 0x00090008, 0xffffffff,
	0x00009194, 0x00070000, 0xffffffff,
	0x00009198, 0x00030002, 0xffffffff,
	0x0000919c, 0x00050004, 0xffffffff,
	0x000091a8, 0x00010006, 0xffffffff,
	0x000091ac, 0x00090008, 0xffffffff,
	0x000091b0, 0x00070000, 0xffffffff,
	0x000091b4, 0x00030002, 0xffffffff,
	0x000091b8, 0x00050004, 0xffffffff,
	0x000091c4, 0x00010006, 0xffffffff,
	0x000091c8, 0x00090008, 0xffffffff,
	0x000091cc, 0x00070000, 0xffffffff,
	0x000091d0, 0x00030002, 0xffffffff,
	0x000091d4, 0x00050004, 0xffffffff,
	0x000091e0, 0x00010006, 0xffffffff,
	0x000091e4, 0x00090008, 0xffffffff,
	0x000091e8, 0x00000000, 0xffffffff,
	0x000091ec, 0x00070000, 0xffffffff,
	0x000091f0, 0x00030002, 0xffffffff,
	0x000091f4, 0x00050004, 0xffffffff,
	0x00009200, 0x00010006, 0xffffffff,
	0x00009204, 0x00090008, 0xffffffff,
	0x00009208, 0x00070000, 0xffffffff,
	0x0000920c, 0x00030002, 0xffffffff,
	0x00009210, 0x00050004, 0xffffffff,
	0x0000921c, 0x00010006, 0xffffffff,
	0x00009220, 0x00090008, 0xffffffff,
	0x00009224, 0x00070000, 0xffffffff,
	0x00009228, 0x00030002, 0xffffffff,
	0x0000922c, 0x00050004, 0xffffffff,
	0x00009238, 0x00010006, 0xffffffff,
	0x0000923c, 0x00090008, 0xffffffff,
	0x00009240, 0x00070000, 0xffffffff,
	0x00009244, 0x00030002, 0xffffffff,
	0x00009248, 0x00050004, 0xffffffff,
	0x00009254, 0x00010006, 0xffffffff,
	0x00009258, 0x00090008, 0xffffffff,
	0x0000925c, 0x00070000, 0xffffffff,
	0x00009260, 0x00030002, 0xffffffff,
	0x00009264, 0x00050004, 0xffffffff,
	0x00009270, 0x00010006, 0xffffffff,
	0x00009274, 0x00090008, 0xffffffff,
	0x00009278, 0x00070000, 0xffffffff,
	0x0000927c, 0x00030002, 0xffffffff,
	0x00009280, 0x00050004, 0xffffffff,
	0x0000928c, 0x00010006, 0xffffffff,
	0x00009290, 0x00090008, 0xffffffff,
	0x000092a8, 0x00070000, 0xffffffff,
	0x000092ac, 0x00030002, 0xffffffff,
	0x000092b0, 0x00050004, 0xffffffff,
	0x000092bc, 0x00010006, 0xffffffff,
	0x000092c0, 0x00090008, 0xffffffff,
	0x000092c4, 0x00070000, 0xffffffff,
	0x000092c8, 0x00030002, 0xffffffff,
	0x000092cc, 0x00050004, 0xffffffff,
	0x000092d8, 0x00010006, 0xffffffff,
	0x000092dc, 0x00090008, 0xffffffff,
	0x00009294, 0x00000000, 0xffffffff,
	0x0000802c, 0xc0000000, 0xffffffff,
	0x00003fc4, 0xc0000000, 0xffffffff,
	0x000008f8, 0x00000010, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000011, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000012, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000013, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000014, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000015, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000016, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000017, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000018, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000019, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x0000001a, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x0000001b, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff
};
/* Number of {reg, val, mask} triples in the sequence above. */
#define CAYMAN_MGCG_DEFAULT_LENGTH sizeof(cayman_mgcg_default) / (3 * sizeof(u32))
616 
/*
 * { register offset, value, mask } triples: MGCG disable sequence,
 * applied by ni_mg_clockgating_enable(rdev, false).
 */
static const u32 cayman_mgcg_disable[] =
{
	0x0000802c, 0xc0000000, 0xffffffff,
	0x000008f8, 0x00000000, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x00000001, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x00000002, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x00000003, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x00009150, 0x00600000, 0xffffffff
};
/* Number of {reg, val, mask} triples in the sequence above. */
#define CAYMAN_MGCG_DISABLE_LENGTH   sizeof(cayman_mgcg_disable) / (3 * sizeof(u32))
631 
/*
 * { register offset, value, mask } triples: MGCG enable sequence,
 * applied by ni_mg_clockgating_enable(rdev, true).
 */
static const u32 cayman_mgcg_enable[] =
{
	0x0000802c, 0xc0000000, 0xffffffff,
	0x000008f8, 0x00000000, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000001, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000002, 0xffffffff,
	0x000008fc, 0x00600000, 0xffffffff,
	0x000008f8, 0x00000003, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x00009150, 0x96944200, 0xffffffff
};

/* Number of {reg, val, mask} triples in the sequence above. */
#define CAYMAN_MGCG_ENABLE_LENGTH   sizeof(cayman_mgcg_enable) / (3 * sizeof(u32))
647 
648 #define NISLANDS_SYSLS_SEQUENCE  100
649 
/*
 * Default/power-on system light sleep (SYSLS) settings, applied by
 * ni_ls_clockgating_default() via btc_program_mgcg_hw_sequence().
 */
static const u32 cayman_sysls_default[] =
{
	/* Register,   Value,     Mask bits */
	0x000055e8, 0x00000000, 0xffffffff,
	0x0000d0bc, 0x00000000, 0xffffffff,
	0x0000d8bc, 0x00000000, 0xffffffff,
	0x000015c0, 0x000c1401, 0xffffffff,
	0x0000264c, 0x000c0400, 0xffffffff,
	0x00002648, 0x000c0400, 0xffffffff,
	0x00002650, 0x000c0400, 0xffffffff,
	0x000020b8, 0x000c0400, 0xffffffff,
	0x000020bc, 0x000c0400, 0xffffffff,
	0x000020c0, 0x000c0c80, 0xffffffff,
	0x0000f4a0, 0x000000c0, 0xffffffff,
	0x0000f4a4, 0x00680fff, 0xffffffff,
	0x00002f50, 0x00000404, 0xffffffff,
	0x000004c8, 0x00000001, 0xffffffff,
	0x000064ec, 0x00000000, 0xffffffff,
	0x00000c7c, 0x00000000, 0xffffffff,
	0x00008dfc, 0x00000000, 0xffffffff
};
/* Number of {reg, val, mask} triples in the sequence above. */
#define CAYMAN_SYSLS_DEFAULT_LENGTH sizeof(cayman_sysls_default) / (3 * sizeof(u32))
672 
/*
 * SYSLS disable sequence, applied by ni_ls_clockgating_enable(rdev, false).
 */
static const u32 cayman_sysls_disable[] =
{
	/* Register,   Value,     Mask bits */
	0x0000d0c0, 0x00000000, 0xffffffff,
	0x0000d8c0, 0x00000000, 0xffffffff,
	0x000055e8, 0x00000000, 0xffffffff,
	0x0000d0bc, 0x00000000, 0xffffffff,
	0x0000d8bc, 0x00000000, 0xffffffff,
	0x000015c0, 0x00041401, 0xffffffff,
	0x0000264c, 0x00040400, 0xffffffff,
	0x00002648, 0x00040400, 0xffffffff,
	0x00002650, 0x00040400, 0xffffffff,
	0x000020b8, 0x00040400, 0xffffffff,
	0x000020bc, 0x00040400, 0xffffffff,
	0x000020c0, 0x00040c80, 0xffffffff,
	0x0000f4a0, 0x000000c0, 0xffffffff,
	0x0000f4a4, 0x00680000, 0xffffffff,
	0x00002f50, 0x00000404, 0xffffffff,
	0x000004c8, 0x00000001, 0xffffffff,
	0x000064ec, 0x00007ffd, 0xffffffff,
	0x00000c7c, 0x0000ff00, 0xffffffff,
	0x00008dfc, 0x0000007f, 0xffffffff
};
/* Number of {reg, val, mask} triples in the sequence above. */
#define CAYMAN_SYSLS_DISABLE_LENGTH sizeof(cayman_sysls_disable) / (3 * sizeof(u32))
697 
/*
 * SYSLS enable sequence, applied by ni_ls_clockgating_enable(rdev, true).
 */
static const u32 cayman_sysls_enable[] =
{
	/* Register,   Value,     Mask bits */
	0x000055e8, 0x00000001, 0xffffffff,
	0x0000d0bc, 0x00000100, 0xffffffff,
	0x0000d8bc, 0x00000100, 0xffffffff,
	0x000015c0, 0x000c1401, 0xffffffff,
	0x0000264c, 0x000c0400, 0xffffffff,
	0x00002648, 0x000c0400, 0xffffffff,
	0x00002650, 0x000c0400, 0xffffffff,
	0x000020b8, 0x000c0400, 0xffffffff,
	0x000020bc, 0x000c0400, 0xffffffff,
	0x000020c0, 0x000c0c80, 0xffffffff,
	0x0000f4a0, 0x000000c0, 0xffffffff,
	0x0000f4a4, 0x00680fff, 0xffffffff,
	0x00002f50, 0x00000903, 0xffffffff,
	0x000004c8, 0x00000000, 0xffffffff,
	0x000064ec, 0x00000000, 0xffffffff,
	0x00000c7c, 0x00000000, 0xffffffff,
	0x00008dfc, 0x00000000, 0xffffffff
};
/* Number of {reg, val, mask} triples in the sequence above. */
#define CAYMAN_SYSLS_ENABLE_LENGTH sizeof(cayman_sysls_enable) / (3 * sizeof(u32))
720 
721 struct rv7xx_power_info *rv770_get_pi(struct radeon_device *rdev);
722 struct evergreen_power_info *evergreen_get_pi(struct radeon_device *rdev);
723 struct ni_power_info *ni_get_pi(struct radeon_device *rdev);
724 struct ni_ps *ni_get_ps(struct radeon_ps *rps);
725 void ni_dpm_reset_asic(struct radeon_device *rdev);
726 
727 struct ni_power_info *ni_get_pi(struct radeon_device *rdev)
728 {
729         struct ni_power_info *pi = rdev->pm.dpm.priv;
730 
731         return pi;
732 }
733 
734 struct ni_ps *ni_get_ps(struct radeon_ps *rps)
735 {
736 	struct ni_ps *ps = rps->ps_priv;
737 
738 	return ps;
739 }
740 
/*
 * Estimate leakage power for a given voltage and temperature:
 *
 *	leakage = ileakage * kt * kv * v
 *	  where kt = at * exp(bt * t)  and  kv = av * exp(bv * v)
 *
 * All inputs (the at/bt/av/bv coefficients, ileakage, v, t) are scaled
 * by 1000 and converted to drm 32.32 fixed point before the math; the
 * result is multiplied by 1000 again before truncating back to an
 * integer, so *leakage comes out in the same scaling as ileakage
 * (presumably used as a power term by the CAC code — TODO confirm
 * against callers).
 */
static void ni_calculate_leakage_for_v_and_t_formula(const struct ni_leakage_coeffients *coeff,
						     u16 v, s32 t,
						     u32 ileakage,
						     u32 *leakage)
{
	s64 kt, kv, leakage_w, i_leakage, vddc, temperature;

	/* undo the x1000 scaling of the raw inputs, in fixed point */
	i_leakage = div64_s64(drm_int2fixp(ileakage), 1000);
	vddc = div64_s64(drm_int2fixp(v), 1000);
	temperature = div64_s64(drm_int2fixp(t), 1000);

	/* kt = at * e^(bt * t), kv = av * e^(bv * v) */
	kt = drm_fixp_mul(div64_s64(drm_int2fixp(coeff->at), 1000),
			  drm_fixp_exp(drm_fixp_mul(div64_s64(drm_int2fixp(coeff->bt), 1000), temperature)));
	kv = drm_fixp_mul(div64_s64(drm_int2fixp(coeff->av), 1000),
			  drm_fixp_exp(drm_fixp_mul(div64_s64(drm_int2fixp(coeff->bv), 1000), vddc)));

	leakage_w = drm_fixp_mul(drm_fixp_mul(drm_fixp_mul(i_leakage, kt), kv), vddc);

	/* rescale by 1000 and truncate the fixed-point result */
	*leakage = drm_fixp2int(leakage_w * 1000);
}
761 
/*
 * Thin wrapper around ni_calculate_leakage_for_v_and_t_formula().
 * rdev is currently unused here.
 */
static void ni_calculate_leakage_for_v_and_t(struct radeon_device *rdev,
					     const struct ni_leakage_coeffients *coeff,
					     u16 v,
					     s32 t,
					     u32 i_leakage,
					     u32 *leakage)
{
	ni_calculate_leakage_for_v_and_t_formula(coeff, v, t, i_leakage, leakage);
}
771 
772 bool ni_dpm_vblank_too_short(struct radeon_device *rdev)
773 {
774 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
775 	u32 vblank_time = r600_dpm_get_vblank_time(rdev);
776 	/* we never hit the non-gddr5 limit so disable it */
777 	u32 switch_limit = pi->mem_gddr5 ? 450 : 0;
778 
779 	if (vblank_time < switch_limit)
780 		return true;
781 	else
782 		return false;
783 
784 }
785 
/*
 * Clamp and adjust the requested power state so it is compatible with the
 * current display configuration and power source:
 *  - on DC power, clamp every performance level to the DC limits;
 *  - when mclk switching must be disabled (multiple crtcs, or a vblank
 *    period too short to hide the switch), force a single mclk/vddci
 *    across all levels;
 *  - make sclk/vddc (and mclk/vddci) monotonically non-decreasing across
 *    levels, skip blacklisted clocks, and raise voltages to satisfy the
 *    sclk/mclk/dispclk dependency tables and the vddc/vddci delta rules;
 *  - finally compute ps->dc_compatible and strip the PCIE gen2 flag from
 *    levels whose vddc is below the gen2 minimum.
 */
static void ni_apply_state_adjust_rules(struct radeon_device *rdev,
					struct radeon_ps *rps)
{
	struct ni_ps *ps = ni_get_ps(rps);
	struct radeon_clock_and_voltage_limits *max_limits;
	bool disable_mclk_switching;
	u32 mclk;
	u16 vddci;
	int i;

	/* mclk switching must be hidden in vblank; impossible with more
	 * than one active crtc or a too-short vblank period */
	if ((rdev->pm.dpm.new_active_crtc_count > 1) ||
	    ni_dpm_vblank_too_short(rdev))
		disable_mclk_switching = true;
	else
		disable_mclk_switching = false;

	if (rdev->pm.dpm.ac_power)
		max_limits = &rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac;
	else
		max_limits = &rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc;

	/* on battery: clamp every level to the DC max limits */
	if (rdev->pm.dpm.ac_power == false) {
		for (i = 0; i < ps->performance_level_count; i++) {
			if (ps->performance_levels[i].mclk > max_limits->mclk)
				ps->performance_levels[i].mclk = max_limits->mclk;
			if (ps->performance_levels[i].sclk > max_limits->sclk)
				ps->performance_levels[i].sclk = max_limits->sclk;
			if (ps->performance_levels[i].vddc > max_limits->vddc)
				ps->performance_levels[i].vddc = max_limits->vddc;
			if (ps->performance_levels[i].vddci > max_limits->vddci)
				ps->performance_levels[i].vddci = max_limits->vddci;
		}
	}

	/* XXX validate the min clocks required for display */

	/* adjust low state */
	if (disable_mclk_switching) {
		/* raise the lowest level to the highest level's mclk/vddci */
		ps->performance_levels[0].mclk =
			ps->performance_levels[ps->performance_level_count - 1].mclk;
		ps->performance_levels[0].vddci =
			ps->performance_levels[ps->performance_level_count - 1].vddci;
	}

	btc_skip_blacklist_clocks(rdev, max_limits->sclk, max_limits->mclk,
				  &ps->performance_levels[0].sclk,
				  &ps->performance_levels[0].mclk);

	/* sclk/vddc must never decrease from one level to the next */
	for (i = 1; i < ps->performance_level_count; i++) {
		if (ps->performance_levels[i].sclk < ps->performance_levels[i - 1].sclk)
			ps->performance_levels[i].sclk = ps->performance_levels[i - 1].sclk;
		if (ps->performance_levels[i].vddc < ps->performance_levels[i - 1].vddc)
			ps->performance_levels[i].vddc = ps->performance_levels[i - 1].vddc;
	}

	/* adjust remaining states */
	if (disable_mclk_switching) {
		/* pin every level to the max mclk/vddci found in the state */
		mclk = ps->performance_levels[0].mclk;
		vddci = ps->performance_levels[0].vddci;
		for (i = 1; i < ps->performance_level_count; i++) {
			if (mclk < ps->performance_levels[i].mclk)
				mclk = ps->performance_levels[i].mclk;
			if (vddci < ps->performance_levels[i].vddci)
				vddci = ps->performance_levels[i].vddci;
		}
		for (i = 0; i < ps->performance_level_count; i++) {
			ps->performance_levels[i].mclk = mclk;
			ps->performance_levels[i].vddci = vddci;
		}
	} else {
		/* mclk/vddci must never decrease from one level to the next */
		for (i = 1; i < ps->performance_level_count; i++) {
			if (ps->performance_levels[i].mclk < ps->performance_levels[i - 1].mclk)
				ps->performance_levels[i].mclk = ps->performance_levels[i - 1].mclk;
			if (ps->performance_levels[i].vddci < ps->performance_levels[i - 1].vddci)
				ps->performance_levels[i].vddci = ps->performance_levels[i - 1].vddci;
		}
	}

	for (i = 1; i < ps->performance_level_count; i++)
		btc_skip_blacklist_clocks(rdev, max_limits->sclk, max_limits->mclk,
					  &ps->performance_levels[i].sclk,
					  &ps->performance_levels[i].mclk);

	for (i = 0; i < ps->performance_level_count; i++)
		btc_adjust_clock_combinations(rdev, max_limits,
					      &ps->performance_levels[i]);

	/* raise voltages to what the sclk/mclk/dispclk dependency tables require */
	for (i = 0; i < ps->performance_level_count; i++) {
		btc_apply_voltage_dependency_rules(&rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk,
						   ps->performance_levels[i].sclk,
						   max_limits->vddc,  &ps->performance_levels[i].vddc);
		btc_apply_voltage_dependency_rules(&rdev->pm.dpm.dyn_state.vddci_dependency_on_mclk,
						   ps->performance_levels[i].mclk,
						   max_limits->vddci, &ps->performance_levels[i].vddci);
		btc_apply_voltage_dependency_rules(&rdev->pm.dpm.dyn_state.vddc_dependency_on_mclk,
						   ps->performance_levels[i].mclk,
						   max_limits->vddc,  &ps->performance_levels[i].vddc);
		btc_apply_voltage_dependency_rules(&rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk,
						   rdev->clock.current_dispclk,
						   max_limits->vddc,  &ps->performance_levels[i].vddc);
	}

	for (i = 0; i < ps->performance_level_count; i++) {
		btc_apply_voltage_delta_rules(rdev,
					      max_limits->vddc, max_limits->vddci,
					      &ps->performance_levels[i].vddc,
					      &ps->performance_levels[i].vddci);
	}

	ps->dc_compatible = true;
	for (i = 0; i < ps->performance_level_count; i++) {
		if (ps->performance_levels[i].vddc > rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc.vddc)
			ps->dc_compatible = false;

		if (ps->performance_levels[i].vddc < rdev->pm.dpm.dyn_state.min_vddc_for_pcie_gen2)
			ps->performance_levels[i].flags &= ~ATOM_PPLIB_R600_FLAGS_PCIEGEN2;
	}
}
904 
905 static void ni_cg_clockgating_default(struct radeon_device *rdev)
906 {
907 	u32 count;
908 	const u32 *ps = NULL;
909 
910 	ps = (const u32 *)&cayman_cgcg_cgls_default;
911 	count = CAYMAN_CGCG_CGLS_DEFAULT_LENGTH;
912 
913 	btc_program_mgcg_hw_sequence(rdev, ps, count);
914 }
915 
916 static void ni_gfx_clockgating_enable(struct radeon_device *rdev,
917 				      bool enable)
918 {
919 	u32 count;
920 	const u32 *ps = NULL;
921 
922 	if (enable) {
923 		ps = (const u32 *)&cayman_cgcg_cgls_enable;
924 		count = CAYMAN_CGCG_CGLS_ENABLE_LENGTH;
925 	} else {
926 		ps = (const u32 *)&cayman_cgcg_cgls_disable;
927 		count = CAYMAN_CGCG_CGLS_DISABLE_LENGTH;
928 	}
929 
930 	btc_program_mgcg_hw_sequence(rdev, ps, count);
931 }
932 
933 static void ni_mg_clockgating_default(struct radeon_device *rdev)
934 {
935 	u32 count;
936 	const u32 *ps = NULL;
937 
938 	ps = (const u32 *)&cayman_mgcg_default;
939 	count = CAYMAN_MGCG_DEFAULT_LENGTH;
940 
941 	btc_program_mgcg_hw_sequence(rdev, ps, count);
942 }
943 
944 static void ni_mg_clockgating_enable(struct radeon_device *rdev,
945 				     bool enable)
946 {
947 	u32 count;
948 	const u32 *ps = NULL;
949 
950 	if (enable) {
951 		ps = (const u32 *)&cayman_mgcg_enable;
952 		count = CAYMAN_MGCG_ENABLE_LENGTH;
953 	} else {
954 		ps = (const u32 *)&cayman_mgcg_disable;
955 		count = CAYMAN_MGCG_DISABLE_LENGTH;
956 	}
957 
958 	btc_program_mgcg_hw_sequence(rdev, ps, count);
959 }
960 
961 static void ni_ls_clockgating_default(struct radeon_device *rdev)
962 {
963 	u32 count;
964 	const u32 *ps = NULL;
965 
966 	ps = (const u32 *)&cayman_sysls_default;
967 	count = CAYMAN_SYSLS_DEFAULT_LENGTH;
968 
969 	btc_program_mgcg_hw_sequence(rdev, ps, count);
970 }
971 
972 static void ni_ls_clockgating_enable(struct radeon_device *rdev,
973 				     bool enable)
974 {
975 	u32 count;
976 	const u32 *ps = NULL;
977 
978 	if (enable) {
979 		ps = (const u32 *)&cayman_sysls_enable;
980 		count = CAYMAN_SYSLS_ENABLE_LENGTH;
981 	} else {
982 		ps = (const u32 *)&cayman_sysls_disable;
983 		count = CAYMAN_SYSLS_DISABLE_LENGTH;
984 	}
985 
986 	btc_program_mgcg_hw_sequence(rdev, ps, count);
987 
988 }
989 
990 static int ni_patch_single_dependency_table_based_on_leakage(struct radeon_device *rdev,
991 							     struct radeon_clock_voltage_dependency_table *table)
992 {
993 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
994 	u32 i;
995 
996 	if (table) {
997 		for (i = 0; i < table->count; i++) {
998 			if (0xff01 == table->entries[i].v) {
999 				if (pi->max_vddc == 0)
1000 					return -EINVAL;
1001 				table->entries[i].v = pi->max_vddc;
1002 			}
1003 		}
1004 	}
1005 	return 0;
1006 }
1007 
1008 static int ni_patch_dependency_tables_based_on_leakage(struct radeon_device *rdev)
1009 {
1010 	int ret = 0;
1011 
1012 	ret = ni_patch_single_dependency_table_based_on_leakage(rdev,
1013 								&rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk);
1014 
1015 	ret = ni_patch_single_dependency_table_based_on_leakage(rdev,
1016 								&rdev->pm.dpm.dyn_state.vddc_dependency_on_mclk);
1017 	return ret;
1018 }
1019 
/* Disable dynamic power management by clearing GLOBAL_PWRMGT_EN. */
static void ni_stop_dpm(struct radeon_device *rdev)
{
	WREG32_P(GENERAL_PWRMGT, 0, ~GLOBAL_PWRMGT_EN);
}
1024 
#if 0
/*
 * Tell the SMC we are running on AC power (currently compiled out).
 * Returns 0 on success or when on battery power, -EINVAL if the SMC
 * rejects the message.
 */
static int ni_notify_hw_of_power_source(struct radeon_device *rdev,
					bool ac_power)
{
	if (ac_power)
		return (rv770_send_msg_to_smc(rdev, PPSMC_MSG_RunningOnAC) == PPSMC_Result_OK) ?
			0 : -EINVAL;

	return 0;
}
#endif
1036 
/*
 * Send an SMC message with a 32-bit argument; the argument is passed
 * through the SMC_SCRATCH0 mailbox register before the message itself.
 */
static PPSMC_Result ni_send_msg_to_smc_with_parameter(struct radeon_device *rdev,
						      PPSMC_Msg msg, u32 parameter)
{
	WREG32(SMC_SCRATCH0, parameter);
	return rv770_send_msg_to_smc(rdev, msg);
}
1043 
1044 static int ni_restrict_performance_levels_before_switch(struct radeon_device *rdev)
1045 {
1046 	if (rv770_send_msg_to_smc(rdev, PPSMC_MSG_NoForcedLevel) != PPSMC_Result_OK)
1047 		return -EINVAL;
1048 
1049 	return (ni_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetEnabledLevels, 1) == PPSMC_Result_OK) ?
1050 		0 : -EINVAL;
1051 }
1052 
1053 int ni_dpm_force_performance_level(struct radeon_device *rdev,
1054 				   enum radeon_dpm_forced_level level)
1055 {
1056 	if (level == RADEON_DPM_FORCED_LEVEL_HIGH) {
1057 		if (ni_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetEnabledLevels, 0) != PPSMC_Result_OK)
1058 			return -EINVAL;
1059 
1060 		if (ni_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetForcedLevels, 1) != PPSMC_Result_OK)
1061 			return -EINVAL;
1062 	} else if (level == RADEON_DPM_FORCED_LEVEL_LOW) {
1063 		if (ni_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetForcedLevels, 0) != PPSMC_Result_OK)
1064 			return -EINVAL;
1065 
1066 		if (ni_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetEnabledLevels, 1) != PPSMC_Result_OK)
1067 			return -EINVAL;
1068 	} else if (level == RADEON_DPM_FORCED_LEVEL_AUTO) {
1069 		if (ni_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetForcedLevels, 0) != PPSMC_Result_OK)
1070 			return -EINVAL;
1071 
1072 		if (ni_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetEnabledLevels, 0) != PPSMC_Result_OK)
1073 			return -EINVAL;
1074 	}
1075 
1076 	rdev->pm.dpm.forced_level = level;
1077 
1078 	return 0;
1079 }
1080 
1081 static void ni_stop_smc(struct radeon_device *rdev)
1082 {
1083 	u32 tmp;
1084 	int i;
1085 
1086 	for (i = 0; i < rdev->usec_timeout; i++) {
1087 		tmp = RREG32(LB_SYNC_RESET_SEL) & LB_SYNC_RESET_SEL_MASK;
1088 		if (tmp != 1)
1089 			break;
1090 		udelay(1);
1091 	}
1092 
1093 	udelay(100);
1094 
1095 	r7xx_stop_smc(rdev);
1096 }
1097 
1098 static int ni_process_firmware_header(struct radeon_device *rdev)
1099 {
1100         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1101         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1102         struct ni_power_info *ni_pi = ni_get_pi(rdev);
1103 	u32 tmp;
1104 	int ret;
1105 
1106 	ret = rv770_read_smc_sram_dword(rdev,
1107 					NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
1108 					NISLANDS_SMC_FIRMWARE_HEADER_stateTable,
1109 					&tmp, pi->sram_end);
1110 
1111 	if (ret)
1112 		return ret;
1113 
1114 	pi->state_table_start = (u16)tmp;
1115 
1116 	ret = rv770_read_smc_sram_dword(rdev,
1117 					NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
1118 					NISLANDS_SMC_FIRMWARE_HEADER_softRegisters,
1119 					&tmp, pi->sram_end);
1120 
1121 	if (ret)
1122 		return ret;
1123 
1124 	pi->soft_regs_start = (u16)tmp;
1125 
1126 	ret = rv770_read_smc_sram_dword(rdev,
1127 					NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
1128 					NISLANDS_SMC_FIRMWARE_HEADER_mcRegisterTable,
1129 					&tmp, pi->sram_end);
1130 
1131 	if (ret)
1132 		return ret;
1133 
1134 	eg_pi->mc_reg_table_start = (u16)tmp;
1135 
1136 	ret = rv770_read_smc_sram_dword(rdev,
1137 					NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
1138 					NISLANDS_SMC_FIRMWARE_HEADER_fanTable,
1139 					&tmp, pi->sram_end);
1140 
1141 	if (ret)
1142 		return ret;
1143 
1144 	ni_pi->fan_table_start = (u16)tmp;
1145 
1146 	ret = rv770_read_smc_sram_dword(rdev,
1147 					NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
1148 					NISLANDS_SMC_FIRMWARE_HEADER_mcArbDramAutoRefreshTable,
1149 					&tmp, pi->sram_end);
1150 
1151 	if (ret)
1152 		return ret;
1153 
1154 	ni_pi->arb_table_start = (u16)tmp;
1155 
1156 	ret = rv770_read_smc_sram_dword(rdev,
1157 					NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
1158 					NISLANDS_SMC_FIRMWARE_HEADER_cacTable,
1159 					&tmp, pi->sram_end);
1160 
1161 	if (ret)
1162 		return ret;
1163 
1164 	ni_pi->cac_table_start = (u16)tmp;
1165 
1166 	ret = rv770_read_smc_sram_dword(rdev,
1167 					NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
1168 					NISLANDS_SMC_FIRMWARE_HEADER_spllTable,
1169 					&tmp, pi->sram_end);
1170 
1171 	if (ret)
1172 		return ret;
1173 
1174 	ni_pi->spll_table_start = (u16)tmp;
1175 
1176 
1177 	return ret;
1178 }
1179 
/*
 * Snapshot the current SPLL, MPLL and MCLK power-management registers so
 * that later state-table programming can start from the hardware's
 * boot-time values.
 */
static void ni_read_clock_registers(struct radeon_device *rdev)
{
	struct ni_power_info *ni_pi = ni_get_pi(rdev);

	ni_pi->clock_registers.cg_spll_func_cntl = RREG32(CG_SPLL_FUNC_CNTL);
	ni_pi->clock_registers.cg_spll_func_cntl_2 = RREG32(CG_SPLL_FUNC_CNTL_2);
	ni_pi->clock_registers.cg_spll_func_cntl_3 = RREG32(CG_SPLL_FUNC_CNTL_3);
	ni_pi->clock_registers.cg_spll_func_cntl_4 = RREG32(CG_SPLL_FUNC_CNTL_4);
	ni_pi->clock_registers.cg_spll_spread_spectrum = RREG32(CG_SPLL_SPREAD_SPECTRUM);
	ni_pi->clock_registers.cg_spll_spread_spectrum_2 = RREG32(CG_SPLL_SPREAD_SPECTRUM_2);
	ni_pi->clock_registers.mpll_ad_func_cntl = RREG32(MPLL_AD_FUNC_CNTL);
	ni_pi->clock_registers.mpll_ad_func_cntl_2 = RREG32(MPLL_AD_FUNC_CNTL_2);
	ni_pi->clock_registers.mpll_dq_func_cntl = RREG32(MPLL_DQ_FUNC_CNTL);
	ni_pi->clock_registers.mpll_dq_func_cntl_2 = RREG32(MPLL_DQ_FUNC_CNTL_2);
	ni_pi->clock_registers.mclk_pwrmgt_cntl = RREG32(MCLK_PWRMGT_CNTL);
	ni_pi->clock_registers.dll_cntl = RREG32(DLL_CNTL);
	ni_pi->clock_registers.mpll_ss1 = RREG32(MPLL_SS1);
	ni_pi->clock_registers.mpll_ss2 = RREG32(MPLL_SS2);
}
1199 
#if 0
/*
 * Ask the SMC to drop into its minimum-power state (currently compiled
 * out).  If GFX clock gating is enabled, the GFX clock is forced on and
 * released first.
 */
static int ni_enter_ulp_state(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);

	if (pi->gfx_clock_gating) {
                WREG32_P(SCLK_PWRMGT_CNTL, 0, ~DYN_GFX_CLK_OFF_EN);
		WREG32_P(SCLK_PWRMGT_CNTL, GFX_CLK_FORCE_ON, ~GFX_CLK_FORCE_ON);
                WREG32_P(SCLK_PWRMGT_CNTL, 0, ~GFX_CLK_FORCE_ON);
		/* read back — presumably to post the writes; verify */
		RREG32(GB_ADDR_CONFIG);
        }

	WREG32_P(SMC_MSG, HOST_SMC_MSG(PPSMC_MSG_SwitchToMinimumPower),
                 ~HOST_SMC_MSG_MASK);

	udelay(25000);

	return 0;
}
#endif
1220 
1221 static void ni_program_response_times(struct radeon_device *rdev)
1222 {
1223 	u32 voltage_response_time, backbias_response_time, acpi_delay_time, vbi_time_out;
1224 	u32 vddc_dly, bb_dly, acpi_dly, vbi_dly, mclk_switch_limit;
1225 	u32 reference_clock;
1226 
1227 	rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_mvdd_chg_time, 1);
1228 
1229 	voltage_response_time = (u32)rdev->pm.dpm.voltage_response_time;
1230 	backbias_response_time = (u32)rdev->pm.dpm.backbias_response_time;
1231 
1232 	if (voltage_response_time == 0)
1233 		voltage_response_time = 1000;
1234 
1235 	if (backbias_response_time == 0)
1236 		backbias_response_time = 1000;
1237 
1238 	acpi_delay_time = 15000;
1239 	vbi_time_out = 100000;
1240 
1241 	reference_clock = radeon_get_xclk(rdev);
1242 
1243 	vddc_dly = (voltage_response_time  * reference_clock) / 1600;
1244 	bb_dly   = (backbias_response_time * reference_clock) / 1600;
1245 	acpi_dly = (acpi_delay_time * reference_clock) / 1600;
1246 	vbi_dly  = (vbi_time_out * reference_clock) / 1600;
1247 
1248 	mclk_switch_limit = (460 * reference_clock) / 100;
1249 
1250 	rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_delay_vreg,  vddc_dly);
1251 	rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_delay_bbias, bb_dly);
1252 	rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_delay_acpi,  acpi_dly);
1253 	rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_mclk_chg_timeout, vbi_dly);
1254 	rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_mc_block_delay, 0xAA);
1255 	rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_mclk_switch_lim, mclk_switch_limit);
1256 }
1257 
1258 static void ni_populate_smc_voltage_table(struct radeon_device *rdev,
1259 					  struct atom_voltage_table *voltage_table,
1260 					  NISLANDS_SMC_STATETABLE *table)
1261 {
1262 	unsigned int i;
1263 
1264 	for (i = 0; i < voltage_table->count; i++) {
1265 		table->highSMIO[i] = 0;
1266 		table->lowSMIO[i] |= cpu_to_be32(voltage_table->entries[i].smio_low);
1267 	}
1268 }
1269 
1270 static void ni_populate_smc_voltage_tables(struct radeon_device *rdev,
1271 					   NISLANDS_SMC_STATETABLE *table)
1272 {
1273 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1274 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1275 	unsigned char i;
1276 
1277 	if (eg_pi->vddc_voltage_table.count) {
1278 		ni_populate_smc_voltage_table(rdev, &eg_pi->vddc_voltage_table, table);
1279 		table->voltageMaskTable.highMask[NISLANDS_SMC_VOLTAGEMASK_VDDC] = 0;
1280 		table->voltageMaskTable.lowMask[NISLANDS_SMC_VOLTAGEMASK_VDDC] =
1281 			cpu_to_be32(eg_pi->vddc_voltage_table.mask_low);
1282 
1283 		for (i = 0; i < eg_pi->vddc_voltage_table.count; i++) {
1284 			if (pi->max_vddc_in_table <= eg_pi->vddc_voltage_table.entries[i].value) {
1285 				table->maxVDDCIndexInPPTable = i;
1286 				break;
1287 			}
1288 		}
1289 	}
1290 
1291 	if (eg_pi->vddci_voltage_table.count) {
1292 		ni_populate_smc_voltage_table(rdev, &eg_pi->vddci_voltage_table, table);
1293 
1294 		table->voltageMaskTable.highMask[NISLANDS_SMC_VOLTAGEMASK_VDDCI] = 0;
1295 		table->voltageMaskTable.lowMask[NISLANDS_SMC_VOLTAGEMASK_VDDCI] =
1296 			cpu_to_be32(eg_pi->vddci_voltage_table.mask_low);
1297 	}
1298 }
1299 
1300 static int ni_populate_voltage_value(struct radeon_device *rdev,
1301 				     struct atom_voltage_table *table,
1302 				     u16 value,
1303 				     NISLANDS_SMC_VOLTAGE_VALUE *voltage)
1304 {
1305 	unsigned int i;
1306 
1307 	for (i = 0; i < table->count; i++) {
1308 		if (value <= table->entries[i].value) {
1309 			voltage->index = (u8)i;
1310 			voltage->value = cpu_to_be16(table->entries[i].value);
1311 			break;
1312 		}
1313 	}
1314 
1315 	if (i >= table->count)
1316 		return -EINVAL;
1317 
1318 	return 0;
1319 }
1320 
1321 static void ni_populate_mvdd_value(struct radeon_device *rdev,
1322 				   u32 mclk,
1323 				   NISLANDS_SMC_VOLTAGE_VALUE *voltage)
1324 {
1325         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1326 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1327 
1328 	if (!pi->mvdd_control) {
1329 		voltage->index = eg_pi->mvdd_high_index;
1330                 voltage->value = cpu_to_be16(MVDD_HIGH_VALUE);
1331 		return;
1332 	}
1333 
1334 	if (mclk <= pi->mvdd_split_frequency) {
1335 		voltage->index = eg_pi->mvdd_low_index;
1336 		voltage->value = cpu_to_be16(MVDD_LOW_VALUE);
1337 	} else {
1338 		voltage->index = eg_pi->mvdd_high_index;
1339 		voltage->value = cpu_to_be16(MVDD_HIGH_VALUE);
1340 	}
1341 }
1342 
1343 static int ni_get_std_voltage_value(struct radeon_device *rdev,
1344 				    NISLANDS_SMC_VOLTAGE_VALUE *voltage,
1345 				    u16 *std_voltage)
1346 {
1347 	if (rdev->pm.dpm.dyn_state.cac_leakage_table.entries &&
1348 	    ((u32)voltage->index < rdev->pm.dpm.dyn_state.cac_leakage_table.count))
1349 		*std_voltage = rdev->pm.dpm.dyn_state.cac_leakage_table.entries[voltage->index].vddc;
1350 	else
1351 		*std_voltage = be16_to_cpu(voltage->value);
1352 
1353 	return 0;
1354 }
1355 
1356 static void ni_populate_std_voltage_value(struct radeon_device *rdev,
1357 					  u16 value, u8 index,
1358 					  NISLANDS_SMC_VOLTAGE_VALUE *voltage)
1359 {
1360 	voltage->index = index;
1361 	voltage->value = cpu_to_be16(value);
1362 }
1363 
1364 static u32 ni_get_smc_power_scaling_factor(struct radeon_device *rdev)
1365 {
1366 	u32 xclk_period;
1367 	u32 xclk = radeon_get_xclk(rdev);
1368 	u32 tmp = RREG32(CG_CAC_CTRL) & TID_CNT_MASK;
1369 
1370 	xclk_period = (1000000000UL / xclk);
1371 	xclk_period /= 10000UL;
1372 
1373 	return tmp * xclk_period;
1374 }
1375 
1376 static u32 ni_scale_power_for_smc(u32 power_in_watts, u32 scaling_factor)
1377 {
1378 	return (power_in_watts * scaling_factor) << 2;
1379 }
1380 
/*
 * Compute the power-boost limit for a state (same units as
 * near_tdp_limit).  Only meaningful when power containment and the
 * boost limit are both enabled and the state has at least three
 * performance levels; in every failure or ineligible case 0 is
 * returned, meaning "no boost".
 *
 * The limit scales near_tdp_limit by the squared ratio of the standard
 * VDDC of the second-highest level to that of the highest level,
 * times 90%:
 *
 *   boost = near_tdp_limit * std_vddc_med^2 * 90 / (std_vddc_high^2 * 100)
 */
static u32 ni_calculate_power_boost_limit(struct radeon_device *rdev,
					  struct radeon_ps *radeon_state,
					  u32 near_tdp_limit)
{
	struct ni_ps *state = ni_get_ps(radeon_state);
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	struct ni_power_info *ni_pi = ni_get_pi(rdev);
	u32 power_boost_limit = 0;
	int ret;

	if (ni_pi->enable_power_containment &&
	    ni_pi->use_power_boost_limit) {
		NISLANDS_SMC_VOLTAGE_VALUE vddc;
		u16 std_vddc_med;
		u16 std_vddc_high;
		u64 tmp, n, d;

		/* need a medium and a high level to compare */
		if (state->performance_level_count < 3)
			return 0;

		/* standard VDDC of the second-highest level */
		ret = ni_populate_voltage_value(rdev, &eg_pi->vddc_voltage_table,
						state->performance_levels[state->performance_level_count - 2].vddc,
						&vddc);
		if (ret)
			return 0;

		ret = ni_get_std_voltage_value(rdev, &vddc, &std_vddc_med);
		if (ret)
			return 0;

		/* standard VDDC of the highest level */
		ret = ni_populate_voltage_value(rdev, &eg_pi->vddc_voltage_table,
						state->performance_levels[state->performance_level_count - 1].vddc,
						&vddc);
		if (ret)
			return 0;

		ret = ni_get_std_voltage_value(rdev, &vddc, &std_vddc_high);
		if (ret)
			return 0;

		/* 64-bit math to avoid overflow in the voltage-squared products */
		n = ((u64)near_tdp_limit * ((u64)std_vddc_med * (u64)std_vddc_med) * 90);
		d = ((u64)std_vddc_high * (u64)std_vddc_high * 100);
		tmp = div64_u64(n, d);

		/* reject results that do not fit in 32 bits */
		if (tmp >> 32)
			return 0;
		power_boost_limit = (u32)tmp;
	}

	return power_boost_limit;
}
1432 
1433 static int ni_calculate_adjusted_tdp_limits(struct radeon_device *rdev,
1434 					    bool adjust_polarity,
1435 					    u32 tdp_adjustment,
1436 					    u32 *tdp_limit,
1437 					    u32 *near_tdp_limit)
1438 {
1439 	if (tdp_adjustment > (u32)rdev->pm.dpm.tdp_od_limit)
1440 		return -EINVAL;
1441 
1442 	if (adjust_polarity) {
1443 		*tdp_limit = ((100 + tdp_adjustment) * rdev->pm.dpm.tdp_limit) / 100;
1444 		*near_tdp_limit = rdev->pm.dpm.near_tdp_limit + (*tdp_limit - rdev->pm.dpm.tdp_limit);
1445 	} else {
1446 		*tdp_limit = ((100 - tdp_adjustment) * rdev->pm.dpm.tdp_limit) / 100;
1447 		*near_tdp_limit = rdev->pm.dpm.near_tdp_limit - (rdev->pm.dpm.tdp_limit - *tdp_limit);
1448 	}
1449 
1450 	return 0;
1451 }
1452 
/*
 * Compute the adjusted TDP, near-TDP, safe and boost power limits for a
 * state and upload them into the DPM2 parameter block of the SMC state
 * table.  No-op (returns 0) when power containment is disabled.
 */
static int ni_populate_smc_tdp_limits(struct radeon_device *rdev,
				      struct radeon_ps *radeon_state)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct ni_power_info *ni_pi = ni_get_pi(rdev);

	if (ni_pi->enable_power_containment) {
		NISLANDS_SMC_STATETABLE *smc_table = &ni_pi->smc_statetable;
		u32 scaling_factor = ni_get_smc_power_scaling_factor(rdev);
		u32 tdp_limit;
		u32 near_tdp_limit;
		u32 power_boost_limit;
		int ret;

		/* a zero factor would make every scaled limit 0 */
		if (scaling_factor == 0)
			return -EINVAL;

		memset(smc_table, 0, sizeof(NISLANDS_SMC_STATETABLE));

		ret = ni_calculate_adjusted_tdp_limits(rdev,
						       false, /* polarity: lowers the limits — TODO confirm intent */
						       rdev->pm.dpm.tdp_adjustment,
						       &tdp_limit,
						       &near_tdp_limit);
		if (ret)
			return ret;

		power_boost_limit = ni_calculate_power_boost_limit(rdev, radeon_state,
								   near_tdp_limit);

		/* all four limits are stored big-endian in SMC units */
		smc_table->dpm2Params.TDPLimit =
			cpu_to_be32(ni_scale_power_for_smc(tdp_limit, scaling_factor));
		smc_table->dpm2Params.NearTDPLimit =
			cpu_to_be32(ni_scale_power_for_smc(near_tdp_limit, scaling_factor));
		smc_table->dpm2Params.SafePowerLimit =
			cpu_to_be32(ni_scale_power_for_smc((near_tdp_limit * NISLANDS_DPM2_TDP_SAFE_LIMIT_PERCENT) / 100,
							   scaling_factor));
		smc_table->dpm2Params.PowerBoostLimit =
			cpu_to_be32(ni_scale_power_for_smc(power_boost_limit, scaling_factor));

		/* copy the four consecutive dwords starting at TDPLimit
		 * (TDPLimit, NearTDPLimit, SafePowerLimit, PowerBoostLimit)
		 * in one transfer — hence sizeof(u32) * 4 */
		ret = rv770_copy_bytes_to_smc(rdev,
					      (u16)(pi->state_table_start + offsetof(NISLANDS_SMC_STATETABLE, dpm2Params) +
						    offsetof(PP_NIslands_DPM2Parameters, TDPLimit)),
					      (u8 *)(&smc_table->dpm2Params.TDPLimit),
					      sizeof(u32) * 4, pi->sram_end);
		if (ret)
			return ret;
	}

	return 0;
}
1504 
1505 int ni_copy_and_switch_arb_sets(struct radeon_device *rdev,
1506 				u32 arb_freq_src, u32 arb_freq_dest)
1507 {
1508 	u32 mc_arb_dram_timing;
1509 	u32 mc_arb_dram_timing2;
1510 	u32 burst_time;
1511 	u32 mc_cg_config;
1512 
1513 	switch (arb_freq_src) {
1514         case MC_CG_ARB_FREQ_F0:
1515 		mc_arb_dram_timing  = RREG32(MC_ARB_DRAM_TIMING);
1516 		mc_arb_dram_timing2 = RREG32(MC_ARB_DRAM_TIMING2);
1517 		burst_time = (RREG32(MC_ARB_BURST_TIME) & STATE0_MASK) >> STATE0_SHIFT;
1518 		break;
1519         case MC_CG_ARB_FREQ_F1:
1520 		mc_arb_dram_timing  = RREG32(MC_ARB_DRAM_TIMING_1);
1521 		mc_arb_dram_timing2 = RREG32(MC_ARB_DRAM_TIMING2_1);
1522 		burst_time = (RREG32(MC_ARB_BURST_TIME) & STATE1_MASK) >> STATE1_SHIFT;
1523 		break;
1524         case MC_CG_ARB_FREQ_F2:
1525 		mc_arb_dram_timing  = RREG32(MC_ARB_DRAM_TIMING_2);
1526 		mc_arb_dram_timing2 = RREG32(MC_ARB_DRAM_TIMING2_2);
1527 		burst_time = (RREG32(MC_ARB_BURST_TIME) & STATE2_MASK) >> STATE2_SHIFT;
1528 		break;
1529         case MC_CG_ARB_FREQ_F3:
1530 		mc_arb_dram_timing  = RREG32(MC_ARB_DRAM_TIMING_3);
1531 		mc_arb_dram_timing2 = RREG32(MC_ARB_DRAM_TIMING2_3);
1532 		burst_time = (RREG32(MC_ARB_BURST_TIME) & STATE3_MASK) >> STATE3_SHIFT;
1533 		break;
1534         default:
1535 		return -EINVAL;
1536 	}
1537 
1538 	switch (arb_freq_dest) {
1539         case MC_CG_ARB_FREQ_F0:
1540 		WREG32(MC_ARB_DRAM_TIMING, mc_arb_dram_timing);
1541 		WREG32(MC_ARB_DRAM_TIMING2, mc_arb_dram_timing2);
1542 		WREG32_P(MC_ARB_BURST_TIME, STATE0(burst_time), ~STATE0_MASK);
1543 		break;
1544         case MC_CG_ARB_FREQ_F1:
1545 		WREG32(MC_ARB_DRAM_TIMING_1, mc_arb_dram_timing);
1546 		WREG32(MC_ARB_DRAM_TIMING2_1, mc_arb_dram_timing2);
1547 		WREG32_P(MC_ARB_BURST_TIME, STATE1(burst_time), ~STATE1_MASK);
1548 		break;
1549         case MC_CG_ARB_FREQ_F2:
1550 		WREG32(MC_ARB_DRAM_TIMING_2, mc_arb_dram_timing);
1551 		WREG32(MC_ARB_DRAM_TIMING2_2, mc_arb_dram_timing2);
1552 		WREG32_P(MC_ARB_BURST_TIME, STATE2(burst_time), ~STATE2_MASK);
1553 		break;
1554         case MC_CG_ARB_FREQ_F3:
1555 		WREG32(MC_ARB_DRAM_TIMING_3, mc_arb_dram_timing);
1556 		WREG32(MC_ARB_DRAM_TIMING2_3, mc_arb_dram_timing2);
1557 		WREG32_P(MC_ARB_BURST_TIME, STATE3(burst_time), ~STATE3_MASK);
1558 		break;
1559 	default:
1560 		return -EINVAL;
1561 	}
1562 
1563 	mc_cg_config = RREG32(MC_CG_CONFIG) | 0x0000000F;
1564 	WREG32(MC_CG_CONFIG, mc_cg_config);
1565 	WREG32_P(MC_ARB_CG, CG_ARB_REQ(arb_freq_dest), ~CG_ARB_REQ_MASK);
1566 
1567 	return 0;
1568 }
1569 
1570 static int ni_init_arb_table_index(struct radeon_device *rdev)
1571 {
1572 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1573 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
1574 	u32 tmp;
1575 	int ret;
1576 
1577 	ret = rv770_read_smc_sram_dword(rdev, ni_pi->arb_table_start,
1578 					&tmp, pi->sram_end);
1579 	if (ret)
1580 		return ret;
1581 
1582 	tmp &= 0x00FFFFFF;
1583 	tmp |= ((u32)MC_CG_ARB_FREQ_F1) << 24;
1584 
1585 	return rv770_write_smc_sram_dword(rdev, ni_pi->arb_table_start,
1586 					  tmp, pi->sram_end);
1587 }
1588 
/* Move from the boot-time F0 arb register set to the F1 set. */
static int ni_initial_switch_from_arb_f0_to_f1(struct radeon_device *rdev)
{
	return ni_copy_and_switch_arb_sets(rdev, MC_CG_ARB_FREQ_F0, MC_CG_ARB_FREQ_F1);
}
1593 
1594 static int ni_force_switch_to_arb_f0(struct radeon_device *rdev)
1595 {
1596 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1597 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
1598 	u32 tmp;
1599 	int ret;
1600 
1601 	ret = rv770_read_smc_sram_dword(rdev, ni_pi->arb_table_start,
1602 					&tmp, pi->sram_end);
1603 	if (ret)
1604 		return ret;
1605 
1606 	tmp = (tmp >> 24) & 0xff;
1607 
1608 	if (tmp == MC_CG_ARB_FREQ_F0)
1609 		return 0;
1610 
1611 	return ni_copy_and_switch_arb_sets(rdev, tmp, MC_CG_ARB_FREQ_F0);
1612 }
1613 
1614 static int ni_populate_memory_timing_parameters(struct radeon_device *rdev,
1615 						struct rv7xx_pl *pl,
1616 						SMC_NIslands_MCArbDramTimingRegisterSet *arb_regs)
1617 {
1618 	u32 dram_timing;
1619 	u32 dram_timing2;
1620 
1621 	arb_regs->mc_arb_rfsh_rate =
1622 		(u8)rv770_calculate_memory_refresh_rate(rdev, pl->sclk);
1623 
1624 
1625 	radeon_atom_set_engine_dram_timings(rdev,
1626                                             pl->sclk,
1627                                             pl->mclk);
1628 
1629 	dram_timing = RREG32(MC_ARB_DRAM_TIMING);
1630 	dram_timing2 = RREG32(MC_ARB_DRAM_TIMING2);
1631 
1632 	arb_regs->mc_arb_dram_timing  = cpu_to_be32(dram_timing);
1633 	arb_regs->mc_arb_dram_timing2 = cpu_to_be32(dram_timing2);
1634 
1635 	return 0;
1636 }
1637 
1638 static int ni_do_program_memory_timing_parameters(struct radeon_device *rdev,
1639 						  struct radeon_ps *radeon_state,
1640 						  unsigned int first_arb_set)
1641 {
1642 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1643 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
1644 	struct ni_ps *state = ni_get_ps(radeon_state);
1645 	SMC_NIslands_MCArbDramTimingRegisterSet arb_regs = { 0 };
1646 	int i, ret = 0;
1647 
1648 	for (i = 0; i < state->performance_level_count; i++) {
1649 		ret = ni_populate_memory_timing_parameters(rdev, &state->performance_levels[i], &arb_regs);
1650 		if (ret)
1651 			break;
1652 
1653 		ret = rv770_copy_bytes_to_smc(rdev,
1654 					      (u16)(ni_pi->arb_table_start +
1655 						    offsetof(SMC_NIslands_MCArbDramTimingRegisters, data) +
1656 						    sizeof(SMC_NIslands_MCArbDramTimingRegisterSet) * (first_arb_set + i)),
1657 					      (u8 *)&arb_regs,
1658 					      (u16)sizeof(SMC_NIslands_MCArbDramTimingRegisterSet),
1659 					      pi->sram_end);
1660 		if (ret)
1661 			break;
1662 	}
1663 	return ret;
1664 }
1665 
/* Program the new state's MC arb timings into the driver-state slots. */
static int ni_program_memory_timing_parameters(struct radeon_device *rdev,
					       struct radeon_ps *radeon_new_state)
{
	return ni_do_program_memory_timing_parameters(rdev, radeon_new_state,
						      NISLANDS_DRIVER_STATE_ARB_INDEX);
}
1672 
1673 static void ni_populate_initial_mvdd_value(struct radeon_device *rdev,
1674 					   struct NISLANDS_SMC_VOLTAGE_VALUE *voltage)
1675 {
1676 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1677 
1678 	voltage->index = eg_pi->mvdd_high_index;
1679 	voltage->value = cpu_to_be16(MVDD_HIGH_VALUE);
1680 }
1681 
/*
 * Fill the SMC state table's initialState entry from the boot-time clock
 * registers and the initial power state's level-0 values (mclk, sclk,
 * voltages, PCIe gen, GDDR5 strobe/EDC flags, DPM2 throttling fields).
 * Always returns 0; voltage lookup failures simply leave those fields
 * unpopulated.
 */
static int ni_populate_smc_initial_state(struct radeon_device *rdev,
					 struct radeon_ps *radeon_initial_state,
					 NISLANDS_SMC_STATETABLE *table)
{
	struct ni_ps *initial_state = ni_get_ps(radeon_initial_state);
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	struct ni_power_info *ni_pi = ni_get_pi(rdev);
	u32 reg;
	int ret;

	/* memory PLL / mclk registers, big-endian as the SMC expects */
	table->initialState.levels[0].mclk.vMPLL_AD_FUNC_CNTL =
		cpu_to_be32(ni_pi->clock_registers.mpll_ad_func_cntl);
	table->initialState.levels[0].mclk.vMPLL_AD_FUNC_CNTL_2 =
		cpu_to_be32(ni_pi->clock_registers.mpll_ad_func_cntl_2);
	table->initialState.levels[0].mclk.vMPLL_DQ_FUNC_CNTL =
		cpu_to_be32(ni_pi->clock_registers.mpll_dq_func_cntl);
	table->initialState.levels[0].mclk.vMPLL_DQ_FUNC_CNTL_2 =
		cpu_to_be32(ni_pi->clock_registers.mpll_dq_func_cntl_2);
	table->initialState.levels[0].mclk.vMCLK_PWRMGT_CNTL =
		cpu_to_be32(ni_pi->clock_registers.mclk_pwrmgt_cntl);
	table->initialState.levels[0].mclk.vDLL_CNTL =
		cpu_to_be32(ni_pi->clock_registers.dll_cntl);
	table->initialState.levels[0].mclk.vMPLL_SS =
		cpu_to_be32(ni_pi->clock_registers.mpll_ss1);
	table->initialState.levels[0].mclk.vMPLL_SS2 =
		cpu_to_be32(ni_pi->clock_registers.mpll_ss2);
	table->initialState.levels[0].mclk.mclk_value =
		cpu_to_be32(initial_state->performance_levels[0].mclk);

	/* engine PLL / sclk registers */
	table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL =
		cpu_to_be32(ni_pi->clock_registers.cg_spll_func_cntl);
	table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_2 =
		cpu_to_be32(ni_pi->clock_registers.cg_spll_func_cntl_2);
	table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_3 =
		cpu_to_be32(ni_pi->clock_registers.cg_spll_func_cntl_3);
	table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_4 =
		cpu_to_be32(ni_pi->clock_registers.cg_spll_func_cntl_4);
	table->initialState.levels[0].sclk.vCG_SPLL_SPREAD_SPECTRUM =
		cpu_to_be32(ni_pi->clock_registers.cg_spll_spread_spectrum);
	table->initialState.levels[0].sclk.vCG_SPLL_SPREAD_SPECTRUM_2 =
		cpu_to_be32(ni_pi->clock_registers.cg_spll_spread_spectrum_2);
	table->initialState.levels[0].sclk.sclk_value =
		cpu_to_be32(initial_state->performance_levels[0].sclk);
	table->initialState.levels[0].arbRefreshState =
		NISLANDS_INITIAL_STATE_ARB_INDEX;

	table->initialState.levels[0].ACIndex = 0;

	/* VDDC (and its standard value) for level 0 */
	ret = ni_populate_voltage_value(rdev, &eg_pi->vddc_voltage_table,
					initial_state->performance_levels[0].vddc,
					&table->initialState.levels[0].vddc);
	if (!ret) {
		u16 std_vddc;

		ret = ni_get_std_voltage_value(rdev,
					       &table->initialState.levels[0].vddc,
					       &std_vddc);
		if (!ret)
			ni_populate_std_voltage_value(rdev, std_vddc,
						      table->initialState.levels[0].vddc.index,
						      &table->initialState.levels[0].std_vddc);
	}

	if (eg_pi->vddci_control)
		ni_populate_voltage_value(rdev,
					  &eg_pi->vddci_voltage_table,
					  initial_state->performance_levels[0].vddci,
					  &table->initialState.levels[0].vddci);

	ni_populate_initial_mvdd_value(rdev, &table->initialState.levels[0].mvdd);

	reg = CG_R(0xffff) | CG_L(0);
	table->initialState.levels[0].aT = cpu_to_be32(reg);

	table->initialState.levels[0].bSP = cpu_to_be32(pi->dsp);

	if (pi->boot_in_gen2)
		table->initialState.levels[0].gen2PCIE = 1;
	else
		table->initialState.levels[0].gen2PCIE = 0;

	if (pi->mem_gddr5) {
		table->initialState.levels[0].strobeMode =
			cypress_get_strobe_mode_settings(rdev,
							 initial_state->performance_levels[0].mclk);

		/* EDC read/write flags only above the enable threshold */
		if (initial_state->performance_levels[0].mclk > pi->mclk_edc_enable_threshold)
			table->initialState.levels[0].mcFlags = NISLANDS_SMC_MC_EDC_RD_FLAG | NISLANDS_SMC_MC_EDC_WR_FLAG;
		else
			table->initialState.levels[0].mcFlags =  0;
	}

	table->initialState.levelCount = 1;

	table->initialState.flags |= PPSMC_SWSTATE_FLAG_DC;

	/* DPM2 throttling disabled for the initial state */
	table->initialState.levels[0].dpm2.MaxPS = 0;
	table->initialState.levels[0].dpm2.NearTDPDec = 0;
	table->initialState.levels[0].dpm2.AboveSafeInc = 0;
	table->initialState.levels[0].dpm2.BelowSafeInc = 0;

	reg = MIN_POWER_MASK | MAX_POWER_MASK;
	table->initialState.levels[0].SQPowerThrottle = cpu_to_be32(reg);

	reg = MAX_POWER_DELTA_MASK | STI_SIZE_MASK | LTI_RATIO_MASK;
	table->initialState.levels[0].SQPowerThrottle_2 = cpu_to_be32(reg);

	return 0;
}
1792 
1793 static int ni_populate_smc_acpi_state(struct radeon_device *rdev,
1794 				      NISLANDS_SMC_STATETABLE *table)
1795 {
1796 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1797 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1798 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
1799 	u32 mpll_ad_func_cntl   = ni_pi->clock_registers.mpll_ad_func_cntl;
1800 	u32 mpll_ad_func_cntl_2 = ni_pi->clock_registers.mpll_ad_func_cntl_2;
1801 	u32 mpll_dq_func_cntl   = ni_pi->clock_registers.mpll_dq_func_cntl;
1802 	u32 mpll_dq_func_cntl_2 = ni_pi->clock_registers.mpll_dq_func_cntl_2;
1803 	u32 spll_func_cntl      = ni_pi->clock_registers.cg_spll_func_cntl;
1804 	u32 spll_func_cntl_2    = ni_pi->clock_registers.cg_spll_func_cntl_2;
1805 	u32 spll_func_cntl_3    = ni_pi->clock_registers.cg_spll_func_cntl_3;
1806 	u32 spll_func_cntl_4    = ni_pi->clock_registers.cg_spll_func_cntl_4;
1807 	u32 mclk_pwrmgt_cntl    = ni_pi->clock_registers.mclk_pwrmgt_cntl;
1808 	u32 dll_cntl            = ni_pi->clock_registers.dll_cntl;
1809 	u32 reg;
1810 	int ret;
1811 
1812 	table->ACPIState = table->initialState;
1813 
1814 	table->ACPIState.flags &= ~PPSMC_SWSTATE_FLAG_DC;
1815 
1816 	if (pi->acpi_vddc) {
1817 		ret = ni_populate_voltage_value(rdev,
1818 						&eg_pi->vddc_voltage_table,
1819 						pi->acpi_vddc, &table->ACPIState.levels[0].vddc);
1820 		if (!ret) {
1821 			u16 std_vddc;
1822 
1823 			ret = ni_get_std_voltage_value(rdev,
1824 						       &table->ACPIState.levels[0].vddc, &std_vddc);
1825 			if (!ret)
1826 				ni_populate_std_voltage_value(rdev, std_vddc,
1827 							      table->ACPIState.levels[0].vddc.index,
1828 							      &table->ACPIState.levels[0].std_vddc);
1829 		}
1830 
1831 		if (pi->pcie_gen2) {
1832 			if (pi->acpi_pcie_gen2)
1833 				table->ACPIState.levels[0].gen2PCIE = 1;
1834 			else
1835 				table->ACPIState.levels[0].gen2PCIE = 0;
1836 		} else {
1837 			table->ACPIState.levels[0].gen2PCIE = 0;
1838 		}
1839 	} else {
1840 		ret = ni_populate_voltage_value(rdev,
1841 						&eg_pi->vddc_voltage_table,
1842 						pi->min_vddc_in_table,
1843 						&table->ACPIState.levels[0].vddc);
1844 		if (!ret) {
1845 			u16 std_vddc;
1846 
1847 			ret = ni_get_std_voltage_value(rdev,
1848 						       &table->ACPIState.levels[0].vddc,
1849 						       &std_vddc);
1850 			if (!ret)
1851 				ni_populate_std_voltage_value(rdev, std_vddc,
1852 							      table->ACPIState.levels[0].vddc.index,
1853 							      &table->ACPIState.levels[0].std_vddc);
1854 		}
1855 		table->ACPIState.levels[0].gen2PCIE = 0;
1856 	}
1857 
1858 	if (eg_pi->acpi_vddci) {
1859 		if (eg_pi->vddci_control)
1860 			ni_populate_voltage_value(rdev,
1861 						  &eg_pi->vddci_voltage_table,
1862 						  eg_pi->acpi_vddci,
1863 						  &table->ACPIState.levels[0].vddci);
1864 	}
1865 
1866 
1867 	mpll_ad_func_cntl &= ~PDNB;
1868 
1869 	mpll_ad_func_cntl_2 |= BIAS_GEN_PDNB | RESET_EN;
1870 
1871         if (pi->mem_gddr5)
1872                 mpll_dq_func_cntl &= ~PDNB;
1873         mpll_dq_func_cntl_2 |= BIAS_GEN_PDNB | RESET_EN | BYPASS;
1874 
1875 
1876 	mclk_pwrmgt_cntl |= (MRDCKA0_RESET |
1877 			     MRDCKA1_RESET |
1878 			     MRDCKB0_RESET |
1879 			     MRDCKB1_RESET |
1880 			     MRDCKC0_RESET |
1881 			     MRDCKC1_RESET |
1882 			     MRDCKD0_RESET |
1883 			     MRDCKD1_RESET);
1884 
1885 	mclk_pwrmgt_cntl &= ~(MRDCKA0_PDNB |
1886 			      MRDCKA1_PDNB |
1887 			      MRDCKB0_PDNB |
1888 			      MRDCKB1_PDNB |
1889 			      MRDCKC0_PDNB |
1890 			      MRDCKC1_PDNB |
1891 			      MRDCKD0_PDNB |
1892 			      MRDCKD1_PDNB);
1893 
1894 	dll_cntl |= (MRDCKA0_BYPASS |
1895                      MRDCKA1_BYPASS |
1896                      MRDCKB0_BYPASS |
1897                      MRDCKB1_BYPASS |
1898                      MRDCKC0_BYPASS |
1899                      MRDCKC1_BYPASS |
1900                      MRDCKD0_BYPASS |
1901                      MRDCKD1_BYPASS);
1902 
1903         spll_func_cntl_2 &= ~SCLK_MUX_SEL_MASK;
1904 	spll_func_cntl_2 |= SCLK_MUX_SEL(4);
1905 
1906 	table->ACPIState.levels[0].mclk.vMPLL_AD_FUNC_CNTL = cpu_to_be32(mpll_ad_func_cntl);
1907 	table->ACPIState.levels[0].mclk.vMPLL_AD_FUNC_CNTL_2 = cpu_to_be32(mpll_ad_func_cntl_2);
1908 	table->ACPIState.levels[0].mclk.vMPLL_DQ_FUNC_CNTL = cpu_to_be32(mpll_dq_func_cntl);
1909 	table->ACPIState.levels[0].mclk.vMPLL_DQ_FUNC_CNTL_2 = cpu_to_be32(mpll_dq_func_cntl_2);
1910 	table->ACPIState.levels[0].mclk.vMCLK_PWRMGT_CNTL = cpu_to_be32(mclk_pwrmgt_cntl);
1911 	table->ACPIState.levels[0].mclk.vDLL_CNTL = cpu_to_be32(dll_cntl);
1912 
1913 	table->ACPIState.levels[0].mclk.mclk_value = 0;
1914 
1915 	table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL = cpu_to_be32(spll_func_cntl);
1916 	table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_2 = cpu_to_be32(spll_func_cntl_2);
1917 	table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_3 = cpu_to_be32(spll_func_cntl_3);
1918 	table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_4 = cpu_to_be32(spll_func_cntl_4);
1919 
1920 	table->ACPIState.levels[0].sclk.sclk_value = 0;
1921 
1922 	ni_populate_mvdd_value(rdev, 0, &table->ACPIState.levels[0].mvdd);
1923 
1924 	if (eg_pi->dynamic_ac_timing)
1925 		table->ACPIState.levels[0].ACIndex = 1;
1926 
1927 	table->ACPIState.levels[0].dpm2.MaxPS = 0;
1928 	table->ACPIState.levels[0].dpm2.NearTDPDec = 0;
1929 	table->ACPIState.levels[0].dpm2.AboveSafeInc = 0;
1930 	table->ACPIState.levels[0].dpm2.BelowSafeInc = 0;
1931 
1932 	reg = MIN_POWER_MASK | MAX_POWER_MASK;
1933 	table->ACPIState.levels[0].SQPowerThrottle = cpu_to_be32(reg);
1934 
1935 	reg = MAX_POWER_DELTA_MASK | STI_SIZE_MASK | LTI_RATIO_MASK;
1936 	table->ACPIState.levels[0].SQPowerThrottle_2 = cpu_to_be32(reg);
1937 
1938 	return 0;
1939 }
1940 
1941 static int ni_init_smc_table(struct radeon_device *rdev)
1942 {
1943 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1944 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
1945 	int ret;
1946 	struct radeon_ps *radeon_boot_state = rdev->pm.dpm.boot_ps;
1947 	NISLANDS_SMC_STATETABLE *table = &ni_pi->smc_statetable;
1948 
1949 	memset(table, 0, sizeof(NISLANDS_SMC_STATETABLE));
1950 
1951 	ni_populate_smc_voltage_tables(rdev, table);
1952 
1953 	switch (rdev->pm.int_thermal_type) {
1954 	case THERMAL_TYPE_NI:
1955 	case THERMAL_TYPE_EMC2103_WITH_INTERNAL:
1956 		table->thermalProtectType = PPSMC_THERMAL_PROTECT_TYPE_INTERNAL;
1957 		break;
1958 	case THERMAL_TYPE_NONE:
1959 		table->thermalProtectType = PPSMC_THERMAL_PROTECT_TYPE_NONE;
1960 		break;
1961 	default:
1962 		table->thermalProtectType = PPSMC_THERMAL_PROTECT_TYPE_EXTERNAL;
1963 		break;
1964 	}
1965 
1966 	if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_HARDWAREDC)
1967 		table->systemFlags |= PPSMC_SYSTEMFLAG_GPIO_DC;
1968 
1969 	if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_REGULATOR_HOT)
1970 		table->systemFlags |= PPSMC_SYSTEMFLAG_REGULATOR_HOT;
1971 
1972 	if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_STEPVDDC)
1973 		table->systemFlags |= PPSMC_SYSTEMFLAG_STEPVDDC;
1974 
1975 	if (pi->mem_gddr5)
1976 		table->systemFlags |= PPSMC_SYSTEMFLAG_GDDR5;
1977 
1978 	ret = ni_populate_smc_initial_state(rdev, radeon_boot_state, table);
1979 	if (ret)
1980 		return ret;
1981 
1982 	ret = ni_populate_smc_acpi_state(rdev, table);
1983 	if (ret)
1984 		return ret;
1985 
1986 	table->driverState = table->initialState;
1987 
1988 	table->ULVState = table->initialState;
1989 
1990 	ret = ni_do_program_memory_timing_parameters(rdev, radeon_boot_state,
1991 						     NISLANDS_INITIAL_STATE_ARB_INDEX);
1992 	if (ret)
1993 		return ret;
1994 
1995 	return rv770_copy_bytes_to_smc(rdev, pi->state_table_start, (u8 *)table,
1996 				       sizeof(NISLANDS_SMC_STATETABLE), pi->sram_end);
1997 }
1998 
1999 static int ni_calculate_sclk_params(struct radeon_device *rdev,
2000 				    u32 engine_clock,
2001 				    NISLANDS_SMC_SCLK_VALUE *sclk)
2002 {
2003 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2004 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
2005 	struct atom_clock_dividers dividers;
2006 	u32 spll_func_cntl = ni_pi->clock_registers.cg_spll_func_cntl;
2007 	u32 spll_func_cntl_2 = ni_pi->clock_registers.cg_spll_func_cntl_2;
2008 	u32 spll_func_cntl_3 = ni_pi->clock_registers.cg_spll_func_cntl_3;
2009 	u32 spll_func_cntl_4 = ni_pi->clock_registers.cg_spll_func_cntl_4;
2010 	u32 cg_spll_spread_spectrum = ni_pi->clock_registers.cg_spll_spread_spectrum;
2011 	u32 cg_spll_spread_spectrum_2 = ni_pi->clock_registers.cg_spll_spread_spectrum_2;
2012 	u64 tmp;
2013 	u32 reference_clock = rdev->clock.spll.reference_freq;
2014 	u32 reference_divider;
2015 	u32 fbdiv;
2016 	int ret;
2017 
2018 	ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
2019 					     engine_clock, false, &dividers);
2020 	if (ret)
2021 		return ret;
2022 
2023 	reference_divider = 1 + dividers.ref_div;
2024 
2025 
2026 	tmp = (u64) engine_clock * reference_divider * dividers.post_div * 16834;
2027 	do_div(tmp, reference_clock);
2028 	fbdiv = (u32) tmp;
2029 
2030 	spll_func_cntl &= ~(SPLL_PDIV_A_MASK | SPLL_REF_DIV_MASK);
2031 	spll_func_cntl |= SPLL_REF_DIV(dividers.ref_div);
2032 	spll_func_cntl |= SPLL_PDIV_A(dividers.post_div);
2033 
2034 	spll_func_cntl_2 &= ~SCLK_MUX_SEL_MASK;
2035 	spll_func_cntl_2 |= SCLK_MUX_SEL(2);
2036 
2037 	spll_func_cntl_3 &= ~SPLL_FB_DIV_MASK;
2038 	spll_func_cntl_3 |= SPLL_FB_DIV(fbdiv);
2039 	spll_func_cntl_3 |= SPLL_DITHEN;
2040 
2041 	if (pi->sclk_ss) {
2042 		struct radeon_atom_ss ss;
2043 		u32 vco_freq = engine_clock * dividers.post_div;
2044 
2045 		if (radeon_atombios_get_asic_ss_info(rdev, &ss,
2046 						     ASIC_INTERNAL_ENGINE_SS, vco_freq)) {
2047 			u32 clk_s = reference_clock * 5 / (reference_divider * ss.rate);
2048 			u32 clk_v = 4 * ss.percentage * fbdiv / (clk_s * 10000);
2049 
2050 			cg_spll_spread_spectrum &= ~CLK_S_MASK;
2051 			cg_spll_spread_spectrum |= CLK_S(clk_s);
2052 			cg_spll_spread_spectrum |= SSEN;
2053 
2054 			cg_spll_spread_spectrum_2 &= ~CLK_V_MASK;
2055 			cg_spll_spread_spectrum_2 |= CLK_V(clk_v);
2056 		}
2057 	}
2058 
2059 	sclk->sclk_value = engine_clock;
2060 	sclk->vCG_SPLL_FUNC_CNTL = spll_func_cntl;
2061 	sclk->vCG_SPLL_FUNC_CNTL_2 = spll_func_cntl_2;
2062 	sclk->vCG_SPLL_FUNC_CNTL_3 = spll_func_cntl_3;
2063 	sclk->vCG_SPLL_FUNC_CNTL_4 = spll_func_cntl_4;
2064 	sclk->vCG_SPLL_SPREAD_SPECTRUM = cg_spll_spread_spectrum;
2065 	sclk->vCG_SPLL_SPREAD_SPECTRUM_2 = cg_spll_spread_spectrum_2;
2066 
2067 	return 0;
2068 }
2069 
2070 static int ni_populate_sclk_value(struct radeon_device *rdev,
2071 				  u32 engine_clock,
2072 				  NISLANDS_SMC_SCLK_VALUE *sclk)
2073 {
2074 	NISLANDS_SMC_SCLK_VALUE sclk_tmp;
2075 	int ret;
2076 
2077 	ret = ni_calculate_sclk_params(rdev, engine_clock, &sclk_tmp);
2078 	if (!ret) {
2079 		sclk->sclk_value = cpu_to_be32(sclk_tmp.sclk_value);
2080 		sclk->vCG_SPLL_FUNC_CNTL = cpu_to_be32(sclk_tmp.vCG_SPLL_FUNC_CNTL);
2081 		sclk->vCG_SPLL_FUNC_CNTL_2 = cpu_to_be32(sclk_tmp.vCG_SPLL_FUNC_CNTL_2);
2082 		sclk->vCG_SPLL_FUNC_CNTL_3 = cpu_to_be32(sclk_tmp.vCG_SPLL_FUNC_CNTL_3);
2083 		sclk->vCG_SPLL_FUNC_CNTL_4 = cpu_to_be32(sclk_tmp.vCG_SPLL_FUNC_CNTL_4);
2084 		sclk->vCG_SPLL_SPREAD_SPECTRUM = cpu_to_be32(sclk_tmp.vCG_SPLL_SPREAD_SPECTRUM);
2085 		sclk->vCG_SPLL_SPREAD_SPECTRUM_2 = cpu_to_be32(sclk_tmp.vCG_SPLL_SPREAD_SPECTRUM_2);
2086 	}
2087 
2088 	return ret;
2089 }
2090 
2091 static int ni_init_smc_spll_table(struct radeon_device *rdev)
2092 {
2093         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2094 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
2095 	SMC_NISLANDS_SPLL_DIV_TABLE *spll_table;
2096 	NISLANDS_SMC_SCLK_VALUE sclk_params;
2097 	u32 fb_div;
2098 	u32 p_div;
2099 	u32 clk_s;
2100 	u32 clk_v;
2101 	u32 sclk = 0;
2102 	int i, ret;
2103 	u32 tmp;
2104 
2105 	if (ni_pi->spll_table_start == 0)
2106 		return -EINVAL;
2107 
2108 	spll_table = kzalloc(sizeof(SMC_NISLANDS_SPLL_DIV_TABLE), GFP_KERNEL);
2109 	if (spll_table == NULL)
2110 		return -ENOMEM;
2111 
2112 	for (i = 0; i < 256; i++) {
2113 		ret = ni_calculate_sclk_params(rdev, sclk, &sclk_params);
2114 		if (ret)
2115 			break;
2116 
2117 		p_div = (sclk_params.vCG_SPLL_FUNC_CNTL & SPLL_PDIV_A_MASK) >> SPLL_PDIV_A_SHIFT;
2118 		fb_div = (sclk_params.vCG_SPLL_FUNC_CNTL_3 & SPLL_FB_DIV_MASK) >> SPLL_FB_DIV_SHIFT;
2119 		clk_s = (sclk_params.vCG_SPLL_SPREAD_SPECTRUM & CLK_S_MASK) >> CLK_S_SHIFT;
2120 		clk_v = (sclk_params.vCG_SPLL_SPREAD_SPECTRUM_2 & CLK_V_MASK) >> CLK_V_SHIFT;
2121 
2122 		fb_div &= ~0x00001FFF;
2123 		fb_div >>= 1;
2124 		clk_v >>= 6;
2125 
2126 		if (p_div & ~(SMC_NISLANDS_SPLL_DIV_TABLE_PDIV_MASK >> SMC_NISLANDS_SPLL_DIV_TABLE_PDIV_SHIFT))
2127 			ret = -EINVAL;
2128 
2129 		if (clk_s & ~(SMC_NISLANDS_SPLL_DIV_TABLE_CLKS_MASK >> SMC_NISLANDS_SPLL_DIV_TABLE_CLKS_SHIFT))
2130 			ret = -EINVAL;
2131 
2132 		if (clk_s & ~(SMC_NISLANDS_SPLL_DIV_TABLE_CLKS_MASK >> SMC_NISLANDS_SPLL_DIV_TABLE_CLKS_SHIFT))
2133 			ret = -EINVAL;
2134 
2135 		if (clk_v & ~(SMC_NISLANDS_SPLL_DIV_TABLE_CLKV_MASK >> SMC_NISLANDS_SPLL_DIV_TABLE_CLKV_SHIFT))
2136 			ret = -EINVAL;
2137 
2138 		if (ret)
2139 			break;
2140 
2141 		tmp = ((fb_div << SMC_NISLANDS_SPLL_DIV_TABLE_FBDIV_SHIFT) & SMC_NISLANDS_SPLL_DIV_TABLE_FBDIV_MASK) |
2142 			((p_div << SMC_NISLANDS_SPLL_DIV_TABLE_PDIV_SHIFT) & SMC_NISLANDS_SPLL_DIV_TABLE_PDIV_MASK);
2143 		spll_table->freq[i] = cpu_to_be32(tmp);
2144 
2145 		tmp = ((clk_v << SMC_NISLANDS_SPLL_DIV_TABLE_CLKV_SHIFT) & SMC_NISLANDS_SPLL_DIV_TABLE_CLKV_MASK) |
2146 			((clk_s << SMC_NISLANDS_SPLL_DIV_TABLE_CLKS_SHIFT) & SMC_NISLANDS_SPLL_DIV_TABLE_CLKS_MASK);
2147 		spll_table->ss[i] = cpu_to_be32(tmp);
2148 
2149 		sclk += 512;
2150 	}
2151 
2152 	if (!ret)
2153 		ret = rv770_copy_bytes_to_smc(rdev, ni_pi->spll_table_start, (u8 *)spll_table,
2154 					      sizeof(SMC_NISLANDS_SPLL_DIV_TABLE), pi->sram_end);
2155 
2156 	kfree(spll_table);
2157 
2158 	return ret;
2159 }
2160 
/*
 * ni_populate_mclk_value - build the MPLL/DLL register set for one memory
 * clock and store it, byte-swapped for the big-endian SMC, into @mclk.
 *
 * @engine_clock:  present for the shared populate-signature; not read here
 * @memory_clock:  target mclk in 10 kHz units (fed to the ATOM divider calc)
 * @strobe_mode:   whether memory strobe mode is in use at this mclk
 * @dll_state_on:  whether the memory read DLLs should stay powered
 *
 * Returns 0 on success or the error from the ATOM divider lookup.
 */
static int ni_populate_mclk_value(struct radeon_device *rdev,
				  u32 engine_clock,
				  u32 memory_clock,
				  NISLANDS_SMC_MCLK_VALUE *mclk,
				  bool strobe_mode,
				  bool dll_state_on)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct ni_power_info *ni_pi = ni_get_pi(rdev);
	/* Start from the register values captured at driver init. */
	u32 mpll_ad_func_cntl = ni_pi->clock_registers.mpll_ad_func_cntl;
	u32 mpll_ad_func_cntl_2 = ni_pi->clock_registers.mpll_ad_func_cntl_2;
	u32 mpll_dq_func_cntl = ni_pi->clock_registers.mpll_dq_func_cntl;
	u32 mpll_dq_func_cntl_2 = ni_pi->clock_registers.mpll_dq_func_cntl_2;
	u32 mclk_pwrmgt_cntl = ni_pi->clock_registers.mclk_pwrmgt_cntl;
	u32 dll_cntl = ni_pi->clock_registers.dll_cntl;
	u32 mpll_ss1 = ni_pi->clock_registers.mpll_ss1;
	u32 mpll_ss2 = ni_pi->clock_registers.mpll_ss2;
	struct atom_clock_dividers dividers;
	u32 ibias;
	u32 dll_speed;
	int ret;
	u32 mc_seq_misc7;

	/* Ask the VBIOS for ref/post/feedback dividers for this mclk. */
	ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_MEMORY_PLL_PARAM,
					     memory_clock, strobe_mode, &dividers);
	if (ret)
		return ret;

	if (!strobe_mode) {
		/* NOTE(review): bit 27 of MC_SEQ_MISC7 forces post_div to 1
		 * outside strobe mode — looks like a board/memory strap;
		 * confirm against the MC register spec. */
		mc_seq_misc7 = RREG32(MC_SEQ_MISC7);

		if (mc_seq_misc7 & 0x8000000)
			dividers.post_div = 1;
	}

	/* Map the whole feedback divider to the matching PLL bias setting. */
	ibias = cypress_map_clkf_to_ibias(rdev, dividers.whole_fb_div);

	/* Program the address/command (AD) MPLL dividers. */
	mpll_ad_func_cntl &= ~(CLKR_MASK |
			       YCLK_POST_DIV_MASK |
			       CLKF_MASK |
			       CLKFRAC_MASK |
			       IBIAS_MASK);
	mpll_ad_func_cntl |= CLKR(dividers.ref_div);
	mpll_ad_func_cntl |= YCLK_POST_DIV(dividers.post_div);
	mpll_ad_func_cntl |= CLKF(dividers.whole_fb_div);
	mpll_ad_func_cntl |= CLKFRAC(dividers.frac_fb_div);
	mpll_ad_func_cntl |= IBIAS(ibias);

	if (dividers.vco_mode)
		mpll_ad_func_cntl_2 |= VCO_MODE;
	else
		mpll_ad_func_cntl_2 &= ~VCO_MODE;

	/* The data (DQ) MPLL only needs programming for GDDR5. */
	if (pi->mem_gddr5) {
		mpll_dq_func_cntl &= ~(CLKR_MASK |
				       YCLK_POST_DIV_MASK |
				       CLKF_MASK |
				       CLKFRAC_MASK |
				       IBIAS_MASK);
		mpll_dq_func_cntl |= CLKR(dividers.ref_div);
		mpll_dq_func_cntl |= YCLK_POST_DIV(dividers.post_div);
		mpll_dq_func_cntl |= CLKF(dividers.whole_fb_div);
		mpll_dq_func_cntl |= CLKFRAC(dividers.frac_fb_div);
		mpll_dq_func_cntl |= IBIAS(ibias);

		/* Strobe mode powers down the DQ PLL output buffer. */
		if (strobe_mode)
			mpll_dq_func_cntl &= ~PDNB;
		else
			mpll_dq_func_cntl |= PDNB;

		if (dividers.vco_mode)
			mpll_dq_func_cntl_2 |= VCO_MODE;
		else
			mpll_dq_func_cntl_2 &= ~VCO_MODE;
	}

	/* Apply memory spread spectrum if enabled and the VBIOS has SS
	 * parameters for this VCO frequency. */
	if (pi->mclk_ss) {
		struct radeon_atom_ss ss;
		u32 vco_freq = memory_clock * dividers.post_div;

		if (radeon_atombios_get_asic_ss_info(rdev, &ss,
						     ASIC_INTERNAL_MEMORY_SS, vco_freq)) {
			u32 reference_clock = rdev->clock.mpll.reference_freq;
			u32 decoded_ref = rv740_get_decoded_reference_divider(dividers.ref_div);
			u32 clk_s = reference_clock * 5 / (decoded_ref * ss.rate);
			u32 clk_v = ss.percentage *
				(0x4000 * dividers.whole_fb_div + 0x800 * dividers.frac_fb_div) / (clk_s * 625);

			mpll_ss1 &= ~CLKV_MASK;
			mpll_ss1 |= CLKV(clk_v);

			mpll_ss2 &= ~CLKS_MASK;
			mpll_ss2 |= CLKS(clk_s);
		}
	}

	dll_speed = rv740_get_dll_speed(pi->mem_gddr5,
					memory_clock);

	mclk_pwrmgt_cntl &= ~DLL_SPEED_MASK;
	mclk_pwrmgt_cntl |= DLL_SPEED(dll_speed);
	/* PDNB bits keep the per-channel read DLLs powered (active high). */
	if (dll_state_on)
		mclk_pwrmgt_cntl |= (MRDCKA0_PDNB |
				     MRDCKA1_PDNB |
				     MRDCKB0_PDNB |
				     MRDCKB1_PDNB |
				     MRDCKC0_PDNB |
				     MRDCKC1_PDNB |
				     MRDCKD0_PDNB |
				     MRDCKD1_PDNB);
	else
		mclk_pwrmgt_cntl &= ~(MRDCKA0_PDNB |
				      MRDCKA1_PDNB |
				      MRDCKB0_PDNB |
				      MRDCKB1_PDNB |
				      MRDCKC0_PDNB |
				      MRDCKC1_PDNB |
				      MRDCKD0_PDNB |
				      MRDCKD1_PDNB);


	/* Byte-swap everything for the big-endian SMC. */
	mclk->mclk_value = cpu_to_be32(memory_clock);
	mclk->vMPLL_AD_FUNC_CNTL = cpu_to_be32(mpll_ad_func_cntl);
	mclk->vMPLL_AD_FUNC_CNTL_2 = cpu_to_be32(mpll_ad_func_cntl_2);
	mclk->vMPLL_DQ_FUNC_CNTL = cpu_to_be32(mpll_dq_func_cntl);
	mclk->vMPLL_DQ_FUNC_CNTL_2 = cpu_to_be32(mpll_dq_func_cntl_2);
	mclk->vMCLK_PWRMGT_CNTL = cpu_to_be32(mclk_pwrmgt_cntl);
	mclk->vDLL_CNTL = cpu_to_be32(dll_cntl);
	mclk->vMPLL_SS = cpu_to_be32(mpll_ss1);
	mclk->vMPLL_SS2 = cpu_to_be32(mpll_ss2);

	return 0;
}
2294 
2295 static void ni_populate_smc_sp(struct radeon_device *rdev,
2296 			       struct radeon_ps *radeon_state,
2297 			       NISLANDS_SMC_SWSTATE *smc_state)
2298 {
2299 	struct ni_ps *ps = ni_get_ps(radeon_state);
2300 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2301 	int i;
2302 
2303 	for (i = 0; i < ps->performance_level_count - 1; i++)
2304 		smc_state->levels[i].bSP = cpu_to_be32(pi->dsp);
2305 
2306 	smc_state->levels[ps->performance_level_count - 1].bSP =
2307 		cpu_to_be32(pi->psp);
2308 }
2309 
/*
 * ni_convert_power_level_to_smc - translate one driver performance level
 * (rv7xx_pl) into the SMC's hardware performance level layout: PCIe gen2
 * flag, sclk registers, mclk registers and flags (stutter/EDC/RTT/strobe),
 * and the VDDC/VDDCI/MVDD voltage entries.
 *
 * Returns 0 on success or the first error from a populate helper.
 */
static int ni_convert_power_level_to_smc(struct radeon_device *rdev,
					 struct rv7xx_pl *pl,
					 NISLANDS_SMC_HW_PERFORMANCE_LEVEL *level)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
        struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
        struct ni_power_info *ni_pi = ni_get_pi(rdev);
	int ret;
	bool dll_state_on;
	u16 std_vddc;
	/* Snapshot the display stutter enables for the mcFlags decision. */
	u32 tmp = RREG32(DC_STUTTER_CNTL);

	level->gen2PCIE = pi->pcie_gen2 ?
		((pl->flags & ATOM_PPLIB_R600_FLAGS_PCIEGEN2) ? 1 : 0) : 0;

	ret = ni_populate_sclk_value(rdev, pl->sclk, &level->sclk);
	if (ret)
		return ret;

	level->mcFlags =  0;
	/* Memory self-refresh stutter: only below the configured threshold,
	 * with UVD idle and both display stutter enables set. */
	if (pi->mclk_stutter_mode_threshold &&
	    (pl->mclk <= pi->mclk_stutter_mode_threshold) &&
	    !eg_pi->uvd_enabled &&
	    (tmp & DC_STUTTER_ENABLE_A) &&
	    (tmp & DC_STUTTER_ENABLE_B))
		level->mcFlags |= NISLANDS_SMC_MC_STUTTER_EN;

	if (pi->mem_gddr5) {
		/* EDC read/write above their respective mclk thresholds. */
		if (pl->mclk > pi->mclk_edc_enable_threshold)
			level->mcFlags |= NISLANDS_SMC_MC_EDC_RD_FLAG;
		if (pl->mclk > eg_pi->mclk_edc_wr_enable_threshold)
			level->mcFlags |= NISLANDS_SMC_MC_EDC_WR_FLAG;

		level->strobeMode = cypress_get_strobe_mode_settings(rdev, pl->mclk);

		if (level->strobeMode & NISLANDS_SMC_STROBE_ENABLE) {
			/* NOTE(review): DLL power state comes from bit 1 of
			 * MC_SEQ_MISC5 or MC_SEQ_MISC6 depending on the
			 * mclk frequency ratio vs the MC_SEQ_MISC7 strap —
			 * confirm against the MC sequencer spec. */
			if (cypress_get_mclk_frequency_ratio(rdev, pl->mclk, true) >=
			    ((RREG32(MC_SEQ_MISC7) >> 16) & 0xf))
				dll_state_on = ((RREG32(MC_SEQ_MISC5) >> 1) & 0x1) ? true : false;
			else
				dll_state_on = ((RREG32(MC_SEQ_MISC6) >> 1) & 0x1) ? true : false;
		} else {
			dll_state_on = false;
			if (pl->mclk > ni_pi->mclk_rtt_mode_threshold)
				level->mcFlags |= NISLANDS_SMC_MC_RTT_ENABLE;
		}

		ret = ni_populate_mclk_value(rdev, pl->sclk, pl->mclk,
					     &level->mclk,
					     (level->strobeMode & NISLANDS_SMC_STROBE_ENABLE) != 0,
					     dll_state_on);
	} else
		/* Non-GDDR5: strobe mode and DLLs are always on. */
		ret = ni_populate_mclk_value(rdev, pl->sclk, pl->mclk, &level->mclk, 1, 1);

	if (ret)
		return ret;

	ret = ni_populate_voltage_value(rdev, &eg_pi->vddc_voltage_table,
					pl->vddc, &level->vddc);
	if (ret)
		return ret;

	/* Convert the requested VDDC to its standardized (leakage-adjusted)
	 * value before handing it to the SMC. */
	ret = ni_get_std_voltage_value(rdev, &level->vddc, &std_vddc);
	if (ret)
		return ret;

	ni_populate_std_voltage_value(rdev, std_vddc,
				      level->vddc.index, &level->std_vddc);

	if (eg_pi->vddci_control) {
		ret = ni_populate_voltage_value(rdev, &eg_pi->vddci_voltage_table,
						pl->vddci, &level->vddci);
		if (ret)
			return ret;
	}

	ni_populate_mvdd_value(rdev, pl->mclk, &level->mvdd);

	return ret;
}
2390 
/*
 * ni_populate_smc_t - fill each level's aT field: CG_R (ramp-down) and
 * CG_L (ramp-up) time intervals between adjacent performance levels,
 * derived from r600_calculate_at() and scaled by the bsp/pbsp periods.
 *
 * Returns 0 on success, -EINVAL if the state has 9 or more levels
 * (more than the aT scheme supports).
 */
static int ni_populate_smc_t(struct radeon_device *rdev,
			     struct radeon_ps *radeon_state,
			     NISLANDS_SMC_SWSTATE *smc_state)
{
        struct rv7xx_power_info *pi = rv770_get_pi(rdev);
        struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	struct ni_ps *state = ni_get_ps(radeon_state);
	u32 a_t;
	u32 t_l, t_h;
	u32 high_bsp;
	int i, ret;

	if (state->performance_level_count >= 9)
		return -EINVAL;

	/* With a single level there is no transition: disable ramping. */
	if (state->performance_level_count < 2) {
		a_t = CG_R(0xffff) | CG_L(0);
		smc_state->levels[0].aT = cpu_to_be32(a_t);
		return 0;
	}

	smc_state->levels[0].aT = cpu_to_be32(0);

	/* One pass per adjacent level pair (i, i+1). */
	for (i = 0; i <= state->performance_level_count - 2; i++) {
		/* UVD uses a tighter sampling interval (2 or 8 per step). */
		if (eg_pi->uvd_enabled)
			ret = r600_calculate_at(
				1000 * (i * (eg_pi->smu_uvd_hs ? 2 : 8) + 2),
				100 * R600_AH_DFLT,
				state->performance_levels[i + 1].sclk,
				state->performance_levels[i].sclk,
				&t_l,
				&t_h);
		else
			ret = r600_calculate_at(
				1000 * (i + 1),
				100 * R600_AH_DFLT,
				state->performance_levels[i + 1].sclk,
				state->performance_levels[i].sclk,
				&t_l,
				&t_h);

		/* Fall back to fixed hysteresis offsets on failure. */
		if (ret) {
			t_h = (i + 1) * 1000 - 50 * R600_AH_DFLT;
			t_l = (i + 1) * 1000 + 50 * R600_AH_DFLT;
		}

		/* Merge the ramp-down interval into level i's existing aT
		 * (CG_L of level i was set by the previous iteration). */
		a_t = be32_to_cpu(smc_state->levels[i].aT) & ~CG_R_MASK;
		a_t |= CG_R(t_l * pi->bsp / 20000);
		smc_state->levels[i].aT = cpu_to_be32(a_t);

		/* The top level uses the performance bsp (pbsp). */
		high_bsp = (i == state->performance_level_count - 2) ?
			pi->pbsp : pi->bsp;

		a_t = CG_R(0xffff) | CG_L(t_h * high_bsp / 20000);
		smc_state->levels[i + 1].aT = cpu_to_be32(a_t);
	}

	return 0;
}
2450 
/*
 * ni_populate_power_containment_values - fill the DPM2 power-containment
 * parameters (MaxPS pulse skipping, near-TDP decrement, above/below-safe
 * increments, power-boost flag) for every level of the state, and write
 * the computed PowerBoostLimit into the SMC's DPM2 parameter block.
 *
 * Returns 0 on success (or when containment is disabled), -EINVAL on an
 * inconsistent state, or the error from the TDP limit calculation.
 */
static int ni_populate_power_containment_values(struct radeon_device *rdev,
						struct radeon_ps *radeon_state,
						NISLANDS_SMC_SWSTATE *smc_state)
{
        struct rv7xx_power_info *pi = rv770_get_pi(rdev);
        struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	struct ni_power_info *ni_pi = ni_get_pi(rdev);
	struct ni_ps *state = ni_get_ps(radeon_state);
	u32 prev_sclk;
	u32 max_sclk;
	u32 min_sclk;
	int i, ret;
	u32 tdp_limit;
	u32 near_tdp_limit;
	u32 power_boost_limit;
	u8 max_ps_percent;

	if (ni_pi->enable_power_containment == false)
		return 0;

	if (state->performance_level_count == 0)
		return -EINVAL;

	if (smc_state->levelCount != state->performance_level_count)
		return -EINVAL;

	ret = ni_calculate_adjusted_tdp_limits(rdev,
					       false, /* ??? */
					       rdev->pm.dpm.tdp_adjustment,
					       &tdp_limit,
					       &near_tdp_limit);
	if (ret)
		return ret;

	power_boost_limit = ni_calculate_power_boost_limit(rdev, radeon_state, near_tdp_limit);

	/* Push the boost limit to the SMC; if that fails, disable boost
	 * for this state rather than failing the whole conversion. */
	ret = rv770_write_smc_sram_dword(rdev,
					 pi->state_table_start +
					 offsetof(NISLANDS_SMC_STATETABLE, dpm2Params) +
					 offsetof(PP_NIslands_DPM2Parameters, PowerBoostLimit),
					 ni_scale_power_for_smc(power_boost_limit, ni_get_smc_power_scaling_factor(rdev)),
					 pi->sram_end);
	if (ret)
		power_boost_limit = 0;

	/* The lowest level never throttles or boosts. */
	smc_state->levels[0].dpm2.MaxPS = 0;
	smc_state->levels[0].dpm2.NearTDPDec = 0;
	smc_state->levels[0].dpm2.AboveSafeInc = 0;
	smc_state->levels[0].dpm2.BelowSafeInc = 0;
	smc_state->levels[0].stateFlags |= power_boost_limit ? PPSMC_STATEFLAG_POWERBOOST : 0;

	for (i = 1; i < state->performance_level_count; i++) {
		prev_sclk = state->performance_levels[i-1].sclk;
		max_sclk  = state->performance_levels[i].sclk;
		/* The top level may be pulled down further (percent H). */
		max_ps_percent = (i != (state->performance_level_count - 1)) ?
			NISLANDS_DPM2_MAXPS_PERCENT_M : NISLANDS_DPM2_MAXPS_PERCENT_H;

		/* Levels must be sorted by ascending sclk. */
		if (max_sclk < prev_sclk)
			return -EINVAL;

		if ((max_ps_percent == 0) || (prev_sclk == max_sclk) || eg_pi->uvd_enabled)
			min_sclk = max_sclk;
		else if (1 == i)
			min_sclk = prev_sclk;
		else
			min_sclk = (prev_sclk * (u32)max_ps_percent) / 100;

		/* Never throttle below the lowest level's sclk. */
		if (min_sclk < state->performance_levels[0].sclk)
			min_sclk = state->performance_levels[0].sclk;

		if (min_sclk == 0)
			return -EINVAL;

		/* MaxPS = fraction of pulses that may be skipped to reach
		 * min_sclk from max_sclk. */
		smc_state->levels[i].dpm2.MaxPS =
			(u8)((NISLANDS_DPM2_MAX_PULSE_SKIP * (max_sclk - min_sclk)) / max_sclk);
		smc_state->levels[i].dpm2.NearTDPDec = NISLANDS_DPM2_NEAR_TDP_DEC;
		smc_state->levels[i].dpm2.AboveSafeInc = NISLANDS_DPM2_ABOVE_SAFE_INC;
		smc_state->levels[i].dpm2.BelowSafeInc = NISLANDS_DPM2_BELOW_SAFE_INC;
		/* Boost applies to every level except the top one. */
		smc_state->levels[i].stateFlags |=
			((i != (state->performance_level_count - 1)) && power_boost_limit) ?
			PPSMC_STATEFLAG_POWERBOOST : 0;
	}

	return 0;
}
2536 
2537 static int ni_populate_sq_ramping_values(struct radeon_device *rdev,
2538 					 struct radeon_ps *radeon_state,
2539 					 NISLANDS_SMC_SWSTATE *smc_state)
2540 {
2541 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
2542 	struct ni_ps *state = ni_get_ps(radeon_state);
2543 	u32 sq_power_throttle;
2544 	u32 sq_power_throttle2;
2545 	bool enable_sq_ramping = ni_pi->enable_sq_ramping;
2546 	int i;
2547 
2548 	if (state->performance_level_count == 0)
2549 		return -EINVAL;
2550 
2551 	if (smc_state->levelCount != state->performance_level_count)
2552 		return -EINVAL;
2553 
2554 	if (rdev->pm.dpm.sq_ramping_threshold == 0)
2555 		return -EINVAL;
2556 
2557 	if (NISLANDS_DPM2_SQ_RAMP_MAX_POWER > (MAX_POWER_MASK >> MAX_POWER_SHIFT))
2558 		enable_sq_ramping = false;
2559 
2560 	if (NISLANDS_DPM2_SQ_RAMP_MIN_POWER > (MIN_POWER_MASK >> MIN_POWER_SHIFT))
2561 		enable_sq_ramping = false;
2562 
2563 	if (NISLANDS_DPM2_SQ_RAMP_MAX_POWER_DELTA > (MAX_POWER_DELTA_MASK >> MAX_POWER_DELTA_SHIFT))
2564 		enable_sq_ramping = false;
2565 
2566 	if (NISLANDS_DPM2_SQ_RAMP_STI_SIZE > (STI_SIZE_MASK >> STI_SIZE_SHIFT))
2567 		enable_sq_ramping = false;
2568 
2569 	if (NISLANDS_DPM2_SQ_RAMP_LTI_RATIO > (LTI_RATIO_MASK >> LTI_RATIO_SHIFT))
2570 		enable_sq_ramping = false;
2571 
2572 	for (i = 0; i < state->performance_level_count; i++) {
2573 		sq_power_throttle  = 0;
2574 		sq_power_throttle2 = 0;
2575 
2576 		if ((state->performance_levels[i].sclk >= rdev->pm.dpm.sq_ramping_threshold) &&
2577 		    enable_sq_ramping) {
2578 			sq_power_throttle |= MAX_POWER(NISLANDS_DPM2_SQ_RAMP_MAX_POWER);
2579 			sq_power_throttle |= MIN_POWER(NISLANDS_DPM2_SQ_RAMP_MIN_POWER);
2580 			sq_power_throttle2 |= MAX_POWER_DELTA(NISLANDS_DPM2_SQ_RAMP_MAX_POWER_DELTA);
2581 			sq_power_throttle2 |= STI_SIZE(NISLANDS_DPM2_SQ_RAMP_STI_SIZE);
2582 			sq_power_throttle2 |= LTI_RATIO(NISLANDS_DPM2_SQ_RAMP_LTI_RATIO);
2583 		} else {
2584 			sq_power_throttle |= MAX_POWER_MASK | MIN_POWER_MASK;
2585 			sq_power_throttle2 |= MAX_POWER_DELTA_MASK | STI_SIZE_MASK | LTI_RATIO_MASK;
2586 		}
2587 
2588 		smc_state->levels[i].SQPowerThrottle   = cpu_to_be32(sq_power_throttle);
2589 		smc_state->levels[i].SQPowerThrottle_2 = cpu_to_be32(sq_power_throttle2);
2590 	}
2591 
2592 	return 0;
2593 }
2594 
2595 static int ni_enable_power_containment(struct radeon_device *rdev,
2596 				       struct radeon_ps *radeon_new_state,
2597 				       bool enable)
2598 {
2599         struct ni_power_info *ni_pi = ni_get_pi(rdev);
2600 	PPSMC_Result smc_result;
2601 	int ret = 0;
2602 
2603 	if (ni_pi->enable_power_containment) {
2604 		if (enable) {
2605 			if (!r600_is_uvd_state(radeon_new_state->class, radeon_new_state->class2)) {
2606 				smc_result = rv770_send_msg_to_smc(rdev, PPSMC_TDPClampingActive);
2607 				if (smc_result != PPSMC_Result_OK) {
2608 					ret = -EINVAL;
2609 					ni_pi->pc_enabled = false;
2610 				} else {
2611 					ni_pi->pc_enabled = true;
2612 				}
2613 			}
2614 		} else {
2615 			smc_result = rv770_send_msg_to_smc(rdev, PPSMC_TDPClampingInactive);
2616 			if (smc_result != PPSMC_Result_OK)
2617 				ret = -EINVAL;
2618 			ni_pi->pc_enabled = false;
2619 		}
2620 	}
2621 
2622 	return ret;
2623 }
2624 
/*
 * ni_convert_power_state_to_smc - translate a full driver power state
 * into an SMC software state: per-level hardware settings, ARB/AC
 * timing indices, display watermarks, sampling periods, DPM2 power
 * containment, SQ ramping, and the aT ramp intervals.
 *
 * Returns 0 on success, -EINVAL if the state has more levels than the
 * SMC supports, or the first error from a conversion step.
 */
static int ni_convert_power_state_to_smc(struct radeon_device *rdev,
					 struct radeon_ps *radeon_state,
					 NISLANDS_SMC_SWSTATE *smc_state)
{
        struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	struct ni_power_info *ni_pi = ni_get_pi(rdev);
	struct ni_ps *state = ni_get_ps(radeon_state);
	int i, ret;
	/* NOTE(review): the "* 100 / 100" leaves threshold equal to the
	 * top level's sclk — looks like a placeholder percentage scale. */
	u32 threshold = state->performance_levels[state->performance_level_count - 1].sclk * 100 / 100;

	if (!(radeon_state->caps & ATOM_PPLIB_DISALLOW_ON_DC))
		smc_state->flags |= PPSMC_SWSTATE_FLAG_DC;

	smc_state->levelCount = 0;

	if (state->performance_level_count > NISLANDS_MAX_SMC_PERFORMANCE_LEVELS_PER_SWSTATE)
		return -EINVAL;

	for (i = 0; i < state->performance_level_count; i++) {
		ret = ni_convert_power_level_to_smc(rdev, &state->performance_levels[i],
						    &smc_state->levels[i]);
		/* Each driver-state level gets its own ARB refresh slot. */
		smc_state->levels[i].arbRefreshState =
			(u8)(NISLANDS_DRIVER_STATE_ARB_INDEX + i);

		if (ret)
			return ret;

		if (ni_pi->enable_power_containment)
			smc_state->levels[i].displayWatermark =
				(state->performance_levels[i].sclk < threshold) ?
				PPSMC_DISPLAY_WATERMARK_LOW : PPSMC_DISPLAY_WATERMARK_HIGH;
		else
			smc_state->levels[i].displayWatermark = (i < 2) ?
				PPSMC_DISPLAY_WATERMARK_LOW : PPSMC_DISPLAY_WATERMARK_HIGH;

		/* With dynamic AC timing, each level points at its own MC
		 * register set slot; otherwise use set 0. */
		if (eg_pi->dynamic_ac_timing)
			smc_state->levels[i].ACIndex = NISLANDS_MCREGISTERTABLE_FIRST_DRIVERSTATE_SLOT + i;
		else
			smc_state->levels[i].ACIndex = 0;

		smc_state->levelCount++;
	}

	rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_watermark_threshold,
				      cpu_to_be32(threshold / 512));

	ni_populate_smc_sp(rdev, radeon_state, smc_state);

	/* Containment/ramping failures degrade gracefully: disable the
	 * feature and carry on rather than failing the state switch. */
	ret = ni_populate_power_containment_values(rdev, radeon_state, smc_state);
	if (ret)
		ni_pi->enable_power_containment = false;

	ret = ni_populate_sq_ramping_values(rdev, radeon_state, smc_state);
	if (ret)
		ni_pi->enable_sq_ramping = false;

	return ni_populate_smc_t(rdev, radeon_state, smc_state);
}
2683 
2684 static int ni_upload_sw_state(struct radeon_device *rdev,
2685 			      struct radeon_ps *radeon_new_state)
2686 {
2687 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2688 	u16 address = pi->state_table_start +
2689 		offsetof(NISLANDS_SMC_STATETABLE, driverState);
2690 	u16 state_size = sizeof(NISLANDS_SMC_SWSTATE) +
2691 		((NISLANDS_MAX_SMC_PERFORMANCE_LEVELS_PER_SWSTATE - 1) * sizeof(NISLANDS_SMC_HW_PERFORMANCE_LEVEL));
2692 	int ret;
2693 	NISLANDS_SMC_SWSTATE *smc_state = kzalloc(state_size, GFP_KERNEL);
2694 
2695 	if (smc_state == NULL)
2696 		return -ENOMEM;
2697 
2698 	ret = ni_convert_power_state_to_smc(rdev, radeon_new_state, smc_state);
2699 	if (ret)
2700 		goto done;
2701 
2702 	ret = rv770_copy_bytes_to_smc(rdev, address, (u8 *)smc_state, state_size, pi->sram_end);
2703 
2704 done:
2705 	kfree(smc_state);
2706 
2707 	return ret;
2708 }
2709 
2710 static int ni_set_mc_special_registers(struct radeon_device *rdev,
2711 				       struct ni_mc_reg_table *table)
2712 {
2713 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2714 	u8 i, j, k;
2715 	u32 temp_reg;
2716 
2717 	for (i = 0, j = table->last; i < table->last; i++) {
2718 		switch (table->mc_reg_address[i].s1) {
2719 		case MC_SEQ_MISC1 >> 2:
2720 			if (j >= SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE)
2721 				return -EINVAL;
2722 			temp_reg = RREG32(MC_PMG_CMD_EMRS);
2723 			table->mc_reg_address[j].s1 = MC_PMG_CMD_EMRS >> 2;
2724 			table->mc_reg_address[j].s0 = MC_SEQ_PMG_CMD_EMRS_LP >> 2;
2725 			for (k = 0; k < table->num_entries; k++)
2726 				table->mc_reg_table_entry[k].mc_data[j] =
2727 					((temp_reg & 0xffff0000)) |
2728 					((table->mc_reg_table_entry[k].mc_data[i] & 0xffff0000) >> 16);
2729 			j++;
2730 			if (j >= SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE)
2731 				return -EINVAL;
2732 
2733 			temp_reg = RREG32(MC_PMG_CMD_MRS);
2734 			table->mc_reg_address[j].s1 = MC_PMG_CMD_MRS >> 2;
2735 			table->mc_reg_address[j].s0 = MC_SEQ_PMG_CMD_MRS_LP >> 2;
2736 			for(k = 0; k < table->num_entries; k++) {
2737 				table->mc_reg_table_entry[k].mc_data[j] =
2738 					(temp_reg & 0xffff0000) |
2739 					(table->mc_reg_table_entry[k].mc_data[i] & 0x0000ffff);
2740 				if (!pi->mem_gddr5)
2741 					table->mc_reg_table_entry[k].mc_data[j] |= 0x100;
2742 			}
2743 			j++;
2744 			if (j > SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE)
2745 				return -EINVAL;
2746 			break;
2747 		case MC_SEQ_RESERVE_M >> 2:
2748 			temp_reg = RREG32(MC_PMG_CMD_MRS1);
2749 			table->mc_reg_address[j].s1 = MC_PMG_CMD_MRS1 >> 2;
2750 			table->mc_reg_address[j].s0 = MC_SEQ_PMG_CMD_MRS1_LP >> 2;
2751 			for (k = 0; k < table->num_entries; k++)
2752 				table->mc_reg_table_entry[k].mc_data[j] =
2753 					(temp_reg & 0xffff0000) |
2754 					(table->mc_reg_table_entry[k].mc_data[i] & 0x0000ffff);
2755 			j++;
2756 			if (j > SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE)
2757 				return -EINVAL;
2758 			break;
2759 		default:
2760 			break;
2761 		}
2762 	}
2763 
2764 	table->last = j;
2765 
2766 	return 0;
2767 }
2768 
2769 static bool ni_check_s0_mc_reg_index(u16 in_reg, u16 *out_reg)
2770 {
2771 	bool result = true;
2772 
2773 	switch (in_reg) {
2774         case  MC_SEQ_RAS_TIMING >> 2:
2775 		*out_reg = MC_SEQ_RAS_TIMING_LP >> 2;
2776 		break;
2777         case MC_SEQ_CAS_TIMING >> 2:
2778 		*out_reg = MC_SEQ_CAS_TIMING_LP >> 2;
2779 		break;
2780         case MC_SEQ_MISC_TIMING >> 2:
2781 		*out_reg = MC_SEQ_MISC_TIMING_LP >> 2;
2782 		break;
2783         case MC_SEQ_MISC_TIMING2 >> 2:
2784 		*out_reg = MC_SEQ_MISC_TIMING2_LP >> 2;
2785 		break;
2786         case MC_SEQ_RD_CTL_D0 >> 2:
2787 		*out_reg = MC_SEQ_RD_CTL_D0_LP >> 2;
2788 		break;
2789         case MC_SEQ_RD_CTL_D1 >> 2:
2790 		*out_reg = MC_SEQ_RD_CTL_D1_LP >> 2;
2791 		break;
2792         case MC_SEQ_WR_CTL_D0 >> 2:
2793 		*out_reg = MC_SEQ_WR_CTL_D0_LP >> 2;
2794 		break;
2795         case MC_SEQ_WR_CTL_D1 >> 2:
2796 		*out_reg = MC_SEQ_WR_CTL_D1_LP >> 2;
2797 		break;
2798         case MC_PMG_CMD_EMRS >> 2:
2799 		*out_reg = MC_SEQ_PMG_CMD_EMRS_LP >> 2;
2800 		break;
2801         case MC_PMG_CMD_MRS >> 2:
2802 		*out_reg = MC_SEQ_PMG_CMD_MRS_LP >> 2;
2803 		break;
2804         case MC_PMG_CMD_MRS1 >> 2:
2805 		*out_reg = MC_SEQ_PMG_CMD_MRS1_LP >> 2;
2806 		break;
2807         case MC_SEQ_PMG_TIMING >> 2:
2808 		*out_reg = MC_SEQ_PMG_TIMING_LP >> 2;
2809 		break;
2810         case MC_PMG_CMD_MRS2 >> 2:
2811 		*out_reg = MC_SEQ_PMG_CMD_MRS2_LP >> 2;
2812 		break;
2813         default:
2814 		result = false;
2815 		break;
2816 	}
2817 
2818 	return result;
2819 }
2820 
2821 static void ni_set_valid_flag(struct ni_mc_reg_table *table)
2822 {
2823 	u8 i, j;
2824 
2825 	for (i = 0; i < table->last; i++) {
2826 		for (j = 1; j < table->num_entries; j++) {
2827 			if (table->mc_reg_table_entry[j-1].mc_data[i] != table->mc_reg_table_entry[j].mc_data[i]) {
2828 				table->valid_flag |= 1 << i;
2829 				break;
2830 			}
2831 		}
2832 	}
2833 }
2834 
2835 static void ni_set_s0_mc_reg_index(struct ni_mc_reg_table *table)
2836 {
2837 	u32 i;
2838 	u16 address;
2839 
2840 	for (i = 0; i < table->last; i++)
2841 		table->mc_reg_address[i].s0 =
2842 			ni_check_s0_mc_reg_index(table->mc_reg_address[i].s1, &address) ?
2843 			address : table->mc_reg_address[i].s1;
2844 }
2845 
2846 static int ni_copy_vbios_mc_reg_table(struct atom_mc_reg_table *table,
2847 				      struct ni_mc_reg_table *ni_table)
2848 {
2849 	u8 i, j;
2850 
2851 	if (table->last > SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE)
2852 		return -EINVAL;
2853 	if (table->num_entries > MAX_AC_TIMING_ENTRIES)
2854 		return -EINVAL;
2855 
2856 	for (i = 0; i < table->last; i++)
2857 		ni_table->mc_reg_address[i].s1 = table->mc_reg_address[i].s1;
2858 	ni_table->last = table->last;
2859 
2860 	for (i = 0; i < table->num_entries; i++) {
2861 		ni_table->mc_reg_table_entry[i].mclk_max =
2862 			table->mc_reg_table_entry[i].mclk_max;
2863 		for (j = 0; j < table->last; j++)
2864 			ni_table->mc_reg_table_entry[i].mc_data[j] =
2865 				table->mc_reg_table_entry[i].mc_data[j];
2866 	}
2867 	ni_table->num_entries = table->num_entries;
2868 
2869 	return 0;
2870 }
2871 
2872 static int ni_initialize_mc_reg_table(struct radeon_device *rdev)
2873 {
2874 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
2875 	int ret;
2876 	struct atom_mc_reg_table *table;
2877 	struct ni_mc_reg_table *ni_table = &ni_pi->mc_reg_table;
2878 	u8 module_index = rv770_get_memory_module_index(rdev);
2879 
2880         table = kzalloc(sizeof(struct atom_mc_reg_table), GFP_KERNEL);
2881         if (!table)
2882                 return -ENOMEM;
2883 
2884 	WREG32(MC_SEQ_RAS_TIMING_LP, RREG32(MC_SEQ_RAS_TIMING));
2885 	WREG32(MC_SEQ_CAS_TIMING_LP, RREG32(MC_SEQ_CAS_TIMING));
2886 	WREG32(MC_SEQ_MISC_TIMING_LP, RREG32(MC_SEQ_MISC_TIMING));
2887 	WREG32(MC_SEQ_MISC_TIMING2_LP, RREG32(MC_SEQ_MISC_TIMING2));
2888 	WREG32(MC_SEQ_PMG_CMD_EMRS_LP, RREG32(MC_PMG_CMD_EMRS));
2889 	WREG32(MC_SEQ_PMG_CMD_MRS_LP, RREG32(MC_PMG_CMD_MRS));
2890 	WREG32(MC_SEQ_PMG_CMD_MRS1_LP, RREG32(MC_PMG_CMD_MRS1));
2891 	WREG32(MC_SEQ_WR_CTL_D0_LP, RREG32(MC_SEQ_WR_CTL_D0));
2892 	WREG32(MC_SEQ_WR_CTL_D1_LP, RREG32(MC_SEQ_WR_CTL_D1));
2893 	WREG32(MC_SEQ_RD_CTL_D0_LP, RREG32(MC_SEQ_RD_CTL_D0));
2894 	WREG32(MC_SEQ_RD_CTL_D1_LP, RREG32(MC_SEQ_RD_CTL_D1));
2895 	WREG32(MC_SEQ_PMG_TIMING_LP, RREG32(MC_SEQ_PMG_TIMING));
2896 	WREG32(MC_SEQ_PMG_CMD_MRS2_LP, RREG32(MC_PMG_CMD_MRS2));
2897 
2898 	ret = radeon_atom_init_mc_reg_table(rdev, module_index, table);
2899 
2900         if (ret)
2901                 goto init_mc_done;
2902 
2903 	ret = ni_copy_vbios_mc_reg_table(table, ni_table);
2904 
2905         if (ret)
2906                 goto init_mc_done;
2907 
2908 	ni_set_s0_mc_reg_index(ni_table);
2909 
2910 	ret = ni_set_mc_special_registers(rdev, ni_table);
2911 
2912         if (ret)
2913                 goto init_mc_done;
2914 
2915 	ni_set_valid_flag(ni_table);
2916 
2917 init_mc_done:
2918         kfree(table);
2919 
2920 	return ret;
2921 }
2922 
2923 static void ni_populate_mc_reg_addresses(struct radeon_device *rdev,
2924 					 SMC_NIslands_MCRegisters *mc_reg_table)
2925 {
2926 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
2927 	u32 i, j;
2928 
2929 	for (i = 0, j = 0; j < ni_pi->mc_reg_table.last; j++) {
2930 		if (ni_pi->mc_reg_table.valid_flag & (1 << j)) {
2931 			if (i >= SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE)
2932 				break;
2933 			mc_reg_table->address[i].s0 =
2934 				cpu_to_be16(ni_pi->mc_reg_table.mc_reg_address[j].s0);
2935 			mc_reg_table->address[i].s1 =
2936 				cpu_to_be16(ni_pi->mc_reg_table.mc_reg_address[j].s1);
2937 			i++;
2938 		}
2939 	}
2940 	mc_reg_table->last = (u8)i;
2941 }
2942 
2943 
2944 static void ni_convert_mc_registers(struct ni_mc_reg_entry *entry,
2945 				    SMC_NIslands_MCRegisterSet *data,
2946 				    u32 num_entries, u32 valid_flag)
2947 {
2948 	u32 i, j;
2949 
2950 	for (i = 0, j = 0; j < num_entries; j++) {
2951 		if (valid_flag & (1 << j)) {
2952 			data->value[i] = cpu_to_be32(entry->mc_data[j]);
2953 			i++;
2954 		}
2955 	}
2956 }
2957 
2958 static void ni_convert_mc_reg_table_entry_to_smc(struct radeon_device *rdev,
2959 						 struct rv7xx_pl *pl,
2960 						 SMC_NIslands_MCRegisterSet *mc_reg_table_data)
2961 {
2962 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
2963 	u32 i = 0;
2964 
2965 	for (i = 0; i < ni_pi->mc_reg_table.num_entries; i++) {
2966 		if (pl->mclk <= ni_pi->mc_reg_table.mc_reg_table_entry[i].mclk_max)
2967 			break;
2968 	}
2969 
2970 	if ((i == ni_pi->mc_reg_table.num_entries) && (i > 0))
2971 		--i;
2972 
2973 	ni_convert_mc_registers(&ni_pi->mc_reg_table.mc_reg_table_entry[i],
2974 				mc_reg_table_data,
2975 				ni_pi->mc_reg_table.last,
2976 				ni_pi->mc_reg_table.valid_flag);
2977 }
2978 
2979 static void ni_convert_mc_reg_table_to_smc(struct radeon_device *rdev,
2980 					   struct radeon_ps *radeon_state,
2981 					   SMC_NIslands_MCRegisters *mc_reg_table)
2982 {
2983 	struct ni_ps *state = ni_get_ps(radeon_state);
2984 	int i;
2985 
2986 	for (i = 0; i < state->performance_level_count; i++) {
2987 		ni_convert_mc_reg_table_entry_to_smc(rdev,
2988 						     &state->performance_levels[i],
2989 						     &mc_reg_table->data[NISLANDS_MCREGISTERTABLE_FIRST_DRIVERSTATE_SLOT + i]);
2990 	}
2991 }
2992 
2993 static int ni_populate_mc_reg_table(struct radeon_device *rdev,
2994 				    struct radeon_ps *radeon_boot_state)
2995 {
2996 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2997 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
2998         struct ni_power_info *ni_pi = ni_get_pi(rdev);
2999 	struct ni_ps *boot_state = ni_get_ps(radeon_boot_state);
3000 	SMC_NIslands_MCRegisters *mc_reg_table = &ni_pi->smc_mc_reg_table;
3001 
3002 	memset(mc_reg_table, 0, sizeof(SMC_NIslands_MCRegisters));
3003 
3004 	rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_seq_index, 1);
3005 
3006 	ni_populate_mc_reg_addresses(rdev, mc_reg_table);
3007 
3008 	ni_convert_mc_reg_table_entry_to_smc(rdev, &boot_state->performance_levels[0],
3009 					     &mc_reg_table->data[0]);
3010 
3011 	ni_convert_mc_registers(&ni_pi->mc_reg_table.mc_reg_table_entry[0],
3012 				&mc_reg_table->data[1],
3013 				ni_pi->mc_reg_table.last,
3014 				ni_pi->mc_reg_table.valid_flag);
3015 
3016 	ni_convert_mc_reg_table_to_smc(rdev, radeon_boot_state, mc_reg_table);
3017 
3018 	return rv770_copy_bytes_to_smc(rdev, eg_pi->mc_reg_table_start,
3019 				       (u8 *)mc_reg_table,
3020 				       sizeof(SMC_NIslands_MCRegisters),
3021 				       pi->sram_end);
3022 }
3023 
3024 static int ni_upload_mc_reg_table(struct radeon_device *rdev,
3025 				  struct radeon_ps *radeon_new_state)
3026 {
3027 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
3028 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3029         struct ni_power_info *ni_pi = ni_get_pi(rdev);
3030 	struct ni_ps *ni_new_state = ni_get_ps(radeon_new_state);
3031 	SMC_NIslands_MCRegisters *mc_reg_table = &ni_pi->smc_mc_reg_table;
3032 	u16 address;
3033 
3034 	memset(mc_reg_table, 0, sizeof(SMC_NIslands_MCRegisters));
3035 
3036 	ni_convert_mc_reg_table_to_smc(rdev, radeon_new_state, mc_reg_table);
3037 
3038 	address = eg_pi->mc_reg_table_start +
3039 		(u16)offsetof(SMC_NIslands_MCRegisters, data[NISLANDS_MCREGISTERTABLE_FIRST_DRIVERSTATE_SLOT]);
3040 
3041 	return rv770_copy_bytes_to_smc(rdev, address,
3042 				       (u8 *)&mc_reg_table->data[NISLANDS_MCREGISTERTABLE_FIRST_DRIVERSTATE_SLOT],
3043 				       sizeof(SMC_NIslands_MCRegisterSet) * ni_new_state->performance_level_count,
3044 				       pi->sram_end);
3045 }
3046 
3047 static int ni_init_driver_calculated_leakage_table(struct radeon_device *rdev,
3048 						   PP_NIslands_CACTABLES *cac_tables)
3049 {
3050 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
3051 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3052 	u32 leakage = 0;
3053 	unsigned int i, j, table_size;
3054 	s32 t;
3055 	u32 smc_leakage, max_leakage = 0;
3056 	u32 scaling_factor;
3057 
3058 	table_size = eg_pi->vddc_voltage_table.count;
3059 
3060 	if (SMC_NISLANDS_LKGE_LUT_NUM_OF_VOLT_ENTRIES < table_size)
3061 		table_size = SMC_NISLANDS_LKGE_LUT_NUM_OF_VOLT_ENTRIES;
3062 
3063 	scaling_factor = ni_get_smc_power_scaling_factor(rdev);
3064 
3065 	for (i = 0; i < SMC_NISLANDS_LKGE_LUT_NUM_OF_TEMP_ENTRIES; i++) {
3066 		for (j = 0; j < table_size; j++) {
3067 			t = (1000 * ((i + 1) * 8));
3068 
3069 			if (t < ni_pi->cac_data.leakage_minimum_temperature)
3070 				t = ni_pi->cac_data.leakage_minimum_temperature;
3071 
3072 			ni_calculate_leakage_for_v_and_t(rdev,
3073 							 &ni_pi->cac_data.leakage_coefficients,
3074 							 eg_pi->vddc_voltage_table.entries[j].value,
3075 							 t,
3076 							 ni_pi->cac_data.i_leakage,
3077 							 &leakage);
3078 
3079 			smc_leakage = ni_scale_power_for_smc(leakage, scaling_factor) / 1000;
3080 			if (smc_leakage > max_leakage)
3081 				max_leakage = smc_leakage;
3082 
3083 			cac_tables->cac_lkge_lut[i][j] = cpu_to_be32(smc_leakage);
3084 		}
3085 	}
3086 
3087 	for (j = table_size; j < SMC_NISLANDS_LKGE_LUT_NUM_OF_VOLT_ENTRIES; j++) {
3088 		for (i = 0; i < SMC_NISLANDS_LKGE_LUT_NUM_OF_TEMP_ENTRIES; i++)
3089 			cac_tables->cac_lkge_lut[i][j] = cpu_to_be32(max_leakage);
3090 	}
3091 	return 0;
3092 }
3093 
3094 static int ni_init_simplified_leakage_table(struct radeon_device *rdev,
3095 					    PP_NIslands_CACTABLES *cac_tables)
3096 {
3097 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3098 	struct radeon_cac_leakage_table *leakage_table =
3099 		&rdev->pm.dpm.dyn_state.cac_leakage_table;
3100 	u32 i, j, table_size;
3101 	u32 smc_leakage, max_leakage = 0;
3102 	u32 scaling_factor;
3103 
3104 	if (!leakage_table)
3105 		return -EINVAL;
3106 
3107 	table_size = leakage_table->count;
3108 
3109 	if (eg_pi->vddc_voltage_table.count != table_size)
3110 		table_size = (eg_pi->vddc_voltage_table.count < leakage_table->count) ?
3111 			eg_pi->vddc_voltage_table.count : leakage_table->count;
3112 
3113 	if (SMC_NISLANDS_LKGE_LUT_NUM_OF_VOLT_ENTRIES < table_size)
3114 		table_size = SMC_NISLANDS_LKGE_LUT_NUM_OF_VOLT_ENTRIES;
3115 
3116 	if (table_size == 0)
3117 		return -EINVAL;
3118 
3119 	scaling_factor = ni_get_smc_power_scaling_factor(rdev);
3120 
3121 	for (j = 0; j < table_size; j++) {
3122 		smc_leakage = leakage_table->entries[j].leakage;
3123 
3124 		if (smc_leakage > max_leakage)
3125 			max_leakage = smc_leakage;
3126 
3127 		for (i = 0; i < SMC_NISLANDS_LKGE_LUT_NUM_OF_TEMP_ENTRIES; i++)
3128 			cac_tables->cac_lkge_lut[i][j] =
3129 				cpu_to_be32(ni_scale_power_for_smc(smc_leakage, scaling_factor));
3130 	}
3131 
3132 	for (j = table_size; j < SMC_NISLANDS_LKGE_LUT_NUM_OF_VOLT_ENTRIES; j++) {
3133 		for (i = 0; i < SMC_NISLANDS_LKGE_LUT_NUM_OF_TEMP_ENTRIES; i++)
3134 			cac_tables->cac_lkge_lut[i][j] =
3135 				cpu_to_be32(ni_scale_power_for_smc(max_leakage, scaling_factor));
3136 	}
3137 	return 0;
3138 }
3139 
/*
 * Program the CAC (capacitance/leakage) weights and upload the CAC
 * tables to SMC RAM.  On any failure, CAC and power containment are
 * simply disabled and 0 is still returned — CAC is best-effort and its
 * absence must not abort DPM bring-up.
 */
static int ni_initialize_smc_cac_tables(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct ni_power_info *ni_pi = ni_get_pi(rdev);
	PP_NIslands_CACTABLES *cac_tables = NULL;
	int i, ret;
        u32 reg;

	if (ni_pi->enable_cac == false)
		return 0;

	cac_tables = kzalloc(sizeof(PP_NIslands_CACTABLES), GFP_KERNEL);
	if (!cac_tables)
		return -ENOMEM;

	/* Program the timer ID count/unit fields of the CAC controller. */
	reg = RREG32(CG_CAC_CTRL) & ~(TID_CNT_MASK | TID_UNIT_MASK);
	reg |= (TID_CNT(ni_pi->cac_weights->tid_cnt) |
		TID_UNIT(ni_pi->cac_weights->tid_unit));
	WREG32(CG_CAC_CTRL, reg);

	/* Cache the per-level DC CAC values from the ASIC weight table. */
	for (i = 0; i < NISLANDS_DCCAC_MAX_LEVELS; i++)
		ni_pi->dc_cac_table[i] = ni_pi->cac_weights->dc_cac[i];

	for (i = 0; i < SMC_NISLANDS_BIF_LUT_NUM_OF_ENTRIES; i++)
		cac_tables->cac_bif_lut[i] = ni_pi->cac_weights->pcie_cac[i];

	ni_pi->cac_data.i_leakage = rdev->pm.dpm.cac_leakage;
	ni_pi->cac_data.pwr_const = 0;
	ni_pi->cac_data.dc_cac_value = ni_pi->dc_cac_table[NISLANDS_DCCAC_LEVEL_0];
	ni_pi->cac_data.bif_cac_value = 0;
	ni_pi->cac_data.mc_wr_weight = ni_pi->cac_weights->mc_write_weight;
	ni_pi->cac_data.mc_rd_weight = ni_pi->cac_weights->mc_read_weight;
	ni_pi->cac_data.allow_ovrflw = 0;
	ni_pi->cac_data.l2num_win_tdp = ni_pi->lta_window_size;
	ni_pi->cac_data.num_win_tdp = 0;
	ni_pi->cac_data.lts_truncate_n = ni_pi->lts_truncate;

	/* Leakage LUT comes either from driver-side calculation or from
	 * the simplified platform-provided table.
	 */
	if (ni_pi->driver_calculate_cac_leakage)
		ret = ni_init_driver_calculated_leakage_table(rdev, cac_tables);
	else
		ret = ni_init_simplified_leakage_table(rdev, cac_tables);

	if (ret)
		goto done_free;

	cac_tables->pwr_const      = cpu_to_be32(ni_pi->cac_data.pwr_const);
	cac_tables->dc_cacValue    = cpu_to_be32(ni_pi->cac_data.dc_cac_value);
	cac_tables->bif_cacValue   = cpu_to_be32(ni_pi->cac_data.bif_cac_value);
	cac_tables->AllowOvrflw    = ni_pi->cac_data.allow_ovrflw;
	cac_tables->MCWrWeight     = ni_pi->cac_data.mc_wr_weight;
	cac_tables->MCRdWeight     = ni_pi->cac_data.mc_rd_weight;
	cac_tables->numWin_TDP     = ni_pi->cac_data.num_win_tdp;
	cac_tables->l2numWin_TDP   = ni_pi->cac_data.l2num_win_tdp;
	cac_tables->lts_truncate_n = ni_pi->cac_data.lts_truncate_n;

	ret = rv770_copy_bytes_to_smc(rdev, ni_pi->cac_table_start, (u8 *)cac_tables,
				      sizeof(PP_NIslands_CACTABLES), pi->sram_end);

done_free:
	/* Deliberate: errors disable CAC features but do not propagate. */
	if (ret) {
		ni_pi->enable_cac = false;
		ni_pi->enable_power_containment = false;
	}

	kfree(cac_tables);

	return 0;
}
3208 
/*
 * Program the per-block CAC weight registers (regions 1-5), the SQ CAC
 * thresholds, and the MC CAC weights from the ASIC-specific weight
 * table.  Every write is a read-modify-write that only touches the
 * weight fields.  Returns 0 on success, -EINVAL if no weight table was
 * selected for this ASIC.
 */
static int ni_initialize_hardware_cac_manager(struct radeon_device *rdev)
{
	struct ni_power_info *ni_pi = ni_get_pi(rdev);
	u32 reg;

	if (!ni_pi->enable_cac ||
	    !ni_pi->cac_configuration_required)
		return 0;

	if (ni_pi->cac_weights == NULL)
		return -EINVAL;

	/* Region 1: texture cache / texture addresser weights. */
	reg = RREG32_CG(CG_CAC_REGION_1_WEIGHT_0) & ~(WEIGHT_TCP_SIG0_MASK |
						      WEIGHT_TCP_SIG1_MASK |
						      WEIGHT_TA_SIG_MASK);
	reg |= (WEIGHT_TCP_SIG0(ni_pi->cac_weights->weight_tcp_sig0) |
		WEIGHT_TCP_SIG1(ni_pi->cac_weights->weight_tcp_sig1) |
		WEIGHT_TA_SIG(ni_pi->cac_weights->weight_ta_sig));
	WREG32_CG(CG_CAC_REGION_1_WEIGHT_0, reg);

	reg = RREG32_CG(CG_CAC_REGION_1_WEIGHT_1) & ~(WEIGHT_TCC_EN0_MASK |
						      WEIGHT_TCC_EN1_MASK |
						      WEIGHT_TCC_EN2_MASK);
	reg |= (WEIGHT_TCC_EN0(ni_pi->cac_weights->weight_tcc_en0) |
		WEIGHT_TCC_EN1(ni_pi->cac_weights->weight_tcc_en1) |
		WEIGHT_TCC_EN2(ni_pi->cac_weights->weight_tcc_en2));
	WREG32_CG(CG_CAC_REGION_1_WEIGHT_1, reg);

	/* Region 2: color/depth blocks, shader export, crossbar. */
	reg = RREG32_CG(CG_CAC_REGION_2_WEIGHT_0) & ~(WEIGHT_CB_EN0_MASK |
						      WEIGHT_CB_EN1_MASK |
						      WEIGHT_CB_EN2_MASK |
						      WEIGHT_CB_EN3_MASK);
	reg |= (WEIGHT_CB_EN0(ni_pi->cac_weights->weight_cb_en0) |
		WEIGHT_CB_EN1(ni_pi->cac_weights->weight_cb_en1) |
		WEIGHT_CB_EN2(ni_pi->cac_weights->weight_cb_en2) |
		WEIGHT_CB_EN3(ni_pi->cac_weights->weight_cb_en3));
	WREG32_CG(CG_CAC_REGION_2_WEIGHT_0, reg);

	reg = RREG32_CG(CG_CAC_REGION_2_WEIGHT_1) & ~(WEIGHT_DB_SIG0_MASK |
						      WEIGHT_DB_SIG1_MASK |
						      WEIGHT_DB_SIG2_MASK |
						      WEIGHT_DB_SIG3_MASK);
	reg |= (WEIGHT_DB_SIG0(ni_pi->cac_weights->weight_db_sig0) |
		WEIGHT_DB_SIG1(ni_pi->cac_weights->weight_db_sig1) |
		WEIGHT_DB_SIG2(ni_pi->cac_weights->weight_db_sig2) |
		WEIGHT_DB_SIG3(ni_pi->cac_weights->weight_db_sig3));
	WREG32_CG(CG_CAC_REGION_2_WEIGHT_1, reg);

	reg = RREG32_CG(CG_CAC_REGION_2_WEIGHT_2) & ~(WEIGHT_SXM_SIG0_MASK |
						      WEIGHT_SXM_SIG1_MASK |
						      WEIGHT_SXM_SIG2_MASK |
						      WEIGHT_SXS_SIG0_MASK |
						      WEIGHT_SXS_SIG1_MASK);
	reg |= (WEIGHT_SXM_SIG0(ni_pi->cac_weights->weight_sxm_sig0) |
		WEIGHT_SXM_SIG1(ni_pi->cac_weights->weight_sxm_sig1) |
		WEIGHT_SXM_SIG2(ni_pi->cac_weights->weight_sxm_sig2) |
		WEIGHT_SXS_SIG0(ni_pi->cac_weights->weight_sxs_sig0) |
		WEIGHT_SXS_SIG1(ni_pi->cac_weights->weight_sxs_sig1));
	WREG32_CG(CG_CAC_REGION_2_WEIGHT_2, reg);

	/* Region 3: crossbar and SPI weights. */
	reg = RREG32_CG(CG_CAC_REGION_3_WEIGHT_0) & ~(WEIGHT_XBR_0_MASK |
						      WEIGHT_XBR_1_MASK |
						      WEIGHT_XBR_2_MASK |
						      WEIGHT_SPI_SIG0_MASK);
	reg |= (WEIGHT_XBR_0(ni_pi->cac_weights->weight_xbr_0) |
		WEIGHT_XBR_1(ni_pi->cac_weights->weight_xbr_1) |
		WEIGHT_XBR_2(ni_pi->cac_weights->weight_xbr_2) |
		WEIGHT_SPI_SIG0(ni_pi->cac_weights->weight_spi_sig0));
	WREG32_CG(CG_CAC_REGION_3_WEIGHT_0, reg);

	reg = RREG32_CG(CG_CAC_REGION_3_WEIGHT_1) & ~(WEIGHT_SPI_SIG1_MASK |
						      WEIGHT_SPI_SIG2_MASK |
						      WEIGHT_SPI_SIG3_MASK |
						      WEIGHT_SPI_SIG4_MASK |
						      WEIGHT_SPI_SIG5_MASK);
	reg |= (WEIGHT_SPI_SIG1(ni_pi->cac_weights->weight_spi_sig1) |
		WEIGHT_SPI_SIG2(ni_pi->cac_weights->weight_spi_sig2) |
		WEIGHT_SPI_SIG3(ni_pi->cac_weights->weight_spi_sig3) |
		WEIGHT_SPI_SIG4(ni_pi->cac_weights->weight_spi_sig4) |
		WEIGHT_SPI_SIG5(ni_pi->cac_weights->weight_spi_sig5));
	WREG32_CG(CG_CAC_REGION_3_WEIGHT_1, reg);

	/* Region 4: LDS, SC, BIF, CP, PA, VGT, DC, UVD and spares. */
	reg = RREG32_CG(CG_CAC_REGION_4_WEIGHT_0) & ~(WEIGHT_LDS_SIG0_MASK |
						      WEIGHT_LDS_SIG1_MASK |
						      WEIGHT_SC_MASK);
	reg |= (WEIGHT_LDS_SIG0(ni_pi->cac_weights->weight_lds_sig0) |
		WEIGHT_LDS_SIG1(ni_pi->cac_weights->weight_lds_sig1) |
		WEIGHT_SC(ni_pi->cac_weights->weight_sc));
	WREG32_CG(CG_CAC_REGION_4_WEIGHT_0, reg);

	reg = RREG32_CG(CG_CAC_REGION_4_WEIGHT_1) & ~(WEIGHT_BIF_MASK |
						      WEIGHT_CP_MASK |
						      WEIGHT_PA_SIG0_MASK |
						      WEIGHT_PA_SIG1_MASK |
						      WEIGHT_VGT_SIG0_MASK);
	reg |= (WEIGHT_BIF(ni_pi->cac_weights->weight_bif) |
		WEIGHT_CP(ni_pi->cac_weights->weight_cp) |
		WEIGHT_PA_SIG0(ni_pi->cac_weights->weight_pa_sig0) |
		WEIGHT_PA_SIG1(ni_pi->cac_weights->weight_pa_sig1) |
		WEIGHT_VGT_SIG0(ni_pi->cac_weights->weight_vgt_sig0));
	WREG32_CG(CG_CAC_REGION_4_WEIGHT_1, reg);

	reg = RREG32_CG(CG_CAC_REGION_4_WEIGHT_2) & ~(WEIGHT_VGT_SIG1_MASK |
						      WEIGHT_VGT_SIG2_MASK |
						      WEIGHT_DC_SIG0_MASK |
						      WEIGHT_DC_SIG1_MASK |
						      WEIGHT_DC_SIG2_MASK);
	reg |= (WEIGHT_VGT_SIG1(ni_pi->cac_weights->weight_vgt_sig1) |
		WEIGHT_VGT_SIG2(ni_pi->cac_weights->weight_vgt_sig2) |
		WEIGHT_DC_SIG0(ni_pi->cac_weights->weight_dc_sig0) |
		WEIGHT_DC_SIG1(ni_pi->cac_weights->weight_dc_sig1) |
		WEIGHT_DC_SIG2(ni_pi->cac_weights->weight_dc_sig2));
	WREG32_CG(CG_CAC_REGION_4_WEIGHT_2, reg);

	reg = RREG32_CG(CG_CAC_REGION_4_WEIGHT_3) & ~(WEIGHT_DC_SIG3_MASK |
						      WEIGHT_UVD_SIG0_MASK |
						      WEIGHT_UVD_SIG1_MASK |
						      WEIGHT_SPARE0_MASK |
						      WEIGHT_SPARE1_MASK);
	reg |= (WEIGHT_DC_SIG3(ni_pi->cac_weights->weight_dc_sig3) |
		WEIGHT_UVD_SIG0(ni_pi->cac_weights->weight_uvd_sig0) |
		WEIGHT_UVD_SIG1(ni_pi->cac_weights->weight_uvd_sig1) |
		WEIGHT_SPARE0(ni_pi->cac_weights->weight_spare0) |
		WEIGHT_SPARE1(ni_pi->cac_weights->weight_spare1));
	WREG32_CG(CG_CAC_REGION_4_WEIGHT_3, reg);

	/* Region 5: SQ weights. */
	reg = RREG32_CG(CG_CAC_REGION_5_WEIGHT_0) & ~(WEIGHT_SQ_VSP_MASK |
						      WEIGHT_SQ_VSP0_MASK);
	reg |= (WEIGHT_SQ_VSP(ni_pi->cac_weights->weight_sq_vsp) |
		WEIGHT_SQ_VSP0(ni_pi->cac_weights->weight_sq_vsp0));
	WREG32_CG(CG_CAC_REGION_5_WEIGHT_0, reg);

	reg = RREG32_CG(CG_CAC_REGION_5_WEIGHT_1) & ~(WEIGHT_SQ_GPR_MASK);
	reg |= WEIGHT_SQ_GPR(ni_pi->cac_weights->weight_sq_gpr);
	WREG32_CG(CG_CAC_REGION_5_WEIGHT_1, reg);

	/* Spare-signal override modes/values for region 4. */
	reg = RREG32_CG(CG_CAC_REGION_4_OVERRIDE_4) & ~(OVR_MODE_SPARE_0_MASK |
							OVR_VAL_SPARE_0_MASK |
							OVR_MODE_SPARE_1_MASK |
							OVR_VAL_SPARE_1_MASK);
	reg |= (OVR_MODE_SPARE_0(ni_pi->cac_weights->ovr_mode_spare_0) |
		OVR_VAL_SPARE_0(ni_pi->cac_weights->ovr_val_spare_0) |
		OVR_MODE_SPARE_1(ni_pi->cac_weights->ovr_mode_spare_1) |
		OVR_VAL_SPARE_1(ni_pi->cac_weights->ovr_val_spare_1));
	WREG32_CG(CG_CAC_REGION_4_OVERRIDE_4, reg);

	/* SQ thresholds. */
	reg = RREG32(SQ_CAC_THRESHOLD) & ~(VSP_MASK |
					   VSP0_MASK |
					   GPR_MASK);
	reg |= (VSP(ni_pi->cac_weights->vsp) |
		VSP0(ni_pi->cac_weights->vsp0) |
		GPR(ni_pi->cac_weights->gpr));
	WREG32(SQ_CAC_THRESHOLD, reg);

	/* Broadcast the MC CAC weights to all MC channels.
	 * NOTE(review): 0x09D4 is the MC-internal index being written —
	 * meaning not derivable from this file; confirm against register
	 * documentation before changing.
	 */
	reg = (MCDW_WR_ENABLE |
	       MCDX_WR_ENABLE |
	       MCDY_WR_ENABLE |
	       MCDZ_WR_ENABLE |
	       INDEX(0x09D4));
	WREG32(MC_CG_CONFIG, reg);

	reg = (READ_WEIGHT(ni_pi->cac_weights->mc_read_weight) |
	       WRITE_WEIGHT(ni_pi->cac_weights->mc_write_weight) |
	       ALLOW_OVERFLOW);
	WREG32(MC_CG_DATAPORT, reg);

	return 0;
}
3377 
/*
 * Enable or disable CAC collection in the SMC.  CAC is never enabled
 * while entering a UVD state.  Long-term-average support is demoted to
 * "unsupported" the first time the SMC rejects the enable/disable
 * message.  Returns -EINVAL only when the EnableCac message itself is
 * rejected; all other SMC failures are absorbed.
 */
static int ni_enable_smc_cac(struct radeon_device *rdev,
			     struct radeon_ps *radeon_new_state,
			     bool enable)
{
	struct ni_power_info *ni_pi = ni_get_pi(rdev);
	int ret = 0;
	PPSMC_Result smc_result;

	if (ni_pi->enable_cac) {
		if (enable) {
			if (!r600_is_uvd_state(radeon_new_state->class, radeon_new_state->class2)) {
				/* Result intentionally ignored: correlation is best-effort. */
				smc_result = rv770_send_msg_to_smc(rdev, PPSMC_MSG_CollectCAC_PowerCorreln);

				if (ni_pi->support_cac_long_term_average) {
					smc_result = rv770_send_msg_to_smc(rdev, PPSMC_CACLongTermAvgEnable);
					if (PPSMC_Result_OK != smc_result)
						ni_pi->support_cac_long_term_average = false;
				}

				smc_result = rv770_send_msg_to_smc(rdev, PPSMC_MSG_EnableCac);
				if (PPSMC_Result_OK != smc_result)
					ret = -EINVAL;

				ni_pi->cac_enabled = (PPSMC_Result_OK == smc_result) ? true : false;
			}
		} else if (ni_pi->cac_enabled) {
			smc_result = rv770_send_msg_to_smc(rdev, PPSMC_MSG_DisableCac);

			ni_pi->cac_enabled = false;

			if (ni_pi->support_cac_long_term_average) {
				smc_result = rv770_send_msg_to_smc(rdev, PPSMC_CACLongTermAvgDisable);
				if (PPSMC_Result_OK != smc_result)
					ni_pi->support_cac_long_term_average = false;
			}
		}
	}

	return ret;
}
3418 
3419 static int ni_pcie_performance_request(struct radeon_device *rdev,
3420 				       u8 perf_req, bool advertise)
3421 {
3422 #if defined(CONFIG_ACPI)
3423 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3424 
3425 	if ((perf_req == PCIE_PERF_REQ_PECI_GEN1) ||
3426             (perf_req == PCIE_PERF_REQ_PECI_GEN2)) {
3427 		if (eg_pi->pcie_performance_request_registered == false)
3428 			radeon_acpi_pcie_notify_device_ready(rdev);
3429 		eg_pi->pcie_performance_request_registered = true;
3430 		return radeon_acpi_pcie_performance_request(rdev, perf_req, advertise);
3431 	} else if ((perf_req == PCIE_PERF_REQ_REMOVE_REGISTRY) &&
3432                    eg_pi->pcie_performance_request_registered) {
3433 		eg_pi->pcie_performance_request_registered = false;
3434 		return radeon_acpi_pcie_performance_request(rdev, perf_req, advertise);
3435 	}
3436 #endif
3437 	return 0;
3438 }
3439 
3440 static int ni_advertise_gen2_capability(struct radeon_device *rdev)
3441 {
3442 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
3443 	u32 tmp;
3444 
3445         tmp = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
3446 
3447         if ((tmp & LC_OTHER_SIDE_EVER_SENT_GEN2) &&
3448             (tmp & LC_OTHER_SIDE_SUPPORTS_GEN2))
3449                 pi->pcie_gen2 = true;
3450         else
3451 		pi->pcie_gen2 = false;
3452 
3453 	if (!pi->pcie_gen2)
3454 		ni_pcie_performance_request(rdev, PCIE_PERF_REQ_PECI_GEN2, true);
3455 
3456 	return 0;
3457 }
3458 
/*
 * Enable/disable BIF-driven dynamic PCIe gen2 switching.  Only acts
 * when the link partner has demonstrated gen2 capability.  The write
 * ordering (strap bits, failed-speed-change counter clear, udelay) is
 * hardware-mandated — do not reorder.
 */
static void ni_enable_bif_dynamic_pcie_gen2(struct radeon_device *rdev,
					    bool enable)
{
        struct rv7xx_power_info *pi = rv770_get_pi(rdev);
        u32 tmp, bif;

	tmp = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);

	if ((tmp & LC_OTHER_SIDE_EVER_SENT_GEN2) &&
	    (tmp & LC_OTHER_SIDE_SUPPORTS_GEN2)) {
		if (enable) {
			/* If we booted at gen1, route speed requests through CG. */
			if (!pi->boot_in_gen2) {
				bif = RREG32(CG_BIF_REQ_AND_RSP) & ~CG_CLIENT_REQ_MASK;
				bif |= CG_CLIENT_REQ(0xd);
				WREG32(CG_BIF_REQ_AND_RSP, bif);
			}
			tmp &= ~LC_HW_VOLTAGE_IF_CONTROL_MASK;
			tmp |= LC_HW_VOLTAGE_IF_CONTROL(1);
			tmp |= LC_GEN2_EN_STRAP;

			/* Pulse the failed-speed-change counter clear bit. */
			tmp |= LC_CLR_FAILED_SPD_CHANGE_CNT;
			WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, tmp);
			udelay(10);
			tmp &= ~LC_CLR_FAILED_SPD_CHANGE_CNT;
			WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, tmp);
		} else {
			if (!pi->boot_in_gen2) {
				bif = RREG32(CG_BIF_REQ_AND_RSP) & ~CG_CLIENT_REQ_MASK;
				bif |= CG_CLIENT_REQ(0xd);
				WREG32(CG_BIF_REQ_AND_RSP, bif);

				tmp &= ~LC_HW_VOLTAGE_IF_CONTROL_MASK;
				tmp &= ~LC_GEN2_EN_STRAP;
			}
			WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, tmp);
		}
	}
}
3497 
3498 static void ni_enable_dynamic_pcie_gen2(struct radeon_device *rdev,
3499 					bool enable)
3500 {
3501 	ni_enable_bif_dynamic_pcie_gen2(rdev, enable);
3502 
3503 	if (enable)
3504 		WREG32_P(GENERAL_PWRMGT, ENABLE_GEN2PCIE, ~ENABLE_GEN2PCIE);
3505 	else
3506                 WREG32_P(GENERAL_PWRMGT, 0, ~ENABLE_GEN2PCIE);
3507 }
3508 
3509 void ni_set_uvd_clock_before_set_eng_clock(struct radeon_device *rdev,
3510 					   struct radeon_ps *new_ps,
3511 					   struct radeon_ps *old_ps)
3512 {
3513 	struct ni_ps *new_state = ni_get_ps(new_ps);
3514 	struct ni_ps *current_state = ni_get_ps(old_ps);
3515 
3516 	if ((new_ps->vclk == old_ps->vclk) &&
3517 	    (new_ps->dclk == old_ps->dclk))
3518 		return;
3519 
3520 	if (new_state->performance_levels[new_state->performance_level_count - 1].sclk >=
3521 	    current_state->performance_levels[current_state->performance_level_count - 1].sclk)
3522 		return;
3523 
3524 	radeon_set_uvd_clocks(rdev, new_ps->vclk, new_ps->dclk);
3525 }
3526 
3527 void ni_set_uvd_clock_after_set_eng_clock(struct radeon_device *rdev,
3528 					  struct radeon_ps *new_ps,
3529 					  struct radeon_ps *old_ps)
3530 {
3531 	struct ni_ps *new_state = ni_get_ps(new_ps);
3532 	struct ni_ps *current_state = ni_get_ps(old_ps);
3533 
3534 	if ((new_ps->vclk == old_ps->vclk) &&
3535 	    (new_ps->dclk == old_ps->dclk))
3536 		return;
3537 
3538 	if (new_state->performance_levels[new_state->performance_level_count - 1].sclk <
3539 	    current_state->performance_levels[current_state->performance_level_count - 1].sclk)
3540 		return;
3541 
3542 	radeon_set_uvd_clocks(rdev, new_ps->vclk, new_ps->dclk);
3543 }
3544 
/*
 * One-time ASIC setup for DPM: load MC microcode, cache clock/arbiter
 * register state, detect memory and PCIe capabilities, and enable ACPI
 * power management.  An MC microcode failure is logged but not fatal.
 */
void ni_dpm_setup_asic(struct radeon_device *rdev)
{
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	int r;

	r = ni_mc_load_microcode(rdev);
	if (r)
		DRM_ERROR("Failed to load MC firmware!\n");
	ni_read_clock_registers(rdev);
	btc_read_arb_registers(rdev);
	rv770_get_memory_type(rdev);
	if (eg_pi->pcie_performance_request)
		ni_advertise_gen2_capability(rdev);
	rv770_get_pcie_gen2_status(rdev);
	rv770_enable_acpi_pm(rdev);
}
3561 
3562 void ni_update_current_ps(struct radeon_device *rdev,
3563 			  struct radeon_ps *rps)
3564 {
3565 	struct ni_ps *new_ps = ni_get_ps(rps);
3566 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3567         struct ni_power_info *ni_pi = ni_get_pi(rdev);
3568 
3569 	eg_pi->current_rps = *rps;
3570 	ni_pi->current_ps = *new_ps;
3571 	eg_pi->current_rps.ps_priv = &ni_pi->current_ps;
3572 }
3573 
3574 void ni_update_requested_ps(struct radeon_device *rdev,
3575 			    struct radeon_ps *rps)
3576 {
3577 	struct ni_ps *new_ps = ni_get_ps(rps);
3578 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3579         struct ni_power_info *ni_pi = ni_get_pi(rdev);
3580 
3581 	eg_pi->requested_rps = *rps;
3582 	ni_pi->requested_ps = *new_ps;
3583 	eg_pi->requested_rps.ps_priv = &ni_pi->requested_ps;
3584 }
3585 
3586 int ni_dpm_enable(struct radeon_device *rdev)
3587 {
3588 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
3589 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3590 	struct radeon_ps *boot_ps = rdev->pm.dpm.boot_ps;
3591 	int ret;
3592 
3593 	if (pi->gfx_clock_gating)
3594 		ni_cg_clockgating_default(rdev);
3595         if (btc_dpm_enabled(rdev))
3596                 return -EINVAL;
3597 	if (pi->mg_clock_gating)
3598 		ni_mg_clockgating_default(rdev);
3599 	if (eg_pi->ls_clock_gating)
3600 		ni_ls_clockgating_default(rdev);
3601 	if (pi->voltage_control) {
3602 		rv770_enable_voltage_control(rdev, true);
3603 		ret = cypress_construct_voltage_tables(rdev);
3604 		if (ret) {
3605 			DRM_ERROR("cypress_construct_voltage_tables failed\n");
3606 			return ret;
3607 		}
3608 	}
3609 	if (eg_pi->dynamic_ac_timing) {
3610 		ret = ni_initialize_mc_reg_table(rdev);
3611 		if (ret)
3612 			eg_pi->dynamic_ac_timing = false;
3613 	}
3614 	if (pi->dynamic_ss)
3615 		cypress_enable_spread_spectrum(rdev, true);
3616 	if (pi->thermal_protection)
3617 		rv770_enable_thermal_protection(rdev, true);
3618 	rv770_setup_bsp(rdev);
3619 	rv770_program_git(rdev);
3620 	rv770_program_tp(rdev);
3621 	rv770_program_tpp(rdev);
3622 	rv770_program_sstp(rdev);
3623 	cypress_enable_display_gap(rdev);
3624 	rv770_program_vc(rdev);
3625 	if (pi->dynamic_pcie_gen2)
3626 		ni_enable_dynamic_pcie_gen2(rdev, true);
3627 	ret = rv770_upload_firmware(rdev);
3628 	if (ret) {
3629 		DRM_ERROR("rv770_upload_firmware failed\n");
3630 		return ret;
3631 	}
3632 	ret = ni_process_firmware_header(rdev);
3633 	if (ret) {
3634 		DRM_ERROR("ni_process_firmware_header failed\n");
3635 		return ret;
3636 	}
3637 	ret = ni_initial_switch_from_arb_f0_to_f1(rdev);
3638 	if (ret) {
3639 		DRM_ERROR("ni_initial_switch_from_arb_f0_to_f1 failed\n");
3640 		return ret;
3641 	}
3642 	ret = ni_init_smc_table(rdev);
3643 	if (ret) {
3644 		DRM_ERROR("ni_init_smc_table failed\n");
3645 		return ret;
3646 	}
3647 	ret = ni_init_smc_spll_table(rdev);
3648 	if (ret) {
3649 		DRM_ERROR("ni_init_smc_spll_table failed\n");
3650 		return ret;
3651 	}
3652 	ret = ni_init_arb_table_index(rdev);
3653 	if (ret) {
3654 		DRM_ERROR("ni_init_arb_table_index failed\n");
3655 		return ret;
3656 	}
3657 	if (eg_pi->dynamic_ac_timing) {
3658 		ret = ni_populate_mc_reg_table(rdev, boot_ps);
3659 		if (ret) {
3660 			DRM_ERROR("ni_populate_mc_reg_table failed\n");
3661 			return ret;
3662 		}
3663 	}
3664 	ret = ni_initialize_smc_cac_tables(rdev);
3665 	if (ret) {
3666 		DRM_ERROR("ni_initialize_smc_cac_tables failed\n");
3667 		return ret;
3668 	}
3669 	ret = ni_initialize_hardware_cac_manager(rdev);
3670 	if (ret) {
3671 		DRM_ERROR("ni_initialize_hardware_cac_manager failed\n");
3672 		return ret;
3673 	}
3674 	ret = ni_populate_smc_tdp_limits(rdev, boot_ps);
3675 	if (ret) {
3676 		DRM_ERROR("ni_populate_smc_tdp_limits failed\n");
3677 		return ret;
3678 	}
3679 	ni_program_response_times(rdev);
3680 	r7xx_start_smc(rdev);
3681 	ret = cypress_notify_smc_display_change(rdev, false);
3682 	if (ret) {
3683 		DRM_ERROR("cypress_notify_smc_display_change failed\n");
3684 		return ret;
3685 	}
3686 	cypress_enable_sclk_control(rdev, true);
3687 	if (eg_pi->memory_transition)
3688 		cypress_enable_mclk_control(rdev, true);
3689 	cypress_start_dpm(rdev);
3690 	if (pi->gfx_clock_gating)
3691 		ni_gfx_clockgating_enable(rdev, true);
3692 	if (pi->mg_clock_gating)
3693 		ni_mg_clockgating_enable(rdev, true);
3694 	if (eg_pi->ls_clock_gating)
3695 		ni_ls_clockgating_enable(rdev, true);
3696 
3697 	rv770_enable_auto_throttle_source(rdev, RADEON_DPM_AUTO_THROTTLE_SRC_THERMAL, true);
3698 
3699 	ni_update_current_ps(rdev, boot_ps);
3700 
3701 	return 0;
3702 }
3703 
/* ni_dpm_disable - tear down dynamic power management.
 *
 * Reverses ni_dpm_enable(): disables power containment, CAC, spread
 * spectrum, auto thermal throttling and clock gating, stops the dpm
 * state machine and the SMC, and restores the memory arbiter to F0.
 * Finally records the boot state as the current power state.
 */
void ni_dpm_disable(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	struct radeon_ps *boot_ps = rdev->pm.dpm.boot_ps;

	/* nothing to do if dpm was never enabled */
	if (!btc_dpm_enabled(rdev))
		return;
	rv770_clear_vc(rdev);
	if (pi->thermal_protection)
		rv770_enable_thermal_protection(rdev, false);
	ni_enable_power_containment(rdev, boot_ps, false);
	ni_enable_smc_cac(rdev, boot_ps, false);
	cypress_enable_spread_spectrum(rdev, false);
	rv770_enable_auto_throttle_source(rdev, RADEON_DPM_AUTO_THROTTLE_SRC_THERMAL, false);
	if (pi->dynamic_pcie_gen2)
		ni_enable_dynamic_pcie_gen2(rdev, false);

	/* mask the dpm thermal interrupt while dpm is down */
	if (rdev->irq.installed &&
	    r600_is_internal_thermal_sensor(rdev->pm.int_thermal_type)) {
		rdev->irq.dpm_thermal = false;
		radeon_irq_set(rdev);
	}

	if (pi->gfx_clock_gating)
		ni_gfx_clockgating_enable(rdev, false);
	if (pi->mg_clock_gating)
		ni_mg_clockgating_enable(rdev, false);
	if (eg_pi->ls_clock_gating)
		ni_ls_clockgating_enable(rdev, false);
	ni_stop_dpm(rdev);
	btc_reset_to_default(rdev);
	ni_stop_smc(rdev);
	ni_force_switch_to_arb_f0(rdev);

	ni_update_current_ps(rdev, boot_ps);
}
3741 
3742 static int ni_power_control_set_level(struct radeon_device *rdev)
3743 {
3744 	struct radeon_ps *new_ps = rdev->pm.dpm.requested_ps;
3745 	int ret;
3746 
3747 	ret = ni_restrict_performance_levels_before_switch(rdev);
3748 	if (ret)
3749 		return ret;
3750 	ret = rv770_halt_smc(rdev);
3751 	if (ret)
3752 		return ret;
3753 	ret = ni_populate_smc_tdp_limits(rdev, new_ps);
3754 	if (ret)
3755 		return ret;
3756 	ret = rv770_resume_smc(rdev);
3757 	if (ret)
3758 		return ret;
3759 	ret = rv770_set_sw_state(rdev);
3760 	if (ret)
3761 		return ret;
3762 
3763 	return 0;
3764 }
3765 
3766 int ni_dpm_pre_set_power_state(struct radeon_device *rdev)
3767 {
3768 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3769 	struct radeon_ps requested_ps = *rdev->pm.dpm.requested_ps;
3770 	struct radeon_ps *new_ps = &requested_ps;
3771 
3772 	ni_update_requested_ps(rdev, new_ps);
3773 
3774 	ni_apply_state_adjust_rules(rdev, &eg_pi->requested_rps);
3775 
3776 	return 0;
3777 }
3778 
/* ni_dpm_set_power_state - program the previously requested state.
 *
 * Sequence: clamp performance levels, adjust UVD clocks ahead of the
 * engine clock change, drop CAC/power containment for the transition,
 * halt the SMC, upload the new state (plus mc regs and memory timings
 * when dynamic AC timing is on), resume the SMC, request the switch,
 * then restore UVD clocks, CAC, power containment and the TDP limits.
 * Returns 0 on success or the first error encountered.
 */
int ni_dpm_set_power_state(struct radeon_device *rdev)
{
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	struct radeon_ps *new_ps = &eg_pi->requested_rps;
	struct radeon_ps *old_ps = &eg_pi->current_rps;
	int ret;

	ret = ni_restrict_performance_levels_before_switch(rdev);
	if (ret) {
		DRM_ERROR("ni_restrict_performance_levels_before_switch failed\n");
		return ret;
	}
	ni_set_uvd_clock_before_set_eng_clock(rdev, new_ps, old_ps);
	/* disable power containment and CAC across the state change */
	ret = ni_enable_power_containment(rdev, new_ps, false);
	if (ret) {
		DRM_ERROR("ni_enable_power_containment failed\n");
		return ret;
	}
	ret = ni_enable_smc_cac(rdev, new_ps, false);
	if (ret) {
		DRM_ERROR("ni_enable_smc_cac failed\n");
		return ret;
	}
	ret = rv770_halt_smc(rdev);
	if (ret) {
		DRM_ERROR("rv770_halt_smc failed\n");
		return ret;
	}
	if (eg_pi->smu_uvd_hs)
		btc_notify_uvd_to_smc(rdev, new_ps);
	ret = ni_upload_sw_state(rdev, new_ps);
	if (ret) {
		DRM_ERROR("ni_upload_sw_state failed\n");
		return ret;
	}
	if (eg_pi->dynamic_ac_timing) {
		ret = ni_upload_mc_reg_table(rdev, new_ps);
		if (ret) {
			DRM_ERROR("ni_upload_mc_reg_table failed\n");
			return ret;
		}
	}
	ret = ni_program_memory_timing_parameters(rdev, new_ps);
	if (ret) {
		DRM_ERROR("ni_program_memory_timing_parameters failed\n");
		return ret;
	}
	ret = rv770_resume_smc(rdev);
	if (ret) {
		DRM_ERROR("rv770_resume_smc failed\n");
		return ret;
	}
	ret = rv770_set_sw_state(rdev);
	if (ret) {
		DRM_ERROR("rv770_set_sw_state failed\n");
		return ret;
	}
	ni_set_uvd_clock_after_set_eng_clock(rdev, new_ps, old_ps);
	/* re-enable CAC and power containment for the new state */
	ret = ni_enable_smc_cac(rdev, new_ps, true);
	if (ret) {
		DRM_ERROR("ni_enable_smc_cac failed\n");
		return ret;
	}
	ret = ni_enable_power_containment(rdev, new_ps, true);
	if (ret) {
		DRM_ERROR("ni_enable_power_containment failed\n");
		return ret;
	}

	/* update tdp */
	ret = ni_power_control_set_level(rdev);
	if (ret) {
		DRM_ERROR("ni_power_control_set_level failed\n");
		return ret;
	}

	return 0;
}
3857 
3858 void ni_dpm_post_set_power_state(struct radeon_device *rdev)
3859 {
3860 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3861 	struct radeon_ps *new_ps = &eg_pi->requested_rps;
3862 
3863 	ni_update_current_ps(rdev, new_ps);
3864 }
3865 
/* Restrict the performance levels, then program the boot state.
 * NOTE(review): the int result of
 * ni_restrict_performance_levels_before_switch() is ignored here --
 * presumably a failure is harmless on the reset path; confirm.
 */
void ni_dpm_reset_asic(struct radeon_device *rdev)
{
	ni_restrict_performance_levels_before_switch(rdev);
	rv770_set_boot_state(rdev);
}
3871 
/* The powerplay data in the vbios comes in several table revisions;
 * these unions overlay the possible layouts so the parser can select
 * fields according to the revision it finds.
 */
union power_info {
	struct _ATOM_POWERPLAY_INFO info;
	struct _ATOM_POWERPLAY_INFO_V2 info_2;
	struct _ATOM_POWERPLAY_INFO_V3 info_3;
	struct _ATOM_PPLIB_POWERPLAYTABLE pplib;
	struct _ATOM_PPLIB_POWERPLAYTABLE2 pplib2;
	struct _ATOM_PPLIB_POWERPLAYTABLE3 pplib3;
};

/* per-level clock info; this file reads the evergreen layout */
union pplib_clock_info {
	struct _ATOM_PPLIB_R600_CLOCK_INFO r600;
	struct _ATOM_PPLIB_RS780_CLOCK_INFO rs780;
	struct _ATOM_PPLIB_EVERGREEN_CLOCK_INFO evergreen;
	struct _ATOM_PPLIB_SUMO_CLOCK_INFO sumo;
};

/* power state entry; the parser below uses the v1 layout */
union pplib_power_state {
	struct _ATOM_PPLIB_STATE v1;
	struct _ATOM_PPLIB_STATE_V2 v2;
};
3892 
3893 static void ni_parse_pplib_non_clock_info(struct radeon_device *rdev,
3894 					  struct radeon_ps *rps,
3895 					  struct _ATOM_PPLIB_NONCLOCK_INFO *non_clock_info,
3896 					  u8 table_rev)
3897 {
3898 	rps->caps = le32_to_cpu(non_clock_info->ulCapsAndSettings);
3899 	rps->class = le16_to_cpu(non_clock_info->usClassification);
3900 	rps->class2 = le16_to_cpu(non_clock_info->usClassification2);
3901 
3902 	if (ATOM_PPLIB_NONCLOCKINFO_VER1 < table_rev) {
3903 		rps->vclk = le32_to_cpu(non_clock_info->ulVCLK);
3904 		rps->dclk = le32_to_cpu(non_clock_info->ulDCLK);
3905 	} else if (r600_is_uvd_state(rps->class, rps->class2)) {
3906 		rps->vclk = RV770_DEFAULT_VCLK_FREQ;
3907 		rps->dclk = RV770_DEFAULT_DCLK_FREQ;
3908 	} else {
3909 		rps->vclk = 0;
3910 		rps->dclk = 0;
3911 	}
3912 
3913 	if (rps->class & ATOM_PPLIB_CLASSIFICATION_BOOT)
3914 		rdev->pm.dpm.boot_ps = rps;
3915 	if (rps->class & ATOM_PPLIB_CLASSIFICATION_UVDSTATE)
3916 		rdev->pm.dpm.uvd_ps = rps;
3917 }
3918 
/* ni_parse_pplib_clock_info - decode one pplib clock-info entry into
 * performance level @index of state @rps.
 *
 * Fills in sclk/mclk/vddc/vddci/flags from the evergreen-layout clock
 * info, then applies several fixups: max-vddc patching for the 0xff01
 * sentinel, capture of the ACPI and ULV levels, min/max vddc tracking,
 * boot-state clock/voltage overrides, and recording the UI performance
 * state's values as the AC clock/voltage limits.
 */
static void ni_parse_pplib_clock_info(struct radeon_device *rdev,
				      struct radeon_ps *rps, int index,
				      union pplib_clock_info *clock_info)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	struct ni_ps *ps = ni_get_ps(rps);
	struct rv7xx_pl *pl = &ps->performance_levels[index];

	/* levels are parsed in order, so index + 1 tracks the count */
	ps->performance_level_count = index + 1;

	/* clocks are stored as low 16 bits + high 8 bits in the table */
	pl->sclk = le16_to_cpu(clock_info->evergreen.usEngineClockLow);
	pl->sclk |= clock_info->evergreen.ucEngineClockHigh << 16;
	pl->mclk = le16_to_cpu(clock_info->evergreen.usMemoryClockLow);
	pl->mclk |= clock_info->evergreen.ucMemoryClockHigh << 16;

	pl->vddc = le16_to_cpu(clock_info->evergreen.usVDDC);
	pl->vddci = le16_to_cpu(clock_info->evergreen.usVDDCI);
	pl->flags = le32_to_cpu(clock_info->evergreen.ulFlags);

	/* patch up vddc if necessary: 0xff01 is a vbios sentinel meaning
	 * "use the board's max vddc"
	 */
	if (pl->vddc == 0xff01) {
		if (pi->max_vddc)
			pl->vddc = pi->max_vddc;
	}

	/* remember the ACPI level's voltages and pcie gen2 flag */
	if (rps->class & ATOM_PPLIB_CLASSIFICATION_ACPI) {
		pi->acpi_vddc = pl->vddc;
		eg_pi->acpi_vddci = pl->vddci;
		if (ps->performance_levels[0].flags & ATOM_PPLIB_R600_FLAGS_PCIEGEN2)
			pi->acpi_pcie_gen2 = true;
		else
			pi->acpi_pcie_gen2 = false;
	}

	/* a ULV-classified level marks ULV as supported and records it */
	if (rps->class2 & ATOM_PPLIB_CLASSIFICATION2_ULV) {
		eg_pi->ulv.supported = true;
		eg_pi->ulv.pl = pl;
	}

	/* track the vddc range observed across all parsed levels */
	if (pi->min_vddc_in_table > pl->vddc)
		pi->min_vddc_in_table = pl->vddc;

	if (pi->max_vddc_in_table < pl->vddc)
		pi->max_vddc_in_table = pl->vddc;

	/* patch up boot state with the firmware default clocks/voltages */
	if (rps->class & ATOM_PPLIB_CLASSIFICATION_BOOT) {
		u16 vddc, vddci, mvdd;
		radeon_atombios_get_default_voltages(rdev, &vddc, &vddci, &mvdd);
		pl->mclk = rdev->clock.default_mclk;
		pl->sclk = rdev->clock.default_sclk;
		pl->vddc = vddc;
		pl->vddci = vddci;
	}

	/* each UI-performance level updates the AC limits (last wins) */
	if ((rps->class & ATOM_PPLIB_CLASSIFICATION_UI_MASK) ==
	    ATOM_PPLIB_CLASSIFICATION_UI_PERFORMANCE) {
		rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.sclk = pl->sclk;
		rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.mclk = pl->mclk;
		rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.vddc = pl->vddc;
		rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.vddci = pl->vddci;
	}
}
3983 
3984 static int ni_parse_power_table(struct radeon_device *rdev)
3985 {
3986 	struct radeon_mode_info *mode_info = &rdev->mode_info;
3987 	struct _ATOM_PPLIB_NONCLOCK_INFO *non_clock_info;
3988 	union pplib_power_state *power_state;
3989 	int i, j;
3990 	union pplib_clock_info *clock_info;
3991 	union power_info *power_info;
3992 	int index = GetIndexIntoMasterTable(DATA, PowerPlayInfo);
3993         u16 data_offset;
3994 	u8 frev, crev;
3995 	struct ni_ps *ps;
3996 
3997 	if (!atom_parse_data_header(mode_info->atom_context, index, NULL,
3998 				   &frev, &crev, &data_offset))
3999 		return -EINVAL;
4000 	power_info = (union power_info *)((uint8_t*)mode_info->atom_context->bios + data_offset);
4001 
4002 	rdev->pm.dpm.ps = kzalloc(sizeof(struct radeon_ps) *
4003 				  power_info->pplib.ucNumStates, GFP_KERNEL);
4004 	if (!rdev->pm.dpm.ps)
4005 		return -ENOMEM;
4006 
4007 	for (i = 0; i < power_info->pplib.ucNumStates; i++) {
4008 		power_state = (union pplib_power_state *)
4009 			((uint8_t*)mode_info->atom_context->bios + data_offset +
4010 			 le16_to_cpu(power_info->pplib.usStateArrayOffset) +
4011 			 i * power_info->pplib.ucStateEntrySize);
4012 		non_clock_info = (struct _ATOM_PPLIB_NONCLOCK_INFO *)
4013 			((uint8_t*)mode_info->atom_context->bios + data_offset +
4014 			 le16_to_cpu(power_info->pplib.usNonClockInfoArrayOffset) +
4015 			 (power_state->v1.ucNonClockStateIndex *
4016 			  power_info->pplib.ucNonClockSize));
4017 		if (power_info->pplib.ucStateEntrySize - 1) {
4018 			u8 *idx;
4019 			ps = kzalloc(sizeof(struct ni_ps), GFP_KERNEL);
4020 			if (ps == NULL) {
4021 				kfree(rdev->pm.dpm.ps);
4022 				return -ENOMEM;
4023 			}
4024 			rdev->pm.dpm.ps[i].ps_priv = ps;
4025 			ni_parse_pplib_non_clock_info(rdev, &rdev->pm.dpm.ps[i],
4026 							 non_clock_info,
4027 							 power_info->pplib.ucNonClockSize);
4028 			idx = (u8 *)&power_state->v1.ucClockStateIndices[0];
4029 			for (j = 0; j < (power_info->pplib.ucStateEntrySize - 1); j++) {
4030 				clock_info = (union pplib_clock_info *)
4031 					((uint8_t*)mode_info->atom_context->bios + data_offset +
4032 					 le16_to_cpu(power_info->pplib.usClockInfoArrayOffset) +
4033 					 (idx[j] * power_info->pplib.ucClockInfoSize));
4034 				ni_parse_pplib_clock_info(rdev,
4035 							  &rdev->pm.dpm.ps[i], j,
4036 							  clock_info);
4037 			}
4038 		}
4039 	}
4040 	rdev->pm.dpm.num_ps = power_info->pplib.ucNumStates;
4041 	return 0;
4042 }
4043 
/* ni_dpm_init - one-time dpm setup for cayman.
 *
 * Allocates the driver-private power info, parses the vbios power
 * tables, and fills in the large set of tunables (clock gating, CAC
 * weights, mclk thresholds, leakage coefficients, ...) used by the
 * rest of the dpm code.  Returns 0 on success or a negative errno.
 */
int ni_dpm_init(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi;
	struct evergreen_power_info *eg_pi;
	struct ni_power_info *ni_pi;
	struct atom_clock_dividers dividers;
	int ret;

	/* ni_power_info embeds the evergreen and rv7xx infos */
	ni_pi = kzalloc(sizeof(struct ni_power_info), GFP_KERNEL);
	if (ni_pi == NULL)
		return -ENOMEM;
	rdev->pm.dpm.priv = ni_pi;
	eg_pi = &ni_pi->eg;
	pi = &eg_pi->rv7xx;

	rv770_get_max_vddc(rdev);

	eg_pi->ulv.supported = false;
	pi->acpi_vddc = 0;
	eg_pi->acpi_vddci = 0;
	pi->min_vddc_in_table = 0;
	pi->max_vddc_in_table = 0;

	/* NOTE(review): ni_pi is not freed on the error returns below;
	 * it remains reachable via rdev->pm.dpm.priv -- confirm the
	 * caller runs ni_dpm_fini() on init failure.
	 */
	ret = r600_get_platform_caps(rdev);
	if (ret)
		return ret;

	ret = ni_parse_power_table(rdev);
	if (ret)
		return ret;
	ret = r600_parse_extended_power_table(rdev);
	if (ret)
		return ret;

	/* built-in vddc-vs-dispclk dependency table: 4 fixed entries */
	rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries =
		kzalloc(4 * sizeof(struct radeon_clock_voltage_dependency_entry), GFP_KERNEL);
	if (!rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries) {
		r600_free_extended_power_table(rdev);
		return -ENOMEM;
	}
	rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.count = 4;
	rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[0].clk = 0;
	rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[0].v = 0;
	rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[1].clk = 36000;
	rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[1].v = 720;
	rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[2].clk = 54000;
	rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[2].v = 810;
	rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[3].clk = 72000;
	rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[3].v = 900;

	ni_patch_dependency_tables_based_on_leakage(rdev);

	if (rdev->pm.dpm.voltage_response_time == 0)
		rdev->pm.dpm.voltage_response_time = R600_VOLTAGERESPONSETIME_DFLT;
	if (rdev->pm.dpm.backbias_response_time == 0)
		rdev->pm.dpm.backbias_response_time = R600_BACKBIASRESPONSETIME_DFLT;

	/* NOTE(review): the sense of this check looks inverted --
	 * "dividers" can only hold valid output when the call succeeds
	 * (ret == 0), yet it is read on the ret != 0 path.  This matches
	 * the upstream radeon code; confirm before changing.
	 */
	ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
					     0, false, &dividers);
	if (ret)
		pi->ref_div = dividers.ref_div + 1;
	else
		pi->ref_div = R600_REFERENCEDIVIDER_DFLT;

	pi->rlp = RV770_RLP_DFLT;
	pi->rmp = RV770_RMP_DFLT;
	pi->lhp = RV770_LHP_DFLT;
	pi->lmp = RV770_LMP_DFLT;

	/* ats[0]: default activity targets, ats[1]: UVD activity targets */
	eg_pi->ats[0].rlp = RV770_RLP_DFLT;
	eg_pi->ats[0].rmp = RV770_RMP_DFLT;
	eg_pi->ats[0].lhp = RV770_LHP_DFLT;
	eg_pi->ats[0].lmp = RV770_LMP_DFLT;

	eg_pi->ats[1].rlp = BTC_RLP_UVD_DFLT;
	eg_pi->ats[1].rmp = BTC_RMP_UVD_DFLT;
	eg_pi->ats[1].lhp = BTC_LHP_UVD_DFLT;
	eg_pi->ats[1].lmp = BTC_LMP_UVD_DFLT;

	eg_pi->smu_uvd_hs = true;

	/* mclk thresholds; device 0x6707 gets higher cutoffs */
	if (rdev->pdev->device == 0x6707) {
		pi->mclk_strobe_mode_threshold = 55000;
		pi->mclk_edc_enable_threshold = 55000;
		eg_pi->mclk_edc_wr_enable_threshold = 55000;
	} else {
		pi->mclk_strobe_mode_threshold = 40000;
		pi->mclk_edc_enable_threshold = 40000;
		eg_pi->mclk_edc_wr_enable_threshold = 40000;
	}
	ni_pi->mclk_rtt_mode_threshold = eg_pi->mclk_edc_wr_enable_threshold;

	/* detect which voltage rails are GPIO controlled */
	pi->voltage_control =
		radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_VDDC, 0);

	pi->mvdd_control =
		radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_MVDDC, 0);

	eg_pi->vddci_control =
		radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_VDDCI, 0);

	rv770_get_engine_memory_ss(rdev);

	pi->asi = RV770_ASI_DFLT;
	pi->pasi = CYPRESS_HASI_DFLT;
	pi->vrc = CYPRESS_VRC_DFLT;

	pi->power_gating = false;

	pi->gfx_clock_gating = true;

	pi->mg_clock_gating = true;
	pi->mgcgtssm = true;
	eg_pi->ls_clock_gating = false;
	eg_pi->sclk_deep_sleep = false;

	pi->dynamic_pcie_gen2 = true;

	if (rdev->pm.int_thermal_type != THERMAL_TYPE_NONE)
		pi->thermal_protection = true;
	else
		pi->thermal_protection = false;

	pi->display_gap = true;

	pi->dcodt = true;

	pi->ulps = true;

	eg_pi->dynamic_ac_timing = true;
	eg_pi->abm = true;
	eg_pi->mcls = true;
	eg_pi->light_sleep = true;
	eg_pi->memory_transition = true;
#if defined(CONFIG_ACPI)
	eg_pi->pcie_performance_request =
		radeon_acpi_is_pcie_performance_request_supported(rdev);
#else
	eg_pi->pcie_performance_request = false;
#endif

	eg_pi->dll_default_on = false;

	eg_pi->sclk_deep_sleep = false;

	pi->mclk_stutter_mode_threshold = 0;

	pi->sram_end = SMC_RAM_END;

	rdev->pm.dpm.dyn_state.mclk_sclk_ratio = 3;
	rdev->pm.dpm.dyn_state.vddc_vddci_delta = 200;
	rdev->pm.dpm.dyn_state.min_vddc_for_pcie_gen2 = 900;
	rdev->pm.dpm.dyn_state.valid_sclk_values.count = ARRAY_SIZE(btc_valid_sclk);
	rdev->pm.dpm.dyn_state.valid_sclk_values.values = btc_valid_sclk;
	rdev->pm.dpm.dyn_state.valid_mclk_values.count = 0;
	rdev->pm.dpm.dyn_state.valid_mclk_values.values = NULL;
	rdev->pm.dpm.dyn_state.sclk_mclk_delta = 12500;

	/* leakage model coefficients */
	ni_pi->cac_data.leakage_coefficients.at = 516;
	ni_pi->cac_data.leakage_coefficients.bt = 18;
	ni_pi->cac_data.leakage_coefficients.av = 51;
	ni_pi->cac_data.leakage_coefficients.bv = 2957;

	/* pick the CAC weight table by pci device id; unknown ids fall
	 * back to the cayman pro weights (the default label sits
	 * mid-switch so the LE ids below still match their own case).
	 */
	switch (rdev->pdev->device) {
	case 0x6700:
	case 0x6701:
	case 0x6702:
	case 0x6703:
	case 0x6718:
		ni_pi->cac_weights = &cac_weights_cayman_xt;
		break;
	case 0x6705:
	case 0x6719:
	case 0x671D:
	case 0x671C:
	default:
		ni_pi->cac_weights = &cac_weights_cayman_pro;
		break;
	case 0x6704:
	case 0x6706:
	case 0x6707:
	case 0x6708:
	case 0x6709:
		ni_pi->cac_weights = &cac_weights_cayman_le;
		break;
	}

	if (ni_pi->cac_weights->enable_power_containment_by_default) {
		ni_pi->enable_power_containment = true;
		ni_pi->enable_cac = true;
		ni_pi->enable_sq_ramping = true;
	} else {
		ni_pi->enable_power_containment = false;
		ni_pi->enable_cac = false;
		ni_pi->enable_sq_ramping = false;
	}

	ni_pi->driver_calculate_cac_leakage = false;
	ni_pi->cac_configuration_required = true;

	if (ni_pi->cac_configuration_required) {
		ni_pi->support_cac_long_term_average = true;
		ni_pi->lta_window_size = ni_pi->cac_weights->l2_lta_window_size;
		ni_pi->lts_truncate = ni_pi->cac_weights->lts_truncate;
	} else {
		ni_pi->support_cac_long_term_average = false;
		ni_pi->lta_window_size = 0;
		ni_pi->lts_truncate = 0;
	}

	ni_pi->use_power_boost_limit = true;

	/* make sure dc limits are valid */
	if ((rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc.sclk == 0) ||
	    (rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc.mclk == 0))
		rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc =
			rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac;

	return 0;
}
4264 
4265 void ni_dpm_fini(struct radeon_device *rdev)
4266 {
4267 	int i;
4268 
4269 	for (i = 0; i < rdev->pm.dpm.num_ps; i++) {
4270 		kfree(rdev->pm.dpm.ps[i].ps_priv);
4271 	}
4272 	kfree(rdev->pm.dpm.ps);
4273 	kfree(rdev->pm.dpm.priv);
4274 	kfree(rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries);
4275 	r600_free_extended_power_table(rdev);
4276 }
4277 
4278 void ni_dpm_print_power_state(struct radeon_device *rdev,
4279 			      struct radeon_ps *rps)
4280 {
4281 	struct ni_ps *ps = ni_get_ps(rps);
4282 	struct rv7xx_pl *pl;
4283 	int i;
4284 
4285 	r600_dpm_print_class_info(rps->class, rps->class2);
4286 	r600_dpm_print_cap_info(rps->caps);
4287 	printk("\tuvd    vclk: %d dclk: %d\n", rps->vclk, rps->dclk);
4288 	for (i = 0; i < ps->performance_level_count; i++) {
4289 		pl = &ps->performance_levels[i];
4290 		if (rdev->family >= CHIP_TAHITI)
4291 			printk("\t\tpower level %d    sclk: %u mclk: %u vddc: %u vddci: %u pcie gen: %u\n",
4292 			       i, pl->sclk, pl->mclk, pl->vddc, pl->vddci, pl->pcie_gen + 1);
4293 		else
4294 			printk("\t\tpower level %d    sclk: %u mclk: %u vddc: %u vddci: %u\n",
4295 			       i, pl->sclk, pl->mclk, pl->vddc, pl->vddci);
4296 	}
4297 	r600_dpm_print_ps_status(rdev, rps);
4298 }
4299 
4300 void ni_dpm_debugfs_print_current_performance_level(struct radeon_device *rdev,
4301 						    struct seq_file *m)
4302 {
4303 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
4304 	struct radeon_ps *rps = &eg_pi->current_rps;
4305 	struct ni_ps *ps = ni_get_ps(rps);
4306 	struct rv7xx_pl *pl;
4307 	u32 current_index =
4308 		(RREG32(TARGET_AND_CURRENT_PROFILE_INDEX) & CURRENT_STATE_INDEX_MASK) >>
4309 		CURRENT_STATE_INDEX_SHIFT;
4310 
4311 	if (current_index >= ps->performance_level_count) {
4312 		seq_printf(m, "invalid dpm profile %d\n", current_index);
4313 	} else {
4314 		pl = &ps->performance_levels[current_index];
4315 		seq_printf(m, "uvd    vclk: %d dclk: %d\n", rps->vclk, rps->dclk);
4316 		seq_printf(m, "power level %d    sclk: %u mclk: %u vddc: %u vddci: %u\n",
4317 			   current_index, pl->sclk, pl->mclk, pl->vddc, pl->vddci);
4318 	}
4319 }
4320 
4321 u32 ni_dpm_get_sclk(struct radeon_device *rdev, bool low)
4322 {
4323 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
4324 	struct ni_ps *requested_state = ni_get_ps(&eg_pi->requested_rps);
4325 
4326 	if (low)
4327 		return requested_state->performance_levels[0].sclk;
4328 	else
4329 		return requested_state->performance_levels[requested_state->performance_level_count - 1].sclk;
4330 }
4331 
4332 u32 ni_dpm_get_mclk(struct radeon_device *rdev, bool low)
4333 {
4334 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
4335 	struct ni_ps *requested_state = ni_get_ps(&eg_pi->requested_rps);
4336 
4337 	if (low)
4338 		return requested_state->performance_levels[0].mclk;
4339 	else
4340 		return requested_state->performance_levels[requested_state->performance_level_count - 1].mclk;
4341 }
4342