/*
 * Copyright 2012 Advanced Micro Devices, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 */

#include <drm/drmP.h>
#include "radeon.h"
#include "radeon_asic.h"
#include "nid.h"
#include "r600_dpm.h"
#include "ni_dpm.h"
#include "atom.h"
#include <linux/math64.h>
#include <linux/seq_file.h>

#define MC_CG_ARB_FREQ_F0           0x0a
#define MC_CG_ARB_FREQ_F1           0x0b
#define MC_CG_ARB_FREQ_F2           0x0c
#define MC_CG_ARB_FREQ_F3           0x0d

#define SMC_RAM_END 0xC000

static const struct ni_cac_weights cac_weights_cayman_xt =
{
	0x15,
	0x2,
	0x19,
	0x2,
	0x8,
	0x14,
	0x2,
	0x16,
	0xE,
	0x17,
	0x13,
	0x2B,
	0x10,
	0x7,
	0x5,
	0x5,
	0x5,
	0x2,
	0x3,
	0x9,
	0x10,
	0x10,
	0x2B,
	0xA,
	0x9,
	0x4,
	0xD,
	0xD,
	0x3E,
	0x18,
	0x14,
	0,
	0x3,
	0x3,
	0x5,
	0,
	0x2,
	0,
	0,
	0,
	0,
	0,
	0,
	0,
	0,
	0,
	0x1CC,
	0,
	0x164,
	1,
	1,
	1,
	1,
	12,
	12,
	12,
	0x12,
	0x1F,
	132,
	5,
	7,
	0,
	{ 0, 0, 0, 0, 0, 0, 0, 0 },
	{ 0, 0, 0, 0 },
	true
};

static const struct ni_cac_weights cac_weights_cayman_pro =
{
	0x16,
	0x4,
	0x10,
	0x2,
	0xA,
	0x16,
	0x2,
	0x18,
	0x10,
	0x1A,
	0x16,
	0x2D,
	0x12,
	0xA,
	0x6,
	0x6,
	0x6,
	0x2,
	0x4,
	0xB,
	0x11,
	0x11,
	0x2D,
	0xC,
	0xC,
	0x7,
	0x10,
	0x10,
	0x3F,
	0x1A,
	0x16,
	0,
	0x7,
	0x4,
	0x6,
	1,
	0x2,
	0x1,
	0,
	0,
	0,
	0,
	0,
	0,
	0x30,
	0,
	0x1CF,
	0,
	0x166,
	1,
	1,
	1,
	1,
	12,
	12,
	12,
	0x15,
	0x1F,
	132,
	6,
	6,
	0,
	{ 0, 0, 0, 0, 0, 0, 0, 0 },
	{ 0, 0, 0, 0 },
	true
};

static const struct ni_cac_weights cac_weights_cayman_le =
{
	0x7,
	0xE,
	0x1,
	0xA,
	0x1,
	0x3F,
	0x2,
	0x18,
	0x10,
	0x1A,
	0x1,
	0x3F,
	0x1,
	0xE,
	0x6,
	0x6,
	0x6,
	0x2,
	0x4,
	0x9,
	0x1A,
	0x1A,
	0x2C,
	0xA,
	0x11,
	0x8,
	0x19,
	0x19,
	0x1,
	0x1,
	0x1A,
	0,
	0x8,
	0x5,
	0x8,
	0x1,
	0x3,
	0x1,
	0,
	0,
	0,
	0,
	0,
	0,
	0x38,
	0x38,
	0x239,
	0x3,
	0x18A,
	1,
	1,
	1,
	1,
	12,
	12,
	12,
	0x15,
	0x22,
	132,
	6,
	6,
	0,
	{ 0, 0, 0, 0, 0, 0, 0, 0 },
	{ 0, 0, 0, 0 },
	true
};

#define NISLANDS_MGCG_SEQUENCE  300

static const u32 cayman_cgcg_cgls_default[] =
{
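	/* Register,   Value,     Mask bits */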
	0x000008f8, 0x00000010, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000011, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000012, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000013, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000014, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000015, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000016, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000017, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000018, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000019, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x0000001a, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x0000001b, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000020, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000021, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000022, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000023, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000024, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000025, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000026, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000027, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000028, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000029, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x0000002a, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x0000002b, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff
};
#define CAYMAN_CGCG_CGLS_DEFAULT_LENGTH sizeof(cayman_cgcg_cgls_default) / (3 * sizeof(u32))

static const u32 cayman_cgcg_cgls_disable[] =
{
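	/* Register,   Value,     Mask bits */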
	0x000008f8, 0x00000010, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x00000011, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x00000012, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x00000013, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x00000014, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x00000015, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x00000016, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x00000017, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x00000018, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x00000019, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x0000001a, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x0000001b, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x00000020, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000021, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000022, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000023, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000024, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000025, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000026, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000027, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000028, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000029, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x0000002a, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x0000002b, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x00000644, 0x000f7902, 0x001f4180,
	0x00000644, 0x000f3802, 0x001f4180
};
#define CAYMAN_CGCG_CGLS_DISABLE_LENGTH sizeof(cayman_cgcg_cgls_disable) / (3 * sizeof(u32))

static const u32 cayman_cgcg_cgls_enable[] =
{
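	/* Register,   Value,     Mask bits */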
	0x00000644, 0x000f7882, 0x001f4080,
	0x000008f8, 0x00000010, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000011, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000012, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000013, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000014, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000015, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000016, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000017, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000018, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000019, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x0000001a, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x0000001b, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000020, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x00000021, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x00000022, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x00000023, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x00000024, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x00000025, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x00000026, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x00000027, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x00000028, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x00000029, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x0000002a, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x0000002b, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff
};
#define CAYMAN_CGCG_CGLS_ENABLE_LENGTH  sizeof(cayman_cgcg_cgls_enable) / (3 * sizeof(u32))

static const u32 cayman_mgcg_default[] =
{
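	/* Register,   Value,     Mask bits */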
	0x0000802c, 0xc0000000, 0xffffffff,
	0x00003fc4, 0xc0000000, 0xffffffff,
	0x00005448, 0x00000100, 0xffffffff,
	0x000055e4, 0x00000100, 0xffffffff,
	0x0000160c, 0x00000100, 0xffffffff,
	0x00008984, 0x06000100, 0xffffffff,
	0x0000c164, 0x00000100, 0xffffffff,
	0x00008a18, 0x00000100, 0xffffffff,
	0x0000897c, 0x06000100, 0xffffffff,
	0x00008b28, 0x00000100, 0xffffffff,
	0x00009144, 0x00800200, 0xffffffff,
	0x00009a60, 0x00000100, 0xffffffff,
	0x00009868, 0x00000100, 0xffffffff,
	0x00008d58, 0x00000100, 0xffffffff,
	0x00009510, 0x00000100, 0xffffffff,
	0x0000949c, 0x00000100, 0xffffffff,
	0x00009654, 0x00000100, 0xffffffff,
	0x00009030, 0x00000100, 0xffffffff,
	0x00009034, 0x00000100, 0xffffffff,
	0x00009038, 0x00000100, 0xffffffff,
	0x0000903c, 0x00000100, 0xffffffff,
	0x00009040, 0x00000100, 0xffffffff,
	0x0000a200, 0x00000100, 0xffffffff,
	0x0000a204, 0x00000100, 0xffffffff,
	0x0000a208, 0x00000100, 0xffffffff,
	0x0000a20c, 0x00000100, 0xffffffff,
	0x00009744, 0x00000100, 0xffffffff,
	0x00003f80, 0x00000100, 0xffffffff,
	0x0000a210, 0x00000100, 0xffffffff,
	0x0000a214, 0x00000100, 0xffffffff,
	0x000004d8, 0x00000100, 0xffffffff,
	0x00009664, 0x00000100, 0xffffffff,
	0x00009698, 0x00000100, 0xffffffff,
	0x000004d4, 0x00000200, 0xffffffff,
	0x000004d0, 0x00000000, 0xffffffff,
	0x000030cc, 0x00000104, 0xffffffff,
	0x0000d0c0, 0x00000100, 0xffffffff,
	0x0000d8c0, 0x00000100, 0xffffffff,
	0x0000802c, 0x40000000, 0xffffffff,
	0x00003fc4, 0x40000000, 0xffffffff,
	0x0000915c, 0x00010000, 0xffffffff,
	0x00009160, 0x00030002, 0xffffffff,
	0x00009164, 0x00050004, 0xffffffff,
	0x00009168, 0x00070006, 0xffffffff,
	0x00009178, 0x00070000, 0xffffffff,
	0x0000917c, 0x00030002, 0xffffffff,
	0x00009180, 0x00050004, 0xffffffff,
	0x0000918c, 0x00010006, 0xffffffff,
	0x00009190, 0x00090008, 0xffffffff,
	0x00009194, 0x00070000, 0xffffffff,
	0x00009198, 0x00030002, 0xffffffff,
	0x0000919c, 0x00050004, 0xffffffff,
	0x000091a8, 0x00010006, 0xffffffff,
	0x000091ac, 0x00090008, 0xffffffff,
	0x000091b0, 0x00070000, 0xffffffff,
	0x000091b4, 0x00030002, 0xffffffff,
	0x000091b8, 0x00050004, 0xffffffff,
	0x000091c4, 0x00010006, 0xffffffff,
	0x000091c8, 0x00090008, 0xffffffff,
	0x000091cc, 0x00070000, 0xffffffff,
	0x000091d0, 0x00030002, 0xffffffff,
	0x000091d4, 0x00050004, 0xffffffff,
	0x000091e0, 0x00010006, 0xffffffff,
	0x000091e4, 0x00090008, 0xffffffff,
	0x000091e8, 0x00000000, 0xffffffff,
	0x000091ec, 0x00070000, 0xffffffff,
	0x000091f0, 0x00030002, 0xffffffff,
	0x000091f4, 0x00050004, 0xffffffff,
	0x00009200, 0x00010006, 0xffffffff,
	0x00009204, 0x00090008, 0xffffffff,
	0x00009208, 0x00070000, 0xffffffff,
	0x0000920c, 0x00030002, 0xffffffff,
	0x00009210, 0x00050004, 0xffffffff,
	0x0000921c, 0x00010006, 0xffffffff,
	0x00009220, 0x00090008, 0xffffffff,
	0x00009224, 0x00070000, 0xffffffff,
	0x00009228, 0x00030002, 0xffffffff,
	0x0000922c, 0x00050004, 0xffffffff,
	0x00009238, 0x00010006, 0xffffffff,
	0x0000923c, 0x00090008, 0xffffffff,
	0x00009240, 0x00070000, 0xffffffff,
	0x00009244, 0x00030002, 0xffffffff,
	0x00009248, 0x00050004, 0xffffffff,
	0x00009254, 0x00010006, 0xffffffff,
	0x00009258, 0x00090008, 0xffffffff,
	0x0000925c, 0x00070000, 0xffffffff,
	0x00009260, 0x00030002, 0xffffffff,
	0x00009264, 0x00050004, 0xffffffff,
	0x00009270, 0x00010006, 0xffffffff,
	0x00009274, 0x00090008, 0xffffffff,
	0x00009278, 0x00070000, 0xffffffff,
	0x0000927c, 0x00030002, 0xffffffff,
	0x00009280, 0x00050004, 0xffffffff,
	0x0000928c, 0x00010006, 0xffffffff,
	0x00009290, 0x00090008, 0xffffffff,
	0x000092a8, 0x00070000, 0xffffffff,
	0x000092ac, 0x00030002, 0xffffffff,
	0x000092b0, 0x00050004, 0xffffffff,
	0x000092bc, 0x00010006, 0xffffffff,
	0x000092c0, 0x00090008, 0xffffffff,
	0x000092c4, 0x00070000, 0xffffffff,
	0x000092c8, 0x00030002, 0xffffffff,
	0x000092cc, 0x00050004, 0xffffffff,
	0x000092d8, 0x00010006, 0xffffffff,
	0x000092dc, 0x00090008, 0xffffffff,
	0x00009294, 0x00000000, 0xffffffff,
	0x0000802c, 0x40010000, 0xffffffff,
	0x00003fc4, 0x40010000, 0xffffffff,
	0x0000915c, 0x00010000, 0xffffffff,
	0x00009160, 0x00030002, 0xffffffff,
	0x00009164, 0x00050004, 0xffffffff,
	0x00009168, 0x00070006, 0xffffffff,
	0x00009178, 0x00070000, 0xffffffff,
	0x0000917c, 0x00030002, 0xffffffff,
	0x00009180, 0x00050004, 0xffffffff,
	0x0000918c, 0x00010006, 0xffffffff,
	0x00009190, 0x00090008, 0xffffffff,
	0x00009194, 0x00070000, 0xffffffff,
	0x00009198, 0x00030002, 0xffffffff,
	0x0000919c, 0x00050004, 0xffffffff,
	0x000091a8, 0x00010006, 0xffffffff,
	0x000091ac, 0x00090008, 0xffffffff,
	0x000091b0, 0x00070000, 0xffffffff,
	0x000091b4, 0x00030002, 0xffffffff,
	0x000091b8, 0x00050004, 0xffffffff,
	0x000091c4, 0x00010006, 0xffffffff,
	0x000091c8, 0x00090008, 0xffffffff,
	0x000091cc, 0x00070000, 0xffffffff,
	0x000091d0, 0x00030002, 0xffffffff,
	0x000091d4, 0x00050004, 0xffffffff,
	0x000091e0, 0x00010006, 0xffffffff,
	0x000091e4, 0x00090008, 0xffffffff,
	0x000091e8, 0x00000000, 0xffffffff,
	0x000091ec, 0x00070000, 0xffffffff,
	0x000091f0, 0x00030002, 0xffffffff,
	0x000091f4, 0x00050004, 0xffffffff,
	0x00009200, 0x00010006, 0xffffffff,
	0x00009204, 0x00090008, 0xffffffff,
	0x00009208, 0x00070000, 0xffffffff,
	0x0000920c, 0x00030002, 0xffffffff,
	0x00009210, 0x00050004, 0xffffffff,
	0x0000921c, 0x00010006, 0xffffffff,
	0x00009220, 0x00090008, 0xffffffff,
	0x00009224, 0x00070000, 0xffffffff,
	0x00009228, 0x00030002, 0xffffffff,
	0x0000922c, 0x00050004, 0xffffffff,
	0x00009238, 0x00010006, 0xffffffff,
	0x0000923c, 0x00090008, 0xffffffff,
	0x00009240, 0x00070000, 0xffffffff,
	0x00009244, 0x00030002, 0xffffffff,
	0x00009248, 0x00050004, 0xffffffff,
	0x00009254, 0x00010006, 0xffffffff,
	0x00009258, 0x00090008, 0xffffffff,
	0x0000925c, 0x00070000, 0xffffffff,
	0x00009260, 0x00030002, 0xffffffff,
	0x00009264, 0x00050004, 0xffffffff,
	0x00009270, 0x00010006, 0xffffffff,
	0x00009274, 0x00090008, 0xffffffff,
	0x00009278, 0x00070000, 0xffffffff,
	0x0000927c, 0x00030002, 0xffffffff,
	0x00009280, 0x00050004, 0xffffffff,
	0x0000928c, 0x00010006, 0xffffffff,
	0x00009290, 0x00090008, 0xffffffff,
	0x000092a8, 0x00070000, 0xffffffff,
	0x000092ac, 0x00030002, 0xffffffff,
	0x000092b0, 0x00050004, 0xffffffff,
	0x000092bc, 0x00010006, 0xffffffff,
	0x000092c0, 0x00090008, 0xffffffff,
	0x000092c4, 0x00070000, 0xffffffff,
	0x000092c8, 0x00030002, 0xffffffff,
	0x000092cc, 0x00050004, 0xffffffff,
	0x000092d8, 0x00010006, 0xffffffff,
	0x000092dc, 0x00090008, 0xffffffff,
	0x00009294, 0x00000000, 0xffffffff,
	0x0000802c, 0xc0000000, 0xffffffff,
	0x00003fc4, 0xc0000000, 0xffffffff,
	0x000008f8, 0x00000010, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000011, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000012, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000013, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000014, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000015, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000016, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000017, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000018, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000019, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x0000001a, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x0000001b, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff
};
#define CAYMAN_MGCG_DEFAULT_LENGTH sizeof(cayman_mgcg_default) / (3 * sizeof(u32))

static const u32 cayman_mgcg_disable[] =
{
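	/* Register,   Value,     Mask bits */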
	0x0000802c, 0xc0000000, 0xffffffff,
	0x000008f8, 0x00000000, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x00000001, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x00000002, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x00000003, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x00009150, 0x00600000, 0xffffffff
};
#define CAYMAN_MGCG_DISABLE_LENGTH   sizeof(cayman_mgcg_disable) / (3 * sizeof(u32))

static const u32 cayman_mgcg_enable[] =
{
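	/* Register,   Value,     Mask bits */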
	0x0000802c, 0xc0000000, 0xffffffff,
	0x000008f8, 0x00000000, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000001, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000002, 0xffffffff,
	0x000008fc, 0x00600000, 0xffffffff,
	0x000008f8, 0x00000003, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x00009150, 0x96944200, 0xffffffff
};

#define CAYMAN_MGCG_ENABLE_LENGTH   sizeof(cayman_mgcg_enable) / (3 * sizeof(u32))

#define NISLANDS_SYSLS_SEQUENCE  100

static const u32 cayman_sysls_default[] =
{
	/* Register,   Value,     Mask bits */
	0x000055e8, 0x00000000, 0xffffffff,
	0x0000d0bc, 0x00000000, 0xffffffff,
	0x0000d8bc, 0x00000000, 0xffffffff,
	0x000015c0, 0x000c1401, 0xffffffff,
	0x0000264c, 0x000c0400, 0xffffffff,
	0x00002648, 0x000c0400, 0xffffffff,
	0x00002650, 0x000c0400, 0xffffffff,
	0x000020b8, 0x000c0400, 0xffffffff,
	0x000020bc, 0x000c0400, 0xffffffff,
	0x000020c0, 0x000c0c80, 0xffffffff,
	0x0000f4a0, 0x000000c0, 0xffffffff,
	0x0000f4a4, 0x00680fff, 0xffffffff,
	0x00002f50, 0x00000404, 0xffffffff,
	0x000004c8, 0x00000001, 0xffffffff,
	0x000064ec, 0x00000000, 0xffffffff,
	0x00000c7c, 0x00000000, 0xffffffff,
	0x00008dfc, 0x00000000, 0xffffffff
};
#define CAYMAN_SYSLS_DEFAULT_LENGTH sizeof(cayman_sysls_default) / (3 * sizeof(u32))

static const u32 cayman_sysls_disable[] =
{
	/* Register,   Value,     Mask bits */
	0x0000d0c0, 0x00000000, 0xffffffff,
	0x0000d8c0, 0x00000000, 0xffffffff,
	0x000055e8, 0x00000000, 0xffffffff,
	0x0000d0bc, 0x00000000, 0xffffffff,
	0x0000d8bc, 0x00000000, 0xffffffff,
	0x000015c0, 0x00041401, 0xffffffff,
	0x0000264c, 0x00040400, 0xffffffff,
	0x00002648, 0x00040400, 0xffffffff,
	0x00002650, 0x00040400, 0xffffffff,
	0x000020b8, 0x00040400, 0xffffffff,
	0x000020bc, 0x00040400, 0xffffffff,
	0x000020c0, 0x00040c80, 0xffffffff,
	0x0000f4a0, 0x000000c0, 0xffffffff,
	0x0000f4a4, 0x00680000, 0xffffffff,
	0x00002f50, 0x00000404, 0xffffffff,
	0x000004c8, 0x00000001, 0xffffffff,
	0x000064ec, 0x00007ffd, 0xffffffff,
	0x00000c7c, 0x0000ff00, 0xffffffff,
	0x00008dfc, 0x0000007f, 0xffffffff
};
#define CAYMAN_SYSLS_DISABLE_LENGTH sizeof(cayman_sysls_disable) / (3 * sizeof(u32))

static const u32 cayman_sysls_enable[] =
{
	/* Register,   Value,     Mask bits */
	0x000055e8, 0x00000001, 0xffffffff,
	0x0000d0bc, 0x00000100, 0xffffffff,
	0x0000d8bc, 0x00000100, 0xffffffff,
	0x000015c0, 0x000c1401, 0xffffffff,
	0x0000264c, 0x000c0400, 0xffffffff,
	0x00002648, 0x000c0400, 0xffffffff,
	0x00002650, 0x000c0400, 0xffffffff,
	0x000020b8, 0x000c0400, 0xffffffff,
	0x000020bc, 0x000c0400, 0xffffffff,
	0x000020c0, 0x000c0c80, 0xffffffff,
	0x0000f4a0, 0x000000c0, 0xffffffff,
	0x0000f4a4, 0x00680fff, 0xffffffff,
	0x00002f50, 0x00000903, 0xffffffff,
	0x000004c8, 0x00000000, 0xffffffff,
	0x000064ec, 0x00000000, 0xffffffff,
	0x00000c7c, 0x00000000, 0xffffffff,
	0x00008dfc, 0x00000000, 0xffffffff
};
#define CAYMAN_SYSLS_ENABLE_LENGTH sizeof(cayman_sysls_enable) / (3 * sizeof(u32))

struct rv7xx_power_info *rv770_get_pi(struct radeon_device *rdev);
struct evergreen_power_info *evergreen_get_pi(struct radeon_device *rdev);
struct ni_power_info *ni_get_pi(struct radeon_device *rdev);
struct ni_ps *ni_get_ps(struct radeon_ps *rps);
void ni_dpm_reset_asic(struct radeon_device *rdev);

struct ni_power_info *ni_get_pi(struct radeon_device *rdev)
{
	struct ni_power_info *pi = rdev->pm.dpm.priv;

	return pi;
}

struct ni_ps *ni_get_ps(struct radeon_ps *rps)
{
	struct ni_ps *ps = rps->ps_priv;

	return ps;
}

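/*
 * Leakage estimate in DRM fixed point: ileakage, v, t and the at/bt/av/bv
 * coefficients are each divided by 1000, then
 *
 *   leakage = ileakage * at * e^(bt * t) * av * e^(bv * v) * v
 *
 * and the result is scaled back up by 1000 before conversion to an integer.
 */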
static void ni_calculate_leakage_for_v_and_t_formula(const struct ni_leakage_coeffients *coeff,
						     u16 v, s32 t,
						     u32 ileakage,
						     u32 *leakage)
{
	s64 kt, kv, leakage_w, i_leakage, vddc, temperature;

	i_leakage = div64_s64(drm_int2fixp(ileakage), 1000);
	vddc = div64_s64(drm_int2fixp(v), 1000);
	temperature = div64_s64(drm_int2fixp(t), 1000);

	kt = drm_fixp_mul(div64_s64(drm_int2fixp(coeff->at), 1000),
			  drm_fixp_exp(drm_fixp_mul(div64_s64(drm_int2fixp(coeff->bt), 1000), temperature)));
	kv = drm_fixp_mul(div64_s64(drm_int2fixp(coeff->av), 1000),
			  drm_fixp_exp(drm_fixp_mul(div64_s64(drm_int2fixp(coeff->bv), 1000), vddc)));

	leakage_w = drm_fixp_mul(drm_fixp_mul(drm_fixp_mul(i_leakage, kt), kv), vddc);

	*leakage = drm_fixp2int(leakage_w * 1000);
}

static void ni_calculate_leakage_for_v_and_t(struct radeon_device *rdev,
					     const struct ni_leakage_coeffients *coeff,
					     u16 v,
					     s32 t,
					     u32 i_leakage,
					     u32 *leakage)
{
	ni_calculate_leakage_for_v_and_t_formula(coeff, v, t, i_leakage, leakage);
}

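/*
 * Memory reclocking is only glitch-free while the displays are in vblank.
 * Report the vblank as too short when it is below the GDDR5 switch
 * threshold (450, in the usec units returned by r600_dpm_get_vblank_time());
 * callers then keep mclk fixed for the state.
 */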
bool ni_dpm_vblank_too_short(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	u32 vblank_time = r600_dpm_get_vblank_time(rdev);
	/* we never hit the non-gddr5 limit so disable it */
	u32 switch_limit = pi->mem_gddr5 ? 450 : 0;

	if (vblank_time < switch_limit)
		return true;
	else
		return false;
}

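/*
 * Clamp a requested power state to what the platform supports: apply the
 * AC/DC clock and voltage limits and the voltage dependency tables, pin
 * mclk/vddci to a single value when mclk switching has to stay disabled,
 * keep the performance levels monotonically non-decreasing, and record
 * whether the adjusted state is still DC compatible.
 */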
78657e252bfSMichael Neumann static void ni_apply_state_adjust_rules(struct radeon_device *rdev,
78757e252bfSMichael Neumann 					struct radeon_ps *rps)
78857e252bfSMichael Neumann {
78957e252bfSMichael Neumann 	struct ni_ps *ps = ni_get_ps(rps);
79057e252bfSMichael Neumann 	struct radeon_clock_and_voltage_limits *max_limits;
79157e252bfSMichael Neumann 	bool disable_mclk_switching;
792*c6f73aabSFrançois Tigeot 	u32 mclk;
793*c6f73aabSFrançois Tigeot 	u16 vddci;
7944cd92098Szrj 	u32 max_sclk_vddc, max_mclk_vddci, max_mclk_vddc;
79557e252bfSMichael Neumann 	int i;
79657e252bfSMichael Neumann 
79757e252bfSMichael Neumann 	if ((rdev->pm.dpm.new_active_crtc_count > 1) ||
79857e252bfSMichael Neumann 	    ni_dpm_vblank_too_short(rdev))
79957e252bfSMichael Neumann 		disable_mclk_switching = true;
80057e252bfSMichael Neumann 	else
80157e252bfSMichael Neumann 		disable_mclk_switching = false;
80257e252bfSMichael Neumann 
80357e252bfSMichael Neumann 	if (rdev->pm.dpm.ac_power)
80457e252bfSMichael Neumann 		max_limits = &rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac;
80557e252bfSMichael Neumann 	else
80657e252bfSMichael Neumann 		max_limits = &rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc;
80757e252bfSMichael Neumann 
80857e252bfSMichael Neumann 	if (rdev->pm.dpm.ac_power == false) {
80957e252bfSMichael Neumann 		for (i = 0; i < ps->performance_level_count; i++) {
81057e252bfSMichael Neumann 			if (ps->performance_levels[i].mclk > max_limits->mclk)
81157e252bfSMichael Neumann 				ps->performance_levels[i].mclk = max_limits->mclk;
81257e252bfSMichael Neumann 			if (ps->performance_levels[i].sclk > max_limits->sclk)
81357e252bfSMichael Neumann 				ps->performance_levels[i].sclk = max_limits->sclk;
81457e252bfSMichael Neumann 			if (ps->performance_levels[i].vddc > max_limits->vddc)
81557e252bfSMichael Neumann 				ps->performance_levels[i].vddc = max_limits->vddc;
81657e252bfSMichael Neumann 			if (ps->performance_levels[i].vddci > max_limits->vddci)
81757e252bfSMichael Neumann 				ps->performance_levels[i].vddci = max_limits->vddci;
81857e252bfSMichael Neumann 		}
81957e252bfSMichael Neumann 	}
82057e252bfSMichael Neumann 
8214cd92098Szrj 	/* limit clocks to max supported clocks based on voltage dependency tables */
8224cd92098Szrj 	btc_get_max_clock_from_voltage_dependency_table(&rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk,
8234cd92098Szrj 							&max_sclk_vddc);
8244cd92098Szrj 	btc_get_max_clock_from_voltage_dependency_table(&rdev->pm.dpm.dyn_state.vddci_dependency_on_mclk,
8254cd92098Szrj 							&max_mclk_vddci);
8264cd92098Szrj 	btc_get_max_clock_from_voltage_dependency_table(&rdev->pm.dpm.dyn_state.vddc_dependency_on_mclk,
8274cd92098Szrj 							&max_mclk_vddc);
8284cd92098Szrj 
8294cd92098Szrj 	for (i = 0; i < ps->performance_level_count; i++) {
8304cd92098Szrj 		if (max_sclk_vddc) {
8314cd92098Szrj 			if (ps->performance_levels[i].sclk > max_sclk_vddc)
8324cd92098Szrj 				ps->performance_levels[i].sclk = max_sclk_vddc;
8334cd92098Szrj 		}
8344cd92098Szrj 		if (max_mclk_vddci) {
8354cd92098Szrj 			if (ps->performance_levels[i].mclk > max_mclk_vddci)
8364cd92098Szrj 				ps->performance_levels[i].mclk = max_mclk_vddci;
8374cd92098Szrj 		}
8384cd92098Szrj 		if (max_mclk_vddc) {
8394cd92098Szrj 			if (ps->performance_levels[i].mclk > max_mclk_vddc)
8404cd92098Szrj 				ps->performance_levels[i].mclk = max_mclk_vddc;
8414cd92098Szrj 		}
8424cd92098Szrj 	}
8434cd92098Szrj 
84457e252bfSMichael Neumann 	/* XXX validate the min clocks required for display */
84557e252bfSMichael Neumann 
846*c6f73aabSFrançois Tigeot 	/* adjust low state */
84757e252bfSMichael Neumann 	if (disable_mclk_switching) {
848*c6f73aabSFrançois Tigeot 		ps->performance_levels[0].mclk =
849*c6f73aabSFrançois Tigeot 			ps->performance_levels[ps->performance_level_count - 1].mclk;
850*c6f73aabSFrançois Tigeot 		ps->performance_levels[0].vddci =
851*c6f73aabSFrançois Tigeot 			ps->performance_levels[ps->performance_level_count - 1].vddci;
85257e252bfSMichael Neumann 	}
85357e252bfSMichael Neumann 
85457e252bfSMichael Neumann 	btc_skip_blacklist_clocks(rdev, max_limits->sclk, max_limits->mclk,
85557e252bfSMichael Neumann 				  &ps->performance_levels[0].sclk,
85657e252bfSMichael Neumann 				  &ps->performance_levels[0].mclk);
85757e252bfSMichael Neumann 
85857e252bfSMichael Neumann 	for (i = 1; i < ps->performance_level_count; i++) {
85957e252bfSMichael Neumann 		if (ps->performance_levels[i].sclk < ps->performance_levels[i - 1].sclk)
86057e252bfSMichael Neumann 			ps->performance_levels[i].sclk = ps->performance_levels[i - 1].sclk;
86157e252bfSMichael Neumann 		if (ps->performance_levels[i].vddc < ps->performance_levels[i - 1].vddc)
86257e252bfSMichael Neumann 			ps->performance_levels[i].vddc = ps->performance_levels[i - 1].vddc;
86357e252bfSMichael Neumann 	}
86457e252bfSMichael Neumann 
865*c6f73aabSFrançois Tigeot 	/* adjust remaining states */
86657e252bfSMichael Neumann 	if (disable_mclk_switching) {
86757e252bfSMichael Neumann 		mclk = ps->performance_levels[0].mclk;
868*c6f73aabSFrançois Tigeot 		vddci = ps->performance_levels[0].vddci;
86957e252bfSMichael Neumann 		for (i = 1; i < ps->performance_level_count; i++) {
87057e252bfSMichael Neumann 			if (mclk < ps->performance_levels[i].mclk)
87157e252bfSMichael Neumann 				mclk = ps->performance_levels[i].mclk;
872*c6f73aabSFrançois Tigeot 			if (vddci < ps->performance_levels[i].vddci)
873*c6f73aabSFrançois Tigeot 				vddci = ps->performance_levels[i].vddci;
87457e252bfSMichael Neumann 		}
87557e252bfSMichael Neumann 		for (i = 0; i < ps->performance_level_count; i++) {
87657e252bfSMichael Neumann 			ps->performance_levels[i].mclk = mclk;
87757e252bfSMichael Neumann 			ps->performance_levels[i].vddci = vddci;
87857e252bfSMichael Neumann 		}
87957e252bfSMichael Neumann 	} else {
88057e252bfSMichael Neumann 		for (i = 1; i < ps->performance_level_count; i++) {
88157e252bfSMichael Neumann 			if (ps->performance_levels[i].mclk < ps->performance_levels[i - 1].mclk)
88257e252bfSMichael Neumann 				ps->performance_levels[i].mclk = ps->performance_levels[i - 1].mclk;
88357e252bfSMichael Neumann 			if (ps->performance_levels[i].vddci < ps->performance_levels[i - 1].vddci)
88457e252bfSMichael Neumann 				ps->performance_levels[i].vddci = ps->performance_levels[i - 1].vddci;
88557e252bfSMichael Neumann 		}
88657e252bfSMichael Neumann 	}
88757e252bfSMichael Neumann 
88857e252bfSMichael Neumann 	for (i = 1; i < ps->performance_level_count; i++)
88957e252bfSMichael Neumann 		btc_skip_blacklist_clocks(rdev, max_limits->sclk, max_limits->mclk,
89057e252bfSMichael Neumann 					  &ps->performance_levels[i].sclk,
89157e252bfSMichael Neumann 					  &ps->performance_levels[i].mclk);
89257e252bfSMichael Neumann 
89357e252bfSMichael Neumann 	for (i = 0; i < ps->performance_level_count; i++)
89457e252bfSMichael Neumann 		btc_adjust_clock_combinations(rdev, max_limits,
89557e252bfSMichael Neumann 					      &ps->performance_levels[i]);
89657e252bfSMichael Neumann 
89757e252bfSMichael Neumann 	for (i = 0; i < ps->performance_level_count; i++) {
89857e252bfSMichael Neumann 		btc_apply_voltage_dependency_rules(&rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk,
89957e252bfSMichael Neumann 						   ps->performance_levels[i].sclk,
90057e252bfSMichael Neumann 						   max_limits->vddc,  &ps->performance_levels[i].vddc);
90157e252bfSMichael Neumann 		btc_apply_voltage_dependency_rules(&rdev->pm.dpm.dyn_state.vddci_dependency_on_mclk,
90257e252bfSMichael Neumann 						   ps->performance_levels[i].mclk,
90357e252bfSMichael Neumann 						   max_limits->vddci, &ps->performance_levels[i].vddci);
90457e252bfSMichael Neumann 		btc_apply_voltage_dependency_rules(&rdev->pm.dpm.dyn_state.vddc_dependency_on_mclk,
90557e252bfSMichael Neumann 						   ps->performance_levels[i].mclk,
90657e252bfSMichael Neumann 						   max_limits->vddc,  &ps->performance_levels[i].vddc);
90757e252bfSMichael Neumann 		btc_apply_voltage_dependency_rules(&rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk,
90857e252bfSMichael Neumann 						   rdev->clock.current_dispclk,
90957e252bfSMichael Neumann 						   max_limits->vddc,  &ps->performance_levels[i].vddc);
91057e252bfSMichael Neumann 	}
91157e252bfSMichael Neumann 
91257e252bfSMichael Neumann 	for (i = 0; i < ps->performance_level_count; i++) {
91357e252bfSMichael Neumann 		btc_apply_voltage_delta_rules(rdev,
91457e252bfSMichael Neumann 					      max_limits->vddc, max_limits->vddci,
91557e252bfSMichael Neumann 					      &ps->performance_levels[i].vddc,
91657e252bfSMichael Neumann 					      &ps->performance_levels[i].vddci);
91757e252bfSMichael Neumann 	}
91857e252bfSMichael Neumann 
91957e252bfSMichael Neumann 	ps->dc_compatible = true;
92057e252bfSMichael Neumann 	for (i = 0; i < ps->performance_level_count; i++) {
92157e252bfSMichael Neumann 		if (ps->performance_levels[i].vddc > rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc.vddc)
92257e252bfSMichael Neumann 			ps->dc_compatible = false;
92357e252bfSMichael Neumann 
92457e252bfSMichael Neumann 		if (ps->performance_levels[i].vddc < rdev->pm.dpm.dyn_state.min_vddc_for_pcie_gen2)
92557e252bfSMichael Neumann 			ps->performance_levels[i].flags &= ~ATOM_PPLIB_R600_FLAGS_PCIEGEN2;
92657e252bfSMichael Neumann 	}
92757e252bfSMichael Neumann }
92857e252bfSMichael Neumann 
92957e252bfSMichael Neumann static void ni_cg_clockgating_default(struct radeon_device *rdev)
93057e252bfSMichael Neumann {
93157e252bfSMichael Neumann 	u32 count;
93257e252bfSMichael Neumann 	const u32 *ps = NULL;
93357e252bfSMichael Neumann 
93457e252bfSMichael Neumann 	ps = (const u32 *)&cayman_cgcg_cgls_default;
93557e252bfSMichael Neumann 	count = CAYMAN_CGCG_CGLS_DEFAULT_LENGTH;
93657e252bfSMichael Neumann 
93757e252bfSMichael Neumann 	btc_program_mgcg_hw_sequence(rdev, ps, count);
93857e252bfSMichael Neumann }
93957e252bfSMichael Neumann 
94057e252bfSMichael Neumann static void ni_gfx_clockgating_enable(struct radeon_device *rdev,
94157e252bfSMichael Neumann 				      bool enable)
94257e252bfSMichael Neumann {
94357e252bfSMichael Neumann 	u32 count;
94457e252bfSMichael Neumann 	const u32 *ps = NULL;
94557e252bfSMichael Neumann 
94657e252bfSMichael Neumann 	if (enable) {
94757e252bfSMichael Neumann 		ps = (const u32 *)&cayman_cgcg_cgls_enable;
94857e252bfSMichael Neumann 		count = CAYMAN_CGCG_CGLS_ENABLE_LENGTH;
94957e252bfSMichael Neumann 	} else {
95057e252bfSMichael Neumann 		ps = (const u32 *)&cayman_cgcg_cgls_disable;
95157e252bfSMichael Neumann 		count = CAYMAN_CGCG_CGLS_DISABLE_LENGTH;
95257e252bfSMichael Neumann 	}
95357e252bfSMichael Neumann 
95457e252bfSMichael Neumann 	btc_program_mgcg_hw_sequence(rdev, ps, count);
95557e252bfSMichael Neumann }
95657e252bfSMichael Neumann 
95757e252bfSMichael Neumann static void ni_mg_clockgating_default(struct radeon_device *rdev)
95857e252bfSMichael Neumann {
95957e252bfSMichael Neumann 	u32 count;
96057e252bfSMichael Neumann 	const u32 *ps = NULL;
96157e252bfSMichael Neumann 
96257e252bfSMichael Neumann 	ps = (const u32 *)&cayman_mgcg_default;
96357e252bfSMichael Neumann 	count = CAYMAN_MGCG_DEFAULT_LENGTH;
96457e252bfSMichael Neumann 
96557e252bfSMichael Neumann 	btc_program_mgcg_hw_sequence(rdev, ps, count);
96657e252bfSMichael Neumann }
96757e252bfSMichael Neumann 
96857e252bfSMichael Neumann static void ni_mg_clockgating_enable(struct radeon_device *rdev,
96957e252bfSMichael Neumann 				     bool enable)
97057e252bfSMichael Neumann {
97157e252bfSMichael Neumann 	u32 count;
97257e252bfSMichael Neumann 	const u32 *ps = NULL;
97357e252bfSMichael Neumann 
97457e252bfSMichael Neumann 	if (enable) {
97557e252bfSMichael Neumann 		ps = (const u32 *)&cayman_mgcg_enable;
97657e252bfSMichael Neumann 		count = CAYMAN_MGCG_ENABLE_LENGTH;
97757e252bfSMichael Neumann 	} else {
97857e252bfSMichael Neumann 		ps = (const u32 *)&cayman_mgcg_disable;
97957e252bfSMichael Neumann 		count = CAYMAN_MGCG_DISABLE_LENGTH;
98057e252bfSMichael Neumann 	}
98157e252bfSMichael Neumann 
98257e252bfSMichael Neumann 	btc_program_mgcg_hw_sequence(rdev, ps, count);
98357e252bfSMichael Neumann }
98457e252bfSMichael Neumann 
98557e252bfSMichael Neumann static void ni_ls_clockgating_default(struct radeon_device *rdev)
98657e252bfSMichael Neumann {
98757e252bfSMichael Neumann 	u32 count;
98857e252bfSMichael Neumann 	const u32 *ps = NULL;
98957e252bfSMichael Neumann 
99057e252bfSMichael Neumann 	ps = (const u32 *)&cayman_sysls_default;
99157e252bfSMichael Neumann 	count = CAYMAN_SYSLS_DEFAULT_LENGTH;
99257e252bfSMichael Neumann 
99357e252bfSMichael Neumann 	btc_program_mgcg_hw_sequence(rdev, ps, count);
99457e252bfSMichael Neumann }
99557e252bfSMichael Neumann 
99657e252bfSMichael Neumann static void ni_ls_clockgating_enable(struct radeon_device *rdev,
99757e252bfSMichael Neumann 				     bool enable)
99857e252bfSMichael Neumann {
99957e252bfSMichael Neumann 	u32 count;
100057e252bfSMichael Neumann 	const u32 *ps = NULL;
100157e252bfSMichael Neumann 
100257e252bfSMichael Neumann 	if (enable) {
100357e252bfSMichael Neumann 		ps = (const u32 *)&cayman_sysls_enable;
100457e252bfSMichael Neumann 		count = CAYMAN_SYSLS_ENABLE_LENGTH;
100557e252bfSMichael Neumann 	} else {
100657e252bfSMichael Neumann 		ps = (const u32 *)&cayman_sysls_disable;
100757e252bfSMichael Neumann 		count = CAYMAN_SYSLS_DISABLE_LENGTH;
100857e252bfSMichael Neumann 	}
100957e252bfSMichael Neumann 
101057e252bfSMichael Neumann 	btc_program_mgcg_hw_sequence(rdev, ps, count);
101257e252bfSMichael Neumann }
101357e252bfSMichael Neumann 
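/*
 * Entries in the ATOM clock/voltage dependency tables that carry the
 * placeholder voltage 0xff01 (a leakage/virtual voltage id) are patched to
 * the real maximum VDDC here; if max_vddc is unknown the table cannot be
 * fixed up and an error is returned.
 */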
101457e252bfSMichael Neumann static int ni_patch_single_dependency_table_based_on_leakage(struct radeon_device *rdev,
101557e252bfSMichael Neumann 							     struct radeon_clock_voltage_dependency_table *table)
101657e252bfSMichael Neumann {
101757e252bfSMichael Neumann 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
101857e252bfSMichael Neumann 	u32 i;
101957e252bfSMichael Neumann 
102057e252bfSMichael Neumann 	if (table) {
102157e252bfSMichael Neumann 		for (i = 0; i < table->count; i++) {
102257e252bfSMichael Neumann 			if (0xff01 == table->entries[i].v) {
102357e252bfSMichael Neumann 				if (pi->max_vddc == 0)
102457e252bfSMichael Neumann 					return -EINVAL;
102557e252bfSMichael Neumann 				table->entries[i].v = pi->max_vddc;
102657e252bfSMichael Neumann 			}
102757e252bfSMichael Neumann 		}
102857e252bfSMichael Neumann 	}
102957e252bfSMichael Neumann 	return 0;
103057e252bfSMichael Neumann }
103157e252bfSMichael Neumann 
103257e252bfSMichael Neumann static int ni_patch_dependency_tables_based_on_leakage(struct radeon_device *rdev)
103357e252bfSMichael Neumann {
103457e252bfSMichael Neumann 	int ret = 0;
103557e252bfSMichael Neumann 
103657e252bfSMichael Neumann 	ret = ni_patch_single_dependency_table_based_on_leakage(rdev,
103757e252bfSMichael Neumann 								&rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk);
	/* don't let the mclk patch below clobber an error from the sclk table */
	if (ret)
		return ret;
103857e252bfSMichael Neumann 
103957e252bfSMichael Neumann 	ret = ni_patch_single_dependency_table_based_on_leakage(rdev,
104057e252bfSMichael Neumann 								&rdev->pm.dpm.dyn_state.vddc_dependency_on_mclk);
104157e252bfSMichael Neumann 	return ret;
104257e252bfSMichael Neumann }
104357e252bfSMichael Neumann 
104457e252bfSMichael Neumann static void ni_stop_dpm(struct radeon_device *rdev)
104557e252bfSMichael Neumann {
104657e252bfSMichael Neumann 	WREG32_P(GENERAL_PWRMGT, 0, ~GLOBAL_PWRMGT_EN);
104757e252bfSMichael Neumann }
104857e252bfSMichael Neumann 
104957e252bfSMichael Neumann #if 0
105057e252bfSMichael Neumann static int ni_notify_hw_of_power_source(struct radeon_device *rdev,
105157e252bfSMichael Neumann 					bool ac_power)
105257e252bfSMichael Neumann {
105357e252bfSMichael Neumann 	if (ac_power)
105457e252bfSMichael Neumann 		return (rv770_send_msg_to_smc(rdev, PPSMC_MSG_RunningOnAC) == PPSMC_Result_OK) ?
105557e252bfSMichael Neumann 			0 : -EINVAL;
105657e252bfSMichael Neumann 
105757e252bfSMichael Neumann 	return 0;
105857e252bfSMichael Neumann }
105957e252bfSMichael Neumann #endif
106057e252bfSMichael Neumann 
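/*
 * SMC messages that take an argument pass it through the SMC_SCRATCH0
 * register: write the parameter first, then post the message via the
 * common rv770 mailbox helper.  For example, SetEnabledLevels with a
 * parameter of 1 is what ni_restrict_performance_levels_before_switch()
 * below uses to limit the SMC to one level before a state switch.
 */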
106157e252bfSMichael Neumann static PPSMC_Result ni_send_msg_to_smc_with_parameter(struct radeon_device *rdev,
106257e252bfSMichael Neumann 						      PPSMC_Msg msg, u32 parameter)
106357e252bfSMichael Neumann {
106457e252bfSMichael Neumann 	WREG32(SMC_SCRATCH0, parameter);
106557e252bfSMichael Neumann 	return rv770_send_msg_to_smc(rdev, msg);
106657e252bfSMichael Neumann }
106757e252bfSMichael Neumann 
106857e252bfSMichael Neumann static int ni_restrict_performance_levels_before_switch(struct radeon_device *rdev)
106957e252bfSMichael Neumann {
107057e252bfSMichael Neumann 	if (rv770_send_msg_to_smc(rdev, PPSMC_MSG_NoForcedLevel) != PPSMC_Result_OK)
107157e252bfSMichael Neumann 		return -EINVAL;
107257e252bfSMichael Neumann 
107357e252bfSMichael Neumann 	return (ni_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetEnabledLevels, 1) == PPSMC_Result_OK) ?
107457e252bfSMichael Neumann 		0 : -EINVAL;
107557e252bfSMichael Neumann }
107657e252bfSMichael Neumann 
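/*
 * Forced performance levels are implemented with two SMC knobs,
 * SetEnabledLevels and SetForcedLevels.  "high" enables all levels and
 * sets a forced level, "low" clears the forced level and leaves a single
 * enabled level, and "auto" clears both so the SMC may move between
 * levels on its own.  The requested level is remembered in
 * rdev->pm.dpm.forced_level.
 */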
107757e252bfSMichael Neumann int ni_dpm_force_performance_level(struct radeon_device *rdev,
107857e252bfSMichael Neumann 				   enum radeon_dpm_forced_level level)
107957e252bfSMichael Neumann {
108057e252bfSMichael Neumann 	if (level == RADEON_DPM_FORCED_LEVEL_HIGH) {
108157e252bfSMichael Neumann 		if (ni_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetEnabledLevels, 0) != PPSMC_Result_OK)
108257e252bfSMichael Neumann 			return -EINVAL;
108357e252bfSMichael Neumann 
108457e252bfSMichael Neumann 		if (ni_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetForcedLevels, 1) != PPSMC_Result_OK)
108557e252bfSMichael Neumann 			return -EINVAL;
108657e252bfSMichael Neumann 	} else if (level == RADEON_DPM_FORCED_LEVEL_LOW) {
108757e252bfSMichael Neumann 		if (ni_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetForcedLevels, 0) != PPSMC_Result_OK)
108857e252bfSMichael Neumann 			return -EINVAL;
108957e252bfSMichael Neumann 
109057e252bfSMichael Neumann 		if (ni_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetEnabledLevels, 1) != PPSMC_Result_OK)
109157e252bfSMichael Neumann 			return -EINVAL;
109257e252bfSMichael Neumann 	} else if (level == RADEON_DPM_FORCED_LEVEL_AUTO) {
109357e252bfSMichael Neumann 		if (ni_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetForcedLevels, 0) != PPSMC_Result_OK)
109457e252bfSMichael Neumann 			return -EINVAL;
109557e252bfSMichael Neumann 
109657e252bfSMichael Neumann 		if (ni_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetEnabledLevels, 0) != PPSMC_Result_OK)
109757e252bfSMichael Neumann 			return -EINVAL;
109857e252bfSMichael Neumann 	}
109957e252bfSMichael Neumann 
110057e252bfSMichael Neumann 	rdev->pm.dpm.forced_level = level;
110157e252bfSMichael Neumann 
110257e252bfSMichael Neumann 	return 0;
110357e252bfSMichael Neumann }
110457e252bfSMichael Neumann 
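/*
 * Stop the SMC: poll (up to rdev->usec_timeout microseconds) for the
 * LB_SYNC_RESET_SEL field to leave state 1, allow another 100us for the
 * hardware to settle, then halt the SMC via the shared r7xx helper.
 */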
110557e252bfSMichael Neumann static void ni_stop_smc(struct radeon_device *rdev)
110657e252bfSMichael Neumann {
110757e252bfSMichael Neumann 	u32 tmp;
110857e252bfSMichael Neumann 	int i;
110957e252bfSMichael Neumann 
111057e252bfSMichael Neumann 	for (i = 0; i < rdev->usec_timeout; i++) {
111157e252bfSMichael Neumann 		tmp = RREG32(LB_SYNC_RESET_SEL) & LB_SYNC_RESET_SEL_MASK;
111257e252bfSMichael Neumann 		if (tmp != 1)
111357e252bfSMichael Neumann 			break;
1114c4ef309bSzrj 		udelay(1);
111557e252bfSMichael Neumann 	}
111657e252bfSMichael Neumann 
1117c4ef309bSzrj 	udelay(100);
111857e252bfSMichael Neumann 
111957e252bfSMichael Neumann 	r7xx_stop_smc(rdev);
112057e252bfSMichael Neumann }
112157e252bfSMichael Neumann 
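/*
 * The SMC firmware places a header at a fixed SRAM location listing the
 * offsets of the tables the driver has to fill in (state table, soft
 * registers, MC register table, fan table, MC arb/refresh table, CAC
 * table, SPLL table).  Read each offset once and cache it for the later
 * table uploads.
 */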
112257e252bfSMichael Neumann static int ni_process_firmware_header(struct radeon_device *rdev)
112357e252bfSMichael Neumann {
112457e252bfSMichael Neumann 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
112557e252bfSMichael Neumann 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
112657e252bfSMichael Neumann 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
112757e252bfSMichael Neumann 	u32 tmp;
112857e252bfSMichael Neumann 	int ret;
112957e252bfSMichael Neumann 
113057e252bfSMichael Neumann 	ret = rv770_read_smc_sram_dword(rdev,
113157e252bfSMichael Neumann 					NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
113257e252bfSMichael Neumann 					NISLANDS_SMC_FIRMWARE_HEADER_stateTable,
113357e252bfSMichael Neumann 					&tmp, pi->sram_end);
113457e252bfSMichael Neumann 
113557e252bfSMichael Neumann 	if (ret)
113657e252bfSMichael Neumann 		return ret;
113757e252bfSMichael Neumann 
113857e252bfSMichael Neumann 	pi->state_table_start = (u16)tmp;
113957e252bfSMichael Neumann 
114057e252bfSMichael Neumann 	ret = rv770_read_smc_sram_dword(rdev,
114157e252bfSMichael Neumann 					NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
114257e252bfSMichael Neumann 					NISLANDS_SMC_FIRMWARE_HEADER_softRegisters,
114357e252bfSMichael Neumann 					&tmp, pi->sram_end);
114457e252bfSMichael Neumann 
114557e252bfSMichael Neumann 	if (ret)
114657e252bfSMichael Neumann 		return ret;
114757e252bfSMichael Neumann 
114857e252bfSMichael Neumann 	pi->soft_regs_start = (u16)tmp;
114957e252bfSMichael Neumann 
115057e252bfSMichael Neumann 	ret = rv770_read_smc_sram_dword(rdev,
115157e252bfSMichael Neumann 					NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
115257e252bfSMichael Neumann 					NISLANDS_SMC_FIRMWARE_HEADER_mcRegisterTable,
115357e252bfSMichael Neumann 					&tmp, pi->sram_end);
115457e252bfSMichael Neumann 
115557e252bfSMichael Neumann 	if (ret)
115657e252bfSMichael Neumann 		return ret;
115757e252bfSMichael Neumann 
115857e252bfSMichael Neumann 	eg_pi->mc_reg_table_start = (u16)tmp;
115957e252bfSMichael Neumann 
116057e252bfSMichael Neumann 	ret = rv770_read_smc_sram_dword(rdev,
116157e252bfSMichael Neumann 					NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
116257e252bfSMichael Neumann 					NISLANDS_SMC_FIRMWARE_HEADER_fanTable,
116357e252bfSMichael Neumann 					&tmp, pi->sram_end);
116457e252bfSMichael Neumann 
116557e252bfSMichael Neumann 	if (ret)
116657e252bfSMichael Neumann 		return ret;
116757e252bfSMichael Neumann 
116857e252bfSMichael Neumann 	ni_pi->fan_table_start = (u16)tmp;
116957e252bfSMichael Neumann 
117057e252bfSMichael Neumann 	ret = rv770_read_smc_sram_dword(rdev,
117157e252bfSMichael Neumann 					NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
117257e252bfSMichael Neumann 					NISLANDS_SMC_FIRMWARE_HEADER_mcArbDramAutoRefreshTable,
117357e252bfSMichael Neumann 					&tmp, pi->sram_end);
117457e252bfSMichael Neumann 
117557e252bfSMichael Neumann 	if (ret)
117657e252bfSMichael Neumann 		return ret;
117757e252bfSMichael Neumann 
117857e252bfSMichael Neumann 	ni_pi->arb_table_start = (u16)tmp;
117957e252bfSMichael Neumann 
118057e252bfSMichael Neumann 	ret = rv770_read_smc_sram_dword(rdev,
118157e252bfSMichael Neumann 					NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
118257e252bfSMichael Neumann 					NISLANDS_SMC_FIRMWARE_HEADER_cacTable,
118357e252bfSMichael Neumann 					&tmp, pi->sram_end);
118457e252bfSMichael Neumann 
118557e252bfSMichael Neumann 	if (ret)
118657e252bfSMichael Neumann 		return ret;
118757e252bfSMichael Neumann 
118857e252bfSMichael Neumann 	ni_pi->cac_table_start = (u16)tmp;
118957e252bfSMichael Neumann 
119057e252bfSMichael Neumann 	ret = rv770_read_smc_sram_dword(rdev,
119157e252bfSMichael Neumann 					NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
119257e252bfSMichael Neumann 					NISLANDS_SMC_FIRMWARE_HEADER_spllTable,
119357e252bfSMichael Neumann 					&tmp, pi->sram_end);
119457e252bfSMichael Neumann 
119557e252bfSMichael Neumann 	if (ret)
119657e252bfSMichael Neumann 		return ret;
119757e252bfSMichael Neumann 
119857e252bfSMichael Neumann 	ni_pi->spll_table_start = (u16)tmp;
119957e252bfSMichael Neumann 
120157e252bfSMichael Neumann 	return ret;
120257e252bfSMichael Neumann }
120357e252bfSMichael Neumann 
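/*
 * Snapshot the current SPLL/MPLL/MCLK control registers; the cached
 * values serve as the starting point when the SMC state table entries
 * (initial and ACPI states) are built further down.
 */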
120457e252bfSMichael Neumann static void ni_read_clock_registers(struct radeon_device *rdev)
120557e252bfSMichael Neumann {
120657e252bfSMichael Neumann 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
120757e252bfSMichael Neumann 
120857e252bfSMichael Neumann 	ni_pi->clock_registers.cg_spll_func_cntl = RREG32(CG_SPLL_FUNC_CNTL);
120957e252bfSMichael Neumann 	ni_pi->clock_registers.cg_spll_func_cntl_2 = RREG32(CG_SPLL_FUNC_CNTL_2);
121057e252bfSMichael Neumann 	ni_pi->clock_registers.cg_spll_func_cntl_3 = RREG32(CG_SPLL_FUNC_CNTL_3);
121157e252bfSMichael Neumann 	ni_pi->clock_registers.cg_spll_func_cntl_4 = RREG32(CG_SPLL_FUNC_CNTL_4);
121257e252bfSMichael Neumann 	ni_pi->clock_registers.cg_spll_spread_spectrum = RREG32(CG_SPLL_SPREAD_SPECTRUM);
121357e252bfSMichael Neumann 	ni_pi->clock_registers.cg_spll_spread_spectrum_2 = RREG32(CG_SPLL_SPREAD_SPECTRUM_2);
121457e252bfSMichael Neumann 	ni_pi->clock_registers.mpll_ad_func_cntl = RREG32(MPLL_AD_FUNC_CNTL);
121557e252bfSMichael Neumann 	ni_pi->clock_registers.mpll_ad_func_cntl_2 = RREG32(MPLL_AD_FUNC_CNTL_2);
121657e252bfSMichael Neumann 	ni_pi->clock_registers.mpll_dq_func_cntl = RREG32(MPLL_DQ_FUNC_CNTL);
121757e252bfSMichael Neumann 	ni_pi->clock_registers.mpll_dq_func_cntl_2 = RREG32(MPLL_DQ_FUNC_CNTL_2);
121857e252bfSMichael Neumann 	ni_pi->clock_registers.mclk_pwrmgt_cntl = RREG32(MCLK_PWRMGT_CNTL);
121957e252bfSMichael Neumann 	ni_pi->clock_registers.dll_cntl = RREG32(DLL_CNTL);
122057e252bfSMichael Neumann 	ni_pi->clock_registers.mpll_ss1 = RREG32(MPLL_SS1);
122157e252bfSMichael Neumann 	ni_pi->clock_registers.mpll_ss2 = RREG32(MPLL_SS2);
122257e252bfSMichael Neumann }
122357e252bfSMichael Neumann 
122457e252bfSMichael Neumann #if 0
122557e252bfSMichael Neumann static int ni_enter_ulp_state(struct radeon_device *rdev)
122657e252bfSMichael Neumann {
122757e252bfSMichael Neumann 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
122857e252bfSMichael Neumann 
122957e252bfSMichael Neumann 	if (pi->gfx_clock_gating) {
123057e252bfSMichael Neumann 		WREG32_P(SCLK_PWRMGT_CNTL, 0, ~DYN_GFX_CLK_OFF_EN);
123157e252bfSMichael Neumann 		WREG32_P(SCLK_PWRMGT_CNTL, GFX_CLK_FORCE_ON, ~GFX_CLK_FORCE_ON);
123257e252bfSMichael Neumann 		WREG32_P(SCLK_PWRMGT_CNTL, 0, ~GFX_CLK_FORCE_ON);
123357e252bfSMichael Neumann 		RREG32(GB_ADDR_CONFIG);
123457e252bfSMichael Neumann 	}
123557e252bfSMichael Neumann 
123657e252bfSMichael Neumann 	WREG32_P(SMC_MSG, HOST_SMC_MSG(PPSMC_MSG_SwitchToMinimumPower),
123757e252bfSMichael Neumann                  ~HOST_SMC_MSG_MASK);
123857e252bfSMichael Neumann 
1239c4ef309bSzrj 	udelay(25000);
124057e252bfSMichael Neumann 
124157e252bfSMichael Neumann 	return 0;
124257e252bfSMichael Neumann }
124357e252bfSMichael Neumann #endif
124457e252bfSMichael Neumann 
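/*
 * Convert the regulator, back-bias, ACPI and VBI response times into
 * reference clock ticks and export them to the SMC as soft registers.
 * The reference clock comes from radeon_get_xclk(); the divide by 1600
 * presumably folds the unit conversion between the ATOM-supplied times
 * and the xclk units into a single scaling step.
 */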
124557e252bfSMichael Neumann static void ni_program_response_times(struct radeon_device *rdev)
124657e252bfSMichael Neumann {
124757e252bfSMichael Neumann 	u32 voltage_response_time, backbias_response_time, acpi_delay_time, vbi_time_out;
124857e252bfSMichael Neumann 	u32 vddc_dly, bb_dly, acpi_dly, vbi_dly, mclk_switch_limit;
124957e252bfSMichael Neumann 	u32 reference_clock;
125057e252bfSMichael Neumann 
125157e252bfSMichael Neumann 	rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_mvdd_chg_time, 1);
125257e252bfSMichael Neumann 
125357e252bfSMichael Neumann 	voltage_response_time = (u32)rdev->pm.dpm.voltage_response_time;
125457e252bfSMichael Neumann 	backbias_response_time = (u32)rdev->pm.dpm.backbias_response_time;
125557e252bfSMichael Neumann 
125657e252bfSMichael Neumann 	if (voltage_response_time == 0)
125757e252bfSMichael Neumann 		voltage_response_time = 1000;
125857e252bfSMichael Neumann 
125957e252bfSMichael Neumann 	if (backbias_response_time == 0)
126057e252bfSMichael Neumann 		backbias_response_time = 1000;
126157e252bfSMichael Neumann 
126257e252bfSMichael Neumann 	acpi_delay_time = 15000;
126357e252bfSMichael Neumann 	vbi_time_out = 100000;
126457e252bfSMichael Neumann 
126557e252bfSMichael Neumann 	reference_clock = radeon_get_xclk(rdev);
126657e252bfSMichael Neumann 
126757e252bfSMichael Neumann 	vddc_dly = (voltage_response_time  * reference_clock) / 1600;
126857e252bfSMichael Neumann 	bb_dly   = (backbias_response_time * reference_clock) / 1600;
126957e252bfSMichael Neumann 	acpi_dly = (acpi_delay_time * reference_clock) / 1600;
127057e252bfSMichael Neumann 	vbi_dly  = (vbi_time_out * reference_clock) / 1600;
127157e252bfSMichael Neumann 
127257e252bfSMichael Neumann 	mclk_switch_limit = (460 * reference_clock) / 100;
127357e252bfSMichael Neumann 
127457e252bfSMichael Neumann 	rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_delay_vreg,  vddc_dly);
127557e252bfSMichael Neumann 	rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_delay_bbias, bb_dly);
127657e252bfSMichael Neumann 	rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_delay_acpi,  acpi_dly);
127757e252bfSMichael Neumann 	rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_mclk_chg_timeout, vbi_dly);
127857e252bfSMichael Neumann 	rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_mc_block_delay, 0xAA);
127957e252bfSMichael Neumann 	rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_mclk_switch_lim, mclk_switch_limit);
128057e252bfSMichael Neumann }
128157e252bfSMichael Neumann 
128257e252bfSMichael Neumann static void ni_populate_smc_voltage_table(struct radeon_device *rdev,
128357e252bfSMichael Neumann 					  struct atom_voltage_table *voltage_table,
128457e252bfSMichael Neumann 					  NISLANDS_SMC_STATETABLE *table)
128557e252bfSMichael Neumann {
128657e252bfSMichael Neumann 	unsigned int i;
128757e252bfSMichael Neumann 
128857e252bfSMichael Neumann 	for (i = 0; i < voltage_table->count; i++) {
128957e252bfSMichael Neumann 		table->highSMIO[i] = 0;
129057e252bfSMichael Neumann 		table->lowSMIO[i] |= cpu_to_be32(voltage_table->entries[i].smio_low);
129157e252bfSMichael Neumann 	}
129257e252bfSMichael Neumann }
129357e252bfSMichael Neumann 
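/*
 * Copy the SMIO settings of the VDDC (and, when present, VDDCI) voltage
 * tables into the SMC state table and record the index of the first VDDC
 * entry at or above max_vddc_in_table as maxVDDCIndexInPPTable.
 */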
129457e252bfSMichael Neumann static void ni_populate_smc_voltage_tables(struct radeon_device *rdev,
129557e252bfSMichael Neumann 					   NISLANDS_SMC_STATETABLE *table)
129657e252bfSMichael Neumann {
129757e252bfSMichael Neumann 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
129857e252bfSMichael Neumann 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
129957e252bfSMichael Neumann 	unsigned char i;
130057e252bfSMichael Neumann 
130157e252bfSMichael Neumann 	if (eg_pi->vddc_voltage_table.count) {
130257e252bfSMichael Neumann 		ni_populate_smc_voltage_table(rdev, &eg_pi->vddc_voltage_table, table);
130357e252bfSMichael Neumann 		table->voltageMaskTable.highMask[NISLANDS_SMC_VOLTAGEMASK_VDDC] = 0;
130457e252bfSMichael Neumann 		table->voltageMaskTable.lowMask[NISLANDS_SMC_VOLTAGEMASK_VDDC] =
130557e252bfSMichael Neumann 			cpu_to_be32(eg_pi->vddc_voltage_table.mask_low);
130657e252bfSMichael Neumann 
130757e252bfSMichael Neumann 		for (i = 0; i < eg_pi->vddc_voltage_table.count; i++) {
130857e252bfSMichael Neumann 			if (pi->max_vddc_in_table <= eg_pi->vddc_voltage_table.entries[i].value) {
130957e252bfSMichael Neumann 				table->maxVDDCIndexInPPTable = i;
131057e252bfSMichael Neumann 				break;
131157e252bfSMichael Neumann 			}
131257e252bfSMichael Neumann 		}
131357e252bfSMichael Neumann 	}
131457e252bfSMichael Neumann 
131557e252bfSMichael Neumann 	if (eg_pi->vddci_voltage_table.count) {
131657e252bfSMichael Neumann 		ni_populate_smc_voltage_table(rdev, &eg_pi->vddci_voltage_table, table);
131757e252bfSMichael Neumann 
131857e252bfSMichael Neumann 		table->voltageMaskTable.highMask[NISLANDS_SMC_VOLTAGEMASK_VDDCI] = 0;
131957e252bfSMichael Neumann 		table->voltageMaskTable.lowMask[NISLANDS_SMC_VOLTAGEMASK_VDDCI] =
1320*c6f73aabSFrançois Tigeot 			cpu_to_be32(eg_pi->vddci_voltage_table.mask_low);
132157e252bfSMichael Neumann 	}
132257e252bfSMichael Neumann }
132357e252bfSMichael Neumann 
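/*
 * Pick the first table entry whose voltage is >= the requested value and
 * report its index and (big-endian) voltage to the caller; requests above
 * every table entry fail with -EINVAL.
 */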
132457e252bfSMichael Neumann static int ni_populate_voltage_value(struct radeon_device *rdev,
132557e252bfSMichael Neumann 				     struct atom_voltage_table *table,
132657e252bfSMichael Neumann 				     u16 value,
132757e252bfSMichael Neumann 				     NISLANDS_SMC_VOLTAGE_VALUE *voltage)
132857e252bfSMichael Neumann {
132957e252bfSMichael Neumann 	unsigned int i;
133057e252bfSMichael Neumann 
133157e252bfSMichael Neumann 	for (i = 0; i < table->count; i++) {
133257e252bfSMichael Neumann 		if (value <= table->entries[i].value) {
133357e252bfSMichael Neumann 			voltage->index = (u8)i;
133457e252bfSMichael Neumann 			voltage->value = cpu_to_be16(table->entries[i].value);
133557e252bfSMichael Neumann 			break;
133657e252bfSMichael Neumann 		}
133757e252bfSMichael Neumann 	}
133857e252bfSMichael Neumann 
133957e252bfSMichael Neumann 	if (i >= table->count)
134057e252bfSMichael Neumann 		return -EINVAL;
134157e252bfSMichael Neumann 
134257e252bfSMichael Neumann 	return 0;
134357e252bfSMichael Neumann }
134457e252bfSMichael Neumann 
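/*
 * MVDD is a two-level control: without mvdd_control the high level is
 * always reported, otherwise the low level is used for memory clocks at
 * or below the split frequency and the high level above it.
 */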
134557e252bfSMichael Neumann static void ni_populate_mvdd_value(struct radeon_device *rdev,
134657e252bfSMichael Neumann 				   u32 mclk,
134757e252bfSMichael Neumann 				   NISLANDS_SMC_VOLTAGE_VALUE *voltage)
134857e252bfSMichael Neumann {
134957e252bfSMichael Neumann 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
135057e252bfSMichael Neumann 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
135157e252bfSMichael Neumann 
135257e252bfSMichael Neumann 	if (!pi->mvdd_control) {
135357e252bfSMichael Neumann 		voltage->index = eg_pi->mvdd_high_index;
135457e252bfSMichael Neumann 		voltage->value = cpu_to_be16(MVDD_HIGH_VALUE);
135557e252bfSMichael Neumann 		return;
135657e252bfSMichael Neumann 	}
135757e252bfSMichael Neumann 
135857e252bfSMichael Neumann 	if (mclk <= pi->mvdd_split_frequency) {
135957e252bfSMichael Neumann 		voltage->index = eg_pi->mvdd_low_index;
136057e252bfSMichael Neumann 		voltage->value = cpu_to_be16(MVDD_LOW_VALUE);
136157e252bfSMichael Neumann 	} else {
136257e252bfSMichael Neumann 		voltage->index = eg_pi->mvdd_high_index;
136357e252bfSMichael Neumann 		voltage->value = cpu_to_be16(MVDD_HIGH_VALUE);
136457e252bfSMichael Neumann 	}
136557e252bfSMichael Neumann }
136657e252bfSMichael Neumann 
136757e252bfSMichael Neumann static int ni_get_std_voltage_value(struct radeon_device *rdev,
136857e252bfSMichael Neumann 				    NISLANDS_SMC_VOLTAGE_VALUE *voltage,
136957e252bfSMichael Neumann 				    u16 *std_voltage)
137057e252bfSMichael Neumann {
137157e252bfSMichael Neumann 	if (rdev->pm.dpm.dyn_state.cac_leakage_table.entries &&
137257e252bfSMichael Neumann 	    ((u32)voltage->index < rdev->pm.dpm.dyn_state.cac_leakage_table.count))
137357e252bfSMichael Neumann 		*std_voltage = rdev->pm.dpm.dyn_state.cac_leakage_table.entries[voltage->index].vddc;
137457e252bfSMichael Neumann 	else
137557e252bfSMichael Neumann 		*std_voltage = be16_to_cpu(voltage->value);
137657e252bfSMichael Neumann 
137757e252bfSMichael Neumann 	return 0;
137857e252bfSMichael Neumann }
137957e252bfSMichael Neumann 
138057e252bfSMichael Neumann static void ni_populate_std_voltage_value(struct radeon_device *rdev,
138157e252bfSMichael Neumann 					  u16 value, u8 index,
138257e252bfSMichael Neumann 					  NISLANDS_SMC_VOLTAGE_VALUE *voltage)
138357e252bfSMichael Neumann {
138457e252bfSMichael Neumann 	voltage->index = index;
138557e252bfSMichael Neumann 	voltage->value = cpu_to_be16(value);
138657e252bfSMichael Neumann }
138757e252bfSMichael Neumann 
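/*
 * The SMC power scaling factor is the TID count from CG_CAC_CTRL scaled
 * by the xclk period (10^9 / xclk, further divided by 10^4).  Callers
 * treat a zero result as an error.
 */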
138857e252bfSMichael Neumann static u32 ni_get_smc_power_scaling_factor(struct radeon_device *rdev)
138957e252bfSMichael Neumann {
139057e252bfSMichael Neumann 	u32 xclk_period;
139157e252bfSMichael Neumann 	u32 xclk = radeon_get_xclk(rdev);
139257e252bfSMichael Neumann 	u32 tmp = RREG32(CG_CAC_CTRL) & TID_CNT_MASK;
139357e252bfSMichael Neumann 
139457e252bfSMichael Neumann 	xclk_period = (1000000000UL / xclk);
139557e252bfSMichael Neumann 	xclk_period /= 10000UL;
139657e252bfSMichael Neumann 
139757e252bfSMichael Neumann 	return tmp * xclk_period;
139857e252bfSMichael Neumann }
139957e252bfSMichael Neumann 
140057e252bfSMichael Neumann static u32 ni_scale_power_for_smc(u32 power_in_watts, u32 scaling_factor)
140157e252bfSMichael Neumann {
140257e252bfSMichael Neumann 	return (power_in_watts * scaling_factor) << 2;
140357e252bfSMichael Neumann }
140457e252bfSMichael Neumann 
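/*
 * Power boost limit: with power containment enabled and at least three
 * performance levels, scale the near-TDP limit by the square of the ratio
 * of the standardized VDDC of the second-highest level to that of the
 * highest level (dynamic power ~ V^2), with a 10% margin:
 *
 *   boost = near_tdp_limit * std_vddc_med^2 * 90 / (std_vddc_high^2 * 100)
 *
 * Any lookup failure or overflow simply disables the boost (returns 0).
 */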
140557e252bfSMichael Neumann static u32 ni_calculate_power_boost_limit(struct radeon_device *rdev,
140657e252bfSMichael Neumann 					  struct radeon_ps *radeon_state,
140757e252bfSMichael Neumann 					  u32 near_tdp_limit)
140857e252bfSMichael Neumann {
140957e252bfSMichael Neumann 	struct ni_ps *state = ni_get_ps(radeon_state);
141057e252bfSMichael Neumann 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
141157e252bfSMichael Neumann 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
141257e252bfSMichael Neumann 	u32 power_boost_limit = 0;
141357e252bfSMichael Neumann 	int ret;
141457e252bfSMichael Neumann 
141557e252bfSMichael Neumann 	if (ni_pi->enable_power_containment &&
141657e252bfSMichael Neumann 	    ni_pi->use_power_boost_limit) {
141757e252bfSMichael Neumann 		NISLANDS_SMC_VOLTAGE_VALUE vddc;
141857e252bfSMichael Neumann 		u16 std_vddc_med;
141957e252bfSMichael Neumann 		u16 std_vddc_high;
142057e252bfSMichael Neumann 		u64 tmp, n, d;
142157e252bfSMichael Neumann 
142257e252bfSMichael Neumann 		if (state->performance_level_count < 3)
142357e252bfSMichael Neumann 			return 0;
142457e252bfSMichael Neumann 
142557e252bfSMichael Neumann 		ret = ni_populate_voltage_value(rdev, &eg_pi->vddc_voltage_table,
142657e252bfSMichael Neumann 						state->performance_levels[state->performance_level_count - 2].vddc,
142757e252bfSMichael Neumann 						&vddc);
142857e252bfSMichael Neumann 		if (ret)
142957e252bfSMichael Neumann 			return 0;
143057e252bfSMichael Neumann 
143157e252bfSMichael Neumann 		ret = ni_get_std_voltage_value(rdev, &vddc, &std_vddc_med);
143257e252bfSMichael Neumann 		if (ret)
143357e252bfSMichael Neumann 			return 0;
143457e252bfSMichael Neumann 
143557e252bfSMichael Neumann 		ret = ni_populate_voltage_value(rdev, &eg_pi->vddc_voltage_table,
143657e252bfSMichael Neumann 						state->performance_levels[state->performance_level_count - 1].vddc,
143757e252bfSMichael Neumann 						&vddc);
143857e252bfSMichael Neumann 		if (ret)
143957e252bfSMichael Neumann 			return 0;
144057e252bfSMichael Neumann 
144157e252bfSMichael Neumann 		ret = ni_get_std_voltage_value(rdev, &vddc, &std_vddc_high);
144257e252bfSMichael Neumann 		if (ret)
144357e252bfSMichael Neumann 			return 0;
144457e252bfSMichael Neumann 
144557e252bfSMichael Neumann 		n = ((u64)near_tdp_limit * ((u64)std_vddc_med * (u64)std_vddc_med) * 90);
144657e252bfSMichael Neumann 		d = ((u64)std_vddc_high * (u64)std_vddc_high * 100);
144757e252bfSMichael Neumann 		tmp = div64_u64(n, d);
144857e252bfSMichael Neumann 
144957e252bfSMichael Neumann 		if (tmp >> 32)
145057e252bfSMichael Neumann 			return 0;
145157e252bfSMichael Neumann 		power_boost_limit = (u32)tmp;
145257e252bfSMichael Neumann 	}
145357e252bfSMichael Neumann 
145457e252bfSMichael Neumann 	return power_boost_limit;
145557e252bfSMichael Neumann }
145657e252bfSMichael Neumann 
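/*
 * Apply the overdrive TDP adjustment (a percentage of the base TDP) in
 * the requested direction, and shift the near-TDP limit by the same
 * absolute amount so the gap between the two limits is preserved.
 */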
145757e252bfSMichael Neumann static int ni_calculate_adjusted_tdp_limits(struct radeon_device *rdev,
145857e252bfSMichael Neumann 					    bool adjust_polarity,
145957e252bfSMichael Neumann 					    u32 tdp_adjustment,
146057e252bfSMichael Neumann 					    u32 *tdp_limit,
146157e252bfSMichael Neumann 					    u32 *near_tdp_limit)
146257e252bfSMichael Neumann {
146357e252bfSMichael Neumann 	if (tdp_adjustment > (u32)rdev->pm.dpm.tdp_od_limit)
146457e252bfSMichael Neumann 		return -EINVAL;
146557e252bfSMichael Neumann 
146657e252bfSMichael Neumann 	if (adjust_polarity) {
146757e252bfSMichael Neumann 		*tdp_limit = ((100 + tdp_adjustment) * rdev->pm.dpm.tdp_limit) / 100;
146857e252bfSMichael Neumann 		*near_tdp_limit = rdev->pm.dpm.near_tdp_limit + (*tdp_limit - rdev->pm.dpm.tdp_limit);
146957e252bfSMichael Neumann 	} else {
147057e252bfSMichael Neumann 		*tdp_limit = ((100 - tdp_adjustment) * rdev->pm.dpm.tdp_limit) / 100;
147157e252bfSMichael Neumann 		*near_tdp_limit = rdev->pm.dpm.near_tdp_limit - (rdev->pm.dpm.tdp_limit - *tdp_limit);
147257e252bfSMichael Neumann 	}
147357e252bfSMichael Neumann 
147457e252bfSMichael Neumann 	return 0;
147557e252bfSMichael Neumann }
147657e252bfSMichael Neumann 
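/*
 * Upload the DPM2 power containment parameters: convert the adjusted TDP,
 * near-TDP, safe and boost limits into SMC units and copy the four limit
 * dwords into the dpm2Params block of the state table in SMC RAM.
 */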
147757e252bfSMichael Neumann static int ni_populate_smc_tdp_limits(struct radeon_device *rdev,
147857e252bfSMichael Neumann 				      struct radeon_ps *radeon_state)
147957e252bfSMichael Neumann {
148057e252bfSMichael Neumann 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
148157e252bfSMichael Neumann 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
148257e252bfSMichael Neumann 
148357e252bfSMichael Neumann 	if (ni_pi->enable_power_containment) {
148457e252bfSMichael Neumann 		NISLANDS_SMC_STATETABLE *smc_table = &ni_pi->smc_statetable;
148557e252bfSMichael Neumann 		u32 scaling_factor = ni_get_smc_power_scaling_factor(rdev);
148657e252bfSMichael Neumann 		u32 tdp_limit;
148757e252bfSMichael Neumann 		u32 near_tdp_limit;
148857e252bfSMichael Neumann 		u32 power_boost_limit;
148957e252bfSMichael Neumann 		int ret;
149057e252bfSMichael Neumann 
149157e252bfSMichael Neumann 		if (scaling_factor == 0)
149257e252bfSMichael Neumann 			return -EINVAL;
149357e252bfSMichael Neumann 
149457e252bfSMichael Neumann 		memset(smc_table, 0, sizeof(NISLANDS_SMC_STATETABLE));
149557e252bfSMichael Neumann 
149657e252bfSMichael Neumann 		ret = ni_calculate_adjusted_tdp_limits(rdev,
149757e252bfSMichael Neumann 						       false, /* ??? */
149857e252bfSMichael Neumann 						       rdev->pm.dpm.tdp_adjustment,
149957e252bfSMichael Neumann 						       &tdp_limit,
150057e252bfSMichael Neumann 						       &near_tdp_limit);
150157e252bfSMichael Neumann 		if (ret)
150257e252bfSMichael Neumann 			return ret;
150357e252bfSMichael Neumann 
150457e252bfSMichael Neumann 		power_boost_limit = ni_calculate_power_boost_limit(rdev, radeon_state,
150557e252bfSMichael Neumann 								   near_tdp_limit);
150657e252bfSMichael Neumann 
150757e252bfSMichael Neumann 		smc_table->dpm2Params.TDPLimit =
150857e252bfSMichael Neumann 			cpu_to_be32(ni_scale_power_for_smc(tdp_limit, scaling_factor));
150957e252bfSMichael Neumann 		smc_table->dpm2Params.NearTDPLimit =
151057e252bfSMichael Neumann 			cpu_to_be32(ni_scale_power_for_smc(near_tdp_limit, scaling_factor));
151157e252bfSMichael Neumann 		smc_table->dpm2Params.SafePowerLimit =
151257e252bfSMichael Neumann 			cpu_to_be32(ni_scale_power_for_smc((near_tdp_limit * NISLANDS_DPM2_TDP_SAFE_LIMIT_PERCENT) / 100,
151357e252bfSMichael Neumann 							   scaling_factor));
151457e252bfSMichael Neumann 		smc_table->dpm2Params.PowerBoostLimit =
151557e252bfSMichael Neumann 			cpu_to_be32(ni_scale_power_for_smc(power_boost_limit, scaling_factor));
151657e252bfSMichael Neumann 
151757e252bfSMichael Neumann 		ret = rv770_copy_bytes_to_smc(rdev,
151857e252bfSMichael Neumann 					      (u16)(pi->state_table_start + offsetof(NISLANDS_SMC_STATETABLE, dpm2Params) +
151957e252bfSMichael Neumann 						    offsetof(PP_NIslands_DPM2Parameters, TDPLimit)),
152057e252bfSMichael Neumann 					      (u8 *)(&smc_table->dpm2Params.TDPLimit),
152157e252bfSMichael Neumann 					      sizeof(u32) * 4, pi->sram_end);
152257e252bfSMichael Neumann 		if (ret)
152357e252bfSMichael Neumann 			return ret;
152457e252bfSMichael Neumann 	}
152557e252bfSMichael Neumann 
152657e252bfSMichael Neumann 	return 0;
152757e252bfSMichael Neumann }
152857e252bfSMichael Neumann 
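/*
 * The memory controller keeps four arbiter register sets (F0-F3).  Copy
 * the DRAM timing and burst time values from the source set into the
 * destination set, then request a switch to the destination set through
 * MC_CG_CONFIG/MC_ARB_CG.
 */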
152957e252bfSMichael Neumann int ni_copy_and_switch_arb_sets(struct radeon_device *rdev,
153057e252bfSMichael Neumann 				u32 arb_freq_src, u32 arb_freq_dest)
153157e252bfSMichael Neumann {
153257e252bfSMichael Neumann 	u32 mc_arb_dram_timing;
153357e252bfSMichael Neumann 	u32 mc_arb_dram_timing2;
153457e252bfSMichael Neumann 	u32 burst_time;
153557e252bfSMichael Neumann 	u32 mc_cg_config;
153657e252bfSMichael Neumann 
153757e252bfSMichael Neumann 	switch (arb_freq_src) {
153857e252bfSMichael Neumann 	case MC_CG_ARB_FREQ_F0:
153957e252bfSMichael Neumann 		mc_arb_dram_timing  = RREG32(MC_ARB_DRAM_TIMING);
154057e252bfSMichael Neumann 		mc_arb_dram_timing2 = RREG32(MC_ARB_DRAM_TIMING2);
154157e252bfSMichael Neumann 		burst_time = (RREG32(MC_ARB_BURST_TIME) & STATE0_MASK) >> STATE0_SHIFT;
154257e252bfSMichael Neumann 		break;
154357e252bfSMichael Neumann 	case MC_CG_ARB_FREQ_F1:
154457e252bfSMichael Neumann 		mc_arb_dram_timing  = RREG32(MC_ARB_DRAM_TIMING_1);
154557e252bfSMichael Neumann 		mc_arb_dram_timing2 = RREG32(MC_ARB_DRAM_TIMING2_1);
154657e252bfSMichael Neumann 		burst_time = (RREG32(MC_ARB_BURST_TIME) & STATE1_MASK) >> STATE1_SHIFT;
154757e252bfSMichael Neumann 		break;
154857e252bfSMichael Neumann 	case MC_CG_ARB_FREQ_F2:
154957e252bfSMichael Neumann 		mc_arb_dram_timing  = RREG32(MC_ARB_DRAM_TIMING_2);
155057e252bfSMichael Neumann 		mc_arb_dram_timing2 = RREG32(MC_ARB_DRAM_TIMING2_2);
155157e252bfSMichael Neumann 		burst_time = (RREG32(MC_ARB_BURST_TIME) & STATE2_MASK) >> STATE2_SHIFT;
155257e252bfSMichael Neumann 		break;
155357e252bfSMichael Neumann 	case MC_CG_ARB_FREQ_F3:
155457e252bfSMichael Neumann 		mc_arb_dram_timing  = RREG32(MC_ARB_DRAM_TIMING_3);
155557e252bfSMichael Neumann 		mc_arb_dram_timing2 = RREG32(MC_ARB_DRAM_TIMING2_3);
155657e252bfSMichael Neumann 		burst_time = (RREG32(MC_ARB_BURST_TIME) & STATE3_MASK) >> STATE3_SHIFT;
155757e252bfSMichael Neumann 		break;
155857e252bfSMichael Neumann 	default:
155957e252bfSMichael Neumann 		return -EINVAL;
156057e252bfSMichael Neumann 	}
156157e252bfSMichael Neumann 
156257e252bfSMichael Neumann 	switch (arb_freq_dest) {
156357e252bfSMichael Neumann 	case MC_CG_ARB_FREQ_F0:
156457e252bfSMichael Neumann 		WREG32(MC_ARB_DRAM_TIMING, mc_arb_dram_timing);
156557e252bfSMichael Neumann 		WREG32(MC_ARB_DRAM_TIMING2, mc_arb_dram_timing2);
156657e252bfSMichael Neumann 		WREG32_P(MC_ARB_BURST_TIME, STATE0(burst_time), ~STATE0_MASK);
156757e252bfSMichael Neumann 		break;
156857e252bfSMichael Neumann 	case MC_CG_ARB_FREQ_F1:
156957e252bfSMichael Neumann 		WREG32(MC_ARB_DRAM_TIMING_1, mc_arb_dram_timing);
157057e252bfSMichael Neumann 		WREG32(MC_ARB_DRAM_TIMING2_1, mc_arb_dram_timing2);
157157e252bfSMichael Neumann 		WREG32_P(MC_ARB_BURST_TIME, STATE1(burst_time), ~STATE1_MASK);
157257e252bfSMichael Neumann 		break;
157357e252bfSMichael Neumann 	case MC_CG_ARB_FREQ_F2:
157457e252bfSMichael Neumann 		WREG32(MC_ARB_DRAM_TIMING_2, mc_arb_dram_timing);
157557e252bfSMichael Neumann 		WREG32(MC_ARB_DRAM_TIMING2_2, mc_arb_dram_timing2);
157657e252bfSMichael Neumann 		WREG32_P(MC_ARB_BURST_TIME, STATE2(burst_time), ~STATE2_MASK);
157757e252bfSMichael Neumann 		break;
157857e252bfSMichael Neumann 	case MC_CG_ARB_FREQ_F3:
157957e252bfSMichael Neumann 		WREG32(MC_ARB_DRAM_TIMING_3, mc_arb_dram_timing);
158057e252bfSMichael Neumann 		WREG32(MC_ARB_DRAM_TIMING2_3, mc_arb_dram_timing2);
158157e252bfSMichael Neumann 		WREG32_P(MC_ARB_BURST_TIME, STATE3(burst_time), ~STATE3_MASK);
158257e252bfSMichael Neumann 		break;
158357e252bfSMichael Neumann 	default:
158457e252bfSMichael Neumann 		return -EINVAL;
158557e252bfSMichael Neumann 	}
158657e252bfSMichael Neumann 
158757e252bfSMichael Neumann 	mc_cg_config = RREG32(MC_CG_CONFIG) | 0x0000000F;
158857e252bfSMichael Neumann 	WREG32(MC_CG_CONFIG, mc_cg_config);
158957e252bfSMichael Neumann 	WREG32_P(MC_ARB_CG, CG_ARB_REQ(arb_freq_dest), ~CG_ARB_REQ_MASK);
159057e252bfSMichael Neumann 
159157e252bfSMichael Neumann 	return 0;
159257e252bfSMichael Neumann }
159357e252bfSMichael Neumann 
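/*
 * The top byte of the first dword of the SMC MC-arb table appears to
 * track which arbiter register set is in use; initialize it to F1 to
 * match the F0->F1 switch done by ni_initial_switch_from_arb_f0_to_f1()
 * below.
 */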
159457e252bfSMichael Neumann static int ni_init_arb_table_index(struct radeon_device *rdev)
159557e252bfSMichael Neumann {
159657e252bfSMichael Neumann 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
159757e252bfSMichael Neumann 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
159857e252bfSMichael Neumann 	u32 tmp;
159957e252bfSMichael Neumann 	int ret;
160057e252bfSMichael Neumann 
160157e252bfSMichael Neumann 	ret = rv770_read_smc_sram_dword(rdev, ni_pi->arb_table_start,
160257e252bfSMichael Neumann 					&tmp, pi->sram_end);
160357e252bfSMichael Neumann 	if (ret)
160457e252bfSMichael Neumann 		return ret;
160557e252bfSMichael Neumann 
160657e252bfSMichael Neumann 	tmp &= 0x00FFFFFF;
160757e252bfSMichael Neumann 	tmp |= ((u32)MC_CG_ARB_FREQ_F1) << 24;
160857e252bfSMichael Neumann 
160957e252bfSMichael Neumann 	return rv770_write_smc_sram_dword(rdev, ni_pi->arb_table_start,
161057e252bfSMichael Neumann 					  tmp, pi->sram_end);
161157e252bfSMichael Neumann }
161257e252bfSMichael Neumann 
161357e252bfSMichael Neumann static int ni_initial_switch_from_arb_f0_to_f1(struct radeon_device *rdev)
161457e252bfSMichael Neumann {
161557e252bfSMichael Neumann 	return ni_copy_and_switch_arb_sets(rdev, MC_CG_ARB_FREQ_F0, MC_CG_ARB_FREQ_F1);
161657e252bfSMichael Neumann }
161757e252bfSMichael Neumann 
161857e252bfSMichael Neumann static int ni_force_switch_to_arb_f0(struct radeon_device *rdev)
161957e252bfSMichael Neumann {
162057e252bfSMichael Neumann 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
162157e252bfSMichael Neumann 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
162257e252bfSMichael Neumann 	u32 tmp;
162357e252bfSMichael Neumann 	int ret;
162457e252bfSMichael Neumann 
162557e252bfSMichael Neumann 	ret = rv770_read_smc_sram_dword(rdev, ni_pi->arb_table_start,
162657e252bfSMichael Neumann 					&tmp, pi->sram_end);
162757e252bfSMichael Neumann 	if (ret)
162857e252bfSMichael Neumann 		return ret;
162957e252bfSMichael Neumann 
163057e252bfSMichael Neumann 	tmp = (tmp >> 24) & 0xff;
163157e252bfSMichael Neumann 
163257e252bfSMichael Neumann 	if (tmp == MC_CG_ARB_FREQ_F0)
163357e252bfSMichael Neumann 		return 0;
163457e252bfSMichael Neumann 
163557e252bfSMichael Neumann 	return ni_copy_and_switch_arb_sets(rdev, tmp, MC_CG_ARB_FREQ_F0);
163657e252bfSMichael Neumann }
163757e252bfSMichael Neumann 
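/*
 * Build one SMC MC-arb register set for a performance level: derive the
 * refresh rate from the level's sclk, have the ATOM tables program DRAM
 * timings for the sclk/mclk pair, and capture the resulting
 * MC_ARB_DRAM_TIMING/TIMING2 values in big-endian form for the SMC.
 */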
163857e252bfSMichael Neumann static int ni_populate_memory_timing_parameters(struct radeon_device *rdev,
163957e252bfSMichael Neumann 						struct rv7xx_pl *pl,
164057e252bfSMichael Neumann 						SMC_NIslands_MCArbDramTimingRegisterSet *arb_regs)
164157e252bfSMichael Neumann {
164257e252bfSMichael Neumann 	u32 dram_timing;
164357e252bfSMichael Neumann 	u32 dram_timing2;
164457e252bfSMichael Neumann 
164557e252bfSMichael Neumann 	arb_regs->mc_arb_rfsh_rate =
164657e252bfSMichael Neumann 		(u8)rv770_calculate_memory_refresh_rate(rdev, pl->sclk);
164757e252bfSMichael Neumann 
164957e252bfSMichael Neumann 	radeon_atom_set_engine_dram_timings(rdev,
165057e252bfSMichael Neumann                                             pl->sclk,
165157e252bfSMichael Neumann                                             pl->mclk);
165257e252bfSMichael Neumann 
165357e252bfSMichael Neumann 	dram_timing = RREG32(MC_ARB_DRAM_TIMING);
165457e252bfSMichael Neumann 	dram_timing2 = RREG32(MC_ARB_DRAM_TIMING2);
165557e252bfSMichael Neumann 
165657e252bfSMichael Neumann 	arb_regs->mc_arb_dram_timing  = cpu_to_be32(dram_timing);
165757e252bfSMichael Neumann 	arb_regs->mc_arb_dram_timing2 = cpu_to_be32(dram_timing2);
165857e252bfSMichael Neumann 
165957e252bfSMichael Neumann 	return 0;
166057e252bfSMichael Neumann }
166157e252bfSMichael Neumann 
166257e252bfSMichael Neumann static int ni_do_program_memory_timing_parameters(struct radeon_device *rdev,
166357e252bfSMichael Neumann 						  struct radeon_ps *radeon_state,
166457e252bfSMichael Neumann 						  unsigned int first_arb_set)
166557e252bfSMichael Neumann {
166657e252bfSMichael Neumann 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
166757e252bfSMichael Neumann 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
166857e252bfSMichael Neumann 	struct ni_ps *state = ni_get_ps(radeon_state);
166957e252bfSMichael Neumann 	SMC_NIslands_MCArbDramTimingRegisterSet arb_regs = { 0 };
167057e252bfSMichael Neumann 	int i, ret = 0;
167157e252bfSMichael Neumann 
167257e252bfSMichael Neumann 	for (i = 0; i < state->performance_level_count; i++) {
167357e252bfSMichael Neumann 		ret = ni_populate_memory_timing_parameters(rdev, &state->performance_levels[i], &arb_regs);
167457e252bfSMichael Neumann 		if (ret)
167557e252bfSMichael Neumann 			break;
167657e252bfSMichael Neumann 
167757e252bfSMichael Neumann 		ret = rv770_copy_bytes_to_smc(rdev,
167857e252bfSMichael Neumann 					      (u16)(ni_pi->arb_table_start +
167957e252bfSMichael Neumann 						    offsetof(SMC_NIslands_MCArbDramTimingRegisters, data) +
168057e252bfSMichael Neumann 						    sizeof(SMC_NIslands_MCArbDramTimingRegisterSet) * (first_arb_set + i)),
168157e252bfSMichael Neumann 					      (u8 *)&arb_regs,
168257e252bfSMichael Neumann 					      (u16)sizeof(SMC_NIslands_MCArbDramTimingRegisterSet),
168357e252bfSMichael Neumann 					      pi->sram_end);
168457e252bfSMichael Neumann 		if (ret)
168557e252bfSMichael Neumann 			break;
168657e252bfSMichael Neumann 	}
168757e252bfSMichael Neumann 	return ret;
168857e252bfSMichael Neumann }
168957e252bfSMichael Neumann 
169057e252bfSMichael Neumann static int ni_program_memory_timing_parameters(struct radeon_device *rdev,
169157e252bfSMichael Neumann 					       struct radeon_ps *radeon_new_state)
169257e252bfSMichael Neumann {
169357e252bfSMichael Neumann 	return ni_do_program_memory_timing_parameters(rdev, radeon_new_state,
169457e252bfSMichael Neumann 						      NISLANDS_DRIVER_STATE_ARB_INDEX);
169557e252bfSMichael Neumann }
169657e252bfSMichael Neumann 
169757e252bfSMichael Neumann static void ni_populate_initial_mvdd_value(struct radeon_device *rdev,
169857e252bfSMichael Neumann 					   struct NISLANDS_SMC_VOLTAGE_VALUE *voltage)
169957e252bfSMichael Neumann {
170057e252bfSMichael Neumann 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
170157e252bfSMichael Neumann 
170257e252bfSMichael Neumann 	voltage->index = eg_pi->mvdd_high_index;
170357e252bfSMichael Neumann 	voltage->value = cpu_to_be16(MVDD_HIGH_VALUE);
170457e252bfSMichael Neumann }
170557e252bfSMichael Neumann 
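/*
 * The initial SMC state mirrors the boot performance level: it reuses the
 * clock registers captured by ni_read_clock_registers(), the boot
 * sclk/mclk/VDDC values and conservative DPM2 settings, and points at the
 * dedicated initial-state MC arb index.
 */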
170657e252bfSMichael Neumann static int ni_populate_smc_initial_state(struct radeon_device *rdev,
170757e252bfSMichael Neumann 					 struct radeon_ps *radeon_initial_state,
170857e252bfSMichael Neumann 					 NISLANDS_SMC_STATETABLE *table)
170957e252bfSMichael Neumann {
171057e252bfSMichael Neumann 	struct ni_ps *initial_state = ni_get_ps(radeon_initial_state);
171157e252bfSMichael Neumann 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
171257e252bfSMichael Neumann 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
171357e252bfSMichael Neumann 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
171457e252bfSMichael Neumann 	u32 reg;
171557e252bfSMichael Neumann 	int ret;
171657e252bfSMichael Neumann 
171757e252bfSMichael Neumann 	table->initialState.levels[0].mclk.vMPLL_AD_FUNC_CNTL =
171857e252bfSMichael Neumann 		cpu_to_be32(ni_pi->clock_registers.mpll_ad_func_cntl);
171957e252bfSMichael Neumann 	table->initialState.levels[0].mclk.vMPLL_AD_FUNC_CNTL_2 =
172057e252bfSMichael Neumann 		cpu_to_be32(ni_pi->clock_registers.mpll_ad_func_cntl_2);
172157e252bfSMichael Neumann 	table->initialState.levels[0].mclk.vMPLL_DQ_FUNC_CNTL =
172257e252bfSMichael Neumann 		cpu_to_be32(ni_pi->clock_registers.mpll_dq_func_cntl);
172357e252bfSMichael Neumann 	table->initialState.levels[0].mclk.vMPLL_DQ_FUNC_CNTL_2 =
172457e252bfSMichael Neumann 		cpu_to_be32(ni_pi->clock_registers.mpll_dq_func_cntl_2);
172557e252bfSMichael Neumann 	table->initialState.levels[0].mclk.vMCLK_PWRMGT_CNTL =
172657e252bfSMichael Neumann 		cpu_to_be32(ni_pi->clock_registers.mclk_pwrmgt_cntl);
172757e252bfSMichael Neumann 	table->initialState.levels[0].mclk.vDLL_CNTL =
172857e252bfSMichael Neumann 		cpu_to_be32(ni_pi->clock_registers.dll_cntl);
172957e252bfSMichael Neumann 	table->initialState.levels[0].mclk.vMPLL_SS =
173057e252bfSMichael Neumann 		cpu_to_be32(ni_pi->clock_registers.mpll_ss1);
173157e252bfSMichael Neumann 	table->initialState.levels[0].mclk.vMPLL_SS2 =
173257e252bfSMichael Neumann 		cpu_to_be32(ni_pi->clock_registers.mpll_ss2);
173357e252bfSMichael Neumann 	table->initialState.levels[0].mclk.mclk_value =
173457e252bfSMichael Neumann 		cpu_to_be32(initial_state->performance_levels[0].mclk);
173557e252bfSMichael Neumann 
173657e252bfSMichael Neumann 	table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL =
173757e252bfSMichael Neumann 		cpu_to_be32(ni_pi->clock_registers.cg_spll_func_cntl);
173857e252bfSMichael Neumann 	table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_2 =
173957e252bfSMichael Neumann 		cpu_to_be32(ni_pi->clock_registers.cg_spll_func_cntl_2);
174057e252bfSMichael Neumann 	table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_3 =
174157e252bfSMichael Neumann 		cpu_to_be32(ni_pi->clock_registers.cg_spll_func_cntl_3);
174257e252bfSMichael Neumann 	table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_4 =
174357e252bfSMichael Neumann 		cpu_to_be32(ni_pi->clock_registers.cg_spll_func_cntl_4);
174457e252bfSMichael Neumann 	table->initialState.levels[0].sclk.vCG_SPLL_SPREAD_SPECTRUM =
174557e252bfSMichael Neumann 		cpu_to_be32(ni_pi->clock_registers.cg_spll_spread_spectrum);
174657e252bfSMichael Neumann 	table->initialState.levels[0].sclk.vCG_SPLL_SPREAD_SPECTRUM_2 =
174757e252bfSMichael Neumann 		cpu_to_be32(ni_pi->clock_registers.cg_spll_spread_spectrum_2);
174857e252bfSMichael Neumann 	table->initialState.levels[0].sclk.sclk_value =
174957e252bfSMichael Neumann 		cpu_to_be32(initial_state->performance_levels[0].sclk);
175057e252bfSMichael Neumann 	table->initialState.levels[0].arbRefreshState =
175157e252bfSMichael Neumann 		NISLANDS_INITIAL_STATE_ARB_INDEX;
175257e252bfSMichael Neumann 
175357e252bfSMichael Neumann 	table->initialState.levels[0].ACIndex = 0;
175457e252bfSMichael Neumann 
175557e252bfSMichael Neumann 	ret = ni_populate_voltage_value(rdev, &eg_pi->vddc_voltage_table,
175657e252bfSMichael Neumann 					initial_state->performance_levels[0].vddc,
175757e252bfSMichael Neumann 					&table->initialState.levels[0].vddc);
175857e252bfSMichael Neumann 	if (!ret) {
175957e252bfSMichael Neumann 		u16 std_vddc;
176057e252bfSMichael Neumann 
176157e252bfSMichael Neumann 		ret = ni_get_std_voltage_value(rdev,
176257e252bfSMichael Neumann 					       &table->initialState.levels[0].vddc,
176357e252bfSMichael Neumann 					       &std_vddc);
176457e252bfSMichael Neumann 		if (!ret)
176557e252bfSMichael Neumann 			ni_populate_std_voltage_value(rdev, std_vddc,
176657e252bfSMichael Neumann 						      table->initialState.levels[0].vddc.index,
176757e252bfSMichael Neumann 						      &table->initialState.levels[0].std_vddc);
176857e252bfSMichael Neumann 	}
176957e252bfSMichael Neumann 
177057e252bfSMichael Neumann 	if (eg_pi->vddci_control)
177157e252bfSMichael Neumann 		ni_populate_voltage_value(rdev,
177257e252bfSMichael Neumann 					  &eg_pi->vddci_voltage_table,
177357e252bfSMichael Neumann 					  initial_state->performance_levels[0].vddci,
177457e252bfSMichael Neumann 					  &table->initialState.levels[0].vddci);
177557e252bfSMichael Neumann 
177657e252bfSMichael Neumann 	ni_populate_initial_mvdd_value(rdev, &table->initialState.levels[0].mvdd);
177757e252bfSMichael Neumann 
177857e252bfSMichael Neumann 	reg = CG_R(0xffff) | CG_L(0);
177957e252bfSMichael Neumann 	table->initialState.levels[0].aT = cpu_to_be32(reg);
178057e252bfSMichael Neumann 
178157e252bfSMichael Neumann 	table->initialState.levels[0].bSP = cpu_to_be32(pi->dsp);
178257e252bfSMichael Neumann 
178357e252bfSMichael Neumann 	if (pi->boot_in_gen2)
178457e252bfSMichael Neumann 		table->initialState.levels[0].gen2PCIE = 1;
178557e252bfSMichael Neumann 	else
178657e252bfSMichael Neumann 		table->initialState.levels[0].gen2PCIE = 0;
178757e252bfSMichael Neumann 
178857e252bfSMichael Neumann 	if (pi->mem_gddr5) {
178957e252bfSMichael Neumann 		table->initialState.levels[0].strobeMode =
179057e252bfSMichael Neumann 			cypress_get_strobe_mode_settings(rdev,
179157e252bfSMichael Neumann 							 initial_state->performance_levels[0].mclk);
179257e252bfSMichael Neumann 
179357e252bfSMichael Neumann 		if (initial_state->performance_levels[0].mclk > pi->mclk_edc_enable_threshold)
179457e252bfSMichael Neumann 			table->initialState.levels[0].mcFlags = NISLANDS_SMC_MC_EDC_RD_FLAG | NISLANDS_SMC_MC_EDC_WR_FLAG;
179557e252bfSMichael Neumann 		else
179657e252bfSMichael Neumann 			table->initialState.levels[0].mcFlags = 0;
179757e252bfSMichael Neumann 	}
179857e252bfSMichael Neumann 
179957e252bfSMichael Neumann 	table->initialState.levelCount = 1;
180057e252bfSMichael Neumann 
180157e252bfSMichael Neumann 	table->initialState.flags |= PPSMC_SWSTATE_FLAG_DC;
180257e252bfSMichael Neumann 
180357e252bfSMichael Neumann 	table->initialState.levels[0].dpm2.MaxPS = 0;
180457e252bfSMichael Neumann 	table->initialState.levels[0].dpm2.NearTDPDec = 0;
180557e252bfSMichael Neumann 	table->initialState.levels[0].dpm2.AboveSafeInc = 0;
180657e252bfSMichael Neumann 	table->initialState.levels[0].dpm2.BelowSafeInc = 0;
180757e252bfSMichael Neumann 
180857e252bfSMichael Neumann 	reg = MIN_POWER_MASK | MAX_POWER_MASK;
180957e252bfSMichael Neumann 	table->initialState.levels[0].SQPowerThrottle = cpu_to_be32(reg);
181057e252bfSMichael Neumann 
181157e252bfSMichael Neumann 	reg = MAX_POWER_DELTA_MASK | STI_SIZE_MASK | LTI_RATIO_MASK;
181257e252bfSMichael Neumann 	table->initialState.levels[0].SQPowerThrottle_2 = cpu_to_be32(reg);
181357e252bfSMichael Neumann 
181457e252bfSMichael Neumann 	return 0;
181557e252bfSMichael Neumann }
181657e252bfSMichael Neumann 
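/*
 * The ACPI state starts as a copy of the initial state and is then pared
 * down for the lowest power case: ACPI (or minimum table) VDDC, PCIe gen2
 * only if allowed in ACPI, MPLLs and memory DLLs placed in reset/bypass,
 * zero mclk/sclk values, and the engine clock mux moved off the SPLL via
 * SCLK_MUX_SEL(4).
 */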
181757e252bfSMichael Neumann static int ni_populate_smc_acpi_state(struct radeon_device *rdev,
181857e252bfSMichael Neumann 				      NISLANDS_SMC_STATETABLE *table)
181957e252bfSMichael Neumann {
182057e252bfSMichael Neumann 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
182157e252bfSMichael Neumann 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
182257e252bfSMichael Neumann 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
182357e252bfSMichael Neumann 	u32 mpll_ad_func_cntl   = ni_pi->clock_registers.mpll_ad_func_cntl;
182457e252bfSMichael Neumann 	u32 mpll_ad_func_cntl_2 = ni_pi->clock_registers.mpll_ad_func_cntl_2;
182557e252bfSMichael Neumann 	u32 mpll_dq_func_cntl   = ni_pi->clock_registers.mpll_dq_func_cntl;
182657e252bfSMichael Neumann 	u32 mpll_dq_func_cntl_2 = ni_pi->clock_registers.mpll_dq_func_cntl_2;
182757e252bfSMichael Neumann 	u32 spll_func_cntl      = ni_pi->clock_registers.cg_spll_func_cntl;
182857e252bfSMichael Neumann 	u32 spll_func_cntl_2    = ni_pi->clock_registers.cg_spll_func_cntl_2;
182957e252bfSMichael Neumann 	u32 spll_func_cntl_3    = ni_pi->clock_registers.cg_spll_func_cntl_3;
183057e252bfSMichael Neumann 	u32 spll_func_cntl_4    = ni_pi->clock_registers.cg_spll_func_cntl_4;
183157e252bfSMichael Neumann 	u32 mclk_pwrmgt_cntl    = ni_pi->clock_registers.mclk_pwrmgt_cntl;
183257e252bfSMichael Neumann 	u32 dll_cntl            = ni_pi->clock_registers.dll_cntl;
183357e252bfSMichael Neumann 	u32 reg;
183457e252bfSMichael Neumann 	int ret;
183557e252bfSMichael Neumann 
183657e252bfSMichael Neumann 	table->ACPIState = table->initialState;
183757e252bfSMichael Neumann 
183857e252bfSMichael Neumann 	table->ACPIState.flags &= ~PPSMC_SWSTATE_FLAG_DC;
183957e252bfSMichael Neumann 
184057e252bfSMichael Neumann 	if (pi->acpi_vddc) {
184157e252bfSMichael Neumann 		ret = ni_populate_voltage_value(rdev,
184257e252bfSMichael Neumann 						&eg_pi->vddc_voltage_table,
184357e252bfSMichael Neumann 						pi->acpi_vddc, &table->ACPIState.levels[0].vddc);
184457e252bfSMichael Neumann 		if (!ret) {
184557e252bfSMichael Neumann 			u16 std_vddc;
184657e252bfSMichael Neumann 
184757e252bfSMichael Neumann 			ret = ni_get_std_voltage_value(rdev,
184857e252bfSMichael Neumann 						       &table->ACPIState.levels[0].vddc, &std_vddc);
184957e252bfSMichael Neumann 			if (!ret)
185057e252bfSMichael Neumann 				ni_populate_std_voltage_value(rdev, std_vddc,
185157e252bfSMichael Neumann 							      table->ACPIState.levels[0].vddc.index,
185257e252bfSMichael Neumann 							      &table->ACPIState.levels[0].std_vddc);
185357e252bfSMichael Neumann 		}
185457e252bfSMichael Neumann 
185557e252bfSMichael Neumann 		if (pi->pcie_gen2) {
185657e252bfSMichael Neumann 			if (pi->acpi_pcie_gen2)
185757e252bfSMichael Neumann 				table->ACPIState.levels[0].gen2PCIE = 1;
185857e252bfSMichael Neumann 			else
185957e252bfSMichael Neumann 				table->ACPIState.levels[0].gen2PCIE = 0;
186057e252bfSMichael Neumann 		} else {
186157e252bfSMichael Neumann 			table->ACPIState.levels[0].gen2PCIE = 0;
186257e252bfSMichael Neumann 		}
186357e252bfSMichael Neumann 	} else {
186457e252bfSMichael Neumann 		ret = ni_populate_voltage_value(rdev,
186557e252bfSMichael Neumann 						&eg_pi->vddc_voltage_table,
186657e252bfSMichael Neumann 						pi->min_vddc_in_table,
186757e252bfSMichael Neumann 						&table->ACPIState.levels[0].vddc);
186857e252bfSMichael Neumann 		if (!ret) {
186957e252bfSMichael Neumann 			u16 std_vddc;
187057e252bfSMichael Neumann 
187157e252bfSMichael Neumann 			ret = ni_get_std_voltage_value(rdev,
187257e252bfSMichael Neumann 						       &table->ACPIState.levels[0].vddc,
187357e252bfSMichael Neumann 						       &std_vddc);
187457e252bfSMichael Neumann 			if (!ret)
187557e252bfSMichael Neumann 				ni_populate_std_voltage_value(rdev, std_vddc,
187657e252bfSMichael Neumann 							      table->ACPIState.levels[0].vddc.index,
187757e252bfSMichael Neumann 							      &table->ACPIState.levels[0].std_vddc);
187857e252bfSMichael Neumann 		}
187957e252bfSMichael Neumann 		table->ACPIState.levels[0].gen2PCIE = 0;
188057e252bfSMichael Neumann 	}
188157e252bfSMichael Neumann 
188257e252bfSMichael Neumann 	if (eg_pi->acpi_vddci) {
188357e252bfSMichael Neumann 		if (eg_pi->vddci_control)
188457e252bfSMichael Neumann 			ni_populate_voltage_value(rdev,
188557e252bfSMichael Neumann 						  &eg_pi->vddci_voltage_table,
188657e252bfSMichael Neumann 						  eg_pi->acpi_vddci,
188757e252bfSMichael Neumann 						  &table->ACPIState.levels[0].vddci);
188857e252bfSMichael Neumann 	}
188957e252bfSMichael Neumann 
189157e252bfSMichael Neumann 	mpll_ad_func_cntl &= ~PDNB;
189257e252bfSMichael Neumann 
189357e252bfSMichael Neumann 	mpll_ad_func_cntl_2 |= BIAS_GEN_PDNB | RESET_EN;
189457e252bfSMichael Neumann 
189557e252bfSMichael Neumann 	if (pi->mem_gddr5)
189657e252bfSMichael Neumann 		mpll_dq_func_cntl &= ~PDNB;
189757e252bfSMichael Neumann 	mpll_dq_func_cntl_2 |= BIAS_GEN_PDNB | RESET_EN | BYPASS;
189857e252bfSMichael Neumann 
190057e252bfSMichael Neumann 	mclk_pwrmgt_cntl |= (MRDCKA0_RESET |
190157e252bfSMichael Neumann 			     MRDCKA1_RESET |
190257e252bfSMichael Neumann 			     MRDCKB0_RESET |
190357e252bfSMichael Neumann 			     MRDCKB1_RESET |
190457e252bfSMichael Neumann 			     MRDCKC0_RESET |
190557e252bfSMichael Neumann 			     MRDCKC1_RESET |
190657e252bfSMichael Neumann 			     MRDCKD0_RESET |
190757e252bfSMichael Neumann 			     MRDCKD1_RESET);
190857e252bfSMichael Neumann 
190957e252bfSMichael Neumann 	mclk_pwrmgt_cntl &= ~(MRDCKA0_PDNB |
191057e252bfSMichael Neumann 			      MRDCKA1_PDNB |
191157e252bfSMichael Neumann 			      MRDCKB0_PDNB |
191257e252bfSMichael Neumann 			      MRDCKB1_PDNB |
191357e252bfSMichael Neumann 			      MRDCKC0_PDNB |
191457e252bfSMichael Neumann 			      MRDCKC1_PDNB |
191557e252bfSMichael Neumann 			      MRDCKD0_PDNB |
191657e252bfSMichael Neumann 			      MRDCKD1_PDNB);
191757e252bfSMichael Neumann 
191857e252bfSMichael Neumann 	dll_cntl |= (MRDCKA0_BYPASS |
191957e252bfSMichael Neumann                      MRDCKA1_BYPASS |
192057e252bfSMichael Neumann                      MRDCKB0_BYPASS |
192157e252bfSMichael Neumann                      MRDCKB1_BYPASS |
192257e252bfSMichael Neumann                      MRDCKC0_BYPASS |
192357e252bfSMichael Neumann                      MRDCKC1_BYPASS |
192457e252bfSMichael Neumann                      MRDCKD0_BYPASS |
192557e252bfSMichael Neumann                      MRDCKD1_BYPASS);
192657e252bfSMichael Neumann 
192757e252bfSMichael Neumann 	spll_func_cntl_2 &= ~SCLK_MUX_SEL_MASK;
192857e252bfSMichael Neumann 	spll_func_cntl_2 |= SCLK_MUX_SEL(4);
192957e252bfSMichael Neumann 
193057e252bfSMichael Neumann 	table->ACPIState.levels[0].mclk.vMPLL_AD_FUNC_CNTL = cpu_to_be32(mpll_ad_func_cntl);
193157e252bfSMichael Neumann 	table->ACPIState.levels[0].mclk.vMPLL_AD_FUNC_CNTL_2 = cpu_to_be32(mpll_ad_func_cntl_2);
193257e252bfSMichael Neumann 	table->ACPIState.levels[0].mclk.vMPLL_DQ_FUNC_CNTL = cpu_to_be32(mpll_dq_func_cntl);
193357e252bfSMichael Neumann 	table->ACPIState.levels[0].mclk.vMPLL_DQ_FUNC_CNTL_2 = cpu_to_be32(mpll_dq_func_cntl_2);
193457e252bfSMichael Neumann 	table->ACPIState.levels[0].mclk.vMCLK_PWRMGT_CNTL = cpu_to_be32(mclk_pwrmgt_cntl);
193557e252bfSMichael Neumann 	table->ACPIState.levels[0].mclk.vDLL_CNTL = cpu_to_be32(dll_cntl);
193657e252bfSMichael Neumann 
193757e252bfSMichael Neumann 	table->ACPIState.levels[0].mclk.mclk_value = 0;
193857e252bfSMichael Neumann 
193957e252bfSMichael Neumann 	table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL = cpu_to_be32(spll_func_cntl);
194057e252bfSMichael Neumann 	table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_2 = cpu_to_be32(spll_func_cntl_2);
194157e252bfSMichael Neumann 	table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_3 = cpu_to_be32(spll_func_cntl_3);
194257e252bfSMichael Neumann 	table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_4 = cpu_to_be32(spll_func_cntl_4);
194357e252bfSMichael Neumann 
194457e252bfSMichael Neumann 	table->ACPIState.levels[0].sclk.sclk_value = 0;
194557e252bfSMichael Neumann 
194657e252bfSMichael Neumann 	ni_populate_mvdd_value(rdev, 0, &table->ACPIState.levels[0].mvdd);
194757e252bfSMichael Neumann 
194857e252bfSMichael Neumann 	if (eg_pi->dynamic_ac_timing)
194957e252bfSMichael Neumann 		table->ACPIState.levels[0].ACIndex = 1;
195057e252bfSMichael Neumann 
195157e252bfSMichael Neumann 	table->ACPIState.levels[0].dpm2.MaxPS = 0;
195257e252bfSMichael Neumann 	table->ACPIState.levels[0].dpm2.NearTDPDec = 0;
195357e252bfSMichael Neumann 	table->ACPIState.levels[0].dpm2.AboveSafeInc = 0;
195457e252bfSMichael Neumann 	table->ACPIState.levels[0].dpm2.BelowSafeInc = 0;
195557e252bfSMichael Neumann 
195657e252bfSMichael Neumann 	reg = MIN_POWER_MASK | MAX_POWER_MASK;
195757e252bfSMichael Neumann 	table->ACPIState.levels[0].SQPowerThrottle = cpu_to_be32(reg);
195857e252bfSMichael Neumann 
195957e252bfSMichael Neumann 	reg = MAX_POWER_DELTA_MASK | STI_SIZE_MASK | LTI_RATIO_MASK;
196057e252bfSMichael Neumann 	table->ACPIState.levels[0].SQPowerThrottle_2 = cpu_to_be32(reg);
196157e252bfSMichael Neumann 
196257e252bfSMichael Neumann 	return 0;
196357e252bfSMichael Neumann }
196457e252bfSMichael Neumann 
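/*
 * Build the driver's copy of the SMC state table: voltage tables, thermal
 * protection type, platform flags (hardware DC, regulator hot, step VDDC,
 * GDDR5), the initial and ACPI states, and the initial-state memory timing
 * parameters, then copy the whole table into SMC RAM.
 */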
196557e252bfSMichael Neumann static int ni_init_smc_table(struct radeon_device *rdev)
196657e252bfSMichael Neumann {
196757e252bfSMichael Neumann 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
196857e252bfSMichael Neumann 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
196957e252bfSMichael Neumann 	int ret;
197057e252bfSMichael Neumann 	struct radeon_ps *radeon_boot_state = rdev->pm.dpm.boot_ps;
197157e252bfSMichael Neumann 	NISLANDS_SMC_STATETABLE *table = &ni_pi->smc_statetable;
197257e252bfSMichael Neumann 
197357e252bfSMichael Neumann 	memset(table, 0, sizeof(NISLANDS_SMC_STATETABLE));
197457e252bfSMichael Neumann 
197557e252bfSMichael Neumann 	ni_populate_smc_voltage_tables(rdev, table);
197657e252bfSMichael Neumann 
197757e252bfSMichael Neumann 	switch (rdev->pm.int_thermal_type) {
197857e252bfSMichael Neumann 	case THERMAL_TYPE_NI:
197957e252bfSMichael Neumann 	case THERMAL_TYPE_EMC2103_WITH_INTERNAL:
198057e252bfSMichael Neumann 		table->thermalProtectType = PPSMC_THERMAL_PROTECT_TYPE_INTERNAL;
198157e252bfSMichael Neumann 		break;
198257e252bfSMichael Neumann 	case THERMAL_TYPE_NONE:
198357e252bfSMichael Neumann 		table->thermalProtectType = PPSMC_THERMAL_PROTECT_TYPE_NONE;
198457e252bfSMichael Neumann 		break;
198557e252bfSMichael Neumann 	default:
198657e252bfSMichael Neumann 		table->thermalProtectType = PPSMC_THERMAL_PROTECT_TYPE_EXTERNAL;
198757e252bfSMichael Neumann 		break;
198857e252bfSMichael Neumann 	}
198957e252bfSMichael Neumann 
199057e252bfSMichael Neumann 	if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_HARDWAREDC)
199157e252bfSMichael Neumann 		table->systemFlags |= PPSMC_SYSTEMFLAG_GPIO_DC;
199257e252bfSMichael Neumann 
199357e252bfSMichael Neumann 	if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_REGULATOR_HOT)
199457e252bfSMichael Neumann 		table->systemFlags |= PPSMC_SYSTEMFLAG_REGULATOR_HOT;
199557e252bfSMichael Neumann 
199657e252bfSMichael Neumann 	if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_STEPVDDC)
199757e252bfSMichael Neumann 		table->systemFlags |= PPSMC_SYSTEMFLAG_STEPVDDC;
199857e252bfSMichael Neumann 
199957e252bfSMichael Neumann 	if (pi->mem_gddr5)
200057e252bfSMichael Neumann 		table->systemFlags |= PPSMC_SYSTEMFLAG_GDDR5;
200157e252bfSMichael Neumann 
200257e252bfSMichael Neumann 	ret = ni_populate_smc_initial_state(rdev, radeon_boot_state, table);
200357e252bfSMichael Neumann 	if (ret)
200457e252bfSMichael Neumann 		return ret;
200557e252bfSMichael Neumann 
200657e252bfSMichael Neumann 	ret = ni_populate_smc_acpi_state(rdev, table);
200757e252bfSMichael Neumann 	if (ret)
200857e252bfSMichael Neumann 		return ret;
200957e252bfSMichael Neumann 
201057e252bfSMichael Neumann 	table->driverState = table->initialState;
201157e252bfSMichael Neumann 
201257e252bfSMichael Neumann 	table->ULVState = table->initialState;
201357e252bfSMichael Neumann 
201457e252bfSMichael Neumann 	ret = ni_do_program_memory_timing_parameters(rdev, radeon_boot_state,
201557e252bfSMichael Neumann 						     NISLANDS_INITIAL_STATE_ARB_INDEX);
201657e252bfSMichael Neumann 	if (ret)
201757e252bfSMichael Neumann 		return ret;
201857e252bfSMichael Neumann 
201957e252bfSMichael Neumann 	return rv770_copy_bytes_to_smc(rdev, pi->state_table_start, (u8 *)table,
202057e252bfSMichael Neumann 				       sizeof(NISLANDS_SMC_STATETABLE), pi->sram_end);
202157e252bfSMichael Neumann }
202257e252bfSMichael Neumann 
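/*
 * Compute the SPLL register settings for the requested engine clock: fetch
 * the dividers from the ATOM tables, derive the fractional feedback divider,
 * and optionally fill in the spread-spectrum fields when engine clock SS is
 * enabled.  The values are returned in CPU byte order.
 */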
202357e252bfSMichael Neumann static int ni_calculate_sclk_params(struct radeon_device *rdev,
202457e252bfSMichael Neumann 				    u32 engine_clock,
202557e252bfSMichael Neumann 				    NISLANDS_SMC_SCLK_VALUE *sclk)
202657e252bfSMichael Neumann {
202757e252bfSMichael Neumann 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
202857e252bfSMichael Neumann 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
202957e252bfSMichael Neumann 	struct atom_clock_dividers dividers;
203057e252bfSMichael Neumann 	u32 spll_func_cntl = ni_pi->clock_registers.cg_spll_func_cntl;
203157e252bfSMichael Neumann 	u32 spll_func_cntl_2 = ni_pi->clock_registers.cg_spll_func_cntl_2;
203257e252bfSMichael Neumann 	u32 spll_func_cntl_3 = ni_pi->clock_registers.cg_spll_func_cntl_3;
203357e252bfSMichael Neumann 	u32 spll_func_cntl_4 = ni_pi->clock_registers.cg_spll_func_cntl_4;
203457e252bfSMichael Neumann 	u32 cg_spll_spread_spectrum = ni_pi->clock_registers.cg_spll_spread_spectrum;
203557e252bfSMichael Neumann 	u32 cg_spll_spread_spectrum_2 = ni_pi->clock_registers.cg_spll_spread_spectrum_2;
203657e252bfSMichael Neumann 	u64 tmp;
203757e252bfSMichael Neumann 	u32 reference_clock = rdev->clock.spll.reference_freq;
203857e252bfSMichael Neumann 	u32 reference_divider;
203957e252bfSMichael Neumann 	u32 fbdiv;
204057e252bfSMichael Neumann 	int ret;
204157e252bfSMichael Neumann 
204257e252bfSMichael Neumann 	ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
204357e252bfSMichael Neumann 					     engine_clock, false, &dividers);
204457e252bfSMichael Neumann 	if (ret)
204557e252bfSMichael Neumann 		return ret;
204657e252bfSMichael Neumann 
204757e252bfSMichael Neumann 	reference_divider = 1 + dividers.ref_div;
204857e252bfSMichael Neumann 
	/* fbdiv is scaled by 16384 (2^14) to carry the fractional feedback divider */
205057e252bfSMichael Neumann 	tmp = (u64) engine_clock * reference_divider * dividers.post_div * 16384;
205157e252bfSMichael Neumann 	do_div(tmp, reference_clock);
205257e252bfSMichael Neumann 	fbdiv = (u32) tmp;
205357e252bfSMichael Neumann 
205457e252bfSMichael Neumann 	spll_func_cntl &= ~(SPLL_PDIV_A_MASK | SPLL_REF_DIV_MASK);
205557e252bfSMichael Neumann 	spll_func_cntl |= SPLL_REF_DIV(dividers.ref_div);
205657e252bfSMichael Neumann 	spll_func_cntl |= SPLL_PDIV_A(dividers.post_div);
205757e252bfSMichael Neumann 
205857e252bfSMichael Neumann 	spll_func_cntl_2 &= ~SCLK_MUX_SEL_MASK;
205957e252bfSMichael Neumann 	spll_func_cntl_2 |= SCLK_MUX_SEL(2);
206057e252bfSMichael Neumann 
206157e252bfSMichael Neumann 	spll_func_cntl_3 &= ~SPLL_FB_DIV_MASK;
206257e252bfSMichael Neumann 	spll_func_cntl_3 |= SPLL_FB_DIV(fbdiv);
206357e252bfSMichael Neumann 	spll_func_cntl_3 |= SPLL_DITHEN;
206457e252bfSMichael Neumann 
206557e252bfSMichael Neumann 	if (pi->sclk_ss) {
206657e252bfSMichael Neumann 		struct radeon_atom_ss ss;
206757e252bfSMichael Neumann 		u32 vco_freq = engine_clock * dividers.post_div;
206857e252bfSMichael Neumann 
206957e252bfSMichael Neumann 		if (radeon_atombios_get_asic_ss_info(rdev, &ss,
207057e252bfSMichael Neumann 						     ASIC_INTERNAL_ENGINE_SS, vco_freq)) {
207157e252bfSMichael Neumann 			u32 clk_s = reference_clock * 5 / (reference_divider * ss.rate);
207257e252bfSMichael Neumann 			u32 clk_v = 4 * ss.percentage * fbdiv / (clk_s * 10000);
207357e252bfSMichael Neumann 
207457e252bfSMichael Neumann 			cg_spll_spread_spectrum &= ~CLK_S_MASK;
207557e252bfSMichael Neumann 			cg_spll_spread_spectrum |= CLK_S(clk_s);
207657e252bfSMichael Neumann 			cg_spll_spread_spectrum |= SSEN;
207757e252bfSMichael Neumann 
207857e252bfSMichael Neumann 			cg_spll_spread_spectrum_2 &= ~CLK_V_MASK;
207957e252bfSMichael Neumann 			cg_spll_spread_spectrum_2 |= CLK_V(clk_v);
208057e252bfSMichael Neumann 		}
208157e252bfSMichael Neumann 	}
208257e252bfSMichael Neumann 
208357e252bfSMichael Neumann 	sclk->sclk_value = engine_clock;
208457e252bfSMichael Neumann 	sclk->vCG_SPLL_FUNC_CNTL = spll_func_cntl;
208557e252bfSMichael Neumann 	sclk->vCG_SPLL_FUNC_CNTL_2 = spll_func_cntl_2;
208657e252bfSMichael Neumann 	sclk->vCG_SPLL_FUNC_CNTL_3 = spll_func_cntl_3;
208757e252bfSMichael Neumann 	sclk->vCG_SPLL_FUNC_CNTL_4 = spll_func_cntl_4;
208857e252bfSMichael Neumann 	sclk->vCG_SPLL_SPREAD_SPECTRUM = cg_spll_spread_spectrum;
208957e252bfSMichael Neumann 	sclk->vCG_SPLL_SPREAD_SPECTRUM_2 = cg_spll_spread_spectrum_2;
209057e252bfSMichael Neumann 
209157e252bfSMichael Neumann 	return 0;
209257e252bfSMichael Neumann }
209357e252bfSMichael Neumann 
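/*
 * Same as ni_calculate_sclk_params(), but byte-swapped into the big-endian
 * layout the SMC expects.
 */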
209457e252bfSMichael Neumann static int ni_populate_sclk_value(struct radeon_device *rdev,
209557e252bfSMichael Neumann 				  u32 engine_clock,
209657e252bfSMichael Neumann 				  NISLANDS_SMC_SCLK_VALUE *sclk)
209757e252bfSMichael Neumann {
209857e252bfSMichael Neumann 	NISLANDS_SMC_SCLK_VALUE sclk_tmp;
209957e252bfSMichael Neumann 	int ret;
210057e252bfSMichael Neumann 
210157e252bfSMichael Neumann 	ret = ni_calculate_sclk_params(rdev, engine_clock, &sclk_tmp);
210257e252bfSMichael Neumann 	if (!ret) {
210357e252bfSMichael Neumann 		sclk->sclk_value = cpu_to_be32(sclk_tmp.sclk_value);
210457e252bfSMichael Neumann 		sclk->vCG_SPLL_FUNC_CNTL = cpu_to_be32(sclk_tmp.vCG_SPLL_FUNC_CNTL);
210557e252bfSMichael Neumann 		sclk->vCG_SPLL_FUNC_CNTL_2 = cpu_to_be32(sclk_tmp.vCG_SPLL_FUNC_CNTL_2);
210657e252bfSMichael Neumann 		sclk->vCG_SPLL_FUNC_CNTL_3 = cpu_to_be32(sclk_tmp.vCG_SPLL_FUNC_CNTL_3);
210757e252bfSMichael Neumann 		sclk->vCG_SPLL_FUNC_CNTL_4 = cpu_to_be32(sclk_tmp.vCG_SPLL_FUNC_CNTL_4);
210857e252bfSMichael Neumann 		sclk->vCG_SPLL_SPREAD_SPECTRUM = cpu_to_be32(sclk_tmp.vCG_SPLL_SPREAD_SPECTRUM);
210957e252bfSMichael Neumann 		sclk->vCG_SPLL_SPREAD_SPECTRUM_2 = cpu_to_be32(sclk_tmp.vCG_SPLL_SPREAD_SPECTRUM_2);
211057e252bfSMichael Neumann 	}
211157e252bfSMichael Neumann 
211257e252bfSMichael Neumann 	return ret;
211357e252bfSMichael Neumann }
211457e252bfSMichael Neumann 
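/*
 * Precompute the 256-entry SPLL divider table (post divider, feedback divider
 * and spread-spectrum fields packed per entry, stepping the engine clock by
 * 512 units per entry) and upload it to SMC RAM at ni_pi->spll_table_start.
 */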
211557e252bfSMichael Neumann static int ni_init_smc_spll_table(struct radeon_device *rdev)
211657e252bfSMichael Neumann {
211757e252bfSMichael Neumann 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
211857e252bfSMichael Neumann 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
211957e252bfSMichael Neumann 	SMC_NISLANDS_SPLL_DIV_TABLE *spll_table;
212057e252bfSMichael Neumann 	NISLANDS_SMC_SCLK_VALUE sclk_params;
212157e252bfSMichael Neumann 	u32 fb_div;
212257e252bfSMichael Neumann 	u32 p_div;
212357e252bfSMichael Neumann 	u32 clk_s;
212457e252bfSMichael Neumann 	u32 clk_v;
212557e252bfSMichael Neumann 	u32 sclk = 0;
212657e252bfSMichael Neumann 	int i, ret;
212757e252bfSMichael Neumann 	u32 tmp;
212857e252bfSMichael Neumann 
212957e252bfSMichael Neumann 	if (ni_pi->spll_table_start == 0)
213057e252bfSMichael Neumann 		return -EINVAL;
213157e252bfSMichael Neumann 
213257e252bfSMichael Neumann 	spll_table = kzalloc(sizeof(SMC_NISLANDS_SPLL_DIV_TABLE), GFP_KERNEL);
213357e252bfSMichael Neumann 	if (spll_table == NULL)
213457e252bfSMichael Neumann 		return -ENOMEM;
213557e252bfSMichael Neumann 
213657e252bfSMichael Neumann 	for (i = 0; i < 256; i++) {
213757e252bfSMichael Neumann 		ret = ni_calculate_sclk_params(rdev, sclk, &sclk_params);
213857e252bfSMichael Neumann 		if (ret)
213957e252bfSMichael Neumann 			break;
214057e252bfSMichael Neumann 
214157e252bfSMichael Neumann 		p_div = (sclk_params.vCG_SPLL_FUNC_CNTL & SPLL_PDIV_A_MASK) >> SPLL_PDIV_A_SHIFT;
214257e252bfSMichael Neumann 		fb_div = (sclk_params.vCG_SPLL_FUNC_CNTL_3 & SPLL_FB_DIV_MASK) >> SPLL_FB_DIV_SHIFT;
214357e252bfSMichael Neumann 		clk_s = (sclk_params.vCG_SPLL_SPREAD_SPECTRUM & CLK_S_MASK) >> CLK_S_SHIFT;
214457e252bfSMichael Neumann 		clk_v = (sclk_params.vCG_SPLL_SPREAD_SPECTRUM_2 & CLK_V_MASK) >> CLK_V_SHIFT;
214557e252bfSMichael Neumann 
214657e252bfSMichael Neumann 		fb_div &= ~0x00001FFF;
214757e252bfSMichael Neumann 		fb_div >>= 1;
214857e252bfSMichael Neumann 		clk_v >>= 6;
214957e252bfSMichael Neumann 
215057e252bfSMichael Neumann 		if (p_div & ~(SMC_NISLANDS_SPLL_DIV_TABLE_PDIV_MASK >> SMC_NISLANDS_SPLL_DIV_TABLE_PDIV_SHIFT))
215157e252bfSMichael Neumann 			ret = -EINVAL;
215257e252bfSMichael Neumann 
215357e252bfSMichael Neumann 		if (clk_s & ~(SMC_NISLANDS_SPLL_DIV_TABLE_CLKS_MASK >> SMC_NISLANDS_SPLL_DIV_TABLE_CLKS_SHIFT))
215457e252bfSMichael Neumann 			ret = -EINVAL;
215557e252bfSMichael Neumann 
215657e252bfSMichael Neumann 		if (fb_div & ~(SMC_NISLANDS_SPLL_DIV_TABLE_FBDIV_MASK >> SMC_NISLANDS_SPLL_DIV_TABLE_FBDIV_SHIFT))
215757e252bfSMichael Neumann 			ret = -EINVAL;
215857e252bfSMichael Neumann 
215957e252bfSMichael Neumann 		if (clk_v & ~(SMC_NISLANDS_SPLL_DIV_TABLE_CLKV_MASK >> SMC_NISLANDS_SPLL_DIV_TABLE_CLKV_SHIFT))
216057e252bfSMichael Neumann 			ret = -EINVAL;
216157e252bfSMichael Neumann 
216257e252bfSMichael Neumann 		if (ret)
216357e252bfSMichael Neumann 			break;
216457e252bfSMichael Neumann 
216557e252bfSMichael Neumann 		tmp = ((fb_div << SMC_NISLANDS_SPLL_DIV_TABLE_FBDIV_SHIFT) & SMC_NISLANDS_SPLL_DIV_TABLE_FBDIV_MASK) |
216657e252bfSMichael Neumann 			((p_div << SMC_NISLANDS_SPLL_DIV_TABLE_PDIV_SHIFT) & SMC_NISLANDS_SPLL_DIV_TABLE_PDIV_MASK);
216757e252bfSMichael Neumann 		spll_table->freq[i] = cpu_to_be32(tmp);
216857e252bfSMichael Neumann 
216957e252bfSMichael Neumann 		tmp = ((clk_v << SMC_NISLANDS_SPLL_DIV_TABLE_CLKV_SHIFT) & SMC_NISLANDS_SPLL_DIV_TABLE_CLKV_MASK) |
217057e252bfSMichael Neumann 			((clk_s << SMC_NISLANDS_SPLL_DIV_TABLE_CLKS_SHIFT) & SMC_NISLANDS_SPLL_DIV_TABLE_CLKS_MASK);
217157e252bfSMichael Neumann 		spll_table->ss[i] = cpu_to_be32(tmp);
217257e252bfSMichael Neumann 
217357e252bfSMichael Neumann 		sclk += 512;
217457e252bfSMichael Neumann 	}
217557e252bfSMichael Neumann 
217657e252bfSMichael Neumann 	if (!ret)
217757e252bfSMichael Neumann 		ret = rv770_copy_bytes_to_smc(rdev, ni_pi->spll_table_start, (u8 *)spll_table,
217857e252bfSMichael Neumann 					      sizeof(SMC_NISLANDS_SPLL_DIV_TABLE), pi->sram_end);
217957e252bfSMichael Neumann 
218057e252bfSMichael Neumann 	kfree(spll_table);
218157e252bfSMichael Neumann 
218257e252bfSMichael Neumann 	return ret;
218357e252bfSMichael Neumann }
218457e252bfSMichael Neumann 
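/*
 * Compute the MPLL (AD/DQ), DLL and MCLK_PWRMGT register settings for the
 * requested memory clock, including optional memory spread spectrum, and
 * store them big-endian in the SMC mclk structure.
 */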
218557e252bfSMichael Neumann static int ni_populate_mclk_value(struct radeon_device *rdev,
218657e252bfSMichael Neumann 				  u32 engine_clock,
218757e252bfSMichael Neumann 				  u32 memory_clock,
218857e252bfSMichael Neumann 				  NISLANDS_SMC_MCLK_VALUE *mclk,
218957e252bfSMichael Neumann 				  bool strobe_mode,
219057e252bfSMichael Neumann 				  bool dll_state_on)
219157e252bfSMichael Neumann {
219257e252bfSMichael Neumann 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
219357e252bfSMichael Neumann 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
219457e252bfSMichael Neumann 	u32 mpll_ad_func_cntl = ni_pi->clock_registers.mpll_ad_func_cntl;
219557e252bfSMichael Neumann 	u32 mpll_ad_func_cntl_2 = ni_pi->clock_registers.mpll_ad_func_cntl_2;
219657e252bfSMichael Neumann 	u32 mpll_dq_func_cntl = ni_pi->clock_registers.mpll_dq_func_cntl;
219757e252bfSMichael Neumann 	u32 mpll_dq_func_cntl_2 = ni_pi->clock_registers.mpll_dq_func_cntl_2;
219857e252bfSMichael Neumann 	u32 mclk_pwrmgt_cntl = ni_pi->clock_registers.mclk_pwrmgt_cntl;
219957e252bfSMichael Neumann 	u32 dll_cntl = ni_pi->clock_registers.dll_cntl;
220057e252bfSMichael Neumann 	u32 mpll_ss1 = ni_pi->clock_registers.mpll_ss1;
220157e252bfSMichael Neumann 	u32 mpll_ss2 = ni_pi->clock_registers.mpll_ss2;
220257e252bfSMichael Neumann 	struct atom_clock_dividers dividers;
220357e252bfSMichael Neumann 	u32 ibias;
220457e252bfSMichael Neumann 	u32 dll_speed;
220557e252bfSMichael Neumann 	int ret;
220657e252bfSMichael Neumann 	u32 mc_seq_misc7;
220757e252bfSMichael Neumann 
220857e252bfSMichael Neumann 	ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_MEMORY_PLL_PARAM,
220957e252bfSMichael Neumann 					     memory_clock, strobe_mode, &dividers);
221057e252bfSMichael Neumann 	if (ret)
221157e252bfSMichael Neumann 		return ret;
221257e252bfSMichael Neumann 
221357e252bfSMichael Neumann 	if (!strobe_mode) {
221457e252bfSMichael Neumann 		mc_seq_misc7 = RREG32(MC_SEQ_MISC7);
221557e252bfSMichael Neumann 
221657e252bfSMichael Neumann 		if (mc_seq_misc7 & 0x8000000)
221757e252bfSMichael Neumann 			dividers.post_div = 1;
221857e252bfSMichael Neumann 	}
221957e252bfSMichael Neumann 
222057e252bfSMichael Neumann 	ibias = cypress_map_clkf_to_ibias(rdev, dividers.whole_fb_div);
222157e252bfSMichael Neumann 
222257e252bfSMichael Neumann 	mpll_ad_func_cntl &= ~(CLKR_MASK |
222357e252bfSMichael Neumann 			       YCLK_POST_DIV_MASK |
222457e252bfSMichael Neumann 			       CLKF_MASK |
222557e252bfSMichael Neumann 			       CLKFRAC_MASK |
222657e252bfSMichael Neumann 			       IBIAS_MASK);
222757e252bfSMichael Neumann 	mpll_ad_func_cntl |= CLKR(dividers.ref_div);
222857e252bfSMichael Neumann 	mpll_ad_func_cntl |= YCLK_POST_DIV(dividers.post_div);
222957e252bfSMichael Neumann 	mpll_ad_func_cntl |= CLKF(dividers.whole_fb_div);
223057e252bfSMichael Neumann 	mpll_ad_func_cntl |= CLKFRAC(dividers.frac_fb_div);
223157e252bfSMichael Neumann 	mpll_ad_func_cntl |= IBIAS(ibias);
223257e252bfSMichael Neumann 
223357e252bfSMichael Neumann 	if (dividers.vco_mode)
223457e252bfSMichael Neumann 		mpll_ad_func_cntl_2 |= VCO_MODE;
223557e252bfSMichael Neumann 	else
223657e252bfSMichael Neumann 		mpll_ad_func_cntl_2 &= ~VCO_MODE;
223757e252bfSMichael Neumann 
223857e252bfSMichael Neumann 	if (pi->mem_gddr5) {
223957e252bfSMichael Neumann 		mpll_dq_func_cntl &= ~(CLKR_MASK |
224057e252bfSMichael Neumann 				       YCLK_POST_DIV_MASK |
224157e252bfSMichael Neumann 				       CLKF_MASK |
224257e252bfSMichael Neumann 				       CLKFRAC_MASK |
224357e252bfSMichael Neumann 				       IBIAS_MASK);
224457e252bfSMichael Neumann 		mpll_dq_func_cntl |= CLKR(dividers.ref_div);
224557e252bfSMichael Neumann 		mpll_dq_func_cntl |= YCLK_POST_DIV(dividers.post_div);
224657e252bfSMichael Neumann 		mpll_dq_func_cntl |= CLKF(dividers.whole_fb_div);
224757e252bfSMichael Neumann 		mpll_dq_func_cntl |= CLKFRAC(dividers.frac_fb_div);
224857e252bfSMichael Neumann 		mpll_dq_func_cntl |= IBIAS(ibias);
224957e252bfSMichael Neumann 
225057e252bfSMichael Neumann 		if (strobe_mode)
225157e252bfSMichael Neumann 			mpll_dq_func_cntl &= ~PDNB;
225257e252bfSMichael Neumann 		else
225357e252bfSMichael Neumann 			mpll_dq_func_cntl |= PDNB;
225457e252bfSMichael Neumann 
225557e252bfSMichael Neumann 		if (dividers.vco_mode)
225657e252bfSMichael Neumann 			mpll_dq_func_cntl_2 |= VCO_MODE;
225757e252bfSMichael Neumann 		else
225857e252bfSMichael Neumann 			mpll_dq_func_cntl_2 &= ~VCO_MODE;
225957e252bfSMichael Neumann 	}
226057e252bfSMichael Neumann 
226157e252bfSMichael Neumann 	if (pi->mclk_ss) {
226257e252bfSMichael Neumann 		struct radeon_atom_ss ss;
226357e252bfSMichael Neumann 		u32 vco_freq = memory_clock * dividers.post_div;
226457e252bfSMichael Neumann 
226557e252bfSMichael Neumann 		if (radeon_atombios_get_asic_ss_info(rdev, &ss,
226657e252bfSMichael Neumann 						     ASIC_INTERNAL_MEMORY_SS, vco_freq)) {
226757e252bfSMichael Neumann 			u32 reference_clock = rdev->clock.mpll.reference_freq;
226857e252bfSMichael Neumann 			u32 decoded_ref = rv740_get_decoded_reference_divider(dividers.ref_div);
226957e252bfSMichael Neumann 			u32 clk_s = reference_clock * 5 / (decoded_ref * ss.rate);
227057e252bfSMichael Neumann 			u32 clk_v = ss.percentage *
227157e252bfSMichael Neumann 				(0x4000 * dividers.whole_fb_div + 0x800 * dividers.frac_fb_div) / (clk_s * 625);
227257e252bfSMichael Neumann 
227357e252bfSMichael Neumann 			mpll_ss1 &= ~CLKV_MASK;
227457e252bfSMichael Neumann 			mpll_ss1 |= CLKV(clk_v);
227557e252bfSMichael Neumann 
227657e252bfSMichael Neumann 			mpll_ss2 &= ~CLKS_MASK;
227757e252bfSMichael Neumann 			mpll_ss2 |= CLKS(clk_s);
227857e252bfSMichael Neumann 		}
227957e252bfSMichael Neumann 	}
228057e252bfSMichael Neumann 
228157e252bfSMichael Neumann 	dll_speed = rv740_get_dll_speed(pi->mem_gddr5,
228257e252bfSMichael Neumann 					memory_clock);
228357e252bfSMichael Neumann 
228457e252bfSMichael Neumann 	mclk_pwrmgt_cntl &= ~DLL_SPEED_MASK;
228557e252bfSMichael Neumann 	mclk_pwrmgt_cntl |= DLL_SPEED(dll_speed);
228657e252bfSMichael Neumann 	if (dll_state_on)
228757e252bfSMichael Neumann 		mclk_pwrmgt_cntl |= (MRDCKA0_PDNB |
228857e252bfSMichael Neumann 				     MRDCKA1_PDNB |
228957e252bfSMichael Neumann 				     MRDCKB0_PDNB |
229057e252bfSMichael Neumann 				     MRDCKB1_PDNB |
229157e252bfSMichael Neumann 				     MRDCKC0_PDNB |
229257e252bfSMichael Neumann 				     MRDCKC1_PDNB |
229357e252bfSMichael Neumann 				     MRDCKD0_PDNB |
229457e252bfSMichael Neumann 				     MRDCKD1_PDNB);
229557e252bfSMichael Neumann 	else
229657e252bfSMichael Neumann 		mclk_pwrmgt_cntl &= ~(MRDCKA0_PDNB |
229757e252bfSMichael Neumann 				      MRDCKA1_PDNB |
229857e252bfSMichael Neumann 				      MRDCKB0_PDNB |
229957e252bfSMichael Neumann 				      MRDCKB1_PDNB |
230057e252bfSMichael Neumann 				      MRDCKC0_PDNB |
230157e252bfSMichael Neumann 				      MRDCKC1_PDNB |
230257e252bfSMichael Neumann 				      MRDCKD0_PDNB |
230357e252bfSMichael Neumann 				      MRDCKD1_PDNB);
230457e252bfSMichael Neumann 
230657e252bfSMichael Neumann 	mclk->mclk_value = cpu_to_be32(memory_clock);
230757e252bfSMichael Neumann 	mclk->vMPLL_AD_FUNC_CNTL = cpu_to_be32(mpll_ad_func_cntl);
230857e252bfSMichael Neumann 	mclk->vMPLL_AD_FUNC_CNTL_2 = cpu_to_be32(mpll_ad_func_cntl_2);
230957e252bfSMichael Neumann 	mclk->vMPLL_DQ_FUNC_CNTL = cpu_to_be32(mpll_dq_func_cntl);
231057e252bfSMichael Neumann 	mclk->vMPLL_DQ_FUNC_CNTL_2 = cpu_to_be32(mpll_dq_func_cntl_2);
231157e252bfSMichael Neumann 	mclk->vMCLK_PWRMGT_CNTL = cpu_to_be32(mclk_pwrmgt_cntl);
231257e252bfSMichael Neumann 	mclk->vDLL_CNTL = cpu_to_be32(dll_cntl);
231357e252bfSMichael Neumann 	mclk->vMPLL_SS = cpu_to_be32(mpll_ss1);
231457e252bfSMichael Neumann 	mclk->vMPLL_SS2 = cpu_to_be32(mpll_ss2);
231557e252bfSMichael Neumann 
231657e252bfSMichael Neumann 	return 0;
231757e252bfSMichael Neumann }
231857e252bfSMichael Neumann 
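/*
 * Fill in the bSP field of every performance level: pi->dsp for all but the
 * highest level, pi->psp for the highest one.
 */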
231957e252bfSMichael Neumann static void ni_populate_smc_sp(struct radeon_device *rdev,
232057e252bfSMichael Neumann 			       struct radeon_ps *radeon_state,
232157e252bfSMichael Neumann 			       NISLANDS_SMC_SWSTATE *smc_state)
232257e252bfSMichael Neumann {
232357e252bfSMichael Neumann 	struct ni_ps *ps = ni_get_ps(radeon_state);
232457e252bfSMichael Neumann 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
232557e252bfSMichael Neumann 	int i;
232657e252bfSMichael Neumann 
232757e252bfSMichael Neumann 	for (i = 0; i < ps->performance_level_count - 1; i++)
232857e252bfSMichael Neumann 		smc_state->levels[i].bSP = cpu_to_be32(pi->dsp);
232957e252bfSMichael Neumann 
233057e252bfSMichael Neumann 	smc_state->levels[ps->performance_level_count - 1].bSP =
233157e252bfSMichael Neumann 		cpu_to_be32(pi->psp);
233257e252bfSMichael Neumann }
233357e252bfSMichael Neumann 
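/*
 * Translate a single rv7xx performance level into an SMC hardware level:
 * PCIe gen2 flag, sclk/mclk PLL settings, memory controller flags (stutter,
 * EDC, strobe/RTT, DLL state) and the VDDC/VDDCI/MVDD voltage entries.
 */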
233457e252bfSMichael Neumann static int ni_convert_power_level_to_smc(struct radeon_device *rdev,
233557e252bfSMichael Neumann 					 struct rv7xx_pl *pl,
233657e252bfSMichael Neumann 					 NISLANDS_SMC_HW_PERFORMANCE_LEVEL *level)
233757e252bfSMichael Neumann {
233857e252bfSMichael Neumann 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
233957e252bfSMichael Neumann 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
234057e252bfSMichael Neumann 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
234157e252bfSMichael Neumann 	int ret;
234257e252bfSMichael Neumann 	bool dll_state_on;
234357e252bfSMichael Neumann 	u16 std_vddc;
234457e252bfSMichael Neumann 	u32 tmp = RREG32(DC_STUTTER_CNTL);
234557e252bfSMichael Neumann 
234657e252bfSMichael Neumann 	level->gen2PCIE = pi->pcie_gen2 ?
234757e252bfSMichael Neumann 		((pl->flags & ATOM_PPLIB_R600_FLAGS_PCIEGEN2) ? 1 : 0) : 0;
234857e252bfSMichael Neumann 
234957e252bfSMichael Neumann 	ret = ni_populate_sclk_value(rdev, pl->sclk, &level->sclk);
235057e252bfSMichael Neumann 	if (ret)
235157e252bfSMichael Neumann 		return ret;
235257e252bfSMichael Neumann 
235357e252bfSMichael Neumann 	level->mcFlags = 0;
235457e252bfSMichael Neumann 	if (pi->mclk_stutter_mode_threshold &&
235557e252bfSMichael Neumann 	    (pl->mclk <= pi->mclk_stutter_mode_threshold) &&
235657e252bfSMichael Neumann 	    !eg_pi->uvd_enabled &&
235757e252bfSMichael Neumann 	    (tmp & DC_STUTTER_ENABLE_A) &&
235857e252bfSMichael Neumann 	    (tmp & DC_STUTTER_ENABLE_B))
235957e252bfSMichael Neumann 		level->mcFlags |= NISLANDS_SMC_MC_STUTTER_EN;
236057e252bfSMichael Neumann 
236157e252bfSMichael Neumann 	if (pi->mem_gddr5) {
236257e252bfSMichael Neumann 		if (pl->mclk > pi->mclk_edc_enable_threshold)
236357e252bfSMichael Neumann 			level->mcFlags |= NISLANDS_SMC_MC_EDC_RD_FLAG;
236457e252bfSMichael Neumann 		if (pl->mclk > eg_pi->mclk_edc_wr_enable_threshold)
236557e252bfSMichael Neumann 			level->mcFlags |= NISLANDS_SMC_MC_EDC_WR_FLAG;
236657e252bfSMichael Neumann 
236757e252bfSMichael Neumann 		level->strobeMode = cypress_get_strobe_mode_settings(rdev, pl->mclk);
236857e252bfSMichael Neumann 
236957e252bfSMichael Neumann 		if (level->strobeMode & NISLANDS_SMC_STROBE_ENABLE) {
237057e252bfSMichael Neumann 			if (cypress_get_mclk_frequency_ratio(rdev, pl->mclk, true) >=
237157e252bfSMichael Neumann 			    ((RREG32(MC_SEQ_MISC7) >> 16) & 0xf))
237257e252bfSMichael Neumann 				dll_state_on = ((RREG32(MC_SEQ_MISC5) >> 1) & 0x1) ? true : false;
237357e252bfSMichael Neumann 			else
237457e252bfSMichael Neumann 				dll_state_on = ((RREG32(MC_SEQ_MISC6) >> 1) & 0x1) ? true : false;
237557e252bfSMichael Neumann 		} else {
237657e252bfSMichael Neumann 			dll_state_on = false;
237757e252bfSMichael Neumann 			if (pl->mclk > ni_pi->mclk_rtt_mode_threshold)
237857e252bfSMichael Neumann 				level->mcFlags |= NISLANDS_SMC_MC_RTT_ENABLE;
237957e252bfSMichael Neumann 		}
238057e252bfSMichael Neumann 
238157e252bfSMichael Neumann 		ret = ni_populate_mclk_value(rdev, pl->sclk, pl->mclk,
238257e252bfSMichael Neumann 					     &level->mclk,
238357e252bfSMichael Neumann 					     (level->strobeMode & NISLANDS_SMC_STROBE_ENABLE) != 0,
238457e252bfSMichael Neumann 					     dll_state_on);
238557e252bfSMichael Neumann 	} else
238657e252bfSMichael Neumann 		ret = ni_populate_mclk_value(rdev, pl->sclk, pl->mclk, &level->mclk, true, true);
238757e252bfSMichael Neumann 
238857e252bfSMichael Neumann 	if (ret)
238957e252bfSMichael Neumann 		return ret;
239057e252bfSMichael Neumann 
239157e252bfSMichael Neumann 	ret = ni_populate_voltage_value(rdev, &eg_pi->vddc_voltage_table,
239257e252bfSMichael Neumann 					pl->vddc, &level->vddc);
239357e252bfSMichael Neumann 	if (ret)
239457e252bfSMichael Neumann 		return ret;
239557e252bfSMichael Neumann 
239657e252bfSMichael Neumann 	ret = ni_get_std_voltage_value(rdev, &level->vddc, &std_vddc);
239757e252bfSMichael Neumann 	if (ret)
239857e252bfSMichael Neumann 		return ret;
239957e252bfSMichael Neumann 
240057e252bfSMichael Neumann 	ni_populate_std_voltage_value(rdev, std_vddc,
240157e252bfSMichael Neumann 				      level->vddc.index, &level->std_vddc);
240257e252bfSMichael Neumann 
240357e252bfSMichael Neumann 	if (eg_pi->vddci_control) {
240457e252bfSMichael Neumann 		ret = ni_populate_voltage_value(rdev, &eg_pi->vddci_voltage_table,
240557e252bfSMichael Neumann 						pl->vddci, &level->vddci);
240657e252bfSMichael Neumann 		if (ret)
240757e252bfSMichael Neumann 			return ret;
240857e252bfSMichael Neumann 	}
240957e252bfSMichael Neumann 
241057e252bfSMichael Neumann 	ni_populate_mvdd_value(rdev, pl->mclk, &level->mvdd);
241157e252bfSMichael Neumann 
241257e252bfSMichael Neumann 	return ret;
241357e252bfSMichael Neumann }
241457e252bfSMichael Neumann 
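/*
 * Fill in the aT (CG_R/CG_L) transition thresholds between adjacent
 * performance levels using r600_calculate_at(), with a different spacing
 * parameter while UVD is active and fixed fallback values if the
 * calculation fails.
 */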
241557e252bfSMichael Neumann static int ni_populate_smc_t(struct radeon_device *rdev,
241657e252bfSMichael Neumann 			     struct radeon_ps *radeon_state,
241757e252bfSMichael Neumann 			     NISLANDS_SMC_SWSTATE *smc_state)
241857e252bfSMichael Neumann {
241957e252bfSMichael Neumann 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
242057e252bfSMichael Neumann 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
242157e252bfSMichael Neumann 	struct ni_ps *state = ni_get_ps(radeon_state);
242257e252bfSMichael Neumann 	u32 a_t;
242357e252bfSMichael Neumann 	u32 t_l, t_h;
242457e252bfSMichael Neumann 	u32 high_bsp;
242557e252bfSMichael Neumann 	int i, ret;
242657e252bfSMichael Neumann 
242757e252bfSMichael Neumann 	if (state->performance_level_count >= 9)
242857e252bfSMichael Neumann 		return -EINVAL;
242957e252bfSMichael Neumann 
243057e252bfSMichael Neumann 	if (state->performance_level_count < 2) {
243157e252bfSMichael Neumann 		a_t = CG_R(0xffff) | CG_L(0);
243257e252bfSMichael Neumann 		smc_state->levels[0].aT = cpu_to_be32(a_t);
243357e252bfSMichael Neumann 		return 0;
243457e252bfSMichael Neumann 	}
243557e252bfSMichael Neumann 
243657e252bfSMichael Neumann 	smc_state->levels[0].aT = cpu_to_be32(0);
243757e252bfSMichael Neumann 
243857e252bfSMichael Neumann 	for (i = 0; i <= state->performance_level_count - 2; i++) {
243957e252bfSMichael Neumann 		if (eg_pi->uvd_enabled)
244057e252bfSMichael Neumann 			ret = r600_calculate_at(
244157e252bfSMichael Neumann 				1000 * (i * (eg_pi->smu_uvd_hs ? 2 : 8) + 2),
244257e252bfSMichael Neumann 				100 * R600_AH_DFLT,
244357e252bfSMichael Neumann 				state->performance_levels[i + 1].sclk,
244457e252bfSMichael Neumann 				state->performance_levels[i].sclk,
244557e252bfSMichael Neumann 				&t_l,
244657e252bfSMichael Neumann 				&t_h);
244757e252bfSMichael Neumann 		else
244857e252bfSMichael Neumann 			ret = r600_calculate_at(
244957e252bfSMichael Neumann 				1000 * (i + 1),
245057e252bfSMichael Neumann 				100 * R600_AH_DFLT,
245157e252bfSMichael Neumann 				state->performance_levels[i + 1].sclk,
245257e252bfSMichael Neumann 				state->performance_levels[i].sclk,
245357e252bfSMichael Neumann 				&t_l,
245457e252bfSMichael Neumann 				&t_h);
245557e252bfSMichael Neumann 
245657e252bfSMichael Neumann 		if (ret) {
245757e252bfSMichael Neumann 			t_h = (i + 1) * 1000 - 50 * R600_AH_DFLT;
245857e252bfSMichael Neumann 			t_l = (i + 1) * 1000 + 50 * R600_AH_DFLT;
245957e252bfSMichael Neumann 		}
246057e252bfSMichael Neumann 
246157e252bfSMichael Neumann 		a_t = be32_to_cpu(smc_state->levels[i].aT) & ~CG_R_MASK;
246257e252bfSMichael Neumann 		a_t |= CG_R(t_l * pi->bsp / 20000);
246357e252bfSMichael Neumann 		smc_state->levels[i].aT = cpu_to_be32(a_t);
246457e252bfSMichael Neumann 
246557e252bfSMichael Neumann 		high_bsp = (i == state->performance_level_count - 2) ?
246657e252bfSMichael Neumann 			pi->pbsp : pi->bsp;
246757e252bfSMichael Neumann 
246857e252bfSMichael Neumann 		a_t = CG_R(0xffff) | CG_L(t_h * high_bsp / 20000);
246957e252bfSMichael Neumann 		smc_state->levels[i + 1].aT = cpu_to_be32(a_t);
247057e252bfSMichael Neumann 	}
247157e252bfSMichael Neumann 
247257e252bfSMichael Neumann 	return 0;
247357e252bfSMichael Neumann }
247457e252bfSMichael Neumann 
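/*
 * Fill in the per-level DPM2 power containment parameters (MaxPS, near-TDP
 * decrement, above/below safe increments) and write the scaled power boost
 * limit into the DPM2 parameter block in SMC RAM.  Does nothing when power
 * containment is disabled.
 */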
247557e252bfSMichael Neumann static int ni_populate_power_containment_values(struct radeon_device *rdev,
247657e252bfSMichael Neumann 						struct radeon_ps *radeon_state,
247757e252bfSMichael Neumann 						NISLANDS_SMC_SWSTATE *smc_state)
247857e252bfSMichael Neumann {
247957e252bfSMichael Neumann 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
248057e252bfSMichael Neumann 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
248157e252bfSMichael Neumann 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
248257e252bfSMichael Neumann 	struct ni_ps *state = ni_get_ps(radeon_state);
248357e252bfSMichael Neumann 	u32 prev_sclk;
248457e252bfSMichael Neumann 	u32 max_sclk;
248557e252bfSMichael Neumann 	u32 min_sclk;
248657e252bfSMichael Neumann 	int i, ret;
248757e252bfSMichael Neumann 	u32 tdp_limit;
248857e252bfSMichael Neumann 	u32 near_tdp_limit;
248957e252bfSMichael Neumann 	u32 power_boost_limit;
249057e252bfSMichael Neumann 	u8 max_ps_percent;
249157e252bfSMichael Neumann 
249257e252bfSMichael Neumann 	if (!ni_pi->enable_power_containment)
249357e252bfSMichael Neumann 		return 0;
249457e252bfSMichael Neumann 
249557e252bfSMichael Neumann 	if (state->performance_level_count == 0)
249657e252bfSMichael Neumann 		return -EINVAL;
249757e252bfSMichael Neumann 
249857e252bfSMichael Neumann 	if (smc_state->levelCount != state->performance_level_count)
249957e252bfSMichael Neumann 		return -EINVAL;
250057e252bfSMichael Neumann 
250157e252bfSMichael Neumann 	ret = ni_calculate_adjusted_tdp_limits(rdev,
250257e252bfSMichael Neumann 					       false, /* ??? */
250357e252bfSMichael Neumann 					       rdev->pm.dpm.tdp_adjustment,
250457e252bfSMichael Neumann 					       &tdp_limit,
250557e252bfSMichael Neumann 					       &near_tdp_limit);
250657e252bfSMichael Neumann 	if (ret)
250757e252bfSMichael Neumann 		return ret;
250857e252bfSMichael Neumann 
250957e252bfSMichael Neumann 	power_boost_limit = ni_calculate_power_boost_limit(rdev, radeon_state, near_tdp_limit);
251057e252bfSMichael Neumann 
251157e252bfSMichael Neumann 	ret = rv770_write_smc_sram_dword(rdev,
251257e252bfSMichael Neumann 					 pi->state_table_start +
251357e252bfSMichael Neumann 					 offsetof(NISLANDS_SMC_STATETABLE, dpm2Params) +
251457e252bfSMichael Neumann 					 offsetof(PP_NIslands_DPM2Parameters, PowerBoostLimit),
251557e252bfSMichael Neumann 					 ni_scale_power_for_smc(power_boost_limit, ni_get_smc_power_scaling_factor(rdev)),
251657e252bfSMichael Neumann 					 pi->sram_end);
251757e252bfSMichael Neumann 	if (ret)
251857e252bfSMichael Neumann 		power_boost_limit = 0;
251957e252bfSMichael Neumann 
252057e252bfSMichael Neumann 	smc_state->levels[0].dpm2.MaxPS = 0;
252157e252bfSMichael Neumann 	smc_state->levels[0].dpm2.NearTDPDec = 0;
252257e252bfSMichael Neumann 	smc_state->levels[0].dpm2.AboveSafeInc = 0;
252357e252bfSMichael Neumann 	smc_state->levels[0].dpm2.BelowSafeInc = 0;
252457e252bfSMichael Neumann 	smc_state->levels[0].stateFlags |= power_boost_limit ? PPSMC_STATEFLAG_POWERBOOST : 0;
252557e252bfSMichael Neumann 
252657e252bfSMichael Neumann 	for (i = 1; i < state->performance_level_count; i++) {
252757e252bfSMichael Neumann 		prev_sclk = state->performance_levels[i-1].sclk;
252857e252bfSMichael Neumann 		max_sclk  = state->performance_levels[i].sclk;
252957e252bfSMichael Neumann 		max_ps_percent = (i != (state->performance_level_count - 1)) ?
253057e252bfSMichael Neumann 			NISLANDS_DPM2_MAXPS_PERCENT_M : NISLANDS_DPM2_MAXPS_PERCENT_H;
253157e252bfSMichael Neumann 
253257e252bfSMichael Neumann 		if (max_sclk < prev_sclk)
253357e252bfSMichael Neumann 			return -EINVAL;
253457e252bfSMichael Neumann 
253557e252bfSMichael Neumann 		if ((max_ps_percent == 0) || (prev_sclk == max_sclk) || eg_pi->uvd_enabled)
253657e252bfSMichael Neumann 			min_sclk = max_sclk;
253757e252bfSMichael Neumann 		else if (i == 1)
253857e252bfSMichael Neumann 			min_sclk = prev_sclk;
253957e252bfSMichael Neumann 		else
254057e252bfSMichael Neumann 			min_sclk = (prev_sclk * (u32)max_ps_percent) / 100;
254157e252bfSMichael Neumann 
254257e252bfSMichael Neumann 		if (min_sclk < state->performance_levels[0].sclk)
254357e252bfSMichael Neumann 			min_sclk = state->performance_levels[0].sclk;
254457e252bfSMichael Neumann 
254557e252bfSMichael Neumann 		if (min_sclk == 0)
254657e252bfSMichael Neumann 			return -EINVAL;
254757e252bfSMichael Neumann 
254857e252bfSMichael Neumann 		smc_state->levels[i].dpm2.MaxPS =
254957e252bfSMichael Neumann 			(u8)((NISLANDS_DPM2_MAX_PULSE_SKIP * (max_sclk - min_sclk)) / max_sclk);
255057e252bfSMichael Neumann 		smc_state->levels[i].dpm2.NearTDPDec = NISLANDS_DPM2_NEAR_TDP_DEC;
255157e252bfSMichael Neumann 		smc_state->levels[i].dpm2.AboveSafeInc = NISLANDS_DPM2_ABOVE_SAFE_INC;
255257e252bfSMichael Neumann 		smc_state->levels[i].dpm2.BelowSafeInc = NISLANDS_DPM2_BELOW_SAFE_INC;
255357e252bfSMichael Neumann 		smc_state->levels[i].stateFlags |=
255457e252bfSMichael Neumann 			((i != (state->performance_level_count - 1)) && power_boost_limit) ?
255557e252bfSMichael Neumann 			PPSMC_STATEFLAG_POWERBOOST : 0;
255657e252bfSMichael Neumann 	}
255757e252bfSMichael Neumann 
255857e252bfSMichael Neumann 	return 0;
255957e252bfSMichael Neumann }
256057e252bfSMichael Neumann 
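/*
 * Fill in the per-level SQ power throttling registers: apply the DPM2 ramp
 * limits for levels at or above the ramping threshold, and fall back to the
 * field mask values when ramping is disabled or a limit does not fit its
 * register field.
 */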
256157e252bfSMichael Neumann static int ni_populate_sq_ramping_values(struct radeon_device *rdev,
256257e252bfSMichael Neumann 					 struct radeon_ps *radeon_state,
256357e252bfSMichael Neumann 					 NISLANDS_SMC_SWSTATE *smc_state)
256457e252bfSMichael Neumann {
256557e252bfSMichael Neumann 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
256657e252bfSMichael Neumann 	struct ni_ps *state = ni_get_ps(radeon_state);
256757e252bfSMichael Neumann 	u32 sq_power_throttle;
256857e252bfSMichael Neumann 	u32 sq_power_throttle2;
256957e252bfSMichael Neumann 	bool enable_sq_ramping = ni_pi->enable_sq_ramping;
257057e252bfSMichael Neumann 	int i;
257157e252bfSMichael Neumann 
257257e252bfSMichael Neumann 	if (state->performance_level_count == 0)
257357e252bfSMichael Neumann 		return -EINVAL;
257457e252bfSMichael Neumann 
257557e252bfSMichael Neumann 	if (smc_state->levelCount != state->performance_level_count)
257657e252bfSMichael Neumann 		return -EINVAL;
257757e252bfSMichael Neumann 
257857e252bfSMichael Neumann 	if (rdev->pm.dpm.sq_ramping_threshold == 0)
257957e252bfSMichael Neumann 		return -EINVAL;
258057e252bfSMichael Neumann 
258157e252bfSMichael Neumann 	if (NISLANDS_DPM2_SQ_RAMP_MAX_POWER > (MAX_POWER_MASK >> MAX_POWER_SHIFT))
258257e252bfSMichael Neumann 		enable_sq_ramping = false;
258357e252bfSMichael Neumann 
258457e252bfSMichael Neumann 	if (NISLANDS_DPM2_SQ_RAMP_MIN_POWER > (MIN_POWER_MASK >> MIN_POWER_SHIFT))
258557e252bfSMichael Neumann 		enable_sq_ramping = false;
258657e252bfSMichael Neumann 
258757e252bfSMichael Neumann 	if (NISLANDS_DPM2_SQ_RAMP_MAX_POWER_DELTA > (MAX_POWER_DELTA_MASK >> MAX_POWER_DELTA_SHIFT))
258857e252bfSMichael Neumann 		enable_sq_ramping = false;
258957e252bfSMichael Neumann 
259057e252bfSMichael Neumann 	if (NISLANDS_DPM2_SQ_RAMP_STI_SIZE > (STI_SIZE_MASK >> STI_SIZE_SHIFT))
259157e252bfSMichael Neumann 		enable_sq_ramping = false;
259257e252bfSMichael Neumann 
2593*c6f73aabSFrançois Tigeot 	if (NISLANDS_DPM2_SQ_RAMP_LTI_RATIO > (LTI_RATIO_MASK >> LTI_RATIO_SHIFT))
259457e252bfSMichael Neumann 		enable_sq_ramping = false;
259557e252bfSMichael Neumann 
259657e252bfSMichael Neumann 	for (i = 0; i < state->performance_level_count; i++) {
259757e252bfSMichael Neumann 		sq_power_throttle  = 0;
259857e252bfSMichael Neumann 		sq_power_throttle2 = 0;
259957e252bfSMichael Neumann 
260057e252bfSMichael Neumann 		if ((state->performance_levels[i].sclk >= rdev->pm.dpm.sq_ramping_threshold) &&
260157e252bfSMichael Neumann 		    enable_sq_ramping) {
260257e252bfSMichael Neumann 			sq_power_throttle |= MAX_POWER(NISLANDS_DPM2_SQ_RAMP_MAX_POWER);
260357e252bfSMichael Neumann 			sq_power_throttle |= MIN_POWER(NISLANDS_DPM2_SQ_RAMP_MIN_POWER);
260457e252bfSMichael Neumann 			sq_power_throttle2 |= MAX_POWER_DELTA(NISLANDS_DPM2_SQ_RAMP_MAX_POWER_DELTA);
260557e252bfSMichael Neumann 			sq_power_throttle2 |= STI_SIZE(NISLANDS_DPM2_SQ_RAMP_STI_SIZE);
260657e252bfSMichael Neumann 			sq_power_throttle2 |= LTI_RATIO(NISLANDS_DPM2_SQ_RAMP_LTI_RATIO);
260757e252bfSMichael Neumann 		} else {
260857e252bfSMichael Neumann 			sq_power_throttle |= MAX_POWER_MASK | MIN_POWER_MASK;
260957e252bfSMichael Neumann 			sq_power_throttle2 |= MAX_POWER_DELTA_MASK | STI_SIZE_MASK | LTI_RATIO_MASK;
261057e252bfSMichael Neumann 		}
261157e252bfSMichael Neumann 
261257e252bfSMichael Neumann 		smc_state->levels[i].SQPowerThrottle   = cpu_to_be32(sq_power_throttle);
261357e252bfSMichael Neumann 		smc_state->levels[i].SQPowerThrottle_2 = cpu_to_be32(sq_power_throttle2);
261457e252bfSMichael Neumann 	}
261557e252bfSMichael Neumann 
261657e252bfSMichael Neumann 	return 0;
261757e252bfSMichael Neumann }
261857e252bfSMichael Neumann 
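/*
 * Ask the SMC to activate or deactivate TDP clamping (activation is skipped
 * for UVD states) and track the result in ni_pi->pc_enabled.
 */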
261957e252bfSMichael Neumann static int ni_enable_power_containment(struct radeon_device *rdev,
262057e252bfSMichael Neumann 				       struct radeon_ps *radeon_new_state,
262157e252bfSMichael Neumann 				       bool enable)
262257e252bfSMichael Neumann {
262357e252bfSMichael Neumann 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
262457e252bfSMichael Neumann 	PPSMC_Result smc_result;
262557e252bfSMichael Neumann 	int ret = 0;
262657e252bfSMichael Neumann 
262757e252bfSMichael Neumann 	if (ni_pi->enable_power_containment) {
262857e252bfSMichael Neumann 		if (enable) {
262957e252bfSMichael Neumann 			if (!r600_is_uvd_state(radeon_new_state->class, radeon_new_state->class2)) {
263057e252bfSMichael Neumann 				smc_result = rv770_send_msg_to_smc(rdev, PPSMC_TDPClampingActive);
263157e252bfSMichael Neumann 				if (smc_result != PPSMC_Result_OK) {
263257e252bfSMichael Neumann 					ret = -EINVAL;
263357e252bfSMichael Neumann 					ni_pi->pc_enabled = false;
263457e252bfSMichael Neumann 				} else {
263557e252bfSMichael Neumann 					ni_pi->pc_enabled = true;
263657e252bfSMichael Neumann 				}
263757e252bfSMichael Neumann 			}
263857e252bfSMichael Neumann 		} else {
263957e252bfSMichael Neumann 			smc_result = rv770_send_msg_to_smc(rdev, PPSMC_TDPClampingInactive);
264057e252bfSMichael Neumann 			if (smc_result != PPSMC_Result_OK)
264157e252bfSMichael Neumann 				ret = -EINVAL;
264257e252bfSMichael Neumann 			ni_pi->pc_enabled = false;
264357e252bfSMichael Neumann 		}
264457e252bfSMichael Neumann 	}
264557e252bfSMichael Neumann 
264657e252bfSMichael Neumann 	return ret;
264757e252bfSMichael Neumann }
264857e252bfSMichael Neumann 
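/*
 * Convert a complete radeon power state into the SMC software state:
 * per-level hardware settings, arbitration and AC-timing indices, display
 * watermarks, switching parameters, power containment, SQ ramping and the
 * aT thresholds.
 */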
264957e252bfSMichael Neumann static int ni_convert_power_state_to_smc(struct radeon_device *rdev,
265057e252bfSMichael Neumann 					 struct radeon_ps *radeon_state,
265157e252bfSMichael Neumann 					 NISLANDS_SMC_SWSTATE *smc_state)
265257e252bfSMichael Neumann {
265357e252bfSMichael Neumann 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
265457e252bfSMichael Neumann 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
265557e252bfSMichael Neumann 	struct ni_ps *state = ni_get_ps(radeon_state);
265657e252bfSMichael Neumann 	int i, ret;
265757e252bfSMichael Neumann 	u32 threshold = state->performance_levels[state->performance_level_count - 1].sclk * 100 / 100; /* 100% of the highest sclk */
265857e252bfSMichael Neumann 
265957e252bfSMichael Neumann 	if (!(radeon_state->caps & ATOM_PPLIB_DISALLOW_ON_DC))
266057e252bfSMichael Neumann 		smc_state->flags |= PPSMC_SWSTATE_FLAG_DC;
266157e252bfSMichael Neumann 
266257e252bfSMichael Neumann 	smc_state->levelCount = 0;
266357e252bfSMichael Neumann 
266457e252bfSMichael Neumann 	if (state->performance_level_count > NISLANDS_MAX_SMC_PERFORMANCE_LEVELS_PER_SWSTATE)
266557e252bfSMichael Neumann 		return -EINVAL;
266657e252bfSMichael Neumann 
266757e252bfSMichael Neumann 	for (i = 0; i < state->performance_level_count; i++) {
266857e252bfSMichael Neumann 		ret = ni_convert_power_level_to_smc(rdev, &state->performance_levels[i],
266957e252bfSMichael Neumann 						    &smc_state->levels[i]);
267057e252bfSMichael Neumann 		smc_state->levels[i].arbRefreshState =
267157e252bfSMichael Neumann 			(u8)(NISLANDS_DRIVER_STATE_ARB_INDEX + i);
267257e252bfSMichael Neumann 
267357e252bfSMichael Neumann 		if (ret)
267457e252bfSMichael Neumann 			return ret;
267557e252bfSMichael Neumann 
267657e252bfSMichael Neumann 		if (ni_pi->enable_power_containment)
267757e252bfSMichael Neumann 			smc_state->levels[i].displayWatermark =
267857e252bfSMichael Neumann 				(state->performance_levels[i].sclk < threshold) ?
267957e252bfSMichael Neumann 				PPSMC_DISPLAY_WATERMARK_LOW : PPSMC_DISPLAY_WATERMARK_HIGH;
268057e252bfSMichael Neumann 		else
268157e252bfSMichael Neumann 			smc_state->levels[i].displayWatermark = (i < 2) ?
268257e252bfSMichael Neumann 				PPSMC_DISPLAY_WATERMARK_LOW : PPSMC_DISPLAY_WATERMARK_HIGH;
268357e252bfSMichael Neumann 
268457e252bfSMichael Neumann 		if (eg_pi->dynamic_ac_timing)
268557e252bfSMichael Neumann 			smc_state->levels[i].ACIndex = NISLANDS_MCREGISTERTABLE_FIRST_DRIVERSTATE_SLOT + i;
268657e252bfSMichael Neumann 		else
268757e252bfSMichael Neumann 			smc_state->levels[i].ACIndex = 0;
268857e252bfSMichael Neumann 
268957e252bfSMichael Neumann 		smc_state->levelCount++;
269057e252bfSMichael Neumann 	}
269157e252bfSMichael Neumann 
269257e252bfSMichael Neumann 	rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_watermark_threshold,
269357e252bfSMichael Neumann 				      cpu_to_be32(threshold / 512));
269457e252bfSMichael Neumann 
269557e252bfSMichael Neumann 	ni_populate_smc_sp(rdev, radeon_state, smc_state);
269657e252bfSMichael Neumann 
269757e252bfSMichael Neumann 	ret = ni_populate_power_containment_values(rdev, radeon_state, smc_state);
269857e252bfSMichael Neumann 	if (ret)
269957e252bfSMichael Neumann 		ni_pi->enable_power_containment = false;
270057e252bfSMichael Neumann 
270157e252bfSMichael Neumann 	ret = ni_populate_sq_ramping_values(rdev, radeon_state, smc_state);
270257e252bfSMichael Neumann 	if (ret)
270357e252bfSMichael Neumann 		ni_pi->enable_sq_ramping = false;
270457e252bfSMichael Neumann 
270557e252bfSMichael Neumann 	return ni_populate_smc_t(rdev, radeon_state, smc_state);
270657e252bfSMichael Neumann }
270757e252bfSMichael Neumann 
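/*
 * Convert the new power state and copy it into the driverState slot of the
 * SMC state table in SMC RAM.
 */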
270857e252bfSMichael Neumann static int ni_upload_sw_state(struct radeon_device *rdev,
270957e252bfSMichael Neumann 			      struct radeon_ps *radeon_new_state)
271057e252bfSMichael Neumann {
271157e252bfSMichael Neumann 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
271257e252bfSMichael Neumann 	u16 address = pi->state_table_start +
271357e252bfSMichael Neumann 		offsetof(NISLANDS_SMC_STATETABLE, driverState);
271457e252bfSMichael Neumann 	u16 state_size = sizeof(NISLANDS_SMC_SWSTATE) +
271557e252bfSMichael Neumann 		((NISLANDS_MAX_SMC_PERFORMANCE_LEVELS_PER_SWSTATE - 1) * sizeof(NISLANDS_SMC_HW_PERFORMANCE_LEVEL));
271657e252bfSMichael Neumann 	int ret;
271757e252bfSMichael Neumann 	NISLANDS_SMC_SWSTATE *smc_state = kzalloc(state_size, GFP_KERNEL);
271857e252bfSMichael Neumann 
271957e252bfSMichael Neumann 	if (smc_state == NULL)
272057e252bfSMichael Neumann 		return -ENOMEM;
272157e252bfSMichael Neumann 
272257e252bfSMichael Neumann 	ret = ni_convert_power_state_to_smc(rdev, radeon_new_state, smc_state);
272357e252bfSMichael Neumann 	if (ret)
272457e252bfSMichael Neumann 		goto done;
272557e252bfSMichael Neumann 
272657e252bfSMichael Neumann 	ret = rv770_copy_bytes_to_smc(rdev, address, (u8 *)smc_state, state_size, pi->sram_end);
272757e252bfSMichael Neumann 
272857e252bfSMichael Neumann done:
272957e252bfSMichael Neumann 	kfree(smc_state);
273057e252bfSMichael Neumann 
273157e252bfSMichael Neumann 	return ret;
273257e252bfSMichael Neumann }
273357e252bfSMichael Neumann 
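/*
 * Append derived entries to the MC register table: MC_SEQ_MISC1 spawns
 * MC_PMG_CMD_EMRS and MC_PMG_CMD_MRS entries, MC_SEQ_RESERVE_M spawns an
 * MC_PMG_CMD_MRS1 entry, each combining the current register value with the
 * per-entry table data.
 */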
273457e252bfSMichael Neumann static int ni_set_mc_special_registers(struct radeon_device *rdev,
273557e252bfSMichael Neumann 				       struct ni_mc_reg_table *table)
273657e252bfSMichael Neumann {
273757e252bfSMichael Neumann 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
273857e252bfSMichael Neumann 	u8 i, j, k;
273957e252bfSMichael Neumann 	u32 temp_reg;
274057e252bfSMichael Neumann 
274157e252bfSMichael Neumann 	for (i = 0, j = table->last; i < table->last; i++) {
274257e252bfSMichael Neumann 		switch (table->mc_reg_address[i].s1) {
274357e252bfSMichael Neumann 		case MC_SEQ_MISC1 >> 2:
274457e252bfSMichael Neumann 			if (j >= SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE)
274557e252bfSMichael Neumann 				return -EINVAL;
274657e252bfSMichael Neumann 			temp_reg = RREG32(MC_PMG_CMD_EMRS);
274757e252bfSMichael Neumann 			table->mc_reg_address[j].s1 = MC_PMG_CMD_EMRS >> 2;
274857e252bfSMichael Neumann 			table->mc_reg_address[j].s0 = MC_SEQ_PMG_CMD_EMRS_LP >> 2;
274957e252bfSMichael Neumann 			for (k = 0; k < table->num_entries; k++)
275057e252bfSMichael Neumann 				table->mc_reg_table_entry[k].mc_data[j] =
275157e252bfSMichael Neumann 					((temp_reg & 0xffff0000)) |
275257e252bfSMichael Neumann 					((table->mc_reg_table_entry[k].mc_data[i] & 0xffff0000) >> 16);
275357e252bfSMichael Neumann 			j++;
275457e252bfSMichael Neumann 			if (j >= SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE)
275557e252bfSMichael Neumann 				return -EINVAL;
275657e252bfSMichael Neumann 
275757e252bfSMichael Neumann 			temp_reg = RREG32(MC_PMG_CMD_MRS);
275857e252bfSMichael Neumann 			table->mc_reg_address[j].s1 = MC_PMG_CMD_MRS >> 2;
275957e252bfSMichael Neumann 			table->mc_reg_address[j].s0 = MC_SEQ_PMG_CMD_MRS_LP >> 2;
276057e252bfSMichael Neumann 			for (k = 0; k < table->num_entries; k++) {
276157e252bfSMichael Neumann 				table->mc_reg_table_entry[k].mc_data[j] =
276257e252bfSMichael Neumann 					(temp_reg & 0xffff0000) |
276357e252bfSMichael Neumann 					(table->mc_reg_table_entry[k].mc_data[i] & 0x0000ffff);
276457e252bfSMichael Neumann 				if (!pi->mem_gddr5)
276557e252bfSMichael Neumann 					table->mc_reg_table_entry[k].mc_data[j] |= 0x100;
276657e252bfSMichael Neumann 			}
276757e252bfSMichael Neumann 			j++;
276857e252bfSMichael Neumann 			if (j > SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE)
276957e252bfSMichael Neumann 				return -EINVAL;
277057e252bfSMichael Neumann 			break;
277157e252bfSMichael Neumann 		case MC_SEQ_RESERVE_M >> 2:
			if (j >= SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE)
				return -EINVAL;
277257e252bfSMichael Neumann 			temp_reg = RREG32(MC_PMG_CMD_MRS1);
277357e252bfSMichael Neumann 			table->mc_reg_address[j].s1 = MC_PMG_CMD_MRS1 >> 2;
277457e252bfSMichael Neumann 			table->mc_reg_address[j].s0 = MC_SEQ_PMG_CMD_MRS1_LP >> 2;
277557e252bfSMichael Neumann 			for (k = 0; k < table->num_entries; k++)
277657e252bfSMichael Neumann 				table->mc_reg_table_entry[k].mc_data[j] =
277757e252bfSMichael Neumann 					(temp_reg & 0xffff0000) |
277857e252bfSMichael Neumann 					(table->mc_reg_table_entry[k].mc_data[i] & 0x0000ffff);
277957e252bfSMichael Neumann 			j++;
278057e252bfSMichael Neumann 			if (j > SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE)
278157e252bfSMichael Neumann 				return -EINVAL;
278257e252bfSMichael Neumann 			break;
278357e252bfSMichael Neumann 		default:
278457e252bfSMichael Neumann 			break;
278557e252bfSMichael Neumann 		}
278657e252bfSMichael Neumann 	}
278757e252bfSMichael Neumann 
278857e252bfSMichael Neumann 	table->last = j;
278957e252bfSMichael Neumann 
279057e252bfSMichael Neumann 	return 0;
279157e252bfSMichael Neumann }
279257e252bfSMichael Neumann 
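/*
 * Map an MC sequencer register to its _LP shadow register; returns false if
 * the register has no shadow.
 */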
279357e252bfSMichael Neumann static bool ni_check_s0_mc_reg_index(u16 in_reg, u16 *out_reg)
279457e252bfSMichael Neumann {
279557e252bfSMichael Neumann 	bool result = true;
279657e252bfSMichael Neumann 
279757e252bfSMichael Neumann 	switch (in_reg) {
279857e252bfSMichael Neumann 	case MC_SEQ_RAS_TIMING >> 2:
279957e252bfSMichael Neumann 		*out_reg = MC_SEQ_RAS_TIMING_LP >> 2;
280057e252bfSMichael Neumann 		break;
280157e252bfSMichael Neumann 	case MC_SEQ_CAS_TIMING >> 2:
280257e252bfSMichael Neumann 		*out_reg = MC_SEQ_CAS_TIMING_LP >> 2;
280357e252bfSMichael Neumann 		break;
280457e252bfSMichael Neumann 	case MC_SEQ_MISC_TIMING >> 2:
280557e252bfSMichael Neumann 		*out_reg = MC_SEQ_MISC_TIMING_LP >> 2;
280657e252bfSMichael Neumann 		break;
280757e252bfSMichael Neumann 	case MC_SEQ_MISC_TIMING2 >> 2:
280857e252bfSMichael Neumann 		*out_reg = MC_SEQ_MISC_TIMING2_LP >> 2;
280957e252bfSMichael Neumann 		break;
281057e252bfSMichael Neumann 	case MC_SEQ_RD_CTL_D0 >> 2:
281157e252bfSMichael Neumann 		*out_reg = MC_SEQ_RD_CTL_D0_LP >> 2;
281257e252bfSMichael Neumann 		break;
281357e252bfSMichael Neumann 	case MC_SEQ_RD_CTL_D1 >> 2:
281457e252bfSMichael Neumann 		*out_reg = MC_SEQ_RD_CTL_D1_LP >> 2;
281557e252bfSMichael Neumann 		break;
281657e252bfSMichael Neumann 	case MC_SEQ_WR_CTL_D0 >> 2:
281757e252bfSMichael Neumann 		*out_reg = MC_SEQ_WR_CTL_D0_LP >> 2;
281857e252bfSMichael Neumann 		break;
281957e252bfSMichael Neumann 	case MC_SEQ_WR_CTL_D1 >> 2:
282057e252bfSMichael Neumann 		*out_reg = MC_SEQ_WR_CTL_D1_LP >> 2;
282157e252bfSMichael Neumann 		break;
282257e252bfSMichael Neumann 	case MC_PMG_CMD_EMRS >> 2:
282357e252bfSMichael Neumann 		*out_reg = MC_SEQ_PMG_CMD_EMRS_LP >> 2;
282457e252bfSMichael Neumann 		break;
282557e252bfSMichael Neumann 	case MC_PMG_CMD_MRS >> 2:
282657e252bfSMichael Neumann 		*out_reg = MC_SEQ_PMG_CMD_MRS_LP >> 2;
282757e252bfSMichael Neumann 		break;
282857e252bfSMichael Neumann 	case MC_PMG_CMD_MRS1 >> 2:
282957e252bfSMichael Neumann 		*out_reg = MC_SEQ_PMG_CMD_MRS1_LP >> 2;
283057e252bfSMichael Neumann 		break;
283157e252bfSMichael Neumann 	case MC_SEQ_PMG_TIMING >> 2:
283257e252bfSMichael Neumann 		*out_reg = MC_SEQ_PMG_TIMING_LP >> 2;
283357e252bfSMichael Neumann 		break;
283457e252bfSMichael Neumann 	case MC_PMG_CMD_MRS2 >> 2:
283557e252bfSMichael Neumann 		*out_reg = MC_SEQ_PMG_CMD_MRS2_LP >> 2;
283657e252bfSMichael Neumann 		break;
283757e252bfSMichael Neumann 	default:
283857e252bfSMichael Neumann 		result = false;
283957e252bfSMichael Neumann 		break;
284057e252bfSMichael Neumann 	}
284157e252bfSMichael Neumann 
284257e252bfSMichael Neumann 	return result;
284357e252bfSMichael Neumann }
284457e252bfSMichael Neumann 
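/*
 * Mark a register column as valid if its value differs between any two
 * consecutive entries of the MC register table.
 */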
284557e252bfSMichael Neumann static void ni_set_valid_flag(struct ni_mc_reg_table *table)
284657e252bfSMichael Neumann {
284757e252bfSMichael Neumann 	u8 i, j;
284857e252bfSMichael Neumann 
284957e252bfSMichael Neumann 	for (i = 0; i < table->last; i++) {
285057e252bfSMichael Neumann 		for (j = 1; j < table->num_entries; j++) {
285157e252bfSMichael Neumann 			if (table->mc_reg_table_entry[j-1].mc_data[i] != table->mc_reg_table_entry[j].mc_data[i]) {
285257e252bfSMichael Neumann 				table->valid_flag |= 1 << i;
285357e252bfSMichael Neumann 				break;
285457e252bfSMichael Neumann 			}
285557e252bfSMichael Neumann 		}
285657e252bfSMichael Neumann 	}
285757e252bfSMichael Neumann }
285857e252bfSMichael Neumann 
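/*
 * Fill in the s0 (shadow/_LP) address for every register in the table,
 * falling back to the s1 address when there is no shadow register.
 */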
285957e252bfSMichael Neumann static void ni_set_s0_mc_reg_index(struct ni_mc_reg_table *table)
286057e252bfSMichael Neumann {
286157e252bfSMichael Neumann 	u32 i;
286257e252bfSMichael Neumann 	u16 address;
286357e252bfSMichael Neumann 
286457e252bfSMichael Neumann 	for (i = 0; i < table->last; i++)
286557e252bfSMichael Neumann 		table->mc_reg_address[i].s0 =
286657e252bfSMichael Neumann 			ni_check_s0_mc_reg_index(table->mc_reg_address[i].s1, &address) ?
286757e252bfSMichael Neumann 			address : table->mc_reg_address[i].s1;
286857e252bfSMichael Neumann }
286957e252bfSMichael Neumann 
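/*
 * Copy the VBIOS/ATOM MC register table into the driver's ni_mc_reg_table,
 * checking the register count and number of entries against the driver
 * limits.
 */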
287057e252bfSMichael Neumann static int ni_copy_vbios_mc_reg_table(struct atom_mc_reg_table *table,
287157e252bfSMichael Neumann 				      struct ni_mc_reg_table *ni_table)
287257e252bfSMichael Neumann {
287357e252bfSMichael Neumann 	u8 i, j;
287457e252bfSMichael Neumann 
287557e252bfSMichael Neumann 	if (table->last > SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE)
287657e252bfSMichael Neumann 		return -EINVAL;
287757e252bfSMichael Neumann 	if (table->num_entries > MAX_AC_TIMING_ENTRIES)
287857e252bfSMichael Neumann 		return -EINVAL;
287957e252bfSMichael Neumann 
288057e252bfSMichael Neumann 	for (i = 0; i < table->last; i++)
288157e252bfSMichael Neumann 		ni_table->mc_reg_address[i].s1 = table->mc_reg_address[i].s1;
288257e252bfSMichael Neumann 	ni_table->last = table->last;
288357e252bfSMichael Neumann 
288457e252bfSMichael Neumann 	for (i = 0; i < table->num_entries; i++) {
288557e252bfSMichael Neumann 		ni_table->mc_reg_table_entry[i].mclk_max =
288657e252bfSMichael Neumann 			table->mc_reg_table_entry[i].mclk_max;
288757e252bfSMichael Neumann 		for (j = 0; j < table->last; j++)
288857e252bfSMichael Neumann 			ni_table->mc_reg_table_entry[i].mc_data[j] =
288957e252bfSMichael Neumann 				table->mc_reg_table_entry[i].mc_data[j];
289057e252bfSMichael Neumann 	}
289157e252bfSMichael Neumann 	ni_table->num_entries = table->num_entries;
289257e252bfSMichael Neumann 
289357e252bfSMichael Neumann 	return 0;
289457e252bfSMichael Neumann }
289557e252bfSMichael Neumann 
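/*
 * Seed the _LP shadow registers from the live MC registers, read the MC
 * register table from the VBIOS for the installed memory module, then derive
 * the shadow addresses, the special (derived) registers and the valid flags.
 */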
289657e252bfSMichael Neumann static int ni_initialize_mc_reg_table(struct radeon_device *rdev)
289757e252bfSMichael Neumann {
289857e252bfSMichael Neumann 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
289957e252bfSMichael Neumann 	int ret;
290057e252bfSMichael Neumann 	struct atom_mc_reg_table *table;
290157e252bfSMichael Neumann 	struct ni_mc_reg_table *ni_table = &ni_pi->mc_reg_table;
290257e252bfSMichael Neumann 	u8 module_index = rv770_get_memory_module_index(rdev);
290357e252bfSMichael Neumann 
290457e252bfSMichael Neumann 	table = kzalloc(sizeof(struct atom_mc_reg_table), GFP_KERNEL);
290557e252bfSMichael Neumann 	if (!table)
290657e252bfSMichael Neumann 		return -ENOMEM;
290757e252bfSMichael Neumann 
290857e252bfSMichael Neumann 	WREG32(MC_SEQ_RAS_TIMING_LP, RREG32(MC_SEQ_RAS_TIMING));
290957e252bfSMichael Neumann 	WREG32(MC_SEQ_CAS_TIMING_LP, RREG32(MC_SEQ_CAS_TIMING));
291057e252bfSMichael Neumann 	WREG32(MC_SEQ_MISC_TIMING_LP, RREG32(MC_SEQ_MISC_TIMING));
291157e252bfSMichael Neumann 	WREG32(MC_SEQ_MISC_TIMING2_LP, RREG32(MC_SEQ_MISC_TIMING2));
291257e252bfSMichael Neumann 	WREG32(MC_SEQ_PMG_CMD_EMRS_LP, RREG32(MC_PMG_CMD_EMRS));
291357e252bfSMichael Neumann 	WREG32(MC_SEQ_PMG_CMD_MRS_LP, RREG32(MC_PMG_CMD_MRS));
291457e252bfSMichael Neumann 	WREG32(MC_SEQ_PMG_CMD_MRS1_LP, RREG32(MC_PMG_CMD_MRS1));
291557e252bfSMichael Neumann 	WREG32(MC_SEQ_WR_CTL_D0_LP, RREG32(MC_SEQ_WR_CTL_D0));
291657e252bfSMichael Neumann 	WREG32(MC_SEQ_WR_CTL_D1_LP, RREG32(MC_SEQ_WR_CTL_D1));
291757e252bfSMichael Neumann 	WREG32(MC_SEQ_RD_CTL_D0_LP, RREG32(MC_SEQ_RD_CTL_D0));
291857e252bfSMichael Neumann 	WREG32(MC_SEQ_RD_CTL_D1_LP, RREG32(MC_SEQ_RD_CTL_D1));
291957e252bfSMichael Neumann 	WREG32(MC_SEQ_PMG_TIMING_LP, RREG32(MC_SEQ_PMG_TIMING));
292057e252bfSMichael Neumann 	WREG32(MC_SEQ_PMG_CMD_MRS2_LP, RREG32(MC_PMG_CMD_MRS2));
292157e252bfSMichael Neumann 
292257e252bfSMichael Neumann 	ret = radeon_atom_init_mc_reg_table(rdev, module_index, table);
292357e252bfSMichael Neumann 
292457e252bfSMichael Neumann 	if (ret)
292557e252bfSMichael Neumann 		goto init_mc_done;
292657e252bfSMichael Neumann 
292757e252bfSMichael Neumann 	ret = ni_copy_vbios_mc_reg_table(table, ni_table);
292857e252bfSMichael Neumann 
292957e252bfSMichael Neumann 	if (ret)
293057e252bfSMichael Neumann 		goto init_mc_done;
293157e252bfSMichael Neumann 
293257e252bfSMichael Neumann 	ni_set_s0_mc_reg_index(ni_table);
293357e252bfSMichael Neumann 
293457e252bfSMichael Neumann 	ret = ni_set_mc_special_registers(rdev, ni_table);
293557e252bfSMichael Neumann 
293657e252bfSMichael Neumann 	if (ret)
293757e252bfSMichael Neumann 		goto init_mc_done;
293857e252bfSMichael Neumann 
293957e252bfSMichael Neumann 	ni_set_valid_flag(ni_table);
294057e252bfSMichael Neumann 
294157e252bfSMichael Neumann init_mc_done:
294257e252bfSMichael Neumann 	kfree(table);
294357e252bfSMichael Neumann 
294457e252bfSMichael Neumann 	return ret;
294557e252bfSMichael Neumann }
294657e252bfSMichael Neumann 
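/*
 * Descriptive comment (added): copy the s0/s1 address pairs of the
 * registers flagged in valid_flag into the SMC register table in
 * big-endian order, capped at SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE.
 */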
294757e252bfSMichael Neumann static void ni_populate_mc_reg_addresses(struct radeon_device *rdev,
294857e252bfSMichael Neumann 					 SMC_NIslands_MCRegisters *mc_reg_table)
294957e252bfSMichael Neumann {
295057e252bfSMichael Neumann 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
295157e252bfSMichael Neumann 	u32 i, j;
295257e252bfSMichael Neumann 
295357e252bfSMichael Neumann 	for (i = 0, j = 0; j < ni_pi->mc_reg_table.last; j++) {
295457e252bfSMichael Neumann 		if (ni_pi->mc_reg_table.valid_flag & (1 << j)) {
295557e252bfSMichael Neumann 			if (i >= SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE)
295657e252bfSMichael Neumann 				break;
295757e252bfSMichael Neumann 			mc_reg_table->address[i].s0 =
295857e252bfSMichael Neumann 				cpu_to_be16(ni_pi->mc_reg_table.mc_reg_address[j].s0);
295957e252bfSMichael Neumann 			mc_reg_table->address[i].s1 =
296057e252bfSMichael Neumann 				cpu_to_be16(ni_pi->mc_reg_table.mc_reg_address[j].s1);
296157e252bfSMichael Neumann 			i++;
296257e252bfSMichael Neumann 		}
296357e252bfSMichael Neumann 	}
296457e252bfSMichael Neumann 	mc_reg_table->last = (u8)i;
296557e252bfSMichael Neumann }
296657e252bfSMichael Neumann 
296757e252bfSMichael Neumann 
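/*
 * Descriptive comment (added): pack one MC register table entry into an
 * SMC register set, keeping only the registers whose valid_flag bit is
 * set and byte-swapping the values to big-endian.
 */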
296857e252bfSMichael Neumann static void ni_convert_mc_registers(struct ni_mc_reg_entry *entry,
296957e252bfSMichael Neumann 				    SMC_NIslands_MCRegisterSet *data,
297057e252bfSMichael Neumann 				    u32 num_entries, u32 valid_flag)
297157e252bfSMichael Neumann {
297257e252bfSMichael Neumann 	u32 i, j;
297357e252bfSMichael Neumann 
297457e252bfSMichael Neumann 	for (i = 0, j = 0; j < num_entries; j++) {
297557e252bfSMichael Neumann 		if (valid_flag & (1 << j)) {
297657e252bfSMichael Neumann 			data->value[i] = cpu_to_be32(entry->mc_data[j]);
297757e252bfSMichael Neumann 			i++;
297857e252bfSMichael Neumann 		}
297957e252bfSMichael Neumann 	}
298057e252bfSMichael Neumann }
298157e252bfSMichael Neumann 
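/*
 * Descriptive comment (added): select the first AC timing entry whose
 * mclk_max covers the performance level's memory clock (falling back to
 * the last entry) and convert it into the given SMC register set.
 */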
298257e252bfSMichael Neumann static void ni_convert_mc_reg_table_entry_to_smc(struct radeon_device *rdev,
298357e252bfSMichael Neumann 						 struct rv7xx_pl *pl,
298457e252bfSMichael Neumann 						 SMC_NIslands_MCRegisterSet *mc_reg_table_data)
298557e252bfSMichael Neumann {
298657e252bfSMichael Neumann 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
298757e252bfSMichael Neumann 	u32 i = 0;
298857e252bfSMichael Neumann 
298957e252bfSMichael Neumann 	for (i = 0; i < ni_pi->mc_reg_table.num_entries; i++) {
299057e252bfSMichael Neumann 		if (pl->mclk <= ni_pi->mc_reg_table.mc_reg_table_entry[i].mclk_max)
299157e252bfSMichael Neumann 			break;
299257e252bfSMichael Neumann 	}
299357e252bfSMichael Neumann 
299457e252bfSMichael Neumann 	if ((i == ni_pi->mc_reg_table.num_entries) && (i > 0))
299557e252bfSMichael Neumann 		--i;
299657e252bfSMichael Neumann 
299757e252bfSMichael Neumann 	ni_convert_mc_registers(&ni_pi->mc_reg_table.mc_reg_table_entry[i],
299857e252bfSMichael Neumann 				mc_reg_table_data,
299957e252bfSMichael Neumann 				ni_pi->mc_reg_table.last,
300057e252bfSMichael Neumann 				ni_pi->mc_reg_table.valid_flag);
300157e252bfSMichael Neumann }
300257e252bfSMichael Neumann 
300357e252bfSMichael Neumann static void ni_convert_mc_reg_table_to_smc(struct radeon_device *rdev,
300457e252bfSMichael Neumann 					   struct radeon_ps *radeon_state,
300557e252bfSMichael Neumann 					   SMC_NIslands_MCRegisters *mc_reg_table)
300657e252bfSMichael Neumann {
300757e252bfSMichael Neumann 	struct ni_ps *state = ni_get_ps(radeon_state);
300857e252bfSMichael Neumann 	int i;
300957e252bfSMichael Neumann 
301057e252bfSMichael Neumann 	for (i = 0; i < state->performance_level_count; i++) {
301157e252bfSMichael Neumann 		ni_convert_mc_reg_table_entry_to_smc(rdev,
301257e252bfSMichael Neumann 						     &state->performance_levels[i],
301357e252bfSMichael Neumann 						     &mc_reg_table->data[NISLANDS_MCREGISTERTABLE_FIRST_DRIVERSTATE_SLOT + i]);
301457e252bfSMichael Neumann 	}
301557e252bfSMichael Neumann }
301657e252bfSMichael Neumann 
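/*
 * Descriptive comment (added): build the full SMC MC register table for
 * the boot state: the register address list, the boot performance level
 * in slot 0, a copy of the first AC timing entry in slot 1, and one
 * entry per boot-state performance level in the driver-state slots, then
 * copy the whole table into SMC RAM.
 */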
301757e252bfSMichael Neumann static int ni_populate_mc_reg_table(struct radeon_device *rdev,
301857e252bfSMichael Neumann 				    struct radeon_ps *radeon_boot_state)
301957e252bfSMichael Neumann {
302057e252bfSMichael Neumann 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
302157e252bfSMichael Neumann 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
302257e252bfSMichael Neumann 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
302357e252bfSMichael Neumann 	struct ni_ps *boot_state = ni_get_ps(radeon_boot_state);
302457e252bfSMichael Neumann 	SMC_NIslands_MCRegisters *mc_reg_table = &ni_pi->smc_mc_reg_table;
302557e252bfSMichael Neumann 
302657e252bfSMichael Neumann 	memset(mc_reg_table, 0, sizeof(SMC_NIslands_MCRegisters));
302757e252bfSMichael Neumann 
302857e252bfSMichael Neumann 	rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_seq_index, 1);
302957e252bfSMichael Neumann 
303057e252bfSMichael Neumann 	ni_populate_mc_reg_addresses(rdev, mc_reg_table);
303157e252bfSMichael Neumann 
303257e252bfSMichael Neumann 	ni_convert_mc_reg_table_entry_to_smc(rdev, &boot_state->performance_levels[0],
303357e252bfSMichael Neumann 					     &mc_reg_table->data[0]);
303457e252bfSMichael Neumann 
303557e252bfSMichael Neumann 	ni_convert_mc_registers(&ni_pi->mc_reg_table.mc_reg_table_entry[0],
303657e252bfSMichael Neumann 				&mc_reg_table->data[1],
303757e252bfSMichael Neumann 				ni_pi->mc_reg_table.last,
303857e252bfSMichael Neumann 				ni_pi->mc_reg_table.valid_flag);
303957e252bfSMichael Neumann 
304057e252bfSMichael Neumann 	ni_convert_mc_reg_table_to_smc(rdev, radeon_boot_state, mc_reg_table);
304157e252bfSMichael Neumann 
304257e252bfSMichael Neumann 	return rv770_copy_bytes_to_smc(rdev, eg_pi->mc_reg_table_start,
304357e252bfSMichael Neumann 				       (u8 *)mc_reg_table,
304457e252bfSMichael Neumann 				       sizeof(SMC_NIslands_MCRegisters),
304557e252bfSMichael Neumann 				       pi->sram_end);
304657e252bfSMichael Neumann }
304757e252bfSMichael Neumann 
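/*
 * Descriptive comment (added): regenerate only the driver-state slots of
 * the MC register table for the new power state and write just that
 * region into SMC RAM.
 */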
304857e252bfSMichael Neumann static int ni_upload_mc_reg_table(struct radeon_device *rdev,
304957e252bfSMichael Neumann 				  struct radeon_ps *radeon_new_state)
305057e252bfSMichael Neumann {
305157e252bfSMichael Neumann 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
305257e252bfSMichael Neumann 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
305357e252bfSMichael Neumann 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
305457e252bfSMichael Neumann 	struct ni_ps *ni_new_state = ni_get_ps(radeon_new_state);
305557e252bfSMichael Neumann 	SMC_NIslands_MCRegisters *mc_reg_table = &ni_pi->smc_mc_reg_table;
305657e252bfSMichael Neumann 	u16 address;
305757e252bfSMichael Neumann 
305857e252bfSMichael Neumann 	memset(mc_reg_table, 0, sizeof(SMC_NIslands_MCRegisters));
305957e252bfSMichael Neumann 
306057e252bfSMichael Neumann 	ni_convert_mc_reg_table_to_smc(rdev, radeon_new_state, mc_reg_table);
306157e252bfSMichael Neumann 
306257e252bfSMichael Neumann 	address = eg_pi->mc_reg_table_start +
306357e252bfSMichael Neumann 		(u16)offsetof(SMC_NIslands_MCRegisters, data[NISLANDS_MCREGISTERTABLE_FIRST_DRIVERSTATE_SLOT]);
306457e252bfSMichael Neumann 
306557e252bfSMichael Neumann 	return rv770_copy_bytes_to_smc(rdev, address,
306657e252bfSMichael Neumann 				       (u8 *)&mc_reg_table->data[NISLANDS_MCREGISTERTABLE_FIRST_DRIVERSTATE_SLOT],
306757e252bfSMichael Neumann 				       sizeof(SMC_NIslands_MCRegisterSet) * ni_new_state->performance_level_count,
306857e252bfSMichael Neumann 				       pi->sram_end);
306957e252bfSMichael Neumann }
307057e252bfSMichael Neumann 
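/*
 * Descriptive comment (added): fill the SMC CAC leakage LUT by computing
 * the leakage for each supported VDDC level at each SMC temperature bin
 * (8-unit steps scaled by 1000, clamped to leakage_minimum_temperature);
 * unused voltage columns are padded with the largest computed value.
 */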
307157e252bfSMichael Neumann static int ni_init_driver_calculated_leakage_table(struct radeon_device *rdev,
307257e252bfSMichael Neumann 						   PP_NIslands_CACTABLES *cac_tables)
307357e252bfSMichael Neumann {
307457e252bfSMichael Neumann 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
307557e252bfSMichael Neumann 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
307657e252bfSMichael Neumann 	u32 leakage = 0;
307757e252bfSMichael Neumann 	unsigned int i, j, table_size;
307857e252bfSMichael Neumann 	s32 t;
307957e252bfSMichael Neumann 	u32 smc_leakage, max_leakage = 0;
308057e252bfSMichael Neumann 	u32 scaling_factor;
308157e252bfSMichael Neumann 
308257e252bfSMichael Neumann 	table_size = eg_pi->vddc_voltage_table.count;
308357e252bfSMichael Neumann 
308457e252bfSMichael Neumann 	if (SMC_NISLANDS_LKGE_LUT_NUM_OF_VOLT_ENTRIES < table_size)
308557e252bfSMichael Neumann 		table_size = SMC_NISLANDS_LKGE_LUT_NUM_OF_VOLT_ENTRIES;
308657e252bfSMichael Neumann 
308757e252bfSMichael Neumann 	scaling_factor = ni_get_smc_power_scaling_factor(rdev);
308857e252bfSMichael Neumann 
308957e252bfSMichael Neumann 	for (i = 0; i < SMC_NISLANDS_LKGE_LUT_NUM_OF_TEMP_ENTRIES; i++) {
309057e252bfSMichael Neumann 		for (j = 0; j < table_size; j++) {
309157e252bfSMichael Neumann 			t = (1000 * ((i + 1) * 8));
309257e252bfSMichael Neumann 
309357e252bfSMichael Neumann 			if (t < ni_pi->cac_data.leakage_minimum_temperature)
309457e252bfSMichael Neumann 				t = ni_pi->cac_data.leakage_minimum_temperature;
309557e252bfSMichael Neumann 
309657e252bfSMichael Neumann 			ni_calculate_leakage_for_v_and_t(rdev,
309757e252bfSMichael Neumann 							 &ni_pi->cac_data.leakage_coefficients,
309857e252bfSMichael Neumann 							 eg_pi->vddc_voltage_table.entries[j].value,
309957e252bfSMichael Neumann 							 t,
310057e252bfSMichael Neumann 							 ni_pi->cac_data.i_leakage,
310157e252bfSMichael Neumann 							 &leakage);
310257e252bfSMichael Neumann 
310357e252bfSMichael Neumann 			smc_leakage = ni_scale_power_for_smc(leakage, scaling_factor) / 1000;
310457e252bfSMichael Neumann 			if (smc_leakage > max_leakage)
310557e252bfSMichael Neumann 				max_leakage = smc_leakage;
310657e252bfSMichael Neumann 
310757e252bfSMichael Neumann 			cac_tables->cac_lkge_lut[i][j] = cpu_to_be32(smc_leakage);
310857e252bfSMichael Neumann 		}
310957e252bfSMichael Neumann 	}
311057e252bfSMichael Neumann 
311157e252bfSMichael Neumann 	for (j = table_size; j < SMC_NISLANDS_LKGE_LUT_NUM_OF_VOLT_ENTRIES; j++) {
311257e252bfSMichael Neumann 		for (i = 0; i < SMC_NISLANDS_LKGE_LUT_NUM_OF_TEMP_ENTRIES; i++)
311357e252bfSMichael Neumann 			cac_tables->cac_lkge_lut[i][j] = cpu_to_be32(max_leakage);
311457e252bfSMichael Neumann 	}
311557e252bfSMichael Neumann 	return 0;
311657e252bfSMichael Neumann }
311757e252bfSMichael Neumann 
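/*
 * Descriptive comment (added): fill the leakage LUT from the static CAC
 * leakage table supplied by the platform instead of computing it; the
 * same scaled value is used for every temperature bin, and unused voltage
 * columns are padded with the largest leakage seen.
 */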
311857e252bfSMichael Neumann static int ni_init_simplified_leakage_table(struct radeon_device *rdev,
311957e252bfSMichael Neumann 					    PP_NIslands_CACTABLES *cac_tables)
312057e252bfSMichael Neumann {
312157e252bfSMichael Neumann 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
312257e252bfSMichael Neumann 	struct radeon_cac_leakage_table *leakage_table =
312357e252bfSMichael Neumann 		&rdev->pm.dpm.dyn_state.cac_leakage_table;
312457e252bfSMichael Neumann 	u32 i, j, table_size;
312557e252bfSMichael Neumann 	u32 smc_leakage, max_leakage = 0;
312657e252bfSMichael Neumann 	u32 scaling_factor;
312757e252bfSMichael Neumann 
312857e252bfSMichael Neumann 	if (!leakage_table)
312957e252bfSMichael Neumann 		return -EINVAL;
313057e252bfSMichael Neumann 
313157e252bfSMichael Neumann 	table_size = leakage_table->count;
313257e252bfSMichael Neumann 
313357e252bfSMichael Neumann 	if (eg_pi->vddc_voltage_table.count != table_size)
313457e252bfSMichael Neumann 		table_size = (eg_pi->vddc_voltage_table.count < leakage_table->count) ?
313557e252bfSMichael Neumann 			eg_pi->vddc_voltage_table.count : leakage_table->count;
313657e252bfSMichael Neumann 
313757e252bfSMichael Neumann 	if (SMC_NISLANDS_LKGE_LUT_NUM_OF_VOLT_ENTRIES < table_size)
313857e252bfSMichael Neumann 		table_size = SMC_NISLANDS_LKGE_LUT_NUM_OF_VOLT_ENTRIES;
313957e252bfSMichael Neumann 
314057e252bfSMichael Neumann 	if (table_size == 0)
314157e252bfSMichael Neumann 		return -EINVAL;
314257e252bfSMichael Neumann 
314357e252bfSMichael Neumann 	scaling_factor = ni_get_smc_power_scaling_factor(rdev);
314457e252bfSMichael Neumann 
314557e252bfSMichael Neumann 	for (j = 0; j < table_size; j++) {
314657e252bfSMichael Neumann 		smc_leakage = leakage_table->entries[j].leakage;
314757e252bfSMichael Neumann 
314857e252bfSMichael Neumann 		if (smc_leakage > max_leakage)
314957e252bfSMichael Neumann 			max_leakage = smc_leakage;
315057e252bfSMichael Neumann 
315157e252bfSMichael Neumann 		for (i = 0; i < SMC_NISLANDS_LKGE_LUT_NUM_OF_TEMP_ENTRIES; i++)
315257e252bfSMichael Neumann 			cac_tables->cac_lkge_lut[i][j] =
315357e252bfSMichael Neumann 				cpu_to_be32(ni_scale_power_for_smc(smc_leakage, scaling_factor));
315457e252bfSMichael Neumann 	}
315557e252bfSMichael Neumann 
315657e252bfSMichael Neumann 	for (j = table_size; j < SMC_NISLANDS_LKGE_LUT_NUM_OF_VOLT_ENTRIES; j++) {
315757e252bfSMichael Neumann 		for (i = 0; i < SMC_NISLANDS_LKGE_LUT_NUM_OF_TEMP_ENTRIES; i++)
315857e252bfSMichael Neumann 			cac_tables->cac_lkge_lut[i][j] =
315957e252bfSMichael Neumann 				cpu_to_be32(ni_scale_power_for_smc(max_leakage, scaling_factor));
316057e252bfSMichael Neumann 	}
316157e252bfSMichael Neumann 	return 0;
316257e252bfSMichael Neumann }
316357e252bfSMichael Neumann 
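/*
 * Descriptive comment (added): program the CAC sampling window (TID
 * count/unit), seed the DC CAC levels and BIF LUT from the per-ASIC
 * weight table, build the leakage LUT (driver-calculated or simplified)
 * and upload the result to SMC RAM. Note that on failure CAC and power
 * containment are merely disabled; the function still returns 0 so DPM
 * init is not aborted.
 */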
316457e252bfSMichael Neumann static int ni_initialize_smc_cac_tables(struct radeon_device *rdev)
316557e252bfSMichael Neumann {
316657e252bfSMichael Neumann 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
316757e252bfSMichael Neumann 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
316857e252bfSMichael Neumann 	PP_NIslands_CACTABLES *cac_tables = NULL;
316957e252bfSMichael Neumann 	int i, ret;
317057e252bfSMichael Neumann 	u32 reg;
317157e252bfSMichael Neumann 
317257e252bfSMichael Neumann 	if (!ni_pi->enable_cac)
317357e252bfSMichael Neumann 		return 0;
317457e252bfSMichael Neumann 
317557e252bfSMichael Neumann 	cac_tables = kzalloc(sizeof(PP_NIslands_CACTABLES), GFP_KERNEL);
317657e252bfSMichael Neumann 	if (!cac_tables)
317757e252bfSMichael Neumann 		return -ENOMEM;
317857e252bfSMichael Neumann 
317957e252bfSMichael Neumann 	reg = RREG32(CG_CAC_CTRL) & ~(TID_CNT_MASK | TID_UNIT_MASK);
318057e252bfSMichael Neumann 	reg |= (TID_CNT(ni_pi->cac_weights->tid_cnt) |
318157e252bfSMichael Neumann 		TID_UNIT(ni_pi->cac_weights->tid_unit));
318257e252bfSMichael Neumann 	WREG32(CG_CAC_CTRL, reg);
318357e252bfSMichael Neumann 
318457e252bfSMichael Neumann 	for (i = 0; i < NISLANDS_DCCAC_MAX_LEVELS; i++)
318557e252bfSMichael Neumann 		ni_pi->dc_cac_table[i] = ni_pi->cac_weights->dc_cac[i];
318657e252bfSMichael Neumann 
318757e252bfSMichael Neumann 	for (i = 0; i < SMC_NISLANDS_BIF_LUT_NUM_OF_ENTRIES; i++)
318857e252bfSMichael Neumann 		cac_tables->cac_bif_lut[i] = ni_pi->cac_weights->pcie_cac[i];
318957e252bfSMichael Neumann 
319057e252bfSMichael Neumann 	ni_pi->cac_data.i_leakage = rdev->pm.dpm.cac_leakage;
319157e252bfSMichael Neumann 	ni_pi->cac_data.pwr_const = 0;
319257e252bfSMichael Neumann 	ni_pi->cac_data.dc_cac_value = ni_pi->dc_cac_table[NISLANDS_DCCAC_LEVEL_0];
319357e252bfSMichael Neumann 	ni_pi->cac_data.bif_cac_value = 0;
319457e252bfSMichael Neumann 	ni_pi->cac_data.mc_wr_weight = ni_pi->cac_weights->mc_write_weight;
319557e252bfSMichael Neumann 	ni_pi->cac_data.mc_rd_weight = ni_pi->cac_weights->mc_read_weight;
319657e252bfSMichael Neumann 	ni_pi->cac_data.allow_ovrflw = 0;
319757e252bfSMichael Neumann 	ni_pi->cac_data.l2num_win_tdp = ni_pi->lta_window_size;
319857e252bfSMichael Neumann 	ni_pi->cac_data.num_win_tdp = 0;
319957e252bfSMichael Neumann 	ni_pi->cac_data.lts_truncate_n = ni_pi->lts_truncate;
320057e252bfSMichael Neumann 
320157e252bfSMichael Neumann 	if (ni_pi->driver_calculate_cac_leakage)
320257e252bfSMichael Neumann 		ret = ni_init_driver_calculated_leakage_table(rdev, cac_tables);
320357e252bfSMichael Neumann 	else
320457e252bfSMichael Neumann 		ret = ni_init_simplified_leakage_table(rdev, cac_tables);
320557e252bfSMichael Neumann 
320657e252bfSMichael Neumann 	if (ret)
320757e252bfSMichael Neumann 		goto done_free;
320857e252bfSMichael Neumann 
320957e252bfSMichael Neumann 	cac_tables->pwr_const      = cpu_to_be32(ni_pi->cac_data.pwr_const);
321057e252bfSMichael Neumann 	cac_tables->dc_cacValue    = cpu_to_be32(ni_pi->cac_data.dc_cac_value);
321157e252bfSMichael Neumann 	cac_tables->bif_cacValue   = cpu_to_be32(ni_pi->cac_data.bif_cac_value);
321257e252bfSMichael Neumann 	cac_tables->AllowOvrflw    = ni_pi->cac_data.allow_ovrflw;
321357e252bfSMichael Neumann 	cac_tables->MCWrWeight     = ni_pi->cac_data.mc_wr_weight;
321457e252bfSMichael Neumann 	cac_tables->MCRdWeight     = ni_pi->cac_data.mc_rd_weight;
321557e252bfSMichael Neumann 	cac_tables->numWin_TDP     = ni_pi->cac_data.num_win_tdp;
321657e252bfSMichael Neumann 	cac_tables->l2numWin_TDP   = ni_pi->cac_data.l2num_win_tdp;
321757e252bfSMichael Neumann 	cac_tables->lts_truncate_n = ni_pi->cac_data.lts_truncate_n;
321857e252bfSMichael Neumann 
321957e252bfSMichael Neumann 	ret = rv770_copy_bytes_to_smc(rdev, ni_pi->cac_table_start, (u8 *)cac_tables,
322057e252bfSMichael Neumann 				      sizeof(PP_NIslands_CACTABLES), pi->sram_end);
322157e252bfSMichael Neumann 
322257e252bfSMichael Neumann done_free:
322357e252bfSMichael Neumann 	if (ret) {
322457e252bfSMichael Neumann 		ni_pi->enable_cac = false;
322557e252bfSMichael Neumann 		ni_pi->enable_power_containment = false;
322657e252bfSMichael Neumann 	}
322757e252bfSMichael Neumann 
322857e252bfSMichael Neumann 	kfree(cac_tables);
322957e252bfSMichael Neumann 
323057e252bfSMichael Neumann 	return 0;
323157e252bfSMichael Neumann }
323257e252bfSMichael Neumann 
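/*
 * Descriptive comment (added): when hardware CAC configuration is
 * required, load the per-block CAC weights (TCP/TA, TCC, CB, DB, SX,
 * XBR/SPI, LDS/SC, BIF/CP/PA/VGT, DC/UVD, spares) into the
 * CG_CAC_REGION_* registers, program the SQ thresholds, and set up the
 * MC CG read/write weights.
 */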
323357e252bfSMichael Neumann static int ni_initialize_hardware_cac_manager(struct radeon_device *rdev)
323457e252bfSMichael Neumann {
323557e252bfSMichael Neumann 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
323657e252bfSMichael Neumann 	u32 reg;
323757e252bfSMichael Neumann 
323857e252bfSMichael Neumann 	if (!ni_pi->enable_cac ||
323957e252bfSMichael Neumann 	    !ni_pi->cac_configuration_required)
324057e252bfSMichael Neumann 		return 0;
324157e252bfSMichael Neumann 
324257e252bfSMichael Neumann 	if (ni_pi->cac_weights == NULL)
324357e252bfSMichael Neumann 		return -EINVAL;
324457e252bfSMichael Neumann 
324557e252bfSMichael Neumann 	reg = RREG32_CG(CG_CAC_REGION_1_WEIGHT_0) & ~(WEIGHT_TCP_SIG0_MASK |
324657e252bfSMichael Neumann 						      WEIGHT_TCP_SIG1_MASK |
324757e252bfSMichael Neumann 						      WEIGHT_TA_SIG_MASK);
324857e252bfSMichael Neumann 	reg |= (WEIGHT_TCP_SIG0(ni_pi->cac_weights->weight_tcp_sig0) |
324957e252bfSMichael Neumann 		WEIGHT_TCP_SIG1(ni_pi->cac_weights->weight_tcp_sig1) |
325057e252bfSMichael Neumann 		WEIGHT_TA_SIG(ni_pi->cac_weights->weight_ta_sig));
325157e252bfSMichael Neumann 	WREG32_CG(CG_CAC_REGION_1_WEIGHT_0, reg);
325257e252bfSMichael Neumann 
325357e252bfSMichael Neumann 	reg = RREG32_CG(CG_CAC_REGION_1_WEIGHT_1) & ~(WEIGHT_TCC_EN0_MASK |
325457e252bfSMichael Neumann 						      WEIGHT_TCC_EN1_MASK |
325557e252bfSMichael Neumann 						      WEIGHT_TCC_EN2_MASK);
325657e252bfSMichael Neumann 	reg |= (WEIGHT_TCC_EN0(ni_pi->cac_weights->weight_tcc_en0) |
325757e252bfSMichael Neumann 		WEIGHT_TCC_EN1(ni_pi->cac_weights->weight_tcc_en1) |
325857e252bfSMichael Neumann 		WEIGHT_TCC_EN2(ni_pi->cac_weights->weight_tcc_en2));
325957e252bfSMichael Neumann 	WREG32_CG(CG_CAC_REGION_1_WEIGHT_1, reg);
326057e252bfSMichael Neumann 
326157e252bfSMichael Neumann 	reg = RREG32_CG(CG_CAC_REGION_2_WEIGHT_0) & ~(WEIGHT_CB_EN0_MASK |
326257e252bfSMichael Neumann 						      WEIGHT_CB_EN1_MASK |
326357e252bfSMichael Neumann 						      WEIGHT_CB_EN2_MASK |
326457e252bfSMichael Neumann 						      WEIGHT_CB_EN3_MASK);
326557e252bfSMichael Neumann 	reg |= (WEIGHT_CB_EN0(ni_pi->cac_weights->weight_cb_en0) |
326657e252bfSMichael Neumann 		WEIGHT_CB_EN1(ni_pi->cac_weights->weight_cb_en1) |
326757e252bfSMichael Neumann 		WEIGHT_CB_EN2(ni_pi->cac_weights->weight_cb_en2) |
326857e252bfSMichael Neumann 		WEIGHT_CB_EN3(ni_pi->cac_weights->weight_cb_en3));
326957e252bfSMichael Neumann 	WREG32_CG(CG_CAC_REGION_2_WEIGHT_0, reg);
327057e252bfSMichael Neumann 
327157e252bfSMichael Neumann 	reg = RREG32_CG(CG_CAC_REGION_2_WEIGHT_1) & ~(WEIGHT_DB_SIG0_MASK |
327257e252bfSMichael Neumann 						      WEIGHT_DB_SIG1_MASK |
327357e252bfSMichael Neumann 						      WEIGHT_DB_SIG2_MASK |
327457e252bfSMichael Neumann 						      WEIGHT_DB_SIG3_MASK);
327557e252bfSMichael Neumann 	reg |= (WEIGHT_DB_SIG0(ni_pi->cac_weights->weight_db_sig0) |
327657e252bfSMichael Neumann 		WEIGHT_DB_SIG1(ni_pi->cac_weights->weight_db_sig1) |
327757e252bfSMichael Neumann 		WEIGHT_DB_SIG2(ni_pi->cac_weights->weight_db_sig2) |
327857e252bfSMichael Neumann 		WEIGHT_DB_SIG3(ni_pi->cac_weights->weight_db_sig3));
327957e252bfSMichael Neumann 	WREG32_CG(CG_CAC_REGION_2_WEIGHT_1, reg);
328057e252bfSMichael Neumann 
328157e252bfSMichael Neumann 	reg = RREG32_CG(CG_CAC_REGION_2_WEIGHT_2) & ~(WEIGHT_SXM_SIG0_MASK |
328257e252bfSMichael Neumann 						      WEIGHT_SXM_SIG1_MASK |
328357e252bfSMichael Neumann 						      WEIGHT_SXM_SIG2_MASK |
328457e252bfSMichael Neumann 						      WEIGHT_SXS_SIG0_MASK |
328557e252bfSMichael Neumann 						      WEIGHT_SXS_SIG1_MASK);
328657e252bfSMichael Neumann 	reg |= (WEIGHT_SXM_SIG0(ni_pi->cac_weights->weight_sxm_sig0) |
328757e252bfSMichael Neumann 		WEIGHT_SXM_SIG1(ni_pi->cac_weights->weight_sxm_sig1) |
328857e252bfSMichael Neumann 		WEIGHT_SXM_SIG2(ni_pi->cac_weights->weight_sxm_sig2) |
328957e252bfSMichael Neumann 		WEIGHT_SXS_SIG0(ni_pi->cac_weights->weight_sxs_sig0) |
329057e252bfSMichael Neumann 		WEIGHT_SXS_SIG1(ni_pi->cac_weights->weight_sxs_sig1));
329157e252bfSMichael Neumann 	WREG32_CG(CG_CAC_REGION_2_WEIGHT_2, reg);
329257e252bfSMichael Neumann 
329357e252bfSMichael Neumann 	reg = RREG32_CG(CG_CAC_REGION_3_WEIGHT_0) & ~(WEIGHT_XBR_0_MASK |
329457e252bfSMichael Neumann 						      WEIGHT_XBR_1_MASK |
329557e252bfSMichael Neumann 						      WEIGHT_XBR_2_MASK |
329657e252bfSMichael Neumann 						      WEIGHT_SPI_SIG0_MASK);
329757e252bfSMichael Neumann 	reg |= (WEIGHT_XBR_0(ni_pi->cac_weights->weight_xbr_0) |
329857e252bfSMichael Neumann 		WEIGHT_XBR_1(ni_pi->cac_weights->weight_xbr_1) |
329957e252bfSMichael Neumann 		WEIGHT_XBR_2(ni_pi->cac_weights->weight_xbr_2) |
330057e252bfSMichael Neumann 		WEIGHT_SPI_SIG0(ni_pi->cac_weights->weight_spi_sig0));
330157e252bfSMichael Neumann 	WREG32_CG(CG_CAC_REGION_3_WEIGHT_0, reg);
330257e252bfSMichael Neumann 
330357e252bfSMichael Neumann 	reg = RREG32_CG(CG_CAC_REGION_3_WEIGHT_1) & ~(WEIGHT_SPI_SIG1_MASK |
330457e252bfSMichael Neumann 						      WEIGHT_SPI_SIG2_MASK |
330557e252bfSMichael Neumann 						      WEIGHT_SPI_SIG3_MASK |
330657e252bfSMichael Neumann 						      WEIGHT_SPI_SIG4_MASK |
330757e252bfSMichael Neumann 						      WEIGHT_SPI_SIG5_MASK);
330857e252bfSMichael Neumann 	reg |= (WEIGHT_SPI_SIG1(ni_pi->cac_weights->weight_spi_sig1) |
330957e252bfSMichael Neumann 		WEIGHT_SPI_SIG2(ni_pi->cac_weights->weight_spi_sig2) |
331057e252bfSMichael Neumann 		WEIGHT_SPI_SIG3(ni_pi->cac_weights->weight_spi_sig3) |
331157e252bfSMichael Neumann 		WEIGHT_SPI_SIG4(ni_pi->cac_weights->weight_spi_sig4) |
331257e252bfSMichael Neumann 		WEIGHT_SPI_SIG5(ni_pi->cac_weights->weight_spi_sig5));
331357e252bfSMichael Neumann 	WREG32_CG(CG_CAC_REGION_3_WEIGHT_1, reg);
331457e252bfSMichael Neumann 
331557e252bfSMichael Neumann 	reg = RREG32_CG(CG_CAC_REGION_4_WEIGHT_0) & ~(WEIGHT_LDS_SIG0_MASK |
331657e252bfSMichael Neumann 						      WEIGHT_LDS_SIG1_MASK |
331757e252bfSMichael Neumann 						      WEIGHT_SC_MASK);
331857e252bfSMichael Neumann 	reg |= (WEIGHT_LDS_SIG0(ni_pi->cac_weights->weight_lds_sig0) |
331957e252bfSMichael Neumann 		WEIGHT_LDS_SIG1(ni_pi->cac_weights->weight_lds_sig1) |
332057e252bfSMichael Neumann 		WEIGHT_SC(ni_pi->cac_weights->weight_sc));
332157e252bfSMichael Neumann 	WREG32_CG(CG_CAC_REGION_4_WEIGHT_0, reg);
332257e252bfSMichael Neumann 
332357e252bfSMichael Neumann 	reg = RREG32_CG(CG_CAC_REGION_4_WEIGHT_1) & ~(WEIGHT_BIF_MASK |
332457e252bfSMichael Neumann 						      WEIGHT_CP_MASK |
332557e252bfSMichael Neumann 						      WEIGHT_PA_SIG0_MASK |
332657e252bfSMichael Neumann 						      WEIGHT_PA_SIG1_MASK |
332757e252bfSMichael Neumann 						      WEIGHT_VGT_SIG0_MASK);
332857e252bfSMichael Neumann 	reg |= (WEIGHT_BIF(ni_pi->cac_weights->weight_bif) |
332957e252bfSMichael Neumann 		WEIGHT_CP(ni_pi->cac_weights->weight_cp) |
333057e252bfSMichael Neumann 		WEIGHT_PA_SIG0(ni_pi->cac_weights->weight_pa_sig0) |
333157e252bfSMichael Neumann 		WEIGHT_PA_SIG1(ni_pi->cac_weights->weight_pa_sig1) |
333257e252bfSMichael Neumann 		WEIGHT_VGT_SIG0(ni_pi->cac_weights->weight_vgt_sig0));
333357e252bfSMichael Neumann 	WREG32_CG(CG_CAC_REGION_4_WEIGHT_1, reg);
333457e252bfSMichael Neumann 
333557e252bfSMichael Neumann 	reg = RREG32_CG(CG_CAC_REGION_4_WEIGHT_2) & ~(WEIGHT_VGT_SIG1_MASK |
333657e252bfSMichael Neumann 						      WEIGHT_VGT_SIG2_MASK |
333757e252bfSMichael Neumann 						      WEIGHT_DC_SIG0_MASK |
333857e252bfSMichael Neumann 						      WEIGHT_DC_SIG1_MASK |
333957e252bfSMichael Neumann 						      WEIGHT_DC_SIG2_MASK);
334057e252bfSMichael Neumann 	reg |= (WEIGHT_VGT_SIG1(ni_pi->cac_weights->weight_vgt_sig1) |
334157e252bfSMichael Neumann 		WEIGHT_VGT_SIG2(ni_pi->cac_weights->weight_vgt_sig2) |
334257e252bfSMichael Neumann 		WEIGHT_DC_SIG0(ni_pi->cac_weights->weight_dc_sig0) |
334357e252bfSMichael Neumann 		WEIGHT_DC_SIG1(ni_pi->cac_weights->weight_dc_sig1) |
334457e252bfSMichael Neumann 		WEIGHT_DC_SIG2(ni_pi->cac_weights->weight_dc_sig2));
334557e252bfSMichael Neumann 	WREG32_CG(CG_CAC_REGION_4_WEIGHT_2, reg);
334657e252bfSMichael Neumann 
334757e252bfSMichael Neumann 	reg = RREG32_CG(CG_CAC_REGION_4_WEIGHT_3) & ~(WEIGHT_DC_SIG3_MASK |
334857e252bfSMichael Neumann 						      WEIGHT_UVD_SIG0_MASK |
334957e252bfSMichael Neumann 						      WEIGHT_UVD_SIG1_MASK |
335057e252bfSMichael Neumann 						      WEIGHT_SPARE0_MASK |
335157e252bfSMichael Neumann 						      WEIGHT_SPARE1_MASK);
335257e252bfSMichael Neumann 	reg |= (WEIGHT_DC_SIG3(ni_pi->cac_weights->weight_dc_sig3) |
335357e252bfSMichael Neumann 		WEIGHT_UVD_SIG0(ni_pi->cac_weights->weight_uvd_sig0) |
335457e252bfSMichael Neumann 		WEIGHT_UVD_SIG1(ni_pi->cac_weights->weight_uvd_sig1) |
335557e252bfSMichael Neumann 		WEIGHT_SPARE0(ni_pi->cac_weights->weight_spare0) |
335657e252bfSMichael Neumann 		WEIGHT_SPARE1(ni_pi->cac_weights->weight_spare1));
335757e252bfSMichael Neumann 	WREG32_CG(CG_CAC_REGION_4_WEIGHT_3, reg);
335857e252bfSMichael Neumann 
335957e252bfSMichael Neumann 	reg = RREG32_CG(CG_CAC_REGION_5_WEIGHT_0) & ~(WEIGHT_SQ_VSP_MASK |
336057e252bfSMichael Neumann 						      WEIGHT_SQ_VSP0_MASK);
336157e252bfSMichael Neumann 	reg |= (WEIGHT_SQ_VSP(ni_pi->cac_weights->weight_sq_vsp) |
336257e252bfSMichael Neumann 		WEIGHT_SQ_VSP0(ni_pi->cac_weights->weight_sq_vsp0));
336357e252bfSMichael Neumann 	WREG32_CG(CG_CAC_REGION_5_WEIGHT_0, reg);
336457e252bfSMichael Neumann 
336557e252bfSMichael Neumann 	reg = RREG32_CG(CG_CAC_REGION_5_WEIGHT_1) & ~(WEIGHT_SQ_GPR_MASK);
336657e252bfSMichael Neumann 	reg |= WEIGHT_SQ_GPR(ni_pi->cac_weights->weight_sq_gpr);
336757e252bfSMichael Neumann 	WREG32_CG(CG_CAC_REGION_5_WEIGHT_1, reg);
336857e252bfSMichael Neumann 
336957e252bfSMichael Neumann 	reg = RREG32_CG(CG_CAC_REGION_4_OVERRIDE_4) & ~(OVR_MODE_SPARE_0_MASK |
337057e252bfSMichael Neumann 							OVR_VAL_SPARE_0_MASK |
337157e252bfSMichael Neumann 							OVR_MODE_SPARE_1_MASK |
337257e252bfSMichael Neumann 							OVR_VAL_SPARE_1_MASK);
337357e252bfSMichael Neumann 	reg |= (OVR_MODE_SPARE_0(ni_pi->cac_weights->ovr_mode_spare_0) |
337457e252bfSMichael Neumann 		OVR_VAL_SPARE_0(ni_pi->cac_weights->ovr_val_spare_0) |
337557e252bfSMichael Neumann 		OVR_MODE_SPARE_1(ni_pi->cac_weights->ovr_mode_spare_1) |
337657e252bfSMichael Neumann 		OVR_VAL_SPARE_1(ni_pi->cac_weights->ovr_val_spare_1));
337757e252bfSMichael Neumann 	WREG32_CG(CG_CAC_REGION_4_OVERRIDE_4, reg);
337857e252bfSMichael Neumann 
337957e252bfSMichael Neumann 	reg = RREG32(SQ_CAC_THRESHOLD) & ~(VSP_MASK |
338057e252bfSMichael Neumann 					   VSP0_MASK |
338157e252bfSMichael Neumann 					   GPR_MASK);
338257e252bfSMichael Neumann 	reg |= (VSP(ni_pi->cac_weights->vsp) |
338357e252bfSMichael Neumann 		VSP0(ni_pi->cac_weights->vsp0) |
338457e252bfSMichael Neumann 		GPR(ni_pi->cac_weights->gpr));
338557e252bfSMichael Neumann 	WREG32(SQ_CAC_THRESHOLD, reg);
338657e252bfSMichael Neumann 
338757e252bfSMichael Neumann 	reg = (MCDW_WR_ENABLE |
338857e252bfSMichael Neumann 	       MCDX_WR_ENABLE |
338957e252bfSMichael Neumann 	       MCDY_WR_ENABLE |
339057e252bfSMichael Neumann 	       MCDZ_WR_ENABLE |
339157e252bfSMichael Neumann 	       INDEX(0x09D4));
339257e252bfSMichael Neumann 	WREG32(MC_CG_CONFIG, reg);
339357e252bfSMichael Neumann 
339457e252bfSMichael Neumann 	reg = (READ_WEIGHT(ni_pi->cac_weights->mc_read_weight) |
339557e252bfSMichael Neumann 	       WRITE_WEIGHT(ni_pi->cac_weights->mc_write_weight) |
339657e252bfSMichael Neumann 	       ALLOW_OVERFLOW);
339757e252bfSMichael Neumann 	WREG32(MC_CG_DATAPORT, reg);
339857e252bfSMichael Neumann 
339957e252bfSMichael Neumann 	return 0;
340057e252bfSMichael Neumann }
340157e252bfSMichael Neumann 
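/*
 * Descriptive comment (added): ask the SMC to start or stop CAC
 * collection. CAC is only enabled for non-UVD states; long-term
 * averaging is requested separately and silently dropped if the SMC
 * rejects it.
 */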
340257e252bfSMichael Neumann static int ni_enable_smc_cac(struct radeon_device *rdev,
340357e252bfSMichael Neumann 			     struct radeon_ps *radeon_new_state,
340457e252bfSMichael Neumann 			     bool enable)
340557e252bfSMichael Neumann {
340657e252bfSMichael Neumann 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
340757e252bfSMichael Neumann 	int ret = 0;
340857e252bfSMichael Neumann 	PPSMC_Result smc_result;
340957e252bfSMichael Neumann 
341057e252bfSMichael Neumann 	if (ni_pi->enable_cac) {
341157e252bfSMichael Neumann 		if (enable) {
341257e252bfSMichael Neumann 			if (!r600_is_uvd_state(radeon_new_state->class, radeon_new_state->class2)) {
341357e252bfSMichael Neumann 				smc_result = rv770_send_msg_to_smc(rdev, PPSMC_MSG_CollectCAC_PowerCorreln);
341457e252bfSMichael Neumann 
341557e252bfSMichael Neumann 				if (ni_pi->support_cac_long_term_average) {
341657e252bfSMichael Neumann 					smc_result = rv770_send_msg_to_smc(rdev, PPSMC_CACLongTermAvgEnable);
341757e252bfSMichael Neumann 					if (PPSMC_Result_OK != smc_result)
341857e252bfSMichael Neumann 						ni_pi->support_cac_long_term_average = false;
341957e252bfSMichael Neumann 				}
342057e252bfSMichael Neumann 
342157e252bfSMichael Neumann 				smc_result = rv770_send_msg_to_smc(rdev, PPSMC_MSG_EnableCac);
342257e252bfSMichael Neumann 				if (PPSMC_Result_OK != smc_result)
342357e252bfSMichael Neumann 					ret = -EINVAL;
342457e252bfSMichael Neumann 
342557e252bfSMichael Neumann 				ni_pi->cac_enabled = (PPSMC_Result_OK == smc_result) ? true : false;
342657e252bfSMichael Neumann 			}
342757e252bfSMichael Neumann 		} else if (ni_pi->cac_enabled) {
342857e252bfSMichael Neumann 			smc_result = rv770_send_msg_to_smc(rdev, PPSMC_MSG_DisableCac);
342957e252bfSMichael Neumann 
343057e252bfSMichael Neumann 			ni_pi->cac_enabled = false;
343157e252bfSMichael Neumann 
343257e252bfSMichael Neumann 			if (ni_pi->support_cac_long_term_average) {
343357e252bfSMichael Neumann 				smc_result = rv770_send_msg_to_smc(rdev, PPSMC_CACLongTermAvgDisable);
343457e252bfSMichael Neumann 				if (PPSMC_Result_OK != smc_result)
343557e252bfSMichael Neumann 					ni_pi->support_cac_long_term_average = false;
343657e252bfSMichael Neumann 			}
343757e252bfSMichael Neumann 		}
343857e252bfSMichael Neumann 	}
343957e252bfSMichael Neumann 
344057e252bfSMichael Neumann 	return ret;
344157e252bfSMichael Neumann }
344257e252bfSMichael Neumann 
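/*
 * Descriptive comment (added): forward a PCIe performance request to the
 * platform via ACPI (only when built with CONFIG_ACPI), notifying the
 * ACPI method that the device is ready on first use and dropping the
 * registration on PCIE_PERF_REQ_REMOVE_REGISTRY.
 */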
344357e252bfSMichael Neumann static int ni_pcie_performance_request(struct radeon_device *rdev,
344457e252bfSMichael Neumann 				       u8 perf_req, bool advertise)
344557e252bfSMichael Neumann {
3446*c6f73aabSFrançois Tigeot #if defined(CONFIG_ACPI)
344757e252bfSMichael Neumann 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
344857e252bfSMichael Neumann 
344957e252bfSMichael Neumann 	if ((perf_req == PCIE_PERF_REQ_PECI_GEN1) ||
345057e252bfSMichael Neumann             (perf_req == PCIE_PERF_REQ_PECI_GEN2)) {
345157e252bfSMichael Neumann 		if (!eg_pi->pcie_performance_request_registered)
345257e252bfSMichael Neumann 			radeon_acpi_pcie_notify_device_ready(rdev);
345357e252bfSMichael Neumann 		eg_pi->pcie_performance_request_registered = true;
345457e252bfSMichael Neumann 		return radeon_acpi_pcie_performance_request(rdev, perf_req, advertise);
345557e252bfSMichael Neumann 	} else if ((perf_req == PCIE_PERF_REQ_REMOVE_REGISTRY) &&
345657e252bfSMichael Neumann                    eg_pi->pcie_performance_request_registered) {
345757e252bfSMichael Neumann 		eg_pi->pcie_performance_request_registered = false;
345857e252bfSMichael Neumann 		return radeon_acpi_pcie_performance_request(rdev, perf_req, advertise);
345957e252bfSMichael Neumann 	}
346057e252bfSMichael Neumann #endif
346157e252bfSMichael Neumann 	return 0;
346257e252bfSMichael Neumann }
346357e252bfSMichael Neumann 
346457e252bfSMichael Neumann static int ni_advertise_gen2_capability(struct radeon_device *rdev)
346557e252bfSMichael Neumann {
346657e252bfSMichael Neumann 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
346757e252bfSMichael Neumann 	u32 tmp;
346857e252bfSMichael Neumann 
346957e252bfSMichael Neumann 	tmp = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
347057e252bfSMichael Neumann 
347157e252bfSMichael Neumann 	if ((tmp & LC_OTHER_SIDE_EVER_SENT_GEN2) &&
347257e252bfSMichael Neumann 	    (tmp & LC_OTHER_SIDE_SUPPORTS_GEN2))
347357e252bfSMichael Neumann 		pi->pcie_gen2 = true;
347457e252bfSMichael Neumann 	else
347557e252bfSMichael Neumann 		pi->pcie_gen2 = false;
347657e252bfSMichael Neumann 
347757e252bfSMichael Neumann 	if (!pi->pcie_gen2)
347857e252bfSMichael Neumann 		ni_pcie_performance_request(rdev, PCIE_PERF_REQ_PECI_GEN2, true);
347957e252bfSMichael Neumann 
348057e252bfSMichael Neumann 	return 0;
348157e252bfSMichael Neumann }
348257e252bfSMichael Neumann 
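/*
 * Descriptive comment (added): enable or disable dynamic PCIe gen2
 * switching in the BIF. If the link partner ever advertised gen2, the
 * client request, HW voltage-control and gen2 strap bits in
 * PCIE_LC_SPEED_CNTL are programmed accordingly.
 */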
348357e252bfSMichael Neumann static void ni_enable_bif_dynamic_pcie_gen2(struct radeon_device *rdev,
348457e252bfSMichael Neumann 					    bool enable)
348557e252bfSMichael Neumann {
348657e252bfSMichael Neumann 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
348757e252bfSMichael Neumann 	u32 tmp, bif;
348857e252bfSMichael Neumann 
348957e252bfSMichael Neumann 	tmp = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
349057e252bfSMichael Neumann 
349157e252bfSMichael Neumann 	if ((tmp & LC_OTHER_SIDE_EVER_SENT_GEN2) &&
349257e252bfSMichael Neumann 	    (tmp & LC_OTHER_SIDE_SUPPORTS_GEN2)) {
349357e252bfSMichael Neumann 		if (enable) {
349457e252bfSMichael Neumann 			if (!pi->boot_in_gen2) {
349557e252bfSMichael Neumann 				bif = RREG32(CG_BIF_REQ_AND_RSP) & ~CG_CLIENT_REQ_MASK;
349657e252bfSMichael Neumann 				bif |= CG_CLIENT_REQ(0xd);
349757e252bfSMichael Neumann 				WREG32(CG_BIF_REQ_AND_RSP, bif);
349857e252bfSMichael Neumann 			}
349957e252bfSMichael Neumann 			tmp &= ~LC_HW_VOLTAGE_IF_CONTROL_MASK;
350057e252bfSMichael Neumann 			tmp |= LC_HW_VOLTAGE_IF_CONTROL(1);
350157e252bfSMichael Neumann 			tmp |= LC_GEN2_EN_STRAP;
350257e252bfSMichael Neumann 
350357e252bfSMichael Neumann 			tmp |= LC_CLR_FAILED_SPD_CHANGE_CNT;
350457e252bfSMichael Neumann 			WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, tmp);
3505c4ef309bSzrj 			udelay(10);
350657e252bfSMichael Neumann 			tmp &= ~LC_CLR_FAILED_SPD_CHANGE_CNT;
350757e252bfSMichael Neumann 			WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, tmp);
350857e252bfSMichael Neumann 		} else {
350957e252bfSMichael Neumann 			if (!pi->boot_in_gen2) {
351057e252bfSMichael Neumann 				bif = RREG32(CG_BIF_REQ_AND_RSP) & ~CG_CLIENT_REQ_MASK;
351157e252bfSMichael Neumann 				bif |= CG_CLIENT_REQ(0xd);
351257e252bfSMichael Neumann 				WREG32(CG_BIF_REQ_AND_RSP, bif);
351357e252bfSMichael Neumann 
351457e252bfSMichael Neumann 				tmp &= ~LC_HW_VOLTAGE_IF_CONTROL_MASK;
351557e252bfSMichael Neumann 				tmp &= ~LC_GEN2_EN_STRAP;
351657e252bfSMichael Neumann 			}
351757e252bfSMichael Neumann 			WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, tmp);
351857e252bfSMichael Neumann 		}
351957e252bfSMichael Neumann 	}
352057e252bfSMichael Neumann }
352157e252bfSMichael Neumann 
352257e252bfSMichael Neumann static void ni_enable_dynamic_pcie_gen2(struct radeon_device *rdev,
352357e252bfSMichael Neumann 					bool enable)
352457e252bfSMichael Neumann {
352557e252bfSMichael Neumann 	ni_enable_bif_dynamic_pcie_gen2(rdev, enable);
352657e252bfSMichael Neumann 
352757e252bfSMichael Neumann 	if (enable)
352857e252bfSMichael Neumann 		WREG32_P(GENERAL_PWRMGT, ENABLE_GEN2PCIE, ~ENABLE_GEN2PCIE);
352957e252bfSMichael Neumann 	else
353057e252bfSMichael Neumann 		WREG32_P(GENERAL_PWRMGT, 0, ~ENABLE_GEN2PCIE);
353157e252bfSMichael Neumann }
353257e252bfSMichael Neumann 
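/*
 * Descriptive comment (added): this helper reclocks UVD only when the new
 * state's top sclk is below the current one; its _after_ counterpart
 * below covers the opposite case. In effect the VCLK/DCLK switch always
 * happens while the engine clock sits at the higher of the two values.
 */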
353357e252bfSMichael Neumann void ni_set_uvd_clock_before_set_eng_clock(struct radeon_device *rdev,
353457e252bfSMichael Neumann 					   struct radeon_ps *new_ps,
353557e252bfSMichael Neumann 					   struct radeon_ps *old_ps)
353657e252bfSMichael Neumann {
353757e252bfSMichael Neumann 	struct ni_ps *new_state = ni_get_ps(new_ps);
353857e252bfSMichael Neumann 	struct ni_ps *current_state = ni_get_ps(old_ps);
353957e252bfSMichael Neumann 
354057e252bfSMichael Neumann 	if ((new_ps->vclk == old_ps->vclk) &&
354157e252bfSMichael Neumann 	    (new_ps->dclk == old_ps->dclk))
354257e252bfSMichael Neumann 		return;
354357e252bfSMichael Neumann 
354457e252bfSMichael Neumann 	if (new_state->performance_levels[new_state->performance_level_count - 1].sclk >=
354557e252bfSMichael Neumann 	    current_state->performance_levels[current_state->performance_level_count - 1].sclk)
354657e252bfSMichael Neumann 		return;
354757e252bfSMichael Neumann 
354857e252bfSMichael Neumann 	radeon_set_uvd_clocks(rdev, new_ps->vclk, new_ps->dclk);
354957e252bfSMichael Neumann }
355057e252bfSMichael Neumann 
355157e252bfSMichael Neumann void ni_set_uvd_clock_after_set_eng_clock(struct radeon_device *rdev,
355257e252bfSMichael Neumann 					  struct radeon_ps *new_ps,
355357e252bfSMichael Neumann 					  struct radeon_ps *old_ps)
355457e252bfSMichael Neumann {
355557e252bfSMichael Neumann 	struct ni_ps *new_state = ni_get_ps(new_ps);
355657e252bfSMichael Neumann 	struct ni_ps *current_state = ni_get_ps(old_ps);
355757e252bfSMichael Neumann 
355857e252bfSMichael Neumann 	if ((new_ps->vclk == old_ps->vclk) &&
355957e252bfSMichael Neumann 	    (new_ps->dclk == old_ps->dclk))
356057e252bfSMichael Neumann 		return;
356157e252bfSMichael Neumann 
356257e252bfSMichael Neumann 	if (new_state->performance_levels[new_state->performance_level_count - 1].sclk <
356357e252bfSMichael Neumann 	    current_state->performance_levels[current_state->performance_level_count - 1].sclk)
356457e252bfSMichael Neumann 		return;
356557e252bfSMichael Neumann 
356657e252bfSMichael Neumann 	radeon_set_uvd_clocks(rdev, new_ps->vclk, new_ps->dclk);
356757e252bfSMichael Neumann }
356857e252bfSMichael Neumann 
356957e252bfSMichael Neumann void ni_dpm_setup_asic(struct radeon_device *rdev)
357057e252bfSMichael Neumann {
357157e252bfSMichael Neumann 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3572*c6f73aabSFrançois Tigeot 	int r;
357357e252bfSMichael Neumann 
3574*c6f73aabSFrançois Tigeot 	r = ni_mc_load_microcode(rdev);
3575*c6f73aabSFrançois Tigeot 	if (r)
3576*c6f73aabSFrançois Tigeot 		DRM_ERROR("Failed to load MC firmware!\n");
357757e252bfSMichael Neumann 	ni_read_clock_registers(rdev);
357857e252bfSMichael Neumann 	btc_read_arb_registers(rdev);
357957e252bfSMichael Neumann 	rv770_get_memory_type(rdev);
358057e252bfSMichael Neumann 	if (eg_pi->pcie_performance_request)
358157e252bfSMichael Neumann 		ni_advertise_gen2_capability(rdev);
358257e252bfSMichael Neumann 	rv770_get_pcie_gen2_status(rdev);
358357e252bfSMichael Neumann 	rv770_enable_acpi_pm(rdev);
358457e252bfSMichael Neumann }
358557e252bfSMichael Neumann 
358657e252bfSMichael Neumann void ni_update_current_ps(struct radeon_device *rdev,
358757e252bfSMichael Neumann 			  struct radeon_ps *rps)
358857e252bfSMichael Neumann {
358957e252bfSMichael Neumann 	struct ni_ps *new_ps = ni_get_ps(rps);
359057e252bfSMichael Neumann 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
359157e252bfSMichael Neumann 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
359257e252bfSMichael Neumann 
359357e252bfSMichael Neumann 	eg_pi->current_rps = *rps;
359457e252bfSMichael Neumann 	ni_pi->current_ps = *new_ps;
359557e252bfSMichael Neumann 	eg_pi->current_rps.ps_priv = &ni_pi->current_ps;
359657e252bfSMichael Neumann }
359757e252bfSMichael Neumann 
359857e252bfSMichael Neumann void ni_update_requested_ps(struct radeon_device *rdev,
359957e252bfSMichael Neumann 			    struct radeon_ps *rps)
360057e252bfSMichael Neumann {
360157e252bfSMichael Neumann 	struct ni_ps *new_ps = ni_get_ps(rps);
360257e252bfSMichael Neumann 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
360357e252bfSMichael Neumann 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
360457e252bfSMichael Neumann 
360557e252bfSMichael Neumann 	eg_pi->requested_rps = *rps;
360657e252bfSMichael Neumann 	ni_pi->requested_ps = *new_ps;
360757e252bfSMichael Neumann 	eg_pi->requested_rps.ps_priv = &ni_pi->requested_ps;
360857e252bfSMichael Neumann }
360957e252bfSMichael Neumann 
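/*
 * Descriptive comment (added): bring DPM up on the boot state. Apply the
 * default clock/power gating, build the voltage and MC register tables,
 * upload the SMC firmware and state/SPLL/arbiter/CAC/TDP tables, start
 * the SMC, then enable sclk/mclk control, DPM, clock gating and the
 * thermal auto-throttle source.
 */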
361057e252bfSMichael Neumann int ni_dpm_enable(struct radeon_device *rdev)
361157e252bfSMichael Neumann {
361257e252bfSMichael Neumann 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
361357e252bfSMichael Neumann 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
361457e252bfSMichael Neumann 	struct radeon_ps *boot_ps = rdev->pm.dpm.boot_ps;
361557e252bfSMichael Neumann 	int ret;
361657e252bfSMichael Neumann 
361757e252bfSMichael Neumann 	if (pi->gfx_clock_gating)
361857e252bfSMichael Neumann 		ni_cg_clockgating_default(rdev);
361957e252bfSMichael Neumann 	if (btc_dpm_enabled(rdev))
362057e252bfSMichael Neumann 		return -EINVAL;
362157e252bfSMichael Neumann 	if (pi->mg_clock_gating)
362257e252bfSMichael Neumann 		ni_mg_clockgating_default(rdev);
362357e252bfSMichael Neumann 	if (eg_pi->ls_clock_gating)
362457e252bfSMichael Neumann 		ni_ls_clockgating_default(rdev);
362557e252bfSMichael Neumann 	if (pi->voltage_control) {
362657e252bfSMichael Neumann 		rv770_enable_voltage_control(rdev, true);
362757e252bfSMichael Neumann 		ret = cypress_construct_voltage_tables(rdev);
362857e252bfSMichael Neumann 		if (ret) {
362957e252bfSMichael Neumann 			DRM_ERROR("cypress_construct_voltage_tables failed\n");
363057e252bfSMichael Neumann 			return ret;
363157e252bfSMichael Neumann 		}
363257e252bfSMichael Neumann 	}
363357e252bfSMichael Neumann 	if (eg_pi->dynamic_ac_timing) {
363457e252bfSMichael Neumann 		ret = ni_initialize_mc_reg_table(rdev);
363557e252bfSMichael Neumann 		if (ret)
363657e252bfSMichael Neumann 			eg_pi->dynamic_ac_timing = false;
363757e252bfSMichael Neumann 	}
363857e252bfSMichael Neumann 	if (pi->dynamic_ss)
363957e252bfSMichael Neumann 		cypress_enable_spread_spectrum(rdev, true);
364057e252bfSMichael Neumann 	if (pi->thermal_protection)
364157e252bfSMichael Neumann 		rv770_enable_thermal_protection(rdev, true);
364257e252bfSMichael Neumann 	rv770_setup_bsp(rdev);
364357e252bfSMichael Neumann 	rv770_program_git(rdev);
364457e252bfSMichael Neumann 	rv770_program_tp(rdev);
364557e252bfSMichael Neumann 	rv770_program_tpp(rdev);
364657e252bfSMichael Neumann 	rv770_program_sstp(rdev);
364757e252bfSMichael Neumann 	cypress_enable_display_gap(rdev);
364857e252bfSMichael Neumann 	rv770_program_vc(rdev);
364957e252bfSMichael Neumann 	if (pi->dynamic_pcie_gen2)
365057e252bfSMichael Neumann 		ni_enable_dynamic_pcie_gen2(rdev, true);
365157e252bfSMichael Neumann 	ret = rv770_upload_firmware(rdev);
365257e252bfSMichael Neumann 	if (ret) {
365357e252bfSMichael Neumann 		DRM_ERROR("rv770_upload_firmware failed\n");
365457e252bfSMichael Neumann 		return ret;
365557e252bfSMichael Neumann 	}
365657e252bfSMichael Neumann 	ret = ni_process_firmware_header(rdev);
365757e252bfSMichael Neumann 	if (ret) {
365857e252bfSMichael Neumann 		DRM_ERROR("ni_process_firmware_header failed\n");
365957e252bfSMichael Neumann 		return ret;
366057e252bfSMichael Neumann 	}
366157e252bfSMichael Neumann 	ret = ni_initial_switch_from_arb_f0_to_f1(rdev);
366257e252bfSMichael Neumann 	if (ret) {
366357e252bfSMichael Neumann 		DRM_ERROR("ni_initial_switch_from_arb_f0_to_f1 failed\n");
366457e252bfSMichael Neumann 		return ret;
366557e252bfSMichael Neumann 	}
366657e252bfSMichael Neumann 	ret = ni_init_smc_table(rdev);
366757e252bfSMichael Neumann 	if (ret) {
366857e252bfSMichael Neumann 		DRM_ERROR("ni_init_smc_table failed\n");
366957e252bfSMichael Neumann 		return ret;
367057e252bfSMichael Neumann 	}
367157e252bfSMichael Neumann 	ret = ni_init_smc_spll_table(rdev);
367257e252bfSMichael Neumann 	if (ret) {
367357e252bfSMichael Neumann 		DRM_ERROR("ni_init_smc_spll_table failed\n");
367457e252bfSMichael Neumann 		return ret;
367557e252bfSMichael Neumann 	}
367657e252bfSMichael Neumann 	ret = ni_init_arb_table_index(rdev);
367757e252bfSMichael Neumann 	if (ret) {
367857e252bfSMichael Neumann 		DRM_ERROR("ni_init_arb_table_index failed\n");
367957e252bfSMichael Neumann 		return ret;
368057e252bfSMichael Neumann 	}
368157e252bfSMichael Neumann 	if (eg_pi->dynamic_ac_timing) {
368257e252bfSMichael Neumann 		ret = ni_populate_mc_reg_table(rdev, boot_ps);
368357e252bfSMichael Neumann 		if (ret) {
368457e252bfSMichael Neumann 			DRM_ERROR("ni_populate_mc_reg_table failed\n");
368557e252bfSMichael Neumann 			return ret;
368657e252bfSMichael Neumann 		}
368757e252bfSMichael Neumann 	}
368857e252bfSMichael Neumann 	ret = ni_initialize_smc_cac_tables(rdev);
368957e252bfSMichael Neumann 	if (ret) {
369057e252bfSMichael Neumann 		DRM_ERROR("ni_initialize_smc_cac_tables failed\n");
369157e252bfSMichael Neumann 		return ret;
369257e252bfSMichael Neumann 	}
369357e252bfSMichael Neumann 	ret = ni_initialize_hardware_cac_manager(rdev);
369457e252bfSMichael Neumann 	if (ret) {
369557e252bfSMichael Neumann 		DRM_ERROR("ni_initialize_hardware_cac_manager failed\n");
369657e252bfSMichael Neumann 		return ret;
369757e252bfSMichael Neumann 	}
369857e252bfSMichael Neumann 	ret = ni_populate_smc_tdp_limits(rdev, boot_ps);
369957e252bfSMichael Neumann 	if (ret) {
370057e252bfSMichael Neumann 		DRM_ERROR("ni_populate_smc_tdp_limits failed\n");
370157e252bfSMichael Neumann 		return ret;
370257e252bfSMichael Neumann 	}
370357e252bfSMichael Neumann 	ni_program_response_times(rdev);
370457e252bfSMichael Neumann 	r7xx_start_smc(rdev);
370557e252bfSMichael Neumann 	ret = cypress_notify_smc_display_change(rdev, false);
370657e252bfSMichael Neumann 	if (ret) {
370757e252bfSMichael Neumann 		DRM_ERROR("cypress_notify_smc_display_change failed\n");
370857e252bfSMichael Neumann 		return ret;
370957e252bfSMichael Neumann 	}
371057e252bfSMichael Neumann 	cypress_enable_sclk_control(rdev, true);
371157e252bfSMichael Neumann 	if (eg_pi->memory_transition)
371257e252bfSMichael Neumann 		cypress_enable_mclk_control(rdev, true);
371357e252bfSMichael Neumann 	cypress_start_dpm(rdev);
371457e252bfSMichael Neumann 	if (pi->gfx_clock_gating)
371557e252bfSMichael Neumann 		ni_gfx_clockgating_enable(rdev, true);
371657e252bfSMichael Neumann 	if (pi->mg_clock_gating)
371757e252bfSMichael Neumann 		ni_mg_clockgating_enable(rdev, true);
371857e252bfSMichael Neumann 	if (eg_pi->ls_clock_gating)
371957e252bfSMichael Neumann 		ni_ls_clockgating_enable(rdev, true);
372057e252bfSMichael Neumann 
372157e252bfSMichael Neumann 	rv770_enable_auto_throttle_source(rdev, RADEON_DPM_AUTO_THROTTLE_SRC_THERMAL, true);
372257e252bfSMichael Neumann 
372357e252bfSMichael Neumann 	ni_update_current_ps(rdev, boot_ps);
372457e252bfSMichael Neumann 
372557e252bfSMichael Neumann 	return 0;
372657e252bfSMichael Neumann }
372757e252bfSMichael Neumann 
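/*
 * Descriptive comment (added): tear DPM down in roughly the reverse
 * order: drop power containment and CAC, disable spread spectrum,
 * thermal protection and dynamic gen2, mask the thermal interrupt,
 * disable clock gating, stop DPM and the SMC, and switch the MC arbiter
 * back to F0.
 */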
372857e252bfSMichael Neumann void ni_dpm_disable(struct radeon_device *rdev)
372957e252bfSMichael Neumann {
373057e252bfSMichael Neumann 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
373157e252bfSMichael Neumann 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
373257e252bfSMichael Neumann 	struct radeon_ps *boot_ps = rdev->pm.dpm.boot_ps;
373357e252bfSMichael Neumann 
373457e252bfSMichael Neumann 	if (!btc_dpm_enabled(rdev))
373557e252bfSMichael Neumann 		return;
373657e252bfSMichael Neumann 	rv770_clear_vc(rdev);
373757e252bfSMichael Neumann 	if (pi->thermal_protection)
373857e252bfSMichael Neumann 		rv770_enable_thermal_protection(rdev, false);
373957e252bfSMichael Neumann 	ni_enable_power_containment(rdev, boot_ps, false);
374057e252bfSMichael Neumann 	ni_enable_smc_cac(rdev, boot_ps, false);
374157e252bfSMichael Neumann 	cypress_enable_spread_spectrum(rdev, false);
374257e252bfSMichael Neumann 	rv770_enable_auto_throttle_source(rdev, RADEON_DPM_AUTO_THROTTLE_SRC_THERMAL, false);
374357e252bfSMichael Neumann 	if (pi->dynamic_pcie_gen2)
374457e252bfSMichael Neumann 		ni_enable_dynamic_pcie_gen2(rdev, false);
374557e252bfSMichael Neumann 
374657e252bfSMichael Neumann 	if (rdev->irq.installed &&
374757e252bfSMichael Neumann 	    r600_is_internal_thermal_sensor(rdev->pm.int_thermal_type)) {
374857e252bfSMichael Neumann 		rdev->irq.dpm_thermal = false;
374957e252bfSMichael Neumann 		radeon_irq_set(rdev);
375057e252bfSMichael Neumann 	}
375157e252bfSMichael Neumann 
375257e252bfSMichael Neumann 	if (pi->gfx_clock_gating)
375357e252bfSMichael Neumann 		ni_gfx_clockgating_enable(rdev, false);
375457e252bfSMichael Neumann 	if (pi->mg_clock_gating)
375557e252bfSMichael Neumann 		ni_mg_clockgating_enable(rdev, false);
375657e252bfSMichael Neumann 	if (eg_pi->ls_clock_gating)
375757e252bfSMichael Neumann 		ni_ls_clockgating_enable(rdev, false);
375857e252bfSMichael Neumann 	ni_stop_dpm(rdev);
375957e252bfSMichael Neumann 	btc_reset_to_default(rdev);
376057e252bfSMichael Neumann 	ni_stop_smc(rdev);
376157e252bfSMichael Neumann 	ni_force_switch_to_arb_f0(rdev);
376257e252bfSMichael Neumann 
376357e252bfSMichael Neumann 	ni_update_current_ps(rdev, boot_ps);
376457e252bfSMichael Neumann }
376557e252bfSMichael Neumann 
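/*
 * Descriptive comment (added): re-send the TDP limits for the requested
 * state with the SMC halted, then resume the SMC and re-latch the
 * software state.
 */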
376657e252bfSMichael Neumann static int ni_power_control_set_level(struct radeon_device *rdev)
376757e252bfSMichael Neumann {
376857e252bfSMichael Neumann 	struct radeon_ps *new_ps = rdev->pm.dpm.requested_ps;
376957e252bfSMichael Neumann 	int ret;
377057e252bfSMichael Neumann 
377157e252bfSMichael Neumann 	ret = ni_restrict_performance_levels_before_switch(rdev);
377257e252bfSMichael Neumann 	if (ret)
377357e252bfSMichael Neumann 		return ret;
377457e252bfSMichael Neumann 	ret = rv770_halt_smc(rdev);
377557e252bfSMichael Neumann 	if (ret)
377657e252bfSMichael Neumann 		return ret;
377757e252bfSMichael Neumann 	ret = ni_populate_smc_tdp_limits(rdev, new_ps);
377857e252bfSMichael Neumann 	if (ret)
377957e252bfSMichael Neumann 		return ret;
378057e252bfSMichael Neumann 	ret = rv770_resume_smc(rdev);
378157e252bfSMichael Neumann 	if (ret)
378257e252bfSMichael Neumann 		return ret;
378357e252bfSMichael Neumann 	ret = rv770_set_sw_state(rdev);
378457e252bfSMichael Neumann 	if (ret)
378557e252bfSMichael Neumann 		return ret;
378657e252bfSMichael Neumann 
378757e252bfSMichael Neumann 	return 0;
378857e252bfSMichael Neumann }
378957e252bfSMichael Neumann 
379057e252bfSMichael Neumann int ni_dpm_pre_set_power_state(struct radeon_device *rdev)
379157e252bfSMichael Neumann {
379257e252bfSMichael Neumann 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
379357e252bfSMichael Neumann 	struct radeon_ps requested_ps = *rdev->pm.dpm.requested_ps;
379457e252bfSMichael Neumann 	struct radeon_ps *new_ps = &requested_ps;
379557e252bfSMichael Neumann 
379657e252bfSMichael Neumann 	ni_update_requested_ps(rdev, new_ps);
379757e252bfSMichael Neumann 
379857e252bfSMichael Neumann 	ni_apply_state_adjust_rules(rdev, &eg_pi->requested_rps);
379957e252bfSMichael Neumann 
380057e252bfSMichael Neumann 	return 0;
380157e252bfSMichael Neumann }
380257e252bfSMichael Neumann 
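/*
 * Descriptive comment (added): full power-state switch. Restrict the
 * performance levels, reclock UVD on the appropriate side of the change,
 * drop power containment and CAC, halt the SMC, upload the new software
 * state, MC registers and memory timings, resume the SMC, latch the new
 * state, and finally re-enable CAC, power containment and the TDP limits.
 */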
380357e252bfSMichael Neumann int ni_dpm_set_power_state(struct radeon_device *rdev)
380457e252bfSMichael Neumann {
380557e252bfSMichael Neumann 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
380657e252bfSMichael Neumann 	struct radeon_ps *new_ps = &eg_pi->requested_rps;
380757e252bfSMichael Neumann 	struct radeon_ps *old_ps = &eg_pi->current_rps;
380857e252bfSMichael Neumann 	int ret;
380957e252bfSMichael Neumann 
381057e252bfSMichael Neumann 	ret = ni_restrict_performance_levels_before_switch(rdev);
381157e252bfSMichael Neumann 	if (ret) {
381257e252bfSMichael Neumann 		DRM_ERROR("ni_restrict_performance_levels_before_switch failed\n");
381357e252bfSMichael Neumann 		return ret;
381457e252bfSMichael Neumann 	}
381557e252bfSMichael Neumann 	ni_set_uvd_clock_before_set_eng_clock(rdev, new_ps, old_ps);
381657e252bfSMichael Neumann 	ret = ni_enable_power_containment(rdev, new_ps, false);
381757e252bfSMichael Neumann 	if (ret) {
381857e252bfSMichael Neumann 		DRM_ERROR("ni_enable_power_containment failed\n");
381957e252bfSMichael Neumann 		return ret;
382057e252bfSMichael Neumann 	}
382157e252bfSMichael Neumann 	ret = ni_enable_smc_cac(rdev, new_ps, false);
382257e252bfSMichael Neumann 	if (ret) {
382357e252bfSMichael Neumann 		DRM_ERROR("ni_enable_smc_cac failed\n");
382457e252bfSMichael Neumann 		return ret;
382557e252bfSMichael Neumann 	}
382657e252bfSMichael Neumann 	ret = rv770_halt_smc(rdev);
382757e252bfSMichael Neumann 	if (ret) {
382857e252bfSMichael Neumann 		DRM_ERROR("rv770_halt_smc failed\n");
382957e252bfSMichael Neumann 		return ret;
383057e252bfSMichael Neumann 	}
383157e252bfSMichael Neumann 	if (eg_pi->smu_uvd_hs)
383257e252bfSMichael Neumann 		btc_notify_uvd_to_smc(rdev, new_ps);
383357e252bfSMichael Neumann 	ret = ni_upload_sw_state(rdev, new_ps);
383457e252bfSMichael Neumann 	if (ret) {
383557e252bfSMichael Neumann 		DRM_ERROR("ni_upload_sw_state failed\n");
383657e252bfSMichael Neumann 		return ret;
383757e252bfSMichael Neumann 	}
383857e252bfSMichael Neumann 	if (eg_pi->dynamic_ac_timing) {
383957e252bfSMichael Neumann 		ret = ni_upload_mc_reg_table(rdev, new_ps);
384057e252bfSMichael Neumann 		if (ret) {
384157e252bfSMichael Neumann 			DRM_ERROR("ni_upload_mc_reg_table failed\n");
384257e252bfSMichael Neumann 			return ret;
384357e252bfSMichael Neumann 		}
384457e252bfSMichael Neumann 	}
384557e252bfSMichael Neumann 	ret = ni_program_memory_timing_parameters(rdev, new_ps);
384657e252bfSMichael Neumann 	if (ret) {
384757e252bfSMichael Neumann 		DRM_ERROR("ni_program_memory_timing_parameters failed\n");
384857e252bfSMichael Neumann 		return ret;
384957e252bfSMichael Neumann 	}
385057e252bfSMichael Neumann 	ret = rv770_resume_smc(rdev);
385157e252bfSMichael Neumann 	if (ret) {
385257e252bfSMichael Neumann 		DRM_ERROR("rv770_resume_smc failed\n");
385357e252bfSMichael Neumann 		return ret;
385457e252bfSMichael Neumann 	}
385557e252bfSMichael Neumann 	ret = rv770_set_sw_state(rdev);
385657e252bfSMichael Neumann 	if (ret) {
385757e252bfSMichael Neumann 		DRM_ERROR("rv770_set_sw_state failed\n");
385857e252bfSMichael Neumann 		return ret;
385957e252bfSMichael Neumann 	}
386057e252bfSMichael Neumann 	ni_set_uvd_clock_after_set_eng_clock(rdev, new_ps, old_ps);
386157e252bfSMichael Neumann 	ret = ni_enable_smc_cac(rdev, new_ps, true);
386257e252bfSMichael Neumann 	if (ret) {
386357e252bfSMichael Neumann 		DRM_ERROR("ni_enable_smc_cac failed\n");
386457e252bfSMichael Neumann 		return ret;
386557e252bfSMichael Neumann 	}
386657e252bfSMichael Neumann 	ret = ni_enable_power_containment(rdev, new_ps, true);
386757e252bfSMichael Neumann 	if (ret) {
386857e252bfSMichael Neumann 		DRM_ERROR("ni_enable_power_containment failed\n");
386957e252bfSMichael Neumann 		return ret;
387057e252bfSMichael Neumann 	}
387157e252bfSMichael Neumann 
387257e252bfSMichael Neumann 	/* update tdp */
387357e252bfSMichael Neumann 	ret = ni_power_control_set_level(rdev);
387457e252bfSMichael Neumann 	if (ret) {
387557e252bfSMichael Neumann 		DRM_ERROR("ni_power_control_set_level failed\n");
387657e252bfSMichael Neumann 		return ret;
387757e252bfSMichael Neumann 	}
387857e252bfSMichael Neumann 
387957e252bfSMichael Neumann 	return 0;
388057e252bfSMichael Neumann }
388157e252bfSMichael Neumann 
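/* Record the state that was just programmed as the current power state. */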
388257e252bfSMichael Neumann void ni_dpm_post_set_power_state(struct radeon_device *rdev)
388357e252bfSMichael Neumann {
388457e252bfSMichael Neumann 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
388557e252bfSMichael Neumann 	struct radeon_ps *new_ps = &eg_pi->requested_rps;
388657e252bfSMichael Neumann 
388757e252bfSMichael Neumann 	ni_update_current_ps(rdev, new_ps);
388857e252bfSMichael Neumann }
388957e252bfSMichael Neumann 
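/*
 * Used on ASIC reset: restrict the performance levels and drop back to the
 * boot state.
 */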
389057e252bfSMichael Neumann void ni_dpm_reset_asic(struct radeon_device *rdev)
389157e252bfSMichael Neumann {
389257e252bfSMichael Neumann 	ni_restrict_performance_levels_before_switch(rdev);
389357e252bfSMichael Neumann 	rv770_set_boot_state(rdev);
389457e252bfSMichael Neumann }
389557e252bfSMichael Neumann 
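/*
 * Overlay unions used to read the ATOM PowerPlay tables: the raw BIOS data
 * is accessed through whichever member matches the table revision reported
 * by the header.
 */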
389657e252bfSMichael Neumann union power_info {
389757e252bfSMichael Neumann 	struct _ATOM_POWERPLAY_INFO info;
389857e252bfSMichael Neumann 	struct _ATOM_POWERPLAY_INFO_V2 info_2;
389957e252bfSMichael Neumann 	struct _ATOM_POWERPLAY_INFO_V3 info_3;
390057e252bfSMichael Neumann 	struct _ATOM_PPLIB_POWERPLAYTABLE pplib;
390157e252bfSMichael Neumann 	struct _ATOM_PPLIB_POWERPLAYTABLE2 pplib2;
390257e252bfSMichael Neumann 	struct _ATOM_PPLIB_POWERPLAYTABLE3 pplib3;
390357e252bfSMichael Neumann };
390457e252bfSMichael Neumann 
390557e252bfSMichael Neumann union pplib_clock_info {
390657e252bfSMichael Neumann 	struct _ATOM_PPLIB_R600_CLOCK_INFO r600;
390757e252bfSMichael Neumann 	struct _ATOM_PPLIB_RS780_CLOCK_INFO rs780;
390857e252bfSMichael Neumann 	struct _ATOM_PPLIB_EVERGREEN_CLOCK_INFO evergreen;
390957e252bfSMichael Neumann 	struct _ATOM_PPLIB_SUMO_CLOCK_INFO sumo;
391057e252bfSMichael Neumann };
391157e252bfSMichael Neumann 
391257e252bfSMichael Neumann union pplib_power_state {
391357e252bfSMichael Neumann 	struct _ATOM_PPLIB_STATE v1;
391457e252bfSMichael Neumann 	struct _ATOM_PPLIB_STATE_V2 v2;
391557e252bfSMichael Neumann };
391657e252bfSMichael Neumann 
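/*
 * Copy the classification flags and, where present, the UVD VCLK/DCLK from a
 * non-clock info entry into the radeon_ps, and remember the boot and UVD
 * states for later use.
 */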
391757e252bfSMichael Neumann static void ni_parse_pplib_non_clock_info(struct radeon_device *rdev,
391857e252bfSMichael Neumann 					  struct radeon_ps *rps,
391957e252bfSMichael Neumann 					  struct _ATOM_PPLIB_NONCLOCK_INFO *non_clock_info,
392057e252bfSMichael Neumann 					  u8 table_rev)
392157e252bfSMichael Neumann {
392257e252bfSMichael Neumann 	rps->caps = le32_to_cpu(non_clock_info->ulCapsAndSettings);
392357e252bfSMichael Neumann 	rps->class = le16_to_cpu(non_clock_info->usClassification);
392457e252bfSMichael Neumann 	rps->class2 = le16_to_cpu(non_clock_info->usClassification2);
392557e252bfSMichael Neumann 
392657e252bfSMichael Neumann 	if (ATOM_PPLIB_NONCLOCKINFO_VER1 < table_rev) {
392757e252bfSMichael Neumann 		rps->vclk = le32_to_cpu(non_clock_info->ulVCLK);
392857e252bfSMichael Neumann 		rps->dclk = le32_to_cpu(non_clock_info->ulDCLK);
392957e252bfSMichael Neumann 	} else if (r600_is_uvd_state(rps->class, rps->class2)) {
393057e252bfSMichael Neumann 		rps->vclk = RV770_DEFAULT_VCLK_FREQ;
393157e252bfSMichael Neumann 		rps->dclk = RV770_DEFAULT_DCLK_FREQ;
393257e252bfSMichael Neumann 	} else {
393357e252bfSMichael Neumann 		rps->vclk = 0;
393457e252bfSMichael Neumann 		rps->dclk = 0;
393557e252bfSMichael Neumann 	}
393657e252bfSMichael Neumann 
393757e252bfSMichael Neumann 	if (rps->class & ATOM_PPLIB_CLASSIFICATION_BOOT)
393857e252bfSMichael Neumann 		rdev->pm.dpm.boot_ps = rps;
393957e252bfSMichael Neumann 	if (rps->class & ATOM_PPLIB_CLASSIFICATION_UVDSTATE)
394057e252bfSMichael Neumann 		rdev->pm.dpm.uvd_ps = rps;
394157e252bfSMichael Neumann }
394257e252bfSMichael Neumann 
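/*
 * Fill in one performance level of a state from an evergreen-layout clock
 * info entry: engine/memory clocks and voltages, plus bookkeeping for the
 * ACPI and ULV levels, the VDDC range seen in the table and the boot state.
 */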
394357e252bfSMichael Neumann static void ni_parse_pplib_clock_info(struct radeon_device *rdev,
394457e252bfSMichael Neumann 				      struct radeon_ps *rps, int index,
394557e252bfSMichael Neumann 				      union pplib_clock_info *clock_info)
394657e252bfSMichael Neumann {
394757e252bfSMichael Neumann 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
394857e252bfSMichael Neumann 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
394957e252bfSMichael Neumann 	struct ni_ps *ps = ni_get_ps(rps);
395057e252bfSMichael Neumann 	struct rv7xx_pl *pl = &ps->performance_levels[index];
395157e252bfSMichael Neumann 
395257e252bfSMichael Neumann 	ps->performance_level_count = index + 1;
395357e252bfSMichael Neumann 
395457e252bfSMichael Neumann 	pl->sclk = le16_to_cpu(clock_info->evergreen.usEngineClockLow);
395557e252bfSMichael Neumann 	pl->sclk |= clock_info->evergreen.ucEngineClockHigh << 16;
395657e252bfSMichael Neumann 	pl->mclk = le16_to_cpu(clock_info->evergreen.usMemoryClockLow);
395757e252bfSMichael Neumann 	pl->mclk |= clock_info->evergreen.ucMemoryClockHigh << 16;
395857e252bfSMichael Neumann 
395957e252bfSMichael Neumann 	pl->vddc = le16_to_cpu(clock_info->evergreen.usVDDC);
396057e252bfSMichael Neumann 	pl->vddci = le16_to_cpu(clock_info->evergreen.usVDDCI);
396157e252bfSMichael Neumann 	pl->flags = le32_to_cpu(clock_info->evergreen.ulFlags);
396257e252bfSMichael Neumann 
396357e252bfSMichael Neumann 	/* patch up vddc if necessary */
396457e252bfSMichael Neumann 	if (pl->vddc == 0xff01) {
3965*c6f73aabSFrançois Tigeot 		if (pi->max_vddc)
3966*c6f73aabSFrançois Tigeot 			pl->vddc = pi->max_vddc;
396757e252bfSMichael Neumann 	}
396857e252bfSMichael Neumann 
396957e252bfSMichael Neumann 	if (rps->class & ATOM_PPLIB_CLASSIFICATION_ACPI) {
397057e252bfSMichael Neumann 		pi->acpi_vddc = pl->vddc;
397157e252bfSMichael Neumann 		eg_pi->acpi_vddci = pl->vddci;
397257e252bfSMichael Neumann 		if (ps->performance_levels[0].flags & ATOM_PPLIB_R600_FLAGS_PCIEGEN2)
397357e252bfSMichael Neumann 			pi->acpi_pcie_gen2 = true;
397457e252bfSMichael Neumann 		else
397557e252bfSMichael Neumann 			pi->acpi_pcie_gen2 = false;
397657e252bfSMichael Neumann 	}
397757e252bfSMichael Neumann 
397857e252bfSMichael Neumann 	if (rps->class2 & ATOM_PPLIB_CLASSIFICATION2_ULV) {
397957e252bfSMichael Neumann 		eg_pi->ulv.supported = true;
398057e252bfSMichael Neumann 		eg_pi->ulv.pl = pl;
398157e252bfSMichael Neumann 	}
398257e252bfSMichael Neumann 
398357e252bfSMichael Neumann 	if (pi->min_vddc_in_table > pl->vddc)
398457e252bfSMichael Neumann 		pi->min_vddc_in_table = pl->vddc;
398557e252bfSMichael Neumann 
398657e252bfSMichael Neumann 	if (pi->max_vddc_in_table < pl->vddc)
398757e252bfSMichael Neumann 		pi->max_vddc_in_table = pl->vddc;
398857e252bfSMichael Neumann 
398957e252bfSMichael Neumann 	/* patch up boot state */
399057e252bfSMichael Neumann 	if (rps->class & ATOM_PPLIB_CLASSIFICATION_BOOT) {
399157e252bfSMichael Neumann 		u16 vddc, vddci, mvdd;
399257e252bfSMichael Neumann 		radeon_atombios_get_default_voltages(rdev, &vddc, &vddci, &mvdd);
399357e252bfSMichael Neumann 		pl->mclk = rdev->clock.default_mclk;
399457e252bfSMichael Neumann 		pl->sclk = rdev->clock.default_sclk;
399557e252bfSMichael Neumann 		pl->vddc = vddc;
399657e252bfSMichael Neumann 		pl->vddci = vddci;
399757e252bfSMichael Neumann 	}
399857e252bfSMichael Neumann 
399957e252bfSMichael Neumann 	if ((rps->class & ATOM_PPLIB_CLASSIFICATION_UI_MASK) ==
400057e252bfSMichael Neumann 	    ATOM_PPLIB_CLASSIFICATION_UI_PERFORMANCE) {
400157e252bfSMichael Neumann 		rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.sclk = pl->sclk;
400257e252bfSMichael Neumann 		rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.mclk = pl->mclk;
400357e252bfSMichael Neumann 		rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.vddc = pl->vddc;
400457e252bfSMichael Neumann 		rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.vddci = pl->vddci;
400557e252bfSMichael Neumann 	}
400657e252bfSMichael Neumann }
400757e252bfSMichael Neumann 
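/*
 * Walk the ATOM PowerPlay state array and build rdev->pm.dpm.ps, allocating
 * a private ni_ps and parsing the non-clock and per-level clock info for
 * every state.
 */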
400857e252bfSMichael Neumann static int ni_parse_power_table(struct radeon_device *rdev)
400957e252bfSMichael Neumann {
401057e252bfSMichael Neumann 	struct radeon_mode_info *mode_info = &rdev->mode_info;
401157e252bfSMichael Neumann 	struct _ATOM_PPLIB_NONCLOCK_INFO *non_clock_info;
401257e252bfSMichael Neumann 	union pplib_power_state *power_state;
401357e252bfSMichael Neumann 	int i, j;
401457e252bfSMichael Neumann 	union pplib_clock_info *clock_info;
401557e252bfSMichael Neumann 	union power_info *power_info;
401657e252bfSMichael Neumann 	int index = GetIndexIntoMasterTable(DATA, PowerPlayInfo);
401757e252bfSMichael Neumann 	u16 data_offset;
401857e252bfSMichael Neumann 	u8 frev, crev;
401957e252bfSMichael Neumann 	struct ni_ps *ps;
402057e252bfSMichael Neumann 
402157e252bfSMichael Neumann 	if (!atom_parse_data_header(mode_info->atom_context, index, NULL,
402257e252bfSMichael Neumann 				   &frev, &crev, &data_offset))
402357e252bfSMichael Neumann 		return -EINVAL;
402457e252bfSMichael Neumann 	power_info = (union power_info *)((uint8_t*)mode_info->atom_context->bios + data_offset);
402557e252bfSMichael Neumann 
402657e252bfSMichael Neumann 	rdev->pm.dpm.ps = kzalloc(sizeof(struct radeon_ps) *
402757e252bfSMichael Neumann 				  power_info->pplib.ucNumStates, GFP_KERNEL);
402857e252bfSMichael Neumann 	if (!rdev->pm.dpm.ps)
402957e252bfSMichael Neumann 		return -ENOMEM;
403057e252bfSMichael Neumann 
403157e252bfSMichael Neumann 	for (i = 0; i < power_info->pplib.ucNumStates; i++) {
403257e252bfSMichael Neumann 		power_state = (union pplib_power_state *)
403357e252bfSMichael Neumann 			((uint8_t*)mode_info->atom_context->bios + data_offset +
403457e252bfSMichael Neumann 			 le16_to_cpu(power_info->pplib.usStateArrayOffset) +
403557e252bfSMichael Neumann 			 i * power_info->pplib.ucStateEntrySize);
403657e252bfSMichael Neumann 		non_clock_info = (struct _ATOM_PPLIB_NONCLOCK_INFO *)
403757e252bfSMichael Neumann 			((uint8_t*)mode_info->atom_context->bios + data_offset +
403857e252bfSMichael Neumann 			 le16_to_cpu(power_info->pplib.usNonClockInfoArrayOffset) +
403957e252bfSMichael Neumann 			 (power_state->v1.ucNonClockStateIndex *
404057e252bfSMichael Neumann 			  power_info->pplib.ucNonClockSize));
404157e252bfSMichael Neumann 		if (power_info->pplib.ucStateEntrySize - 1) {
40424cd92098Szrj 			u8 *idx;
404357e252bfSMichael Neumann 			ps = kzalloc(sizeof(struct ni_ps), GFP_KERNEL);
404457e252bfSMichael Neumann 			if (ps == NULL) {
				/* don't leak the ni_ps allocations made for earlier states */
				while (--i >= 0)
					kfree(rdev->pm.dpm.ps[i].ps_priv);
404557e252bfSMichael Neumann 				kfree(rdev->pm.dpm.ps);
404657e252bfSMichael Neumann 				return -ENOMEM;
404757e252bfSMichael Neumann 			}
404857e252bfSMichael Neumann 			rdev->pm.dpm.ps[i].ps_priv = ps;
404957e252bfSMichael Neumann 			ni_parse_pplib_non_clock_info(rdev, &rdev->pm.dpm.ps[i],
405057e252bfSMichael Neumann 							 non_clock_info,
405157e252bfSMichael Neumann 							 power_info->pplib.ucNonClockSize);
40524cd92098Szrj 			idx = (u8 *)&power_state->v1.ucClockStateIndices[0];
405357e252bfSMichael Neumann 			for (j = 0; j < (power_info->pplib.ucStateEntrySize - 1); j++) {
405457e252bfSMichael Neumann 				clock_info = (union pplib_clock_info *)
405557e252bfSMichael Neumann 					((uint8_t*)mode_info->atom_context->bios + data_offset +
405657e252bfSMichael Neumann 					 le16_to_cpu(power_info->pplib.usClockInfoArrayOffset) +
40574cd92098Szrj 					 (idx[j] * power_info->pplib.ucClockInfoSize));
405857e252bfSMichael Neumann 				ni_parse_pplib_clock_info(rdev,
405957e252bfSMichael Neumann 							  &rdev->pm.dpm.ps[i], j,
406057e252bfSMichael Neumann 							  clock_info);
406157e252bfSMichael Neumann 			}
406257e252bfSMichael Neumann 		}
406357e252bfSMichael Neumann 	}
406457e252bfSMichael Neumann 	rdev->pm.dpm.num_ps = power_info->pplib.ucNumStates;
406557e252bfSMichael Neumann 	return 0;
406657e252bfSMichael Neumann }
406757e252bfSMichael Neumann 
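/*
 * One-time DPM initialization: allocate the power info structures, parse the
 * BIOS power tables, seed the display-clock/VDDC dependency table, choose
 * thresholds and CAC weights based on the detected Cayman variant and set
 * the feature defaults used by the rest of the code.
 */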
406857e252bfSMichael Neumann int ni_dpm_init(struct radeon_device *rdev)
406957e252bfSMichael Neumann {
407057e252bfSMichael Neumann 	struct rv7xx_power_info *pi;
407157e252bfSMichael Neumann 	struct evergreen_power_info *eg_pi;
407257e252bfSMichael Neumann 	struct ni_power_info *ni_pi;
407357e252bfSMichael Neumann 	struct atom_clock_dividers dividers;
407457e252bfSMichael Neumann 	int ret;
407557e252bfSMichael Neumann 
407657e252bfSMichael Neumann 	ni_pi = kzalloc(sizeof(struct ni_power_info), GFP_KERNEL);
407757e252bfSMichael Neumann 	if (ni_pi == NULL)
407857e252bfSMichael Neumann 		return -ENOMEM;
407957e252bfSMichael Neumann 	rdev->pm.dpm.priv = ni_pi;
408057e252bfSMichael Neumann 	eg_pi = &ni_pi->eg;
408157e252bfSMichael Neumann 	pi = &eg_pi->rv7xx;
408257e252bfSMichael Neumann 
408357e252bfSMichael Neumann 	rv770_get_max_vddc(rdev);
408457e252bfSMichael Neumann 
408557e252bfSMichael Neumann 	eg_pi->ulv.supported = false;
408657e252bfSMichael Neumann 	pi->acpi_vddc = 0;
408757e252bfSMichael Neumann 	eg_pi->acpi_vddci = 0;
408857e252bfSMichael Neumann 	pi->min_vddc_in_table = 0;
408957e252bfSMichael Neumann 	pi->max_vddc_in_table = 0;
409057e252bfSMichael Neumann 
4091*c6f73aabSFrançois Tigeot 	ret = r600_get_platform_caps(rdev);
4092*c6f73aabSFrançois Tigeot 	if (ret)
4093*c6f73aabSFrançois Tigeot 		return ret;
4094*c6f73aabSFrançois Tigeot 
409557e252bfSMichael Neumann 	ret = ni_parse_power_table(rdev);
409657e252bfSMichael Neumann 	if (ret)
409757e252bfSMichael Neumann 		return ret;
409857e252bfSMichael Neumann 	ret = r600_parse_extended_power_table(rdev);
409957e252bfSMichael Neumann 	if (ret)
410057e252bfSMichael Neumann 		return ret;
410157e252bfSMichael Neumann 
410257e252bfSMichael Neumann 	rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries =
410357e252bfSMichael Neumann 		kzalloc(4 * sizeof(struct radeon_clock_voltage_dependency_entry), GFP_KERNEL);
410457e252bfSMichael Neumann 	if (!rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries) {
410557e252bfSMichael Neumann 		r600_free_extended_power_table(rdev);
410657e252bfSMichael Neumann 		return -ENOMEM;
410757e252bfSMichael Neumann 	}
410857e252bfSMichael Neumann 	rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.count = 4;
410957e252bfSMichael Neumann 	rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[0].clk = 0;
411057e252bfSMichael Neumann 	rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[0].v = 0;
411157e252bfSMichael Neumann 	rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[1].clk = 36000;
411257e252bfSMichael Neumann 	rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[1].v = 720;
411357e252bfSMichael Neumann 	rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[2].clk = 54000;
411457e252bfSMichael Neumann 	rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[2].v = 810;
411557e252bfSMichael Neumann 	rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[3].clk = 72000;
411657e252bfSMichael Neumann 	rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[3].v = 900;
411757e252bfSMichael Neumann 
411857e252bfSMichael Neumann 	ni_patch_dependency_tables_based_on_leakage(rdev);
411957e252bfSMichael Neumann 
412057e252bfSMichael Neumann 	if (rdev->pm.dpm.voltage_response_time == 0)
412157e252bfSMichael Neumann 		rdev->pm.dpm.voltage_response_time = R600_VOLTAGERESPONSETIME_DFLT;
412257e252bfSMichael Neumann 	if (rdev->pm.dpm.backbias_response_time == 0)
412357e252bfSMichael Neumann 		rdev->pm.dpm.backbias_response_time = R600_BACKBIASRESPONSETIME_DFLT;
412457e252bfSMichael Neumann 
412557e252bfSMichael Neumann 	ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
412657e252bfSMichael Neumann 					     0, false, &dividers);
412757e252bfSMichael Neumann 	if (ret)
412857e252bfSMichael Neumann 		pi->ref_div = dividers.ref_div + 1;
412957e252bfSMichael Neumann 	else
413057e252bfSMichael Neumann 		pi->ref_div = R600_REFERENCEDIVIDER_DFLT;
413157e252bfSMichael Neumann 
413257e252bfSMichael Neumann 	pi->rlp = RV770_RLP_DFLT;
413357e252bfSMichael Neumann 	pi->rmp = RV770_RMP_DFLT;
413457e252bfSMichael Neumann 	pi->lhp = RV770_LHP_DFLT;
413557e252bfSMichael Neumann 	pi->lmp = RV770_LMP_DFLT;
413657e252bfSMichael Neumann 
413757e252bfSMichael Neumann 	eg_pi->ats[0].rlp = RV770_RLP_DFLT;
413857e252bfSMichael Neumann 	eg_pi->ats[0].rmp = RV770_RMP_DFLT;
413957e252bfSMichael Neumann 	eg_pi->ats[0].lhp = RV770_LHP_DFLT;
414057e252bfSMichael Neumann 	eg_pi->ats[0].lmp = RV770_LMP_DFLT;
414157e252bfSMichael Neumann 
414257e252bfSMichael Neumann 	eg_pi->ats[1].rlp = BTC_RLP_UVD_DFLT;
414357e252bfSMichael Neumann 	eg_pi->ats[1].rmp = BTC_RMP_UVD_DFLT;
414457e252bfSMichael Neumann 	eg_pi->ats[1].lhp = BTC_LHP_UVD_DFLT;
414557e252bfSMichael Neumann 	eg_pi->ats[1].lmp = BTC_LMP_UVD_DFLT;
414657e252bfSMichael Neumann 
414757e252bfSMichael Neumann 	eg_pi->smu_uvd_hs = true;
414857e252bfSMichael Neumann 
4149*c6f73aabSFrançois Tigeot 	if (rdev->pdev->device == 0x6707) {
415057e252bfSMichael Neumann 		pi->mclk_strobe_mode_threshold = 55000;
415157e252bfSMichael Neumann 		pi->mclk_edc_enable_threshold = 55000;
415257e252bfSMichael Neumann 		eg_pi->mclk_edc_wr_enable_threshold = 55000;
415357e252bfSMichael Neumann 	} else {
415457e252bfSMichael Neumann 		pi->mclk_strobe_mode_threshold = 40000;
415557e252bfSMichael Neumann 		pi->mclk_edc_enable_threshold = 40000;
415657e252bfSMichael Neumann 		eg_pi->mclk_edc_wr_enable_threshold = 40000;
415757e252bfSMichael Neumann 	}
415857e252bfSMichael Neumann 	ni_pi->mclk_rtt_mode_threshold = eg_pi->mclk_edc_wr_enable_threshold;
415957e252bfSMichael Neumann 
416057e252bfSMichael Neumann 	pi->voltage_control =
416157e252bfSMichael Neumann 		radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_VDDC, 0);
416257e252bfSMichael Neumann 
416357e252bfSMichael Neumann 	pi->mvdd_control =
416457e252bfSMichael Neumann 		radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_MVDDC, 0);
416557e252bfSMichael Neumann 
416657e252bfSMichael Neumann 	eg_pi->vddci_control =
416757e252bfSMichael Neumann 		radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_VDDCI, 0);
416857e252bfSMichael Neumann 
416957e252bfSMichael Neumann 	rv770_get_engine_memory_ss(rdev);
417057e252bfSMichael Neumann 
417157e252bfSMichael Neumann 	pi->asi = RV770_ASI_DFLT;
417257e252bfSMichael Neumann 	pi->pasi = CYPRESS_HASI_DFLT;
417357e252bfSMichael Neumann 	pi->vrc = CYPRESS_VRC_DFLT;
417457e252bfSMichael Neumann 
417557e252bfSMichael Neumann 	pi->power_gating = false;
417657e252bfSMichael Neumann 
417757e252bfSMichael Neumann 	pi->gfx_clock_gating = true;
417857e252bfSMichael Neumann 
417957e252bfSMichael Neumann 	pi->mg_clock_gating = true;
418057e252bfSMichael Neumann 	pi->mgcgtssm = true;
418157e252bfSMichael Neumann 	eg_pi->ls_clock_gating = false;
418257e252bfSMichael Neumann 	eg_pi->sclk_deep_sleep = false;
418357e252bfSMichael Neumann 
418457e252bfSMichael Neumann 	pi->dynamic_pcie_gen2 = true;
418557e252bfSMichael Neumann 
418657e252bfSMichael Neumann 	if (rdev->pm.int_thermal_type != THERMAL_TYPE_NONE)
418757e252bfSMichael Neumann 		pi->thermal_protection = true;
418857e252bfSMichael Neumann 	else
418957e252bfSMichael Neumann 		pi->thermal_protection = false;
419057e252bfSMichael Neumann 
419157e252bfSMichael Neumann 	pi->display_gap = true;
419257e252bfSMichael Neumann 
419357e252bfSMichael Neumann 	pi->dcodt = true;
419457e252bfSMichael Neumann 
419557e252bfSMichael Neumann 	pi->ulps = true;
419657e252bfSMichael Neumann 
419757e252bfSMichael Neumann 	eg_pi->dynamic_ac_timing = true;
419857e252bfSMichael Neumann 	eg_pi->abm = true;
419957e252bfSMichael Neumann 	eg_pi->mcls = true;
420057e252bfSMichael Neumann 	eg_pi->light_sleep = true;
420157e252bfSMichael Neumann 	eg_pi->memory_transition = true;
420257e252bfSMichael Neumann #if defined(CONFIG_ACPI)
420357e252bfSMichael Neumann 	eg_pi->pcie_performance_request =
420457e252bfSMichael Neumann 		radeon_acpi_is_pcie_performance_request_supported(rdev);
420557e252bfSMichael Neumann #else
420657e252bfSMichael Neumann 	eg_pi->pcie_performance_request = false;
420757e252bfSMichael Neumann #endif
420857e252bfSMichael Neumann 
420957e252bfSMichael Neumann 	eg_pi->dll_default_on = false;
421057e252bfSMichael Neumann 
421157e252bfSMichael Neumann 	eg_pi->sclk_deep_sleep = false;
421257e252bfSMichael Neumann 
421357e252bfSMichael Neumann 	pi->mclk_stutter_mode_threshold = 0;
421457e252bfSMichael Neumann 
421557e252bfSMichael Neumann 	pi->sram_end = SMC_RAM_END;
421657e252bfSMichael Neumann 
421757e252bfSMichael Neumann 	rdev->pm.dpm.dyn_state.mclk_sclk_ratio = 3;
421857e252bfSMichael Neumann 	rdev->pm.dpm.dyn_state.vddc_vddci_delta = 200;
421957e252bfSMichael Neumann 	rdev->pm.dpm.dyn_state.min_vddc_for_pcie_gen2 = 900;
422057e252bfSMichael Neumann 	rdev->pm.dpm.dyn_state.valid_sclk_values.count = ARRAY_SIZE(btc_valid_sclk);
422157e252bfSMichael Neumann 	rdev->pm.dpm.dyn_state.valid_sclk_values.values = btc_valid_sclk;
422257e252bfSMichael Neumann 	rdev->pm.dpm.dyn_state.valid_mclk_values.count = 0;
422357e252bfSMichael Neumann 	rdev->pm.dpm.dyn_state.valid_mclk_values.values = NULL;
422457e252bfSMichael Neumann 	rdev->pm.dpm.dyn_state.sclk_mclk_delta = 12500;
422557e252bfSMichael Neumann 
422657e252bfSMichael Neumann 	ni_pi->cac_data.leakage_coefficients.at = 516;
422757e252bfSMichael Neumann 	ni_pi->cac_data.leakage_coefficients.bt = 18;
422857e252bfSMichael Neumann 	ni_pi->cac_data.leakage_coefficients.av = 51;
422957e252bfSMichael Neumann 	ni_pi->cac_data.leakage_coefficients.bv = 2957;
423057e252bfSMichael Neumann 
4231*c6f73aabSFrançois Tigeot 	switch (rdev->pdev->device) {
423257e252bfSMichael Neumann 	case 0x6700:
423357e252bfSMichael Neumann 	case 0x6701:
423457e252bfSMichael Neumann 	case 0x6702:
423557e252bfSMichael Neumann 	case 0x6703:
423657e252bfSMichael Neumann 	case 0x6718:
423757e252bfSMichael Neumann 		ni_pi->cac_weights = &cac_weights_cayman_xt;
423857e252bfSMichael Neumann 		break;
423957e252bfSMichael Neumann 	case 0x6705:
424057e252bfSMichael Neumann 	case 0x6719:
424157e252bfSMichael Neumann 	case 0x671D:
424257e252bfSMichael Neumann 	case 0x671C:
424357e252bfSMichael Neumann 	default:
424457e252bfSMichael Neumann 		ni_pi->cac_weights = &cac_weights_cayman_pro;
424557e252bfSMichael Neumann 		break;
424657e252bfSMichael Neumann 	case 0x6704:
424757e252bfSMichael Neumann 	case 0x6706:
424857e252bfSMichael Neumann 	case 0x6707:
424957e252bfSMichael Neumann 	case 0x6708:
425057e252bfSMichael Neumann 	case 0x6709:
425157e252bfSMichael Neumann 		ni_pi->cac_weights = &cac_weights_cayman_le;
425257e252bfSMichael Neumann 		break;
425357e252bfSMichael Neumann 	}
425457e252bfSMichael Neumann 
425557e252bfSMichael Neumann 	if (ni_pi->cac_weights->enable_power_containment_by_default) {
425657e252bfSMichael Neumann 		ni_pi->enable_power_containment = true;
425757e252bfSMichael Neumann 		ni_pi->enable_cac = true;
425857e252bfSMichael Neumann 		ni_pi->enable_sq_ramping = true;
425957e252bfSMichael Neumann 	} else {
426057e252bfSMichael Neumann 		ni_pi->enable_power_containment = false;
426157e252bfSMichael Neumann 		ni_pi->enable_cac = false;
426257e252bfSMichael Neumann 		ni_pi->enable_sq_ramping = false;
426357e252bfSMichael Neumann 	}
426457e252bfSMichael Neumann 
426557e252bfSMichael Neumann 	ni_pi->driver_calculate_cac_leakage = false;
426657e252bfSMichael Neumann 	ni_pi->cac_configuration_required = true;
426757e252bfSMichael Neumann 
426857e252bfSMichael Neumann 	if (ni_pi->cac_configuration_required) {
426957e252bfSMichael Neumann 		ni_pi->support_cac_long_term_average = true;
427057e252bfSMichael Neumann 		ni_pi->lta_window_size = ni_pi->cac_weights->l2_lta_window_size;
427157e252bfSMichael Neumann 		ni_pi->lts_truncate = ni_pi->cac_weights->lts_truncate;
427257e252bfSMichael Neumann 	} else {
427357e252bfSMichael Neumann 		ni_pi->support_cac_long_term_average = false;
427457e252bfSMichael Neumann 		ni_pi->lta_window_size = 0;
427557e252bfSMichael Neumann 		ni_pi->lts_truncate = 0;
427657e252bfSMichael Neumann 	}
427757e252bfSMichael Neumann 
427857e252bfSMichael Neumann 	ni_pi->use_power_boost_limit = true;
427957e252bfSMichael Neumann 
42804cd92098Szrj 	/* make sure dc limits are valid */
42814cd92098Szrj 	if ((rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc.sclk == 0) ||
42824cd92098Szrj 	    (rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc.mclk == 0))
42834cd92098Szrj 		rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc =
42844cd92098Szrj 			rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac;
42854cd92098Szrj 
428657e252bfSMichael Neumann 	return 0;
428757e252bfSMichael Neumann }
428857e252bfSMichael Neumann 
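/* Free everything allocated by ni_dpm_init() and the power table parser. */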
428957e252bfSMichael Neumann void ni_dpm_fini(struct radeon_device *rdev)
429057e252bfSMichael Neumann {
429157e252bfSMichael Neumann 	int i;
429257e252bfSMichael Neumann 
429357e252bfSMichael Neumann 	for (i = 0; i < rdev->pm.dpm.num_ps; i++) {
429457e252bfSMichael Neumann 		kfree(rdev->pm.dpm.ps[i].ps_priv);
429557e252bfSMichael Neumann 	}
429657e252bfSMichael Neumann 	kfree(rdev->pm.dpm.ps);
429757e252bfSMichael Neumann 	kfree(rdev->pm.dpm.priv);
429857e252bfSMichael Neumann 	kfree(rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries);
429957e252bfSMichael Neumann 	r600_free_extended_power_table(rdev);
430057e252bfSMichael Neumann }
430157e252bfSMichael Neumann 
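/* Dump a power state and each of its performance levels to the kernel log. */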
430257e252bfSMichael Neumann void ni_dpm_print_power_state(struct radeon_device *rdev,
430357e252bfSMichael Neumann 			      struct radeon_ps *rps)
430457e252bfSMichael Neumann {
430557e252bfSMichael Neumann 	struct ni_ps *ps = ni_get_ps(rps);
430657e252bfSMichael Neumann 	struct rv7xx_pl *pl;
430757e252bfSMichael Neumann 	int i;
430857e252bfSMichael Neumann 
430957e252bfSMichael Neumann 	r600_dpm_print_class_info(rps->class, rps->class2);
431057e252bfSMichael Neumann 	r600_dpm_print_cap_info(rps->caps);
431157e252bfSMichael Neumann 	printk("\tuvd    vclk: %d dclk: %d\n", rps->vclk, rps->dclk);
431257e252bfSMichael Neumann 	for (i = 0; i < ps->performance_level_count; i++) {
431357e252bfSMichael Neumann 		pl = &ps->performance_levels[i];
431457e252bfSMichael Neumann 		if (rdev->family >= CHIP_TAHITI)
431557e252bfSMichael Neumann 			printk("\t\tpower level %d    sclk: %u mclk: %u vddc: %u vddci: %u pcie gen: %u\n",
431657e252bfSMichael Neumann 			       i, pl->sclk, pl->mclk, pl->vddc, pl->vddci, pl->pcie_gen + 1);
431757e252bfSMichael Neumann 		else
431857e252bfSMichael Neumann 			printk("\t\tpower level %d    sclk: %u mclk: %u vddc: %u vddci: %u\n",
431957e252bfSMichael Neumann 			       i, pl->sclk, pl->mclk, pl->vddc, pl->vddci);
432057e252bfSMichael Neumann 	}
432157e252bfSMichael Neumann 	r600_dpm_print_ps_status(rdev, rps);
432257e252bfSMichael Neumann }
432357e252bfSMichael Neumann 
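/*
 * debugfs helper: print the performance level currently selected by the SMC,
 * as reported by the TARGET_AND_CURRENT_PROFILE_INDEX register.
 */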
432457e252bfSMichael Neumann void ni_dpm_debugfs_print_current_performance_level(struct radeon_device *rdev,
432557e252bfSMichael Neumann 						    struct seq_file *m)
432657e252bfSMichael Neumann {
4327*c6f73aabSFrançois Tigeot 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
4328*c6f73aabSFrançois Tigeot 	struct radeon_ps *rps = &eg_pi->current_rps;
432957e252bfSMichael Neumann 	struct ni_ps *ps = ni_get_ps(rps);
433057e252bfSMichael Neumann 	struct rv7xx_pl *pl;
433157e252bfSMichael Neumann 	u32 current_index =
433257e252bfSMichael Neumann 		(RREG32(TARGET_AND_CURRENT_PROFILE_INDEX) & CURRENT_STATE_INDEX_MASK) >>
433357e252bfSMichael Neumann 		CURRENT_STATE_INDEX_SHIFT;
433457e252bfSMichael Neumann 
433557e252bfSMichael Neumann 	if (current_index >= ps->performance_level_count) {
433657e252bfSMichael Neumann 		seq_printf(m, "invalid dpm profile %d\n", current_index);
433757e252bfSMichael Neumann 	} else {
433857e252bfSMichael Neumann 		pl = &ps->performance_levels[current_index];
433957e252bfSMichael Neumann 		seq_printf(m, "uvd    vclk: %d dclk: %d\n", rps->vclk, rps->dclk);
434057e252bfSMichael Neumann 		seq_printf(m, "power level %d    sclk: %u mclk: %u vddc: %u vddci: %u\n",
434157e252bfSMichael Neumann 			   current_index, pl->sclk, pl->mclk, pl->vddc, pl->vddci);
434257e252bfSMichael Neumann 	}
434357e252bfSMichael Neumann }
434457e252bfSMichael Neumann 
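/* Report the lowest or highest engine clock of the requested power state. */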
434557e252bfSMichael Neumann u32 ni_dpm_get_sclk(struct radeon_device *rdev, bool low)
434657e252bfSMichael Neumann {
434757e252bfSMichael Neumann 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
434857e252bfSMichael Neumann 	struct ni_ps *requested_state = ni_get_ps(&eg_pi->requested_rps);
434957e252bfSMichael Neumann 
435057e252bfSMichael Neumann 	if (low)
435157e252bfSMichael Neumann 		return requested_state->performance_levels[0].sclk;
435257e252bfSMichael Neumann 	else
435357e252bfSMichael Neumann 		return requested_state->performance_levels[requested_state->performance_level_count - 1].sclk;
435457e252bfSMichael Neumann }
435557e252bfSMichael Neumann 
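/* Report the lowest or highest memory clock of the requested power state. */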
435657e252bfSMichael Neumann u32 ni_dpm_get_mclk(struct radeon_device *rdev, bool low)
435757e252bfSMichael Neumann {
435857e252bfSMichael Neumann 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
435957e252bfSMichael Neumann 	struct ni_ps *requested_state = ni_get_ps(&eg_pi->requested_rps);
436057e252bfSMichael Neumann 
436157e252bfSMichael Neumann 	if (low)
436257e252bfSMichael Neumann 		return requested_state->performance_levels[0].mclk;
436357e252bfSMichael Neumann 	else
436457e252bfSMichael Neumann 		return requested_state->performance_levels[requested_state->performance_level_count - 1].mclk;
436557e252bfSMichael Neumann }
4366