xref: /dragonfly/sys/dev/drm/radeon/ni_dpm.c (revision d78d3a22)
/*
 * Copyright 2012 Advanced Micro Devices, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 */

#include <drm/drmP.h>
#include "radeon.h"
#include "radeon_asic.h"
#include "nid.h"
#include "r600_dpm.h"
#include "ni_dpm.h"
#include "atom.h"
#include <linux/math64.h>
#include <linux/seq_file.h>

#define MC_CG_ARB_FREQ_F0           0x0a
#define MC_CG_ARB_FREQ_F1           0x0b
#define MC_CG_ARB_FREQ_F2           0x0c
#define MC_CG_ARB_FREQ_F3           0x0d

#define SMC_RAM_END 0xC000

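/*
 * Per-block CAC (capacitance/activity) weight tables for the three Cayman
 * variants (XT, Pro, LE), used when programming the SMC's power/leakage
 * estimation.  The field order follows struct ni_cac_weights declared in
 * ni_dpm.h; the values are hardware tuning data and are presumably taken
 * from AMD's reference tables, so they are not meant to be edited by hand.
 */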
static const struct ni_cac_weights cac_weights_cayman_xt =
{
	0x15,
	0x2,
	0x19,
	0x2,
	0x8,
	0x14,
	0x2,
	0x16,
	0xE,
	0x17,
	0x13,
	0x2B,
	0x10,
	0x7,
	0x5,
	0x5,
	0x5,
	0x2,
	0x3,
	0x9,
	0x10,
	0x10,
	0x2B,
	0xA,
	0x9,
	0x4,
	0xD,
	0xD,
	0x3E,
	0x18,
	0x14,
	0,
	0x3,
	0x3,
	0x5,
	0,
	0x2,
	0,
	0,
	0,
	0,
	0,
	0,
	0,
	0,
	0,
	0x1CC,
	0,
	0x164,
	1,
	1,
	1,
	1,
	12,
	12,
	12,
	0x12,
	0x1F,
	132,
	5,
	7,
	0,
	{ 0, 0, 0, 0, 0, 0, 0, 0 },
	{ 0, 0, 0, 0 },
	true
};

static const struct ni_cac_weights cac_weights_cayman_pro =
{
	0x16,
	0x4,
	0x10,
	0x2,
	0xA,
	0x16,
	0x2,
	0x18,
	0x10,
	0x1A,
	0x16,
	0x2D,
	0x12,
	0xA,
	0x6,
	0x6,
	0x6,
	0x2,
	0x4,
	0xB,
	0x11,
	0x11,
	0x2D,
	0xC,
	0xC,
	0x7,
	0x10,
	0x10,
	0x3F,
	0x1A,
	0x16,
	0,
	0x7,
	0x4,
	0x6,
	1,
	0x2,
	0x1,
	0,
	0,
	0,
	0,
	0,
	0,
	0x30,
	0,
	0x1CF,
	0,
	0x166,
	1,
	1,
	1,
	1,
	12,
	12,
	12,
	0x15,
	0x1F,
	132,
	6,
	6,
	0,
	{ 0, 0, 0, 0, 0, 0, 0, 0 },
	{ 0, 0, 0, 0 },
	true
};

static const struct ni_cac_weights cac_weights_cayman_le =
{
	0x7,
	0xE,
	0x1,
	0xA,
	0x1,
	0x3F,
	0x2,
	0x18,
	0x10,
	0x1A,
	0x1,
	0x3F,
	0x1,
	0xE,
	0x6,
	0x6,
	0x6,
	0x2,
	0x4,
	0x9,
	0x1A,
	0x1A,
	0x2C,
	0xA,
	0x11,
	0x8,
	0x19,
	0x19,
	0x1,
	0x1,
	0x1A,
	0,
	0x8,
	0x5,
	0x8,
	0x1,
	0x3,
	0x1,
	0,
	0,
	0,
	0,
	0,
	0,
	0x38,
	0x38,
	0x239,
	0x3,
	0x18A,
	1,
	1,
	1,
	1,
	12,
	12,
	12,
	0x15,
	0x22,
	132,
	6,
	6,
	0,
	{ 0, 0, 0, 0, 0, 0, 0, 0 },
	{ 0, 0, 0, 0 },
	true
};

#define NISLANDS_MGCG_SEQUENCE  300

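/*
 * Clock gating register sequences.  Each table is a flat list of
 * (register offset, value, mask) triples; the *_LENGTH macros count entries,
 * which is why they divide by 3 * sizeof(u32).  The sequences are handed to
 * btc_program_mgcg_hw_sequence() (in btc_dpm.c), which is expected to apply
 * each triple as a masked register write, roughly:
 *
 *	WREG32_P(reg, value, ~mask);	// sketch of the consumer, not its
 *					// actual implementation
 */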
static const u32 cayman_cgcg_cgls_default[] =
{
	0x000008f8, 0x00000010, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000011, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000012, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000013, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000014, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000015, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000016, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000017, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000018, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000019, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x0000001a, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x0000001b, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000020, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000021, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000022, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000023, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000024, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000025, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000026, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000027, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000028, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000029, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x0000002a, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x0000002b, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff
};
#define CAYMAN_CGCG_CGLS_DEFAULT_LENGTH sizeof(cayman_cgcg_cgls_default) / (3 * sizeof(u32))

static const u32 cayman_cgcg_cgls_disable[] =
{
	0x000008f8, 0x00000010, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x00000011, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x00000012, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x00000013, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x00000014, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x00000015, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x00000016, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x00000017, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x00000018, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x00000019, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x0000001a, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x0000001b, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x00000020, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000021, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000022, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000023, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000024, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000025, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000026, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000027, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000028, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000029, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x0000002a, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x0000002b, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x00000644, 0x000f7902, 0x001f4180,
	0x00000644, 0x000f3802, 0x001f4180
};
#define CAYMAN_CGCG_CGLS_DISABLE_LENGTH sizeof(cayman_cgcg_cgls_disable) / (3 * sizeof(u32))

static const u32 cayman_cgcg_cgls_enable[] =
{
	0x00000644, 0x000f7882, 0x001f4080,
	0x000008f8, 0x00000010, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000011, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000012, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000013, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000014, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000015, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000016, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000017, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000018, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000019, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x0000001a, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x0000001b, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000020, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x00000021, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x00000022, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x00000023, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x00000024, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x00000025, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x00000026, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x00000027, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x00000028, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x00000029, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x0000002a, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x0000002b, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff
};
#define CAYMAN_CGCG_CGLS_ENABLE_LENGTH  sizeof(cayman_cgcg_cgls_enable) / (3 * sizeof(u32))

static const u32 cayman_mgcg_default[] =
{
	0x0000802c, 0xc0000000, 0xffffffff,
	0x00003fc4, 0xc0000000, 0xffffffff,
	0x00005448, 0x00000100, 0xffffffff,
	0x000055e4, 0x00000100, 0xffffffff,
	0x0000160c, 0x00000100, 0xffffffff,
	0x00008984, 0x06000100, 0xffffffff,
	0x0000c164, 0x00000100, 0xffffffff,
	0x00008a18, 0x00000100, 0xffffffff,
	0x0000897c, 0x06000100, 0xffffffff,
	0x00008b28, 0x00000100, 0xffffffff,
	0x00009144, 0x00800200, 0xffffffff,
	0x00009a60, 0x00000100, 0xffffffff,
	0x00009868, 0x00000100, 0xffffffff,
	0x00008d58, 0x00000100, 0xffffffff,
	0x00009510, 0x00000100, 0xffffffff,
	0x0000949c, 0x00000100, 0xffffffff,
	0x00009654, 0x00000100, 0xffffffff,
	0x00009030, 0x00000100, 0xffffffff,
	0x00009034, 0x00000100, 0xffffffff,
	0x00009038, 0x00000100, 0xffffffff,
	0x0000903c, 0x00000100, 0xffffffff,
	0x00009040, 0x00000100, 0xffffffff,
	0x0000a200, 0x00000100, 0xffffffff,
	0x0000a204, 0x00000100, 0xffffffff,
	0x0000a208, 0x00000100, 0xffffffff,
	0x0000a20c, 0x00000100, 0xffffffff,
	0x00009744, 0x00000100, 0xffffffff,
	0x00003f80, 0x00000100, 0xffffffff,
	0x0000a210, 0x00000100, 0xffffffff,
	0x0000a214, 0x00000100, 0xffffffff,
	0x000004d8, 0x00000100, 0xffffffff,
	0x00009664, 0x00000100, 0xffffffff,
	0x00009698, 0x00000100, 0xffffffff,
	0x000004d4, 0x00000200, 0xffffffff,
	0x000004d0, 0x00000000, 0xffffffff,
	0x000030cc, 0x00000104, 0xffffffff,
	0x0000d0c0, 0x00000100, 0xffffffff,
	0x0000d8c0, 0x00000100, 0xffffffff,
	0x0000802c, 0x40000000, 0xffffffff,
	0x00003fc4, 0x40000000, 0xffffffff,
	0x0000915c, 0x00010000, 0xffffffff,
	0x00009160, 0x00030002, 0xffffffff,
	0x00009164, 0x00050004, 0xffffffff,
	0x00009168, 0x00070006, 0xffffffff,
	0x00009178, 0x00070000, 0xffffffff,
	0x0000917c, 0x00030002, 0xffffffff,
	0x00009180, 0x00050004, 0xffffffff,
	0x0000918c, 0x00010006, 0xffffffff,
	0x00009190, 0x00090008, 0xffffffff,
	0x00009194, 0x00070000, 0xffffffff,
	0x00009198, 0x00030002, 0xffffffff,
	0x0000919c, 0x00050004, 0xffffffff,
	0x000091a8, 0x00010006, 0xffffffff,
	0x000091ac, 0x00090008, 0xffffffff,
	0x000091b0, 0x00070000, 0xffffffff,
	0x000091b4, 0x00030002, 0xffffffff,
	0x000091b8, 0x00050004, 0xffffffff,
	0x000091c4, 0x00010006, 0xffffffff,
	0x000091c8, 0x00090008, 0xffffffff,
	0x000091cc, 0x00070000, 0xffffffff,
	0x000091d0, 0x00030002, 0xffffffff,
	0x000091d4, 0x00050004, 0xffffffff,
	0x000091e0, 0x00010006, 0xffffffff,
	0x000091e4, 0x00090008, 0xffffffff,
	0x000091e8, 0x00000000, 0xffffffff,
	0x000091ec, 0x00070000, 0xffffffff,
	0x000091f0, 0x00030002, 0xffffffff,
	0x000091f4, 0x00050004, 0xffffffff,
	0x00009200, 0x00010006, 0xffffffff,
	0x00009204, 0x00090008, 0xffffffff,
	0x00009208, 0x00070000, 0xffffffff,
	0x0000920c, 0x00030002, 0xffffffff,
	0x00009210, 0x00050004, 0xffffffff,
	0x0000921c, 0x00010006, 0xffffffff,
	0x00009220, 0x00090008, 0xffffffff,
	0x00009224, 0x00070000, 0xffffffff,
	0x00009228, 0x00030002, 0xffffffff,
	0x0000922c, 0x00050004, 0xffffffff,
	0x00009238, 0x00010006, 0xffffffff,
	0x0000923c, 0x00090008, 0xffffffff,
	0x00009240, 0x00070000, 0xffffffff,
	0x00009244, 0x00030002, 0xffffffff,
	0x00009248, 0x00050004, 0xffffffff,
	0x00009254, 0x00010006, 0xffffffff,
	0x00009258, 0x00090008, 0xffffffff,
	0x0000925c, 0x00070000, 0xffffffff,
	0x00009260, 0x00030002, 0xffffffff,
	0x00009264, 0x00050004, 0xffffffff,
	0x00009270, 0x00010006, 0xffffffff,
	0x00009274, 0x00090008, 0xffffffff,
	0x00009278, 0x00070000, 0xffffffff,
	0x0000927c, 0x00030002, 0xffffffff,
	0x00009280, 0x00050004, 0xffffffff,
	0x0000928c, 0x00010006, 0xffffffff,
	0x00009290, 0x00090008, 0xffffffff,
	0x000092a8, 0x00070000, 0xffffffff,
	0x000092ac, 0x00030002, 0xffffffff,
	0x000092b0, 0x00050004, 0xffffffff,
	0x000092bc, 0x00010006, 0xffffffff,
	0x000092c0, 0x00090008, 0xffffffff,
	0x000092c4, 0x00070000, 0xffffffff,
	0x000092c8, 0x00030002, 0xffffffff,
	0x000092cc, 0x00050004, 0xffffffff,
	0x000092d8, 0x00010006, 0xffffffff,
	0x000092dc, 0x00090008, 0xffffffff,
	0x00009294, 0x00000000, 0xffffffff,
	0x0000802c, 0x40010000, 0xffffffff,
	0x00003fc4, 0x40010000, 0xffffffff,
	0x0000915c, 0x00010000, 0xffffffff,
	0x00009160, 0x00030002, 0xffffffff,
	0x00009164, 0x00050004, 0xffffffff,
	0x00009168, 0x00070006, 0xffffffff,
	0x00009178, 0x00070000, 0xffffffff,
	0x0000917c, 0x00030002, 0xffffffff,
	0x00009180, 0x00050004, 0xffffffff,
	0x0000918c, 0x00010006, 0xffffffff,
	0x00009190, 0x00090008, 0xffffffff,
	0x00009194, 0x00070000, 0xffffffff,
	0x00009198, 0x00030002, 0xffffffff,
	0x0000919c, 0x00050004, 0xffffffff,
	0x000091a8, 0x00010006, 0xffffffff,
	0x000091ac, 0x00090008, 0xffffffff,
	0x000091b0, 0x00070000, 0xffffffff,
	0x000091b4, 0x00030002, 0xffffffff,
	0x000091b8, 0x00050004, 0xffffffff,
	0x000091c4, 0x00010006, 0xffffffff,
	0x000091c8, 0x00090008, 0xffffffff,
	0x000091cc, 0x00070000, 0xffffffff,
	0x000091d0, 0x00030002, 0xffffffff,
	0x000091d4, 0x00050004, 0xffffffff,
	0x000091e0, 0x00010006, 0xffffffff,
	0x000091e4, 0x00090008, 0xffffffff,
	0x000091e8, 0x00000000, 0xffffffff,
	0x000091ec, 0x00070000, 0xffffffff,
	0x000091f0, 0x00030002, 0xffffffff,
	0x000091f4, 0x00050004, 0xffffffff,
	0x00009200, 0x00010006, 0xffffffff,
	0x00009204, 0x00090008, 0xffffffff,
	0x00009208, 0x00070000, 0xffffffff,
	0x0000920c, 0x00030002, 0xffffffff,
	0x00009210, 0x00050004, 0xffffffff,
	0x0000921c, 0x00010006, 0xffffffff,
	0x00009220, 0x00090008, 0xffffffff,
	0x00009224, 0x00070000, 0xffffffff,
	0x00009228, 0x00030002, 0xffffffff,
	0x0000922c, 0x00050004, 0xffffffff,
	0x00009238, 0x00010006, 0xffffffff,
	0x0000923c, 0x00090008, 0xffffffff,
	0x00009240, 0x00070000, 0xffffffff,
	0x00009244, 0x00030002, 0xffffffff,
	0x00009248, 0x00050004, 0xffffffff,
	0x00009254, 0x00010006, 0xffffffff,
	0x00009258, 0x00090008, 0xffffffff,
	0x0000925c, 0x00070000, 0xffffffff,
	0x00009260, 0x00030002, 0xffffffff,
	0x00009264, 0x00050004, 0xffffffff,
	0x00009270, 0x00010006, 0xffffffff,
	0x00009274, 0x00090008, 0xffffffff,
	0x00009278, 0x00070000, 0xffffffff,
	0x0000927c, 0x00030002, 0xffffffff,
	0x00009280, 0x00050004, 0xffffffff,
	0x0000928c, 0x00010006, 0xffffffff,
	0x00009290, 0x00090008, 0xffffffff,
	0x000092a8, 0x00070000, 0xffffffff,
	0x000092ac, 0x00030002, 0xffffffff,
	0x000092b0, 0x00050004, 0xffffffff,
	0x000092bc, 0x00010006, 0xffffffff,
	0x000092c0, 0x00090008, 0xffffffff,
	0x000092c4, 0x00070000, 0xffffffff,
	0x000092c8, 0x00030002, 0xffffffff,
	0x000092cc, 0x00050004, 0xffffffff,
	0x000092d8, 0x00010006, 0xffffffff,
	0x000092dc, 0x00090008, 0xffffffff,
	0x00009294, 0x00000000, 0xffffffff,
	0x0000802c, 0xc0000000, 0xffffffff,
	0x00003fc4, 0xc0000000, 0xffffffff,
	0x000008f8, 0x00000010, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000011, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000012, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000013, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000014, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000015, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000016, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000017, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000018, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000019, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x0000001a, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x0000001b, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff
};
#define CAYMAN_MGCG_DEFAULT_LENGTH sizeof(cayman_mgcg_default) / (3 * sizeof(u32))

static const u32 cayman_mgcg_disable[] =
{
	0x0000802c, 0xc0000000, 0xffffffff,
	0x000008f8, 0x00000000, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x00000001, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x00000002, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x000008f8, 0x00000003, 0xffffffff,
	0x000008fc, 0xffffffff, 0xffffffff,
	0x00009150, 0x00600000, 0xffffffff
};
#define CAYMAN_MGCG_DISABLE_LENGTH   sizeof(cayman_mgcg_disable) / (3 * sizeof(u32))

static const u32 cayman_mgcg_enable[] =
{
	0x0000802c, 0xc0000000, 0xffffffff,
	0x000008f8, 0x00000000, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000001, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x000008f8, 0x00000002, 0xffffffff,
	0x000008fc, 0x00600000, 0xffffffff,
	0x000008f8, 0x00000003, 0xffffffff,
	0x000008fc, 0x00000000, 0xffffffff,
	0x00009150, 0x96944200, 0xffffffff
};

#define CAYMAN_MGCG_ENABLE_LENGTH   sizeof(cayman_mgcg_enable) / (3 * sizeof(u32))

#define NISLANDS_SYSLS_SEQUENCE  100

static const u32 cayman_sysls_default[] =
{
	/* Register,   Value,     Mask bits */
	0x000055e8, 0x00000000, 0xffffffff,
	0x0000d0bc, 0x00000000, 0xffffffff,
	0x0000d8bc, 0x00000000, 0xffffffff,
	0x000015c0, 0x000c1401, 0xffffffff,
	0x0000264c, 0x000c0400, 0xffffffff,
	0x00002648, 0x000c0400, 0xffffffff,
	0x00002650, 0x000c0400, 0xffffffff,
	0x000020b8, 0x000c0400, 0xffffffff,
	0x000020bc, 0x000c0400, 0xffffffff,
	0x000020c0, 0x000c0c80, 0xffffffff,
	0x0000f4a0, 0x000000c0, 0xffffffff,
	0x0000f4a4, 0x00680fff, 0xffffffff,
	0x00002f50, 0x00000404, 0xffffffff,
	0x000004c8, 0x00000001, 0xffffffff,
	0x000064ec, 0x00000000, 0xffffffff,
	0x00000c7c, 0x00000000, 0xffffffff,
	0x00008dfc, 0x00000000, 0xffffffff
};
#define CAYMAN_SYSLS_DEFAULT_LENGTH sizeof(cayman_sysls_default) / (3 * sizeof(u32))

static const u32 cayman_sysls_disable[] =
{
	/* Register,   Value,     Mask bits */
	0x0000d0c0, 0x00000000, 0xffffffff,
	0x0000d8c0, 0x00000000, 0xffffffff,
	0x000055e8, 0x00000000, 0xffffffff,
	0x0000d0bc, 0x00000000, 0xffffffff,
	0x0000d8bc, 0x00000000, 0xffffffff,
	0x000015c0, 0x00041401, 0xffffffff,
	0x0000264c, 0x00040400, 0xffffffff,
	0x00002648, 0x00040400, 0xffffffff,
	0x00002650, 0x00040400, 0xffffffff,
	0x000020b8, 0x00040400, 0xffffffff,
	0x000020bc, 0x00040400, 0xffffffff,
	0x000020c0, 0x00040c80, 0xffffffff,
	0x0000f4a0, 0x000000c0, 0xffffffff,
	0x0000f4a4, 0x00680000, 0xffffffff,
	0x00002f50, 0x00000404, 0xffffffff,
	0x000004c8, 0x00000001, 0xffffffff,
	0x000064ec, 0x00007ffd, 0xffffffff,
	0x00000c7c, 0x0000ff00, 0xffffffff,
	0x00008dfc, 0x0000007f, 0xffffffff
};
#define CAYMAN_SYSLS_DISABLE_LENGTH sizeof(cayman_sysls_disable) / (3 * sizeof(u32))

static const u32 cayman_sysls_enable[] =
{
	/* Register,   Value,     Mask bits */
	0x000055e8, 0x00000001, 0xffffffff,
	0x0000d0bc, 0x00000100, 0xffffffff,
	0x0000d8bc, 0x00000100, 0xffffffff,
	0x000015c0, 0x000c1401, 0xffffffff,
	0x0000264c, 0x000c0400, 0xffffffff,
	0x00002648, 0x000c0400, 0xffffffff,
	0x00002650, 0x000c0400, 0xffffffff,
	0x000020b8, 0x000c0400, 0xffffffff,
	0x000020bc, 0x000c0400, 0xffffffff,
	0x000020c0, 0x000c0c80, 0xffffffff,
	0x0000f4a0, 0x000000c0, 0xffffffff,
	0x0000f4a4, 0x00680fff, 0xffffffff,
	0x00002f50, 0x00000903, 0xffffffff,
	0x000004c8, 0x00000000, 0xffffffff,
	0x000064ec, 0x00000000, 0xffffffff,
	0x00000c7c, 0x00000000, 0xffffffff,
	0x00008dfc, 0x00000000, 0xffffffff
};
#define CAYMAN_SYSLS_ENABLE_LENGTH sizeof(cayman_sysls_enable) / (3 * sizeof(u32))

struct rv7xx_power_info *rv770_get_pi(struct radeon_device *rdev);
struct evergreen_power_info *evergreen_get_pi(struct radeon_device *rdev);
struct ni_power_info *ni_get_pi(struct radeon_device *rdev);
struct ni_ps *ni_get_ps(struct radeon_ps *rps);

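/*
 * Accessors for the driver-private power-management state: the ni-specific
 * power info hangs off rdev->pm.dpm.priv and the per-state data off
 * rps->ps_priv.
 */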
struct ni_power_info *ni_get_pi(struct radeon_device *rdev)
{
	struct ni_power_info *pi = rdev->pm.dpm.priv;

	return pi;
}

struct ni_ps *ni_get_ps(struct radeon_ps *rps)
{
	struct ni_ps *ps = rps->ps_priv;

	return ps;
}

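/*
 * Leakage model, evaluated in drm 32.32 fixed point.  Judging by the /1000
 * and *1000 scaling below, the inputs are in milli-units (ileakage in mA,
 * v in mV, t in millidegrees C, coefficients scaled by 1000) and the result
 * is returned in mW:
 *
 *	kt      = at * exp(bt * t)
 *	kv      = av * exp(bv * v)
 *	leakage = ileakage * kt * kv * v
 */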
static void ni_calculate_leakage_for_v_and_t_formula(const struct ni_leakage_coeffients *coeff,
						     u16 v, s32 t,
						     u32 ileakage,
						     u32 *leakage)
{
	s64 kt, kv, leakage_w, i_leakage, vddc, temperature;

	i_leakage = div64_s64(drm_int2fixp(ileakage), 1000);
	vddc = div64_s64(drm_int2fixp(v), 1000);
	temperature = div64_s64(drm_int2fixp(t), 1000);

	kt = drm_fixp_mul(div64_s64(drm_int2fixp(coeff->at), 1000),
			  drm_fixp_exp(drm_fixp_mul(div64_s64(drm_int2fixp(coeff->bt), 1000), temperature)));
	kv = drm_fixp_mul(div64_s64(drm_int2fixp(coeff->av), 1000),
			  drm_fixp_exp(drm_fixp_mul(div64_s64(drm_int2fixp(coeff->bv), 1000), vddc)));

	leakage_w = drm_fixp_mul(drm_fixp_mul(drm_fixp_mul(i_leakage, kt), kv), vddc);

	*leakage = drm_fixp2int(leakage_w * 1000);
}

static void ni_calculate_leakage_for_v_and_t(struct radeon_device *rdev,
					     const struct ni_leakage_coeffients *coeff,
					     u16 v,
					     s32 t,
					     u32 i_leakage,
					     u32 *leakage)
{
	ni_calculate_leakage_for_v_and_t_formula(coeff, v, t, i_leakage, leakage);
}

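/*
 * Memory reclocking is only safe while the displays are in vertical blank,
 * so report the vblank period as "too short" when it cannot hide an MCLK
 * switch.  The 450 threshold (presumably microseconds, the unit
 * r600_dpm_get_vblank_time() appears to use) only applies to GDDR5 boards;
 * the non-GDDR5 limit is intentionally disabled below.
 */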
bool ni_dpm_vblank_too_short(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	u32 vblank_time = r600_dpm_get_vblank_time(rdev);
	/* we never hit the non-gddr5 limit so disable it */
	u32 switch_limit = pi->mem_gddr5 ? 450 : 0;

	if (vblank_time < switch_limit)
		return true;
	else
		return false;
}

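/*
 * Clamp a requested power state to what the platform can actually sustain:
 * cap clocks and voltages at the AC or DC limits, force a single (highest)
 * MCLK/VDDCI across all levels when MCLK switching has to be disabled,
 * keep clocks and voltages non-decreasing from level to level, and apply
 * the btc_* blacklist, dependency and delta rules.  Also records whether
 * the state remains DC-compatible and drops the PCIe gen2 flag on levels
 * whose VDDC is below the gen2 minimum.
 */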
static void ni_apply_state_adjust_rules(struct radeon_device *rdev,
					struct radeon_ps *rps)
{
	struct ni_ps *ps = ni_get_ps(rps);
	struct radeon_clock_and_voltage_limits *max_limits;
	bool disable_mclk_switching;
	u32 mclk;
	u16 vddci;
	int i;

	if ((rdev->pm.dpm.new_active_crtc_count > 1) ||
	    ni_dpm_vblank_too_short(rdev))
		disable_mclk_switching = true;
	else
		disable_mclk_switching = false;

	if (rdev->pm.dpm.ac_power)
		max_limits = &rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac;
	else
		max_limits = &rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc;

	if (rdev->pm.dpm.ac_power == false) {
		for (i = 0; i < ps->performance_level_count; i++) {
			if (ps->performance_levels[i].mclk > max_limits->mclk)
				ps->performance_levels[i].mclk = max_limits->mclk;
			if (ps->performance_levels[i].sclk > max_limits->sclk)
				ps->performance_levels[i].sclk = max_limits->sclk;
			if (ps->performance_levels[i].vddc > max_limits->vddc)
				ps->performance_levels[i].vddc = max_limits->vddc;
			if (ps->performance_levels[i].vddci > max_limits->vddci)
				ps->performance_levels[i].vddci = max_limits->vddci;
		}
	}

	/* XXX validate the min clocks required for display */

	/* adjust low state */
	if (disable_mclk_switching) {
		ps->performance_levels[0].mclk =
			ps->performance_levels[ps->performance_level_count - 1].mclk;
		ps->performance_levels[0].vddci =
			ps->performance_levels[ps->performance_level_count - 1].vddci;
	}

	btc_skip_blacklist_clocks(rdev, max_limits->sclk, max_limits->mclk,
				  &ps->performance_levels[0].sclk,
				  &ps->performance_levels[0].mclk);

	for (i = 1; i < ps->performance_level_count; i++) {
		if (ps->performance_levels[i].sclk < ps->performance_levels[i - 1].sclk)
			ps->performance_levels[i].sclk = ps->performance_levels[i - 1].sclk;
		if (ps->performance_levels[i].vddc < ps->performance_levels[i - 1].vddc)
			ps->performance_levels[i].vddc = ps->performance_levels[i - 1].vddc;
	}

	/* adjust remaining states */
	if (disable_mclk_switching) {
		mclk = ps->performance_levels[0].mclk;
		vddci = ps->performance_levels[0].vddci;
		for (i = 1; i < ps->performance_level_count; i++) {
			if (mclk < ps->performance_levels[i].mclk)
				mclk = ps->performance_levels[i].mclk;
			if (vddci < ps->performance_levels[i].vddci)
				vddci = ps->performance_levels[i].vddci;
		}
		for (i = 0; i < ps->performance_level_count; i++) {
			ps->performance_levels[i].mclk = mclk;
			ps->performance_levels[i].vddci = vddci;
		}
	} else {
		for (i = 1; i < ps->performance_level_count; i++) {
			if (ps->performance_levels[i].mclk < ps->performance_levels[i - 1].mclk)
				ps->performance_levels[i].mclk = ps->performance_levels[i - 1].mclk;
			if (ps->performance_levels[i].vddci < ps->performance_levels[i - 1].vddci)
				ps->performance_levels[i].vddci = ps->performance_levels[i - 1].vddci;
		}
	}

	for (i = 1; i < ps->performance_level_count; i++)
		btc_skip_blacklist_clocks(rdev, max_limits->sclk, max_limits->mclk,
					  &ps->performance_levels[i].sclk,
					  &ps->performance_levels[i].mclk);

	for (i = 0; i < ps->performance_level_count; i++)
		btc_adjust_clock_combinations(rdev, max_limits,
					      &ps->performance_levels[i]);

	for (i = 0; i < ps->performance_level_count; i++) {
		btc_apply_voltage_dependency_rules(&rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk,
						   ps->performance_levels[i].sclk,
						   max_limits->vddc,  &ps->performance_levels[i].vddc);
		btc_apply_voltage_dependency_rules(&rdev->pm.dpm.dyn_state.vddci_dependency_on_mclk,
						   ps->performance_levels[i].mclk,
						   max_limits->vddci, &ps->performance_levels[i].vddci);
		btc_apply_voltage_dependency_rules(&rdev->pm.dpm.dyn_state.vddc_dependency_on_mclk,
						   ps->performance_levels[i].mclk,
						   max_limits->vddc,  &ps->performance_levels[i].vddc);
		btc_apply_voltage_dependency_rules(&rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk,
						   rdev->clock.current_dispclk,
						   max_limits->vddc,  &ps->performance_levels[i].vddc);
	}

	for (i = 0; i < ps->performance_level_count; i++) {
		btc_apply_voltage_delta_rules(rdev,
					      max_limits->vddc, max_limits->vddci,
					      &ps->performance_levels[i].vddc,
					      &ps->performance_levels[i].vddci);
	}

	ps->dc_compatible = true;
	for (i = 0; i < ps->performance_level_count; i++) {
		if (ps->performance_levels[i].vddc > rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc.vddc)
			ps->dc_compatible = false;

		if (ps->performance_levels[i].vddc < rdev->pm.dpm.dyn_state.min_vddc_for_pcie_gen2)
			ps->performance_levels[i].flags &= ~ATOM_PPLIB_R600_FLAGS_PCIEGEN2;
	}
}

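/*
 * The helpers below simply push the register sequences defined at the top
 * of this file (coarse/medium-grain clock gating and system light sleep)
 * through btc_program_mgcg_hw_sequence(), either restoring the defaults or
 * switching between the enable and disable variants.
 */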
static void ni_cg_clockgating_default(struct radeon_device *rdev)
{
	u32 count;
	const u32 *ps = NULL;

	ps = (const u32 *)&cayman_cgcg_cgls_default;
	count = CAYMAN_CGCG_CGLS_DEFAULT_LENGTH;

	btc_program_mgcg_hw_sequence(rdev, ps, count);
}

static void ni_gfx_clockgating_enable(struct radeon_device *rdev,
				      bool enable)
{
	u32 count;
	const u32 *ps = NULL;

	if (enable) {
		ps = (const u32 *)&cayman_cgcg_cgls_enable;
		count = CAYMAN_CGCG_CGLS_ENABLE_LENGTH;
	} else {
		ps = (const u32 *)&cayman_cgcg_cgls_disable;
		count = CAYMAN_CGCG_CGLS_DISABLE_LENGTH;
	}

	btc_program_mgcg_hw_sequence(rdev, ps, count);
}

static void ni_mg_clockgating_default(struct radeon_device *rdev)
{
	u32 count;
	const u32 *ps = NULL;

	ps = (const u32 *)&cayman_mgcg_default;
	count = CAYMAN_MGCG_DEFAULT_LENGTH;

	btc_program_mgcg_hw_sequence(rdev, ps, count);
}

static void ni_mg_clockgating_enable(struct radeon_device *rdev,
				     bool enable)
{
	u32 count;
	const u32 *ps = NULL;

	if (enable) {
		ps = (const u32 *)&cayman_mgcg_enable;
		count = CAYMAN_MGCG_ENABLE_LENGTH;
	} else {
		ps = (const u32 *)&cayman_mgcg_disable;
		count = CAYMAN_MGCG_DISABLE_LENGTH;
	}

	btc_program_mgcg_hw_sequence(rdev, ps, count);
}

static void ni_ls_clockgating_default(struct radeon_device *rdev)
{
	u32 count;
	const u32 *ps = NULL;

	ps = (const u32 *)&cayman_sysls_default;
	count = CAYMAN_SYSLS_DEFAULT_LENGTH;

	btc_program_mgcg_hw_sequence(rdev, ps, count);
}

static void ni_ls_clockgating_enable(struct radeon_device *rdev,
				     bool enable)
{
	u32 count;
	const u32 *ps = NULL;

	if (enable) {
		ps = (const u32 *)&cayman_sysls_enable;
		count = CAYMAN_SYSLS_ENABLE_LENGTH;
	} else {
		ps = (const u32 *)&cayman_sysls_disable;
		count = CAYMAN_SYSLS_DISABLE_LENGTH;
	}

	btc_program_mgcg_hw_sequence(rdev, ps, count);
}

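/*
 * The power-play dependency tables appear to use the placeholder voltage
 * 0xff01 to mean "use the board's maximum VDDC" (a leakage-dependent value
 * determined at init time).  Patch those entries with the real max_vddc
 * before the tables are used, and fail if that value is unknown.
 */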
ni_patch_single_dependency_table_based_on_leakage(struct radeon_device * rdev,struct radeon_clock_voltage_dependency_table * table)98957e252bfSMichael Neumann static int ni_patch_single_dependency_table_based_on_leakage(struct radeon_device *rdev,
99057e252bfSMichael Neumann 							     struct radeon_clock_voltage_dependency_table *table)
99157e252bfSMichael Neumann {
99257e252bfSMichael Neumann 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
99357e252bfSMichael Neumann 	u32 i;
99457e252bfSMichael Neumann 
99557e252bfSMichael Neumann 	if (table) {
99657e252bfSMichael Neumann 		for (i = 0; i < table->count; i++) {
99757e252bfSMichael Neumann 			if (0xff01 == table->entries[i].v) {
99857e252bfSMichael Neumann 				if (pi->max_vddc == 0)
99957e252bfSMichael Neumann 					return -EINVAL;
100057e252bfSMichael Neumann 				table->entries[i].v = pi->max_vddc;
100157e252bfSMichael Neumann 			}
100257e252bfSMichael Neumann 		}
100357e252bfSMichael Neumann 	}
100457e252bfSMichael Neumann 	return 0;
100557e252bfSMichael Neumann }
100657e252bfSMichael Neumann 
100757e252bfSMichael Neumann static int ni_patch_dependency_tables_based_on_leakage(struct radeon_device *rdev)
100857e252bfSMichael Neumann {
100957e252bfSMichael Neumann 	int ret = 0;
101057e252bfSMichael Neumann 
101157e252bfSMichael Neumann 	ret = ni_patch_single_dependency_table_based_on_leakage(rdev,
101257e252bfSMichael Neumann 								&rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk);
	if (ret)
		return ret;
101357e252bfSMichael Neumann 
101457e252bfSMichael Neumann 	ret = ni_patch_single_dependency_table_based_on_leakage(rdev,
101557e252bfSMichael Neumann 								&rdev->pm.dpm.dyn_state.vddc_dependency_on_mclk);
101657e252bfSMichael Neumann 	return ret;
101757e252bfSMichael Neumann }
101857e252bfSMichael Neumann 
101957e252bfSMichael Neumann static void ni_stop_dpm(struct radeon_device *rdev)
102057e252bfSMichael Neumann {
102157e252bfSMichael Neumann 	WREG32_P(GENERAL_PWRMGT, 0, ~GLOBAL_PWRMGT_EN);
102257e252bfSMichael Neumann }
102357e252bfSMichael Neumann 
102457e252bfSMichael Neumann #if 0
102557e252bfSMichael Neumann static int ni_notify_hw_of_power_source(struct radeon_device *rdev,
102657e252bfSMichael Neumann 					bool ac_power)
102757e252bfSMichael Neumann {
102857e252bfSMichael Neumann 	if (ac_power)
102957e252bfSMichael Neumann 		return (rv770_send_msg_to_smc(rdev, PPSMC_MSG_RunningOnAC) == PPSMC_Result_OK) ?
103057e252bfSMichael Neumann 			0 : -EINVAL;
103157e252bfSMichael Neumann 
103257e252bfSMichael Neumann 	return 0;
103357e252bfSMichael Neumann }
103457e252bfSMichael Neumann #endif
103557e252bfSMichael Neumann 
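/*
 * Pass a 32-bit argument to the SMC by writing it to SMC_SCRATCH0 before
 * issuing the message through rv770_send_msg_to_smc().
 */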
103657e252bfSMichael Neumann static PPSMC_Result ni_send_msg_to_smc_with_parameter(struct radeon_device *rdev,
103757e252bfSMichael Neumann 						      PPSMC_Msg msg, u32 parameter)
103857e252bfSMichael Neumann {
103957e252bfSMichael Neumann 	WREG32(SMC_SCRATCH0, parameter);
104057e252bfSMichael Neumann 	return rv770_send_msg_to_smc(rdev, msg);
104157e252bfSMichael Neumann }
104257e252bfSMichael Neumann 
104357e252bfSMichael Neumann static int ni_restrict_performance_levels_before_switch(struct radeon_device *rdev)
104457e252bfSMichael Neumann {
104557e252bfSMichael Neumann 	if (rv770_send_msg_to_smc(rdev, PPSMC_MSG_NoForcedLevel) != PPSMC_Result_OK)
104657e252bfSMichael Neumann 		return -EINVAL;
104757e252bfSMichael Neumann 
104857e252bfSMichael Neumann 	return (ni_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetEnabledLevels, 1) == PPSMC_Result_OK) ?
104957e252bfSMichael Neumann 		0 : -EINVAL;
105057e252bfSMichael Neumann }
105157e252bfSMichael Neumann 
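/*
 * Force a DPM performance level via SMC messages.  HIGH sends
 * SetEnabledLevels(0) followed by SetForcedLevels(1), LOW sends
 * SetForcedLevels(0) followed by SetEnabledLevels(1), and AUTO clears
 * both (SetForcedLevels(0), SetEnabledLevels(0)).  Any other level is
 * simply recorded without sending messages.
 */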
105257e252bfSMichael Neumann int ni_dpm_force_performance_level(struct radeon_device *rdev,
105357e252bfSMichael Neumann 				   enum radeon_dpm_forced_level level)
105457e252bfSMichael Neumann {
105557e252bfSMichael Neumann 	if (level == RADEON_DPM_FORCED_LEVEL_HIGH) {
105657e252bfSMichael Neumann 		if (ni_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetEnabledLevels, 0) != PPSMC_Result_OK)
105757e252bfSMichael Neumann 			return -EINVAL;
105857e252bfSMichael Neumann 
105957e252bfSMichael Neumann 		if (ni_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetForcedLevels, 1) != PPSMC_Result_OK)
106057e252bfSMichael Neumann 			return -EINVAL;
106157e252bfSMichael Neumann 	} else if (level == RADEON_DPM_FORCED_LEVEL_LOW) {
106257e252bfSMichael Neumann 		if (ni_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetForcedLevels, 0) != PPSMC_Result_OK)
106357e252bfSMichael Neumann 			return -EINVAL;
106457e252bfSMichael Neumann 
106557e252bfSMichael Neumann 		if (ni_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetEnabledLevels, 1) != PPSMC_Result_OK)
106657e252bfSMichael Neumann 			return -EINVAL;
106757e252bfSMichael Neumann 	} else if (level == RADEON_DPM_FORCED_LEVEL_AUTO) {
106857e252bfSMichael Neumann 		if (ni_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetForcedLevels, 0) != PPSMC_Result_OK)
106957e252bfSMichael Neumann 			return -EINVAL;
107057e252bfSMichael Neumann 
107157e252bfSMichael Neumann 		if (ni_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetEnabledLevels, 0) != PPSMC_Result_OK)
107257e252bfSMichael Neumann 			return -EINVAL;
107357e252bfSMichael Neumann 	}
107457e252bfSMichael Neumann 
107557e252bfSMichael Neumann 	rdev->pm.dpm.forced_level = level;
107657e252bfSMichael Neumann 
107757e252bfSMichael Neumann 	return 0;
107857e252bfSMichael Neumann }
107957e252bfSMichael Neumann 
108057e252bfSMichael Neumann static void ni_stop_smc(struct radeon_device *rdev)
108157e252bfSMichael Neumann {
108257e252bfSMichael Neumann 	u32 tmp;
108357e252bfSMichael Neumann 	int i;
108457e252bfSMichael Neumann 
108557e252bfSMichael Neumann 	for (i = 0; i < rdev->usec_timeout; i++) {
108657e252bfSMichael Neumann 		tmp = RREG32(LB_SYNC_RESET_SEL) & LB_SYNC_RESET_SEL_MASK;
108757e252bfSMichael Neumann 		if (tmp != 1)
108857e252bfSMichael Neumann 			break;
1089c4ef309bSzrj 		udelay(1);
109057e252bfSMichael Neumann 	}
109157e252bfSMichael Neumann 
1092c4ef309bSzrj 	udelay(100);
109357e252bfSMichael Neumann 
109457e252bfSMichael Neumann 	r7xx_stop_smc(rdev);
109557e252bfSMichael Neumann }
109657e252bfSMichael Neumann 
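/*
 * Read the SMC firmware header and cache the SRAM offsets of the state,
 * soft-register, MC-register, fan, ARB, CAC and SPLL tables in the
 * per-device power-info structures for later uploads.
 */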
109757e252bfSMichael Neumann static int ni_process_firmware_header(struct radeon_device *rdev)
109857e252bfSMichael Neumann {
109957e252bfSMichael Neumann 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
110057e252bfSMichael Neumann 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
110157e252bfSMichael Neumann 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
110257e252bfSMichael Neumann 	u32 tmp;
110357e252bfSMichael Neumann 	int ret;
110457e252bfSMichael Neumann 
110557e252bfSMichael Neumann 	ret = rv770_read_smc_sram_dword(rdev,
110657e252bfSMichael Neumann 					NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
110757e252bfSMichael Neumann 					NISLANDS_SMC_FIRMWARE_HEADER_stateTable,
110857e252bfSMichael Neumann 					&tmp, pi->sram_end);
110957e252bfSMichael Neumann 
111057e252bfSMichael Neumann 	if (ret)
111157e252bfSMichael Neumann 		return ret;
111257e252bfSMichael Neumann 
111357e252bfSMichael Neumann 	pi->state_table_start = (u16)tmp;
111457e252bfSMichael Neumann 
111557e252bfSMichael Neumann 	ret = rv770_read_smc_sram_dword(rdev,
111657e252bfSMichael Neumann 					NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
111757e252bfSMichael Neumann 					NISLANDS_SMC_FIRMWARE_HEADER_softRegisters,
111857e252bfSMichael Neumann 					&tmp, pi->sram_end);
111957e252bfSMichael Neumann 
112057e252bfSMichael Neumann 	if (ret)
112157e252bfSMichael Neumann 		return ret;
112257e252bfSMichael Neumann 
112357e252bfSMichael Neumann 	pi->soft_regs_start = (u16)tmp;
112457e252bfSMichael Neumann 
112557e252bfSMichael Neumann 	ret = rv770_read_smc_sram_dword(rdev,
112657e252bfSMichael Neumann 					NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
112757e252bfSMichael Neumann 					NISLANDS_SMC_FIRMWARE_HEADER_mcRegisterTable,
112857e252bfSMichael Neumann 					&tmp, pi->sram_end);
112957e252bfSMichael Neumann 
113057e252bfSMichael Neumann 	if (ret)
113157e252bfSMichael Neumann 		return ret;
113257e252bfSMichael Neumann 
113357e252bfSMichael Neumann 	eg_pi->mc_reg_table_start = (u16)tmp;
113457e252bfSMichael Neumann 
113557e252bfSMichael Neumann 	ret = rv770_read_smc_sram_dword(rdev,
113657e252bfSMichael Neumann 					NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
113757e252bfSMichael Neumann 					NISLANDS_SMC_FIRMWARE_HEADER_fanTable,
113857e252bfSMichael Neumann 					&tmp, pi->sram_end);
113957e252bfSMichael Neumann 
114057e252bfSMichael Neumann 	if (ret)
114157e252bfSMichael Neumann 		return ret;
114257e252bfSMichael Neumann 
114357e252bfSMichael Neumann 	ni_pi->fan_table_start = (u16)tmp;
114457e252bfSMichael Neumann 
114557e252bfSMichael Neumann 	ret = rv770_read_smc_sram_dword(rdev,
114657e252bfSMichael Neumann 					NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
114757e252bfSMichael Neumann 					NISLANDS_SMC_FIRMWARE_HEADER_mcArbDramAutoRefreshTable,
114857e252bfSMichael Neumann 					&tmp, pi->sram_end);
114957e252bfSMichael Neumann 
115057e252bfSMichael Neumann 	if (ret)
115157e252bfSMichael Neumann 		return ret;
115257e252bfSMichael Neumann 
115357e252bfSMichael Neumann 	ni_pi->arb_table_start = (u16)tmp;
115457e252bfSMichael Neumann 
115557e252bfSMichael Neumann 	ret = rv770_read_smc_sram_dword(rdev,
115657e252bfSMichael Neumann 					NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
115757e252bfSMichael Neumann 					NISLANDS_SMC_FIRMWARE_HEADER_cacTable,
115857e252bfSMichael Neumann 					&tmp, pi->sram_end);
115957e252bfSMichael Neumann 
116057e252bfSMichael Neumann 	if (ret)
116157e252bfSMichael Neumann 		return ret;
116257e252bfSMichael Neumann 
116357e252bfSMichael Neumann 	ni_pi->cac_table_start = (u16)tmp;
116457e252bfSMichael Neumann 
116557e252bfSMichael Neumann 	ret = rv770_read_smc_sram_dword(rdev,
116657e252bfSMichael Neumann 					NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
116757e252bfSMichael Neumann 					NISLANDS_SMC_FIRMWARE_HEADER_spllTable,
116857e252bfSMichael Neumann 					&tmp, pi->sram_end);
116957e252bfSMichael Neumann 
117057e252bfSMichael Neumann 	if (ret)
117157e252bfSMichael Neumann 		return ret;
117257e252bfSMichael Neumann 
117357e252bfSMichael Neumann 	ni_pi->spll_table_start = (u16)tmp;
117457e252bfSMichael Neumann 
117557e252bfSMichael Neumann 
117657e252bfSMichael Neumann 	return ret;
117757e252bfSMichael Neumann }
117857e252bfSMichael Neumann 
117957e252bfSMichael Neumann static void ni_read_clock_registers(struct radeon_device *rdev)
118057e252bfSMichael Neumann {
118157e252bfSMichael Neumann 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
118257e252bfSMichael Neumann 
118357e252bfSMichael Neumann 	ni_pi->clock_registers.cg_spll_func_cntl = RREG32(CG_SPLL_FUNC_CNTL);
118457e252bfSMichael Neumann 	ni_pi->clock_registers.cg_spll_func_cntl_2 = RREG32(CG_SPLL_FUNC_CNTL_2);
118557e252bfSMichael Neumann 	ni_pi->clock_registers.cg_spll_func_cntl_3 = RREG32(CG_SPLL_FUNC_CNTL_3);
118657e252bfSMichael Neumann 	ni_pi->clock_registers.cg_spll_func_cntl_4 = RREG32(CG_SPLL_FUNC_CNTL_4);
118757e252bfSMichael Neumann 	ni_pi->clock_registers.cg_spll_spread_spectrum = RREG32(CG_SPLL_SPREAD_SPECTRUM);
118857e252bfSMichael Neumann 	ni_pi->clock_registers.cg_spll_spread_spectrum_2 = RREG32(CG_SPLL_SPREAD_SPECTRUM_2);
118957e252bfSMichael Neumann 	ni_pi->clock_registers.mpll_ad_func_cntl = RREG32(MPLL_AD_FUNC_CNTL);
119057e252bfSMichael Neumann 	ni_pi->clock_registers.mpll_ad_func_cntl_2 = RREG32(MPLL_AD_FUNC_CNTL_2);
119157e252bfSMichael Neumann 	ni_pi->clock_registers.mpll_dq_func_cntl = RREG32(MPLL_DQ_FUNC_CNTL);
119257e252bfSMichael Neumann 	ni_pi->clock_registers.mpll_dq_func_cntl_2 = RREG32(MPLL_DQ_FUNC_CNTL_2);
119357e252bfSMichael Neumann 	ni_pi->clock_registers.mclk_pwrmgt_cntl = RREG32(MCLK_PWRMGT_CNTL);
119457e252bfSMichael Neumann 	ni_pi->clock_registers.dll_cntl = RREG32(DLL_CNTL);
119557e252bfSMichael Neumann 	ni_pi->clock_registers.mpll_ss1 = RREG32(MPLL_SS1);
119657e252bfSMichael Neumann 	ni_pi->clock_registers.mpll_ss2 = RREG32(MPLL_SS2);
119757e252bfSMichael Neumann }
119857e252bfSMichael Neumann 
119957e252bfSMichael Neumann #if 0
120057e252bfSMichael Neumann static int ni_enter_ulp_state(struct radeon_device *rdev)
120157e252bfSMichael Neumann {
120257e252bfSMichael Neumann 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
120357e252bfSMichael Neumann 
120457e252bfSMichael Neumann 	if (pi->gfx_clock_gating) {
120557e252bfSMichael Neumann 		WREG32_P(SCLK_PWRMGT_CNTL, 0, ~DYN_GFX_CLK_OFF_EN);
120657e252bfSMichael Neumann 		WREG32_P(SCLK_PWRMGT_CNTL, GFX_CLK_FORCE_ON, ~GFX_CLK_FORCE_ON);
120757e252bfSMichael Neumann 		WREG32_P(SCLK_PWRMGT_CNTL, 0, ~GFX_CLK_FORCE_ON);
120857e252bfSMichael Neumann 		RREG32(GB_ADDR_CONFIG);
120957e252bfSMichael Neumann 	}
121057e252bfSMichael Neumann 
121157e252bfSMichael Neumann 	WREG32_P(SMC_MSG, HOST_SMC_MSG(PPSMC_MSG_SwitchToMinimumPower),
121257e252bfSMichael Neumann 		 ~HOST_SMC_MSG_MASK);
121357e252bfSMichael Neumann 
1214c4ef309bSzrj 	udelay(25000);
121557e252bfSMichael Neumann 
121657e252bfSMichael Neumann 	return 0;
121757e252bfSMichael Neumann }
121857e252bfSMichael Neumann #endif
121957e252bfSMichael Neumann 
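/*
 * Program SMC soft registers with response and timeout delays.  The
 * delays are derived from the reference clock returned by
 * radeon_get_xclk(); voltage and backbias response times default to
 * 1000 when unset, and the mclk switch limit is (460 * refclk) / 100.
 */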
122057e252bfSMichael Neumann static void ni_program_response_times(struct radeon_device *rdev)
122157e252bfSMichael Neumann {
122257e252bfSMichael Neumann 	u32 voltage_response_time, backbias_response_time, acpi_delay_time, vbi_time_out;
122357e252bfSMichael Neumann 	u32 vddc_dly, bb_dly, acpi_dly, vbi_dly, mclk_switch_limit;
122457e252bfSMichael Neumann 	u32 reference_clock;
122557e252bfSMichael Neumann 
122657e252bfSMichael Neumann 	rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_mvdd_chg_time, 1);
122757e252bfSMichael Neumann 
122857e252bfSMichael Neumann 	voltage_response_time = (u32)rdev->pm.dpm.voltage_response_time;
122957e252bfSMichael Neumann 	backbias_response_time = (u32)rdev->pm.dpm.backbias_response_time;
123057e252bfSMichael Neumann 
123157e252bfSMichael Neumann 	if (voltage_response_time == 0)
123257e252bfSMichael Neumann 		voltage_response_time = 1000;
123357e252bfSMichael Neumann 
123457e252bfSMichael Neumann 	if (backbias_response_time == 0)
123557e252bfSMichael Neumann 		backbias_response_time = 1000;
123657e252bfSMichael Neumann 
123757e252bfSMichael Neumann 	acpi_delay_time = 15000;
123857e252bfSMichael Neumann 	vbi_time_out = 100000;
123957e252bfSMichael Neumann 
124057e252bfSMichael Neumann 	reference_clock = radeon_get_xclk(rdev);
124157e252bfSMichael Neumann 
124257e252bfSMichael Neumann 	vddc_dly = (voltage_response_time  * reference_clock) / 1600;
124357e252bfSMichael Neumann 	bb_dly   = (backbias_response_time * reference_clock) / 1600;
124457e252bfSMichael Neumann 	acpi_dly = (acpi_delay_time * reference_clock) / 1600;
124557e252bfSMichael Neumann 	vbi_dly  = (vbi_time_out * reference_clock) / 1600;
124657e252bfSMichael Neumann 
124757e252bfSMichael Neumann 	mclk_switch_limit = (460 * reference_clock) / 100;
124857e252bfSMichael Neumann 
124957e252bfSMichael Neumann 	rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_delay_vreg,  vddc_dly);
125057e252bfSMichael Neumann 	rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_delay_bbias, bb_dly);
125157e252bfSMichael Neumann 	rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_delay_acpi,  acpi_dly);
125257e252bfSMichael Neumann 	rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_mclk_chg_timeout, vbi_dly);
125357e252bfSMichael Neumann 	rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_mc_block_delay, 0xAA);
125457e252bfSMichael Neumann 	rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_mclk_switch_lim, mclk_switch_limit);
125557e252bfSMichael Neumann }
125657e252bfSMichael Neumann 
125757e252bfSMichael Neumann static void ni_populate_smc_voltage_table(struct radeon_device *rdev,
125857e252bfSMichael Neumann 					  struct atom_voltage_table *voltage_table,
125957e252bfSMichael Neumann 					  NISLANDS_SMC_STATETABLE *table)
126057e252bfSMichael Neumann {
126157e252bfSMichael Neumann 	unsigned int i;
126257e252bfSMichael Neumann 
126357e252bfSMichael Neumann 	for (i = 0; i < voltage_table->count; i++) {
126457e252bfSMichael Neumann 		table->highSMIO[i] = 0;
126557e252bfSMichael Neumann 		table->lowSMIO[i] |= cpu_to_be32(voltage_table->entries[i].smio_low);
126657e252bfSMichael Neumann 	}
126757e252bfSMichael Neumann }
126857e252bfSMichael Neumann 
126957e252bfSMichael Neumann static void ni_populate_smc_voltage_tables(struct radeon_device *rdev,
127057e252bfSMichael Neumann 					   NISLANDS_SMC_STATETABLE *table)
127157e252bfSMichael Neumann {
127257e252bfSMichael Neumann 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
127357e252bfSMichael Neumann 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
127457e252bfSMichael Neumann 	unsigned char i;
127557e252bfSMichael Neumann 
127657e252bfSMichael Neumann 	if (eg_pi->vddc_voltage_table.count) {
127757e252bfSMichael Neumann 		ni_populate_smc_voltage_table(rdev, &eg_pi->vddc_voltage_table, table);
127857e252bfSMichael Neumann 		table->voltageMaskTable.highMask[NISLANDS_SMC_VOLTAGEMASK_VDDC] = 0;
127957e252bfSMichael Neumann 		table->voltageMaskTable.lowMask[NISLANDS_SMC_VOLTAGEMASK_VDDC] =
128057e252bfSMichael Neumann 			cpu_to_be32(eg_pi->vddc_voltage_table.mask_low);
128157e252bfSMichael Neumann 
128257e252bfSMichael Neumann 		for (i = 0; i < eg_pi->vddc_voltage_table.count; i++) {
128357e252bfSMichael Neumann 			if (pi->max_vddc_in_table <= eg_pi->vddc_voltage_table.entries[i].value) {
128457e252bfSMichael Neumann 				table->maxVDDCIndexInPPTable = i;
128557e252bfSMichael Neumann 				break;
128657e252bfSMichael Neumann 			}
128757e252bfSMichael Neumann 		}
128857e252bfSMichael Neumann 	}
128957e252bfSMichael Neumann 
129057e252bfSMichael Neumann 	if (eg_pi->vddci_voltage_table.count) {
129157e252bfSMichael Neumann 		ni_populate_smc_voltage_table(rdev, &eg_pi->vddci_voltage_table, table);
129257e252bfSMichael Neumann 
129357e252bfSMichael Neumann 		table->voltageMaskTable.highMask[NISLANDS_SMC_VOLTAGEMASK_VDDCI] = 0;
129457e252bfSMichael Neumann 		table->voltageMaskTable.lowMask[NISLANDS_SMC_VOLTAGEMASK_VDDCI] =
1295c6f73aabSFrançois Tigeot 			cpu_to_be32(eg_pi->vddci_voltage_table.mask_low);
129657e252bfSMichael Neumann 	}
129757e252bfSMichael Neumann }
129857e252bfSMichael Neumann 
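/*
 * Select the first voltage-table entry whose value is >= the requested
 * voltage and fill in its index and (big-endian) value for the SMC;
 * return -EINVAL if the request exceeds every entry in the table.
 */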
129957e252bfSMichael Neumann static int ni_populate_voltage_value(struct radeon_device *rdev,
130057e252bfSMichael Neumann 				     struct atom_voltage_table *table,
130157e252bfSMichael Neumann 				     u16 value,
130257e252bfSMichael Neumann 				     NISLANDS_SMC_VOLTAGE_VALUE *voltage)
130357e252bfSMichael Neumann {
130457e252bfSMichael Neumann 	unsigned int i;
130557e252bfSMichael Neumann 
130657e252bfSMichael Neumann 	for (i = 0; i < table->count; i++) {
130757e252bfSMichael Neumann 		if (value <= table->entries[i].value) {
130857e252bfSMichael Neumann 			voltage->index = (u8)i;
130957e252bfSMichael Neumann 			voltage->value = cpu_to_be16(table->entries[i].value);
131057e252bfSMichael Neumann 			break;
131157e252bfSMichael Neumann 		}
131257e252bfSMichael Neumann 	}
131357e252bfSMichael Neumann 
131457e252bfSMichael Neumann 	if (i >= table->count)
131557e252bfSMichael Neumann 		return -EINVAL;
131657e252bfSMichael Neumann 
131757e252bfSMichael Neumann 	return 0;
131857e252bfSMichael Neumann }
131957e252bfSMichael Neumann 
132057e252bfSMichael Neumann static void ni_populate_mvdd_value(struct radeon_device *rdev,
132157e252bfSMichael Neumann 				   u32 mclk,
132257e252bfSMichael Neumann 				   NISLANDS_SMC_VOLTAGE_VALUE *voltage)
132357e252bfSMichael Neumann {
132457e252bfSMichael Neumann 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
132557e252bfSMichael Neumann 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
132657e252bfSMichael Neumann 
132757e252bfSMichael Neumann 	if (!pi->mvdd_control) {
132857e252bfSMichael Neumann 		voltage->index = eg_pi->mvdd_high_index;
132957e252bfSMichael Neumann 		voltage->value = cpu_to_be16(MVDD_HIGH_VALUE);
133057e252bfSMichael Neumann 		return;
133157e252bfSMichael Neumann 	}
133257e252bfSMichael Neumann 
133357e252bfSMichael Neumann 	if (mclk <= pi->mvdd_split_frequency) {
133457e252bfSMichael Neumann 		voltage->index = eg_pi->mvdd_low_index;
133557e252bfSMichael Neumann 		voltage->value = cpu_to_be16(MVDD_LOW_VALUE);
133657e252bfSMichael Neumann 	} else {
133757e252bfSMichael Neumann 		voltage->index = eg_pi->mvdd_high_index;
133857e252bfSMichael Neumann 		voltage->value = cpu_to_be16(MVDD_HIGH_VALUE);
133957e252bfSMichael Neumann 	}
134057e252bfSMichael Neumann }
134157e252bfSMichael Neumann 
134257e252bfSMichael Neumann static int ni_get_std_voltage_value(struct radeon_device *rdev,
134357e252bfSMichael Neumann 				    NISLANDS_SMC_VOLTAGE_VALUE *voltage,
134457e252bfSMichael Neumann 				    u16 *std_voltage)
134557e252bfSMichael Neumann {
134657e252bfSMichael Neumann 	if (rdev->pm.dpm.dyn_state.cac_leakage_table.entries &&
134757e252bfSMichael Neumann 	    ((u32)voltage->index < rdev->pm.dpm.dyn_state.cac_leakage_table.count))
134857e252bfSMichael Neumann 		*std_voltage = rdev->pm.dpm.dyn_state.cac_leakage_table.entries[voltage->index].vddc;
134957e252bfSMichael Neumann 	else
135057e252bfSMichael Neumann 		*std_voltage = be16_to_cpu(voltage->value);
135157e252bfSMichael Neumann 
135257e252bfSMichael Neumann 	return 0;
135357e252bfSMichael Neumann }
135457e252bfSMichael Neumann 
135557e252bfSMichael Neumann static void ni_populate_std_voltage_value(struct radeon_device *rdev,
135657e252bfSMichael Neumann 					  u16 value, u8 index,
135757e252bfSMichael Neumann 					  NISLANDS_SMC_VOLTAGE_VALUE *voltage)
135857e252bfSMichael Neumann {
135957e252bfSMichael Neumann 	voltage->index = index;
136057e252bfSMichael Neumann 	voltage->value = cpu_to_be16(value);
136157e252bfSMichael Neumann }
136257e252bfSMichael Neumann 
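/*
 * SMC power scaling: the factor is the TID_CNT field of CG_CAC_CTRL
 * multiplied by an xclk-derived period (1000000000 / xclk, then / 10000);
 * scaled power values are shifted left by two before being sent to the SMC.
 */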
136357e252bfSMichael Neumann static u32 ni_get_smc_power_scaling_factor(struct radeon_device *rdev)
136457e252bfSMichael Neumann {
136557e252bfSMichael Neumann 	u32 xclk_period;
136657e252bfSMichael Neumann 	u32 xclk = radeon_get_xclk(rdev);
136757e252bfSMichael Neumann 	u32 tmp = RREG32(CG_CAC_CTRL) & TID_CNT_MASK;
136857e252bfSMichael Neumann 
136957e252bfSMichael Neumann 	xclk_period = (1000000000UL / xclk);
137057e252bfSMichael Neumann 	xclk_period /= 10000UL;
137157e252bfSMichael Neumann 
137257e252bfSMichael Neumann 	return tmp * xclk_period;
137357e252bfSMichael Neumann }
137457e252bfSMichael Neumann 
137557e252bfSMichael Neumann static u32 ni_scale_power_for_smc(u32 power_in_watts, u32 scaling_factor)
137657e252bfSMichael Neumann {
137757e252bfSMichael Neumann 	return (power_in_watts * scaling_factor) << 2;
137857e252bfSMichael Neumann }
137957e252bfSMichael Neumann 
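/*
 * When power containment and the boost limit are enabled, the boost limit
 * is near_tdp_limit scaled by the ratio of the squared standard VDDC of
 * the second-highest performance level to that of the highest level,
 * times 90%.  Fewer than three levels, any voltage lookup failure, or a
 * result that overflows 32 bits yields a boost limit of 0.
 */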
138057e252bfSMichael Neumann static u32 ni_calculate_power_boost_limit(struct radeon_device *rdev,
138157e252bfSMichael Neumann 					  struct radeon_ps *radeon_state,
138257e252bfSMichael Neumann 					  u32 near_tdp_limit)
138357e252bfSMichael Neumann {
138457e252bfSMichael Neumann 	struct ni_ps *state = ni_get_ps(radeon_state);
138557e252bfSMichael Neumann 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
138657e252bfSMichael Neumann 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
138757e252bfSMichael Neumann 	u32 power_boost_limit = 0;
138857e252bfSMichael Neumann 	int ret;
138957e252bfSMichael Neumann 
139057e252bfSMichael Neumann 	if (ni_pi->enable_power_containment &&
139157e252bfSMichael Neumann 	    ni_pi->use_power_boost_limit) {
139257e252bfSMichael Neumann 		NISLANDS_SMC_VOLTAGE_VALUE vddc;
139357e252bfSMichael Neumann 		u16 std_vddc_med;
139457e252bfSMichael Neumann 		u16 std_vddc_high;
139557e252bfSMichael Neumann 		u64 tmp, n, d;
139657e252bfSMichael Neumann 
139757e252bfSMichael Neumann 		if (state->performance_level_count < 3)
139857e252bfSMichael Neumann 			return 0;
139957e252bfSMichael Neumann 
140057e252bfSMichael Neumann 		ret = ni_populate_voltage_value(rdev, &eg_pi->vddc_voltage_table,
140157e252bfSMichael Neumann 						state->performance_levels[state->performance_level_count - 2].vddc,
140257e252bfSMichael Neumann 						&vddc);
140357e252bfSMichael Neumann 		if (ret)
140457e252bfSMichael Neumann 			return 0;
140557e252bfSMichael Neumann 
140657e252bfSMichael Neumann 		ret = ni_get_std_voltage_value(rdev, &vddc, &std_vddc_med);
140757e252bfSMichael Neumann 		if (ret)
140857e252bfSMichael Neumann 			return 0;
140957e252bfSMichael Neumann 
141057e252bfSMichael Neumann 		ret = ni_populate_voltage_value(rdev, &eg_pi->vddc_voltage_table,
141157e252bfSMichael Neumann 						state->performance_levels[state->performance_level_count - 1].vddc,
141257e252bfSMichael Neumann 						&vddc);
141357e252bfSMichael Neumann 		if (ret)
141457e252bfSMichael Neumann 			return 0;
141557e252bfSMichael Neumann 
141657e252bfSMichael Neumann 		ret = ni_get_std_voltage_value(rdev, &vddc, &std_vddc_high);
141757e252bfSMichael Neumann 		if (ret)
141857e252bfSMichael Neumann 			return 0;
141957e252bfSMichael Neumann 
142057e252bfSMichael Neumann 		n = ((u64)near_tdp_limit * ((u64)std_vddc_med * (u64)std_vddc_med) * 90);
142157e252bfSMichael Neumann 		d = ((u64)std_vddc_high * (u64)std_vddc_high * 100);
142257e252bfSMichael Neumann 		tmp = div64_u64(n, d);
142357e252bfSMichael Neumann 
142457e252bfSMichael Neumann 		if (tmp >> 32)
142557e252bfSMichael Neumann 			return 0;
142657e252bfSMichael Neumann 		power_boost_limit = (u32)tmp;
142757e252bfSMichael Neumann 	}
142857e252bfSMichael Neumann 
142957e252bfSMichael Neumann 	return power_boost_limit;
143057e252bfSMichael Neumann }
143157e252bfSMichael Neumann 
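/*
 * Adjust the TDP limit by tdp_adjustment percent (raising or lowering it
 * depending on adjust_polarity) and shift near_tdp_limit by the same
 * absolute delta; adjustments larger than tdp_od_limit are rejected.
 */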
143257e252bfSMichael Neumann static int ni_calculate_adjusted_tdp_limits(struct radeon_device *rdev,
143357e252bfSMichael Neumann 					    bool adjust_polarity,
143457e252bfSMichael Neumann 					    u32 tdp_adjustment,
143557e252bfSMichael Neumann 					    u32 *tdp_limit,
143657e252bfSMichael Neumann 					    u32 *near_tdp_limit)
143757e252bfSMichael Neumann {
143857e252bfSMichael Neumann 	if (tdp_adjustment > (u32)rdev->pm.dpm.tdp_od_limit)
143957e252bfSMichael Neumann 		return -EINVAL;
144057e252bfSMichael Neumann 
144157e252bfSMichael Neumann 	if (adjust_polarity) {
144257e252bfSMichael Neumann 		*tdp_limit = ((100 + tdp_adjustment) * rdev->pm.dpm.tdp_limit) / 100;
144357e252bfSMichael Neumann 		*near_tdp_limit = rdev->pm.dpm.near_tdp_limit + (*tdp_limit - rdev->pm.dpm.tdp_limit);
144457e252bfSMichael Neumann 	} else {
144557e252bfSMichael Neumann 		*tdp_limit = ((100 - tdp_adjustment) * rdev->pm.dpm.tdp_limit) / 100;
144657e252bfSMichael Neumann 		*near_tdp_limit = rdev->pm.dpm.near_tdp_limit - (rdev->pm.dpm.tdp_limit - *tdp_limit);
144757e252bfSMichael Neumann 	}
144857e252bfSMichael Neumann 
144957e252bfSMichael Neumann 	return 0;
145057e252bfSMichael Neumann }
145157e252bfSMichael Neumann 
145257e252bfSMichael Neumann static int ni_populate_smc_tdp_limits(struct radeon_device *rdev,
145357e252bfSMichael Neumann 				      struct radeon_ps *radeon_state)
145457e252bfSMichael Neumann {
145557e252bfSMichael Neumann 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
145657e252bfSMichael Neumann 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
145757e252bfSMichael Neumann 
145857e252bfSMichael Neumann 	if (ni_pi->enable_power_containment) {
145957e252bfSMichael Neumann 		NISLANDS_SMC_STATETABLE *smc_table = &ni_pi->smc_statetable;
146057e252bfSMichael Neumann 		u32 scaling_factor = ni_get_smc_power_scaling_factor(rdev);
146157e252bfSMichael Neumann 		u32 tdp_limit;
146257e252bfSMichael Neumann 		u32 near_tdp_limit;
146357e252bfSMichael Neumann 		u32 power_boost_limit;
146457e252bfSMichael Neumann 		int ret;
146557e252bfSMichael Neumann 
146657e252bfSMichael Neumann 		if (scaling_factor == 0)
146757e252bfSMichael Neumann 			return -EINVAL;
146857e252bfSMichael Neumann 
146957e252bfSMichael Neumann 		memset(smc_table, 0, sizeof(NISLANDS_SMC_STATETABLE));
147057e252bfSMichael Neumann 
147157e252bfSMichael Neumann 		ret = ni_calculate_adjusted_tdp_limits(rdev,
147257e252bfSMichael Neumann 						       false, /* ??? */
147357e252bfSMichael Neumann 						       rdev->pm.dpm.tdp_adjustment,
147457e252bfSMichael Neumann 						       &tdp_limit,
147557e252bfSMichael Neumann 						       &near_tdp_limit);
147657e252bfSMichael Neumann 		if (ret)
147757e252bfSMichael Neumann 			return ret;
147857e252bfSMichael Neumann 
147957e252bfSMichael Neumann 		power_boost_limit = ni_calculate_power_boost_limit(rdev, radeon_state,
148057e252bfSMichael Neumann 								   near_tdp_limit);
148157e252bfSMichael Neumann 
148257e252bfSMichael Neumann 		smc_table->dpm2Params.TDPLimit =
148357e252bfSMichael Neumann 			cpu_to_be32(ni_scale_power_for_smc(tdp_limit, scaling_factor));
148457e252bfSMichael Neumann 		smc_table->dpm2Params.NearTDPLimit =
148557e252bfSMichael Neumann 			cpu_to_be32(ni_scale_power_for_smc(near_tdp_limit, scaling_factor));
148657e252bfSMichael Neumann 		smc_table->dpm2Params.SafePowerLimit =
148757e252bfSMichael Neumann 			cpu_to_be32(ni_scale_power_for_smc((near_tdp_limit * NISLANDS_DPM2_TDP_SAFE_LIMIT_PERCENT) / 100,
148857e252bfSMichael Neumann 							   scaling_factor));
148957e252bfSMichael Neumann 		smc_table->dpm2Params.PowerBoostLimit =
149057e252bfSMichael Neumann 			cpu_to_be32(ni_scale_power_for_smc(power_boost_limit, scaling_factor));
149157e252bfSMichael Neumann 
149257e252bfSMichael Neumann 		ret = rv770_copy_bytes_to_smc(rdev,
149357e252bfSMichael Neumann 					      (u16)(pi->state_table_start + offsetof(NISLANDS_SMC_STATETABLE, dpm2Params) +
149457e252bfSMichael Neumann 						    offsetof(PP_NIslands_DPM2Parameters, TDPLimit)),
149557e252bfSMichael Neumann 					      (u8 *)(&smc_table->dpm2Params.TDPLimit),
149657e252bfSMichael Neumann 					      sizeof(u32) * 4, pi->sram_end);
149757e252bfSMichael Neumann 		if (ret)
149857e252bfSMichael Neumann 			return ret;
149957e252bfSMichael Neumann 	}
150057e252bfSMichael Neumann 
150157e252bfSMichael Neumann 	return 0;
150257e252bfSMichael Neumann }
150357e252bfSMichael Neumann 
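/*
 * Copy the DRAM timing registers and the per-state burst time from the
 * source MC arbitration set (F0-F3) to the destination set, then request
 * a switch to the destination set through MC_CG_CONFIG/MC_ARB_CG.
 */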
150457e252bfSMichael Neumann int ni_copy_and_switch_arb_sets(struct radeon_device *rdev,
150557e252bfSMichael Neumann 				u32 arb_freq_src, u32 arb_freq_dest)
150657e252bfSMichael Neumann {
150757e252bfSMichael Neumann 	u32 mc_arb_dram_timing;
150857e252bfSMichael Neumann 	u32 mc_arb_dram_timing2;
150957e252bfSMichael Neumann 	u32 burst_time;
151057e252bfSMichael Neumann 	u32 mc_cg_config;
151157e252bfSMichael Neumann 
151257e252bfSMichael Neumann 	switch (arb_freq_src) {
151357e252bfSMichael Neumann 	case MC_CG_ARB_FREQ_F0:
151457e252bfSMichael Neumann 		mc_arb_dram_timing  = RREG32(MC_ARB_DRAM_TIMING);
151557e252bfSMichael Neumann 		mc_arb_dram_timing2 = RREG32(MC_ARB_DRAM_TIMING2);
151657e252bfSMichael Neumann 		burst_time = (RREG32(MC_ARB_BURST_TIME) & STATE0_MASK) >> STATE0_SHIFT;
151757e252bfSMichael Neumann 		break;
151857e252bfSMichael Neumann 	case MC_CG_ARB_FREQ_F1:
151957e252bfSMichael Neumann 		mc_arb_dram_timing  = RREG32(MC_ARB_DRAM_TIMING_1);
152057e252bfSMichael Neumann 		mc_arb_dram_timing2 = RREG32(MC_ARB_DRAM_TIMING2_1);
152157e252bfSMichael Neumann 		burst_time = (RREG32(MC_ARB_BURST_TIME) & STATE1_MASK) >> STATE1_SHIFT;
152257e252bfSMichael Neumann 		break;
152357e252bfSMichael Neumann 	case MC_CG_ARB_FREQ_F2:
152457e252bfSMichael Neumann 		mc_arb_dram_timing  = RREG32(MC_ARB_DRAM_TIMING_2);
152557e252bfSMichael Neumann 		mc_arb_dram_timing2 = RREG32(MC_ARB_DRAM_TIMING2_2);
152657e252bfSMichael Neumann 		burst_time = (RREG32(MC_ARB_BURST_TIME) & STATE2_MASK) >> STATE2_SHIFT;
152757e252bfSMichael Neumann 		break;
152857e252bfSMichael Neumann 	case MC_CG_ARB_FREQ_F3:
152957e252bfSMichael Neumann 		mc_arb_dram_timing  = RREG32(MC_ARB_DRAM_TIMING_3);
153057e252bfSMichael Neumann 		mc_arb_dram_timing2 = RREG32(MC_ARB_DRAM_TIMING2_3);
153157e252bfSMichael Neumann 		burst_time = (RREG32(MC_ARB_BURST_TIME) & STATE3_MASK) >> STATE3_SHIFT;
153257e252bfSMichael Neumann 		break;
153357e252bfSMichael Neumann 	default:
153457e252bfSMichael Neumann 		return -EINVAL;
153557e252bfSMichael Neumann 	}
153657e252bfSMichael Neumann 
153757e252bfSMichael Neumann 	switch (arb_freq_dest) {
153857e252bfSMichael Neumann 	case MC_CG_ARB_FREQ_F0:
153957e252bfSMichael Neumann 		WREG32(MC_ARB_DRAM_TIMING, mc_arb_dram_timing);
154057e252bfSMichael Neumann 		WREG32(MC_ARB_DRAM_TIMING2, mc_arb_dram_timing2);
154157e252bfSMichael Neumann 		WREG32_P(MC_ARB_BURST_TIME, STATE0(burst_time), ~STATE0_MASK);
154257e252bfSMichael Neumann 		break;
154357e252bfSMichael Neumann 	case MC_CG_ARB_FREQ_F1:
154457e252bfSMichael Neumann 		WREG32(MC_ARB_DRAM_TIMING_1, mc_arb_dram_timing);
154557e252bfSMichael Neumann 		WREG32(MC_ARB_DRAM_TIMING2_1, mc_arb_dram_timing2);
154657e252bfSMichael Neumann 		WREG32_P(MC_ARB_BURST_TIME, STATE1(burst_time), ~STATE1_MASK);
154757e252bfSMichael Neumann 		break;
154857e252bfSMichael Neumann 	case MC_CG_ARB_FREQ_F2:
154957e252bfSMichael Neumann 		WREG32(MC_ARB_DRAM_TIMING_2, mc_arb_dram_timing);
155057e252bfSMichael Neumann 		WREG32(MC_ARB_DRAM_TIMING2_2, mc_arb_dram_timing2);
155157e252bfSMichael Neumann 		WREG32_P(MC_ARB_BURST_TIME, STATE2(burst_time), ~STATE2_MASK);
155257e252bfSMichael Neumann 		break;
155357e252bfSMichael Neumann 	case MC_CG_ARB_FREQ_F3:
155457e252bfSMichael Neumann 		WREG32(MC_ARB_DRAM_TIMING_3, mc_arb_dram_timing);
155557e252bfSMichael Neumann 		WREG32(MC_ARB_DRAM_TIMING2_3, mc_arb_dram_timing2);
155657e252bfSMichael Neumann 		WREG32_P(MC_ARB_BURST_TIME, STATE3(burst_time), ~STATE3_MASK);
155757e252bfSMichael Neumann 		break;
155857e252bfSMichael Neumann 	default:
155957e252bfSMichael Neumann 		return -EINVAL;
156057e252bfSMichael Neumann 	}
156157e252bfSMichael Neumann 
156257e252bfSMichael Neumann 	mc_cg_config = RREG32(MC_CG_CONFIG) | 0x0000000F;
156357e252bfSMichael Neumann 	WREG32(MC_CG_CONFIG, mc_cg_config);
156457e252bfSMichael Neumann 	WREG32_P(MC_ARB_CG, CG_ARB_REQ(arb_freq_dest), ~CG_ARB_REQ_MASK);
156557e252bfSMichael Neumann 
156657e252bfSMichael Neumann 	return 0;
156757e252bfSMichael Neumann }
156857e252bfSMichael Neumann 
156957e252bfSMichael Neumann static int ni_init_arb_table_index(struct radeon_device *rdev)
157057e252bfSMichael Neumann {
157157e252bfSMichael Neumann 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
157257e252bfSMichael Neumann 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
157357e252bfSMichael Neumann 	u32 tmp;
157457e252bfSMichael Neumann 	int ret;
157557e252bfSMichael Neumann 
157657e252bfSMichael Neumann 	ret = rv770_read_smc_sram_dword(rdev, ni_pi->arb_table_start,
157757e252bfSMichael Neumann 					&tmp, pi->sram_end);
157857e252bfSMichael Neumann 	if (ret)
157957e252bfSMichael Neumann 		return ret;
158057e252bfSMichael Neumann 
158157e252bfSMichael Neumann 	tmp &= 0x00FFFFFF;
158257e252bfSMichael Neumann 	tmp |= ((u32)MC_CG_ARB_FREQ_F1) << 24;
158357e252bfSMichael Neumann 
158457e252bfSMichael Neumann 	return rv770_write_smc_sram_dword(rdev, ni_pi->arb_table_start,
158557e252bfSMichael Neumann 					  tmp, pi->sram_end);
158657e252bfSMichael Neumann }
158757e252bfSMichael Neumann 
158857e252bfSMichael Neumann static int ni_initial_switch_from_arb_f0_to_f1(struct radeon_device *rdev)
158957e252bfSMichael Neumann {
159057e252bfSMichael Neumann 	return ni_copy_and_switch_arb_sets(rdev, MC_CG_ARB_FREQ_F0, MC_CG_ARB_FREQ_F1);
159157e252bfSMichael Neumann }
159257e252bfSMichael Neumann 
159357e252bfSMichael Neumann static int ni_force_switch_to_arb_f0(struct radeon_device *rdev)
159457e252bfSMichael Neumann {
159557e252bfSMichael Neumann 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
159657e252bfSMichael Neumann 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
159757e252bfSMichael Neumann 	u32 tmp;
159857e252bfSMichael Neumann 	int ret;
159957e252bfSMichael Neumann 
160057e252bfSMichael Neumann 	ret = rv770_read_smc_sram_dword(rdev, ni_pi->arb_table_start,
160157e252bfSMichael Neumann 					&tmp, pi->sram_end);
160257e252bfSMichael Neumann 	if (ret)
160357e252bfSMichael Neumann 		return ret;
160457e252bfSMichael Neumann 
160557e252bfSMichael Neumann 	tmp = (tmp >> 24) & 0xff;
160657e252bfSMichael Neumann 
160757e252bfSMichael Neumann 	if (tmp == MC_CG_ARB_FREQ_F0)
160857e252bfSMichael Neumann 		return 0;
160957e252bfSMichael Neumann 
161057e252bfSMichael Neumann 	return ni_copy_and_switch_arb_sets(rdev, tmp, MC_CG_ARB_FREQ_F0);
161157e252bfSMichael Neumann }
161257e252bfSMichael Neumann 
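/*
 * Build one SMC ARB register set for a performance level: program the
 * refresh rate derived from sclk, let the ATOM BIOS set engine/DRAM
 * timings for the requested sclk/mclk, then capture the resulting
 * MC_ARB_DRAM_TIMING registers (big-endian) for upload.
 */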
161357e252bfSMichael Neumann static int ni_populate_memory_timing_parameters(struct radeon_device *rdev,
161457e252bfSMichael Neumann 						struct rv7xx_pl *pl,
161557e252bfSMichael Neumann 						SMC_NIslands_MCArbDramTimingRegisterSet *arb_regs)
161657e252bfSMichael Neumann {
161757e252bfSMichael Neumann 	u32 dram_timing;
161857e252bfSMichael Neumann 	u32 dram_timing2;
161957e252bfSMichael Neumann 
162057e252bfSMichael Neumann 	arb_regs->mc_arb_rfsh_rate =
162157e252bfSMichael Neumann 		(u8)rv770_calculate_memory_refresh_rate(rdev, pl->sclk);
162257e252bfSMichael Neumann 
162357e252bfSMichael Neumann 
1624*d78d3a22SFrançois Tigeot 	radeon_atom_set_engine_dram_timings(rdev, pl->sclk, pl->mclk);
162557e252bfSMichael Neumann 
162657e252bfSMichael Neumann 	dram_timing = RREG32(MC_ARB_DRAM_TIMING);
162757e252bfSMichael Neumann 	dram_timing2 = RREG32(MC_ARB_DRAM_TIMING2);
162857e252bfSMichael Neumann 
162957e252bfSMichael Neumann 	arb_regs->mc_arb_dram_timing  = cpu_to_be32(dram_timing);
163057e252bfSMichael Neumann 	arb_regs->mc_arb_dram_timing2 = cpu_to_be32(dram_timing2);
163157e252bfSMichael Neumann 
163257e252bfSMichael Neumann 	return 0;
163357e252bfSMichael Neumann }
163457e252bfSMichael Neumann 
163557e252bfSMichael Neumann static int ni_do_program_memory_timing_parameters(struct radeon_device *rdev,
163657e252bfSMichael Neumann 						  struct radeon_ps *radeon_state,
163757e252bfSMichael Neumann 						  unsigned int first_arb_set)
163857e252bfSMichael Neumann {
163957e252bfSMichael Neumann 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
164057e252bfSMichael Neumann 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
164157e252bfSMichael Neumann 	struct ni_ps *state = ni_get_ps(radeon_state);
164257e252bfSMichael Neumann 	SMC_NIslands_MCArbDramTimingRegisterSet arb_regs = { 0 };
164357e252bfSMichael Neumann 	int i, ret = 0;
164457e252bfSMichael Neumann 
164557e252bfSMichael Neumann 	for (i = 0; i < state->performance_level_count; i++) {
164657e252bfSMichael Neumann 		ret = ni_populate_memory_timing_parameters(rdev, &state->performance_levels[i], &arb_regs);
164757e252bfSMichael Neumann 		if (ret)
164857e252bfSMichael Neumann 			break;
164957e252bfSMichael Neumann 
165057e252bfSMichael Neumann 		ret = rv770_copy_bytes_to_smc(rdev,
165157e252bfSMichael Neumann 					      (u16)(ni_pi->arb_table_start +
165257e252bfSMichael Neumann 						    offsetof(SMC_NIslands_MCArbDramTimingRegisters, data) +
165357e252bfSMichael Neumann 						    sizeof(SMC_NIslands_MCArbDramTimingRegisterSet) * (first_arb_set + i)),
165457e252bfSMichael Neumann 					      (u8 *)&arb_regs,
165557e252bfSMichael Neumann 					      (u16)sizeof(SMC_NIslands_MCArbDramTimingRegisterSet),
165657e252bfSMichael Neumann 					      pi->sram_end);
165757e252bfSMichael Neumann 		if (ret)
165857e252bfSMichael Neumann 			break;
165957e252bfSMichael Neumann 	}
166057e252bfSMichael Neumann 	return ret;
166157e252bfSMichael Neumann }
166257e252bfSMichael Neumann 
166357e252bfSMichael Neumann static int ni_program_memory_timing_parameters(struct radeon_device *rdev,
166457e252bfSMichael Neumann 					       struct radeon_ps *radeon_new_state)
166557e252bfSMichael Neumann {
166657e252bfSMichael Neumann 	return ni_do_program_memory_timing_parameters(rdev, radeon_new_state,
166757e252bfSMichael Neumann 						      NISLANDS_DRIVER_STATE_ARB_INDEX);
166857e252bfSMichael Neumann }
166957e252bfSMichael Neumann 
167057e252bfSMichael Neumann static void ni_populate_initial_mvdd_value(struct radeon_device *rdev,
167157e252bfSMichael Neumann 					   struct NISLANDS_SMC_VOLTAGE_VALUE *voltage)
167257e252bfSMichael Neumann {
167357e252bfSMichael Neumann 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
167457e252bfSMichael Neumann 
167557e252bfSMichael Neumann 	voltage->index = eg_pi->mvdd_high_index;
167657e252bfSMichael Neumann 	voltage->value = cpu_to_be16(MVDD_HIGH_VALUE);
167757e252bfSMichael Neumann }
167857e252bfSMichael Neumann 
167957e252bfSMichael Neumann static int ni_populate_smc_initial_state(struct radeon_device *rdev,
168057e252bfSMichael Neumann 					 struct radeon_ps *radeon_initial_state,
168157e252bfSMichael Neumann 					 NISLANDS_SMC_STATETABLE *table)
168257e252bfSMichael Neumann {
168357e252bfSMichael Neumann 	struct ni_ps *initial_state = ni_get_ps(radeon_initial_state);
168457e252bfSMichael Neumann 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
168557e252bfSMichael Neumann 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
168657e252bfSMichael Neumann 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
168757e252bfSMichael Neumann 	u32 reg;
168857e252bfSMichael Neumann 	int ret;
168957e252bfSMichael Neumann 
169057e252bfSMichael Neumann 	table->initialState.levels[0].mclk.vMPLL_AD_FUNC_CNTL =
169157e252bfSMichael Neumann 		cpu_to_be32(ni_pi->clock_registers.mpll_ad_func_cntl);
169257e252bfSMichael Neumann 	table->initialState.levels[0].mclk.vMPLL_AD_FUNC_CNTL_2 =
169357e252bfSMichael Neumann 		cpu_to_be32(ni_pi->clock_registers.mpll_ad_func_cntl_2);
169457e252bfSMichael Neumann 	table->initialState.levels[0].mclk.vMPLL_DQ_FUNC_CNTL =
169557e252bfSMichael Neumann 		cpu_to_be32(ni_pi->clock_registers.mpll_dq_func_cntl);
169657e252bfSMichael Neumann 	table->initialState.levels[0].mclk.vMPLL_DQ_FUNC_CNTL_2 =
169757e252bfSMichael Neumann 		cpu_to_be32(ni_pi->clock_registers.mpll_dq_func_cntl_2);
169857e252bfSMichael Neumann 	table->initialState.levels[0].mclk.vMCLK_PWRMGT_CNTL =
169957e252bfSMichael Neumann 		cpu_to_be32(ni_pi->clock_registers.mclk_pwrmgt_cntl);
170057e252bfSMichael Neumann 	table->initialState.levels[0].mclk.vDLL_CNTL =
170157e252bfSMichael Neumann 		cpu_to_be32(ni_pi->clock_registers.dll_cntl);
170257e252bfSMichael Neumann 	table->initialState.levels[0].mclk.vMPLL_SS =
170357e252bfSMichael Neumann 		cpu_to_be32(ni_pi->clock_registers.mpll_ss1);
170457e252bfSMichael Neumann 	table->initialState.levels[0].mclk.vMPLL_SS2 =
170557e252bfSMichael Neumann 		cpu_to_be32(ni_pi->clock_registers.mpll_ss2);
170657e252bfSMichael Neumann 	table->initialState.levels[0].mclk.mclk_value =
170757e252bfSMichael Neumann 		cpu_to_be32(initial_state->performance_levels[0].mclk);
170857e252bfSMichael Neumann 
170957e252bfSMichael Neumann 	table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL =
171057e252bfSMichael Neumann 		cpu_to_be32(ni_pi->clock_registers.cg_spll_func_cntl);
171157e252bfSMichael Neumann 	table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_2 =
171257e252bfSMichael Neumann 		cpu_to_be32(ni_pi->clock_registers.cg_spll_func_cntl_2);
171357e252bfSMichael Neumann 	table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_3 =
171457e252bfSMichael Neumann 		cpu_to_be32(ni_pi->clock_registers.cg_spll_func_cntl_3);
171557e252bfSMichael Neumann 	table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_4 =
171657e252bfSMichael Neumann 		cpu_to_be32(ni_pi->clock_registers.cg_spll_func_cntl_4);
171757e252bfSMichael Neumann 	table->initialState.levels[0].sclk.vCG_SPLL_SPREAD_SPECTRUM =
171857e252bfSMichael Neumann 		cpu_to_be32(ni_pi->clock_registers.cg_spll_spread_spectrum);
171957e252bfSMichael Neumann 	table->initialState.levels[0].sclk.vCG_SPLL_SPREAD_SPECTRUM_2 =
172057e252bfSMichael Neumann 		cpu_to_be32(ni_pi->clock_registers.cg_spll_spread_spectrum_2);
172157e252bfSMichael Neumann 	table->initialState.levels[0].sclk.sclk_value =
172257e252bfSMichael Neumann 		cpu_to_be32(initial_state->performance_levels[0].sclk);
172357e252bfSMichael Neumann 	table->initialState.levels[0].arbRefreshState =
172457e252bfSMichael Neumann 		NISLANDS_INITIAL_STATE_ARB_INDEX;
172557e252bfSMichael Neumann 
172657e252bfSMichael Neumann 	table->initialState.levels[0].ACIndex = 0;
172757e252bfSMichael Neumann 
172857e252bfSMichael Neumann 	ret = ni_populate_voltage_value(rdev, &eg_pi->vddc_voltage_table,
172957e252bfSMichael Neumann 					initial_state->performance_levels[0].vddc,
173057e252bfSMichael Neumann 					&table->initialState.levels[0].vddc);
173157e252bfSMichael Neumann 	if (!ret) {
173257e252bfSMichael Neumann 		u16 std_vddc;
173357e252bfSMichael Neumann 
173457e252bfSMichael Neumann 		ret = ni_get_std_voltage_value(rdev,
173557e252bfSMichael Neumann 					       &table->initialState.levels[0].vddc,
173657e252bfSMichael Neumann 					       &std_vddc);
173757e252bfSMichael Neumann 		if (!ret)
173857e252bfSMichael Neumann 			ni_populate_std_voltage_value(rdev, std_vddc,
173957e252bfSMichael Neumann 						      table->initialState.levels[0].vddc.index,
174057e252bfSMichael Neumann 						      &table->initialState.levels[0].std_vddc);
174157e252bfSMichael Neumann 	}
174257e252bfSMichael Neumann 
174357e252bfSMichael Neumann 	if (eg_pi->vddci_control)
174457e252bfSMichael Neumann 		ni_populate_voltage_value(rdev,
174557e252bfSMichael Neumann 					  &eg_pi->vddci_voltage_table,
174657e252bfSMichael Neumann 					  initial_state->performance_levels[0].vddci,
174757e252bfSMichael Neumann 					  &table->initialState.levels[0].vddci);
174857e252bfSMichael Neumann 
174957e252bfSMichael Neumann 	ni_populate_initial_mvdd_value(rdev, &table->initialState.levels[0].mvdd);
175057e252bfSMichael Neumann 
175157e252bfSMichael Neumann 	reg = CG_R(0xffff) | CG_L(0);
175257e252bfSMichael Neumann 	table->initialState.levels[0].aT = cpu_to_be32(reg);
175357e252bfSMichael Neumann 
175457e252bfSMichael Neumann 	table->initialState.levels[0].bSP = cpu_to_be32(pi->dsp);
175557e252bfSMichael Neumann 
175657e252bfSMichael Neumann 	if (pi->boot_in_gen2)
175757e252bfSMichael Neumann 		table->initialState.levels[0].gen2PCIE = 1;
175857e252bfSMichael Neumann 	else
175957e252bfSMichael Neumann 		table->initialState.levels[0].gen2PCIE = 0;
176057e252bfSMichael Neumann 
176157e252bfSMichael Neumann 	if (pi->mem_gddr5) {
176257e252bfSMichael Neumann 		table->initialState.levels[0].strobeMode =
176357e252bfSMichael Neumann 			cypress_get_strobe_mode_settings(rdev,
176457e252bfSMichael Neumann 							 initial_state->performance_levels[0].mclk);
176557e252bfSMichael Neumann 
176657e252bfSMichael Neumann 		if (initial_state->performance_levels[0].mclk > pi->mclk_edc_enable_threshold)
176757e252bfSMichael Neumann 			table->initialState.levels[0].mcFlags = NISLANDS_SMC_MC_EDC_RD_FLAG | NISLANDS_SMC_MC_EDC_WR_FLAG;
176857e252bfSMichael Neumann 		else
176957e252bfSMichael Neumann 			table->initialState.levels[0].mcFlags =  0;
177057e252bfSMichael Neumann 	}
177157e252bfSMichael Neumann 
177257e252bfSMichael Neumann 	table->initialState.levelCount = 1;
177357e252bfSMichael Neumann 
177457e252bfSMichael Neumann 	table->initialState.flags |= PPSMC_SWSTATE_FLAG_DC;
177557e252bfSMichael Neumann 
177657e252bfSMichael Neumann 	table->initialState.levels[0].dpm2.MaxPS = 0;
177757e252bfSMichael Neumann 	table->initialState.levels[0].dpm2.NearTDPDec = 0;
177857e252bfSMichael Neumann 	table->initialState.levels[0].dpm2.AboveSafeInc = 0;
177957e252bfSMichael Neumann 	table->initialState.levels[0].dpm2.BelowSafeInc = 0;
178057e252bfSMichael Neumann 
178157e252bfSMichael Neumann 	reg = MIN_POWER_MASK | MAX_POWER_MASK;
178257e252bfSMichael Neumann 	table->initialState.levels[0].SQPowerThrottle = cpu_to_be32(reg);
178357e252bfSMichael Neumann 
178457e252bfSMichael Neumann 	reg = MAX_POWER_DELTA_MASK | STI_SIZE_MASK | LTI_RATIO_MASK;
178557e252bfSMichael Neumann 	table->initialState.levels[0].SQPowerThrottle_2 = cpu_to_be32(reg);
178657e252bfSMichael Neumann 
178757e252bfSMichael Neumann 	return 0;
178857e252bfSMichael Neumann }
178957e252bfSMichael Neumann 
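/*
 * The ACPI state starts as a copy of the initial state, then drops the
 * DC flag, selects the ACPI VDDC (or the minimum table voltage when no
 * ACPI VDDC is available), puts the MPLL/DLL clocks into reset/bypass,
 * switches the SPLL mux, and zeroes the reported sclk/mclk values.
 */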
179057e252bfSMichael Neumann static int ni_populate_smc_acpi_state(struct radeon_device *rdev,
179157e252bfSMichael Neumann 				      NISLANDS_SMC_STATETABLE *table)
179257e252bfSMichael Neumann {
179357e252bfSMichael Neumann 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
179457e252bfSMichael Neumann 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
179557e252bfSMichael Neumann 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
179657e252bfSMichael Neumann 	u32 mpll_ad_func_cntl   = ni_pi->clock_registers.mpll_ad_func_cntl;
179757e252bfSMichael Neumann 	u32 mpll_ad_func_cntl_2 = ni_pi->clock_registers.mpll_ad_func_cntl_2;
179857e252bfSMichael Neumann 	u32 mpll_dq_func_cntl   = ni_pi->clock_registers.mpll_dq_func_cntl;
179957e252bfSMichael Neumann 	u32 mpll_dq_func_cntl_2 = ni_pi->clock_registers.mpll_dq_func_cntl_2;
180057e252bfSMichael Neumann 	u32 spll_func_cntl      = ni_pi->clock_registers.cg_spll_func_cntl;
180157e252bfSMichael Neumann 	u32 spll_func_cntl_2    = ni_pi->clock_registers.cg_spll_func_cntl_2;
180257e252bfSMichael Neumann 	u32 spll_func_cntl_3    = ni_pi->clock_registers.cg_spll_func_cntl_3;
180357e252bfSMichael Neumann 	u32 spll_func_cntl_4    = ni_pi->clock_registers.cg_spll_func_cntl_4;
180457e252bfSMichael Neumann 	u32 mclk_pwrmgt_cntl    = ni_pi->clock_registers.mclk_pwrmgt_cntl;
180557e252bfSMichael Neumann 	u32 dll_cntl            = ni_pi->clock_registers.dll_cntl;
180657e252bfSMichael Neumann 	u32 reg;
180757e252bfSMichael Neumann 	int ret;
180857e252bfSMichael Neumann 
180957e252bfSMichael Neumann 	table->ACPIState = table->initialState;
181057e252bfSMichael Neumann 
181157e252bfSMichael Neumann 	table->ACPIState.flags &= ~PPSMC_SWSTATE_FLAG_DC;
181257e252bfSMichael Neumann 
181357e252bfSMichael Neumann 	if (pi->acpi_vddc) {
181457e252bfSMichael Neumann 		ret = ni_populate_voltage_value(rdev,
181557e252bfSMichael Neumann 						&eg_pi->vddc_voltage_table,
181657e252bfSMichael Neumann 						pi->acpi_vddc, &table->ACPIState.levels[0].vddc);
181757e252bfSMichael Neumann 		if (!ret) {
181857e252bfSMichael Neumann 			u16 std_vddc;
181957e252bfSMichael Neumann 
182057e252bfSMichael Neumann 			ret = ni_get_std_voltage_value(rdev,
182157e252bfSMichael Neumann 						       &table->ACPIState.levels[0].vddc, &std_vddc);
182257e252bfSMichael Neumann 			if (!ret)
182357e252bfSMichael Neumann 				ni_populate_std_voltage_value(rdev, std_vddc,
182457e252bfSMichael Neumann 							      table->ACPIState.levels[0].vddc.index,
182557e252bfSMichael Neumann 							      &table->ACPIState.levels[0].std_vddc);
182657e252bfSMichael Neumann 		}
182757e252bfSMichael Neumann 
182857e252bfSMichael Neumann 		if (pi->pcie_gen2) {
182957e252bfSMichael Neumann 			if (pi->acpi_pcie_gen2)
183057e252bfSMichael Neumann 				table->ACPIState.levels[0].gen2PCIE = 1;
183157e252bfSMichael Neumann 			else
183257e252bfSMichael Neumann 				table->ACPIState.levels[0].gen2PCIE = 0;
183357e252bfSMichael Neumann 		} else {
183457e252bfSMichael Neumann 			table->ACPIState.levels[0].gen2PCIE = 0;
183557e252bfSMichael Neumann 		}
183657e252bfSMichael Neumann 	} else {
183757e252bfSMichael Neumann 		ret = ni_populate_voltage_value(rdev,
183857e252bfSMichael Neumann 						&eg_pi->vddc_voltage_table,
183957e252bfSMichael Neumann 						pi->min_vddc_in_table,
184057e252bfSMichael Neumann 						&table->ACPIState.levels[0].vddc);
184157e252bfSMichael Neumann 		if (!ret) {
184257e252bfSMichael Neumann 			u16 std_vddc;
184357e252bfSMichael Neumann 
184457e252bfSMichael Neumann 			ret = ni_get_std_voltage_value(rdev,
184557e252bfSMichael Neumann 						       &table->ACPIState.levels[0].vddc,
184657e252bfSMichael Neumann 						       &std_vddc);
184757e252bfSMichael Neumann 			if (!ret)
184857e252bfSMichael Neumann 				ni_populate_std_voltage_value(rdev, std_vddc,
184957e252bfSMichael Neumann 							      table->ACPIState.levels[0].vddc.index,
185057e252bfSMichael Neumann 							      &table->ACPIState.levels[0].std_vddc);
185157e252bfSMichael Neumann 		}
185257e252bfSMichael Neumann 		table->ACPIState.levels[0].gen2PCIE = 0;
185357e252bfSMichael Neumann 	}
185457e252bfSMichael Neumann 
185557e252bfSMichael Neumann 	if (eg_pi->acpi_vddci) {
185657e252bfSMichael Neumann 		if (eg_pi->vddci_control)
185757e252bfSMichael Neumann 			ni_populate_voltage_value(rdev,
185857e252bfSMichael Neumann 						  &eg_pi->vddci_voltage_table,
185957e252bfSMichael Neumann 						  eg_pi->acpi_vddci,
186057e252bfSMichael Neumann 						  &table->ACPIState.levels[0].vddci);
186157e252bfSMichael Neumann 	}
186257e252bfSMichael Neumann 
186357e252bfSMichael Neumann 
186457e252bfSMichael Neumann 	mpll_ad_func_cntl &= ~PDNB;
186557e252bfSMichael Neumann 
186657e252bfSMichael Neumann 	mpll_ad_func_cntl_2 |= BIAS_GEN_PDNB | RESET_EN;
186757e252bfSMichael Neumann 
186857e252bfSMichael Neumann 	if (pi->mem_gddr5)
186957e252bfSMichael Neumann 		mpll_dq_func_cntl &= ~PDNB;
187057e252bfSMichael Neumann 	mpll_dq_func_cntl_2 |= BIAS_GEN_PDNB | RESET_EN | BYPASS;
187157e252bfSMichael Neumann 
187257e252bfSMichael Neumann 
187357e252bfSMichael Neumann 	mclk_pwrmgt_cntl |= (MRDCKA0_RESET |
187457e252bfSMichael Neumann 			     MRDCKA1_RESET |
187557e252bfSMichael Neumann 			     MRDCKB0_RESET |
187657e252bfSMichael Neumann 			     MRDCKB1_RESET |
187757e252bfSMichael Neumann 			     MRDCKC0_RESET |
187857e252bfSMichael Neumann 			     MRDCKC1_RESET |
187957e252bfSMichael Neumann 			     MRDCKD0_RESET |
188057e252bfSMichael Neumann 			     MRDCKD1_RESET);
188157e252bfSMichael Neumann 
188257e252bfSMichael Neumann 	mclk_pwrmgt_cntl &= ~(MRDCKA0_PDNB |
188357e252bfSMichael Neumann 			      MRDCKA1_PDNB |
188457e252bfSMichael Neumann 			      MRDCKB0_PDNB |
188557e252bfSMichael Neumann 			      MRDCKB1_PDNB |
188657e252bfSMichael Neumann 			      MRDCKC0_PDNB |
188757e252bfSMichael Neumann 			      MRDCKC1_PDNB |
188857e252bfSMichael Neumann 			      MRDCKD0_PDNB |
188957e252bfSMichael Neumann 			      MRDCKD1_PDNB);
189057e252bfSMichael Neumann 
189157e252bfSMichael Neumann 	dll_cntl |= (MRDCKA0_BYPASS |
189257e252bfSMichael Neumann 		     MRDCKA1_BYPASS |
189357e252bfSMichael Neumann 		     MRDCKB0_BYPASS |
189457e252bfSMichael Neumann 		     MRDCKB1_BYPASS |
189557e252bfSMichael Neumann 		     MRDCKC0_BYPASS |
189657e252bfSMichael Neumann 		     MRDCKC1_BYPASS |
189757e252bfSMichael Neumann 		     MRDCKD0_BYPASS |
189857e252bfSMichael Neumann 		     MRDCKD1_BYPASS);
189957e252bfSMichael Neumann 
190057e252bfSMichael Neumann 	spll_func_cntl_2 &= ~SCLK_MUX_SEL_MASK;
190157e252bfSMichael Neumann 	spll_func_cntl_2 |= SCLK_MUX_SEL(4);
190257e252bfSMichael Neumann 
190357e252bfSMichael Neumann 	table->ACPIState.levels[0].mclk.vMPLL_AD_FUNC_CNTL = cpu_to_be32(mpll_ad_func_cntl);
190457e252bfSMichael Neumann 	table->ACPIState.levels[0].mclk.vMPLL_AD_FUNC_CNTL_2 = cpu_to_be32(mpll_ad_func_cntl_2);
190557e252bfSMichael Neumann 	table->ACPIState.levels[0].mclk.vMPLL_DQ_FUNC_CNTL = cpu_to_be32(mpll_dq_func_cntl);
190657e252bfSMichael Neumann 	table->ACPIState.levels[0].mclk.vMPLL_DQ_FUNC_CNTL_2 = cpu_to_be32(mpll_dq_func_cntl_2);
190757e252bfSMichael Neumann 	table->ACPIState.levels[0].mclk.vMCLK_PWRMGT_CNTL = cpu_to_be32(mclk_pwrmgt_cntl);
190857e252bfSMichael Neumann 	table->ACPIState.levels[0].mclk.vDLL_CNTL = cpu_to_be32(dll_cntl);
190957e252bfSMichael Neumann 
191057e252bfSMichael Neumann 	table->ACPIState.levels[0].mclk.mclk_value = 0;
191157e252bfSMichael Neumann 
191257e252bfSMichael Neumann 	table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL = cpu_to_be32(spll_func_cntl);
191357e252bfSMichael Neumann 	table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_2 = cpu_to_be32(spll_func_cntl_2);
191457e252bfSMichael Neumann 	table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_3 = cpu_to_be32(spll_func_cntl_3);
191557e252bfSMichael Neumann 	table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_4 = cpu_to_be32(spll_func_cntl_4);
191657e252bfSMichael Neumann 
191757e252bfSMichael Neumann 	table->ACPIState.levels[0].sclk.sclk_value = 0;
191857e252bfSMichael Neumann 
191957e252bfSMichael Neumann 	ni_populate_mvdd_value(rdev, 0, &table->ACPIState.levels[0].mvdd);
192057e252bfSMichael Neumann 
192157e252bfSMichael Neumann 	if (eg_pi->dynamic_ac_timing)
192257e252bfSMichael Neumann 		table->ACPIState.levels[0].ACIndex = 1;
192357e252bfSMichael Neumann 
192457e252bfSMichael Neumann 	table->ACPIState.levels[0].dpm2.MaxPS = 0;
192557e252bfSMichael Neumann 	table->ACPIState.levels[0].dpm2.NearTDPDec = 0;
192657e252bfSMichael Neumann 	table->ACPIState.levels[0].dpm2.AboveSafeInc = 0;
192757e252bfSMichael Neumann 	table->ACPIState.levels[0].dpm2.BelowSafeInc = 0;
192857e252bfSMichael Neumann 
192957e252bfSMichael Neumann 	reg = MIN_POWER_MASK | MAX_POWER_MASK;
193057e252bfSMichael Neumann 	table->ACPIState.levels[0].SQPowerThrottle = cpu_to_be32(reg);
193157e252bfSMichael Neumann 
193257e252bfSMichael Neumann 	reg = MAX_POWER_DELTA_MASK | STI_SIZE_MASK | LTI_RATIO_MASK;
193357e252bfSMichael Neumann 	table->ACPIState.levels[0].SQPowerThrottle_2 = cpu_to_be32(reg);
193457e252bfSMichael Neumann 
193557e252bfSMichael Neumann 	return 0;
193657e252bfSMichael Neumann }
193757e252bfSMichael Neumann 
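/*
 * Build the static SMC state table: voltage tables, thermal protection
 * type, platform capability flags, and the initial/ACPI/ULV states, then
 * program the initial-state memory timings and copy the table to SMC RAM.
 */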
193857e252bfSMichael Neumann static int ni_init_smc_table(struct radeon_device *rdev)
193957e252bfSMichael Neumann {
194057e252bfSMichael Neumann 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
194157e252bfSMichael Neumann 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
194257e252bfSMichael Neumann 	int ret;
194357e252bfSMichael Neumann 	struct radeon_ps *radeon_boot_state = rdev->pm.dpm.boot_ps;
194457e252bfSMichael Neumann 	NISLANDS_SMC_STATETABLE *table = &ni_pi->smc_statetable;
194557e252bfSMichael Neumann 
194657e252bfSMichael Neumann 	memset(table, 0, sizeof(NISLANDS_SMC_STATETABLE));
194757e252bfSMichael Neumann 
194857e252bfSMichael Neumann 	ni_populate_smc_voltage_tables(rdev, table);
194957e252bfSMichael Neumann 
195057e252bfSMichael Neumann 	switch (rdev->pm.int_thermal_type) {
195157e252bfSMichael Neumann 	case THERMAL_TYPE_NI:
195257e252bfSMichael Neumann 	case THERMAL_TYPE_EMC2103_WITH_INTERNAL:
195357e252bfSMichael Neumann 		table->thermalProtectType = PPSMC_THERMAL_PROTECT_TYPE_INTERNAL;
195457e252bfSMichael Neumann 		break;
195557e252bfSMichael Neumann 	case THERMAL_TYPE_NONE:
195657e252bfSMichael Neumann 		table->thermalProtectType = PPSMC_THERMAL_PROTECT_TYPE_NONE;
195757e252bfSMichael Neumann 		break;
195857e252bfSMichael Neumann 	default:
195957e252bfSMichael Neumann 		table->thermalProtectType = PPSMC_THERMAL_PROTECT_TYPE_EXTERNAL;
196057e252bfSMichael Neumann 		break;
196157e252bfSMichael Neumann 	}
196257e252bfSMichael Neumann 
196357e252bfSMichael Neumann 	if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_HARDWAREDC)
196457e252bfSMichael Neumann 		table->systemFlags |= PPSMC_SYSTEMFLAG_GPIO_DC;
196557e252bfSMichael Neumann 
196657e252bfSMichael Neumann 	if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_REGULATOR_HOT)
196757e252bfSMichael Neumann 		table->systemFlags |= PPSMC_SYSTEMFLAG_REGULATOR_HOT;
196857e252bfSMichael Neumann 
196957e252bfSMichael Neumann 	if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_STEPVDDC)
197057e252bfSMichael Neumann 		table->systemFlags |= PPSMC_SYSTEMFLAG_STEPVDDC;
197157e252bfSMichael Neumann 
197257e252bfSMichael Neumann 	if (pi->mem_gddr5)
197357e252bfSMichael Neumann 		table->systemFlags |= PPSMC_SYSTEMFLAG_GDDR5;
197457e252bfSMichael Neumann 
197557e252bfSMichael Neumann 	ret = ni_populate_smc_initial_state(rdev, radeon_boot_state, table);
197657e252bfSMichael Neumann 	if (ret)
197757e252bfSMichael Neumann 		return ret;
197857e252bfSMichael Neumann 
197957e252bfSMichael Neumann 	ret = ni_populate_smc_acpi_state(rdev, table);
198057e252bfSMichael Neumann 	if (ret)
198157e252bfSMichael Neumann 		return ret;
198257e252bfSMichael Neumann 
198357e252bfSMichael Neumann 	table->driverState = table->initialState;
198457e252bfSMichael Neumann 
198557e252bfSMichael Neumann 	table->ULVState = table->initialState;
198657e252bfSMichael Neumann 
198757e252bfSMichael Neumann 	ret = ni_do_program_memory_timing_parameters(rdev, radeon_boot_state,
198857e252bfSMichael Neumann 						     NISLANDS_INITIAL_STATE_ARB_INDEX);
198957e252bfSMichael Neumann 	if (ret)
199057e252bfSMichael Neumann 		return ret;
199157e252bfSMichael Neumann 
199257e252bfSMichael Neumann 	return rv770_copy_bytes_to_smc(rdev, pi->state_table_start, (u8 *)table,
199357e252bfSMichael Neumann 				       sizeof(NISLANDS_SMC_STATETABLE), pi->sram_end);
199457e252bfSMichael Neumann }
199557e252bfSMichael Neumann 
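/*
 * Compute the CG_SPLL_* register values (reference/post dividers, feedback
 * divider and optional spread spectrum) needed to reach the requested
 * engine clock, using the divider settings returned by the ATOM tables.
 */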
199657e252bfSMichael Neumann static int ni_calculate_sclk_params(struct radeon_device *rdev,
199757e252bfSMichael Neumann 				    u32 engine_clock,
199857e252bfSMichael Neumann 				    NISLANDS_SMC_SCLK_VALUE *sclk)
199957e252bfSMichael Neumann {
200057e252bfSMichael Neumann 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
200157e252bfSMichael Neumann 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
200257e252bfSMichael Neumann 	struct atom_clock_dividers dividers;
200357e252bfSMichael Neumann 	u32 spll_func_cntl = ni_pi->clock_registers.cg_spll_func_cntl;
200457e252bfSMichael Neumann 	u32 spll_func_cntl_2 = ni_pi->clock_registers.cg_spll_func_cntl_2;
200557e252bfSMichael Neumann 	u32 spll_func_cntl_3 = ni_pi->clock_registers.cg_spll_func_cntl_3;
200657e252bfSMichael Neumann 	u32 spll_func_cntl_4 = ni_pi->clock_registers.cg_spll_func_cntl_4;
200757e252bfSMichael Neumann 	u32 cg_spll_spread_spectrum = ni_pi->clock_registers.cg_spll_spread_spectrum;
200857e252bfSMichael Neumann 	u32 cg_spll_spread_spectrum_2 = ni_pi->clock_registers.cg_spll_spread_spectrum_2;
200957e252bfSMichael Neumann 	u64 tmp;
201057e252bfSMichael Neumann 	u32 reference_clock = rdev->clock.spll.reference_freq;
201157e252bfSMichael Neumann 	u32 reference_divider;
201257e252bfSMichael Neumann 	u32 fbdiv;
201357e252bfSMichael Neumann 	int ret;
201457e252bfSMichael Neumann 
201557e252bfSMichael Neumann 	ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
201657e252bfSMichael Neumann 					     engine_clock, false, &dividers);
201757e252bfSMichael Neumann 	if (ret)
201857e252bfSMichael Neumann 		return ret;
201957e252bfSMichael Neumann 
202057e252bfSMichael Neumann 	reference_divider = 1 + dividers.ref_div;
202157e252bfSMichael Neumann 
202257e252bfSMichael Neumann 
202357e252bfSMichael Neumann 	tmp = (u64) engine_clock * reference_divider * dividers.post_div * 16834;
202457e252bfSMichael Neumann 	do_div(tmp, reference_clock);
202557e252bfSMichael Neumann 	fbdiv = (u32) tmp;
202657e252bfSMichael Neumann 
202757e252bfSMichael Neumann 	spll_func_cntl &= ~(SPLL_PDIV_A_MASK | SPLL_REF_DIV_MASK);
202857e252bfSMichael Neumann 	spll_func_cntl |= SPLL_REF_DIV(dividers.ref_div);
202957e252bfSMichael Neumann 	spll_func_cntl |= SPLL_PDIV_A(dividers.post_div);
203057e252bfSMichael Neumann 
203157e252bfSMichael Neumann 	spll_func_cntl_2 &= ~SCLK_MUX_SEL_MASK;
203257e252bfSMichael Neumann 	spll_func_cntl_2 |= SCLK_MUX_SEL(2);
203357e252bfSMichael Neumann 
203457e252bfSMichael Neumann 	spll_func_cntl_3 &= ~SPLL_FB_DIV_MASK;
203557e252bfSMichael Neumann 	spll_func_cntl_3 |= SPLL_FB_DIV(fbdiv);
203657e252bfSMichael Neumann 	spll_func_cntl_3 |= SPLL_DITHEN;
203757e252bfSMichael Neumann 
203857e252bfSMichael Neumann 	if (pi->sclk_ss) {
203957e252bfSMichael Neumann 		struct radeon_atom_ss ss;
204057e252bfSMichael Neumann 		u32 vco_freq = engine_clock * dividers.post_div;
204157e252bfSMichael Neumann 
204257e252bfSMichael Neumann 		if (radeon_atombios_get_asic_ss_info(rdev, &ss,
204357e252bfSMichael Neumann 						     ASIC_INTERNAL_ENGINE_SS, vco_freq)) {
204457e252bfSMichael Neumann 			u32 clk_s = reference_clock * 5 / (reference_divider * ss.rate);
204557e252bfSMichael Neumann 			u32 clk_v = 4 * ss.percentage * fbdiv / (clk_s * 10000);
204657e252bfSMichael Neumann 
204757e252bfSMichael Neumann 			cg_spll_spread_spectrum &= ~CLK_S_MASK;
204857e252bfSMichael Neumann 			cg_spll_spread_spectrum |= CLK_S(clk_s);
204957e252bfSMichael Neumann 			cg_spll_spread_spectrum |= SSEN;
205057e252bfSMichael Neumann 
205157e252bfSMichael Neumann 			cg_spll_spread_spectrum_2 &= ~CLK_V_MASK;
205257e252bfSMichael Neumann 			cg_spll_spread_spectrum_2 |= CLK_V(clk_v);
205357e252bfSMichael Neumann 		}
205457e252bfSMichael Neumann 	}
205557e252bfSMichael Neumann 
205657e252bfSMichael Neumann 	sclk->sclk_value = engine_clock;
205757e252bfSMichael Neumann 	sclk->vCG_SPLL_FUNC_CNTL = spll_func_cntl;
205857e252bfSMichael Neumann 	sclk->vCG_SPLL_FUNC_CNTL_2 = spll_func_cntl_2;
205957e252bfSMichael Neumann 	sclk->vCG_SPLL_FUNC_CNTL_3 = spll_func_cntl_3;
206057e252bfSMichael Neumann 	sclk->vCG_SPLL_FUNC_CNTL_4 = spll_func_cntl_4;
206157e252bfSMichael Neumann 	sclk->vCG_SPLL_SPREAD_SPECTRUM = cg_spll_spread_spectrum;
206257e252bfSMichael Neumann 	sclk->vCG_SPLL_SPREAD_SPECTRUM_2 = cg_spll_spread_spectrum_2;
206357e252bfSMichael Neumann 
206457e252bfSMichael Neumann 	return 0;
206557e252bfSMichael Neumann }
206657e252bfSMichael Neumann 
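/* Same as ni_calculate_sclk_params(), but byte-swapped for the SMC. */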
206757e252bfSMichael Neumann static int ni_populate_sclk_value(struct radeon_device *rdev,
206857e252bfSMichael Neumann 				  u32 engine_clock,
206957e252bfSMichael Neumann 				  NISLANDS_SMC_SCLK_VALUE *sclk)
207057e252bfSMichael Neumann {
207157e252bfSMichael Neumann 	NISLANDS_SMC_SCLK_VALUE sclk_tmp;
207257e252bfSMichael Neumann 	int ret;
207357e252bfSMichael Neumann 
207457e252bfSMichael Neumann 	ret = ni_calculate_sclk_params(rdev, engine_clock, &sclk_tmp);
207557e252bfSMichael Neumann 	if (!ret) {
207657e252bfSMichael Neumann 		sclk->sclk_value = cpu_to_be32(sclk_tmp.sclk_value);
207757e252bfSMichael Neumann 		sclk->vCG_SPLL_FUNC_CNTL = cpu_to_be32(sclk_tmp.vCG_SPLL_FUNC_CNTL);
207857e252bfSMichael Neumann 		sclk->vCG_SPLL_FUNC_CNTL_2 = cpu_to_be32(sclk_tmp.vCG_SPLL_FUNC_CNTL_2);
207957e252bfSMichael Neumann 		sclk->vCG_SPLL_FUNC_CNTL_3 = cpu_to_be32(sclk_tmp.vCG_SPLL_FUNC_CNTL_3);
208057e252bfSMichael Neumann 		sclk->vCG_SPLL_FUNC_CNTL_4 = cpu_to_be32(sclk_tmp.vCG_SPLL_FUNC_CNTL_4);
208157e252bfSMichael Neumann 		sclk->vCG_SPLL_SPREAD_SPECTRUM = cpu_to_be32(sclk_tmp.vCG_SPLL_SPREAD_SPECTRUM);
208257e252bfSMichael Neumann 		sclk->vCG_SPLL_SPREAD_SPECTRUM_2 = cpu_to_be32(sclk_tmp.vCG_SPLL_SPREAD_SPECTRUM_2);
208357e252bfSMichael Neumann 	}
208457e252bfSMichael Neumann 
208557e252bfSMichael Neumann 	return ret;
208657e252bfSMichael Neumann }
208757e252bfSMichael Neumann 
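/*
 * Precompute a 256-entry table of packed SPLL divider and spread-spectrum
 * settings (one entry per 512 clock-unit step), validate that each field
 * fits its mask, and upload the table to SMC RAM at spll_table_start.
 */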
208857e252bfSMichael Neumann static int ni_init_smc_spll_table(struct radeon_device *rdev)
208957e252bfSMichael Neumann {
209057e252bfSMichael Neumann 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
209157e252bfSMichael Neumann 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
209257e252bfSMichael Neumann 	SMC_NISLANDS_SPLL_DIV_TABLE *spll_table;
209357e252bfSMichael Neumann 	NISLANDS_SMC_SCLK_VALUE sclk_params;
209457e252bfSMichael Neumann 	u32 fb_div;
209557e252bfSMichael Neumann 	u32 p_div;
209657e252bfSMichael Neumann 	u32 clk_s;
209757e252bfSMichael Neumann 	u32 clk_v;
209857e252bfSMichael Neumann 	u32 sclk = 0;
209957e252bfSMichael Neumann 	int i, ret;
210057e252bfSMichael Neumann 	u32 tmp;
210157e252bfSMichael Neumann 
210257e252bfSMichael Neumann 	if (ni_pi->spll_table_start == 0)
210357e252bfSMichael Neumann 		return -EINVAL;
210457e252bfSMichael Neumann 
210557e252bfSMichael Neumann 	spll_table = kzalloc(sizeof(SMC_NISLANDS_SPLL_DIV_TABLE), GFP_KERNEL);
210657e252bfSMichael Neumann 	if (spll_table == NULL)
210757e252bfSMichael Neumann 		return -ENOMEM;
210857e252bfSMichael Neumann 
210957e252bfSMichael Neumann 	for (i = 0; i < 256; i++) {
211057e252bfSMichael Neumann 		ret = ni_calculate_sclk_params(rdev, sclk, &sclk_params);
211157e252bfSMichael Neumann 		if (ret)
211257e252bfSMichael Neumann 			break;
211357e252bfSMichael Neumann 
211457e252bfSMichael Neumann 		p_div = (sclk_params.vCG_SPLL_FUNC_CNTL & SPLL_PDIV_A_MASK) >> SPLL_PDIV_A_SHIFT;
211557e252bfSMichael Neumann 		fb_div = (sclk_params.vCG_SPLL_FUNC_CNTL_3 & SPLL_FB_DIV_MASK) >> SPLL_FB_DIV_SHIFT;
211657e252bfSMichael Neumann 		clk_s = (sclk_params.vCG_SPLL_SPREAD_SPECTRUM & CLK_S_MASK) >> CLK_S_SHIFT;
211757e252bfSMichael Neumann 		clk_v = (sclk_params.vCG_SPLL_SPREAD_SPECTRUM_2 & CLK_V_MASK) >> CLK_V_SHIFT;
211857e252bfSMichael Neumann 
211957e252bfSMichael Neumann 		fb_div &= ~0x00001FFF;
212057e252bfSMichael Neumann 		fb_div >>= 1;
212157e252bfSMichael Neumann 		clk_v >>= 6;
212257e252bfSMichael Neumann 
212357e252bfSMichael Neumann 		if (p_div & ~(SMC_NISLANDS_SPLL_DIV_TABLE_PDIV_MASK >> SMC_NISLANDS_SPLL_DIV_TABLE_PDIV_SHIFT))
212457e252bfSMichael Neumann 			ret = -EINVAL;
212557e252bfSMichael Neumann 
212657e252bfSMichael Neumann 		if (clk_s & ~(SMC_NISLANDS_SPLL_DIV_TABLE_CLKS_MASK >> SMC_NISLANDS_SPLL_DIV_TABLE_CLKS_SHIFT))
212757e252bfSMichael Neumann 			ret = -EINVAL;
212857e252bfSMichael Neumann 
213257e252bfSMichael Neumann 		if (clk_v & ~(SMC_NISLANDS_SPLL_DIV_TABLE_CLKV_MASK >> SMC_NISLANDS_SPLL_DIV_TABLE_CLKV_SHIFT))
213357e252bfSMichael Neumann 			ret = -EINVAL;
213457e252bfSMichael Neumann 
213557e252bfSMichael Neumann 		if (ret)
213657e252bfSMichael Neumann 			break;
213757e252bfSMichael Neumann 
213857e252bfSMichael Neumann 		tmp = ((fb_div << SMC_NISLANDS_SPLL_DIV_TABLE_FBDIV_SHIFT) & SMC_NISLANDS_SPLL_DIV_TABLE_FBDIV_MASK) |
213957e252bfSMichael Neumann 			((p_div << SMC_NISLANDS_SPLL_DIV_TABLE_PDIV_SHIFT) & SMC_NISLANDS_SPLL_DIV_TABLE_PDIV_MASK);
214057e252bfSMichael Neumann 		spll_table->freq[i] = cpu_to_be32(tmp);
214157e252bfSMichael Neumann 
214257e252bfSMichael Neumann 		tmp = ((clk_v << SMC_NISLANDS_SPLL_DIV_TABLE_CLKV_SHIFT) & SMC_NISLANDS_SPLL_DIV_TABLE_CLKV_MASK) |
214357e252bfSMichael Neumann 			((clk_s << SMC_NISLANDS_SPLL_DIV_TABLE_CLKS_SHIFT) & SMC_NISLANDS_SPLL_DIV_TABLE_CLKS_MASK);
214457e252bfSMichael Neumann 		spll_table->ss[i] = cpu_to_be32(tmp);
214557e252bfSMichael Neumann 
214657e252bfSMichael Neumann 		sclk += 512;
214757e252bfSMichael Neumann 	}
214857e252bfSMichael Neumann 
214957e252bfSMichael Neumann 	if (!ret)
215057e252bfSMichael Neumann 		ret = rv770_copy_bytes_to_smc(rdev, ni_pi->spll_table_start, (u8 *)spll_table,
215157e252bfSMichael Neumann 					      sizeof(SMC_NISLANDS_SPLL_DIV_TABLE), pi->sram_end);
215257e252bfSMichael Neumann 
215357e252bfSMichael Neumann 	kfree(spll_table);
215457e252bfSMichael Neumann 
215557e252bfSMichael Neumann 	return ret;
215657e252bfSMichael Neumann }
215757e252bfSMichael Neumann 
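/*
 * Compute the MPLL AD/DQ divider settings, memory spread spectrum and
 * DLL/power-down bits for the requested memory clock and pack them,
 * byte-swapped, into the SMC MCLK level structure.
 */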
215857e252bfSMichael Neumann static int ni_populate_mclk_value(struct radeon_device *rdev,
215957e252bfSMichael Neumann 				  u32 engine_clock,
216057e252bfSMichael Neumann 				  u32 memory_clock,
216157e252bfSMichael Neumann 				  NISLANDS_SMC_MCLK_VALUE *mclk,
216257e252bfSMichael Neumann 				  bool strobe_mode,
216357e252bfSMichael Neumann 				  bool dll_state_on)
216457e252bfSMichael Neumann {
216557e252bfSMichael Neumann 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
216657e252bfSMichael Neumann 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
216757e252bfSMichael Neumann 	u32 mpll_ad_func_cntl = ni_pi->clock_registers.mpll_ad_func_cntl;
216857e252bfSMichael Neumann 	u32 mpll_ad_func_cntl_2 = ni_pi->clock_registers.mpll_ad_func_cntl_2;
216957e252bfSMichael Neumann 	u32 mpll_dq_func_cntl = ni_pi->clock_registers.mpll_dq_func_cntl;
217057e252bfSMichael Neumann 	u32 mpll_dq_func_cntl_2 = ni_pi->clock_registers.mpll_dq_func_cntl_2;
217157e252bfSMichael Neumann 	u32 mclk_pwrmgt_cntl = ni_pi->clock_registers.mclk_pwrmgt_cntl;
217257e252bfSMichael Neumann 	u32 dll_cntl = ni_pi->clock_registers.dll_cntl;
217357e252bfSMichael Neumann 	u32 mpll_ss1 = ni_pi->clock_registers.mpll_ss1;
217457e252bfSMichael Neumann 	u32 mpll_ss2 = ni_pi->clock_registers.mpll_ss2;
217557e252bfSMichael Neumann 	struct atom_clock_dividers dividers;
217657e252bfSMichael Neumann 	u32 ibias;
217757e252bfSMichael Neumann 	u32 dll_speed;
217857e252bfSMichael Neumann 	int ret;
217957e252bfSMichael Neumann 	u32 mc_seq_misc7;
218057e252bfSMichael Neumann 
218157e252bfSMichael Neumann 	ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_MEMORY_PLL_PARAM,
218257e252bfSMichael Neumann 					     memory_clock, strobe_mode, &dividers);
218357e252bfSMichael Neumann 	if (ret)
218457e252bfSMichael Neumann 		return ret;
218557e252bfSMichael Neumann 
218657e252bfSMichael Neumann 	if (!strobe_mode) {
218757e252bfSMichael Neumann 		mc_seq_misc7 = RREG32(MC_SEQ_MISC7);
218857e252bfSMichael Neumann 
218957e252bfSMichael Neumann 		if (mc_seq_misc7 & 0x8000000)
219057e252bfSMichael Neumann 			dividers.post_div = 1;
219157e252bfSMichael Neumann 	}
219257e252bfSMichael Neumann 
219357e252bfSMichael Neumann 	ibias = cypress_map_clkf_to_ibias(rdev, dividers.whole_fb_div);
219457e252bfSMichael Neumann 
219557e252bfSMichael Neumann 	mpll_ad_func_cntl &= ~(CLKR_MASK |
219657e252bfSMichael Neumann 			       YCLK_POST_DIV_MASK |
219757e252bfSMichael Neumann 			       CLKF_MASK |
219857e252bfSMichael Neumann 			       CLKFRAC_MASK |
219957e252bfSMichael Neumann 			       IBIAS_MASK);
220057e252bfSMichael Neumann 	mpll_ad_func_cntl |= CLKR(dividers.ref_div);
220157e252bfSMichael Neumann 	mpll_ad_func_cntl |= YCLK_POST_DIV(dividers.post_div);
220257e252bfSMichael Neumann 	mpll_ad_func_cntl |= CLKF(dividers.whole_fb_div);
220357e252bfSMichael Neumann 	mpll_ad_func_cntl |= CLKFRAC(dividers.frac_fb_div);
220457e252bfSMichael Neumann 	mpll_ad_func_cntl |= IBIAS(ibias);
220557e252bfSMichael Neumann 
220657e252bfSMichael Neumann 	if (dividers.vco_mode)
220757e252bfSMichael Neumann 		mpll_ad_func_cntl_2 |= VCO_MODE;
220857e252bfSMichael Neumann 	else
220957e252bfSMichael Neumann 		mpll_ad_func_cntl_2 &= ~VCO_MODE;
221057e252bfSMichael Neumann 
221157e252bfSMichael Neumann 	if (pi->mem_gddr5) {
221257e252bfSMichael Neumann 		mpll_dq_func_cntl &= ~(CLKR_MASK |
221357e252bfSMichael Neumann 				       YCLK_POST_DIV_MASK |
221457e252bfSMichael Neumann 				       CLKF_MASK |
221557e252bfSMichael Neumann 				       CLKFRAC_MASK |
221657e252bfSMichael Neumann 				       IBIAS_MASK);
221757e252bfSMichael Neumann 		mpll_dq_func_cntl |= CLKR(dividers.ref_div);
221857e252bfSMichael Neumann 		mpll_dq_func_cntl |= YCLK_POST_DIV(dividers.post_div);
221957e252bfSMichael Neumann 		mpll_dq_func_cntl |= CLKF(dividers.whole_fb_div);
222057e252bfSMichael Neumann 		mpll_dq_func_cntl |= CLKFRAC(dividers.frac_fb_div);
222157e252bfSMichael Neumann 		mpll_dq_func_cntl |= IBIAS(ibias);
222257e252bfSMichael Neumann 
222357e252bfSMichael Neumann 		if (strobe_mode)
222457e252bfSMichael Neumann 			mpll_dq_func_cntl &= ~PDNB;
222557e252bfSMichael Neumann 		else
222657e252bfSMichael Neumann 			mpll_dq_func_cntl |= PDNB;
222757e252bfSMichael Neumann 
222857e252bfSMichael Neumann 		if (dividers.vco_mode)
222957e252bfSMichael Neumann 			mpll_dq_func_cntl_2 |= VCO_MODE;
223057e252bfSMichael Neumann 		else
223157e252bfSMichael Neumann 			mpll_dq_func_cntl_2 &= ~VCO_MODE;
223257e252bfSMichael Neumann 	}
223357e252bfSMichael Neumann 
223457e252bfSMichael Neumann 	if (pi->mclk_ss) {
223557e252bfSMichael Neumann 		struct radeon_atom_ss ss;
223657e252bfSMichael Neumann 		u32 vco_freq = memory_clock * dividers.post_div;
223757e252bfSMichael Neumann 
223857e252bfSMichael Neumann 		if (radeon_atombios_get_asic_ss_info(rdev, &ss,
223957e252bfSMichael Neumann 						     ASIC_INTERNAL_MEMORY_SS, vco_freq)) {
224057e252bfSMichael Neumann 			u32 reference_clock = rdev->clock.mpll.reference_freq;
224157e252bfSMichael Neumann 			u32 decoded_ref = rv740_get_decoded_reference_divider(dividers.ref_div);
224257e252bfSMichael Neumann 			u32 clk_s = reference_clock * 5 / (decoded_ref * ss.rate);
224357e252bfSMichael Neumann 			u32 clk_v = ss.percentage *
224457e252bfSMichael Neumann 				(0x4000 * dividers.whole_fb_div + 0x800 * dividers.frac_fb_div) / (clk_s * 625);
224557e252bfSMichael Neumann 
224657e252bfSMichael Neumann 			mpll_ss1 &= ~CLKV_MASK;
224757e252bfSMichael Neumann 			mpll_ss1 |= CLKV(clk_v);
224857e252bfSMichael Neumann 
224957e252bfSMichael Neumann 			mpll_ss2 &= ~CLKS_MASK;
225057e252bfSMichael Neumann 			mpll_ss2 |= CLKS(clk_s);
225157e252bfSMichael Neumann 		}
225257e252bfSMichael Neumann 	}
225357e252bfSMichael Neumann 
225457e252bfSMichael Neumann 	dll_speed = rv740_get_dll_speed(pi->mem_gddr5,
225557e252bfSMichael Neumann 					memory_clock);
225657e252bfSMichael Neumann 
225757e252bfSMichael Neumann 	mclk_pwrmgt_cntl &= ~DLL_SPEED_MASK;
225857e252bfSMichael Neumann 	mclk_pwrmgt_cntl |= DLL_SPEED(dll_speed);
225957e252bfSMichael Neumann 	if (dll_state_on)
226057e252bfSMichael Neumann 		mclk_pwrmgt_cntl |= (MRDCKA0_PDNB |
226157e252bfSMichael Neumann 				     MRDCKA1_PDNB |
226257e252bfSMichael Neumann 				     MRDCKB0_PDNB |
226357e252bfSMichael Neumann 				     MRDCKB1_PDNB |
226457e252bfSMichael Neumann 				     MRDCKC0_PDNB |
226557e252bfSMichael Neumann 				     MRDCKC1_PDNB |
226657e252bfSMichael Neumann 				     MRDCKD0_PDNB |
226757e252bfSMichael Neumann 				     MRDCKD1_PDNB);
226857e252bfSMichael Neumann 	else
226957e252bfSMichael Neumann 		mclk_pwrmgt_cntl &= ~(MRDCKA0_PDNB |
227057e252bfSMichael Neumann 				      MRDCKA1_PDNB |
227157e252bfSMichael Neumann 				      MRDCKB0_PDNB |
227257e252bfSMichael Neumann 				      MRDCKB1_PDNB |
227357e252bfSMichael Neumann 				      MRDCKC0_PDNB |
227457e252bfSMichael Neumann 				      MRDCKC1_PDNB |
227557e252bfSMichael Neumann 				      MRDCKD0_PDNB |
227657e252bfSMichael Neumann 				      MRDCKD1_PDNB);
227757e252bfSMichael Neumann 
227857e252bfSMichael Neumann 
227957e252bfSMichael Neumann 	mclk->mclk_value = cpu_to_be32(memory_clock);
228057e252bfSMichael Neumann 	mclk->vMPLL_AD_FUNC_CNTL = cpu_to_be32(mpll_ad_func_cntl);
228157e252bfSMichael Neumann 	mclk->vMPLL_AD_FUNC_CNTL_2 = cpu_to_be32(mpll_ad_func_cntl_2);
228257e252bfSMichael Neumann 	mclk->vMPLL_DQ_FUNC_CNTL = cpu_to_be32(mpll_dq_func_cntl);
228357e252bfSMichael Neumann 	mclk->vMPLL_DQ_FUNC_CNTL_2 = cpu_to_be32(mpll_dq_func_cntl_2);
228457e252bfSMichael Neumann 	mclk->vMCLK_PWRMGT_CNTL = cpu_to_be32(mclk_pwrmgt_cntl);
228557e252bfSMichael Neumann 	mclk->vDLL_CNTL = cpu_to_be32(dll_cntl);
228657e252bfSMichael Neumann 	mclk->vMPLL_SS = cpu_to_be32(mpll_ss1);
228757e252bfSMichael Neumann 	mclk->vMPLL_SS2 = cpu_to_be32(mpll_ss2);
228857e252bfSMichael Neumann 
228957e252bfSMichael Neumann 	return 0;
229057e252bfSMichael Neumann }
229157e252bfSMichael Neumann 
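/*
 * Fill in the per-level bSP value: pi->dsp for all but the last
 * performance level, which gets pi->psp instead.
 */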
229257e252bfSMichael Neumann static void ni_populate_smc_sp(struct radeon_device *rdev,
229357e252bfSMichael Neumann 			       struct radeon_ps *radeon_state,
229457e252bfSMichael Neumann 			       NISLANDS_SMC_SWSTATE *smc_state)
229557e252bfSMichael Neumann {
229657e252bfSMichael Neumann 	struct ni_ps *ps = ni_get_ps(radeon_state);
229757e252bfSMichael Neumann 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
229857e252bfSMichael Neumann 	int i;
229957e252bfSMichael Neumann 
230057e252bfSMichael Neumann 	for (i = 0; i < ps->performance_level_count - 1; i++)
230157e252bfSMichael Neumann 		smc_state->levels[i].bSP = cpu_to_be32(pi->dsp);
230257e252bfSMichael Neumann 
230357e252bfSMichael Neumann 	smc_state->levels[ps->performance_level_count - 1].bSP =
230457e252bfSMichael Neumann 		cpu_to_be32(pi->psp);
230557e252bfSMichael Neumann }
230657e252bfSMichael Neumann 
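/*
 * Translate one driver performance level (rv7xx_pl) into an SMC hardware
 * level: PCIe gen2 flag, SCLK/MCLK PLL settings, memory controller flags
 * (stutter, EDC, strobe/RTT) and the VDDC/VDDCI/MVDD voltage entries.
 */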
230757e252bfSMichael Neumann static int ni_convert_power_level_to_smc(struct radeon_device *rdev,
230857e252bfSMichael Neumann 					 struct rv7xx_pl *pl,
230957e252bfSMichael Neumann 					 NISLANDS_SMC_HW_PERFORMANCE_LEVEL *level)
231057e252bfSMichael Neumann {
231157e252bfSMichael Neumann 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
231257e252bfSMichael Neumann 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
231357e252bfSMichael Neumann 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
231457e252bfSMichael Neumann 	int ret;
231557e252bfSMichael Neumann 	bool dll_state_on;
231657e252bfSMichael Neumann 	u16 std_vddc;
231757e252bfSMichael Neumann 	u32 tmp = RREG32(DC_STUTTER_CNTL);
231857e252bfSMichael Neumann 
231957e252bfSMichael Neumann 	level->gen2PCIE = pi->pcie_gen2 ?
232057e252bfSMichael Neumann 		((pl->flags & ATOM_PPLIB_R600_FLAGS_PCIEGEN2) ? 1 : 0) : 0;
232157e252bfSMichael Neumann 
232257e252bfSMichael Neumann 	ret = ni_populate_sclk_value(rdev, pl->sclk, &level->sclk);
232357e252bfSMichael Neumann 	if (ret)
232457e252bfSMichael Neumann 		return ret;
232557e252bfSMichael Neumann 
232657e252bfSMichael Neumann 	level->mcFlags =  0;
232757e252bfSMichael Neumann 	if (pi->mclk_stutter_mode_threshold &&
232857e252bfSMichael Neumann 	    (pl->mclk <= pi->mclk_stutter_mode_threshold) &&
232957e252bfSMichael Neumann 	    !eg_pi->uvd_enabled &&
233057e252bfSMichael Neumann 	    (tmp & DC_STUTTER_ENABLE_A) &&
233157e252bfSMichael Neumann 	    (tmp & DC_STUTTER_ENABLE_B))
233257e252bfSMichael Neumann 		level->mcFlags |= NISLANDS_SMC_MC_STUTTER_EN;
233357e252bfSMichael Neumann 
233457e252bfSMichael Neumann 	if (pi->mem_gddr5) {
233557e252bfSMichael Neumann 		if (pl->mclk > pi->mclk_edc_enable_threshold)
233657e252bfSMichael Neumann 			level->mcFlags |= NISLANDS_SMC_MC_EDC_RD_FLAG;
233757e252bfSMichael Neumann 		if (pl->mclk > eg_pi->mclk_edc_wr_enable_threshold)
233857e252bfSMichael Neumann 			level->mcFlags |= NISLANDS_SMC_MC_EDC_WR_FLAG;
233957e252bfSMichael Neumann 
234057e252bfSMichael Neumann 		level->strobeMode = cypress_get_strobe_mode_settings(rdev, pl->mclk);
234157e252bfSMichael Neumann 
234257e252bfSMichael Neumann 		if (level->strobeMode & NISLANDS_SMC_STROBE_ENABLE) {
234357e252bfSMichael Neumann 			if (cypress_get_mclk_frequency_ratio(rdev, pl->mclk, true) >=
234457e252bfSMichael Neumann 			    ((RREG32(MC_SEQ_MISC7) >> 16) & 0xf))
234557e252bfSMichael Neumann 				dll_state_on = ((RREG32(MC_SEQ_MISC5) >> 1) & 0x1) ? true : false;
234657e252bfSMichael Neumann 			else
234757e252bfSMichael Neumann 				dll_state_on = ((RREG32(MC_SEQ_MISC6) >> 1) & 0x1) ? true : false;
234857e252bfSMichael Neumann 		} else {
234957e252bfSMichael Neumann 			dll_state_on = false;
235057e252bfSMichael Neumann 			if (pl->mclk > ni_pi->mclk_rtt_mode_threshold)
235157e252bfSMichael Neumann 				level->mcFlags |= NISLANDS_SMC_MC_RTT_ENABLE;
235257e252bfSMichael Neumann 		}
235357e252bfSMichael Neumann 
235457e252bfSMichael Neumann 		ret = ni_populate_mclk_value(rdev, pl->sclk, pl->mclk,
235557e252bfSMichael Neumann 					     &level->mclk,
235657e252bfSMichael Neumann 					     (level->strobeMode & NISLANDS_SMC_STROBE_ENABLE) != 0,
235757e252bfSMichael Neumann 					     dll_state_on);
235857e252bfSMichael Neumann 	} else
235957e252bfSMichael Neumann 		ret = ni_populate_mclk_value(rdev, pl->sclk, pl->mclk, &level->mclk, 1, 1);
236057e252bfSMichael Neumann 
236157e252bfSMichael Neumann 	if (ret)
236257e252bfSMichael Neumann 		return ret;
236357e252bfSMichael Neumann 
236457e252bfSMichael Neumann 	ret = ni_populate_voltage_value(rdev, &eg_pi->vddc_voltage_table,
236557e252bfSMichael Neumann 					pl->vddc, &level->vddc);
236657e252bfSMichael Neumann 	if (ret)
236757e252bfSMichael Neumann 		return ret;
236857e252bfSMichael Neumann 
236957e252bfSMichael Neumann 	ret = ni_get_std_voltage_value(rdev, &level->vddc, &std_vddc);
237057e252bfSMichael Neumann 	if (ret)
237157e252bfSMichael Neumann 		return ret;
237257e252bfSMichael Neumann 
237357e252bfSMichael Neumann 	ni_populate_std_voltage_value(rdev, std_vddc,
237457e252bfSMichael Neumann 				      level->vddc.index, &level->std_vddc);
237557e252bfSMichael Neumann 
237657e252bfSMichael Neumann 	if (eg_pi->vddci_control) {
237757e252bfSMichael Neumann 		ret = ni_populate_voltage_value(rdev, &eg_pi->vddci_voltage_table,
237857e252bfSMichael Neumann 						pl->vddci, &level->vddci);
237957e252bfSMichael Neumann 		if (ret)
238057e252bfSMichael Neumann 			return ret;
238157e252bfSMichael Neumann 	}
238257e252bfSMichael Neumann 
238357e252bfSMichael Neumann 	ni_populate_mvdd_value(rdev, pl->mclk, &level->mvdd);
238457e252bfSMichael Neumann 
238557e252bfSMichael Neumann 	return ret;
238657e252bfSMichael Neumann }
238757e252bfSMichael Neumann 
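/*
 * Compute the aT (CG_R/CG_L) transition thresholds between adjacent
 * performance levels from their engine clocks and the bsp/pbsp periods;
 * single-level states get a fixed CG_R(0xffff)/CG_L(0) entry.
 */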
238857e252bfSMichael Neumann static int ni_populate_smc_t(struct radeon_device *rdev,
238957e252bfSMichael Neumann 			     struct radeon_ps *radeon_state,
239057e252bfSMichael Neumann 			     NISLANDS_SMC_SWSTATE *smc_state)
239157e252bfSMichael Neumann {
239257e252bfSMichael Neumann 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
239357e252bfSMichael Neumann 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
239457e252bfSMichael Neumann 	struct ni_ps *state = ni_get_ps(radeon_state);
239557e252bfSMichael Neumann 	u32 a_t;
239657e252bfSMichael Neumann 	u32 t_l, t_h;
239757e252bfSMichael Neumann 	u32 high_bsp;
239857e252bfSMichael Neumann 	int i, ret;
239957e252bfSMichael Neumann 
240057e252bfSMichael Neumann 	if (state->performance_level_count >= 9)
240157e252bfSMichael Neumann 		return -EINVAL;
240257e252bfSMichael Neumann 
240357e252bfSMichael Neumann 	if (state->performance_level_count < 2) {
240457e252bfSMichael Neumann 		a_t = CG_R(0xffff) | CG_L(0);
240557e252bfSMichael Neumann 		smc_state->levels[0].aT = cpu_to_be32(a_t);
240657e252bfSMichael Neumann 		return 0;
240757e252bfSMichael Neumann 	}
240857e252bfSMichael Neumann 
240957e252bfSMichael Neumann 	smc_state->levels[0].aT = cpu_to_be32(0);
241057e252bfSMichael Neumann 
241157e252bfSMichael Neumann 	for (i = 0; i <= state->performance_level_count - 2; i++) {
241257e252bfSMichael Neumann 		if (eg_pi->uvd_enabled)
241357e252bfSMichael Neumann 			ret = r600_calculate_at(
241457e252bfSMichael Neumann 				1000 * (i * (eg_pi->smu_uvd_hs ? 2 : 8) + 2),
241557e252bfSMichael Neumann 				100 * R600_AH_DFLT,
241657e252bfSMichael Neumann 				state->performance_levels[i + 1].sclk,
241757e252bfSMichael Neumann 				state->performance_levels[i].sclk,
241857e252bfSMichael Neumann 				&t_l,
241957e252bfSMichael Neumann 				&t_h);
242057e252bfSMichael Neumann 		else
242157e252bfSMichael Neumann 			ret = r600_calculate_at(
242257e252bfSMichael Neumann 				1000 * (i + 1),
242357e252bfSMichael Neumann 				100 * R600_AH_DFLT,
242457e252bfSMichael Neumann 				state->performance_levels[i + 1].sclk,
242557e252bfSMichael Neumann 				state->performance_levels[i].sclk,
242657e252bfSMichael Neumann 				&t_l,
242757e252bfSMichael Neumann 				&t_h);
242857e252bfSMichael Neumann 
242957e252bfSMichael Neumann 		if (ret) {
243057e252bfSMichael Neumann 			t_h = (i + 1) * 1000 - 50 * R600_AH_DFLT;
243157e252bfSMichael Neumann 			t_l = (i + 1) * 1000 + 50 * R600_AH_DFLT;
243257e252bfSMichael Neumann 		}
243357e252bfSMichael Neumann 
243457e252bfSMichael Neumann 		a_t = be32_to_cpu(smc_state->levels[i].aT) & ~CG_R_MASK;
243557e252bfSMichael Neumann 		a_t |= CG_R(t_l * pi->bsp / 20000);
243657e252bfSMichael Neumann 		smc_state->levels[i].aT = cpu_to_be32(a_t);
243757e252bfSMichael Neumann 
243857e252bfSMichael Neumann 		high_bsp = (i == state->performance_level_count - 2) ?
243957e252bfSMichael Neumann 			pi->pbsp : pi->bsp;
244057e252bfSMichael Neumann 
244157e252bfSMichael Neumann 		a_t = CG_R(0xffff) | CG_L(t_h * high_bsp / 20000);
244257e252bfSMichael Neumann 		smc_state->levels[i + 1].aT = cpu_to_be32(a_t);
244357e252bfSMichael Neumann 	}
244457e252bfSMichael Neumann 
244557e252bfSMichael Neumann 	return 0;
244657e252bfSMichael Neumann }
244757e252bfSMichael Neumann 
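/*
 * Fill in the DPM2 power-containment parameters for each level: write the
 * scaled power boost limit to SMC RAM and derive MaxPS (maximum pulse
 * skipping) from the allowed sclk range between adjacent levels.
 */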
244857e252bfSMichael Neumann static int ni_populate_power_containment_values(struct radeon_device *rdev,
244957e252bfSMichael Neumann 						struct radeon_ps *radeon_state,
245057e252bfSMichael Neumann 						NISLANDS_SMC_SWSTATE *smc_state)
245157e252bfSMichael Neumann {
245257e252bfSMichael Neumann 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
245357e252bfSMichael Neumann 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
245457e252bfSMichael Neumann 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
245557e252bfSMichael Neumann 	struct ni_ps *state = ni_get_ps(radeon_state);
245657e252bfSMichael Neumann 	u32 prev_sclk;
245757e252bfSMichael Neumann 	u32 max_sclk;
245857e252bfSMichael Neumann 	u32 min_sclk;
245957e252bfSMichael Neumann 	int i, ret;
246057e252bfSMichael Neumann 	u32 tdp_limit;
246157e252bfSMichael Neumann 	u32 near_tdp_limit;
246257e252bfSMichael Neumann 	u32 power_boost_limit;
246357e252bfSMichael Neumann 	u8 max_ps_percent;
246457e252bfSMichael Neumann 
246557e252bfSMichael Neumann 	if (ni_pi->enable_power_containment == false)
246657e252bfSMichael Neumann 		return 0;
246757e252bfSMichael Neumann 
246857e252bfSMichael Neumann 	if (state->performance_level_count == 0)
246957e252bfSMichael Neumann 		return -EINVAL;
247057e252bfSMichael Neumann 
247157e252bfSMichael Neumann 	if (smc_state->levelCount != state->performance_level_count)
247257e252bfSMichael Neumann 		return -EINVAL;
247357e252bfSMichael Neumann 
247457e252bfSMichael Neumann 	ret = ni_calculate_adjusted_tdp_limits(rdev,
247557e252bfSMichael Neumann 					       false, /* ??? */
247657e252bfSMichael Neumann 					       rdev->pm.dpm.tdp_adjustment,
247757e252bfSMichael Neumann 					       &tdp_limit,
247857e252bfSMichael Neumann 					       &near_tdp_limit);
247957e252bfSMichael Neumann 	if (ret)
248057e252bfSMichael Neumann 		return ret;
248157e252bfSMichael Neumann 
248257e252bfSMichael Neumann 	power_boost_limit = ni_calculate_power_boost_limit(rdev, radeon_state, near_tdp_limit);
248357e252bfSMichael Neumann 
248457e252bfSMichael Neumann 	ret = rv770_write_smc_sram_dword(rdev,
248557e252bfSMichael Neumann 					 pi->state_table_start +
248657e252bfSMichael Neumann 					 offsetof(NISLANDS_SMC_STATETABLE, dpm2Params) +
248757e252bfSMichael Neumann 					 offsetof(PP_NIslands_DPM2Parameters, PowerBoostLimit),
248857e252bfSMichael Neumann 					 ni_scale_power_for_smc(power_boost_limit, ni_get_smc_power_scaling_factor(rdev)),
248957e252bfSMichael Neumann 					 pi->sram_end);
249057e252bfSMichael Neumann 	if (ret)
249157e252bfSMichael Neumann 		power_boost_limit = 0;
249257e252bfSMichael Neumann 
249357e252bfSMichael Neumann 	smc_state->levels[0].dpm2.MaxPS = 0;
249457e252bfSMichael Neumann 	smc_state->levels[0].dpm2.NearTDPDec = 0;
249557e252bfSMichael Neumann 	smc_state->levels[0].dpm2.AboveSafeInc = 0;
249657e252bfSMichael Neumann 	smc_state->levels[0].dpm2.BelowSafeInc = 0;
249757e252bfSMichael Neumann 	smc_state->levels[0].stateFlags |= power_boost_limit ? PPSMC_STATEFLAG_POWERBOOST : 0;
249857e252bfSMichael Neumann 
249957e252bfSMichael Neumann 	for (i = 1; i < state->performance_level_count; i++) {
250057e252bfSMichael Neumann 		prev_sclk = state->performance_levels[i-1].sclk;
250157e252bfSMichael Neumann 		max_sclk  = state->performance_levels[i].sclk;
250257e252bfSMichael Neumann 		max_ps_percent = (i != (state->performance_level_count - 1)) ?
250357e252bfSMichael Neumann 			NISLANDS_DPM2_MAXPS_PERCENT_M : NISLANDS_DPM2_MAXPS_PERCENT_H;
250457e252bfSMichael Neumann 
250557e252bfSMichael Neumann 		if (max_sclk < prev_sclk)
250657e252bfSMichael Neumann 			return -EINVAL;
250757e252bfSMichael Neumann 
250857e252bfSMichael Neumann 		if ((max_ps_percent == 0) || (prev_sclk == max_sclk) || eg_pi->uvd_enabled)
250957e252bfSMichael Neumann 			min_sclk = max_sclk;
251057e252bfSMichael Neumann 		else if (1 == i)
251157e252bfSMichael Neumann 			min_sclk = prev_sclk;
251257e252bfSMichael Neumann 		else
251357e252bfSMichael Neumann 			min_sclk = (prev_sclk * (u32)max_ps_percent) / 100;
251457e252bfSMichael Neumann 
251557e252bfSMichael Neumann 		if (min_sclk < state->performance_levels[0].sclk)
251657e252bfSMichael Neumann 			min_sclk = state->performance_levels[0].sclk;
251757e252bfSMichael Neumann 
251857e252bfSMichael Neumann 		if (min_sclk == 0)
251957e252bfSMichael Neumann 			return -EINVAL;
252057e252bfSMichael Neumann 
252157e252bfSMichael Neumann 		smc_state->levels[i].dpm2.MaxPS =
252257e252bfSMichael Neumann 			(u8)((NISLANDS_DPM2_MAX_PULSE_SKIP * (max_sclk - min_sclk)) / max_sclk);
252357e252bfSMichael Neumann 		smc_state->levels[i].dpm2.NearTDPDec = NISLANDS_DPM2_NEAR_TDP_DEC;
252457e252bfSMichael Neumann 		smc_state->levels[i].dpm2.AboveSafeInc = NISLANDS_DPM2_ABOVE_SAFE_INC;
252557e252bfSMichael Neumann 		smc_state->levels[i].dpm2.BelowSafeInc = NISLANDS_DPM2_BELOW_SAFE_INC;
252657e252bfSMichael Neumann 		smc_state->levels[i].stateFlags |=
252757e252bfSMichael Neumann 			((i != (state->performance_level_count - 1)) && power_boost_limit) ?
252857e252bfSMichael Neumann 			PPSMC_STATEFLAG_POWERBOOST : 0;
252957e252bfSMichael Neumann 	}
253057e252bfSMichael Neumann 
253157e252bfSMichael Neumann 	return 0;
253257e252bfSMichael Neumann }
253357e252bfSMichael Neumann 
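/*
 * Program per-level SQ power ramping: if the level's sclk is at or above
 * the ramping threshold (and the DPM2 limits fit their register fields),
 * use the ramp settings, otherwise write the full-mask defaults.
 */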
253457e252bfSMichael Neumann static int ni_populate_sq_ramping_values(struct radeon_device *rdev,
253557e252bfSMichael Neumann 					 struct radeon_ps *radeon_state,
253657e252bfSMichael Neumann 					 NISLANDS_SMC_SWSTATE *smc_state)
253757e252bfSMichael Neumann {
253857e252bfSMichael Neumann 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
253957e252bfSMichael Neumann 	struct ni_ps *state = ni_get_ps(radeon_state);
254057e252bfSMichael Neumann 	u32 sq_power_throttle;
254157e252bfSMichael Neumann 	u32 sq_power_throttle2;
254257e252bfSMichael Neumann 	bool enable_sq_ramping = ni_pi->enable_sq_ramping;
254357e252bfSMichael Neumann 	int i;
254457e252bfSMichael Neumann 
254557e252bfSMichael Neumann 	if (state->performance_level_count == 0)
254657e252bfSMichael Neumann 		return -EINVAL;
254757e252bfSMichael Neumann 
254857e252bfSMichael Neumann 	if (smc_state->levelCount != state->performance_level_count)
254957e252bfSMichael Neumann 		return -EINVAL;
255057e252bfSMichael Neumann 
255157e252bfSMichael Neumann 	if (rdev->pm.dpm.sq_ramping_threshold == 0)
255257e252bfSMichael Neumann 		return -EINVAL;
255357e252bfSMichael Neumann 
255457e252bfSMichael Neumann 	if (NISLANDS_DPM2_SQ_RAMP_MAX_POWER > (MAX_POWER_MASK >> MAX_POWER_SHIFT))
255557e252bfSMichael Neumann 		enable_sq_ramping = false;
255657e252bfSMichael Neumann 
255757e252bfSMichael Neumann 	if (NISLANDS_DPM2_SQ_RAMP_MIN_POWER > (MIN_POWER_MASK >> MIN_POWER_SHIFT))
255857e252bfSMichael Neumann 		enable_sq_ramping = false;
255957e252bfSMichael Neumann 
256057e252bfSMichael Neumann 	if (NISLANDS_DPM2_SQ_RAMP_MAX_POWER_DELTA > (MAX_POWER_DELTA_MASK >> MAX_POWER_DELTA_SHIFT))
256157e252bfSMichael Neumann 		enable_sq_ramping = false;
256257e252bfSMichael Neumann 
256357e252bfSMichael Neumann 	if (NISLANDS_DPM2_SQ_RAMP_STI_SIZE > (STI_SIZE_MASK >> STI_SIZE_SHIFT))
256457e252bfSMichael Neumann 		enable_sq_ramping = false;
256557e252bfSMichael Neumann 
2566c6f73aabSFrançois Tigeot 	if (NISLANDS_DPM2_SQ_RAMP_LTI_RATIO > (LTI_RATIO_MASK >> LTI_RATIO_SHIFT))
256757e252bfSMichael Neumann 		enable_sq_ramping = false;
256857e252bfSMichael Neumann 
256957e252bfSMichael Neumann 	for (i = 0; i < state->performance_level_count; i++) {
257057e252bfSMichael Neumann 		sq_power_throttle  = 0;
257157e252bfSMichael Neumann 		sq_power_throttle2 = 0;
257257e252bfSMichael Neumann 
257357e252bfSMichael Neumann 		if ((state->performance_levels[i].sclk >= rdev->pm.dpm.sq_ramping_threshold) &&
257457e252bfSMichael Neumann 		    enable_sq_ramping) {
257557e252bfSMichael Neumann 			sq_power_throttle |= MAX_POWER(NISLANDS_DPM2_SQ_RAMP_MAX_POWER);
257657e252bfSMichael Neumann 			sq_power_throttle |= MIN_POWER(NISLANDS_DPM2_SQ_RAMP_MIN_POWER);
257757e252bfSMichael Neumann 			sq_power_throttle2 |= MAX_POWER_DELTA(NISLANDS_DPM2_SQ_RAMP_MAX_POWER_DELTA);
257857e252bfSMichael Neumann 			sq_power_throttle2 |= STI_SIZE(NISLANDS_DPM2_SQ_RAMP_STI_SIZE);
257957e252bfSMichael Neumann 			sq_power_throttle2 |= LTI_RATIO(NISLANDS_DPM2_SQ_RAMP_LTI_RATIO);
258057e252bfSMichael Neumann 		} else {
258157e252bfSMichael Neumann 			sq_power_throttle |= MAX_POWER_MASK | MIN_POWER_MASK;
258257e252bfSMichael Neumann 			sq_power_throttle2 |= MAX_POWER_DELTA_MASK | STI_SIZE_MASK | LTI_RATIO_MASK;
258357e252bfSMichael Neumann 		}
258457e252bfSMichael Neumann 
258557e252bfSMichael Neumann 		smc_state->levels[i].SQPowerThrottle   = cpu_to_be32(sq_power_throttle);
258657e252bfSMichael Neumann 		smc_state->levels[i].SQPowerThrottle_2 = cpu_to_be32(sq_power_throttle2);
258757e252bfSMichael Neumann 	}
258857e252bfSMichael Neumann 
258957e252bfSMichael Neumann 	return 0;
259057e252bfSMichael Neumann }
259157e252bfSMichael Neumann 
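/*
 * Tell the SMC to activate or deactivate TDP clamping (skipped for UVD
 * states when enabling) and track the result in ni_pi->pc_enabled.
 */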
259257e252bfSMichael Neumann static int ni_enable_power_containment(struct radeon_device *rdev,
259357e252bfSMichael Neumann 				       struct radeon_ps *radeon_new_state,
259457e252bfSMichael Neumann 				       bool enable)
259557e252bfSMichael Neumann {
259657e252bfSMichael Neumann 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
259757e252bfSMichael Neumann 	PPSMC_Result smc_result;
259857e252bfSMichael Neumann 	int ret = 0;
259957e252bfSMichael Neumann 
260057e252bfSMichael Neumann 	if (ni_pi->enable_power_containment) {
260157e252bfSMichael Neumann 		if (enable) {
260257e252bfSMichael Neumann 			if (!r600_is_uvd_state(radeon_new_state->class, radeon_new_state->class2)) {
260357e252bfSMichael Neumann 				smc_result = rv770_send_msg_to_smc(rdev, PPSMC_TDPClampingActive);
260457e252bfSMichael Neumann 				if (smc_result != PPSMC_Result_OK) {
260557e252bfSMichael Neumann 					ret = -EINVAL;
260657e252bfSMichael Neumann 					ni_pi->pc_enabled = false;
260757e252bfSMichael Neumann 				} else {
260857e252bfSMichael Neumann 					ni_pi->pc_enabled = true;
260957e252bfSMichael Neumann 				}
261057e252bfSMichael Neumann 			}
261157e252bfSMichael Neumann 		} else {
261257e252bfSMichael Neumann 			smc_result = rv770_send_msg_to_smc(rdev, PPSMC_TDPClampingInactive);
261357e252bfSMichael Neumann 			if (smc_result != PPSMC_Result_OK)
261457e252bfSMichael Neumann 				ret = -EINVAL;
261557e252bfSMichael Neumann 			ni_pi->pc_enabled = false;
261657e252bfSMichael Neumann 		}
261757e252bfSMichael Neumann 	}
261857e252bfSMichael Neumann 
261957e252bfSMichael Neumann 	return ret;
262057e252bfSMichael Neumann }
262157e252bfSMichael Neumann 
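/*
 * Convert a full radeon power state into an SMC software state: one SMC
 * hardware level per performance level, plus per-level watermarks, arb
 * and AC-timing indices, DPM2 power containment, SQ ramping and aT values.
 */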
262257e252bfSMichael Neumann static int ni_convert_power_state_to_smc(struct radeon_device *rdev,
262357e252bfSMichael Neumann 					 struct radeon_ps *radeon_state,
262457e252bfSMichael Neumann 					 NISLANDS_SMC_SWSTATE *smc_state)
262557e252bfSMichael Neumann {
262657e252bfSMichael Neumann 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
262757e252bfSMichael Neumann 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
262857e252bfSMichael Neumann 	struct ni_ps *state = ni_get_ps(radeon_state);
262957e252bfSMichael Neumann 	int i, ret;
263057e252bfSMichael Neumann 	u32 threshold = state->performance_levels[state->performance_level_count - 1].sclk * 100 / 100;
263157e252bfSMichael Neumann 
263257e252bfSMichael Neumann 	if (!(radeon_state->caps & ATOM_PPLIB_DISALLOW_ON_DC))
263357e252bfSMichael Neumann 		smc_state->flags |= PPSMC_SWSTATE_FLAG_DC;
263457e252bfSMichael Neumann 
263557e252bfSMichael Neumann 	smc_state->levelCount = 0;
263657e252bfSMichael Neumann 
263757e252bfSMichael Neumann 	if (state->performance_level_count > NISLANDS_MAX_SMC_PERFORMANCE_LEVELS_PER_SWSTATE)
263857e252bfSMichael Neumann 		return -EINVAL;
263957e252bfSMichael Neumann 
264057e252bfSMichael Neumann 	for (i = 0; i < state->performance_level_count; i++) {
264157e252bfSMichael Neumann 		ret = ni_convert_power_level_to_smc(rdev, &state->performance_levels[i],
264257e252bfSMichael Neumann 						    &smc_state->levels[i]);
264357e252bfSMichael Neumann 		smc_state->levels[i].arbRefreshState =
264457e252bfSMichael Neumann 			(u8)(NISLANDS_DRIVER_STATE_ARB_INDEX + i);
264557e252bfSMichael Neumann 
264657e252bfSMichael Neumann 		if (ret)
264757e252bfSMichael Neumann 			return ret;
264857e252bfSMichael Neumann 
264957e252bfSMichael Neumann 		if (ni_pi->enable_power_containment)
265057e252bfSMichael Neumann 			smc_state->levels[i].displayWatermark =
265157e252bfSMichael Neumann 				(state->performance_levels[i].sclk < threshold) ?
265257e252bfSMichael Neumann 				PPSMC_DISPLAY_WATERMARK_LOW : PPSMC_DISPLAY_WATERMARK_HIGH;
265357e252bfSMichael Neumann 		else
265457e252bfSMichael Neumann 			smc_state->levels[i].displayWatermark = (i < 2) ?
265557e252bfSMichael Neumann 				PPSMC_DISPLAY_WATERMARK_LOW : PPSMC_DISPLAY_WATERMARK_HIGH;
265657e252bfSMichael Neumann 
265757e252bfSMichael Neumann 		if (eg_pi->dynamic_ac_timing)
265857e252bfSMichael Neumann 			smc_state->levels[i].ACIndex = NISLANDS_MCREGISTERTABLE_FIRST_DRIVERSTATE_SLOT + i;
265957e252bfSMichael Neumann 		else
266057e252bfSMichael Neumann 			smc_state->levels[i].ACIndex = 0;
266157e252bfSMichael Neumann 
266257e252bfSMichael Neumann 		smc_state->levelCount++;
266357e252bfSMichael Neumann 	}
266457e252bfSMichael Neumann 
266557e252bfSMichael Neumann 	rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_watermark_threshold,
266657e252bfSMichael Neumann 				      cpu_to_be32(threshold / 512));
266757e252bfSMichael Neumann 
266857e252bfSMichael Neumann 	ni_populate_smc_sp(rdev, radeon_state, smc_state);
266957e252bfSMichael Neumann 
267057e252bfSMichael Neumann 	ret = ni_populate_power_containment_values(rdev, radeon_state, smc_state);
267157e252bfSMichael Neumann 	if (ret)
267257e252bfSMichael Neumann 		ni_pi->enable_power_containment = false;
267357e252bfSMichael Neumann 
267457e252bfSMichael Neumann 	ret = ni_populate_sq_ramping_values(rdev, radeon_state, smc_state);
267557e252bfSMichael Neumann 	if (ret)
267657e252bfSMichael Neumann 		ni_pi->enable_sq_ramping = false;
267757e252bfSMichael Neumann 
267857e252bfSMichael Neumann 	return ni_populate_smc_t(rdev, radeon_state, smc_state);
267957e252bfSMichael Neumann }
268057e252bfSMichael Neumann 
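/*
 * Convert the requested power state and copy the resulting software state
 * into the driverState slot of the SMC state table.
 */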
268157e252bfSMichael Neumann static int ni_upload_sw_state(struct radeon_device *rdev,
268257e252bfSMichael Neumann 			      struct radeon_ps *radeon_new_state)
268357e252bfSMichael Neumann {
268457e252bfSMichael Neumann 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
268557e252bfSMichael Neumann 	u16 address = pi->state_table_start +
268657e252bfSMichael Neumann 		offsetof(NISLANDS_SMC_STATETABLE, driverState);
268757e252bfSMichael Neumann 	u16 state_size = sizeof(NISLANDS_SMC_SWSTATE) +
268857e252bfSMichael Neumann 		((NISLANDS_MAX_SMC_PERFORMANCE_LEVELS_PER_SWSTATE - 1) * sizeof(NISLANDS_SMC_HW_PERFORMANCE_LEVEL));
268957e252bfSMichael Neumann 	int ret;
269057e252bfSMichael Neumann 	NISLANDS_SMC_SWSTATE *smc_state = kzalloc(state_size, GFP_KERNEL);
269157e252bfSMichael Neumann 
269257e252bfSMichael Neumann 	if (smc_state == NULL)
269357e252bfSMichael Neumann 		return -ENOMEM;
269457e252bfSMichael Neumann 
269557e252bfSMichael Neumann 	ret = ni_convert_power_state_to_smc(rdev, radeon_new_state, smc_state);
269657e252bfSMichael Neumann 	if (ret)
269757e252bfSMichael Neumann 		goto done;
269857e252bfSMichael Neumann 
269957e252bfSMichael Neumann 	ret = rv770_copy_bytes_to_smc(rdev, address, (u8 *)smc_state, state_size, pi->sram_end);
270057e252bfSMichael Neumann 
270157e252bfSMichael Neumann done:
270257e252bfSMichael Neumann 	kfree(smc_state);
270357e252bfSMichael Neumann 
270457e252bfSMichael Neumann 	return ret;
270557e252bfSMichael Neumann }
270657e252bfSMichael Neumann 
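/*
 * Append derived entries to the MC register table: the EMRS/MRS/MRS1
 * command registers are synthesized from the current register contents
 * and the per-entry MC_SEQ_MISC1 / MC_SEQ_RESERVE_M data.
 */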
270757e252bfSMichael Neumann static int ni_set_mc_special_registers(struct radeon_device *rdev,
270857e252bfSMichael Neumann 				       struct ni_mc_reg_table *table)
270957e252bfSMichael Neumann {
271057e252bfSMichael Neumann 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
271157e252bfSMichael Neumann 	u8 i, j, k;
271257e252bfSMichael Neumann 	u32 temp_reg;
271357e252bfSMichael Neumann 
271457e252bfSMichael Neumann 	for (i = 0, j = table->last; i < table->last; i++) {
271557e252bfSMichael Neumann 		switch (table->mc_reg_address[i].s1) {
271657e252bfSMichael Neumann 		case MC_SEQ_MISC1 >> 2:
271757e252bfSMichael Neumann 			if (j >= SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE)
271857e252bfSMichael Neumann 				return -EINVAL;
271957e252bfSMichael Neumann 			temp_reg = RREG32(MC_PMG_CMD_EMRS);
272057e252bfSMichael Neumann 			table->mc_reg_address[j].s1 = MC_PMG_CMD_EMRS >> 2;
272157e252bfSMichael Neumann 			table->mc_reg_address[j].s0 = MC_SEQ_PMG_CMD_EMRS_LP >> 2;
272257e252bfSMichael Neumann 			for (k = 0; k < table->num_entries; k++)
272357e252bfSMichael Neumann 				table->mc_reg_table_entry[k].mc_data[j] =
272457e252bfSMichael Neumann 					((temp_reg & 0xffff0000)) |
272557e252bfSMichael Neumann 					((table->mc_reg_table_entry[k].mc_data[i] & 0xffff0000) >> 16);
272657e252bfSMichael Neumann 			j++;
272757e252bfSMichael Neumann 			if (j >= SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE)
272857e252bfSMichael Neumann 				return -EINVAL;
272957e252bfSMichael Neumann 
273057e252bfSMichael Neumann 			temp_reg = RREG32(MC_PMG_CMD_MRS);
273157e252bfSMichael Neumann 			table->mc_reg_address[j].s1 = MC_PMG_CMD_MRS >> 2;
273257e252bfSMichael Neumann 			table->mc_reg_address[j].s0 = MC_SEQ_PMG_CMD_MRS_LP >> 2;
273357e252bfSMichael Neumann 			for(k = 0; k < table->num_entries; k++) {
273457e252bfSMichael Neumann 				table->mc_reg_table_entry[k].mc_data[j] =
273557e252bfSMichael Neumann 					(temp_reg & 0xffff0000) |
273657e252bfSMichael Neumann 					(table->mc_reg_table_entry[k].mc_data[i] & 0x0000ffff);
273757e252bfSMichael Neumann 				if (!pi->mem_gddr5)
273857e252bfSMichael Neumann 					table->mc_reg_table_entry[k].mc_data[j] |= 0x100;
273957e252bfSMichael Neumann 			}
274057e252bfSMichael Neumann 			j++;
274157e252bfSMichael Neumann 			if (j > SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE)
274257e252bfSMichael Neumann 				return -EINVAL;
274357e252bfSMichael Neumann 			break;
274457e252bfSMichael Neumann 		case MC_SEQ_RESERVE_M >> 2:
274557e252bfSMichael Neumann 			temp_reg = RREG32(MC_PMG_CMD_MRS1);
274657e252bfSMichael Neumann 			table->mc_reg_address[j].s1 = MC_PMG_CMD_MRS1 >> 2;
274757e252bfSMichael Neumann 			table->mc_reg_address[j].s0 = MC_SEQ_PMG_CMD_MRS1_LP >> 2;
274857e252bfSMichael Neumann 			for (k = 0; k < table->num_entries; k++)
274957e252bfSMichael Neumann 				table->mc_reg_table_entry[k].mc_data[j] =
275057e252bfSMichael Neumann 					(temp_reg & 0xffff0000) |
275157e252bfSMichael Neumann 					(table->mc_reg_table_entry[k].mc_data[i] & 0x0000ffff);
275257e252bfSMichael Neumann 			j++;
275357e252bfSMichael Neumann 			if (j > SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE)
275457e252bfSMichael Neumann 				return -EINVAL;
275557e252bfSMichael Neumann 			break;
275657e252bfSMichael Neumann 		default:
275757e252bfSMichael Neumann 			break;
275857e252bfSMichael Neumann 		}
275957e252bfSMichael Neumann 	}
276057e252bfSMichael Neumann 
276157e252bfSMichael Neumann 	table->last = j;
276257e252bfSMichael Neumann 
276357e252bfSMichael Neumann 	return 0;
276457e252bfSMichael Neumann }
276557e252bfSMichael Neumann 
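/* Map an MC register offset to its shadow (_LP) register, if one exists. */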
276657e252bfSMichael Neumann static bool ni_check_s0_mc_reg_index(u16 in_reg, u16 *out_reg)
276757e252bfSMichael Neumann {
276857e252bfSMichael Neumann 	bool result = true;
276957e252bfSMichael Neumann 
277057e252bfSMichael Neumann 	switch (in_reg) {
277157e252bfSMichael Neumann 	case  MC_SEQ_RAS_TIMING >> 2:
277257e252bfSMichael Neumann 		*out_reg = MC_SEQ_RAS_TIMING_LP >> 2;
277357e252bfSMichael Neumann 		break;
277457e252bfSMichael Neumann 	case MC_SEQ_CAS_TIMING >> 2:
277557e252bfSMichael Neumann 		*out_reg = MC_SEQ_CAS_TIMING_LP >> 2;
277657e252bfSMichael Neumann 		break;
277757e252bfSMichael Neumann 	case MC_SEQ_MISC_TIMING >> 2:
277857e252bfSMichael Neumann 		*out_reg = MC_SEQ_MISC_TIMING_LP >> 2;
277957e252bfSMichael Neumann 		break;
278057e252bfSMichael Neumann 	case MC_SEQ_MISC_TIMING2 >> 2:
278157e252bfSMichael Neumann 		*out_reg = MC_SEQ_MISC_TIMING2_LP >> 2;
278257e252bfSMichael Neumann 		break;
278357e252bfSMichael Neumann 	case MC_SEQ_RD_CTL_D0 >> 2:
278457e252bfSMichael Neumann 		*out_reg = MC_SEQ_RD_CTL_D0_LP >> 2;
278557e252bfSMichael Neumann 		break;
278657e252bfSMichael Neumann 	case MC_SEQ_RD_CTL_D1 >> 2:
278757e252bfSMichael Neumann 		*out_reg = MC_SEQ_RD_CTL_D1_LP >> 2;
278857e252bfSMichael Neumann 		break;
278957e252bfSMichael Neumann 	case MC_SEQ_WR_CTL_D0 >> 2:
279057e252bfSMichael Neumann 		*out_reg = MC_SEQ_WR_CTL_D0_LP >> 2;
279157e252bfSMichael Neumann 		break;
279257e252bfSMichael Neumann 	case MC_SEQ_WR_CTL_D1 >> 2:
279357e252bfSMichael Neumann 		*out_reg = MC_SEQ_WR_CTL_D1_LP >> 2;
279457e252bfSMichael Neumann 		break;
279557e252bfSMichael Neumann 	case MC_PMG_CMD_EMRS >> 2:
279657e252bfSMichael Neumann 		*out_reg = MC_SEQ_PMG_CMD_EMRS_LP >> 2;
279757e252bfSMichael Neumann 		break;
279857e252bfSMichael Neumann 	case MC_PMG_CMD_MRS >> 2:
279957e252bfSMichael Neumann 		*out_reg = MC_SEQ_PMG_CMD_MRS_LP >> 2;
280057e252bfSMichael Neumann 		break;
280157e252bfSMichael Neumann 	case MC_PMG_CMD_MRS1 >> 2:
280257e252bfSMichael Neumann 		*out_reg = MC_SEQ_PMG_CMD_MRS1_LP >> 2;
280357e252bfSMichael Neumann 		break;
280457e252bfSMichael Neumann 	case MC_SEQ_PMG_TIMING >> 2:
280557e252bfSMichael Neumann 		*out_reg = MC_SEQ_PMG_TIMING_LP >> 2;
280657e252bfSMichael Neumann 		break;
280757e252bfSMichael Neumann 	case MC_PMG_CMD_MRS2 >> 2:
280857e252bfSMichael Neumann 		*out_reg = MC_SEQ_PMG_CMD_MRS2_LP >> 2;
280957e252bfSMichael Neumann 		break;
281057e252bfSMichael Neumann 	default:
281157e252bfSMichael Neumann 		result = false;
281257e252bfSMichael Neumann 		break;
281357e252bfSMichael Neumann 	}
281457e252bfSMichael Neumann 
281557e252bfSMichael Neumann 	return result;
281657e252bfSMichael Neumann }
281757e252bfSMichael Neumann 
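/* Mark the registers whose values actually differ between table entries. */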
281857e252bfSMichael Neumann static void ni_set_valid_flag(struct ni_mc_reg_table *table)
281957e252bfSMichael Neumann {
282057e252bfSMichael Neumann 	u8 i, j;
282157e252bfSMichael Neumann 
282257e252bfSMichael Neumann 	for (i = 0; i < table->last; i++) {
282357e252bfSMichael Neumann 		for (j = 1; j < table->num_entries; j++) {
282457e252bfSMichael Neumann 			if (table->mc_reg_table_entry[j-1].mc_data[i] != table->mc_reg_table_entry[j].mc_data[i]) {
282557e252bfSMichael Neumann 				table->valid_flag |= 1 << i;
282657e252bfSMichael Neumann 				break;
282757e252bfSMichael Neumann 			}
282857e252bfSMichael Neumann 		}
282957e252bfSMichael Neumann 	}
283057e252bfSMichael Neumann }
283157e252bfSMichael Neumann 
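/*
 * For every register in the table, use the _LP register as the s0
 * address when one exists; otherwise fall back to the s1 address.
 */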
283257e252bfSMichael Neumann static void ni_set_s0_mc_reg_index(struct ni_mc_reg_table *table)
283357e252bfSMichael Neumann {
283457e252bfSMichael Neumann 	u32 i;
283557e252bfSMichael Neumann 	u16 address;
283657e252bfSMichael Neumann 
283757e252bfSMichael Neumann 	for (i = 0; i < table->last; i++)
283857e252bfSMichael Neumann 		table->mc_reg_address[i].s0 =
283957e252bfSMichael Neumann 			ni_check_s0_mc_reg_index(table->mc_reg_address[i].s1, &address) ?
284057e252bfSMichael Neumann 			address : table->mc_reg_address[i].s1;
284157e252bfSMichael Neumann }
284257e252bfSMichael Neumann 
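/*
 * Copy the VBIOS-provided MC register table into the driver's table,
 * rejecting tables that exceed SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE
 * registers or MAX_AC_TIMING_ENTRIES entries.
 */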
284357e252bfSMichael Neumann static int ni_copy_vbios_mc_reg_table(struct atom_mc_reg_table *table,
284457e252bfSMichael Neumann 				      struct ni_mc_reg_table *ni_table)
284557e252bfSMichael Neumann {
284657e252bfSMichael Neumann 	u8 i, j;
284757e252bfSMichael Neumann 
284857e252bfSMichael Neumann 	if (table->last > SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE)
284957e252bfSMichael Neumann 		return -EINVAL;
285057e252bfSMichael Neumann 	if (table->num_entries > MAX_AC_TIMING_ENTRIES)
285157e252bfSMichael Neumann 		return -EINVAL;
285257e252bfSMichael Neumann 
285357e252bfSMichael Neumann 	for (i = 0; i < table->last; i++)
285457e252bfSMichael Neumann 		ni_table->mc_reg_address[i].s1 = table->mc_reg_address[i].s1;
285557e252bfSMichael Neumann 	ni_table->last = table->last;
285657e252bfSMichael Neumann 
285757e252bfSMichael Neumann 	for (i = 0; i < table->num_entries; i++) {
285857e252bfSMichael Neumann 		ni_table->mc_reg_table_entry[i].mclk_max =
285957e252bfSMichael Neumann 			table->mc_reg_table_entry[i].mclk_max;
286057e252bfSMichael Neumann 		for (j = 0; j < table->last; j++)
286157e252bfSMichael Neumann 			ni_table->mc_reg_table_entry[i].mc_data[j] =
286257e252bfSMichael Neumann 				table->mc_reg_table_entry[i].mc_data[j];
286357e252bfSMichael Neumann 	}
286457e252bfSMichael Neumann 	ni_table->num_entries = table->num_entries;
286557e252bfSMichael Neumann 
286657e252bfSMichael Neumann 	return 0;
286757e252bfSMichael Neumann }
286857e252bfSMichael Neumann 
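/*
 * Build the driver MC register table: snapshot the live MC sequencer
 * registers into their _LP shadows, pull the AC timing table from the
 * VBIOS, fix up the s0 addresses and special registers, and flag the
 * registers that vary between entries.
 */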
286957e252bfSMichael Neumann static int ni_initialize_mc_reg_table(struct radeon_device *rdev)
287057e252bfSMichael Neumann {
287157e252bfSMichael Neumann 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
287257e252bfSMichael Neumann 	int ret;
287357e252bfSMichael Neumann 	struct atom_mc_reg_table *table;
287457e252bfSMichael Neumann 	struct ni_mc_reg_table *ni_table = &ni_pi->mc_reg_table;
287557e252bfSMichael Neumann 	u8 module_index = rv770_get_memory_module_index(rdev);
287657e252bfSMichael Neumann 
287757e252bfSMichael Neumann 	table = kzalloc(sizeof(struct atom_mc_reg_table), GFP_KERNEL);
287857e252bfSMichael Neumann 	if (!table)
287957e252bfSMichael Neumann 		return -ENOMEM;
288057e252bfSMichael Neumann 
288157e252bfSMichael Neumann 	WREG32(MC_SEQ_RAS_TIMING_LP, RREG32(MC_SEQ_RAS_TIMING));
288257e252bfSMichael Neumann 	WREG32(MC_SEQ_CAS_TIMING_LP, RREG32(MC_SEQ_CAS_TIMING));
288357e252bfSMichael Neumann 	WREG32(MC_SEQ_MISC_TIMING_LP, RREG32(MC_SEQ_MISC_TIMING));
288457e252bfSMichael Neumann 	WREG32(MC_SEQ_MISC_TIMING2_LP, RREG32(MC_SEQ_MISC_TIMING2));
288557e252bfSMichael Neumann 	WREG32(MC_SEQ_PMG_CMD_EMRS_LP, RREG32(MC_PMG_CMD_EMRS));
288657e252bfSMichael Neumann 	WREG32(MC_SEQ_PMG_CMD_MRS_LP, RREG32(MC_PMG_CMD_MRS));
288757e252bfSMichael Neumann 	WREG32(MC_SEQ_PMG_CMD_MRS1_LP, RREG32(MC_PMG_CMD_MRS1));
288857e252bfSMichael Neumann 	WREG32(MC_SEQ_WR_CTL_D0_LP, RREG32(MC_SEQ_WR_CTL_D0));
288957e252bfSMichael Neumann 	WREG32(MC_SEQ_WR_CTL_D1_LP, RREG32(MC_SEQ_WR_CTL_D1));
289057e252bfSMichael Neumann 	WREG32(MC_SEQ_RD_CTL_D0_LP, RREG32(MC_SEQ_RD_CTL_D0));
289157e252bfSMichael Neumann 	WREG32(MC_SEQ_RD_CTL_D1_LP, RREG32(MC_SEQ_RD_CTL_D1));
289257e252bfSMichael Neumann 	WREG32(MC_SEQ_PMG_TIMING_LP, RREG32(MC_SEQ_PMG_TIMING));
289357e252bfSMichael Neumann 	WREG32(MC_SEQ_PMG_CMD_MRS2_LP, RREG32(MC_PMG_CMD_MRS2));
289457e252bfSMichael Neumann 
289557e252bfSMichael Neumann 	ret = radeon_atom_init_mc_reg_table(rdev, module_index, table);
289657e252bfSMichael Neumann 
289757e252bfSMichael Neumann 	if (ret)
289857e252bfSMichael Neumann 		goto init_mc_done;
289957e252bfSMichael Neumann 
290057e252bfSMichael Neumann 	ret = ni_copy_vbios_mc_reg_table(table, ni_table);
290157e252bfSMichael Neumann 
290257e252bfSMichael Neumann 	if (ret)
290357e252bfSMichael Neumann 		goto init_mc_done;
290457e252bfSMichael Neumann 
290557e252bfSMichael Neumann 	ni_set_s0_mc_reg_index(ni_table);
290657e252bfSMichael Neumann 
290757e252bfSMichael Neumann 	ret = ni_set_mc_special_registers(rdev, ni_table);
290857e252bfSMichael Neumann 
290957e252bfSMichael Neumann 	if (ret)
291057e252bfSMichael Neumann 		goto init_mc_done;
291157e252bfSMichael Neumann 
291257e252bfSMichael Neumann 	ni_set_valid_flag(ni_table);
291357e252bfSMichael Neumann 
291457e252bfSMichael Neumann init_mc_done:
291557e252bfSMichael Neumann 	kfree(table);
291657e252bfSMichael Neumann 
291757e252bfSMichael Neumann 	return ret;
291857e252bfSMichael Neumann }
291957e252bfSMichael Neumann 
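/*
 * Pack only the registers flagged in valid_flag into the SMC register
 * table, converting the addresses to the big-endian layout the SMC
 * expects and capping the count at SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE.
 */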
292057e252bfSMichael Neumann static void ni_populate_mc_reg_addresses(struct radeon_device *rdev,
292157e252bfSMichael Neumann 					 SMC_NIslands_MCRegisters *mc_reg_table)
292257e252bfSMichael Neumann {
292357e252bfSMichael Neumann 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
292457e252bfSMichael Neumann 	u32 i, j;
292557e252bfSMichael Neumann 
292657e252bfSMichael Neumann 	for (i = 0, j = 0; j < ni_pi->mc_reg_table.last; j++) {
292757e252bfSMichael Neumann 		if (ni_pi->mc_reg_table.valid_flag & (1 << j)) {
292857e252bfSMichael Neumann 			if (i >= SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE)
292957e252bfSMichael Neumann 				break;
293057e252bfSMichael Neumann 			mc_reg_table->address[i].s0 =
293157e252bfSMichael Neumann 				cpu_to_be16(ni_pi->mc_reg_table.mc_reg_address[j].s0);
293257e252bfSMichael Neumann 			mc_reg_table->address[i].s1 =
293357e252bfSMichael Neumann 				cpu_to_be16(ni_pi->mc_reg_table.mc_reg_address[j].s1);
293457e252bfSMichael Neumann 			i++;
293557e252bfSMichael Neumann 		}
293657e252bfSMichael Neumann 	}
293757e252bfSMichael Neumann 	mc_reg_table->last = (u8)i;
293857e252bfSMichael Neumann }
293957e252bfSMichael Neumann 
294057e252bfSMichael Neumann 
294157e252bfSMichael Neumann static void ni_convert_mc_registers(struct ni_mc_reg_entry *entry,
294257e252bfSMichael Neumann 				    SMC_NIslands_MCRegisterSet *data,
294357e252bfSMichael Neumann 				    u32 num_entries, u32 valid_flag)
294457e252bfSMichael Neumann {
294557e252bfSMichael Neumann 	u32 i, j;
294657e252bfSMichael Neumann 
294757e252bfSMichael Neumann 	for (i = 0, j = 0; j < num_entries; j++) {
294857e252bfSMichael Neumann 		if (valid_flag & (1 << j)) {
294957e252bfSMichael Neumann 			data->value[i] = cpu_to_be32(entry->mc_data[j]);
295057e252bfSMichael Neumann 			i++;
295157e252bfSMichael Neumann 		}
295257e252bfSMichael Neumann 	}
295357e252bfSMichael Neumann }
295457e252bfSMichael Neumann 
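/*
 * Pick the first AC timing entry whose mclk_max covers the requested
 * memory clock (falling back to the last entry) and convert it to the
 * big-endian SMC register set.
 */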
295557e252bfSMichael Neumann static void ni_convert_mc_reg_table_entry_to_smc(struct radeon_device *rdev,
295657e252bfSMichael Neumann 						 struct rv7xx_pl *pl,
295757e252bfSMichael Neumann 						 SMC_NIslands_MCRegisterSet *mc_reg_table_data)
295857e252bfSMichael Neumann {
295957e252bfSMichael Neumann 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
296057e252bfSMichael Neumann 	u32 i = 0;
296157e252bfSMichael Neumann 
296257e252bfSMichael Neumann 	for (i = 0; i < ni_pi->mc_reg_table.num_entries; i++) {
296357e252bfSMichael Neumann 		if (pl->mclk <= ni_pi->mc_reg_table.mc_reg_table_entry[i].mclk_max)
296457e252bfSMichael Neumann 			break;
296557e252bfSMichael Neumann 	}
296657e252bfSMichael Neumann 
296757e252bfSMichael Neumann 	if ((i == ni_pi->mc_reg_table.num_entries) && (i > 0))
296857e252bfSMichael Neumann 		--i;
296957e252bfSMichael Neumann 
297057e252bfSMichael Neumann 	ni_convert_mc_registers(&ni_pi->mc_reg_table.mc_reg_table_entry[i],
297157e252bfSMichael Neumann 				mc_reg_table_data,
297257e252bfSMichael Neumann 				ni_pi->mc_reg_table.last,
297357e252bfSMichael Neumann 				ni_pi->mc_reg_table.valid_flag);
297457e252bfSMichael Neumann }
297557e252bfSMichael Neumann 
297657e252bfSMichael Neumann static void ni_convert_mc_reg_table_to_smc(struct radeon_device *rdev,
297757e252bfSMichael Neumann 					   struct radeon_ps *radeon_state,
297857e252bfSMichael Neumann 					   SMC_NIslands_MCRegisters *mc_reg_table)
297957e252bfSMichael Neumann {
298057e252bfSMichael Neumann 	struct ni_ps *state = ni_get_ps(radeon_state);
298157e252bfSMichael Neumann 	int i;
298257e252bfSMichael Neumann 
298357e252bfSMichael Neumann 	for (i = 0; i < state->performance_level_count; i++) {
298457e252bfSMichael Neumann 		ni_convert_mc_reg_table_entry_to_smc(rdev,
298557e252bfSMichael Neumann 						     &state->performance_levels[i],
298657e252bfSMichael Neumann 						     &mc_reg_table->data[NISLANDS_MCREGISTERTABLE_FIRST_DRIVERSTATE_SLOT + i]);
298757e252bfSMichael Neumann 	}
298857e252bfSMichael Neumann }
298957e252bfSMichael Neumann 
299057e252bfSMichael Neumann static int ni_populate_mc_reg_table(struct radeon_device *rdev,
299157e252bfSMichael Neumann 				    struct radeon_ps *radeon_boot_state)
299257e252bfSMichael Neumann {
299357e252bfSMichael Neumann 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
299457e252bfSMichael Neumann 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
299557e252bfSMichael Neumann 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
299657e252bfSMichael Neumann 	struct ni_ps *boot_state = ni_get_ps(radeon_boot_state);
299757e252bfSMichael Neumann 	SMC_NIslands_MCRegisters *mc_reg_table = &ni_pi->smc_mc_reg_table;
299857e252bfSMichael Neumann 
299957e252bfSMichael Neumann 	memset(mc_reg_table, 0, sizeof(SMC_NIslands_MCRegisters));
300057e252bfSMichael Neumann 
300157e252bfSMichael Neumann 	rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_seq_index, 1);
300257e252bfSMichael Neumann 
300357e252bfSMichael Neumann 	ni_populate_mc_reg_addresses(rdev, mc_reg_table);
300457e252bfSMichael Neumann 
300557e252bfSMichael Neumann 	ni_convert_mc_reg_table_entry_to_smc(rdev, &boot_state->performance_levels[0],
300657e252bfSMichael Neumann 					     &mc_reg_table->data[0]);
300757e252bfSMichael Neumann 
300857e252bfSMichael Neumann 	ni_convert_mc_registers(&ni_pi->mc_reg_table.mc_reg_table_entry[0],
300957e252bfSMichael Neumann 				&mc_reg_table->data[1],
301057e252bfSMichael Neumann 				ni_pi->mc_reg_table.last,
301157e252bfSMichael Neumann 				ni_pi->mc_reg_table.valid_flag);
301257e252bfSMichael Neumann 
301357e252bfSMichael Neumann 	ni_convert_mc_reg_table_to_smc(rdev, radeon_boot_state, mc_reg_table);
301457e252bfSMichael Neumann 
301557e252bfSMichael Neumann 	return rv770_copy_bytes_to_smc(rdev, eg_pi->mc_reg_table_start,
301657e252bfSMichael Neumann 				       (u8 *)mc_reg_table,
301757e252bfSMichael Neumann 				       sizeof(SMC_NIslands_MCRegisters),
301857e252bfSMichael Neumann 				       pi->sram_end);
301957e252bfSMichael Neumann }
302057e252bfSMichael Neumann 
302157e252bfSMichael Neumann static int ni_upload_mc_reg_table(struct radeon_device *rdev,
302257e252bfSMichael Neumann 				  struct radeon_ps *radeon_new_state)
302357e252bfSMichael Neumann {
302457e252bfSMichael Neumann 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
302557e252bfSMichael Neumann 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
302657e252bfSMichael Neumann 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
302757e252bfSMichael Neumann 	struct ni_ps *ni_new_state = ni_get_ps(radeon_new_state);
302857e252bfSMichael Neumann 	SMC_NIslands_MCRegisters *mc_reg_table = &ni_pi->smc_mc_reg_table;
302957e252bfSMichael Neumann 	u16 address;
303057e252bfSMichael Neumann 
303157e252bfSMichael Neumann 	memset(mc_reg_table, 0, sizeof(SMC_NIslands_MCRegisters));
303257e252bfSMichael Neumann 
303357e252bfSMichael Neumann 	ni_convert_mc_reg_table_to_smc(rdev, radeon_new_state, mc_reg_table);
303457e252bfSMichael Neumann 
303557e252bfSMichael Neumann 	address = eg_pi->mc_reg_table_start +
303657e252bfSMichael Neumann 		(u16)offsetof(SMC_NIslands_MCRegisters, data[NISLANDS_MCREGISTERTABLE_FIRST_DRIVERSTATE_SLOT]);
303757e252bfSMichael Neumann 
303857e252bfSMichael Neumann 	return rv770_copy_bytes_to_smc(rdev, address,
303957e252bfSMichael Neumann 				       (u8 *)&mc_reg_table->data[NISLANDS_MCREGISTERTABLE_FIRST_DRIVERSTATE_SLOT],
304057e252bfSMichael Neumann 				       sizeof(SMC_NIslands_MCRegisterSet) * ni_new_state->performance_level_count,
304157e252bfSMichael Neumann 				       pi->sram_end);
304257e252bfSMichael Neumann }
304357e252bfSMichael Neumann 
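/*
 * Fill the SMC leakage LUT from the driver-side leakage model.  The
 * temperature axis advances in 8 degree steps stored in millidegrees
 * (i = 0 -> 8000, i = 1 -> 16000, ...), clamped to the configured
 * minimum leakage temperature; unused voltage columns are padded with
 * the maximum computed leakage.
 */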
304457e252bfSMichael Neumann static int ni_init_driver_calculated_leakage_table(struct radeon_device *rdev,
304557e252bfSMichael Neumann 						   PP_NIslands_CACTABLES *cac_tables)
304657e252bfSMichael Neumann {
304757e252bfSMichael Neumann 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
304857e252bfSMichael Neumann 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
304957e252bfSMichael Neumann 	u32 leakage = 0;
305057e252bfSMichael Neumann 	unsigned int i, j, table_size;
305157e252bfSMichael Neumann 	s32 t;
305257e252bfSMichael Neumann 	u32 smc_leakage, max_leakage = 0;
305357e252bfSMichael Neumann 	u32 scaling_factor;
305457e252bfSMichael Neumann 
305557e252bfSMichael Neumann 	table_size = eg_pi->vddc_voltage_table.count;
305657e252bfSMichael Neumann 
305757e252bfSMichael Neumann 	if (SMC_NISLANDS_LKGE_LUT_NUM_OF_VOLT_ENTRIES < table_size)
305857e252bfSMichael Neumann 		table_size = SMC_NISLANDS_LKGE_LUT_NUM_OF_VOLT_ENTRIES;
305957e252bfSMichael Neumann 
306057e252bfSMichael Neumann 	scaling_factor = ni_get_smc_power_scaling_factor(rdev);
306157e252bfSMichael Neumann 
306257e252bfSMichael Neumann 	for (i = 0; i < SMC_NISLANDS_LKGE_LUT_NUM_OF_TEMP_ENTRIES; i++) {
306357e252bfSMichael Neumann 		for (j = 0; j < table_size; j++) {
306457e252bfSMichael Neumann 			t = (1000 * ((i + 1) * 8));
306557e252bfSMichael Neumann 
306657e252bfSMichael Neumann 			if (t < ni_pi->cac_data.leakage_minimum_temperature)
306757e252bfSMichael Neumann 				t = ni_pi->cac_data.leakage_minimum_temperature;
306857e252bfSMichael Neumann 
306957e252bfSMichael Neumann 			ni_calculate_leakage_for_v_and_t(rdev,
307057e252bfSMichael Neumann 							 &ni_pi->cac_data.leakage_coefficients,
307157e252bfSMichael Neumann 							 eg_pi->vddc_voltage_table.entries[j].value,
307257e252bfSMichael Neumann 							 t,
307357e252bfSMichael Neumann 							 ni_pi->cac_data.i_leakage,
307457e252bfSMichael Neumann 							 &leakage);
307557e252bfSMichael Neumann 
307657e252bfSMichael Neumann 			smc_leakage = ni_scale_power_for_smc(leakage, scaling_factor) / 1000;
307757e252bfSMichael Neumann 			if (smc_leakage > max_leakage)
307857e252bfSMichael Neumann 				max_leakage = smc_leakage;
307957e252bfSMichael Neumann 
308057e252bfSMichael Neumann 			cac_tables->cac_lkge_lut[i][j] = cpu_to_be32(smc_leakage);
308157e252bfSMichael Neumann 		}
308257e252bfSMichael Neumann 	}
308357e252bfSMichael Neumann 
308457e252bfSMichael Neumann 	for (j = table_size; j < SMC_NISLANDS_LKGE_LUT_NUM_OF_VOLT_ENTRIES; j++) {
308557e252bfSMichael Neumann 		for (i = 0; i < SMC_NISLANDS_LKGE_LUT_NUM_OF_TEMP_ENTRIES; i++)
308657e252bfSMichael Neumann 			cac_tables->cac_lkge_lut[i][j] = cpu_to_be32(max_leakage);
308757e252bfSMichael Neumann 	}
308857e252bfSMichael Neumann 	return 0;
308957e252bfSMichael Neumann }
309057e252bfSMichael Neumann 
309157e252bfSMichael Neumann static int ni_init_simplified_leakage_table(struct radeon_device *rdev,
309257e252bfSMichael Neumann 					    PP_NIslands_CACTABLES *cac_tables)
309357e252bfSMichael Neumann {
309457e252bfSMichael Neumann 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
309557e252bfSMichael Neumann 	struct radeon_cac_leakage_table *leakage_table =
309657e252bfSMichael Neumann 		&rdev->pm.dpm.dyn_state.cac_leakage_table;
309757e252bfSMichael Neumann 	u32 i, j, table_size;
309857e252bfSMichael Neumann 	u32 smc_leakage, max_leakage = 0;
309957e252bfSMichael Neumann 	u32 scaling_factor;
310057e252bfSMichael Neumann 
310157e252bfSMichael Neumann 	if (!leakage_table)
310257e252bfSMichael Neumann 		return -EINVAL;
310357e252bfSMichael Neumann 
310457e252bfSMichael Neumann 	table_size = leakage_table->count;
310557e252bfSMichael Neumann 
310657e252bfSMichael Neumann 	if (eg_pi->vddc_voltage_table.count != table_size)
310757e252bfSMichael Neumann 		table_size = (eg_pi->vddc_voltage_table.count < leakage_table->count) ?
310857e252bfSMichael Neumann 			eg_pi->vddc_voltage_table.count : leakage_table->count;
310957e252bfSMichael Neumann 
311057e252bfSMichael Neumann 	if (SMC_NISLANDS_LKGE_LUT_NUM_OF_VOLT_ENTRIES < table_size)
311157e252bfSMichael Neumann 		table_size = SMC_NISLANDS_LKGE_LUT_NUM_OF_VOLT_ENTRIES;
311257e252bfSMichael Neumann 
311357e252bfSMichael Neumann 	if (table_size == 0)
311457e252bfSMichael Neumann 		return -EINVAL;
311557e252bfSMichael Neumann 
311657e252bfSMichael Neumann 	scaling_factor = ni_get_smc_power_scaling_factor(rdev);
311757e252bfSMichael Neumann 
311857e252bfSMichael Neumann 	for (j = 0; j < table_size; j++) {
311957e252bfSMichael Neumann 		smc_leakage = leakage_table->entries[j].leakage;
312057e252bfSMichael Neumann 
312157e252bfSMichael Neumann 		if (smc_leakage > max_leakage)
312257e252bfSMichael Neumann 			max_leakage = smc_leakage;
312357e252bfSMichael Neumann 
312457e252bfSMichael Neumann 		for (i = 0; i < SMC_NISLANDS_LKGE_LUT_NUM_OF_TEMP_ENTRIES; i++)
312557e252bfSMichael Neumann 			cac_tables->cac_lkge_lut[i][j] =
312657e252bfSMichael Neumann 				cpu_to_be32(ni_scale_power_for_smc(smc_leakage, scaling_factor));
312757e252bfSMichael Neumann 	}
312857e252bfSMichael Neumann 
312957e252bfSMichael Neumann 	for (j = table_size; j < SMC_NISLANDS_LKGE_LUT_NUM_OF_VOLT_ENTRIES; j++) {
313057e252bfSMichael Neumann 		for (i = 0; i < SMC_NISLANDS_LKGE_LUT_NUM_OF_TEMP_ENTRIES; i++)
313157e252bfSMichael Neumann 			cac_tables->cac_lkge_lut[i][j] =
313257e252bfSMichael Neumann 				cpu_to_be32(ni_scale_power_for_smc(max_leakage, scaling_factor));
313357e252bfSMichael Neumann 	}
313457e252bfSMichael Neumann 	return 0;
313557e252bfSMichael Neumann }
313657e252bfSMichael Neumann 
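/*
 * Program the CAC weights and leakage tables into the SMC.  Failures
 * here do not fail DPM init; they simply disable CAC and power
 * containment.
 */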
313757e252bfSMichael Neumann static int ni_initialize_smc_cac_tables(struct radeon_device *rdev)
313857e252bfSMichael Neumann {
313957e252bfSMichael Neumann 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
314057e252bfSMichael Neumann 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
314157e252bfSMichael Neumann 	PP_NIslands_CACTABLES *cac_tables = NULL;
314257e252bfSMichael Neumann 	int i, ret;
314357e252bfSMichael Neumann 	u32 reg;
314457e252bfSMichael Neumann 
314557e252bfSMichael Neumann 	if (ni_pi->enable_cac == false)
314657e252bfSMichael Neumann 		return 0;
314757e252bfSMichael Neumann 
314857e252bfSMichael Neumann 	cac_tables = kzalloc(sizeof(PP_NIslands_CACTABLES), GFP_KERNEL);
314957e252bfSMichael Neumann 	if (!cac_tables)
315057e252bfSMichael Neumann 		return -ENOMEM;
315157e252bfSMichael Neumann 
315257e252bfSMichael Neumann 	reg = RREG32(CG_CAC_CTRL) & ~(TID_CNT_MASK | TID_UNIT_MASK);
315357e252bfSMichael Neumann 	reg |= (TID_CNT(ni_pi->cac_weights->tid_cnt) |
315457e252bfSMichael Neumann 		TID_UNIT(ni_pi->cac_weights->tid_unit));
315557e252bfSMichael Neumann 	WREG32(CG_CAC_CTRL, reg);
315657e252bfSMichael Neumann 
315757e252bfSMichael Neumann 	for (i = 0; i < NISLANDS_DCCAC_MAX_LEVELS; i++)
315857e252bfSMichael Neumann 		ni_pi->dc_cac_table[i] = ni_pi->cac_weights->dc_cac[i];
315957e252bfSMichael Neumann 
316057e252bfSMichael Neumann 	for (i = 0; i < SMC_NISLANDS_BIF_LUT_NUM_OF_ENTRIES; i++)
316157e252bfSMichael Neumann 		cac_tables->cac_bif_lut[i] = ni_pi->cac_weights->pcie_cac[i];
316257e252bfSMichael Neumann 
316357e252bfSMichael Neumann 	ni_pi->cac_data.i_leakage = rdev->pm.dpm.cac_leakage;
316457e252bfSMichael Neumann 	ni_pi->cac_data.pwr_const = 0;
316557e252bfSMichael Neumann 	ni_pi->cac_data.dc_cac_value = ni_pi->dc_cac_table[NISLANDS_DCCAC_LEVEL_0];
316657e252bfSMichael Neumann 	ni_pi->cac_data.bif_cac_value = 0;
316757e252bfSMichael Neumann 	ni_pi->cac_data.mc_wr_weight = ni_pi->cac_weights->mc_write_weight;
316857e252bfSMichael Neumann 	ni_pi->cac_data.mc_rd_weight = ni_pi->cac_weights->mc_read_weight;
316957e252bfSMichael Neumann 	ni_pi->cac_data.allow_ovrflw = 0;
317057e252bfSMichael Neumann 	ni_pi->cac_data.l2num_win_tdp = ni_pi->lta_window_size;
317157e252bfSMichael Neumann 	ni_pi->cac_data.num_win_tdp = 0;
317257e252bfSMichael Neumann 	ni_pi->cac_data.lts_truncate_n = ni_pi->lts_truncate;
317357e252bfSMichael Neumann 
317457e252bfSMichael Neumann 	if (ni_pi->driver_calculate_cac_leakage)
317557e252bfSMichael Neumann 		ret = ni_init_driver_calculated_leakage_table(rdev, cac_tables);
317657e252bfSMichael Neumann 	else
317757e252bfSMichael Neumann 		ret = ni_init_simplified_leakage_table(rdev, cac_tables);
317857e252bfSMichael Neumann 
317957e252bfSMichael Neumann 	if (ret)
318057e252bfSMichael Neumann 		goto done_free;
318157e252bfSMichael Neumann 
318257e252bfSMichael Neumann 	cac_tables->pwr_const      = cpu_to_be32(ni_pi->cac_data.pwr_const);
318357e252bfSMichael Neumann 	cac_tables->dc_cacValue    = cpu_to_be32(ni_pi->cac_data.dc_cac_value);
318457e252bfSMichael Neumann 	cac_tables->bif_cacValue   = cpu_to_be32(ni_pi->cac_data.bif_cac_value);
318557e252bfSMichael Neumann 	cac_tables->AllowOvrflw    = ni_pi->cac_data.allow_ovrflw;
318657e252bfSMichael Neumann 	cac_tables->MCWrWeight     = ni_pi->cac_data.mc_wr_weight;
318757e252bfSMichael Neumann 	cac_tables->MCRdWeight     = ni_pi->cac_data.mc_rd_weight;
318857e252bfSMichael Neumann 	cac_tables->numWin_TDP     = ni_pi->cac_data.num_win_tdp;
318957e252bfSMichael Neumann 	cac_tables->l2numWin_TDP   = ni_pi->cac_data.l2num_win_tdp;
319057e252bfSMichael Neumann 	cac_tables->lts_truncate_n = ni_pi->cac_data.lts_truncate_n;
319157e252bfSMichael Neumann 
319257e252bfSMichael Neumann 	ret = rv770_copy_bytes_to_smc(rdev, ni_pi->cac_table_start, (u8 *)cac_tables,
319357e252bfSMichael Neumann 				      sizeof(PP_NIslands_CACTABLES), pi->sram_end);
319457e252bfSMichael Neumann 
319557e252bfSMichael Neumann done_free:
319657e252bfSMichael Neumann 	if (ret) {
319757e252bfSMichael Neumann 		ni_pi->enable_cac = false;
319857e252bfSMichael Neumann 		ni_pi->enable_power_containment = false;
319957e252bfSMichael Neumann 	}
320057e252bfSMichael Neumann 
320157e252bfSMichael Neumann 	kfree(cac_tables);
320257e252bfSMichael Neumann 
320357e252bfSMichael Neumann 	return 0;
320457e252bfSMichael Neumann }
320557e252bfSMichael Neumann 
320657e252bfSMichael Neumann static int ni_initialize_hardware_cac_manager(struct radeon_device *rdev)
320757e252bfSMichael Neumann {
320857e252bfSMichael Neumann 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
320957e252bfSMichael Neumann 	u32 reg;
321057e252bfSMichael Neumann 
321157e252bfSMichael Neumann 	if (!ni_pi->enable_cac ||
321257e252bfSMichael Neumann 	    !ni_pi->cac_configuration_required)
321357e252bfSMichael Neumann 		return 0;
321457e252bfSMichael Neumann 
321557e252bfSMichael Neumann 	if (ni_pi->cac_weights == NULL)
321657e252bfSMichael Neumann 		return -EINVAL;
321757e252bfSMichael Neumann 
321857e252bfSMichael Neumann 	reg = RREG32_CG(CG_CAC_REGION_1_WEIGHT_0) & ~(WEIGHT_TCP_SIG0_MASK |
321957e252bfSMichael Neumann 						      WEIGHT_TCP_SIG1_MASK |
322057e252bfSMichael Neumann 						      WEIGHT_TA_SIG_MASK);
322157e252bfSMichael Neumann 	reg |= (WEIGHT_TCP_SIG0(ni_pi->cac_weights->weight_tcp_sig0) |
322257e252bfSMichael Neumann 		WEIGHT_TCP_SIG1(ni_pi->cac_weights->weight_tcp_sig1) |
322357e252bfSMichael Neumann 		WEIGHT_TA_SIG(ni_pi->cac_weights->weight_ta_sig));
322457e252bfSMichael Neumann 	WREG32_CG(CG_CAC_REGION_1_WEIGHT_0, reg);
322557e252bfSMichael Neumann 
322657e252bfSMichael Neumann 	reg = RREG32_CG(CG_CAC_REGION_1_WEIGHT_1) & ~(WEIGHT_TCC_EN0_MASK |
322757e252bfSMichael Neumann 						      WEIGHT_TCC_EN1_MASK |
322857e252bfSMichael Neumann 						      WEIGHT_TCC_EN2_MASK);
322957e252bfSMichael Neumann 	reg |= (WEIGHT_TCC_EN0(ni_pi->cac_weights->weight_tcc_en0) |
323057e252bfSMichael Neumann 		WEIGHT_TCC_EN1(ni_pi->cac_weights->weight_tcc_en1) |
323157e252bfSMichael Neumann 		WEIGHT_TCC_EN2(ni_pi->cac_weights->weight_tcc_en2));
323257e252bfSMichael Neumann 	WREG32_CG(CG_CAC_REGION_1_WEIGHT_1, reg);
323357e252bfSMichael Neumann 
323457e252bfSMichael Neumann 	reg = RREG32_CG(CG_CAC_REGION_2_WEIGHT_0) & ~(WEIGHT_CB_EN0_MASK |
323557e252bfSMichael Neumann 						      WEIGHT_CB_EN1_MASK |
323657e252bfSMichael Neumann 						      WEIGHT_CB_EN2_MASK |
323757e252bfSMichael Neumann 						      WEIGHT_CB_EN3_MASK);
323857e252bfSMichael Neumann 	reg |= (WEIGHT_CB_EN0(ni_pi->cac_weights->weight_cb_en0) |
323957e252bfSMichael Neumann 		WEIGHT_CB_EN1(ni_pi->cac_weights->weight_cb_en1) |
324057e252bfSMichael Neumann 		WEIGHT_CB_EN2(ni_pi->cac_weights->weight_cb_en2) |
324157e252bfSMichael Neumann 		WEIGHT_CB_EN3(ni_pi->cac_weights->weight_cb_en3));
324257e252bfSMichael Neumann 	WREG32_CG(CG_CAC_REGION_2_WEIGHT_0, reg);
324357e252bfSMichael Neumann 
324457e252bfSMichael Neumann 	reg = RREG32_CG(CG_CAC_REGION_2_WEIGHT_1) & ~(WEIGHT_DB_SIG0_MASK |
324557e252bfSMichael Neumann 						      WEIGHT_DB_SIG1_MASK |
324657e252bfSMichael Neumann 						      WEIGHT_DB_SIG2_MASK |
324757e252bfSMichael Neumann 						      WEIGHT_DB_SIG3_MASK);
324857e252bfSMichael Neumann 	reg |= (WEIGHT_DB_SIG0(ni_pi->cac_weights->weight_db_sig0) |
324957e252bfSMichael Neumann 		WEIGHT_DB_SIG1(ni_pi->cac_weights->weight_db_sig1) |
325057e252bfSMichael Neumann 		WEIGHT_DB_SIG2(ni_pi->cac_weights->weight_db_sig2) |
325157e252bfSMichael Neumann 		WEIGHT_DB_SIG3(ni_pi->cac_weights->weight_db_sig3));
325257e252bfSMichael Neumann 	WREG32_CG(CG_CAC_REGION_2_WEIGHT_1, reg);
325357e252bfSMichael Neumann 
325457e252bfSMichael Neumann 	reg = RREG32_CG(CG_CAC_REGION_2_WEIGHT_2) & ~(WEIGHT_SXM_SIG0_MASK |
325557e252bfSMichael Neumann 						      WEIGHT_SXM_SIG1_MASK |
325657e252bfSMichael Neumann 						      WEIGHT_SXM_SIG2_MASK |
325757e252bfSMichael Neumann 						      WEIGHT_SXS_SIG0_MASK |
325857e252bfSMichael Neumann 						      WEIGHT_SXS_SIG1_MASK);
325957e252bfSMichael Neumann 	reg |= (WEIGHT_SXM_SIG0(ni_pi->cac_weights->weight_sxm_sig0) |
326057e252bfSMichael Neumann 		WEIGHT_SXM_SIG1(ni_pi->cac_weights->weight_sxm_sig1) |
326157e252bfSMichael Neumann 		WEIGHT_SXM_SIG2(ni_pi->cac_weights->weight_sxm_sig2) |
326257e252bfSMichael Neumann 		WEIGHT_SXS_SIG0(ni_pi->cac_weights->weight_sxs_sig0) |
326357e252bfSMichael Neumann 		WEIGHT_SXS_SIG1(ni_pi->cac_weights->weight_sxs_sig1));
326457e252bfSMichael Neumann 	WREG32_CG(CG_CAC_REGION_2_WEIGHT_2, reg);
326557e252bfSMichael Neumann 
326657e252bfSMichael Neumann 	reg = RREG32_CG(CG_CAC_REGION_3_WEIGHT_0) & ~(WEIGHT_XBR_0_MASK |
326757e252bfSMichael Neumann 						      WEIGHT_XBR_1_MASK |
326857e252bfSMichael Neumann 						      WEIGHT_XBR_2_MASK |
326957e252bfSMichael Neumann 						      WEIGHT_SPI_SIG0_MASK);
327057e252bfSMichael Neumann 	reg |= (WEIGHT_XBR_0(ni_pi->cac_weights->weight_xbr_0) |
327157e252bfSMichael Neumann 		WEIGHT_XBR_1(ni_pi->cac_weights->weight_xbr_1) |
327257e252bfSMichael Neumann 		WEIGHT_XBR_2(ni_pi->cac_weights->weight_xbr_2) |
327357e252bfSMichael Neumann 		WEIGHT_SPI_SIG0(ni_pi->cac_weights->weight_spi_sig0));
327457e252bfSMichael Neumann 	WREG32_CG(CG_CAC_REGION_3_WEIGHT_0, reg);
327557e252bfSMichael Neumann 
327657e252bfSMichael Neumann 	reg = RREG32_CG(CG_CAC_REGION_3_WEIGHT_1) & ~(WEIGHT_SPI_SIG1_MASK |
327757e252bfSMichael Neumann 						      WEIGHT_SPI_SIG2_MASK |
327857e252bfSMichael Neumann 						      WEIGHT_SPI_SIG3_MASK |
327957e252bfSMichael Neumann 						      WEIGHT_SPI_SIG4_MASK |
328057e252bfSMichael Neumann 						      WEIGHT_SPI_SIG5_MASK);
328157e252bfSMichael Neumann 	reg |= (WEIGHT_SPI_SIG1(ni_pi->cac_weights->weight_spi_sig1) |
328257e252bfSMichael Neumann 		WEIGHT_SPI_SIG2(ni_pi->cac_weights->weight_spi_sig2) |
328357e252bfSMichael Neumann 		WEIGHT_SPI_SIG3(ni_pi->cac_weights->weight_spi_sig3) |
328457e252bfSMichael Neumann 		WEIGHT_SPI_SIG4(ni_pi->cac_weights->weight_spi_sig4) |
328557e252bfSMichael Neumann 		WEIGHT_SPI_SIG5(ni_pi->cac_weights->weight_spi_sig5));
328657e252bfSMichael Neumann 	WREG32_CG(CG_CAC_REGION_3_WEIGHT_1, reg);
328757e252bfSMichael Neumann 
328857e252bfSMichael Neumann 	reg = RREG32_CG(CG_CAC_REGION_4_WEIGHT_0) & ~(WEIGHT_LDS_SIG0_MASK |
328957e252bfSMichael Neumann 						      WEIGHT_LDS_SIG1_MASK |
329057e252bfSMichael Neumann 						      WEIGHT_SC_MASK);
329157e252bfSMichael Neumann 	reg |= (WEIGHT_LDS_SIG0(ni_pi->cac_weights->weight_lds_sig0) |
329257e252bfSMichael Neumann 		WEIGHT_LDS_SIG1(ni_pi->cac_weights->weight_lds_sig1) |
329357e252bfSMichael Neumann 		WEIGHT_SC(ni_pi->cac_weights->weight_sc));
329457e252bfSMichael Neumann 	WREG32_CG(CG_CAC_REGION_4_WEIGHT_0, reg);
329557e252bfSMichael Neumann 
329657e252bfSMichael Neumann 	reg = RREG32_CG(CG_CAC_REGION_4_WEIGHT_1) & ~(WEIGHT_BIF_MASK |
329757e252bfSMichael Neumann 						      WEIGHT_CP_MASK |
329857e252bfSMichael Neumann 						      WEIGHT_PA_SIG0_MASK |
329957e252bfSMichael Neumann 						      WEIGHT_PA_SIG1_MASK |
330057e252bfSMichael Neumann 						      WEIGHT_VGT_SIG0_MASK);
330157e252bfSMichael Neumann 	reg |= (WEIGHT_BIF(ni_pi->cac_weights->weight_bif) |
330257e252bfSMichael Neumann 		WEIGHT_CP(ni_pi->cac_weights->weight_cp) |
330357e252bfSMichael Neumann 		WEIGHT_PA_SIG0(ni_pi->cac_weights->weight_pa_sig0) |
330457e252bfSMichael Neumann 		WEIGHT_PA_SIG1(ni_pi->cac_weights->weight_pa_sig1) |
330557e252bfSMichael Neumann 		WEIGHT_VGT_SIG0(ni_pi->cac_weights->weight_vgt_sig0));
330657e252bfSMichael Neumann 	WREG32_CG(CG_CAC_REGION_4_WEIGHT_1, reg);
330757e252bfSMichael Neumann 
330857e252bfSMichael Neumann 	reg = RREG32_CG(CG_CAC_REGION_4_WEIGHT_2) & ~(WEIGHT_VGT_SIG1_MASK |
330957e252bfSMichael Neumann 						      WEIGHT_VGT_SIG2_MASK |
331057e252bfSMichael Neumann 						      WEIGHT_DC_SIG0_MASK |
331157e252bfSMichael Neumann 						      WEIGHT_DC_SIG1_MASK |
331257e252bfSMichael Neumann 						      WEIGHT_DC_SIG2_MASK);
331357e252bfSMichael Neumann 	reg |= (WEIGHT_VGT_SIG1(ni_pi->cac_weights->weight_vgt_sig1) |
331457e252bfSMichael Neumann 		WEIGHT_VGT_SIG2(ni_pi->cac_weights->weight_vgt_sig2) |
331557e252bfSMichael Neumann 		WEIGHT_DC_SIG0(ni_pi->cac_weights->weight_dc_sig0) |
331657e252bfSMichael Neumann 		WEIGHT_DC_SIG1(ni_pi->cac_weights->weight_dc_sig1) |
331757e252bfSMichael Neumann 		WEIGHT_DC_SIG2(ni_pi->cac_weights->weight_dc_sig2));
331857e252bfSMichael Neumann 	WREG32_CG(CG_CAC_REGION_4_WEIGHT_2, reg);
331957e252bfSMichael Neumann 
332057e252bfSMichael Neumann 	reg = RREG32_CG(CG_CAC_REGION_4_WEIGHT_3) & ~(WEIGHT_DC_SIG3_MASK |
332157e252bfSMichael Neumann 						      WEIGHT_UVD_SIG0_MASK |
332257e252bfSMichael Neumann 						      WEIGHT_UVD_SIG1_MASK |
332357e252bfSMichael Neumann 						      WEIGHT_SPARE0_MASK |
332457e252bfSMichael Neumann 						      WEIGHT_SPARE1_MASK);
332557e252bfSMichael Neumann 	reg |= (WEIGHT_DC_SIG3(ni_pi->cac_weights->weight_dc_sig3) |
332657e252bfSMichael Neumann 		WEIGHT_UVD_SIG0(ni_pi->cac_weights->weight_uvd_sig0) |
332757e252bfSMichael Neumann 		WEIGHT_UVD_SIG1(ni_pi->cac_weights->weight_uvd_sig1) |
332857e252bfSMichael Neumann 		WEIGHT_SPARE0(ni_pi->cac_weights->weight_spare0) |
332957e252bfSMichael Neumann 		WEIGHT_SPARE1(ni_pi->cac_weights->weight_spare1));
333057e252bfSMichael Neumann 	WREG32_CG(CG_CAC_REGION_4_WEIGHT_3, reg);
333157e252bfSMichael Neumann 
333257e252bfSMichael Neumann 	reg = RREG32_CG(CG_CAC_REGION_5_WEIGHT_0) & ~(WEIGHT_SQ_VSP_MASK |
333357e252bfSMichael Neumann 						      WEIGHT_SQ_VSP0_MASK);
333457e252bfSMichael Neumann 	reg |= (WEIGHT_SQ_VSP(ni_pi->cac_weights->weight_sq_vsp) |
333557e252bfSMichael Neumann 		WEIGHT_SQ_VSP0(ni_pi->cac_weights->weight_sq_vsp0));
333657e252bfSMichael Neumann 	WREG32_CG(CG_CAC_REGION_5_WEIGHT_0, reg);
333757e252bfSMichael Neumann 
333857e252bfSMichael Neumann 	reg = RREG32_CG(CG_CAC_REGION_5_WEIGHT_1) & ~(WEIGHT_SQ_GPR_MASK);
333957e252bfSMichael Neumann 	reg |= WEIGHT_SQ_GPR(ni_pi->cac_weights->weight_sq_gpr);
334057e252bfSMichael Neumann 	WREG32_CG(CG_CAC_REGION_5_WEIGHT_1, reg);
334157e252bfSMichael Neumann 
334257e252bfSMichael Neumann 	reg = RREG32_CG(CG_CAC_REGION_4_OVERRIDE_4) & ~(OVR_MODE_SPARE_0_MASK |
334357e252bfSMichael Neumann 							OVR_VAL_SPARE_0_MASK |
334457e252bfSMichael Neumann 							OVR_MODE_SPARE_1_MASK |
334557e252bfSMichael Neumann 							OVR_VAL_SPARE_1_MASK);
334657e252bfSMichael Neumann 	reg |= (OVR_MODE_SPARE_0(ni_pi->cac_weights->ovr_mode_spare_0) |
334757e252bfSMichael Neumann 		OVR_VAL_SPARE_0(ni_pi->cac_weights->ovr_val_spare_0) |
334857e252bfSMichael Neumann 		OVR_MODE_SPARE_1(ni_pi->cac_weights->ovr_mode_spare_1) |
334957e252bfSMichael Neumann 		OVR_VAL_SPARE_1(ni_pi->cac_weights->ovr_val_spare_1));
335057e252bfSMichael Neumann 	WREG32_CG(CG_CAC_REGION_4_OVERRIDE_4, reg);
335157e252bfSMichael Neumann 
335257e252bfSMichael Neumann 	reg = RREG32(SQ_CAC_THRESHOLD) & ~(VSP_MASK |
335357e252bfSMichael Neumann 					   VSP0_MASK |
335457e252bfSMichael Neumann 					   GPR_MASK);
335557e252bfSMichael Neumann 	reg |= (VSP(ni_pi->cac_weights->vsp) |
335657e252bfSMichael Neumann 		VSP0(ni_pi->cac_weights->vsp0) |
335757e252bfSMichael Neumann 		GPR(ni_pi->cac_weights->gpr));
335857e252bfSMichael Neumann 	WREG32(SQ_CAC_THRESHOLD, reg);
335957e252bfSMichael Neumann 
336057e252bfSMichael Neumann 	reg = (MCDW_WR_ENABLE |
336157e252bfSMichael Neumann 	       MCDX_WR_ENABLE |
336257e252bfSMichael Neumann 	       MCDY_WR_ENABLE |
336357e252bfSMichael Neumann 	       MCDZ_WR_ENABLE |
336457e252bfSMichael Neumann 	       INDEX(0x09D4));
336557e252bfSMichael Neumann 	WREG32(MC_CG_CONFIG, reg);
336657e252bfSMichael Neumann 
336757e252bfSMichael Neumann 	reg = (READ_WEIGHT(ni_pi->cac_weights->mc_read_weight) |
336857e252bfSMichael Neumann 	       WRITE_WEIGHT(ni_pi->cac_weights->mc_write_weight) |
336957e252bfSMichael Neumann 	       ALLOW_OVERFLOW);
337057e252bfSMichael Neumann 	WREG32(MC_CG_DATAPORT, reg);
337157e252bfSMichael Neumann 
337257e252bfSMichael Neumann 	return 0;
337357e252bfSMichael Neumann }
337457e252bfSMichael Neumann 
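/*
 * Tell the SMC to start or stop CAC collection.  CAC is not enabled
 * for UVD states, and long-term averaging is dropped if the SMC
 * rejects the corresponding message.
 */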
337557e252bfSMichael Neumann static int ni_enable_smc_cac(struct radeon_device *rdev,
337657e252bfSMichael Neumann 			     struct radeon_ps *radeon_new_state,
337757e252bfSMichael Neumann 			     bool enable)
337857e252bfSMichael Neumann {
337957e252bfSMichael Neumann 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
338057e252bfSMichael Neumann 	int ret = 0;
338157e252bfSMichael Neumann 	PPSMC_Result smc_result;
338257e252bfSMichael Neumann 
338357e252bfSMichael Neumann 	if (ni_pi->enable_cac) {
338457e252bfSMichael Neumann 		if (enable) {
338557e252bfSMichael Neumann 			if (!r600_is_uvd_state(radeon_new_state->class, radeon_new_state->class2)) {
338657e252bfSMichael Neumann 				smc_result = rv770_send_msg_to_smc(rdev, PPSMC_MSG_CollectCAC_PowerCorreln);
338757e252bfSMichael Neumann 
338857e252bfSMichael Neumann 				if (ni_pi->support_cac_long_term_average) {
338957e252bfSMichael Neumann 					smc_result = rv770_send_msg_to_smc(rdev, PPSMC_CACLongTermAvgEnable);
339057e252bfSMichael Neumann 					if (PPSMC_Result_OK != smc_result)
339157e252bfSMichael Neumann 						ni_pi->support_cac_long_term_average = false;
339257e252bfSMichael Neumann 				}
339357e252bfSMichael Neumann 
339457e252bfSMichael Neumann 				smc_result = rv770_send_msg_to_smc(rdev, PPSMC_MSG_EnableCac);
339557e252bfSMichael Neumann 				if (PPSMC_Result_OK != smc_result)
339657e252bfSMichael Neumann 					ret = -EINVAL;
339757e252bfSMichael Neumann 
339857e252bfSMichael Neumann 				ni_pi->cac_enabled = (PPSMC_Result_OK == smc_result) ? true : false;
339957e252bfSMichael Neumann 			}
340057e252bfSMichael Neumann 		} else if (ni_pi->cac_enabled) {
340157e252bfSMichael Neumann 			smc_result = rv770_send_msg_to_smc(rdev, PPSMC_MSG_DisableCac);
340257e252bfSMichael Neumann 
340357e252bfSMichael Neumann 			ni_pi->cac_enabled = false;
340457e252bfSMichael Neumann 
340557e252bfSMichael Neumann 			if (ni_pi->support_cac_long_term_average) {
340657e252bfSMichael Neumann 				smc_result = rv770_send_msg_to_smc(rdev, PPSMC_CACLongTermAvgDisable);
340757e252bfSMichael Neumann 				if (PPSMC_Result_OK != smc_result)
340857e252bfSMichael Neumann 					ni_pi->support_cac_long_term_average = false;
340957e252bfSMichael Neumann 			}
341057e252bfSMichael Neumann 		}
341157e252bfSMichael Neumann 	}
341257e252bfSMichael Neumann 
341357e252bfSMichael Neumann 	return ret;
341457e252bfSMichael Neumann }
341557e252bfSMichael Neumann 
341657e252bfSMichael Neumann static int ni_pcie_performance_request(struct radeon_device *rdev,
341757e252bfSMichael Neumann 				       u8 perf_req, bool advertise)
341857e252bfSMichael Neumann {
3419c6f73aabSFrançois Tigeot #if defined(CONFIG_ACPI)
342057e252bfSMichael Neumann 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
342157e252bfSMichael Neumann 
342257e252bfSMichael Neumann 	if ((perf_req == PCIE_PERF_REQ_PECI_GEN1) ||
342357e252bfSMichael Neumann 	    (perf_req == PCIE_PERF_REQ_PECI_GEN2)) {
342457e252bfSMichael Neumann 		if (eg_pi->pcie_performance_request_registered == false)
342557e252bfSMichael Neumann 			radeon_acpi_pcie_notify_device_ready(rdev);
342657e252bfSMichael Neumann 		eg_pi->pcie_performance_request_registered = true;
342757e252bfSMichael Neumann 		return radeon_acpi_pcie_performance_request(rdev, perf_req, advertise);
342857e252bfSMichael Neumann 	} else if ((perf_req == PCIE_PERF_REQ_REMOVE_REGISTRY) &&
342957e252bfSMichael Neumann 		    eg_pi->pcie_performance_request_registered) {
343057e252bfSMichael Neumann 		eg_pi->pcie_performance_request_registered = false;
343157e252bfSMichael Neumann 		return radeon_acpi_pcie_performance_request(rdev, perf_req, advertise);
343257e252bfSMichael Neumann 	}
343357e252bfSMichael Neumann #endif
343457e252bfSMichael Neumann 	return 0;
343557e252bfSMichael Neumann }
343657e252bfSMichael Neumann 
343757e252bfSMichael Neumann static int ni_advertise_gen2_capability(struct radeon_device *rdev)
343857e252bfSMichael Neumann {
343957e252bfSMichael Neumann 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
344057e252bfSMichael Neumann 	u32 tmp;
344157e252bfSMichael Neumann 
344257e252bfSMichael Neumann 	tmp = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
344357e252bfSMichael Neumann 
344457e252bfSMichael Neumann 	if ((tmp & LC_OTHER_SIDE_EVER_SENT_GEN2) &&
344557e252bfSMichael Neumann 	    (tmp & LC_OTHER_SIDE_SUPPORTS_GEN2))
344657e252bfSMichael Neumann 		pi->pcie_gen2 = true;
344757e252bfSMichael Neumann 	else
344857e252bfSMichael Neumann 		pi->pcie_gen2 = false;
344957e252bfSMichael Neumann 
345057e252bfSMichael Neumann 	if (!pi->pcie_gen2)
345157e252bfSMichael Neumann 		ni_pcie_performance_request(rdev, PCIE_PERF_REQ_PECI_GEN2, true);
345257e252bfSMichael Neumann 
345357e252bfSMichael Neumann 	return 0;
345457e252bfSMichael Neumann }
345557e252bfSMichael Neumann 
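/*
 * Configure the BIF/PCIE link for hardware-controlled gen2 speed
 * switching, but only when both ends of the link advertise gen2
 * support.
 */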
345657e252bfSMichael Neumann static void ni_enable_bif_dynamic_pcie_gen2(struct radeon_device *rdev,
345757e252bfSMichael Neumann 					    bool enable)
345857e252bfSMichael Neumann {
345957e252bfSMichael Neumann 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
346057e252bfSMichael Neumann 	u32 tmp, bif;
346157e252bfSMichael Neumann 
346257e252bfSMichael Neumann 	tmp = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
346357e252bfSMichael Neumann 
346457e252bfSMichael Neumann 	if ((tmp & LC_OTHER_SIDE_EVER_SENT_GEN2) &&
346557e252bfSMichael Neumann 	    (tmp & LC_OTHER_SIDE_SUPPORTS_GEN2)) {
346657e252bfSMichael Neumann 		if (enable) {
346757e252bfSMichael Neumann 			if (!pi->boot_in_gen2) {
346857e252bfSMichael Neumann 				bif = RREG32(CG_BIF_REQ_AND_RSP) & ~CG_CLIENT_REQ_MASK;
346957e252bfSMichael Neumann 				bif |= CG_CLIENT_REQ(0xd);
347057e252bfSMichael Neumann 				WREG32(CG_BIF_REQ_AND_RSP, bif);
347157e252bfSMichael Neumann 			}
347257e252bfSMichael Neumann 			tmp &= ~LC_HW_VOLTAGE_IF_CONTROL_MASK;
347357e252bfSMichael Neumann 			tmp |= LC_HW_VOLTAGE_IF_CONTROL(1);
347457e252bfSMichael Neumann 			tmp |= LC_GEN2_EN_STRAP;
347557e252bfSMichael Neumann 
347657e252bfSMichael Neumann 			tmp |= LC_CLR_FAILED_SPD_CHANGE_CNT;
347757e252bfSMichael Neumann 			WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, tmp);
3478c4ef309bSzrj 			udelay(10);
347957e252bfSMichael Neumann 			tmp &= ~LC_CLR_FAILED_SPD_CHANGE_CNT;
348057e252bfSMichael Neumann 			WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, tmp);
348157e252bfSMichael Neumann 		} else {
348257e252bfSMichael Neumann 			if (!pi->boot_in_gen2) {
348357e252bfSMichael Neumann 				bif = RREG32(CG_BIF_REQ_AND_RSP) & ~CG_CLIENT_REQ_MASK;
348457e252bfSMichael Neumann 				bif |= CG_CLIENT_REQ(0xd);
348557e252bfSMichael Neumann 				WREG32(CG_BIF_REQ_AND_RSP, bif);
348657e252bfSMichael Neumann 
348757e252bfSMichael Neumann 				tmp &= ~LC_HW_VOLTAGE_IF_CONTROL_MASK;
348857e252bfSMichael Neumann 				tmp &= ~LC_GEN2_EN_STRAP;
348957e252bfSMichael Neumann 			}
349057e252bfSMichael Neumann 			WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, tmp);
349157e252bfSMichael Neumann 		}
349257e252bfSMichael Neumann 	}
349357e252bfSMichael Neumann }
349457e252bfSMichael Neumann 
349557e252bfSMichael Neumann static void ni_enable_dynamic_pcie_gen2(struct radeon_device *rdev,
349657e252bfSMichael Neumann 					bool enable)
349757e252bfSMichael Neumann {
349857e252bfSMichael Neumann 	ni_enable_bif_dynamic_pcie_gen2(rdev, enable);
349957e252bfSMichael Neumann 
350057e252bfSMichael Neumann 	if (enable)
350157e252bfSMichael Neumann 		WREG32_P(GENERAL_PWRMGT, ENABLE_GEN2PCIE, ~ENABLE_GEN2PCIE);
350257e252bfSMichael Neumann 	else
350357e252bfSMichael Neumann 		WREG32_P(GENERAL_PWRMGT, 0, ~ENABLE_GEN2PCIE);
350457e252bfSMichael Neumann }
350557e252bfSMichael Neumann 
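/*
 * Reprogram the UVD vclk/dclk around an engine clock change: before
 * the switch when the top sclk is going down, after it otherwise (see
 * ni_set_uvd_clock_after_set_eng_clock below).
 */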
350657e252bfSMichael Neumann void ni_set_uvd_clock_before_set_eng_clock(struct radeon_device *rdev,
350757e252bfSMichael Neumann 					   struct radeon_ps *new_ps,
350857e252bfSMichael Neumann 					   struct radeon_ps *old_ps)
350957e252bfSMichael Neumann {
351057e252bfSMichael Neumann 	struct ni_ps *new_state = ni_get_ps(new_ps);
351157e252bfSMichael Neumann 	struct ni_ps *current_state = ni_get_ps(old_ps);
351257e252bfSMichael Neumann 
351357e252bfSMichael Neumann 	if ((new_ps->vclk == old_ps->vclk) &&
351457e252bfSMichael Neumann 	    (new_ps->dclk == old_ps->dclk))
351557e252bfSMichael Neumann 		return;
351657e252bfSMichael Neumann 
351757e252bfSMichael Neumann 	if (new_state->performance_levels[new_state->performance_level_count - 1].sclk >=
351857e252bfSMichael Neumann 	    current_state->performance_levels[current_state->performance_level_count - 1].sclk)
351957e252bfSMichael Neumann 		return;
352057e252bfSMichael Neumann 
352157e252bfSMichael Neumann 	radeon_set_uvd_clocks(rdev, new_ps->vclk, new_ps->dclk);
352257e252bfSMichael Neumann }
352357e252bfSMichael Neumann 
352457e252bfSMichael Neumann void ni_set_uvd_clock_after_set_eng_clock(struct radeon_device *rdev,
352557e252bfSMichael Neumann 					  struct radeon_ps *new_ps,
352657e252bfSMichael Neumann 					  struct radeon_ps *old_ps)
352757e252bfSMichael Neumann {
352857e252bfSMichael Neumann 	struct ni_ps *new_state = ni_get_ps(new_ps);
352957e252bfSMichael Neumann 	struct ni_ps *current_state = ni_get_ps(old_ps);
353057e252bfSMichael Neumann 
353157e252bfSMichael Neumann 	if ((new_ps->vclk == old_ps->vclk) &&
353257e252bfSMichael Neumann 	    (new_ps->dclk == old_ps->dclk))
353357e252bfSMichael Neumann 		return;
353457e252bfSMichael Neumann 
353557e252bfSMichael Neumann 	if (new_state->performance_levels[new_state->performance_level_count - 1].sclk <
353657e252bfSMichael Neumann 	    current_state->performance_levels[current_state->performance_level_count - 1].sclk)
353757e252bfSMichael Neumann 		return;
353857e252bfSMichael Neumann 
353957e252bfSMichael Neumann 	radeon_set_uvd_clocks(rdev, new_ps->vclk, new_ps->dclk);
354057e252bfSMichael Neumann }
354157e252bfSMichael Neumann 
354257e252bfSMichael Neumann void ni_dpm_setup_asic(struct radeon_device *rdev)
354357e252bfSMichael Neumann {
354457e252bfSMichael Neumann 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3545c6f73aabSFrançois Tigeot 	int r;
354657e252bfSMichael Neumann 
3547c6f73aabSFrançois Tigeot 	r = ni_mc_load_microcode(rdev);
3548c6f73aabSFrançois Tigeot 	if (r)
3549c6f73aabSFrançois Tigeot 		DRM_ERROR("Failed to load MC firmware!\n");
355057e252bfSMichael Neumann 	ni_read_clock_registers(rdev);
355157e252bfSMichael Neumann 	btc_read_arb_registers(rdev);
355257e252bfSMichael Neumann 	rv770_get_memory_type(rdev);
355357e252bfSMichael Neumann 	if (eg_pi->pcie_performance_request)
355457e252bfSMichael Neumann 		ni_advertise_gen2_capability(rdev);
355557e252bfSMichael Neumann 	rv770_get_pcie_gen2_status(rdev);
355657e252bfSMichael Neumann 	rv770_enable_acpi_pm(rdev);
355757e252bfSMichael Neumann }
355857e252bfSMichael Neumann 
355957e252bfSMichael Neumann void ni_update_current_ps(struct radeon_device *rdev,
356057e252bfSMichael Neumann 			  struct radeon_ps *rps)
356157e252bfSMichael Neumann {
356257e252bfSMichael Neumann 	struct ni_ps *new_ps = ni_get_ps(rps);
356357e252bfSMichael Neumann 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
356457e252bfSMichael Neumann 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
356557e252bfSMichael Neumann 
356657e252bfSMichael Neumann 	eg_pi->current_rps = *rps;
356757e252bfSMichael Neumann 	ni_pi->current_ps = *new_ps;
356857e252bfSMichael Neumann 	eg_pi->current_rps.ps_priv = &ni_pi->current_ps;
356957e252bfSMichael Neumann }
357057e252bfSMichael Neumann 
357157e252bfSMichael Neumann void ni_update_requested_ps(struct radeon_device *rdev,
357257e252bfSMichael Neumann 			    struct radeon_ps *rps)
357357e252bfSMichael Neumann {
357457e252bfSMichael Neumann 	struct ni_ps *new_ps = ni_get_ps(rps);
357557e252bfSMichael Neumann 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
357657e252bfSMichael Neumann 	struct ni_power_info *ni_pi = ni_get_pi(rdev);
357757e252bfSMichael Neumann 
357857e252bfSMichael Neumann 	eg_pi->requested_rps = *rps;
357957e252bfSMichael Neumann 	ni_pi->requested_ps = *new_ps;
358057e252bfSMichael Neumann 	eg_pi->requested_rps.ps_priv = &ni_pi->requested_ps;
358157e252bfSMichael Neumann }
358257e252bfSMichael Neumann 
358357e252bfSMichael Neumann int ni_dpm_enable(struct radeon_device *rdev)
358457e252bfSMichael Neumann {
358557e252bfSMichael Neumann 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
358657e252bfSMichael Neumann 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
358757e252bfSMichael Neumann 	struct radeon_ps *boot_ps = rdev->pm.dpm.boot_ps;
358857e252bfSMichael Neumann 	int ret;
358957e252bfSMichael Neumann 
359057e252bfSMichael Neumann 	if (pi->gfx_clock_gating)
359157e252bfSMichael Neumann 		ni_cg_clockgating_default(rdev);
359257e252bfSMichael Neumann 	if (btc_dpm_enabled(rdev))
359357e252bfSMichael Neumann 		return -EINVAL;
359457e252bfSMichael Neumann 	if (pi->mg_clock_gating)
359557e252bfSMichael Neumann 		ni_mg_clockgating_default(rdev);
359657e252bfSMichael Neumann 	if (eg_pi->ls_clock_gating)
359757e252bfSMichael Neumann 		ni_ls_clockgating_default(rdev);
359857e252bfSMichael Neumann 	if (pi->voltage_control) {
359957e252bfSMichael Neumann 		rv770_enable_voltage_control(rdev, true);
360057e252bfSMichael Neumann 		ret = cypress_construct_voltage_tables(rdev);
360157e252bfSMichael Neumann 		if (ret) {
360257e252bfSMichael Neumann 			DRM_ERROR("cypress_construct_voltage_tables failed\n");
360357e252bfSMichael Neumann 			return ret;
360457e252bfSMichael Neumann 		}
360557e252bfSMichael Neumann 	}
360657e252bfSMichael Neumann 	if (eg_pi->dynamic_ac_timing) {
360757e252bfSMichael Neumann 		ret = ni_initialize_mc_reg_table(rdev);
360857e252bfSMichael Neumann 		if (ret)
360957e252bfSMichael Neumann 			eg_pi->dynamic_ac_timing = false;
361057e252bfSMichael Neumann 	}
361157e252bfSMichael Neumann 	if (pi->dynamic_ss)
361257e252bfSMichael Neumann 		cypress_enable_spread_spectrum(rdev, true);
361357e252bfSMichael Neumann 	if (pi->thermal_protection)
361457e252bfSMichael Neumann 		rv770_enable_thermal_protection(rdev, true);
361557e252bfSMichael Neumann 	rv770_setup_bsp(rdev);
361657e252bfSMichael Neumann 	rv770_program_git(rdev);
361757e252bfSMichael Neumann 	rv770_program_tp(rdev);
361857e252bfSMichael Neumann 	rv770_program_tpp(rdev);
361957e252bfSMichael Neumann 	rv770_program_sstp(rdev);
362057e252bfSMichael Neumann 	cypress_enable_display_gap(rdev);
362157e252bfSMichael Neumann 	rv770_program_vc(rdev);
362257e252bfSMichael Neumann 	if (pi->dynamic_pcie_gen2)
362357e252bfSMichael Neumann 		ni_enable_dynamic_pcie_gen2(rdev, true);
362457e252bfSMichael Neumann 	ret = rv770_upload_firmware(rdev);
362557e252bfSMichael Neumann 	if (ret) {
362657e252bfSMichael Neumann 		DRM_ERROR("rv770_upload_firmware failed\n");
362757e252bfSMichael Neumann 		return ret;
362857e252bfSMichael Neumann 	}
362957e252bfSMichael Neumann 	ret = ni_process_firmware_header(rdev);
363057e252bfSMichael Neumann 	if (ret) {
363157e252bfSMichael Neumann 		DRM_ERROR("ni_process_firmware_header failed\n");
363257e252bfSMichael Neumann 		return ret;
363357e252bfSMichael Neumann 	}
363457e252bfSMichael Neumann 	ret = ni_initial_switch_from_arb_f0_to_f1(rdev);
363557e252bfSMichael Neumann 	if (ret) {
363657e252bfSMichael Neumann 		DRM_ERROR("ni_initial_switch_from_arb_f0_to_f1 failed\n");
363757e252bfSMichael Neumann 		return ret;
363857e252bfSMichael Neumann 	}
363957e252bfSMichael Neumann 	ret = ni_init_smc_table(rdev);
364057e252bfSMichael Neumann 	if (ret) {
364157e252bfSMichael Neumann 		DRM_ERROR("ni_init_smc_table failed\n");
364257e252bfSMichael Neumann 		return ret;
364357e252bfSMichael Neumann 	}
364457e252bfSMichael Neumann 	ret = ni_init_smc_spll_table(rdev);
364557e252bfSMichael Neumann 	if (ret) {
364657e252bfSMichael Neumann 		DRM_ERROR("ni_init_smc_spll_table failed\n");
364757e252bfSMichael Neumann 		return ret;
364857e252bfSMichael Neumann 	}
364957e252bfSMichael Neumann 	ret = ni_init_arb_table_index(rdev);
365057e252bfSMichael Neumann 	if (ret) {
365157e252bfSMichael Neumann 		DRM_ERROR("ni_init_arb_table_index failed\n");
365257e252bfSMichael Neumann 		return ret;
365357e252bfSMichael Neumann 	}
365457e252bfSMichael Neumann 	if (eg_pi->dynamic_ac_timing) {
365557e252bfSMichael Neumann 		ret = ni_populate_mc_reg_table(rdev, boot_ps);
365657e252bfSMichael Neumann 		if (ret) {
365757e252bfSMichael Neumann 			DRM_ERROR("ni_populate_mc_reg_table failed\n");
365857e252bfSMichael Neumann 			return ret;
365957e252bfSMichael Neumann 		}
366057e252bfSMichael Neumann 	}
366157e252bfSMichael Neumann 	ret = ni_initialize_smc_cac_tables(rdev);
366257e252bfSMichael Neumann 	if (ret) {
366357e252bfSMichael Neumann 		DRM_ERROR("ni_initialize_smc_cac_tables failed\n");
366457e252bfSMichael Neumann 		return ret;
366557e252bfSMichael Neumann 	}
366657e252bfSMichael Neumann 	ret = ni_initialize_hardware_cac_manager(rdev);
366757e252bfSMichael Neumann 	if (ret) {
366857e252bfSMichael Neumann 		DRM_ERROR("ni_initialize_hardware_cac_manager failed\n");
366957e252bfSMichael Neumann 		return ret;
367057e252bfSMichael Neumann 	}
367157e252bfSMichael Neumann 	ret = ni_populate_smc_tdp_limits(rdev, boot_ps);
367257e252bfSMichael Neumann 	if (ret) {
367357e252bfSMichael Neumann 		DRM_ERROR("ni_populate_smc_tdp_limits failed\n");
367457e252bfSMichael Neumann 		return ret;
367557e252bfSMichael Neumann 	}
367657e252bfSMichael Neumann 	ni_program_response_times(rdev);
367757e252bfSMichael Neumann 	r7xx_start_smc(rdev);
367857e252bfSMichael Neumann 	ret = cypress_notify_smc_display_change(rdev, false);
367957e252bfSMichael Neumann 	if (ret) {
368057e252bfSMichael Neumann 		DRM_ERROR("cypress_notify_smc_display_change failed\n");
368157e252bfSMichael Neumann 		return ret;
368257e252bfSMichael Neumann 	}
368357e252bfSMichael Neumann 	cypress_enable_sclk_control(rdev, true);
368457e252bfSMichael Neumann 	if (eg_pi->memory_transition)
368557e252bfSMichael Neumann 		cypress_enable_mclk_control(rdev, true);
368657e252bfSMichael Neumann 	cypress_start_dpm(rdev);
368757e252bfSMichael Neumann 	if (pi->gfx_clock_gating)
368857e252bfSMichael Neumann 		ni_gfx_clockgating_enable(rdev, true);
368957e252bfSMichael Neumann 	if (pi->mg_clock_gating)
369057e252bfSMichael Neumann 		ni_mg_clockgating_enable(rdev, true);
369157e252bfSMichael Neumann 	if (eg_pi->ls_clock_gating)
369257e252bfSMichael Neumann 		ni_ls_clockgating_enable(rdev, true);
369357e252bfSMichael Neumann 
369457e252bfSMichael Neumann 	rv770_enable_auto_throttle_source(rdev, RADEON_DPM_AUTO_THROTTLE_SRC_THERMAL, true);
369557e252bfSMichael Neumann 
369657e252bfSMichael Neumann 	ni_update_current_ps(rdev, boot_ps);
369757e252bfSMichael Neumann 
369857e252bfSMichael Neumann 	return 0;
369957e252bfSMichael Neumann }
370057e252bfSMichael Neumann 
370157e252bfSMichael Neumann void ni_dpm_disable(struct radeon_device *rdev)
370257e252bfSMichael Neumann {
370357e252bfSMichael Neumann 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
370457e252bfSMichael Neumann 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
370557e252bfSMichael Neumann 	struct radeon_ps *boot_ps = rdev->pm.dpm.boot_ps;
370657e252bfSMichael Neumann 
370757e252bfSMichael Neumann 	if (!btc_dpm_enabled(rdev))
370857e252bfSMichael Neumann 		return;
370957e252bfSMichael Neumann 	rv770_clear_vc(rdev);
371057e252bfSMichael Neumann 	if (pi->thermal_protection)
371157e252bfSMichael Neumann 		rv770_enable_thermal_protection(rdev, false);
371257e252bfSMichael Neumann 	ni_enable_power_containment(rdev, boot_ps, false);
371357e252bfSMichael Neumann 	ni_enable_smc_cac(rdev, boot_ps, false);
371457e252bfSMichael Neumann 	cypress_enable_spread_spectrum(rdev, false);
371557e252bfSMichael Neumann 	rv770_enable_auto_throttle_source(rdev, RADEON_DPM_AUTO_THROTTLE_SRC_THERMAL, false);
371657e252bfSMichael Neumann 	if (pi->dynamic_pcie_gen2)
371757e252bfSMichael Neumann 		ni_enable_dynamic_pcie_gen2(rdev, false);
371857e252bfSMichael Neumann 
371957e252bfSMichael Neumann 	if (rdev->irq.installed &&
372057e252bfSMichael Neumann 	    r600_is_internal_thermal_sensor(rdev->pm.int_thermal_type)) {
372157e252bfSMichael Neumann 		rdev->irq.dpm_thermal = false;
372257e252bfSMichael Neumann 		radeon_irq_set(rdev);
372357e252bfSMichael Neumann 	}
372457e252bfSMichael Neumann 
372557e252bfSMichael Neumann 	if (pi->gfx_clock_gating)
372657e252bfSMichael Neumann 		ni_gfx_clockgating_enable(rdev, false);
372757e252bfSMichael Neumann 	if (pi->mg_clock_gating)
372857e252bfSMichael Neumann 		ni_mg_clockgating_enable(rdev, false);
372957e252bfSMichael Neumann 	if (eg_pi->ls_clock_gating)
373057e252bfSMichael Neumann 		ni_ls_clockgating_enable(rdev, false);
373157e252bfSMichael Neumann 	ni_stop_dpm(rdev);
373257e252bfSMichael Neumann 	btc_reset_to_default(rdev);
373357e252bfSMichael Neumann 	ni_stop_smc(rdev);
373457e252bfSMichael Neumann 	ni_force_switch_to_arb_f0(rdev);
373557e252bfSMichael Neumann 
373657e252bfSMichael Neumann 	ni_update_current_ps(rdev, boot_ps);
373757e252bfSMichael Neumann }
373857e252bfSMichael Neumann 
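/*
 * Refresh the TDP limits for the requested state: restrict the
 * performance levels, halt the SMC while the limits are re-uploaded,
 * then resume it and commit the software state.
 */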
373957e252bfSMichael Neumann static int ni_power_control_set_level(struct radeon_device *rdev)
374057e252bfSMichael Neumann {
374157e252bfSMichael Neumann 	struct radeon_ps *new_ps = rdev->pm.dpm.requested_ps;
374257e252bfSMichael Neumann 	int ret;
374357e252bfSMichael Neumann 
374457e252bfSMichael Neumann 	ret = ni_restrict_performance_levels_before_switch(rdev);
374557e252bfSMichael Neumann 	if (ret)
374657e252bfSMichael Neumann 		return ret;
374757e252bfSMichael Neumann 	ret = rv770_halt_smc(rdev);
374857e252bfSMichael Neumann 	if (ret)
374957e252bfSMichael Neumann 		return ret;
375057e252bfSMichael Neumann 	ret = ni_populate_smc_tdp_limits(rdev, new_ps);
375157e252bfSMichael Neumann 	if (ret)
375257e252bfSMichael Neumann 		return ret;
375357e252bfSMichael Neumann 	ret = rv770_resume_smc(rdev);
375457e252bfSMichael Neumann 	if (ret)
375557e252bfSMichael Neumann 		return ret;
375657e252bfSMichael Neumann 	ret = rv770_set_sw_state(rdev);
375757e252bfSMichael Neumann 	if (ret)
375857e252bfSMichael Neumann 		return ret;
375957e252bfSMichael Neumann 
376057e252bfSMichael Neumann 	return 0;
376157e252bfSMichael Neumann }
376257e252bfSMichael Neumann 
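/*
 * Copy the requested power state into driver-private storage and apply
 * the NI state-adjustment rules before the actual switch.
 */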
376357e252bfSMichael Neumann int ni_dpm_pre_set_power_state(struct radeon_device *rdev)
376457e252bfSMichael Neumann {
376557e252bfSMichael Neumann 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
376657e252bfSMichael Neumann 	struct radeon_ps requested_ps = *rdev->pm.dpm.requested_ps;
376757e252bfSMichael Neumann 	struct radeon_ps *new_ps = &requested_ps;
376857e252bfSMichael Neumann 
376957e252bfSMichael Neumann 	ni_update_requested_ps(rdev, new_ps);
377057e252bfSMichael Neumann 
377157e252bfSMichael Neumann 	ni_apply_state_adjust_rules(rdev, &eg_pi->requested_rps);
377257e252bfSMichael Neumann 
377357e252bfSMichael Neumann 	return 0;
377457e252bfSMichael Neumann }
377557e252bfSMichael Neumann 
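/*
 * Full state-switch sequence: temporarily disable power containment and
 * CAC, halt the SMC, upload the new software state (plus the MC register
 * table when dynamic AC timing is in use), reprogram the memory timing
 * parameters, resume the SMC and commit the state, then re-enable
 * CAC/power containment and refresh the TDP limit.
 */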
377657e252bfSMichael Neumann int ni_dpm_set_power_state(struct radeon_device *rdev)
377757e252bfSMichael Neumann {
377857e252bfSMichael Neumann 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
377957e252bfSMichael Neumann 	struct radeon_ps *new_ps = &eg_pi->requested_rps;
378057e252bfSMichael Neumann 	struct radeon_ps *old_ps = &eg_pi->current_rps;
378157e252bfSMichael Neumann 	int ret;
378257e252bfSMichael Neumann 
378357e252bfSMichael Neumann 	ret = ni_restrict_performance_levels_before_switch(rdev);
378457e252bfSMichael Neumann 	if (ret) {
378557e252bfSMichael Neumann 		DRM_ERROR("ni_restrict_performance_levels_before_switch failed\n");
378657e252bfSMichael Neumann 		return ret;
378757e252bfSMichael Neumann 	}
378857e252bfSMichael Neumann 	ni_set_uvd_clock_before_set_eng_clock(rdev, new_ps, old_ps);
378957e252bfSMichael Neumann 	ret = ni_enable_power_containment(rdev, new_ps, false);
379057e252bfSMichael Neumann 	if (ret) {
379157e252bfSMichael Neumann 		DRM_ERROR("ni_enable_power_containment failed\n");
379257e252bfSMichael Neumann 		return ret;
379357e252bfSMichael Neumann 	}
379457e252bfSMichael Neumann 	ret = ni_enable_smc_cac(rdev, new_ps, false);
379557e252bfSMichael Neumann 	if (ret) {
379657e252bfSMichael Neumann 		DRM_ERROR("ni_enable_smc_cac failed\n");
379757e252bfSMichael Neumann 		return ret;
379857e252bfSMichael Neumann 	}
379957e252bfSMichael Neumann 	ret = rv770_halt_smc(rdev);
380057e252bfSMichael Neumann 	if (ret) {
380157e252bfSMichael Neumann 		DRM_ERROR("rv770_halt_smc failed\n");
380257e252bfSMichael Neumann 		return ret;
380357e252bfSMichael Neumann 	}
380457e252bfSMichael Neumann 	if (eg_pi->smu_uvd_hs)
380557e252bfSMichael Neumann 		btc_notify_uvd_to_smc(rdev, new_ps);
380657e252bfSMichael Neumann 	ret = ni_upload_sw_state(rdev, new_ps);
380757e252bfSMichael Neumann 	if (ret) {
380857e252bfSMichael Neumann 		DRM_ERROR("ni_upload_sw_state failed\n");
380957e252bfSMichael Neumann 		return ret;
381057e252bfSMichael Neumann 	}
381157e252bfSMichael Neumann 	if (eg_pi->dynamic_ac_timing) {
381257e252bfSMichael Neumann 		ret = ni_upload_mc_reg_table(rdev, new_ps);
381357e252bfSMichael Neumann 		if (ret) {
381457e252bfSMichael Neumann 			DRM_ERROR("ni_upload_mc_reg_table failed\n");
381557e252bfSMichael Neumann 			return ret;
381657e252bfSMichael Neumann 		}
381757e252bfSMichael Neumann 	}
381857e252bfSMichael Neumann 	ret = ni_program_memory_timing_parameters(rdev, new_ps);
381957e252bfSMichael Neumann 	if (ret) {
382057e252bfSMichael Neumann 		DRM_ERROR("ni_program_memory_timing_parameters failed\n");
382157e252bfSMichael Neumann 		return ret;
382257e252bfSMichael Neumann 	}
382357e252bfSMichael Neumann 	ret = rv770_resume_smc(rdev);
382457e252bfSMichael Neumann 	if (ret) {
382557e252bfSMichael Neumann 		DRM_ERROR("rv770_resume_smc failed\n");
382657e252bfSMichael Neumann 		return ret;
382757e252bfSMichael Neumann 	}
382857e252bfSMichael Neumann 	ret = rv770_set_sw_state(rdev);
382957e252bfSMichael Neumann 	if (ret) {
383057e252bfSMichael Neumann 		DRM_ERROR("rv770_set_sw_state failed\n");
383157e252bfSMichael Neumann 		return ret;
383257e252bfSMichael Neumann 	}
383357e252bfSMichael Neumann 	ni_set_uvd_clock_after_set_eng_clock(rdev, new_ps, old_ps);
383457e252bfSMichael Neumann 	ret = ni_enable_smc_cac(rdev, new_ps, true);
383557e252bfSMichael Neumann 	if (ret) {
383657e252bfSMichael Neumann 		DRM_ERROR("ni_enable_smc_cac failed\n");
383757e252bfSMichael Neumann 		return ret;
383857e252bfSMichael Neumann 	}
383957e252bfSMichael Neumann 	ret = ni_enable_power_containment(rdev, new_ps, true);
384057e252bfSMichael Neumann 	if (ret) {
384157e252bfSMichael Neumann 		DRM_ERROR("ni_enable_power_containment failed\n");
384257e252bfSMichael Neumann 		return ret;
384357e252bfSMichael Neumann 	}
384457e252bfSMichael Neumann 
384557e252bfSMichael Neumann 	/* update tdp */
384657e252bfSMichael Neumann 	ret = ni_power_control_set_level(rdev);
384757e252bfSMichael Neumann 	if (ret) {
384857e252bfSMichael Neumann 		DRM_ERROR("ni_power_control_set_level failed\n");
384957e252bfSMichael Neumann 		return ret;
385057e252bfSMichael Neumann 	}
385157e252bfSMichael Neumann 
385257e252bfSMichael Neumann 	return 0;
385357e252bfSMichael Neumann }
385457e252bfSMichael Neumann 
385557e252bfSMichael Neumann void ni_dpm_post_set_power_state(struct radeon_device *rdev)
385657e252bfSMichael Neumann {
385757e252bfSMichael Neumann 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
385857e252bfSMichael Neumann 	struct radeon_ps *new_ps = &eg_pi->requested_rps;
385957e252bfSMichael Neumann 
386057e252bfSMichael Neumann 	ni_update_current_ps(rdev, new_ps);
386157e252bfSMichael Neumann }
386257e252bfSMichael Neumann 
3863b6771645Szrj #if 0
386457e252bfSMichael Neumann void ni_dpm_reset_asic(struct radeon_device *rdev)
386557e252bfSMichael Neumann {
386657e252bfSMichael Neumann 	ni_restrict_performance_levels_before_switch(rdev);
386757e252bfSMichael Neumann 	rv770_set_boot_state(rdev);
386857e252bfSMichael Neumann }
3869b6771645Szrj #endif
387057e252bfSMichael Neumann 
387157e252bfSMichael Neumann union power_info {
387257e252bfSMichael Neumann 	struct _ATOM_POWERPLAY_INFO info;
387357e252bfSMichael Neumann 	struct _ATOM_POWERPLAY_INFO_V2 info_2;
387457e252bfSMichael Neumann 	struct _ATOM_POWERPLAY_INFO_V3 info_3;
387557e252bfSMichael Neumann 	struct _ATOM_PPLIB_POWERPLAYTABLE pplib;
387657e252bfSMichael Neumann 	struct _ATOM_PPLIB_POWERPLAYTABLE2 pplib2;
387757e252bfSMichael Neumann 	struct _ATOM_PPLIB_POWERPLAYTABLE3 pplib3;
387857e252bfSMichael Neumann };
387957e252bfSMichael Neumann 
388057e252bfSMichael Neumann union pplib_clock_info {
388157e252bfSMichael Neumann 	struct _ATOM_PPLIB_R600_CLOCK_INFO r600;
388257e252bfSMichael Neumann 	struct _ATOM_PPLIB_RS780_CLOCK_INFO rs780;
388357e252bfSMichael Neumann 	struct _ATOM_PPLIB_EVERGREEN_CLOCK_INFO evergreen;
388457e252bfSMichael Neumann 	struct _ATOM_PPLIB_SUMO_CLOCK_INFO sumo;
388557e252bfSMichael Neumann };
388657e252bfSMichael Neumann 
388757e252bfSMichael Neumann union pplib_power_state {
388857e252bfSMichael Neumann 	struct _ATOM_PPLIB_STATE v1;
388957e252bfSMichael Neumann 	struct _ATOM_PPLIB_STATE_V2 v2;
389057e252bfSMichael Neumann };
389157e252bfSMichael Neumann 
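/*
 * Pull the classification flags and UVD clocks out of the PPLib
 * non-clock info block and remember which states are the boot and
 * UVD states.
 */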
389257e252bfSMichael Neumann static void ni_parse_pplib_non_clock_info(struct radeon_device *rdev,
389357e252bfSMichael Neumann 					  struct radeon_ps *rps,
389457e252bfSMichael Neumann 					  struct _ATOM_PPLIB_NONCLOCK_INFO *non_clock_info,
389557e252bfSMichael Neumann 					  u8 table_rev)
389657e252bfSMichael Neumann {
389757e252bfSMichael Neumann 	rps->caps = le32_to_cpu(non_clock_info->ulCapsAndSettings);
389857e252bfSMichael Neumann 	rps->class = le16_to_cpu(non_clock_info->usClassification);
389957e252bfSMichael Neumann 	rps->class2 = le16_to_cpu(non_clock_info->usClassification2);
390057e252bfSMichael Neumann 
390157e252bfSMichael Neumann 	if (ATOM_PPLIB_NONCLOCKINFO_VER1 < table_rev) {
390257e252bfSMichael Neumann 		rps->vclk = le32_to_cpu(non_clock_info->ulVCLK);
390357e252bfSMichael Neumann 		rps->dclk = le32_to_cpu(non_clock_info->ulDCLK);
390457e252bfSMichael Neumann 	} else if (r600_is_uvd_state(rps->class, rps->class2)) {
390557e252bfSMichael Neumann 		rps->vclk = RV770_DEFAULT_VCLK_FREQ;
390657e252bfSMichael Neumann 		rps->dclk = RV770_DEFAULT_DCLK_FREQ;
390757e252bfSMichael Neumann 	} else {
390857e252bfSMichael Neumann 		rps->vclk = 0;
390957e252bfSMichael Neumann 		rps->dclk = 0;
391057e252bfSMichael Neumann 	}
391157e252bfSMichael Neumann 
391257e252bfSMichael Neumann 	if (rps->class & ATOM_PPLIB_CLASSIFICATION_BOOT)
391357e252bfSMichael Neumann 		rdev->pm.dpm.boot_ps = rps;
391457e252bfSMichael Neumann 	if (rps->class & ATOM_PPLIB_CLASSIFICATION_UVDSTATE)
391557e252bfSMichael Neumann 		rdev->pm.dpm.uvd_ps = rps;
391657e252bfSMichael Neumann }
391757e252bfSMichael Neumann 
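/*
 * Fill in one performance level from a PPLib clock-info entry and
 * record the ACPI/ULV levels and the min/max VDDC seen in the table.
 */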
391857e252bfSMichael Neumann static void ni_parse_pplib_clock_info(struct radeon_device *rdev,
391957e252bfSMichael Neumann 				      struct radeon_ps *rps, int index,
392057e252bfSMichael Neumann 				      union pplib_clock_info *clock_info)
392157e252bfSMichael Neumann {
392257e252bfSMichael Neumann 	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
392357e252bfSMichael Neumann 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
392457e252bfSMichael Neumann 	struct ni_ps *ps = ni_get_ps(rps);
392557e252bfSMichael Neumann 	struct rv7xx_pl *pl = &ps->performance_levels[index];
392657e252bfSMichael Neumann 
392757e252bfSMichael Neumann 	ps->performance_level_count = index + 1;
392857e252bfSMichael Neumann 
392957e252bfSMichael Neumann 	pl->sclk = le16_to_cpu(clock_info->evergreen.usEngineClockLow);
393057e252bfSMichael Neumann 	pl->sclk |= clock_info->evergreen.ucEngineClockHigh << 16;
393157e252bfSMichael Neumann 	pl->mclk = le16_to_cpu(clock_info->evergreen.usMemoryClockLow);
393257e252bfSMichael Neumann 	pl->mclk |= clock_info->evergreen.ucMemoryClockHigh << 16;
393357e252bfSMichael Neumann 
393457e252bfSMichael Neumann 	pl->vddc = le16_to_cpu(clock_info->evergreen.usVDDC);
393557e252bfSMichael Neumann 	pl->vddci = le16_to_cpu(clock_info->evergreen.usVDDCI);
393657e252bfSMichael Neumann 	pl->flags = le32_to_cpu(clock_info->evergreen.ulFlags);
393757e252bfSMichael Neumann 
393857e252bfSMichael Neumann 	/* patch up vddc if necessary */
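	/* 0xff01 looks like a placeholder/leakage marker; fall back to the board's max VDDC */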
393957e252bfSMichael Neumann 	if (pl->vddc == 0xff01) {
3940c6f73aabSFrançois Tigeot 		if (pi->max_vddc)
3941c6f73aabSFrançois Tigeot 			pl->vddc = pi->max_vddc;
394257e252bfSMichael Neumann 	}
394357e252bfSMichael Neumann 
394457e252bfSMichael Neumann 	if (rps->class & ATOM_PPLIB_CLASSIFICATION_ACPI) {
394557e252bfSMichael Neumann 		pi->acpi_vddc = pl->vddc;
394657e252bfSMichael Neumann 		eg_pi->acpi_vddci = pl->vddci;
394757e252bfSMichael Neumann 		if (ps->performance_levels[0].flags & ATOM_PPLIB_R600_FLAGS_PCIEGEN2)
394857e252bfSMichael Neumann 			pi->acpi_pcie_gen2 = true;
394957e252bfSMichael Neumann 		else
395057e252bfSMichael Neumann 			pi->acpi_pcie_gen2 = false;
395157e252bfSMichael Neumann 	}
395257e252bfSMichael Neumann 
395357e252bfSMichael Neumann 	if (rps->class2 & ATOM_PPLIB_CLASSIFICATION2_ULV) {
395457e252bfSMichael Neumann 		eg_pi->ulv.supported = true;
395557e252bfSMichael Neumann 		eg_pi->ulv.pl = pl;
395657e252bfSMichael Neumann 	}
395757e252bfSMichael Neumann 
395857e252bfSMichael Neumann 	if (pi->min_vddc_in_table > pl->vddc)
395957e252bfSMichael Neumann 		pi->min_vddc_in_table = pl->vddc;
396057e252bfSMichael Neumann 
396157e252bfSMichael Neumann 	if (pi->max_vddc_in_table < pl->vddc)
396257e252bfSMichael Neumann 		pi->max_vddc_in_table = pl->vddc;
396357e252bfSMichael Neumann 
396457e252bfSMichael Neumann 	/* patch up boot state */
396557e252bfSMichael Neumann 	if (rps->class & ATOM_PPLIB_CLASSIFICATION_BOOT) {
396657e252bfSMichael Neumann 		u16 vddc, vddci, mvdd;
396757e252bfSMichael Neumann 		radeon_atombios_get_default_voltages(rdev, &vddc, &vddci, &mvdd);
396857e252bfSMichael Neumann 		pl->mclk = rdev->clock.default_mclk;
396957e252bfSMichael Neumann 		pl->sclk = rdev->clock.default_sclk;
397057e252bfSMichael Neumann 		pl->vddc = vddc;
397157e252bfSMichael Neumann 		pl->vddci = vddci;
397257e252bfSMichael Neumann 	}
397357e252bfSMichael Neumann 
397457e252bfSMichael Neumann 	if ((rps->class & ATOM_PPLIB_CLASSIFICATION_UI_MASK) ==
397557e252bfSMichael Neumann 	    ATOM_PPLIB_CLASSIFICATION_UI_PERFORMANCE) {
397657e252bfSMichael Neumann 		rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.sclk = pl->sclk;
397757e252bfSMichael Neumann 		rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.mclk = pl->mclk;
397857e252bfSMichael Neumann 		rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.vddc = pl->vddc;
397957e252bfSMichael Neumann 		rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.vddci = pl->vddci;
398057e252bfSMichael Neumann 	}
398157e252bfSMichael Neumann }
398257e252bfSMichael Neumann 
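/*
 * Walk the ATOM PowerPlay table in the VBIOS and build the
 * rdev->pm.dpm.ps array, allocating one ni_ps per state entry.
 */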
398357e252bfSMichael Neumann static int ni_parse_power_table(struct radeon_device *rdev)
398457e252bfSMichael Neumann {
398557e252bfSMichael Neumann 	struct radeon_mode_info *mode_info = &rdev->mode_info;
398657e252bfSMichael Neumann 	struct _ATOM_PPLIB_NONCLOCK_INFO *non_clock_info;
398757e252bfSMichael Neumann 	union pplib_power_state *power_state;
398857e252bfSMichael Neumann 	int i, j;
398957e252bfSMichael Neumann 	union pplib_clock_info *clock_info;
399057e252bfSMichael Neumann 	union power_info *power_info;
399157e252bfSMichael Neumann 	int index = GetIndexIntoMasterTable(DATA, PowerPlayInfo);
399257e252bfSMichael Neumann 	u16 data_offset;
399357e252bfSMichael Neumann 	u8 frev, crev;
399457e252bfSMichael Neumann 	struct ni_ps *ps;
399557e252bfSMichael Neumann 
399657e252bfSMichael Neumann 	if (!atom_parse_data_header(mode_info->atom_context, index, NULL,
399757e252bfSMichael Neumann 				   &frev, &crev, &data_offset))
399857e252bfSMichael Neumann 		return -EINVAL;
399957e252bfSMichael Neumann 	power_info = (union power_info *)((uint8_t*)mode_info->atom_context->bios + data_offset);
400057e252bfSMichael Neumann 
400157e252bfSMichael Neumann 	rdev->pm.dpm.ps = kzalloc(sizeof(struct radeon_ps) *
400257e252bfSMichael Neumann 				  power_info->pplib.ucNumStates, GFP_KERNEL);
400357e252bfSMichael Neumann 	if (!rdev->pm.dpm.ps)
400457e252bfSMichael Neumann 		return -ENOMEM;
400557e252bfSMichael Neumann 
400657e252bfSMichael Neumann 	for (i = 0; i < power_info->pplib.ucNumStates; i++) {
400757e252bfSMichael Neumann 		power_state = (union pplib_power_state *)
400857e252bfSMichael Neumann 			((uint8_t*)mode_info->atom_context->bios + data_offset +
400957e252bfSMichael Neumann 			 le16_to_cpu(power_info->pplib.usStateArrayOffset) +
401057e252bfSMichael Neumann 			 i * power_info->pplib.ucStateEntrySize);
401157e252bfSMichael Neumann 		non_clock_info = (struct _ATOM_PPLIB_NONCLOCK_INFO *)
401257e252bfSMichael Neumann 			((uint8_t*)mode_info->atom_context->bios + data_offset +
401357e252bfSMichael Neumann 			 le16_to_cpu(power_info->pplib.usNonClockInfoArrayOffset) +
401457e252bfSMichael Neumann 			 (power_state->v1.ucNonClockStateIndex *
401557e252bfSMichael Neumann 			  power_info->pplib.ucNonClockSize));
401657e252bfSMichael Neumann 		if (power_info->pplib.ucStateEntrySize - 1) {
40174cd92098Szrj 			u8 *idx;
401857e252bfSMichael Neumann 			ps = kzalloc(sizeof(struct ni_ps), GFP_KERNEL);
401957e252bfSMichael Neumann 			if (ps == NULL) {
402057e252bfSMichael Neumann 				kfree(rdev->pm.dpm.ps);
402157e252bfSMichael Neumann 				return -ENOMEM;
402257e252bfSMichael Neumann 			}
402357e252bfSMichael Neumann 			rdev->pm.dpm.ps[i].ps_priv = ps;
402457e252bfSMichael Neumann 			ni_parse_pplib_non_clock_info(rdev, &rdev->pm.dpm.ps[i],
402557e252bfSMichael Neumann 							 non_clock_info,
402657e252bfSMichael Neumann 							 power_info->pplib.ucNonClockSize);
40274cd92098Szrj 			idx = (u8 *)&power_state->v1.ucClockStateIndices[0];
402857e252bfSMichael Neumann 			for (j = 0; j < (power_info->pplib.ucStateEntrySize - 1); j++) {
402957e252bfSMichael Neumann 				clock_info = (union pplib_clock_info *)
403057e252bfSMichael Neumann 					((uint8_t*)mode_info->atom_context->bios + data_offset +
403157e252bfSMichael Neumann 					 le16_to_cpu(power_info->pplib.usClockInfoArrayOffset) +
40324cd92098Szrj 					 (idx[j] * power_info->pplib.ucClockInfoSize));
403357e252bfSMichael Neumann 				ni_parse_pplib_clock_info(rdev,
403457e252bfSMichael Neumann 							  &rdev->pm.dpm.ps[i], j,
403557e252bfSMichael Neumann 							  clock_info);
403657e252bfSMichael Neumann 			}
403757e252bfSMichael Neumann 		}
403857e252bfSMichael Neumann 	}
403957e252bfSMichael Neumann 	rdev->pm.dpm.num_ps = power_info->pplib.ucNumStates;
404057e252bfSMichael Neumann 	return 0;
404157e252bfSMichael Neumann }
404257e252bfSMichael Neumann 
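/*
 * One-time DPM setup: allocate the power-info structures, parse the
 * power tables, and fill in the Cayman defaults, clock thresholds and
 * the CAC weight selection keyed off the PCI device ID.
 */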
404357e252bfSMichael Neumann int ni_dpm_init(struct radeon_device *rdev)
404457e252bfSMichael Neumann {
404557e252bfSMichael Neumann 	struct rv7xx_power_info *pi;
404657e252bfSMichael Neumann 	struct evergreen_power_info *eg_pi;
404757e252bfSMichael Neumann 	struct ni_power_info *ni_pi;
404857e252bfSMichael Neumann 	struct atom_clock_dividers dividers;
404957e252bfSMichael Neumann 	int ret;
405057e252bfSMichael Neumann 
405157e252bfSMichael Neumann 	ni_pi = kzalloc(sizeof(struct ni_power_info), GFP_KERNEL);
405257e252bfSMichael Neumann 	if (ni_pi == NULL)
405357e252bfSMichael Neumann 		return -ENOMEM;
405457e252bfSMichael Neumann 	rdev->pm.dpm.priv = ni_pi;
405557e252bfSMichael Neumann 	eg_pi = &ni_pi->eg;
405657e252bfSMichael Neumann 	pi = &eg_pi->rv7xx;
405757e252bfSMichael Neumann 
405857e252bfSMichael Neumann 	rv770_get_max_vddc(rdev);
405957e252bfSMichael Neumann 
406057e252bfSMichael Neumann 	eg_pi->ulv.supported = false;
406157e252bfSMichael Neumann 	pi->acpi_vddc = 0;
406257e252bfSMichael Neumann 	eg_pi->acpi_vddci = 0;
406357e252bfSMichael Neumann 	pi->min_vddc_in_table = 0;
406457e252bfSMichael Neumann 	pi->max_vddc_in_table = 0;
406557e252bfSMichael Neumann 
4066c6f73aabSFrançois Tigeot 	ret = r600_get_platform_caps(rdev);
4067c6f73aabSFrançois Tigeot 	if (ret)
4068c6f73aabSFrançois Tigeot 		return ret;
4069c6f73aabSFrançois Tigeot 
407057e252bfSMichael Neumann 	ret = ni_parse_power_table(rdev);
407157e252bfSMichael Neumann 	if (ret)
407257e252bfSMichael Neumann 		return ret;
407357e252bfSMichael Neumann 	ret = r600_parse_extended_power_table(rdev);
407457e252bfSMichael Neumann 	if (ret)
407557e252bfSMichael Neumann 		return ret;
407657e252bfSMichael Neumann 
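	/* built-in vddc vs. dispclk dependency table with four fixed points */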
407757e252bfSMichael Neumann 	rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries =
407857e252bfSMichael Neumann 		kzalloc(4 * sizeof(struct radeon_clock_voltage_dependency_entry), GFP_KERNEL);
407957e252bfSMichael Neumann 	if (!rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries) {
408057e252bfSMichael Neumann 		r600_free_extended_power_table(rdev);
408157e252bfSMichael Neumann 		return -ENOMEM;
408257e252bfSMichael Neumann 	}
408357e252bfSMichael Neumann 	rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.count = 4;
408457e252bfSMichael Neumann 	rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[0].clk = 0;
408557e252bfSMichael Neumann 	rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[0].v = 0;
408657e252bfSMichael Neumann 	rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[1].clk = 36000;
408757e252bfSMichael Neumann 	rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[1].v = 720;
408857e252bfSMichael Neumann 	rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[2].clk = 54000;
408957e252bfSMichael Neumann 	rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[2].v = 810;
409057e252bfSMichael Neumann 	rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[3].clk = 72000;
409157e252bfSMichael Neumann 	rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[3].v = 900;
409257e252bfSMichael Neumann 
409357e252bfSMichael Neumann 	ni_patch_dependency_tables_based_on_leakage(rdev);
409457e252bfSMichael Neumann 
409557e252bfSMichael Neumann 	if (rdev->pm.dpm.voltage_response_time == 0)
409657e252bfSMichael Neumann 		rdev->pm.dpm.voltage_response_time = R600_VOLTAGERESPONSETIME_DFLT;
409757e252bfSMichael Neumann 	if (rdev->pm.dpm.backbias_response_time == 0)
409857e252bfSMichael Neumann 		rdev->pm.dpm.backbias_response_time = R600_BACKBIASRESPONSETIME_DFLT;
409957e252bfSMichael Neumann 
410057e252bfSMichael Neumann 	ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
410157e252bfSMichael Neumann 					     0, false, &dividers);
410257e252bfSMichael Neumann 	if (ret)
410357e252bfSMichael Neumann 		pi->ref_div = dividers.ref_div + 1;
410457e252bfSMichael Neumann 	else
410557e252bfSMichael Neumann 		pi->ref_div = R600_REFERENCEDIVIDER_DFLT;
410657e252bfSMichael Neumann 
410757e252bfSMichael Neumann 	pi->rlp = RV770_RLP_DFLT;
410857e252bfSMichael Neumann 	pi->rmp = RV770_RMP_DFLT;
410957e252bfSMichael Neumann 	pi->lhp = RV770_LHP_DFLT;
411057e252bfSMichael Neumann 	pi->lmp = RV770_LMP_DFLT;
411157e252bfSMichael Neumann 
411257e252bfSMichael Neumann 	eg_pi->ats[0].rlp = RV770_RLP_DFLT;
411357e252bfSMichael Neumann 	eg_pi->ats[0].rmp = RV770_RMP_DFLT;
411457e252bfSMichael Neumann 	eg_pi->ats[0].lhp = RV770_LHP_DFLT;
411557e252bfSMichael Neumann 	eg_pi->ats[0].lmp = RV770_LMP_DFLT;
411657e252bfSMichael Neumann 
411757e252bfSMichael Neumann 	eg_pi->ats[1].rlp = BTC_RLP_UVD_DFLT;
411857e252bfSMichael Neumann 	eg_pi->ats[1].rmp = BTC_RMP_UVD_DFLT;
411957e252bfSMichael Neumann 	eg_pi->ats[1].lhp = BTC_LHP_UVD_DFLT;
412057e252bfSMichael Neumann 	eg_pi->ats[1].lmp = BTC_LMP_UVD_DFLT;
412157e252bfSMichael Neumann 
412257e252bfSMichael Neumann 	eg_pi->smu_uvd_hs = true;
412357e252bfSMichael Neumann 
4124c6f73aabSFrançois Tigeot 	if (rdev->pdev->device == 0x6707) {
412557e252bfSMichael Neumann 		pi->mclk_strobe_mode_threshold = 55000;
412657e252bfSMichael Neumann 		pi->mclk_edc_enable_threshold = 55000;
412757e252bfSMichael Neumann 		eg_pi->mclk_edc_wr_enable_threshold = 55000;
412857e252bfSMichael Neumann 	} else {
412957e252bfSMichael Neumann 		pi->mclk_strobe_mode_threshold = 40000;
413057e252bfSMichael Neumann 		pi->mclk_edc_enable_threshold = 40000;
413157e252bfSMichael Neumann 		eg_pi->mclk_edc_wr_enable_threshold = 40000;
413257e252bfSMichael Neumann 	}
413357e252bfSMichael Neumann 	ni_pi->mclk_rtt_mode_threshold = eg_pi->mclk_edc_wr_enable_threshold;
413457e252bfSMichael Neumann 
413557e252bfSMichael Neumann 	pi->voltage_control =
413657e252bfSMichael Neumann 		radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_VDDC, 0);
413757e252bfSMichael Neumann 
413857e252bfSMichael Neumann 	pi->mvdd_control =
413957e252bfSMichael Neumann 		radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_MVDDC, 0);
414057e252bfSMichael Neumann 
414157e252bfSMichael Neumann 	eg_pi->vddci_control =
414257e252bfSMichael Neumann 		radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_VDDCI, 0);
414357e252bfSMichael Neumann 
414457e252bfSMichael Neumann 	rv770_get_engine_memory_ss(rdev);
414557e252bfSMichael Neumann 
414657e252bfSMichael Neumann 	pi->asi = RV770_ASI_DFLT;
414757e252bfSMichael Neumann 	pi->pasi = CYPRESS_HASI_DFLT;
414857e252bfSMichael Neumann 	pi->vrc = CYPRESS_VRC_DFLT;
414957e252bfSMichael Neumann 
415057e252bfSMichael Neumann 	pi->power_gating = false;
415157e252bfSMichael Neumann 
415257e252bfSMichael Neumann 	pi->gfx_clock_gating = true;
415357e252bfSMichael Neumann 
415457e252bfSMichael Neumann 	pi->mg_clock_gating = true;
415557e252bfSMichael Neumann 	pi->mgcgtssm = true;
415657e252bfSMichael Neumann 	eg_pi->ls_clock_gating = false;
415757e252bfSMichael Neumann 	eg_pi->sclk_deep_sleep = false;
415857e252bfSMichael Neumann 
415957e252bfSMichael Neumann 	pi->dynamic_pcie_gen2 = true;
416057e252bfSMichael Neumann 
416157e252bfSMichael Neumann 	if (rdev->pm.int_thermal_type != THERMAL_TYPE_NONE)
416257e252bfSMichael Neumann 		pi->thermal_protection = true;
416357e252bfSMichael Neumann 	else
416457e252bfSMichael Neumann 		pi->thermal_protection = false;
416557e252bfSMichael Neumann 
416657e252bfSMichael Neumann 	pi->display_gap = true;
416757e252bfSMichael Neumann 
416857e252bfSMichael Neumann 	pi->dcodt = true;
416957e252bfSMichael Neumann 
417057e252bfSMichael Neumann 	pi->ulps = true;
417157e252bfSMichael Neumann 
417257e252bfSMichael Neumann 	eg_pi->dynamic_ac_timing = true;
417357e252bfSMichael Neumann 	eg_pi->abm = true;
417457e252bfSMichael Neumann 	eg_pi->mcls = true;
417557e252bfSMichael Neumann 	eg_pi->light_sleep = true;
417657e252bfSMichael Neumann 	eg_pi->memory_transition = true;
417757e252bfSMichael Neumann #if defined(CONFIG_ACPI)
417857e252bfSMichael Neumann 	eg_pi->pcie_performance_request =
417957e252bfSMichael Neumann 		radeon_acpi_is_pcie_performance_request_supported(rdev);
418057e252bfSMichael Neumann #else
418157e252bfSMichael Neumann 	eg_pi->pcie_performance_request = false;
418257e252bfSMichael Neumann #endif
418357e252bfSMichael Neumann 
418457e252bfSMichael Neumann 	eg_pi->dll_default_on = false;
418557e252bfSMichael Neumann 
418657e252bfSMichael Neumann 	eg_pi->sclk_deep_sleep = false;
418757e252bfSMichael Neumann 
418857e252bfSMichael Neumann 	pi->mclk_stutter_mode_threshold = 0;
418957e252bfSMichael Neumann 
419057e252bfSMichael Neumann 	pi->sram_end = SMC_RAM_END;
419157e252bfSMichael Neumann 
419257e252bfSMichael Neumann 	rdev->pm.dpm.dyn_state.mclk_sclk_ratio = 3;
419357e252bfSMichael Neumann 	rdev->pm.dpm.dyn_state.vddc_vddci_delta = 200;
419457e252bfSMichael Neumann 	rdev->pm.dpm.dyn_state.min_vddc_for_pcie_gen2 = 900;
419557e252bfSMichael Neumann 	rdev->pm.dpm.dyn_state.valid_sclk_values.count = ARRAY_SIZE(btc_valid_sclk);
419657e252bfSMichael Neumann 	rdev->pm.dpm.dyn_state.valid_sclk_values.values = btc_valid_sclk;
419757e252bfSMichael Neumann 	rdev->pm.dpm.dyn_state.valid_mclk_values.count = 0;
419857e252bfSMichael Neumann 	rdev->pm.dpm.dyn_state.valid_mclk_values.values = NULL;
419957e252bfSMichael Neumann 	rdev->pm.dpm.dyn_state.sclk_mclk_delta = 12500;
420057e252bfSMichael Neumann 
420157e252bfSMichael Neumann 	ni_pi->cac_data.leakage_coefficients.at = 516;
420257e252bfSMichael Neumann 	ni_pi->cac_data.leakage_coefficients.bt = 18;
420357e252bfSMichael Neumann 	ni_pi->cac_data.leakage_coefficients.av = 51;
420457e252bfSMichael Neumann 	ni_pi->cac_data.leakage_coefficients.bv = 2957;
420557e252bfSMichael Neumann 
4206c6f73aabSFrançois Tigeot 	switch (rdev->pdev->device) {
420757e252bfSMichael Neumann 	case 0x6700:
420857e252bfSMichael Neumann 	case 0x6701:
420957e252bfSMichael Neumann 	case 0x6702:
421057e252bfSMichael Neumann 	case 0x6703:
421157e252bfSMichael Neumann 	case 0x6718:
421257e252bfSMichael Neumann 		ni_pi->cac_weights = &cac_weights_cayman_xt;
421357e252bfSMichael Neumann 		break;
421457e252bfSMichael Neumann 	case 0x6705:
421557e252bfSMichael Neumann 	case 0x6719:
421657e252bfSMichael Neumann 	case 0x671D:
421757e252bfSMichael Neumann 	case 0x671C:
421857e252bfSMichael Neumann 	default:
421957e252bfSMichael Neumann 		ni_pi->cac_weights = &cac_weights_cayman_pro;
422057e252bfSMichael Neumann 		break;
422157e252bfSMichael Neumann 	case 0x6704:
422257e252bfSMichael Neumann 	case 0x6706:
422357e252bfSMichael Neumann 	case 0x6707:
422457e252bfSMichael Neumann 	case 0x6708:
422557e252bfSMichael Neumann 	case 0x6709:
422657e252bfSMichael Neumann 		ni_pi->cac_weights = &cac_weights_cayman_le;
422757e252bfSMichael Neumann 		break;
422857e252bfSMichael Neumann 	}
422957e252bfSMichael Neumann 
423057e252bfSMichael Neumann 	if (ni_pi->cac_weights->enable_power_containment_by_default) {
423157e252bfSMichael Neumann 		ni_pi->enable_power_containment = true;
423257e252bfSMichael Neumann 		ni_pi->enable_cac = true;
423357e252bfSMichael Neumann 		ni_pi->enable_sq_ramping = true;
423457e252bfSMichael Neumann 	} else {
423557e252bfSMichael Neumann 		ni_pi->enable_power_containment = false;
423657e252bfSMichael Neumann 		ni_pi->enable_cac = false;
423757e252bfSMichael Neumann 		ni_pi->enable_sq_ramping = false;
423857e252bfSMichael Neumann 	}
423957e252bfSMichael Neumann 
424057e252bfSMichael Neumann 	ni_pi->driver_calculate_cac_leakage = false;
424157e252bfSMichael Neumann 	ni_pi->cac_configuration_required = true;
424257e252bfSMichael Neumann 
424357e252bfSMichael Neumann 	if (ni_pi->cac_configuration_required) {
424457e252bfSMichael Neumann 		ni_pi->support_cac_long_term_average = true;
424557e252bfSMichael Neumann 		ni_pi->lta_window_size = ni_pi->cac_weights->l2_lta_window_size;
424657e252bfSMichael Neumann 		ni_pi->lts_truncate = ni_pi->cac_weights->lts_truncate;
424757e252bfSMichael Neumann 	} else {
424857e252bfSMichael Neumann 		ni_pi->support_cac_long_term_average = false;
424957e252bfSMichael Neumann 		ni_pi->lta_window_size = 0;
425057e252bfSMichael Neumann 		ni_pi->lts_truncate = 0;
425157e252bfSMichael Neumann 	}
425257e252bfSMichael Neumann 
425357e252bfSMichael Neumann 	ni_pi->use_power_boost_limit = true;
425457e252bfSMichael Neumann 
42554cd92098Szrj 	/* make sure dc limits are valid */
42564cd92098Szrj 	if ((rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc.sclk == 0) ||
42574cd92098Szrj 	    (rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc.mclk == 0))
42584cd92098Szrj 		rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc =
42594cd92098Szrj 			rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac;
42604cd92098Szrj 
426157e252bfSMichael Neumann 	return 0;
426257e252bfSMichael Neumann }
426357e252bfSMichael Neumann 
426457e252bfSMichael Neumann void ni_dpm_fini(struct radeon_device *rdev)
426557e252bfSMichael Neumann {
426657e252bfSMichael Neumann 	int i;
426757e252bfSMichael Neumann 
426857e252bfSMichael Neumann 	for (i = 0; i < rdev->pm.dpm.num_ps; i++) {
426957e252bfSMichael Neumann 		kfree(rdev->pm.dpm.ps[i].ps_priv);
427057e252bfSMichael Neumann 	}
427157e252bfSMichael Neumann 	kfree(rdev->pm.dpm.ps);
427257e252bfSMichael Neumann 	kfree(rdev->pm.dpm.priv);
427357e252bfSMichael Neumann 	kfree(rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries);
427457e252bfSMichael Neumann 	r600_free_extended_power_table(rdev);
427557e252bfSMichael Neumann }
427657e252bfSMichael Neumann 
427757e252bfSMichael Neumann void ni_dpm_print_power_state(struct radeon_device *rdev,
427857e252bfSMichael Neumann 			      struct radeon_ps *rps)
427957e252bfSMichael Neumann {
428057e252bfSMichael Neumann 	struct ni_ps *ps = ni_get_ps(rps);
428157e252bfSMichael Neumann 	struct rv7xx_pl *pl;
428257e252bfSMichael Neumann 	int i;
428357e252bfSMichael Neumann 
428457e252bfSMichael Neumann 	r600_dpm_print_class_info(rps->class, rps->class2);
428557e252bfSMichael Neumann 	r600_dpm_print_cap_info(rps->caps);
428657e252bfSMichael Neumann 	printk("\tuvd    vclk: %d dclk: %d\n", rps->vclk, rps->dclk);
428757e252bfSMichael Neumann 	for (i = 0; i < ps->performance_level_count; i++) {
428857e252bfSMichael Neumann 		pl = &ps->performance_levels[i];
428957e252bfSMichael Neumann 		if (rdev->family >= CHIP_TAHITI)
429057e252bfSMichael Neumann 			printk("\t\tpower level %d    sclk: %u mclk: %u vddc: %u vddci: %u pcie gen: %u\n",
429157e252bfSMichael Neumann 			       i, pl->sclk, pl->mclk, pl->vddc, pl->vddci, pl->pcie_gen + 1);
429257e252bfSMichael Neumann 		else
429357e252bfSMichael Neumann 			printk("\t\tpower level %d    sclk: %u mclk: %u vddc: %u vddci: %u\n",
429457e252bfSMichael Neumann 			       i, pl->sclk, pl->mclk, pl->vddc, pl->vddci);
429557e252bfSMichael Neumann 	}
429657e252bfSMichael Neumann 	r600_dpm_print_ps_status(rdev, rps);
429757e252bfSMichael Neumann }
429857e252bfSMichael Neumann 
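/*
 * Debugfs helper: read the current state index from
 * TARGET_AND_CURRENT_PROFILE_INDEX and print the active performance
 * level of the current power state.
 */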
429957e252bfSMichael Neumann void ni_dpm_debugfs_print_current_performance_level(struct radeon_device *rdev,
430057e252bfSMichael Neumann 						    struct seq_file *m)
430157e252bfSMichael Neumann {
4302c6f73aabSFrançois Tigeot 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
4303c6f73aabSFrançois Tigeot 	struct radeon_ps *rps = &eg_pi->current_rps;
430457e252bfSMichael Neumann 	struct ni_ps *ps = ni_get_ps(rps);
430557e252bfSMichael Neumann 	struct rv7xx_pl *pl;
430657e252bfSMichael Neumann 	u32 current_index =
430757e252bfSMichael Neumann 		(RREG32(TARGET_AND_CURRENT_PROFILE_INDEX) & CURRENT_STATE_INDEX_MASK) >>
430857e252bfSMichael Neumann 		CURRENT_STATE_INDEX_SHIFT;
430957e252bfSMichael Neumann 
431057e252bfSMichael Neumann 	if (current_index >= ps->performance_level_count) {
431157e252bfSMichael Neumann 		seq_printf(m, "invalid dpm profile %d\n", current_index);
431257e252bfSMichael Neumann 	} else {
431357e252bfSMichael Neumann 		pl = &ps->performance_levels[current_index];
431457e252bfSMichael Neumann 		seq_printf(m, "uvd    vclk: %d dclk: %d\n", rps->vclk, rps->dclk);
431557e252bfSMichael Neumann 		seq_printf(m, "power level %d    sclk: %u mclk: %u vddc: %u vddci: %u\n",
431657e252bfSMichael Neumann 			   current_index, pl->sclk, pl->mclk, pl->vddc, pl->vddci);
431757e252bfSMichael Neumann 	}
431857e252bfSMichael Neumann }
431957e252bfSMichael Neumann 
4320c59a5c48SFrançois Tigeot u32 ni_dpm_get_current_sclk(struct radeon_device *rdev)
4321c59a5c48SFrançois Tigeot {
4322c59a5c48SFrançois Tigeot 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
4323c59a5c48SFrançois Tigeot 	struct radeon_ps *rps = &eg_pi->current_rps;
4324c59a5c48SFrançois Tigeot 	struct ni_ps *ps = ni_get_ps(rps);
4325c59a5c48SFrançois Tigeot 	struct rv7xx_pl *pl;
4326c59a5c48SFrançois Tigeot 	u32 current_index =
4327c59a5c48SFrançois Tigeot 		(RREG32(TARGET_AND_CURRENT_PROFILE_INDEX) & CURRENT_STATE_INDEX_MASK) >>
4328c59a5c48SFrançois Tigeot 		CURRENT_STATE_INDEX_SHIFT;
4329c59a5c48SFrançois Tigeot 
4330c59a5c48SFrançois Tigeot 	if (current_index >= ps->performance_level_count) {
4331c59a5c48SFrançois Tigeot 		return 0;
4332c59a5c48SFrançois Tigeot 	} else {
4333c59a5c48SFrançois Tigeot 		pl = &ps->performance_levels[current_index];
4334c59a5c48SFrançois Tigeot 		return pl->sclk;
4335c59a5c48SFrançois Tigeot 	}
4336c59a5c48SFrançois Tigeot }
4337c59a5c48SFrançois Tigeot 
4338c59a5c48SFrançois Tigeot u32 ni_dpm_get_current_mclk(struct radeon_device *rdev)
4339c59a5c48SFrançois Tigeot {
4340c59a5c48SFrançois Tigeot 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
4341c59a5c48SFrançois Tigeot 	struct radeon_ps *rps = &eg_pi->current_rps;
4342c59a5c48SFrançois Tigeot 	struct ni_ps *ps = ni_get_ps(rps);
4343c59a5c48SFrançois Tigeot 	struct rv7xx_pl *pl;
4344c59a5c48SFrançois Tigeot 	u32 current_index =
4345c59a5c48SFrançois Tigeot 		(RREG32(TARGET_AND_CURRENT_PROFILE_INDEX) & CURRENT_STATE_INDEX_MASK) >>
4346c59a5c48SFrançois Tigeot 		CURRENT_STATE_INDEX_SHIFT;
4347c59a5c48SFrançois Tigeot 
4348c59a5c48SFrançois Tigeot 	if (current_index >= ps->performance_level_count) {
4349c59a5c48SFrançois Tigeot 		return 0;
4350c59a5c48SFrançois Tigeot 	} else {
4351c59a5c48SFrançois Tigeot 		pl = &ps->performance_levels[current_index];
4352c59a5c48SFrançois Tigeot 		return pl->mclk;
4353c59a5c48SFrançois Tigeot 	}
4354c59a5c48SFrançois Tigeot }
4355c59a5c48SFrançois Tigeot 
435657e252bfSMichael Neumann u32 ni_dpm_get_sclk(struct radeon_device *rdev, bool low)
435757e252bfSMichael Neumann {
435857e252bfSMichael Neumann 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
435957e252bfSMichael Neumann 	struct ni_ps *requested_state = ni_get_ps(&eg_pi->requested_rps);
436057e252bfSMichael Neumann 
436157e252bfSMichael Neumann 	if (low)
436257e252bfSMichael Neumann 		return requested_state->performance_levels[0].sclk;
436357e252bfSMichael Neumann 	else
436457e252bfSMichael Neumann 		return requested_state->performance_levels[requested_state->performance_level_count - 1].sclk;
436557e252bfSMichael Neumann }
436657e252bfSMichael Neumann 
436757e252bfSMichael Neumann u32 ni_dpm_get_mclk(struct radeon_device *rdev, bool low)
436857e252bfSMichael Neumann {
436957e252bfSMichael Neumann 	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
437057e252bfSMichael Neumann 	struct ni_ps *requested_state = ni_get_ps(&eg_pi->requested_rps);
437157e252bfSMichael Neumann 
437257e252bfSMichael Neumann 	if (low)
437357e252bfSMichael Neumann 		return requested_state->performance_levels[0].mclk;
437457e252bfSMichael Neumann 	else
437557e252bfSMichael Neumann 		return requested_state->performance_levels[requested_state->performance_level_count - 1].mclk;
437657e252bfSMichael Neumann }