157e252bfSMichael Neumann /* 257e252bfSMichael Neumann * Copyright 2012 Advanced Micro Devices, Inc. 357e252bfSMichael Neumann * 457e252bfSMichael Neumann * Permission is hereby granted, free of charge, to any person obtaining a 557e252bfSMichael Neumann * copy of this software and associated documentation files (the "Software"), 657e252bfSMichael Neumann * to deal in the Software without restriction, including without limitation 757e252bfSMichael Neumann * the rights to use, copy, modify, merge, publish, distribute, sublicense, 857e252bfSMichael Neumann * and/or sell copies of the Software, and to permit persons to whom the 957e252bfSMichael Neumann * Software is furnished to do so, subject to the following conditions: 1057e252bfSMichael Neumann * 1157e252bfSMichael Neumann * The above copyright notice and this permission notice shall be included in 1257e252bfSMichael Neumann * all copies or substantial portions of the Software. 1357e252bfSMichael Neumann * 1457e252bfSMichael Neumann * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 1557e252bfSMichael Neumann * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 1657e252bfSMichael Neumann * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL 1757e252bfSMichael Neumann * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR 1857e252bfSMichael Neumann * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, 1957e252bfSMichael Neumann * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR 2057e252bfSMichael Neumann * OTHER DEALINGS IN THE SOFTWARE. 
2157e252bfSMichael Neumann * 2257e252bfSMichael Neumann */ 2357e252bfSMichael Neumann 2457e252bfSMichael Neumann #include <drm/drmP.h> 2557e252bfSMichael Neumann #include "radeon.h" 2657e252bfSMichael Neumann #include "radeon_asic.h" 2757e252bfSMichael Neumann #include "nid.h" 2857e252bfSMichael Neumann #include "r600_dpm.h" 2957e252bfSMichael Neumann #include "ni_dpm.h" 3057e252bfSMichael Neumann #include "atom.h" 3157e252bfSMichael Neumann #include <linux/math64.h> 3257e252bfSMichael Neumann #include <linux/seq_file.h> 3357e252bfSMichael Neumann 3457e252bfSMichael Neumann #define MC_CG_ARB_FREQ_F0 0x0a 3557e252bfSMichael Neumann #define MC_CG_ARB_FREQ_F1 0x0b 3657e252bfSMichael Neumann #define MC_CG_ARB_FREQ_F2 0x0c 3757e252bfSMichael Neumann #define MC_CG_ARB_FREQ_F3 0x0d 3857e252bfSMichael Neumann 3957e252bfSMichael Neumann #define SMC_RAM_END 0xC000 4057e252bfSMichael Neumann 4157e252bfSMichael Neumann static const struct ni_cac_weights cac_weights_cayman_xt = 4257e252bfSMichael Neumann { 4357e252bfSMichael Neumann 0x15, 4457e252bfSMichael Neumann 0x2, 4557e252bfSMichael Neumann 0x19, 4657e252bfSMichael Neumann 0x2, 4757e252bfSMichael Neumann 0x8, 4857e252bfSMichael Neumann 0x14, 4957e252bfSMichael Neumann 0x2, 5057e252bfSMichael Neumann 0x16, 5157e252bfSMichael Neumann 0xE, 5257e252bfSMichael Neumann 0x17, 5357e252bfSMichael Neumann 0x13, 5457e252bfSMichael Neumann 0x2B, 5557e252bfSMichael Neumann 0x10, 5657e252bfSMichael Neumann 0x7, 5757e252bfSMichael Neumann 0x5, 5857e252bfSMichael Neumann 0x5, 5957e252bfSMichael Neumann 0x5, 6057e252bfSMichael Neumann 0x2, 6157e252bfSMichael Neumann 0x3, 6257e252bfSMichael Neumann 0x9, 6357e252bfSMichael Neumann 0x10, 6457e252bfSMichael Neumann 0x10, 6557e252bfSMichael Neumann 0x2B, 6657e252bfSMichael Neumann 0xA, 6757e252bfSMichael Neumann 0x9, 6857e252bfSMichael Neumann 0x4, 6957e252bfSMichael Neumann 0xD, 7057e252bfSMichael Neumann 0xD, 7157e252bfSMichael Neumann 0x3E, 7257e252bfSMichael Neumann 0x18, 
7357e252bfSMichael Neumann 0x14, 7457e252bfSMichael Neumann 0, 7557e252bfSMichael Neumann 0x3, 7657e252bfSMichael Neumann 0x3, 7757e252bfSMichael Neumann 0x5, 7857e252bfSMichael Neumann 0, 7957e252bfSMichael Neumann 0x2, 8057e252bfSMichael Neumann 0, 8157e252bfSMichael Neumann 0, 8257e252bfSMichael Neumann 0, 8357e252bfSMichael Neumann 0, 8457e252bfSMichael Neumann 0, 8557e252bfSMichael Neumann 0, 8657e252bfSMichael Neumann 0, 8757e252bfSMichael Neumann 0, 8857e252bfSMichael Neumann 0, 8957e252bfSMichael Neumann 0x1CC, 9057e252bfSMichael Neumann 0, 9157e252bfSMichael Neumann 0x164, 9257e252bfSMichael Neumann 1, 9357e252bfSMichael Neumann 1, 9457e252bfSMichael Neumann 1, 9557e252bfSMichael Neumann 1, 9657e252bfSMichael Neumann 12, 9757e252bfSMichael Neumann 12, 9857e252bfSMichael Neumann 12, 9957e252bfSMichael Neumann 0x12, 10057e252bfSMichael Neumann 0x1F, 10157e252bfSMichael Neumann 132, 10257e252bfSMichael Neumann 5, 10357e252bfSMichael Neumann 7, 10457e252bfSMichael Neumann 0, 10557e252bfSMichael Neumann { 0, 0, 0, 0, 0, 0, 0, 0 }, 10657e252bfSMichael Neumann { 0, 0, 0, 0 }, 10757e252bfSMichael Neumann true 10857e252bfSMichael Neumann }; 10957e252bfSMichael Neumann 11057e252bfSMichael Neumann static const struct ni_cac_weights cac_weights_cayman_pro = 11157e252bfSMichael Neumann { 11257e252bfSMichael Neumann 0x16, 11357e252bfSMichael Neumann 0x4, 11457e252bfSMichael Neumann 0x10, 11557e252bfSMichael Neumann 0x2, 11657e252bfSMichael Neumann 0xA, 11757e252bfSMichael Neumann 0x16, 11857e252bfSMichael Neumann 0x2, 11957e252bfSMichael Neumann 0x18, 12057e252bfSMichael Neumann 0x10, 12157e252bfSMichael Neumann 0x1A, 12257e252bfSMichael Neumann 0x16, 12357e252bfSMichael Neumann 0x2D, 12457e252bfSMichael Neumann 0x12, 12557e252bfSMichael Neumann 0xA, 12657e252bfSMichael Neumann 0x6, 12757e252bfSMichael Neumann 0x6, 12857e252bfSMichael Neumann 0x6, 12957e252bfSMichael Neumann 0x2, 13057e252bfSMichael Neumann 0x4, 13157e252bfSMichael Neumann 0xB, 13257e252bfSMichael 
Neumann 0x11, 13357e252bfSMichael Neumann 0x11, 13457e252bfSMichael Neumann 0x2D, 13557e252bfSMichael Neumann 0xC, 13657e252bfSMichael Neumann 0xC, 13757e252bfSMichael Neumann 0x7, 13857e252bfSMichael Neumann 0x10, 13957e252bfSMichael Neumann 0x10, 14057e252bfSMichael Neumann 0x3F, 14157e252bfSMichael Neumann 0x1A, 14257e252bfSMichael Neumann 0x16, 14357e252bfSMichael Neumann 0, 14457e252bfSMichael Neumann 0x7, 14557e252bfSMichael Neumann 0x4, 14657e252bfSMichael Neumann 0x6, 14757e252bfSMichael Neumann 1, 14857e252bfSMichael Neumann 0x2, 14957e252bfSMichael Neumann 0x1, 15057e252bfSMichael Neumann 0, 15157e252bfSMichael Neumann 0, 15257e252bfSMichael Neumann 0, 15357e252bfSMichael Neumann 0, 15457e252bfSMichael Neumann 0, 15557e252bfSMichael Neumann 0, 15657e252bfSMichael Neumann 0x30, 15757e252bfSMichael Neumann 0, 15857e252bfSMichael Neumann 0x1CF, 15957e252bfSMichael Neumann 0, 16057e252bfSMichael Neumann 0x166, 16157e252bfSMichael Neumann 1, 16257e252bfSMichael Neumann 1, 16357e252bfSMichael Neumann 1, 16457e252bfSMichael Neumann 1, 16557e252bfSMichael Neumann 12, 16657e252bfSMichael Neumann 12, 16757e252bfSMichael Neumann 12, 16857e252bfSMichael Neumann 0x15, 16957e252bfSMichael Neumann 0x1F, 17057e252bfSMichael Neumann 132, 17157e252bfSMichael Neumann 6, 17257e252bfSMichael Neumann 6, 17357e252bfSMichael Neumann 0, 17457e252bfSMichael Neumann { 0, 0, 0, 0, 0, 0, 0, 0 }, 17557e252bfSMichael Neumann { 0, 0, 0, 0 }, 17657e252bfSMichael Neumann true 17757e252bfSMichael Neumann }; 17857e252bfSMichael Neumann 17957e252bfSMichael Neumann static const struct ni_cac_weights cac_weights_cayman_le = 18057e252bfSMichael Neumann { 18157e252bfSMichael Neumann 0x7, 18257e252bfSMichael Neumann 0xE, 18357e252bfSMichael Neumann 0x1, 18457e252bfSMichael Neumann 0xA, 18557e252bfSMichael Neumann 0x1, 18657e252bfSMichael Neumann 0x3F, 18757e252bfSMichael Neumann 0x2, 18857e252bfSMichael Neumann 0x18, 18957e252bfSMichael Neumann 0x10, 19057e252bfSMichael Neumann 0x1A, 
19157e252bfSMichael Neumann 0x1, 19257e252bfSMichael Neumann 0x3F, 19357e252bfSMichael Neumann 0x1, 19457e252bfSMichael Neumann 0xE, 19557e252bfSMichael Neumann 0x6, 19657e252bfSMichael Neumann 0x6, 19757e252bfSMichael Neumann 0x6, 19857e252bfSMichael Neumann 0x2, 19957e252bfSMichael Neumann 0x4, 20057e252bfSMichael Neumann 0x9, 20157e252bfSMichael Neumann 0x1A, 20257e252bfSMichael Neumann 0x1A, 20357e252bfSMichael Neumann 0x2C, 20457e252bfSMichael Neumann 0xA, 20557e252bfSMichael Neumann 0x11, 20657e252bfSMichael Neumann 0x8, 20757e252bfSMichael Neumann 0x19, 20857e252bfSMichael Neumann 0x19, 20957e252bfSMichael Neumann 0x1, 21057e252bfSMichael Neumann 0x1, 21157e252bfSMichael Neumann 0x1A, 21257e252bfSMichael Neumann 0, 21357e252bfSMichael Neumann 0x8, 21457e252bfSMichael Neumann 0x5, 21557e252bfSMichael Neumann 0x8, 21657e252bfSMichael Neumann 0x1, 21757e252bfSMichael Neumann 0x3, 21857e252bfSMichael Neumann 0x1, 21957e252bfSMichael Neumann 0, 22057e252bfSMichael Neumann 0, 22157e252bfSMichael Neumann 0, 22257e252bfSMichael Neumann 0, 22357e252bfSMichael Neumann 0, 22457e252bfSMichael Neumann 0, 22557e252bfSMichael Neumann 0x38, 22657e252bfSMichael Neumann 0x38, 22757e252bfSMichael Neumann 0x239, 22857e252bfSMichael Neumann 0x3, 22957e252bfSMichael Neumann 0x18A, 23057e252bfSMichael Neumann 1, 23157e252bfSMichael Neumann 1, 23257e252bfSMichael Neumann 1, 23357e252bfSMichael Neumann 1, 23457e252bfSMichael Neumann 12, 23557e252bfSMichael Neumann 12, 23657e252bfSMichael Neumann 12, 23757e252bfSMichael Neumann 0x15, 23857e252bfSMichael Neumann 0x22, 23957e252bfSMichael Neumann 132, 24057e252bfSMichael Neumann 6, 24157e252bfSMichael Neumann 6, 24257e252bfSMichael Neumann 0, 24357e252bfSMichael Neumann { 0, 0, 0, 0, 0, 0, 0, 0 }, 24457e252bfSMichael Neumann { 0, 0, 0, 0 }, 24557e252bfSMichael Neumann true 24657e252bfSMichael Neumann }; 24757e252bfSMichael Neumann 24857e252bfSMichael Neumann #define NISLANDS_MGCG_SEQUENCE 300 24957e252bfSMichael Neumann 
25057e252bfSMichael Neumann static const u32 cayman_cgcg_cgls_default[] = 25157e252bfSMichael Neumann { 25257e252bfSMichael Neumann 0x000008f8, 0x00000010, 0xffffffff, 25357e252bfSMichael Neumann 0x000008fc, 0x00000000, 0xffffffff, 25457e252bfSMichael Neumann 0x000008f8, 0x00000011, 0xffffffff, 25557e252bfSMichael Neumann 0x000008fc, 0x00000000, 0xffffffff, 25657e252bfSMichael Neumann 0x000008f8, 0x00000012, 0xffffffff, 25757e252bfSMichael Neumann 0x000008fc, 0x00000000, 0xffffffff, 25857e252bfSMichael Neumann 0x000008f8, 0x00000013, 0xffffffff, 25957e252bfSMichael Neumann 0x000008fc, 0x00000000, 0xffffffff, 26057e252bfSMichael Neumann 0x000008f8, 0x00000014, 0xffffffff, 26157e252bfSMichael Neumann 0x000008fc, 0x00000000, 0xffffffff, 26257e252bfSMichael Neumann 0x000008f8, 0x00000015, 0xffffffff, 26357e252bfSMichael Neumann 0x000008fc, 0x00000000, 0xffffffff, 26457e252bfSMichael Neumann 0x000008f8, 0x00000016, 0xffffffff, 26557e252bfSMichael Neumann 0x000008fc, 0x00000000, 0xffffffff, 26657e252bfSMichael Neumann 0x000008f8, 0x00000017, 0xffffffff, 26757e252bfSMichael Neumann 0x000008fc, 0x00000000, 0xffffffff, 26857e252bfSMichael Neumann 0x000008f8, 0x00000018, 0xffffffff, 26957e252bfSMichael Neumann 0x000008fc, 0x00000000, 0xffffffff, 27057e252bfSMichael Neumann 0x000008f8, 0x00000019, 0xffffffff, 27157e252bfSMichael Neumann 0x000008fc, 0x00000000, 0xffffffff, 27257e252bfSMichael Neumann 0x000008f8, 0x0000001a, 0xffffffff, 27357e252bfSMichael Neumann 0x000008fc, 0x00000000, 0xffffffff, 27457e252bfSMichael Neumann 0x000008f8, 0x0000001b, 0xffffffff, 27557e252bfSMichael Neumann 0x000008fc, 0x00000000, 0xffffffff, 27657e252bfSMichael Neumann 0x000008f8, 0x00000020, 0xffffffff, 27757e252bfSMichael Neumann 0x000008fc, 0x00000000, 0xffffffff, 27857e252bfSMichael Neumann 0x000008f8, 0x00000021, 0xffffffff, 27957e252bfSMichael Neumann 0x000008fc, 0x00000000, 0xffffffff, 28057e252bfSMichael Neumann 0x000008f8, 0x00000022, 0xffffffff, 28157e252bfSMichael Neumann 0x000008fc, 
0x00000000, 0xffffffff, 28257e252bfSMichael Neumann 0x000008f8, 0x00000023, 0xffffffff, 28357e252bfSMichael Neumann 0x000008fc, 0x00000000, 0xffffffff, 28457e252bfSMichael Neumann 0x000008f8, 0x00000024, 0xffffffff, 28557e252bfSMichael Neumann 0x000008fc, 0x00000000, 0xffffffff, 28657e252bfSMichael Neumann 0x000008f8, 0x00000025, 0xffffffff, 28757e252bfSMichael Neumann 0x000008fc, 0x00000000, 0xffffffff, 28857e252bfSMichael Neumann 0x000008f8, 0x00000026, 0xffffffff, 28957e252bfSMichael Neumann 0x000008fc, 0x00000000, 0xffffffff, 29057e252bfSMichael Neumann 0x000008f8, 0x00000027, 0xffffffff, 29157e252bfSMichael Neumann 0x000008fc, 0x00000000, 0xffffffff, 29257e252bfSMichael Neumann 0x000008f8, 0x00000028, 0xffffffff, 29357e252bfSMichael Neumann 0x000008fc, 0x00000000, 0xffffffff, 29457e252bfSMichael Neumann 0x000008f8, 0x00000029, 0xffffffff, 29557e252bfSMichael Neumann 0x000008fc, 0x00000000, 0xffffffff, 29657e252bfSMichael Neumann 0x000008f8, 0x0000002a, 0xffffffff, 29757e252bfSMichael Neumann 0x000008fc, 0x00000000, 0xffffffff, 29857e252bfSMichael Neumann 0x000008f8, 0x0000002b, 0xffffffff, 29957e252bfSMichael Neumann 0x000008fc, 0x00000000, 0xffffffff 30057e252bfSMichael Neumann }; 30157e252bfSMichael Neumann #define CAYMAN_CGCG_CGLS_DEFAULT_LENGTH sizeof(cayman_cgcg_cgls_default) / (3 * sizeof(u32)) 30257e252bfSMichael Neumann 30357e252bfSMichael Neumann static const u32 cayman_cgcg_cgls_disable[] = 30457e252bfSMichael Neumann { 30557e252bfSMichael Neumann 0x000008f8, 0x00000010, 0xffffffff, 30657e252bfSMichael Neumann 0x000008fc, 0xffffffff, 0xffffffff, 30757e252bfSMichael Neumann 0x000008f8, 0x00000011, 0xffffffff, 30857e252bfSMichael Neumann 0x000008fc, 0xffffffff, 0xffffffff, 30957e252bfSMichael Neumann 0x000008f8, 0x00000012, 0xffffffff, 31057e252bfSMichael Neumann 0x000008fc, 0xffffffff, 0xffffffff, 31157e252bfSMichael Neumann 0x000008f8, 0x00000013, 0xffffffff, 31257e252bfSMichael Neumann 0x000008fc, 0xffffffff, 0xffffffff, 31357e252bfSMichael Neumann 
0x000008f8, 0x00000014, 0xffffffff, 31457e252bfSMichael Neumann 0x000008fc, 0xffffffff, 0xffffffff, 31557e252bfSMichael Neumann 0x000008f8, 0x00000015, 0xffffffff, 31657e252bfSMichael Neumann 0x000008fc, 0xffffffff, 0xffffffff, 31757e252bfSMichael Neumann 0x000008f8, 0x00000016, 0xffffffff, 31857e252bfSMichael Neumann 0x000008fc, 0xffffffff, 0xffffffff, 31957e252bfSMichael Neumann 0x000008f8, 0x00000017, 0xffffffff, 32057e252bfSMichael Neumann 0x000008fc, 0xffffffff, 0xffffffff, 32157e252bfSMichael Neumann 0x000008f8, 0x00000018, 0xffffffff, 32257e252bfSMichael Neumann 0x000008fc, 0xffffffff, 0xffffffff, 32357e252bfSMichael Neumann 0x000008f8, 0x00000019, 0xffffffff, 32457e252bfSMichael Neumann 0x000008fc, 0xffffffff, 0xffffffff, 32557e252bfSMichael Neumann 0x000008f8, 0x0000001a, 0xffffffff, 32657e252bfSMichael Neumann 0x000008fc, 0xffffffff, 0xffffffff, 32757e252bfSMichael Neumann 0x000008f8, 0x0000001b, 0xffffffff, 32857e252bfSMichael Neumann 0x000008fc, 0xffffffff, 0xffffffff, 32957e252bfSMichael Neumann 0x000008f8, 0x00000020, 0xffffffff, 33057e252bfSMichael Neumann 0x000008fc, 0x00000000, 0xffffffff, 33157e252bfSMichael Neumann 0x000008f8, 0x00000021, 0xffffffff, 33257e252bfSMichael Neumann 0x000008fc, 0x00000000, 0xffffffff, 33357e252bfSMichael Neumann 0x000008f8, 0x00000022, 0xffffffff, 33457e252bfSMichael Neumann 0x000008fc, 0x00000000, 0xffffffff, 33557e252bfSMichael Neumann 0x000008f8, 0x00000023, 0xffffffff, 33657e252bfSMichael Neumann 0x000008fc, 0x00000000, 0xffffffff, 33757e252bfSMichael Neumann 0x000008f8, 0x00000024, 0xffffffff, 33857e252bfSMichael Neumann 0x000008fc, 0x00000000, 0xffffffff, 33957e252bfSMichael Neumann 0x000008f8, 0x00000025, 0xffffffff, 34057e252bfSMichael Neumann 0x000008fc, 0x00000000, 0xffffffff, 34157e252bfSMichael Neumann 0x000008f8, 0x00000026, 0xffffffff, 34257e252bfSMichael Neumann 0x000008fc, 0x00000000, 0xffffffff, 34357e252bfSMichael Neumann 0x000008f8, 0x00000027, 0xffffffff, 34457e252bfSMichael Neumann 0x000008fc, 
0x00000000, 0xffffffff, 34557e252bfSMichael Neumann 0x000008f8, 0x00000028, 0xffffffff, 34657e252bfSMichael Neumann 0x000008fc, 0x00000000, 0xffffffff, 34757e252bfSMichael Neumann 0x000008f8, 0x00000029, 0xffffffff, 34857e252bfSMichael Neumann 0x000008fc, 0x00000000, 0xffffffff, 34957e252bfSMichael Neumann 0x000008f8, 0x0000002a, 0xffffffff, 35057e252bfSMichael Neumann 0x000008fc, 0x00000000, 0xffffffff, 35157e252bfSMichael Neumann 0x000008f8, 0x0000002b, 0xffffffff, 35257e252bfSMichael Neumann 0x000008fc, 0x00000000, 0xffffffff, 35357e252bfSMichael Neumann 0x00000644, 0x000f7902, 0x001f4180, 35457e252bfSMichael Neumann 0x00000644, 0x000f3802, 0x001f4180 35557e252bfSMichael Neumann }; 35657e252bfSMichael Neumann #define CAYMAN_CGCG_CGLS_DISABLE_LENGTH sizeof(cayman_cgcg_cgls_disable) / (3 * sizeof(u32)) 35757e252bfSMichael Neumann 35857e252bfSMichael Neumann static const u32 cayman_cgcg_cgls_enable[] = 35957e252bfSMichael Neumann { 36057e252bfSMichael Neumann 0x00000644, 0x000f7882, 0x001f4080, 36157e252bfSMichael Neumann 0x000008f8, 0x00000010, 0xffffffff, 36257e252bfSMichael Neumann 0x000008fc, 0x00000000, 0xffffffff, 36357e252bfSMichael Neumann 0x000008f8, 0x00000011, 0xffffffff, 36457e252bfSMichael Neumann 0x000008fc, 0x00000000, 0xffffffff, 36557e252bfSMichael Neumann 0x000008f8, 0x00000012, 0xffffffff, 36657e252bfSMichael Neumann 0x000008fc, 0x00000000, 0xffffffff, 36757e252bfSMichael Neumann 0x000008f8, 0x00000013, 0xffffffff, 36857e252bfSMichael Neumann 0x000008fc, 0x00000000, 0xffffffff, 36957e252bfSMichael Neumann 0x000008f8, 0x00000014, 0xffffffff, 37057e252bfSMichael Neumann 0x000008fc, 0x00000000, 0xffffffff, 37157e252bfSMichael Neumann 0x000008f8, 0x00000015, 0xffffffff, 37257e252bfSMichael Neumann 0x000008fc, 0x00000000, 0xffffffff, 37357e252bfSMichael Neumann 0x000008f8, 0x00000016, 0xffffffff, 37457e252bfSMichael Neumann 0x000008fc, 0x00000000, 0xffffffff, 37557e252bfSMichael Neumann 0x000008f8, 0x00000017, 0xffffffff, 37657e252bfSMichael Neumann 
0x000008fc, 0x00000000, 0xffffffff, 37757e252bfSMichael Neumann 0x000008f8, 0x00000018, 0xffffffff, 37857e252bfSMichael Neumann 0x000008fc, 0x00000000, 0xffffffff, 37957e252bfSMichael Neumann 0x000008f8, 0x00000019, 0xffffffff, 38057e252bfSMichael Neumann 0x000008fc, 0x00000000, 0xffffffff, 38157e252bfSMichael Neumann 0x000008f8, 0x0000001a, 0xffffffff, 38257e252bfSMichael Neumann 0x000008fc, 0x00000000, 0xffffffff, 38357e252bfSMichael Neumann 0x000008f8, 0x0000001b, 0xffffffff, 38457e252bfSMichael Neumann 0x000008fc, 0x00000000, 0xffffffff, 38557e252bfSMichael Neumann 0x000008f8, 0x00000020, 0xffffffff, 38657e252bfSMichael Neumann 0x000008fc, 0xffffffff, 0xffffffff, 38757e252bfSMichael Neumann 0x000008f8, 0x00000021, 0xffffffff, 38857e252bfSMichael Neumann 0x000008fc, 0xffffffff, 0xffffffff, 38957e252bfSMichael Neumann 0x000008f8, 0x00000022, 0xffffffff, 39057e252bfSMichael Neumann 0x000008fc, 0xffffffff, 0xffffffff, 39157e252bfSMichael Neumann 0x000008f8, 0x00000023, 0xffffffff, 39257e252bfSMichael Neumann 0x000008fc, 0xffffffff, 0xffffffff, 39357e252bfSMichael Neumann 0x000008f8, 0x00000024, 0xffffffff, 39457e252bfSMichael Neumann 0x000008fc, 0xffffffff, 0xffffffff, 39557e252bfSMichael Neumann 0x000008f8, 0x00000025, 0xffffffff, 39657e252bfSMichael Neumann 0x000008fc, 0xffffffff, 0xffffffff, 39757e252bfSMichael Neumann 0x000008f8, 0x00000026, 0xffffffff, 39857e252bfSMichael Neumann 0x000008fc, 0xffffffff, 0xffffffff, 39957e252bfSMichael Neumann 0x000008f8, 0x00000027, 0xffffffff, 40057e252bfSMichael Neumann 0x000008fc, 0xffffffff, 0xffffffff, 40157e252bfSMichael Neumann 0x000008f8, 0x00000028, 0xffffffff, 40257e252bfSMichael Neumann 0x000008fc, 0xffffffff, 0xffffffff, 40357e252bfSMichael Neumann 0x000008f8, 0x00000029, 0xffffffff, 40457e252bfSMichael Neumann 0x000008fc, 0xffffffff, 0xffffffff, 40557e252bfSMichael Neumann 0x000008f8, 0x0000002a, 0xffffffff, 40657e252bfSMichael Neumann 0x000008fc, 0xffffffff, 0xffffffff, 40757e252bfSMichael Neumann 0x000008f8, 
0x0000002b, 0xffffffff, 40857e252bfSMichael Neumann 0x000008fc, 0xffffffff, 0xffffffff 40957e252bfSMichael Neumann }; 41057e252bfSMichael Neumann #define CAYMAN_CGCG_CGLS_ENABLE_LENGTH sizeof(cayman_cgcg_cgls_enable) / (3 * sizeof(u32)) 41157e252bfSMichael Neumann 41257e252bfSMichael Neumann static const u32 cayman_mgcg_default[] = 41357e252bfSMichael Neumann { 41457e252bfSMichael Neumann 0x0000802c, 0xc0000000, 0xffffffff, 41557e252bfSMichael Neumann 0x00003fc4, 0xc0000000, 0xffffffff, 41657e252bfSMichael Neumann 0x00005448, 0x00000100, 0xffffffff, 41757e252bfSMichael Neumann 0x000055e4, 0x00000100, 0xffffffff, 41857e252bfSMichael Neumann 0x0000160c, 0x00000100, 0xffffffff, 41957e252bfSMichael Neumann 0x00008984, 0x06000100, 0xffffffff, 42057e252bfSMichael Neumann 0x0000c164, 0x00000100, 0xffffffff, 42157e252bfSMichael Neumann 0x00008a18, 0x00000100, 0xffffffff, 42257e252bfSMichael Neumann 0x0000897c, 0x06000100, 0xffffffff, 42357e252bfSMichael Neumann 0x00008b28, 0x00000100, 0xffffffff, 42457e252bfSMichael Neumann 0x00009144, 0x00800200, 0xffffffff, 42557e252bfSMichael Neumann 0x00009a60, 0x00000100, 0xffffffff, 42657e252bfSMichael Neumann 0x00009868, 0x00000100, 0xffffffff, 42757e252bfSMichael Neumann 0x00008d58, 0x00000100, 0xffffffff, 42857e252bfSMichael Neumann 0x00009510, 0x00000100, 0xffffffff, 42957e252bfSMichael Neumann 0x0000949c, 0x00000100, 0xffffffff, 43057e252bfSMichael Neumann 0x00009654, 0x00000100, 0xffffffff, 43157e252bfSMichael Neumann 0x00009030, 0x00000100, 0xffffffff, 43257e252bfSMichael Neumann 0x00009034, 0x00000100, 0xffffffff, 43357e252bfSMichael Neumann 0x00009038, 0x00000100, 0xffffffff, 43457e252bfSMichael Neumann 0x0000903c, 0x00000100, 0xffffffff, 43557e252bfSMichael Neumann 0x00009040, 0x00000100, 0xffffffff, 43657e252bfSMichael Neumann 0x0000a200, 0x00000100, 0xffffffff, 43757e252bfSMichael Neumann 0x0000a204, 0x00000100, 0xffffffff, 43857e252bfSMichael Neumann 0x0000a208, 0x00000100, 0xffffffff, 43957e252bfSMichael Neumann 
0x0000a20c, 0x00000100, 0xffffffff, 44057e252bfSMichael Neumann 0x00009744, 0x00000100, 0xffffffff, 44157e252bfSMichael Neumann 0x00003f80, 0x00000100, 0xffffffff, 44257e252bfSMichael Neumann 0x0000a210, 0x00000100, 0xffffffff, 44357e252bfSMichael Neumann 0x0000a214, 0x00000100, 0xffffffff, 44457e252bfSMichael Neumann 0x000004d8, 0x00000100, 0xffffffff, 44557e252bfSMichael Neumann 0x00009664, 0x00000100, 0xffffffff, 44657e252bfSMichael Neumann 0x00009698, 0x00000100, 0xffffffff, 44757e252bfSMichael Neumann 0x000004d4, 0x00000200, 0xffffffff, 44857e252bfSMichael Neumann 0x000004d0, 0x00000000, 0xffffffff, 44957e252bfSMichael Neumann 0x000030cc, 0x00000104, 0xffffffff, 45057e252bfSMichael Neumann 0x0000d0c0, 0x00000100, 0xffffffff, 45157e252bfSMichael Neumann 0x0000d8c0, 0x00000100, 0xffffffff, 45257e252bfSMichael Neumann 0x0000802c, 0x40000000, 0xffffffff, 45357e252bfSMichael Neumann 0x00003fc4, 0x40000000, 0xffffffff, 45457e252bfSMichael Neumann 0x0000915c, 0x00010000, 0xffffffff, 45557e252bfSMichael Neumann 0x00009160, 0x00030002, 0xffffffff, 45657e252bfSMichael Neumann 0x00009164, 0x00050004, 0xffffffff, 45757e252bfSMichael Neumann 0x00009168, 0x00070006, 0xffffffff, 45857e252bfSMichael Neumann 0x00009178, 0x00070000, 0xffffffff, 45957e252bfSMichael Neumann 0x0000917c, 0x00030002, 0xffffffff, 46057e252bfSMichael Neumann 0x00009180, 0x00050004, 0xffffffff, 46157e252bfSMichael Neumann 0x0000918c, 0x00010006, 0xffffffff, 46257e252bfSMichael Neumann 0x00009190, 0x00090008, 0xffffffff, 46357e252bfSMichael Neumann 0x00009194, 0x00070000, 0xffffffff, 46457e252bfSMichael Neumann 0x00009198, 0x00030002, 0xffffffff, 46557e252bfSMichael Neumann 0x0000919c, 0x00050004, 0xffffffff, 46657e252bfSMichael Neumann 0x000091a8, 0x00010006, 0xffffffff, 46757e252bfSMichael Neumann 0x000091ac, 0x00090008, 0xffffffff, 46857e252bfSMichael Neumann 0x000091b0, 0x00070000, 0xffffffff, 46957e252bfSMichael Neumann 0x000091b4, 0x00030002, 0xffffffff, 47057e252bfSMichael Neumann 0x000091b8, 
0x00050004, 0xffffffff, 47157e252bfSMichael Neumann 0x000091c4, 0x00010006, 0xffffffff, 47257e252bfSMichael Neumann 0x000091c8, 0x00090008, 0xffffffff, 47357e252bfSMichael Neumann 0x000091cc, 0x00070000, 0xffffffff, 47457e252bfSMichael Neumann 0x000091d0, 0x00030002, 0xffffffff, 47557e252bfSMichael Neumann 0x000091d4, 0x00050004, 0xffffffff, 47657e252bfSMichael Neumann 0x000091e0, 0x00010006, 0xffffffff, 47757e252bfSMichael Neumann 0x000091e4, 0x00090008, 0xffffffff, 47857e252bfSMichael Neumann 0x000091e8, 0x00000000, 0xffffffff, 47957e252bfSMichael Neumann 0x000091ec, 0x00070000, 0xffffffff, 48057e252bfSMichael Neumann 0x000091f0, 0x00030002, 0xffffffff, 48157e252bfSMichael Neumann 0x000091f4, 0x00050004, 0xffffffff, 48257e252bfSMichael Neumann 0x00009200, 0x00010006, 0xffffffff, 48357e252bfSMichael Neumann 0x00009204, 0x00090008, 0xffffffff, 48457e252bfSMichael Neumann 0x00009208, 0x00070000, 0xffffffff, 48557e252bfSMichael Neumann 0x0000920c, 0x00030002, 0xffffffff, 48657e252bfSMichael Neumann 0x00009210, 0x00050004, 0xffffffff, 48757e252bfSMichael Neumann 0x0000921c, 0x00010006, 0xffffffff, 48857e252bfSMichael Neumann 0x00009220, 0x00090008, 0xffffffff, 48957e252bfSMichael Neumann 0x00009224, 0x00070000, 0xffffffff, 49057e252bfSMichael Neumann 0x00009228, 0x00030002, 0xffffffff, 49157e252bfSMichael Neumann 0x0000922c, 0x00050004, 0xffffffff, 49257e252bfSMichael Neumann 0x00009238, 0x00010006, 0xffffffff, 49357e252bfSMichael Neumann 0x0000923c, 0x00090008, 0xffffffff, 49457e252bfSMichael Neumann 0x00009240, 0x00070000, 0xffffffff, 49557e252bfSMichael Neumann 0x00009244, 0x00030002, 0xffffffff, 49657e252bfSMichael Neumann 0x00009248, 0x00050004, 0xffffffff, 49757e252bfSMichael Neumann 0x00009254, 0x00010006, 0xffffffff, 49857e252bfSMichael Neumann 0x00009258, 0x00090008, 0xffffffff, 49957e252bfSMichael Neumann 0x0000925c, 0x00070000, 0xffffffff, 50057e252bfSMichael Neumann 0x00009260, 0x00030002, 0xffffffff, 50157e252bfSMichael Neumann 0x00009264, 0x00050004, 
0xffffffff, 50257e252bfSMichael Neumann 0x00009270, 0x00010006, 0xffffffff, 50357e252bfSMichael Neumann 0x00009274, 0x00090008, 0xffffffff, 50457e252bfSMichael Neumann 0x00009278, 0x00070000, 0xffffffff, 50557e252bfSMichael Neumann 0x0000927c, 0x00030002, 0xffffffff, 50657e252bfSMichael Neumann 0x00009280, 0x00050004, 0xffffffff, 50757e252bfSMichael Neumann 0x0000928c, 0x00010006, 0xffffffff, 50857e252bfSMichael Neumann 0x00009290, 0x00090008, 0xffffffff, 50957e252bfSMichael Neumann 0x000092a8, 0x00070000, 0xffffffff, 51057e252bfSMichael Neumann 0x000092ac, 0x00030002, 0xffffffff, 51157e252bfSMichael Neumann 0x000092b0, 0x00050004, 0xffffffff, 51257e252bfSMichael Neumann 0x000092bc, 0x00010006, 0xffffffff, 51357e252bfSMichael Neumann 0x000092c0, 0x00090008, 0xffffffff, 51457e252bfSMichael Neumann 0x000092c4, 0x00070000, 0xffffffff, 51557e252bfSMichael Neumann 0x000092c8, 0x00030002, 0xffffffff, 51657e252bfSMichael Neumann 0x000092cc, 0x00050004, 0xffffffff, 51757e252bfSMichael Neumann 0x000092d8, 0x00010006, 0xffffffff, 51857e252bfSMichael Neumann 0x000092dc, 0x00090008, 0xffffffff, 51957e252bfSMichael Neumann 0x00009294, 0x00000000, 0xffffffff, 52057e252bfSMichael Neumann 0x0000802c, 0x40010000, 0xffffffff, 52157e252bfSMichael Neumann 0x00003fc4, 0x40010000, 0xffffffff, 52257e252bfSMichael Neumann 0x0000915c, 0x00010000, 0xffffffff, 52357e252bfSMichael Neumann 0x00009160, 0x00030002, 0xffffffff, 52457e252bfSMichael Neumann 0x00009164, 0x00050004, 0xffffffff, 52557e252bfSMichael Neumann 0x00009168, 0x00070006, 0xffffffff, 52657e252bfSMichael Neumann 0x00009178, 0x00070000, 0xffffffff, 52757e252bfSMichael Neumann 0x0000917c, 0x00030002, 0xffffffff, 52857e252bfSMichael Neumann 0x00009180, 0x00050004, 0xffffffff, 52957e252bfSMichael Neumann 0x0000918c, 0x00010006, 0xffffffff, 53057e252bfSMichael Neumann 0x00009190, 0x00090008, 0xffffffff, 53157e252bfSMichael Neumann 0x00009194, 0x00070000, 0xffffffff, 53257e252bfSMichael Neumann 0x00009198, 0x00030002, 0xffffffff, 
53357e252bfSMichael Neumann 0x0000919c, 0x00050004, 0xffffffff, 53457e252bfSMichael Neumann 0x000091a8, 0x00010006, 0xffffffff, 53557e252bfSMichael Neumann 0x000091ac, 0x00090008, 0xffffffff, 53657e252bfSMichael Neumann 0x000091b0, 0x00070000, 0xffffffff, 53757e252bfSMichael Neumann 0x000091b4, 0x00030002, 0xffffffff, 53857e252bfSMichael Neumann 0x000091b8, 0x00050004, 0xffffffff, 53957e252bfSMichael Neumann 0x000091c4, 0x00010006, 0xffffffff, 54057e252bfSMichael Neumann 0x000091c8, 0x00090008, 0xffffffff, 54157e252bfSMichael Neumann 0x000091cc, 0x00070000, 0xffffffff, 54257e252bfSMichael Neumann 0x000091d0, 0x00030002, 0xffffffff, 54357e252bfSMichael Neumann 0x000091d4, 0x00050004, 0xffffffff, 54457e252bfSMichael Neumann 0x000091e0, 0x00010006, 0xffffffff, 54557e252bfSMichael Neumann 0x000091e4, 0x00090008, 0xffffffff, 54657e252bfSMichael Neumann 0x000091e8, 0x00000000, 0xffffffff, 54757e252bfSMichael Neumann 0x000091ec, 0x00070000, 0xffffffff, 54857e252bfSMichael Neumann 0x000091f0, 0x00030002, 0xffffffff, 54957e252bfSMichael Neumann 0x000091f4, 0x00050004, 0xffffffff, 55057e252bfSMichael Neumann 0x00009200, 0x00010006, 0xffffffff, 55157e252bfSMichael Neumann 0x00009204, 0x00090008, 0xffffffff, 55257e252bfSMichael Neumann 0x00009208, 0x00070000, 0xffffffff, 55357e252bfSMichael Neumann 0x0000920c, 0x00030002, 0xffffffff, 55457e252bfSMichael Neumann 0x00009210, 0x00050004, 0xffffffff, 55557e252bfSMichael Neumann 0x0000921c, 0x00010006, 0xffffffff, 55657e252bfSMichael Neumann 0x00009220, 0x00090008, 0xffffffff, 55757e252bfSMichael Neumann 0x00009224, 0x00070000, 0xffffffff, 55857e252bfSMichael Neumann 0x00009228, 0x00030002, 0xffffffff, 55957e252bfSMichael Neumann 0x0000922c, 0x00050004, 0xffffffff, 56057e252bfSMichael Neumann 0x00009238, 0x00010006, 0xffffffff, 56157e252bfSMichael Neumann 0x0000923c, 0x00090008, 0xffffffff, 56257e252bfSMichael Neumann 0x00009240, 0x00070000, 0xffffffff, 56357e252bfSMichael Neumann 0x00009244, 0x00030002, 0xffffffff, 
56457e252bfSMichael Neumann 0x00009248, 0x00050004, 0xffffffff, 56557e252bfSMichael Neumann 0x00009254, 0x00010006, 0xffffffff, 56657e252bfSMichael Neumann 0x00009258, 0x00090008, 0xffffffff, 56757e252bfSMichael Neumann 0x0000925c, 0x00070000, 0xffffffff, 56857e252bfSMichael Neumann 0x00009260, 0x00030002, 0xffffffff, 56957e252bfSMichael Neumann 0x00009264, 0x00050004, 0xffffffff, 57057e252bfSMichael Neumann 0x00009270, 0x00010006, 0xffffffff, 57157e252bfSMichael Neumann 0x00009274, 0x00090008, 0xffffffff, 57257e252bfSMichael Neumann 0x00009278, 0x00070000, 0xffffffff, 57357e252bfSMichael Neumann 0x0000927c, 0x00030002, 0xffffffff, 57457e252bfSMichael Neumann 0x00009280, 0x00050004, 0xffffffff, 57557e252bfSMichael Neumann 0x0000928c, 0x00010006, 0xffffffff, 57657e252bfSMichael Neumann 0x00009290, 0x00090008, 0xffffffff, 57757e252bfSMichael Neumann 0x000092a8, 0x00070000, 0xffffffff, 57857e252bfSMichael Neumann 0x000092ac, 0x00030002, 0xffffffff, 57957e252bfSMichael Neumann 0x000092b0, 0x00050004, 0xffffffff, 58057e252bfSMichael Neumann 0x000092bc, 0x00010006, 0xffffffff, 58157e252bfSMichael Neumann 0x000092c0, 0x00090008, 0xffffffff, 58257e252bfSMichael Neumann 0x000092c4, 0x00070000, 0xffffffff, 58357e252bfSMichael Neumann 0x000092c8, 0x00030002, 0xffffffff, 58457e252bfSMichael Neumann 0x000092cc, 0x00050004, 0xffffffff, 58557e252bfSMichael Neumann 0x000092d8, 0x00010006, 0xffffffff, 58657e252bfSMichael Neumann 0x000092dc, 0x00090008, 0xffffffff, 58757e252bfSMichael Neumann 0x00009294, 0x00000000, 0xffffffff, 58857e252bfSMichael Neumann 0x0000802c, 0xc0000000, 0xffffffff, 58957e252bfSMichael Neumann 0x00003fc4, 0xc0000000, 0xffffffff, 59057e252bfSMichael Neumann 0x000008f8, 0x00000010, 0xffffffff, 59157e252bfSMichael Neumann 0x000008fc, 0x00000000, 0xffffffff, 59257e252bfSMichael Neumann 0x000008f8, 0x00000011, 0xffffffff, 59357e252bfSMichael Neumann 0x000008fc, 0x00000000, 0xffffffff, 59457e252bfSMichael Neumann 0x000008f8, 0x00000012, 0xffffffff, 
59557e252bfSMichael Neumann 0x000008fc, 0x00000000, 0xffffffff, 59657e252bfSMichael Neumann 0x000008f8, 0x00000013, 0xffffffff, 59757e252bfSMichael Neumann 0x000008fc, 0x00000000, 0xffffffff, 59857e252bfSMichael Neumann 0x000008f8, 0x00000014, 0xffffffff, 59957e252bfSMichael Neumann 0x000008fc, 0x00000000, 0xffffffff, 60057e252bfSMichael Neumann 0x000008f8, 0x00000015, 0xffffffff, 60157e252bfSMichael Neumann 0x000008fc, 0x00000000, 0xffffffff, 60257e252bfSMichael Neumann 0x000008f8, 0x00000016, 0xffffffff, 60357e252bfSMichael Neumann 0x000008fc, 0x00000000, 0xffffffff, 60457e252bfSMichael Neumann 0x000008f8, 0x00000017, 0xffffffff, 60557e252bfSMichael Neumann 0x000008fc, 0x00000000, 0xffffffff, 60657e252bfSMichael Neumann 0x000008f8, 0x00000018, 0xffffffff, 60757e252bfSMichael Neumann 0x000008fc, 0x00000000, 0xffffffff, 60857e252bfSMichael Neumann 0x000008f8, 0x00000019, 0xffffffff, 60957e252bfSMichael Neumann 0x000008fc, 0x00000000, 0xffffffff, 61057e252bfSMichael Neumann 0x000008f8, 0x0000001a, 0xffffffff, 61157e252bfSMichael Neumann 0x000008fc, 0x00000000, 0xffffffff, 61257e252bfSMichael Neumann 0x000008f8, 0x0000001b, 0xffffffff, 61357e252bfSMichael Neumann 0x000008fc, 0x00000000, 0xffffffff 61457e252bfSMichael Neumann }; 61557e252bfSMichael Neumann #define CAYMAN_MGCG_DEFAULT_LENGTH sizeof(cayman_mgcg_default) / (3 * sizeof(u32)) 61657e252bfSMichael Neumann 61757e252bfSMichael Neumann static const u32 cayman_mgcg_disable[] = 61857e252bfSMichael Neumann { 61957e252bfSMichael Neumann 0x0000802c, 0xc0000000, 0xffffffff, 62057e252bfSMichael Neumann 0x000008f8, 0x00000000, 0xffffffff, 62157e252bfSMichael Neumann 0x000008fc, 0xffffffff, 0xffffffff, 62257e252bfSMichael Neumann 0x000008f8, 0x00000001, 0xffffffff, 62357e252bfSMichael Neumann 0x000008fc, 0xffffffff, 0xffffffff, 62457e252bfSMichael Neumann 0x000008f8, 0x00000002, 0xffffffff, 62557e252bfSMichael Neumann 0x000008fc, 0xffffffff, 0xffffffff, 62657e252bfSMichael Neumann 0x000008f8, 0x00000003, 0xffffffff, 
62757e252bfSMichael Neumann 0x000008fc, 0xffffffff, 0xffffffff, 62857e252bfSMichael Neumann 0x00009150, 0x00600000, 0xffffffff 62957e252bfSMichael Neumann }; 63057e252bfSMichael Neumann #define CAYMAN_MGCG_DISABLE_LENGTH sizeof(cayman_mgcg_disable) / (3 * sizeof(u32)) 63157e252bfSMichael Neumann 63257e252bfSMichael Neumann static const u32 cayman_mgcg_enable[] = 63357e252bfSMichael Neumann { 63457e252bfSMichael Neumann 0x0000802c, 0xc0000000, 0xffffffff, 63557e252bfSMichael Neumann 0x000008f8, 0x00000000, 0xffffffff, 63657e252bfSMichael Neumann 0x000008fc, 0x00000000, 0xffffffff, 63757e252bfSMichael Neumann 0x000008f8, 0x00000001, 0xffffffff, 63857e252bfSMichael Neumann 0x000008fc, 0x00000000, 0xffffffff, 63957e252bfSMichael Neumann 0x000008f8, 0x00000002, 0xffffffff, 64057e252bfSMichael Neumann 0x000008fc, 0x00600000, 0xffffffff, 64157e252bfSMichael Neumann 0x000008f8, 0x00000003, 0xffffffff, 64257e252bfSMichael Neumann 0x000008fc, 0x00000000, 0xffffffff, 64357e252bfSMichael Neumann 0x00009150, 0x96944200, 0xffffffff 64457e252bfSMichael Neumann }; 64557e252bfSMichael Neumann 64657e252bfSMichael Neumann #define CAYMAN_MGCG_ENABLE_LENGTH sizeof(cayman_mgcg_enable) / (3 * sizeof(u32)) 64757e252bfSMichael Neumann 64857e252bfSMichael Neumann #define NISLANDS_SYSLS_SEQUENCE 100 64957e252bfSMichael Neumann 65057e252bfSMichael Neumann static const u32 cayman_sysls_default[] = 65157e252bfSMichael Neumann { 65257e252bfSMichael Neumann /* Register, Value, Mask bits */ 65357e252bfSMichael Neumann 0x000055e8, 0x00000000, 0xffffffff, 65457e252bfSMichael Neumann 0x0000d0bc, 0x00000000, 0xffffffff, 65557e252bfSMichael Neumann 0x0000d8bc, 0x00000000, 0xffffffff, 65657e252bfSMichael Neumann 0x000015c0, 0x000c1401, 0xffffffff, 65757e252bfSMichael Neumann 0x0000264c, 0x000c0400, 0xffffffff, 65857e252bfSMichael Neumann 0x00002648, 0x000c0400, 0xffffffff, 65957e252bfSMichael Neumann 0x00002650, 0x000c0400, 0xffffffff, 66057e252bfSMichael Neumann 0x000020b8, 0x000c0400, 0xffffffff, 
66157e252bfSMichael Neumann 0x000020bc, 0x000c0400, 0xffffffff, 66257e252bfSMichael Neumann 0x000020c0, 0x000c0c80, 0xffffffff, 66357e252bfSMichael Neumann 0x0000f4a0, 0x000000c0, 0xffffffff, 66457e252bfSMichael Neumann 0x0000f4a4, 0x00680fff, 0xffffffff, 66557e252bfSMichael Neumann 0x00002f50, 0x00000404, 0xffffffff, 66657e252bfSMichael Neumann 0x000004c8, 0x00000001, 0xffffffff, 66757e252bfSMichael Neumann 0x000064ec, 0x00000000, 0xffffffff, 66857e252bfSMichael Neumann 0x00000c7c, 0x00000000, 0xffffffff, 66957e252bfSMichael Neumann 0x00008dfc, 0x00000000, 0xffffffff 67057e252bfSMichael Neumann }; 67157e252bfSMichael Neumann #define CAYMAN_SYSLS_DEFAULT_LENGTH sizeof(cayman_sysls_default) / (3 * sizeof(u32)) 67257e252bfSMichael Neumann 67357e252bfSMichael Neumann static const u32 cayman_sysls_disable[] = 67457e252bfSMichael Neumann { 67557e252bfSMichael Neumann /* Register, Value, Mask bits */ 67657e252bfSMichael Neumann 0x0000d0c0, 0x00000000, 0xffffffff, 67757e252bfSMichael Neumann 0x0000d8c0, 0x00000000, 0xffffffff, 67857e252bfSMichael Neumann 0x000055e8, 0x00000000, 0xffffffff, 67957e252bfSMichael Neumann 0x0000d0bc, 0x00000000, 0xffffffff, 68057e252bfSMichael Neumann 0x0000d8bc, 0x00000000, 0xffffffff, 68157e252bfSMichael Neumann 0x000015c0, 0x00041401, 0xffffffff, 68257e252bfSMichael Neumann 0x0000264c, 0x00040400, 0xffffffff, 68357e252bfSMichael Neumann 0x00002648, 0x00040400, 0xffffffff, 68457e252bfSMichael Neumann 0x00002650, 0x00040400, 0xffffffff, 68557e252bfSMichael Neumann 0x000020b8, 0x00040400, 0xffffffff, 68657e252bfSMichael Neumann 0x000020bc, 0x00040400, 0xffffffff, 68757e252bfSMichael Neumann 0x000020c0, 0x00040c80, 0xffffffff, 68857e252bfSMichael Neumann 0x0000f4a0, 0x000000c0, 0xffffffff, 68957e252bfSMichael Neumann 0x0000f4a4, 0x00680000, 0xffffffff, 69057e252bfSMichael Neumann 0x00002f50, 0x00000404, 0xffffffff, 69157e252bfSMichael Neumann 0x000004c8, 0x00000001, 0xffffffff, 69257e252bfSMichael Neumann 0x000064ec, 0x00007ffd, 0xffffffff, 
69357e252bfSMichael Neumann 0x00000c7c, 0x0000ff00, 0xffffffff, 69457e252bfSMichael Neumann 0x00008dfc, 0x0000007f, 0xffffffff 69557e252bfSMichael Neumann }; 69657e252bfSMichael Neumann #define CAYMAN_SYSLS_DISABLE_LENGTH sizeof(cayman_sysls_disable) / (3 * sizeof(u32)) 69757e252bfSMichael Neumann 69857e252bfSMichael Neumann static const u32 cayman_sysls_enable[] = 69957e252bfSMichael Neumann { 70057e252bfSMichael Neumann /* Register, Value, Mask bits */ 70157e252bfSMichael Neumann 0x000055e8, 0x00000001, 0xffffffff, 70257e252bfSMichael Neumann 0x0000d0bc, 0x00000100, 0xffffffff, 70357e252bfSMichael Neumann 0x0000d8bc, 0x00000100, 0xffffffff, 70457e252bfSMichael Neumann 0x000015c0, 0x000c1401, 0xffffffff, 70557e252bfSMichael Neumann 0x0000264c, 0x000c0400, 0xffffffff, 70657e252bfSMichael Neumann 0x00002648, 0x000c0400, 0xffffffff, 70757e252bfSMichael Neumann 0x00002650, 0x000c0400, 0xffffffff, 70857e252bfSMichael Neumann 0x000020b8, 0x000c0400, 0xffffffff, 70957e252bfSMichael Neumann 0x000020bc, 0x000c0400, 0xffffffff, 71057e252bfSMichael Neumann 0x000020c0, 0x000c0c80, 0xffffffff, 71157e252bfSMichael Neumann 0x0000f4a0, 0x000000c0, 0xffffffff, 71257e252bfSMichael Neumann 0x0000f4a4, 0x00680fff, 0xffffffff, 71357e252bfSMichael Neumann 0x00002f50, 0x00000903, 0xffffffff, 71457e252bfSMichael Neumann 0x000004c8, 0x00000000, 0xffffffff, 71557e252bfSMichael Neumann 0x000064ec, 0x00000000, 0xffffffff, 71657e252bfSMichael Neumann 0x00000c7c, 0x00000000, 0xffffffff, 71757e252bfSMichael Neumann 0x00008dfc, 0x00000000, 0xffffffff 71857e252bfSMichael Neumann }; 71957e252bfSMichael Neumann #define CAYMAN_SYSLS_ENABLE_LENGTH sizeof(cayman_sysls_enable) / (3 * sizeof(u32)) 72057e252bfSMichael Neumann 72157e252bfSMichael Neumann struct rv7xx_power_info *rv770_get_pi(struct radeon_device *rdev); 72257e252bfSMichael Neumann struct evergreen_power_info *evergreen_get_pi(struct radeon_device *rdev); 72357e252bfSMichael Neumann struct ni_power_info *ni_get_pi(struct radeon_device 
*rdev); 72457e252bfSMichael Neumann struct ni_ps *ni_get_ps(struct radeon_ps *rps); 72557e252bfSMichael Neumann void ni_dpm_reset_asic(struct radeon_device *rdev); 72657e252bfSMichael Neumann 72757e252bfSMichael Neumann struct ni_power_info *ni_get_pi(struct radeon_device *rdev) 72857e252bfSMichael Neumann { 72957e252bfSMichael Neumann struct ni_power_info *pi = rdev->pm.dpm.priv; 73057e252bfSMichael Neumann 73157e252bfSMichael Neumann return pi; 73257e252bfSMichael Neumann } 73357e252bfSMichael Neumann 73457e252bfSMichael Neumann struct ni_ps *ni_get_ps(struct radeon_ps *rps) 73557e252bfSMichael Neumann { 73657e252bfSMichael Neumann struct ni_ps *ps = rps->ps_priv; 73757e252bfSMichael Neumann 73857e252bfSMichael Neumann return ps; 73957e252bfSMichael Neumann } 74057e252bfSMichael Neumann 74157e252bfSMichael Neumann static void ni_calculate_leakage_for_v_and_t_formula(const struct ni_leakage_coeffients *coeff, 74257e252bfSMichael Neumann u16 v, s32 t, 74357e252bfSMichael Neumann u32 ileakage, 74457e252bfSMichael Neumann u32 *leakage) 74557e252bfSMichael Neumann { 74657e252bfSMichael Neumann s64 kt, kv, leakage_w, i_leakage, vddc, temperature; 74757e252bfSMichael Neumann 74857e252bfSMichael Neumann i_leakage = div64_s64(drm_int2fixp(ileakage), 1000); 74957e252bfSMichael Neumann vddc = div64_s64(drm_int2fixp(v), 1000); 75057e252bfSMichael Neumann temperature = div64_s64(drm_int2fixp(t), 1000); 75157e252bfSMichael Neumann 75257e252bfSMichael Neumann kt = drm_fixp_mul(div64_s64(drm_int2fixp(coeff->at), 1000), 75357e252bfSMichael Neumann drm_fixp_exp(drm_fixp_mul(div64_s64(drm_int2fixp(coeff->bt), 1000), temperature))); 75457e252bfSMichael Neumann kv = drm_fixp_mul(div64_s64(drm_int2fixp(coeff->av), 1000), 75557e252bfSMichael Neumann drm_fixp_exp(drm_fixp_mul(div64_s64(drm_int2fixp(coeff->bv), 1000), vddc))); 75657e252bfSMichael Neumann 75757e252bfSMichael Neumann leakage_w = drm_fixp_mul(drm_fixp_mul(drm_fixp_mul(i_leakage, kt), kv), vddc); 75857e252bfSMichael Neumann 
75957e252bfSMichael Neumann *leakage = drm_fixp2int(leakage_w * 1000); 76057e252bfSMichael Neumann } 76157e252bfSMichael Neumann 76257e252bfSMichael Neumann static void ni_calculate_leakage_for_v_and_t(struct radeon_device *rdev, 76357e252bfSMichael Neumann const struct ni_leakage_coeffients *coeff, 76457e252bfSMichael Neumann u16 v, 76557e252bfSMichael Neumann s32 t, 76657e252bfSMichael Neumann u32 i_leakage, 76757e252bfSMichael Neumann u32 *leakage) 76857e252bfSMichael Neumann { 76957e252bfSMichael Neumann ni_calculate_leakage_for_v_and_t_formula(coeff, v, t, i_leakage, leakage); 77057e252bfSMichael Neumann } 77157e252bfSMichael Neumann 77257e252bfSMichael Neumann bool ni_dpm_vblank_too_short(struct radeon_device *rdev) 77357e252bfSMichael Neumann { 77457e252bfSMichael Neumann struct rv7xx_power_info *pi = rv770_get_pi(rdev); 77557e252bfSMichael Neumann u32 vblank_time = r600_dpm_get_vblank_time(rdev); 776*4cd92098Szrj /* we never hit the non-gddr5 limit so disable it */ 777*4cd92098Szrj u32 switch_limit = pi->mem_gddr5 ? 
450 : 0; 77857e252bfSMichael Neumann 77957e252bfSMichael Neumann if (vblank_time < switch_limit) 78057e252bfSMichael Neumann return true; 78157e252bfSMichael Neumann else 78257e252bfSMichael Neumann return false; 78357e252bfSMichael Neumann 78457e252bfSMichael Neumann } 78557e252bfSMichael Neumann 78657e252bfSMichael Neumann static void ni_apply_state_adjust_rules(struct radeon_device *rdev, 78757e252bfSMichael Neumann struct radeon_ps *rps) 78857e252bfSMichael Neumann { 78957e252bfSMichael Neumann struct ni_ps *ps = ni_get_ps(rps); 79057e252bfSMichael Neumann struct radeon_clock_and_voltage_limits *max_limits; 79157e252bfSMichael Neumann bool disable_mclk_switching; 79257e252bfSMichael Neumann u32 mclk, sclk; 79357e252bfSMichael Neumann u16 vddc, vddci; 794*4cd92098Szrj u32 max_sclk_vddc, max_mclk_vddci, max_mclk_vddc; 79557e252bfSMichael Neumann int i; 79657e252bfSMichael Neumann 79757e252bfSMichael Neumann if ((rdev->pm.dpm.new_active_crtc_count > 1) || 79857e252bfSMichael Neumann ni_dpm_vblank_too_short(rdev)) 79957e252bfSMichael Neumann disable_mclk_switching = true; 80057e252bfSMichael Neumann else 80157e252bfSMichael Neumann disable_mclk_switching = false; 80257e252bfSMichael Neumann 80357e252bfSMichael Neumann if (rdev->pm.dpm.ac_power) 80457e252bfSMichael Neumann max_limits = &rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac; 80557e252bfSMichael Neumann else 80657e252bfSMichael Neumann max_limits = &rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc; 80757e252bfSMichael Neumann 80857e252bfSMichael Neumann if (rdev->pm.dpm.ac_power == false) { 80957e252bfSMichael Neumann for (i = 0; i < ps->performance_level_count; i++) { 81057e252bfSMichael Neumann if (ps->performance_levels[i].mclk > max_limits->mclk) 81157e252bfSMichael Neumann ps->performance_levels[i].mclk = max_limits->mclk; 81257e252bfSMichael Neumann if (ps->performance_levels[i].sclk > max_limits->sclk) 81357e252bfSMichael Neumann ps->performance_levels[i].sclk = max_limits->sclk; 81457e252bfSMichael 
Neumann if (ps->performance_levels[i].vddc > max_limits->vddc) 81557e252bfSMichael Neumann ps->performance_levels[i].vddc = max_limits->vddc; 81657e252bfSMichael Neumann if (ps->performance_levels[i].vddci > max_limits->vddci) 81757e252bfSMichael Neumann ps->performance_levels[i].vddci = max_limits->vddci; 81857e252bfSMichael Neumann } 81957e252bfSMichael Neumann } 82057e252bfSMichael Neumann 821*4cd92098Szrj /* limit clocks to max supported clocks based on voltage dependency tables */ 822*4cd92098Szrj btc_get_max_clock_from_voltage_dependency_table(&rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk, 823*4cd92098Szrj &max_sclk_vddc); 824*4cd92098Szrj btc_get_max_clock_from_voltage_dependency_table(&rdev->pm.dpm.dyn_state.vddci_dependency_on_mclk, 825*4cd92098Szrj &max_mclk_vddci); 826*4cd92098Szrj btc_get_max_clock_from_voltage_dependency_table(&rdev->pm.dpm.dyn_state.vddc_dependency_on_mclk, 827*4cd92098Szrj &max_mclk_vddc); 828*4cd92098Szrj 829*4cd92098Szrj for (i = 0; i < ps->performance_level_count; i++) { 830*4cd92098Szrj if (max_sclk_vddc) { 831*4cd92098Szrj if (ps->performance_levels[i].sclk > max_sclk_vddc) 832*4cd92098Szrj ps->performance_levels[i].sclk = max_sclk_vddc; 833*4cd92098Szrj } 834*4cd92098Szrj if (max_mclk_vddci) { 835*4cd92098Szrj if (ps->performance_levels[i].mclk > max_mclk_vddci) 836*4cd92098Szrj ps->performance_levels[i].mclk = max_mclk_vddci; 837*4cd92098Szrj } 838*4cd92098Szrj if (max_mclk_vddc) { 839*4cd92098Szrj if (ps->performance_levels[i].mclk > max_mclk_vddc) 840*4cd92098Szrj ps->performance_levels[i].mclk = max_mclk_vddc; 841*4cd92098Szrj } 842*4cd92098Szrj } 843*4cd92098Szrj 84457e252bfSMichael Neumann /* XXX validate the min clocks required for display */ 84557e252bfSMichael Neumann 84657e252bfSMichael Neumann if (disable_mclk_switching) { 84757e252bfSMichael Neumann mclk = ps->performance_levels[ps->performance_level_count - 1].mclk; 84857e252bfSMichael Neumann sclk = ps->performance_levels[0].sclk; 84957e252bfSMichael Neumann 
vddc = ps->performance_levels[0].vddc; 85057e252bfSMichael Neumann vddci = ps->performance_levels[ps->performance_level_count - 1].vddci; 85157e252bfSMichael Neumann } else { 85257e252bfSMichael Neumann sclk = ps->performance_levels[0].sclk; 85357e252bfSMichael Neumann mclk = ps->performance_levels[0].mclk; 85457e252bfSMichael Neumann vddc = ps->performance_levels[0].vddc; 85557e252bfSMichael Neumann vddci = ps->performance_levels[0].vddci; 85657e252bfSMichael Neumann } 85757e252bfSMichael Neumann 85857e252bfSMichael Neumann /* adjusted low state */ 85957e252bfSMichael Neumann ps->performance_levels[0].sclk = sclk; 86057e252bfSMichael Neumann ps->performance_levels[0].mclk = mclk; 86157e252bfSMichael Neumann ps->performance_levels[0].vddc = vddc; 86257e252bfSMichael Neumann ps->performance_levels[0].vddci = vddci; 86357e252bfSMichael Neumann 86457e252bfSMichael Neumann btc_skip_blacklist_clocks(rdev, max_limits->sclk, max_limits->mclk, 86557e252bfSMichael Neumann &ps->performance_levels[0].sclk, 86657e252bfSMichael Neumann &ps->performance_levels[0].mclk); 86757e252bfSMichael Neumann 86857e252bfSMichael Neumann for (i = 1; i < ps->performance_level_count; i++) { 86957e252bfSMichael Neumann if (ps->performance_levels[i].sclk < ps->performance_levels[i - 1].sclk) 87057e252bfSMichael Neumann ps->performance_levels[i].sclk = ps->performance_levels[i - 1].sclk; 87157e252bfSMichael Neumann if (ps->performance_levels[i].vddc < ps->performance_levels[i - 1].vddc) 87257e252bfSMichael Neumann ps->performance_levels[i].vddc = ps->performance_levels[i - 1].vddc; 87357e252bfSMichael Neumann } 87457e252bfSMichael Neumann 87557e252bfSMichael Neumann if (disable_mclk_switching) { 87657e252bfSMichael Neumann mclk = ps->performance_levels[0].mclk; 87757e252bfSMichael Neumann for (i = 1; i < ps->performance_level_count; i++) { 87857e252bfSMichael Neumann if (mclk < ps->performance_levels[i].mclk) 87957e252bfSMichael Neumann mclk = ps->performance_levels[i].mclk; 88057e252bfSMichael 
Neumann } 88157e252bfSMichael Neumann for (i = 0; i < ps->performance_level_count; i++) { 88257e252bfSMichael Neumann ps->performance_levels[i].mclk = mclk; 88357e252bfSMichael Neumann ps->performance_levels[i].vddci = vddci; 88457e252bfSMichael Neumann } 88557e252bfSMichael Neumann } else { 88657e252bfSMichael Neumann for (i = 1; i < ps->performance_level_count; i++) { 88757e252bfSMichael Neumann if (ps->performance_levels[i].mclk < ps->performance_levels[i - 1].mclk) 88857e252bfSMichael Neumann ps->performance_levels[i].mclk = ps->performance_levels[i - 1].mclk; 88957e252bfSMichael Neumann if (ps->performance_levels[i].vddci < ps->performance_levels[i - 1].vddci) 89057e252bfSMichael Neumann ps->performance_levels[i].vddci = ps->performance_levels[i - 1].vddci; 89157e252bfSMichael Neumann } 89257e252bfSMichael Neumann } 89357e252bfSMichael Neumann 89457e252bfSMichael Neumann for (i = 1; i < ps->performance_level_count; i++) 89557e252bfSMichael Neumann btc_skip_blacklist_clocks(rdev, max_limits->sclk, max_limits->mclk, 89657e252bfSMichael Neumann &ps->performance_levels[i].sclk, 89757e252bfSMichael Neumann &ps->performance_levels[i].mclk); 89857e252bfSMichael Neumann 89957e252bfSMichael Neumann for (i = 0; i < ps->performance_level_count; i++) 90057e252bfSMichael Neumann btc_adjust_clock_combinations(rdev, max_limits, 90157e252bfSMichael Neumann &ps->performance_levels[i]); 90257e252bfSMichael Neumann 90357e252bfSMichael Neumann for (i = 0; i < ps->performance_level_count; i++) { 90457e252bfSMichael Neumann btc_apply_voltage_dependency_rules(&rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk, 90557e252bfSMichael Neumann ps->performance_levels[i].sclk, 90657e252bfSMichael Neumann max_limits->vddc, &ps->performance_levels[i].vddc); 90757e252bfSMichael Neumann btc_apply_voltage_dependency_rules(&rdev->pm.dpm.dyn_state.vddci_dependency_on_mclk, 90857e252bfSMichael Neumann ps->performance_levels[i].mclk, 90957e252bfSMichael Neumann max_limits->vddci, 
&ps->performance_levels[i].vddci); 91057e252bfSMichael Neumann btc_apply_voltage_dependency_rules(&rdev->pm.dpm.dyn_state.vddc_dependency_on_mclk, 91157e252bfSMichael Neumann ps->performance_levels[i].mclk, 91257e252bfSMichael Neumann max_limits->vddc, &ps->performance_levels[i].vddc); 91357e252bfSMichael Neumann btc_apply_voltage_dependency_rules(&rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk, 91457e252bfSMichael Neumann rdev->clock.current_dispclk, 91557e252bfSMichael Neumann max_limits->vddc, &ps->performance_levels[i].vddc); 91657e252bfSMichael Neumann } 91757e252bfSMichael Neumann 91857e252bfSMichael Neumann for (i = 0; i < ps->performance_level_count; i++) { 91957e252bfSMichael Neumann btc_apply_voltage_delta_rules(rdev, 92057e252bfSMichael Neumann max_limits->vddc, max_limits->vddci, 92157e252bfSMichael Neumann &ps->performance_levels[i].vddc, 92257e252bfSMichael Neumann &ps->performance_levels[i].vddci); 92357e252bfSMichael Neumann } 92457e252bfSMichael Neumann 92557e252bfSMichael Neumann ps->dc_compatible = true; 92657e252bfSMichael Neumann for (i = 0; i < ps->performance_level_count; i++) { 92757e252bfSMichael Neumann if (ps->performance_levels[i].vddc > rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc.vddc) 92857e252bfSMichael Neumann ps->dc_compatible = false; 92957e252bfSMichael Neumann 93057e252bfSMichael Neumann if (ps->performance_levels[i].vddc < rdev->pm.dpm.dyn_state.min_vddc_for_pcie_gen2) 93157e252bfSMichael Neumann ps->performance_levels[i].flags &= ~ATOM_PPLIB_R600_FLAGS_PCIEGEN2; 93257e252bfSMichael Neumann } 93357e252bfSMichael Neumann } 93457e252bfSMichael Neumann 93557e252bfSMichael Neumann static void ni_cg_clockgating_default(struct radeon_device *rdev) 93657e252bfSMichael Neumann { 93757e252bfSMichael Neumann u32 count; 93857e252bfSMichael Neumann const u32 *ps = NULL; 93957e252bfSMichael Neumann 94057e252bfSMichael Neumann ps = (const u32 *)&cayman_cgcg_cgls_default; 94157e252bfSMichael Neumann count = 
CAYMAN_CGCG_CGLS_DEFAULT_LENGTH; 94257e252bfSMichael Neumann 94357e252bfSMichael Neumann btc_program_mgcg_hw_sequence(rdev, ps, count); 94457e252bfSMichael Neumann } 94557e252bfSMichael Neumann 94657e252bfSMichael Neumann static void ni_gfx_clockgating_enable(struct radeon_device *rdev, 94757e252bfSMichael Neumann bool enable) 94857e252bfSMichael Neumann { 94957e252bfSMichael Neumann u32 count; 95057e252bfSMichael Neumann const u32 *ps = NULL; 95157e252bfSMichael Neumann 95257e252bfSMichael Neumann if (enable) { 95357e252bfSMichael Neumann ps = (const u32 *)&cayman_cgcg_cgls_enable; 95457e252bfSMichael Neumann count = CAYMAN_CGCG_CGLS_ENABLE_LENGTH; 95557e252bfSMichael Neumann } else { 95657e252bfSMichael Neumann ps = (const u32 *)&cayman_cgcg_cgls_disable; 95757e252bfSMichael Neumann count = CAYMAN_CGCG_CGLS_DISABLE_LENGTH; 95857e252bfSMichael Neumann } 95957e252bfSMichael Neumann 96057e252bfSMichael Neumann btc_program_mgcg_hw_sequence(rdev, ps, count); 96157e252bfSMichael Neumann } 96257e252bfSMichael Neumann 96357e252bfSMichael Neumann static void ni_mg_clockgating_default(struct radeon_device *rdev) 96457e252bfSMichael Neumann { 96557e252bfSMichael Neumann u32 count; 96657e252bfSMichael Neumann const u32 *ps = NULL; 96757e252bfSMichael Neumann 96857e252bfSMichael Neumann ps = (const u32 *)&cayman_mgcg_default; 96957e252bfSMichael Neumann count = CAYMAN_MGCG_DEFAULT_LENGTH; 97057e252bfSMichael Neumann 97157e252bfSMichael Neumann btc_program_mgcg_hw_sequence(rdev, ps, count); 97257e252bfSMichael Neumann } 97357e252bfSMichael Neumann 97457e252bfSMichael Neumann static void ni_mg_clockgating_enable(struct radeon_device *rdev, 97557e252bfSMichael Neumann bool enable) 97657e252bfSMichael Neumann { 97757e252bfSMichael Neumann u32 count; 97857e252bfSMichael Neumann const u32 *ps = NULL; 97957e252bfSMichael Neumann 98057e252bfSMichael Neumann if (enable) { 98157e252bfSMichael Neumann ps = (const u32 *)&cayman_mgcg_enable; 98257e252bfSMichael Neumann count = 
CAYMAN_MGCG_ENABLE_LENGTH; 98357e252bfSMichael Neumann } else { 98457e252bfSMichael Neumann ps = (const u32 *)&cayman_mgcg_disable; 98557e252bfSMichael Neumann count = CAYMAN_MGCG_DISABLE_LENGTH; 98657e252bfSMichael Neumann } 98757e252bfSMichael Neumann 98857e252bfSMichael Neumann btc_program_mgcg_hw_sequence(rdev, ps, count); 98957e252bfSMichael Neumann } 99057e252bfSMichael Neumann 99157e252bfSMichael Neumann static void ni_ls_clockgating_default(struct radeon_device *rdev) 99257e252bfSMichael Neumann { 99357e252bfSMichael Neumann u32 count; 99457e252bfSMichael Neumann const u32 *ps = NULL; 99557e252bfSMichael Neumann 99657e252bfSMichael Neumann ps = (const u32 *)&cayman_sysls_default; 99757e252bfSMichael Neumann count = CAYMAN_SYSLS_DEFAULT_LENGTH; 99857e252bfSMichael Neumann 99957e252bfSMichael Neumann btc_program_mgcg_hw_sequence(rdev, ps, count); 100057e252bfSMichael Neumann } 100157e252bfSMichael Neumann 100257e252bfSMichael Neumann static void ni_ls_clockgating_enable(struct radeon_device *rdev, 100357e252bfSMichael Neumann bool enable) 100457e252bfSMichael Neumann { 100557e252bfSMichael Neumann u32 count; 100657e252bfSMichael Neumann const u32 *ps = NULL; 100757e252bfSMichael Neumann 100857e252bfSMichael Neumann if (enable) { 100957e252bfSMichael Neumann ps = (const u32 *)&cayman_sysls_enable; 101057e252bfSMichael Neumann count = CAYMAN_SYSLS_ENABLE_LENGTH; 101157e252bfSMichael Neumann } else { 101257e252bfSMichael Neumann ps = (const u32 *)&cayman_sysls_disable; 101357e252bfSMichael Neumann count = CAYMAN_SYSLS_DISABLE_LENGTH; 101457e252bfSMichael Neumann } 101557e252bfSMichael Neumann 101657e252bfSMichael Neumann btc_program_mgcg_hw_sequence(rdev, ps, count); 101757e252bfSMichael Neumann 101857e252bfSMichael Neumann } 101957e252bfSMichael Neumann 102057e252bfSMichael Neumann static int ni_patch_single_dependency_table_based_on_leakage(struct radeon_device *rdev, 102157e252bfSMichael Neumann struct radeon_clock_voltage_dependency_table *table) 
102257e252bfSMichael Neumann { 102357e252bfSMichael Neumann struct rv7xx_power_info *pi = rv770_get_pi(rdev); 102457e252bfSMichael Neumann u32 i; 102557e252bfSMichael Neumann 102657e252bfSMichael Neumann if (table) { 102757e252bfSMichael Neumann for (i = 0; i < table->count; i++) { 102857e252bfSMichael Neumann if (0xff01 == table->entries[i].v) { 102957e252bfSMichael Neumann if (pi->max_vddc == 0) 103057e252bfSMichael Neumann return -EINVAL; 103157e252bfSMichael Neumann table->entries[i].v = pi->max_vddc; 103257e252bfSMichael Neumann } 103357e252bfSMichael Neumann } 103457e252bfSMichael Neumann } 103557e252bfSMichael Neumann return 0; 103657e252bfSMichael Neumann } 103757e252bfSMichael Neumann 103857e252bfSMichael Neumann static int ni_patch_dependency_tables_based_on_leakage(struct radeon_device *rdev) 103957e252bfSMichael Neumann { 104057e252bfSMichael Neumann int ret = 0; 104157e252bfSMichael Neumann 104257e252bfSMichael Neumann ret = ni_patch_single_dependency_table_based_on_leakage(rdev, 104357e252bfSMichael Neumann &rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk); 104457e252bfSMichael Neumann 104557e252bfSMichael Neumann ret = ni_patch_single_dependency_table_based_on_leakage(rdev, 104657e252bfSMichael Neumann &rdev->pm.dpm.dyn_state.vddc_dependency_on_mclk); 104757e252bfSMichael Neumann return ret; 104857e252bfSMichael Neumann } 104957e252bfSMichael Neumann 105057e252bfSMichael Neumann static void ni_stop_dpm(struct radeon_device *rdev) 105157e252bfSMichael Neumann { 105257e252bfSMichael Neumann WREG32_P(GENERAL_PWRMGT, 0, ~GLOBAL_PWRMGT_EN); 105357e252bfSMichael Neumann } 105457e252bfSMichael Neumann 105557e252bfSMichael Neumann #if 0 105657e252bfSMichael Neumann static int ni_notify_hw_of_power_source(struct radeon_device *rdev, 105757e252bfSMichael Neumann bool ac_power) 105857e252bfSMichael Neumann { 105957e252bfSMichael Neumann if (ac_power) 106057e252bfSMichael Neumann return (rv770_send_msg_to_smc(rdev, PPSMC_MSG_RunningOnAC) == PPSMC_Result_OK) ? 
106157e252bfSMichael Neumann 0 : -EINVAL; 106257e252bfSMichael Neumann 106357e252bfSMichael Neumann return 0; 106457e252bfSMichael Neumann } 106557e252bfSMichael Neumann #endif 106657e252bfSMichael Neumann 106757e252bfSMichael Neumann static PPSMC_Result ni_send_msg_to_smc_with_parameter(struct radeon_device *rdev, 106857e252bfSMichael Neumann PPSMC_Msg msg, u32 parameter) 106957e252bfSMichael Neumann { 107057e252bfSMichael Neumann WREG32(SMC_SCRATCH0, parameter); 107157e252bfSMichael Neumann return rv770_send_msg_to_smc(rdev, msg); 107257e252bfSMichael Neumann } 107357e252bfSMichael Neumann 107457e252bfSMichael Neumann static int ni_restrict_performance_levels_before_switch(struct radeon_device *rdev) 107557e252bfSMichael Neumann { 107657e252bfSMichael Neumann if (rv770_send_msg_to_smc(rdev, PPSMC_MSG_NoForcedLevel) != PPSMC_Result_OK) 107757e252bfSMichael Neumann return -EINVAL; 107857e252bfSMichael Neumann 107957e252bfSMichael Neumann return (ni_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetEnabledLevels, 1) == PPSMC_Result_OK) ? 
108057e252bfSMichael Neumann 0 : -EINVAL; 108157e252bfSMichael Neumann } 108257e252bfSMichael Neumann 108357e252bfSMichael Neumann int ni_dpm_force_performance_level(struct radeon_device *rdev, 108457e252bfSMichael Neumann enum radeon_dpm_forced_level level) 108557e252bfSMichael Neumann { 108657e252bfSMichael Neumann if (level == RADEON_DPM_FORCED_LEVEL_HIGH) { 108757e252bfSMichael Neumann if (ni_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetEnabledLevels, 0) != PPSMC_Result_OK) 108857e252bfSMichael Neumann return -EINVAL; 108957e252bfSMichael Neumann 109057e252bfSMichael Neumann if (ni_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetForcedLevels, 1) != PPSMC_Result_OK) 109157e252bfSMichael Neumann return -EINVAL; 109257e252bfSMichael Neumann } else if (level == RADEON_DPM_FORCED_LEVEL_LOW) { 109357e252bfSMichael Neumann if (ni_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetForcedLevels, 0) != PPSMC_Result_OK) 109457e252bfSMichael Neumann return -EINVAL; 109557e252bfSMichael Neumann 109657e252bfSMichael Neumann if (ni_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetEnabledLevels, 1) != PPSMC_Result_OK) 109757e252bfSMichael Neumann return -EINVAL; 109857e252bfSMichael Neumann } else if (level == RADEON_DPM_FORCED_LEVEL_AUTO) { 109957e252bfSMichael Neumann if (ni_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetForcedLevels, 0) != PPSMC_Result_OK) 110057e252bfSMichael Neumann return -EINVAL; 110157e252bfSMichael Neumann 110257e252bfSMichael Neumann if (ni_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetEnabledLevels, 0) != PPSMC_Result_OK) 110357e252bfSMichael Neumann return -EINVAL; 110457e252bfSMichael Neumann } 110557e252bfSMichael Neumann 110657e252bfSMichael Neumann rdev->pm.dpm.forced_level = level; 110757e252bfSMichael Neumann 110857e252bfSMichael Neumann return 0; 110957e252bfSMichael Neumann } 111057e252bfSMichael Neumann 111157e252bfSMichael Neumann static void ni_stop_smc(struct radeon_device *rdev) 111257e252bfSMichael Neumann { 
	u32 tmp;
	int i;

	/* poll until the SMC reports not-busy, bounded by the usec timeout */
	for (i = 0; i < rdev->usec_timeout; i++) {
		tmp = RREG32(LB_SYNC_RESET_SEL) & LB_SYNC_RESET_SEL_MASK;
		if (tmp != 1)
			break;
		udelay(1);
	}

	udelay(100);

	r7xx_stop_smc(rdev);
}

/*
 * Read the SMC firmware header out of SMC SRAM and cache the offsets of
 * the tables (state, soft registers, mc registers, fan, arb, cac, spll)
 * that later setup code writes to.  Returns a negative error code if any
 * SRAM read fails.
 */
static int ni_process_firmware_header(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	struct ni_power_info *ni_pi = ni_get_pi(rdev);
	u32 tmp;
	int ret;

	ret = rv770_read_smc_sram_dword(rdev,
					NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
					NISLANDS_SMC_FIRMWARE_HEADER_stateTable,
					&tmp, pi->sram_end);

	if (ret)
		return ret;

	/* offsets are 16-bit SMC SRAM addresses */
	pi->state_table_start = (u16)tmp;

	ret = rv770_read_smc_sram_dword(rdev,
					NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
					NISLANDS_SMC_FIRMWARE_HEADER_softRegisters,
					&tmp, pi->sram_end);

	if (ret)
		return ret;

	pi->soft_regs_start = (u16)tmp;

	ret = rv770_read_smc_sram_dword(rdev,
					NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
					NISLANDS_SMC_FIRMWARE_HEADER_mcRegisterTable,
					&tmp, pi->sram_end);

	if (ret)
		return ret;

	eg_pi->mc_reg_table_start = (u16)tmp;

	ret = rv770_read_smc_sram_dword(rdev,
					NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
					NISLANDS_SMC_FIRMWARE_HEADER_fanTable,
					&tmp, pi->sram_end);

	if (ret)
		return ret;

	ni_pi->fan_table_start = (u16)tmp;

	ret = rv770_read_smc_sram_dword(rdev,
					NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
					NISLANDS_SMC_FIRMWARE_HEADER_mcArbDramAutoRefreshTable,
					&tmp, pi->sram_end);

	if (ret)
		return ret;

	ni_pi->arb_table_start = (u16)tmp;

	ret = rv770_read_smc_sram_dword(rdev,
					NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
					NISLANDS_SMC_FIRMWARE_HEADER_cacTable,
					&tmp, pi->sram_end);

	if (ret)
		return ret;

	ni_pi->cac_table_start = (u16)tmp;

	ret = rv770_read_smc_sram_dword(rdev,
					NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
					NISLANDS_SMC_FIRMWARE_HEADER_spllTable,
					&tmp, pi->sram_end);

	if (ret)
		return ret;

	ni_pi->spll_table_start = (u16)tmp;


	return ret;
}

/* Snapshot the current pll/clock control registers so they can be
 * restored or used as a baseline when building SMC state tables. */
static void ni_read_clock_registers(struct radeon_device *rdev)
{
	struct ni_power_info *ni_pi = ni_get_pi(rdev);

	ni_pi->clock_registers.cg_spll_func_cntl = RREG32(CG_SPLL_FUNC_CNTL);
	ni_pi->clock_registers.cg_spll_func_cntl_2 = RREG32(CG_SPLL_FUNC_CNTL_2);
	ni_pi->clock_registers.cg_spll_func_cntl_3 = RREG32(CG_SPLL_FUNC_CNTL_3);
	ni_pi->clock_registers.cg_spll_func_cntl_4 = RREG32(CG_SPLL_FUNC_CNTL_4);
	ni_pi->clock_registers.cg_spll_spread_spectrum = RREG32(CG_SPLL_SPREAD_SPECTRUM);
	ni_pi->clock_registers.cg_spll_spread_spectrum_2 = RREG32(CG_SPLL_SPREAD_SPECTRUM_2);
	ni_pi->clock_registers.mpll_ad_func_cntl = RREG32(MPLL_AD_FUNC_CNTL);
	ni_pi->clock_registers.mpll_ad_func_cntl_2 = RREG32(MPLL_AD_FUNC_CNTL_2);
	ni_pi->clock_registers.mpll_dq_func_cntl = RREG32(MPLL_DQ_FUNC_CNTL);
	ni_pi->clock_registers.mpll_dq_func_cntl_2 = RREG32(MPLL_DQ_FUNC_CNTL_2);
	ni_pi->clock_registers.mclk_pwrmgt_cntl = RREG32(MCLK_PWRMGT_CNTL);
	ni_pi->clock_registers.dll_cntl = RREG32(DLL_CNTL);
	ni_pi->clock_registers.mpll_ss1 = RREG32(MPLL_SS1);
	ni_pi->clock_registers.mpll_ss2 = RREG32(MPLL_SS2);
}

#if 0
static int ni_enter_ulp_state(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);

	if (pi->gfx_clock_gating) {
		WREG32_P(SCLK_PWRMGT_CNTL, 0, ~DYN_GFX_CLK_OFF_EN);
		WREG32_P(SCLK_PWRMGT_CNTL, GFX_CLK_FORCE_ON, ~GFX_CLK_FORCE_ON);
		WREG32_P(SCLK_PWRMGT_CNTL, 0, ~GFX_CLK_FORCE_ON);
		RREG32(GB_ADDR_CONFIG);
	}

	WREG32_P(SMC_MSG, HOST_SMC_MSG(PPSMC_MSG_SwitchToMinimumPower),
		 ~HOST_SMC_MSG_MASK);

	udelay(25000);

	return 0;
}
#endif

/*
 * Convert the platform response/timeout times into reference-clock ticks
 * and program them into the SMC soft registers.  Missing (zero) voltage
 * and backbias response times fall back to 1000us defaults.
 * NOTE(review): the /1600 scaling presumably converts us * 100kHz-units
 * of xclk into SMC delay units — confirm against the SMC spec.
 */
static void ni_program_response_times(struct radeon_device *rdev)
{
	u32 voltage_response_time, backbias_response_time, acpi_delay_time, vbi_time_out;
	u32 vddc_dly, bb_dly, acpi_dly, vbi_dly, mclk_switch_limit;
	u32 reference_clock;

	rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_mvdd_chg_time, 1);

	voltage_response_time = (u32)rdev->pm.dpm.voltage_response_time;
	backbias_response_time = (u32)rdev->pm.dpm.backbias_response_time;

	if (voltage_response_time == 0)
		voltage_response_time = 1000;

	if (backbias_response_time == 0)
		backbias_response_time = 1000;

	acpi_delay_time = 15000;
	vbi_time_out = 100000;

	reference_clock = radeon_get_xclk(rdev);

	vddc_dly = (voltage_response_time * reference_clock) / 1600;
	bb_dly = (backbias_response_time * reference_clock) / 1600;
	acpi_dly = (acpi_delay_time * reference_clock) / 1600;
	vbi_dly = (vbi_time_out * reference_clock) / 1600;

	mclk_switch_limit = (460 * reference_clock) / 100;

	rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_delay_vreg, vddc_dly);
	rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_delay_bbias, bb_dly);
	rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_delay_acpi, acpi_dly);
	rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_mclk_chg_timeout, vbi_dly);
	rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_mc_block_delay, 0xAA);
	rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_mclk_switch_lim, mclk_switch_limit);
}

static void ni_populate_smc_voltage_table(struct
radeon_device *rdev, 128957e252bfSMichael Neumann struct atom_voltage_table *voltage_table, 129057e252bfSMichael Neumann NISLANDS_SMC_STATETABLE *table) 129157e252bfSMichael Neumann { 129257e252bfSMichael Neumann unsigned int i; 129357e252bfSMichael Neumann 129457e252bfSMichael Neumann for (i = 0; i < voltage_table->count; i++) { 129557e252bfSMichael Neumann table->highSMIO[i] = 0; 129657e252bfSMichael Neumann table->lowSMIO[i] |= cpu_to_be32(voltage_table->entries[i].smio_low); 129757e252bfSMichael Neumann } 129857e252bfSMichael Neumann } 129957e252bfSMichael Neumann 130057e252bfSMichael Neumann static void ni_populate_smc_voltage_tables(struct radeon_device *rdev, 130157e252bfSMichael Neumann NISLANDS_SMC_STATETABLE *table) 130257e252bfSMichael Neumann { 130357e252bfSMichael Neumann struct rv7xx_power_info *pi = rv770_get_pi(rdev); 130457e252bfSMichael Neumann struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); 130557e252bfSMichael Neumann unsigned char i; 130657e252bfSMichael Neumann 130757e252bfSMichael Neumann if (eg_pi->vddc_voltage_table.count) { 130857e252bfSMichael Neumann ni_populate_smc_voltage_table(rdev, &eg_pi->vddc_voltage_table, table); 130957e252bfSMichael Neumann table->voltageMaskTable.highMask[NISLANDS_SMC_VOLTAGEMASK_VDDC] = 0; 131057e252bfSMichael Neumann table->voltageMaskTable.lowMask[NISLANDS_SMC_VOLTAGEMASK_VDDC] = 131157e252bfSMichael Neumann cpu_to_be32(eg_pi->vddc_voltage_table.mask_low); 131257e252bfSMichael Neumann 131357e252bfSMichael Neumann for (i = 0; i < eg_pi->vddc_voltage_table.count; i++) { 131457e252bfSMichael Neumann if (pi->max_vddc_in_table <= eg_pi->vddc_voltage_table.entries[i].value) { 131557e252bfSMichael Neumann table->maxVDDCIndexInPPTable = i; 131657e252bfSMichael Neumann break; 131757e252bfSMichael Neumann } 131857e252bfSMichael Neumann } 131957e252bfSMichael Neumann } 132057e252bfSMichael Neumann 132157e252bfSMichael Neumann if (eg_pi->vddci_voltage_table.count) { 132257e252bfSMichael Neumann 
ni_populate_smc_voltage_table(rdev, &eg_pi->vddci_voltage_table, table); 132357e252bfSMichael Neumann 132457e252bfSMichael Neumann table->voltageMaskTable.highMask[NISLANDS_SMC_VOLTAGEMASK_VDDCI] = 0; 132557e252bfSMichael Neumann table->voltageMaskTable.lowMask[NISLANDS_SMC_VOLTAGEMASK_VDDCI] = 132657e252bfSMichael Neumann cpu_to_be32(eg_pi->vddc_voltage_table.mask_low); 132757e252bfSMichael Neumann } 132857e252bfSMichael Neumann } 132957e252bfSMichael Neumann 133057e252bfSMichael Neumann static int ni_populate_voltage_value(struct radeon_device *rdev, 133157e252bfSMichael Neumann struct atom_voltage_table *table, 133257e252bfSMichael Neumann u16 value, 133357e252bfSMichael Neumann NISLANDS_SMC_VOLTAGE_VALUE *voltage) 133457e252bfSMichael Neumann { 133557e252bfSMichael Neumann unsigned int i; 133657e252bfSMichael Neumann 133757e252bfSMichael Neumann for (i = 0; i < table->count; i++) { 133857e252bfSMichael Neumann if (value <= table->entries[i].value) { 133957e252bfSMichael Neumann voltage->index = (u8)i; 134057e252bfSMichael Neumann voltage->value = cpu_to_be16(table->entries[i].value); 134157e252bfSMichael Neumann break; 134257e252bfSMichael Neumann } 134357e252bfSMichael Neumann } 134457e252bfSMichael Neumann 134557e252bfSMichael Neumann if (i >= table->count) 134657e252bfSMichael Neumann return -EINVAL; 134757e252bfSMichael Neumann 134857e252bfSMichael Neumann return 0; 134957e252bfSMichael Neumann } 135057e252bfSMichael Neumann 135157e252bfSMichael Neumann static void ni_populate_mvdd_value(struct radeon_device *rdev, 135257e252bfSMichael Neumann u32 mclk, 135357e252bfSMichael Neumann NISLANDS_SMC_VOLTAGE_VALUE *voltage) 135457e252bfSMichael Neumann { 135557e252bfSMichael Neumann struct rv7xx_power_info *pi = rv770_get_pi(rdev); 135657e252bfSMichael Neumann struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); 135757e252bfSMichael Neumann 135857e252bfSMichael Neumann if (!pi->mvdd_control) { 135957e252bfSMichael Neumann voltage->index = 
eg_pi->mvdd_high_index; 136057e252bfSMichael Neumann voltage->value = cpu_to_be16(MVDD_HIGH_VALUE); 136157e252bfSMichael Neumann return; 136257e252bfSMichael Neumann } 136357e252bfSMichael Neumann 136457e252bfSMichael Neumann if (mclk <= pi->mvdd_split_frequency) { 136557e252bfSMichael Neumann voltage->index = eg_pi->mvdd_low_index; 136657e252bfSMichael Neumann voltage->value = cpu_to_be16(MVDD_LOW_VALUE); 136757e252bfSMichael Neumann } else { 136857e252bfSMichael Neumann voltage->index = eg_pi->mvdd_high_index; 136957e252bfSMichael Neumann voltage->value = cpu_to_be16(MVDD_HIGH_VALUE); 137057e252bfSMichael Neumann } 137157e252bfSMichael Neumann } 137257e252bfSMichael Neumann 137357e252bfSMichael Neumann static int ni_get_std_voltage_value(struct radeon_device *rdev, 137457e252bfSMichael Neumann NISLANDS_SMC_VOLTAGE_VALUE *voltage, 137557e252bfSMichael Neumann u16 *std_voltage) 137657e252bfSMichael Neumann { 137757e252bfSMichael Neumann if (rdev->pm.dpm.dyn_state.cac_leakage_table.entries && 137857e252bfSMichael Neumann ((u32)voltage->index < rdev->pm.dpm.dyn_state.cac_leakage_table.count)) 137957e252bfSMichael Neumann *std_voltage = rdev->pm.dpm.dyn_state.cac_leakage_table.entries[voltage->index].vddc; 138057e252bfSMichael Neumann else 138157e252bfSMichael Neumann *std_voltage = be16_to_cpu(voltage->value); 138257e252bfSMichael Neumann 138357e252bfSMichael Neumann return 0; 138457e252bfSMichael Neumann } 138557e252bfSMichael Neumann 138657e252bfSMichael Neumann static void ni_populate_std_voltage_value(struct radeon_device *rdev, 138757e252bfSMichael Neumann u16 value, u8 index, 138857e252bfSMichael Neumann NISLANDS_SMC_VOLTAGE_VALUE *voltage) 138957e252bfSMichael Neumann { 139057e252bfSMichael Neumann voltage->index = index; 139157e252bfSMichael Neumann voltage->value = cpu_to_be16(value); 139257e252bfSMichael Neumann } 139357e252bfSMichael Neumann 139457e252bfSMichael Neumann static u32 ni_get_smc_power_scaling_factor(struct radeon_device *rdev) 
139557e252bfSMichael Neumann { 139657e252bfSMichael Neumann u32 xclk_period; 139757e252bfSMichael Neumann u32 xclk = radeon_get_xclk(rdev); 139857e252bfSMichael Neumann u32 tmp = RREG32(CG_CAC_CTRL) & TID_CNT_MASK; 139957e252bfSMichael Neumann 140057e252bfSMichael Neumann xclk_period = (1000000000UL / xclk); 140157e252bfSMichael Neumann xclk_period /= 10000UL; 140257e252bfSMichael Neumann 140357e252bfSMichael Neumann return tmp * xclk_period; 140457e252bfSMichael Neumann } 140557e252bfSMichael Neumann 140657e252bfSMichael Neumann static u32 ni_scale_power_for_smc(u32 power_in_watts, u32 scaling_factor) 140757e252bfSMichael Neumann { 140857e252bfSMichael Neumann return (power_in_watts * scaling_factor) << 2; 140957e252bfSMichael Neumann } 141057e252bfSMichael Neumann 141157e252bfSMichael Neumann static u32 ni_calculate_power_boost_limit(struct radeon_device *rdev, 141257e252bfSMichael Neumann struct radeon_ps *radeon_state, 141357e252bfSMichael Neumann u32 near_tdp_limit) 141457e252bfSMichael Neumann { 141557e252bfSMichael Neumann struct ni_ps *state = ni_get_ps(radeon_state); 141657e252bfSMichael Neumann struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); 141757e252bfSMichael Neumann struct ni_power_info *ni_pi = ni_get_pi(rdev); 141857e252bfSMichael Neumann u32 power_boost_limit = 0; 141957e252bfSMichael Neumann int ret; 142057e252bfSMichael Neumann 142157e252bfSMichael Neumann if (ni_pi->enable_power_containment && 142257e252bfSMichael Neumann ni_pi->use_power_boost_limit) { 142357e252bfSMichael Neumann NISLANDS_SMC_VOLTAGE_VALUE vddc; 142457e252bfSMichael Neumann u16 std_vddc_med; 142557e252bfSMichael Neumann u16 std_vddc_high; 142657e252bfSMichael Neumann u64 tmp, n, d; 142757e252bfSMichael Neumann 142857e252bfSMichael Neumann if (state->performance_level_count < 3) 142957e252bfSMichael Neumann return 0; 143057e252bfSMichael Neumann 143157e252bfSMichael Neumann ret = ni_populate_voltage_value(rdev, &eg_pi->vddc_voltage_table, 143257e252bfSMichael 
Neumann state->performance_levels[state->performance_level_count - 2].vddc, 143357e252bfSMichael Neumann &vddc); 143457e252bfSMichael Neumann if (ret) 143557e252bfSMichael Neumann return 0; 143657e252bfSMichael Neumann 143757e252bfSMichael Neumann ret = ni_get_std_voltage_value(rdev, &vddc, &std_vddc_med); 143857e252bfSMichael Neumann if (ret) 143957e252bfSMichael Neumann return 0; 144057e252bfSMichael Neumann 144157e252bfSMichael Neumann ret = ni_populate_voltage_value(rdev, &eg_pi->vddc_voltage_table, 144257e252bfSMichael Neumann state->performance_levels[state->performance_level_count - 1].vddc, 144357e252bfSMichael Neumann &vddc); 144457e252bfSMichael Neumann if (ret) 144557e252bfSMichael Neumann return 0; 144657e252bfSMichael Neumann 144757e252bfSMichael Neumann ret = ni_get_std_voltage_value(rdev, &vddc, &std_vddc_high); 144857e252bfSMichael Neumann if (ret) 144957e252bfSMichael Neumann return 0; 145057e252bfSMichael Neumann 145157e252bfSMichael Neumann n = ((u64)near_tdp_limit * ((u64)std_vddc_med * (u64)std_vddc_med) * 90); 145257e252bfSMichael Neumann d = ((u64)std_vddc_high * (u64)std_vddc_high * 100); 145357e252bfSMichael Neumann tmp = div64_u64(n, d); 145457e252bfSMichael Neumann 145557e252bfSMichael Neumann if (tmp >> 32) 145657e252bfSMichael Neumann return 0; 145757e252bfSMichael Neumann power_boost_limit = (u32)tmp; 145857e252bfSMichael Neumann } 145957e252bfSMichael Neumann 146057e252bfSMichael Neumann return power_boost_limit; 146157e252bfSMichael Neumann } 146257e252bfSMichael Neumann 146357e252bfSMichael Neumann static int ni_calculate_adjusted_tdp_limits(struct radeon_device *rdev, 146457e252bfSMichael Neumann bool adjust_polarity, 146557e252bfSMichael Neumann u32 tdp_adjustment, 146657e252bfSMichael Neumann u32 *tdp_limit, 146757e252bfSMichael Neumann u32 *near_tdp_limit) 146857e252bfSMichael Neumann { 146957e252bfSMichael Neumann if (tdp_adjustment > (u32)rdev->pm.dpm.tdp_od_limit) 147057e252bfSMichael Neumann return -EINVAL; 
147157e252bfSMichael Neumann 147257e252bfSMichael Neumann if (adjust_polarity) { 147357e252bfSMichael Neumann *tdp_limit = ((100 + tdp_adjustment) * rdev->pm.dpm.tdp_limit) / 100; 147457e252bfSMichael Neumann *near_tdp_limit = rdev->pm.dpm.near_tdp_limit + (*tdp_limit - rdev->pm.dpm.tdp_limit); 147557e252bfSMichael Neumann } else { 147657e252bfSMichael Neumann *tdp_limit = ((100 - tdp_adjustment) * rdev->pm.dpm.tdp_limit) / 100; 147757e252bfSMichael Neumann *near_tdp_limit = rdev->pm.dpm.near_tdp_limit - (rdev->pm.dpm.tdp_limit - *tdp_limit); 147857e252bfSMichael Neumann } 147957e252bfSMichael Neumann 148057e252bfSMichael Neumann return 0; 148157e252bfSMichael Neumann } 148257e252bfSMichael Neumann 148357e252bfSMichael Neumann static int ni_populate_smc_tdp_limits(struct radeon_device *rdev, 148457e252bfSMichael Neumann struct radeon_ps *radeon_state) 148557e252bfSMichael Neumann { 148657e252bfSMichael Neumann struct rv7xx_power_info *pi = rv770_get_pi(rdev); 148757e252bfSMichael Neumann struct ni_power_info *ni_pi = ni_get_pi(rdev); 148857e252bfSMichael Neumann 148957e252bfSMichael Neumann if (ni_pi->enable_power_containment) { 149057e252bfSMichael Neumann NISLANDS_SMC_STATETABLE *smc_table = &ni_pi->smc_statetable; 149157e252bfSMichael Neumann u32 scaling_factor = ni_get_smc_power_scaling_factor(rdev); 149257e252bfSMichael Neumann u32 tdp_limit; 149357e252bfSMichael Neumann u32 near_tdp_limit; 149457e252bfSMichael Neumann u32 power_boost_limit; 149557e252bfSMichael Neumann int ret; 149657e252bfSMichael Neumann 149757e252bfSMichael Neumann if (scaling_factor == 0) 149857e252bfSMichael Neumann return -EINVAL; 149957e252bfSMichael Neumann 150057e252bfSMichael Neumann memset(smc_table, 0, sizeof(NISLANDS_SMC_STATETABLE)); 150157e252bfSMichael Neumann 150257e252bfSMichael Neumann ret = ni_calculate_adjusted_tdp_limits(rdev, 150357e252bfSMichael Neumann false, /* ??? 
*/ 150457e252bfSMichael Neumann rdev->pm.dpm.tdp_adjustment, 150557e252bfSMichael Neumann &tdp_limit, 150657e252bfSMichael Neumann &near_tdp_limit); 150757e252bfSMichael Neumann if (ret) 150857e252bfSMichael Neumann return ret; 150957e252bfSMichael Neumann 151057e252bfSMichael Neumann power_boost_limit = ni_calculate_power_boost_limit(rdev, radeon_state, 151157e252bfSMichael Neumann near_tdp_limit); 151257e252bfSMichael Neumann 151357e252bfSMichael Neumann smc_table->dpm2Params.TDPLimit = 151457e252bfSMichael Neumann cpu_to_be32(ni_scale_power_for_smc(tdp_limit, scaling_factor)); 151557e252bfSMichael Neumann smc_table->dpm2Params.NearTDPLimit = 151657e252bfSMichael Neumann cpu_to_be32(ni_scale_power_for_smc(near_tdp_limit, scaling_factor)); 151757e252bfSMichael Neumann smc_table->dpm2Params.SafePowerLimit = 151857e252bfSMichael Neumann cpu_to_be32(ni_scale_power_for_smc((near_tdp_limit * NISLANDS_DPM2_TDP_SAFE_LIMIT_PERCENT) / 100, 151957e252bfSMichael Neumann scaling_factor)); 152057e252bfSMichael Neumann smc_table->dpm2Params.PowerBoostLimit = 152157e252bfSMichael Neumann cpu_to_be32(ni_scale_power_for_smc(power_boost_limit, scaling_factor)); 152257e252bfSMichael Neumann 152357e252bfSMichael Neumann ret = rv770_copy_bytes_to_smc(rdev, 152457e252bfSMichael Neumann (u16)(pi->state_table_start + offsetof(NISLANDS_SMC_STATETABLE, dpm2Params) + 152557e252bfSMichael Neumann offsetof(PP_NIslands_DPM2Parameters, TDPLimit)), 152657e252bfSMichael Neumann (u8 *)(&smc_table->dpm2Params.TDPLimit), 152757e252bfSMichael Neumann sizeof(u32) * 4, pi->sram_end); 152857e252bfSMichael Neumann if (ret) 152957e252bfSMichael Neumann return ret; 153057e252bfSMichael Neumann } 153157e252bfSMichael Neumann 153257e252bfSMichael Neumann return 0; 153357e252bfSMichael Neumann } 153457e252bfSMichael Neumann 153557e252bfSMichael Neumann int ni_copy_and_switch_arb_sets(struct radeon_device *rdev, 153657e252bfSMichael Neumann u32 arb_freq_src, u32 arb_freq_dest) 153757e252bfSMichael Neumann { 
153857e252bfSMichael Neumann u32 mc_arb_dram_timing; 153957e252bfSMichael Neumann u32 mc_arb_dram_timing2; 154057e252bfSMichael Neumann u32 burst_time; 154157e252bfSMichael Neumann u32 mc_cg_config; 154257e252bfSMichael Neumann 154357e252bfSMichael Neumann switch (arb_freq_src) { 154457e252bfSMichael Neumann case MC_CG_ARB_FREQ_F0: 154557e252bfSMichael Neumann mc_arb_dram_timing = RREG32(MC_ARB_DRAM_TIMING); 154657e252bfSMichael Neumann mc_arb_dram_timing2 = RREG32(MC_ARB_DRAM_TIMING2); 154757e252bfSMichael Neumann burst_time = (RREG32(MC_ARB_BURST_TIME) & STATE0_MASK) >> STATE0_SHIFT; 154857e252bfSMichael Neumann break; 154957e252bfSMichael Neumann case MC_CG_ARB_FREQ_F1: 155057e252bfSMichael Neumann mc_arb_dram_timing = RREG32(MC_ARB_DRAM_TIMING_1); 155157e252bfSMichael Neumann mc_arb_dram_timing2 = RREG32(MC_ARB_DRAM_TIMING2_1); 155257e252bfSMichael Neumann burst_time = (RREG32(MC_ARB_BURST_TIME) & STATE1_MASK) >> STATE1_SHIFT; 155357e252bfSMichael Neumann break; 155457e252bfSMichael Neumann case MC_CG_ARB_FREQ_F2: 155557e252bfSMichael Neumann mc_arb_dram_timing = RREG32(MC_ARB_DRAM_TIMING_2); 155657e252bfSMichael Neumann mc_arb_dram_timing2 = RREG32(MC_ARB_DRAM_TIMING2_2); 155757e252bfSMichael Neumann burst_time = (RREG32(MC_ARB_BURST_TIME) & STATE2_MASK) >> STATE2_SHIFT; 155857e252bfSMichael Neumann break; 155957e252bfSMichael Neumann case MC_CG_ARB_FREQ_F3: 156057e252bfSMichael Neumann mc_arb_dram_timing = RREG32(MC_ARB_DRAM_TIMING_3); 156157e252bfSMichael Neumann mc_arb_dram_timing2 = RREG32(MC_ARB_DRAM_TIMING2_3); 156257e252bfSMichael Neumann burst_time = (RREG32(MC_ARB_BURST_TIME) & STATE3_MASK) >> STATE3_SHIFT; 156357e252bfSMichael Neumann break; 156457e252bfSMichael Neumann default: 156557e252bfSMichael Neumann return -EINVAL; 156657e252bfSMichael Neumann } 156757e252bfSMichael Neumann 156857e252bfSMichael Neumann switch (arb_freq_dest) { 156957e252bfSMichael Neumann case MC_CG_ARB_FREQ_F0: 157057e252bfSMichael Neumann WREG32(MC_ARB_DRAM_TIMING, 
mc_arb_dram_timing); 157157e252bfSMichael Neumann WREG32(MC_ARB_DRAM_TIMING2, mc_arb_dram_timing2); 157257e252bfSMichael Neumann WREG32_P(MC_ARB_BURST_TIME, STATE0(burst_time), ~STATE0_MASK); 157357e252bfSMichael Neumann break; 157457e252bfSMichael Neumann case MC_CG_ARB_FREQ_F1: 157557e252bfSMichael Neumann WREG32(MC_ARB_DRAM_TIMING_1, mc_arb_dram_timing); 157657e252bfSMichael Neumann WREG32(MC_ARB_DRAM_TIMING2_1, mc_arb_dram_timing2); 157757e252bfSMichael Neumann WREG32_P(MC_ARB_BURST_TIME, STATE1(burst_time), ~STATE1_MASK); 157857e252bfSMichael Neumann break; 157957e252bfSMichael Neumann case MC_CG_ARB_FREQ_F2: 158057e252bfSMichael Neumann WREG32(MC_ARB_DRAM_TIMING_2, mc_arb_dram_timing); 158157e252bfSMichael Neumann WREG32(MC_ARB_DRAM_TIMING2_2, mc_arb_dram_timing2); 158257e252bfSMichael Neumann WREG32_P(MC_ARB_BURST_TIME, STATE2(burst_time), ~STATE2_MASK); 158357e252bfSMichael Neumann break; 158457e252bfSMichael Neumann case MC_CG_ARB_FREQ_F3: 158557e252bfSMichael Neumann WREG32(MC_ARB_DRAM_TIMING_3, mc_arb_dram_timing); 158657e252bfSMichael Neumann WREG32(MC_ARB_DRAM_TIMING2_3, mc_arb_dram_timing2); 158757e252bfSMichael Neumann WREG32_P(MC_ARB_BURST_TIME, STATE3(burst_time), ~STATE3_MASK); 158857e252bfSMichael Neumann break; 158957e252bfSMichael Neumann default: 159057e252bfSMichael Neumann return -EINVAL; 159157e252bfSMichael Neumann } 159257e252bfSMichael Neumann 159357e252bfSMichael Neumann mc_cg_config = RREG32(MC_CG_CONFIG) | 0x0000000F; 159457e252bfSMichael Neumann WREG32(MC_CG_CONFIG, mc_cg_config); 159557e252bfSMichael Neumann WREG32_P(MC_ARB_CG, CG_ARB_REQ(arb_freq_dest), ~CG_ARB_REQ_MASK); 159657e252bfSMichael Neumann 159757e252bfSMichael Neumann return 0; 159857e252bfSMichael Neumann } 159957e252bfSMichael Neumann 160057e252bfSMichael Neumann static int ni_init_arb_table_index(struct radeon_device *rdev) 160157e252bfSMichael Neumann { 160257e252bfSMichael Neumann struct rv7xx_power_info *pi = rv770_get_pi(rdev); 160357e252bfSMichael Neumann struct 
ni_power_info *ni_pi = ni_get_pi(rdev); 160457e252bfSMichael Neumann u32 tmp; 160557e252bfSMichael Neumann int ret; 160657e252bfSMichael Neumann 160757e252bfSMichael Neumann ret = rv770_read_smc_sram_dword(rdev, ni_pi->arb_table_start, 160857e252bfSMichael Neumann &tmp, pi->sram_end); 160957e252bfSMichael Neumann if (ret) 161057e252bfSMichael Neumann return ret; 161157e252bfSMichael Neumann 161257e252bfSMichael Neumann tmp &= 0x00FFFFFF; 161357e252bfSMichael Neumann tmp |= ((u32)MC_CG_ARB_FREQ_F1) << 24; 161457e252bfSMichael Neumann 161557e252bfSMichael Neumann return rv770_write_smc_sram_dword(rdev, ni_pi->arb_table_start, 161657e252bfSMichael Neumann tmp, pi->sram_end); 161757e252bfSMichael Neumann } 161857e252bfSMichael Neumann 161957e252bfSMichael Neumann static int ni_initial_switch_from_arb_f0_to_f1(struct radeon_device *rdev) 162057e252bfSMichael Neumann { 162157e252bfSMichael Neumann return ni_copy_and_switch_arb_sets(rdev, MC_CG_ARB_FREQ_F0, MC_CG_ARB_FREQ_F1); 162257e252bfSMichael Neumann } 162357e252bfSMichael Neumann 162457e252bfSMichael Neumann static int ni_force_switch_to_arb_f0(struct radeon_device *rdev) 162557e252bfSMichael Neumann { 162657e252bfSMichael Neumann struct rv7xx_power_info *pi = rv770_get_pi(rdev); 162757e252bfSMichael Neumann struct ni_power_info *ni_pi = ni_get_pi(rdev); 162857e252bfSMichael Neumann u32 tmp; 162957e252bfSMichael Neumann int ret; 163057e252bfSMichael Neumann 163157e252bfSMichael Neumann ret = rv770_read_smc_sram_dword(rdev, ni_pi->arb_table_start, 163257e252bfSMichael Neumann &tmp, pi->sram_end); 163357e252bfSMichael Neumann if (ret) 163457e252bfSMichael Neumann return ret; 163557e252bfSMichael Neumann 163657e252bfSMichael Neumann tmp = (tmp >> 24) & 0xff; 163757e252bfSMichael Neumann 163857e252bfSMichael Neumann if (tmp == MC_CG_ARB_FREQ_F0) 163957e252bfSMichael Neumann return 0; 164057e252bfSMichael Neumann 164157e252bfSMichael Neumann return ni_copy_and_switch_arb_sets(rdev, tmp, MC_CG_ARB_FREQ_F0); 
164257e252bfSMichael Neumann } 164357e252bfSMichael Neumann 164457e252bfSMichael Neumann static int ni_populate_memory_timing_parameters(struct radeon_device *rdev, 164557e252bfSMichael Neumann struct rv7xx_pl *pl, 164657e252bfSMichael Neumann SMC_NIslands_MCArbDramTimingRegisterSet *arb_regs) 164757e252bfSMichael Neumann { 164857e252bfSMichael Neumann u32 dram_timing; 164957e252bfSMichael Neumann u32 dram_timing2; 165057e252bfSMichael Neumann 165157e252bfSMichael Neumann arb_regs->mc_arb_rfsh_rate = 165257e252bfSMichael Neumann (u8)rv770_calculate_memory_refresh_rate(rdev, pl->sclk); 165357e252bfSMichael Neumann 165457e252bfSMichael Neumann 165557e252bfSMichael Neumann radeon_atom_set_engine_dram_timings(rdev, 165657e252bfSMichael Neumann pl->sclk, 165757e252bfSMichael Neumann pl->mclk); 165857e252bfSMichael Neumann 165957e252bfSMichael Neumann dram_timing = RREG32(MC_ARB_DRAM_TIMING); 166057e252bfSMichael Neumann dram_timing2 = RREG32(MC_ARB_DRAM_TIMING2); 166157e252bfSMichael Neumann 166257e252bfSMichael Neumann arb_regs->mc_arb_dram_timing = cpu_to_be32(dram_timing); 166357e252bfSMichael Neumann arb_regs->mc_arb_dram_timing2 = cpu_to_be32(dram_timing2); 166457e252bfSMichael Neumann 166557e252bfSMichael Neumann return 0; 166657e252bfSMichael Neumann } 166757e252bfSMichael Neumann 166857e252bfSMichael Neumann static int ni_do_program_memory_timing_parameters(struct radeon_device *rdev, 166957e252bfSMichael Neumann struct radeon_ps *radeon_state, 167057e252bfSMichael Neumann unsigned int first_arb_set) 167157e252bfSMichael Neumann { 167257e252bfSMichael Neumann struct rv7xx_power_info *pi = rv770_get_pi(rdev); 167357e252bfSMichael Neumann struct ni_power_info *ni_pi = ni_get_pi(rdev); 167457e252bfSMichael Neumann struct ni_ps *state = ni_get_ps(radeon_state); 167557e252bfSMichael Neumann SMC_NIslands_MCArbDramTimingRegisterSet arb_regs = { 0 }; 167657e252bfSMichael Neumann int i, ret = 0; 167757e252bfSMichael Neumann 167857e252bfSMichael Neumann for (i = 0; i < 
state->performance_level_count; i++) { 167957e252bfSMichael Neumann ret = ni_populate_memory_timing_parameters(rdev, &state->performance_levels[i], &arb_regs); 168057e252bfSMichael Neumann if (ret) 168157e252bfSMichael Neumann break; 168257e252bfSMichael Neumann 168357e252bfSMichael Neumann ret = rv770_copy_bytes_to_smc(rdev, 168457e252bfSMichael Neumann (u16)(ni_pi->arb_table_start + 168557e252bfSMichael Neumann offsetof(SMC_NIslands_MCArbDramTimingRegisters, data) + 168657e252bfSMichael Neumann sizeof(SMC_NIslands_MCArbDramTimingRegisterSet) * (first_arb_set + i)), 168757e252bfSMichael Neumann (u8 *)&arb_regs, 168857e252bfSMichael Neumann (u16)sizeof(SMC_NIslands_MCArbDramTimingRegisterSet), 168957e252bfSMichael Neumann pi->sram_end); 169057e252bfSMichael Neumann if (ret) 169157e252bfSMichael Neumann break; 169257e252bfSMichael Neumann } 169357e252bfSMichael Neumann return ret; 169457e252bfSMichael Neumann } 169557e252bfSMichael Neumann 169657e252bfSMichael Neumann static int ni_program_memory_timing_parameters(struct radeon_device *rdev, 169757e252bfSMichael Neumann struct radeon_ps *radeon_new_state) 169857e252bfSMichael Neumann { 169957e252bfSMichael Neumann return ni_do_program_memory_timing_parameters(rdev, radeon_new_state, 170057e252bfSMichael Neumann NISLANDS_DRIVER_STATE_ARB_INDEX); 170157e252bfSMichael Neumann } 170257e252bfSMichael Neumann 170357e252bfSMichael Neumann static void ni_populate_initial_mvdd_value(struct radeon_device *rdev, 170457e252bfSMichael Neumann struct NISLANDS_SMC_VOLTAGE_VALUE *voltage) 170557e252bfSMichael Neumann { 170657e252bfSMichael Neumann struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); 170757e252bfSMichael Neumann 170857e252bfSMichael Neumann voltage->index = eg_pi->mvdd_high_index; 170957e252bfSMichael Neumann voltage->value = cpu_to_be16(MVDD_HIGH_VALUE); 171057e252bfSMichael Neumann } 171157e252bfSMichael Neumann 171257e252bfSMichael Neumann static int ni_populate_smc_initial_state(struct radeon_device 
*rdev, 171357e252bfSMichael Neumann struct radeon_ps *radeon_initial_state, 171457e252bfSMichael Neumann NISLANDS_SMC_STATETABLE *table) 171557e252bfSMichael Neumann { 171657e252bfSMichael Neumann struct ni_ps *initial_state = ni_get_ps(radeon_initial_state); 171757e252bfSMichael Neumann struct rv7xx_power_info *pi = rv770_get_pi(rdev); 171857e252bfSMichael Neumann struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); 171957e252bfSMichael Neumann struct ni_power_info *ni_pi = ni_get_pi(rdev); 172057e252bfSMichael Neumann u32 reg; 172157e252bfSMichael Neumann int ret; 172257e252bfSMichael Neumann 172357e252bfSMichael Neumann table->initialState.levels[0].mclk.vMPLL_AD_FUNC_CNTL = 172457e252bfSMichael Neumann cpu_to_be32(ni_pi->clock_registers.mpll_ad_func_cntl); 172557e252bfSMichael Neumann table->initialState.levels[0].mclk.vMPLL_AD_FUNC_CNTL_2 = 172657e252bfSMichael Neumann cpu_to_be32(ni_pi->clock_registers.mpll_ad_func_cntl_2); 172757e252bfSMichael Neumann table->initialState.levels[0].mclk.vMPLL_DQ_FUNC_CNTL = 172857e252bfSMichael Neumann cpu_to_be32(ni_pi->clock_registers.mpll_dq_func_cntl); 172957e252bfSMichael Neumann table->initialState.levels[0].mclk.vMPLL_DQ_FUNC_CNTL_2 = 173057e252bfSMichael Neumann cpu_to_be32(ni_pi->clock_registers.mpll_dq_func_cntl_2); 173157e252bfSMichael Neumann table->initialState.levels[0].mclk.vMCLK_PWRMGT_CNTL = 173257e252bfSMichael Neumann cpu_to_be32(ni_pi->clock_registers.mclk_pwrmgt_cntl); 173357e252bfSMichael Neumann table->initialState.levels[0].mclk.vDLL_CNTL = 173457e252bfSMichael Neumann cpu_to_be32(ni_pi->clock_registers.dll_cntl); 173557e252bfSMichael Neumann table->initialState.levels[0].mclk.vMPLL_SS = 173657e252bfSMichael Neumann cpu_to_be32(ni_pi->clock_registers.mpll_ss1); 173757e252bfSMichael Neumann table->initialState.levels[0].mclk.vMPLL_SS2 = 173857e252bfSMichael Neumann cpu_to_be32(ni_pi->clock_registers.mpll_ss2); 173957e252bfSMichael Neumann table->initialState.levels[0].mclk.mclk_value = 
174057e252bfSMichael Neumann cpu_to_be32(initial_state->performance_levels[0].mclk); 174157e252bfSMichael Neumann 174257e252bfSMichael Neumann table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL = 174357e252bfSMichael Neumann cpu_to_be32(ni_pi->clock_registers.cg_spll_func_cntl); 174457e252bfSMichael Neumann table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_2 = 174557e252bfSMichael Neumann cpu_to_be32(ni_pi->clock_registers.cg_spll_func_cntl_2); 174657e252bfSMichael Neumann table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_3 = 174757e252bfSMichael Neumann cpu_to_be32(ni_pi->clock_registers.cg_spll_func_cntl_3); 174857e252bfSMichael Neumann table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_4 = 174957e252bfSMichael Neumann cpu_to_be32(ni_pi->clock_registers.cg_spll_func_cntl_4); 175057e252bfSMichael Neumann table->initialState.levels[0].sclk.vCG_SPLL_SPREAD_SPECTRUM = 175157e252bfSMichael Neumann cpu_to_be32(ni_pi->clock_registers.cg_spll_spread_spectrum); 175257e252bfSMichael Neumann table->initialState.levels[0].sclk.vCG_SPLL_SPREAD_SPECTRUM_2 = 175357e252bfSMichael Neumann cpu_to_be32(ni_pi->clock_registers.cg_spll_spread_spectrum_2); 175457e252bfSMichael Neumann table->initialState.levels[0].sclk.sclk_value = 175557e252bfSMichael Neumann cpu_to_be32(initial_state->performance_levels[0].sclk); 175657e252bfSMichael Neumann table->initialState.levels[0].arbRefreshState = 175757e252bfSMichael Neumann NISLANDS_INITIAL_STATE_ARB_INDEX; 175857e252bfSMichael Neumann 175957e252bfSMichael Neumann table->initialState.levels[0].ACIndex = 0; 176057e252bfSMichael Neumann 176157e252bfSMichael Neumann ret = ni_populate_voltage_value(rdev, &eg_pi->vddc_voltage_table, 176257e252bfSMichael Neumann initial_state->performance_levels[0].vddc, 176357e252bfSMichael Neumann &table->initialState.levels[0].vddc); 176457e252bfSMichael Neumann if (!ret) { 176557e252bfSMichael Neumann u16 std_vddc; 176657e252bfSMichael Neumann 176757e252bfSMichael Neumann ret = 
ni_get_std_voltage_value(rdev, 176857e252bfSMichael Neumann &table->initialState.levels[0].vddc, 176957e252bfSMichael Neumann &std_vddc); 177057e252bfSMichael Neumann if (!ret) 177157e252bfSMichael Neumann ni_populate_std_voltage_value(rdev, std_vddc, 177257e252bfSMichael Neumann table->initialState.levels[0].vddc.index, 177357e252bfSMichael Neumann &table->initialState.levels[0].std_vddc); 177457e252bfSMichael Neumann } 177557e252bfSMichael Neumann 177657e252bfSMichael Neumann if (eg_pi->vddci_control) 177757e252bfSMichael Neumann ni_populate_voltage_value(rdev, 177857e252bfSMichael Neumann &eg_pi->vddci_voltage_table, 177957e252bfSMichael Neumann initial_state->performance_levels[0].vddci, 178057e252bfSMichael Neumann &table->initialState.levels[0].vddci); 178157e252bfSMichael Neumann 178257e252bfSMichael Neumann ni_populate_initial_mvdd_value(rdev, &table->initialState.levels[0].mvdd); 178357e252bfSMichael Neumann 178457e252bfSMichael Neumann reg = CG_R(0xffff) | CG_L(0); 178557e252bfSMichael Neumann table->initialState.levels[0].aT = cpu_to_be32(reg); 178657e252bfSMichael Neumann 178757e252bfSMichael Neumann table->initialState.levels[0].bSP = cpu_to_be32(pi->dsp); 178857e252bfSMichael Neumann 178957e252bfSMichael Neumann if (pi->boot_in_gen2) 179057e252bfSMichael Neumann table->initialState.levels[0].gen2PCIE = 1; 179157e252bfSMichael Neumann else 179257e252bfSMichael Neumann table->initialState.levels[0].gen2PCIE = 0; 179357e252bfSMichael Neumann 179457e252bfSMichael Neumann if (pi->mem_gddr5) { 179557e252bfSMichael Neumann table->initialState.levels[0].strobeMode = 179657e252bfSMichael Neumann cypress_get_strobe_mode_settings(rdev, 179757e252bfSMichael Neumann initial_state->performance_levels[0].mclk); 179857e252bfSMichael Neumann 179957e252bfSMichael Neumann if (initial_state->performance_levels[0].mclk > pi->mclk_edc_enable_threshold) 180057e252bfSMichael Neumann table->initialState.levels[0].mcFlags = NISLANDS_SMC_MC_EDC_RD_FLAG | 
NISLANDS_SMC_MC_EDC_WR_FLAG; 180157e252bfSMichael Neumann else 180257e252bfSMichael Neumann table->initialState.levels[0].mcFlags = 0; 180357e252bfSMichael Neumann } 180457e252bfSMichael Neumann 180557e252bfSMichael Neumann table->initialState.levelCount = 1; 180657e252bfSMichael Neumann 180757e252bfSMichael Neumann table->initialState.flags |= PPSMC_SWSTATE_FLAG_DC; 180857e252bfSMichael Neumann 180957e252bfSMichael Neumann table->initialState.levels[0].dpm2.MaxPS = 0; 181057e252bfSMichael Neumann table->initialState.levels[0].dpm2.NearTDPDec = 0; 181157e252bfSMichael Neumann table->initialState.levels[0].dpm2.AboveSafeInc = 0; 181257e252bfSMichael Neumann table->initialState.levels[0].dpm2.BelowSafeInc = 0; 181357e252bfSMichael Neumann 181457e252bfSMichael Neumann reg = MIN_POWER_MASK | MAX_POWER_MASK; 181557e252bfSMichael Neumann table->initialState.levels[0].SQPowerThrottle = cpu_to_be32(reg); 181657e252bfSMichael Neumann 181757e252bfSMichael Neumann reg = MAX_POWER_DELTA_MASK | STI_SIZE_MASK | LTI_RATIO_MASK; 181857e252bfSMichael Neumann table->initialState.levels[0].SQPowerThrottle_2 = cpu_to_be32(reg); 181957e252bfSMichael Neumann 182057e252bfSMichael Neumann return 0; 182157e252bfSMichael Neumann } 182257e252bfSMichael Neumann 182357e252bfSMichael Neumann static int ni_populate_smc_acpi_state(struct radeon_device *rdev, 182457e252bfSMichael Neumann NISLANDS_SMC_STATETABLE *table) 182557e252bfSMichael Neumann { 182657e252bfSMichael Neumann struct rv7xx_power_info *pi = rv770_get_pi(rdev); 182757e252bfSMichael Neumann struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); 182857e252bfSMichael Neumann struct ni_power_info *ni_pi = ni_get_pi(rdev); 182957e252bfSMichael Neumann u32 mpll_ad_func_cntl = ni_pi->clock_registers.mpll_ad_func_cntl; 183057e252bfSMichael Neumann u32 mpll_ad_func_cntl_2 = ni_pi->clock_registers.mpll_ad_func_cntl_2; 183157e252bfSMichael Neumann u32 mpll_dq_func_cntl = ni_pi->clock_registers.mpll_dq_func_cntl; 183257e252bfSMichael 
Neumann u32 mpll_dq_func_cntl_2 = ni_pi->clock_registers.mpll_dq_func_cntl_2; 183357e252bfSMichael Neumann u32 spll_func_cntl = ni_pi->clock_registers.cg_spll_func_cntl; 183457e252bfSMichael Neumann u32 spll_func_cntl_2 = ni_pi->clock_registers.cg_spll_func_cntl_2; 183557e252bfSMichael Neumann u32 spll_func_cntl_3 = ni_pi->clock_registers.cg_spll_func_cntl_3; 183657e252bfSMichael Neumann u32 spll_func_cntl_4 = ni_pi->clock_registers.cg_spll_func_cntl_4; 183757e252bfSMichael Neumann u32 mclk_pwrmgt_cntl = ni_pi->clock_registers.mclk_pwrmgt_cntl; 183857e252bfSMichael Neumann u32 dll_cntl = ni_pi->clock_registers.dll_cntl; 183957e252bfSMichael Neumann u32 reg; 184057e252bfSMichael Neumann int ret; 184157e252bfSMichael Neumann 184257e252bfSMichael Neumann table->ACPIState = table->initialState; 184357e252bfSMichael Neumann 184457e252bfSMichael Neumann table->ACPIState.flags &= ~PPSMC_SWSTATE_FLAG_DC; 184557e252bfSMichael Neumann 184657e252bfSMichael Neumann if (pi->acpi_vddc) { 184757e252bfSMichael Neumann ret = ni_populate_voltage_value(rdev, 184857e252bfSMichael Neumann &eg_pi->vddc_voltage_table, 184957e252bfSMichael Neumann pi->acpi_vddc, &table->ACPIState.levels[0].vddc); 185057e252bfSMichael Neumann if (!ret) { 185157e252bfSMichael Neumann u16 std_vddc; 185257e252bfSMichael Neumann 185357e252bfSMichael Neumann ret = ni_get_std_voltage_value(rdev, 185457e252bfSMichael Neumann &table->ACPIState.levels[0].vddc, &std_vddc); 185557e252bfSMichael Neumann if (!ret) 185657e252bfSMichael Neumann ni_populate_std_voltage_value(rdev, std_vddc, 185757e252bfSMichael Neumann table->ACPIState.levels[0].vddc.index, 185857e252bfSMichael Neumann &table->ACPIState.levels[0].std_vddc); 185957e252bfSMichael Neumann } 186057e252bfSMichael Neumann 186157e252bfSMichael Neumann if (pi->pcie_gen2) { 186257e252bfSMichael Neumann if (pi->acpi_pcie_gen2) 186357e252bfSMichael Neumann table->ACPIState.levels[0].gen2PCIE = 1; 186457e252bfSMichael Neumann else 186557e252bfSMichael Neumann 
table->ACPIState.levels[0].gen2PCIE = 0; 186657e252bfSMichael Neumann } else { 186757e252bfSMichael Neumann table->ACPIState.levels[0].gen2PCIE = 0; 186857e252bfSMichael Neumann } 186957e252bfSMichael Neumann } else { 187057e252bfSMichael Neumann ret = ni_populate_voltage_value(rdev, 187157e252bfSMichael Neumann &eg_pi->vddc_voltage_table, 187257e252bfSMichael Neumann pi->min_vddc_in_table, 187357e252bfSMichael Neumann &table->ACPIState.levels[0].vddc); 187457e252bfSMichael Neumann if (!ret) { 187557e252bfSMichael Neumann u16 std_vddc; 187657e252bfSMichael Neumann 187757e252bfSMichael Neumann ret = ni_get_std_voltage_value(rdev, 187857e252bfSMichael Neumann &table->ACPIState.levels[0].vddc, 187957e252bfSMichael Neumann &std_vddc); 188057e252bfSMichael Neumann if (!ret) 188157e252bfSMichael Neumann ni_populate_std_voltage_value(rdev, std_vddc, 188257e252bfSMichael Neumann table->ACPIState.levels[0].vddc.index, 188357e252bfSMichael Neumann &table->ACPIState.levels[0].std_vddc); 188457e252bfSMichael Neumann } 188557e252bfSMichael Neumann table->ACPIState.levels[0].gen2PCIE = 0; 188657e252bfSMichael Neumann } 188757e252bfSMichael Neumann 188857e252bfSMichael Neumann if (eg_pi->acpi_vddci) { 188957e252bfSMichael Neumann if (eg_pi->vddci_control) 189057e252bfSMichael Neumann ni_populate_voltage_value(rdev, 189157e252bfSMichael Neumann &eg_pi->vddci_voltage_table, 189257e252bfSMichael Neumann eg_pi->acpi_vddci, 189357e252bfSMichael Neumann &table->ACPIState.levels[0].vddci); 189457e252bfSMichael Neumann } 189557e252bfSMichael Neumann 189657e252bfSMichael Neumann 189757e252bfSMichael Neumann mpll_ad_func_cntl &= ~PDNB; 189857e252bfSMichael Neumann 189957e252bfSMichael Neumann mpll_ad_func_cntl_2 |= BIAS_GEN_PDNB | RESET_EN; 190057e252bfSMichael Neumann 190157e252bfSMichael Neumann if (pi->mem_gddr5) 190257e252bfSMichael Neumann mpll_dq_func_cntl &= ~PDNB; 190357e252bfSMichael Neumann mpll_dq_func_cntl_2 |= BIAS_GEN_PDNB | RESET_EN | BYPASS; 190457e252bfSMichael Neumann 
190557e252bfSMichael Neumann 190657e252bfSMichael Neumann mclk_pwrmgt_cntl |= (MRDCKA0_RESET | 190757e252bfSMichael Neumann MRDCKA1_RESET | 190857e252bfSMichael Neumann MRDCKB0_RESET | 190957e252bfSMichael Neumann MRDCKB1_RESET | 191057e252bfSMichael Neumann MRDCKC0_RESET | 191157e252bfSMichael Neumann MRDCKC1_RESET | 191257e252bfSMichael Neumann MRDCKD0_RESET | 191357e252bfSMichael Neumann MRDCKD1_RESET); 191457e252bfSMichael Neumann 191557e252bfSMichael Neumann mclk_pwrmgt_cntl &= ~(MRDCKA0_PDNB | 191657e252bfSMichael Neumann MRDCKA1_PDNB | 191757e252bfSMichael Neumann MRDCKB0_PDNB | 191857e252bfSMichael Neumann MRDCKB1_PDNB | 191957e252bfSMichael Neumann MRDCKC0_PDNB | 192057e252bfSMichael Neumann MRDCKC1_PDNB | 192157e252bfSMichael Neumann MRDCKD0_PDNB | 192257e252bfSMichael Neumann MRDCKD1_PDNB); 192357e252bfSMichael Neumann 192457e252bfSMichael Neumann dll_cntl |= (MRDCKA0_BYPASS | 192557e252bfSMichael Neumann MRDCKA1_BYPASS | 192657e252bfSMichael Neumann MRDCKB0_BYPASS | 192757e252bfSMichael Neumann MRDCKB1_BYPASS | 192857e252bfSMichael Neumann MRDCKC0_BYPASS | 192957e252bfSMichael Neumann MRDCKC1_BYPASS | 193057e252bfSMichael Neumann MRDCKD0_BYPASS | 193157e252bfSMichael Neumann MRDCKD1_BYPASS); 193257e252bfSMichael Neumann 193357e252bfSMichael Neumann spll_func_cntl_2 &= ~SCLK_MUX_SEL_MASK; 193457e252bfSMichael Neumann spll_func_cntl_2 |= SCLK_MUX_SEL(4); 193557e252bfSMichael Neumann 193657e252bfSMichael Neumann table->ACPIState.levels[0].mclk.vMPLL_AD_FUNC_CNTL = cpu_to_be32(mpll_ad_func_cntl); 193757e252bfSMichael Neumann table->ACPIState.levels[0].mclk.vMPLL_AD_FUNC_CNTL_2 = cpu_to_be32(mpll_ad_func_cntl_2); 193857e252bfSMichael Neumann table->ACPIState.levels[0].mclk.vMPLL_DQ_FUNC_CNTL = cpu_to_be32(mpll_dq_func_cntl); 193957e252bfSMichael Neumann table->ACPIState.levels[0].mclk.vMPLL_DQ_FUNC_CNTL_2 = cpu_to_be32(mpll_dq_func_cntl_2); 194057e252bfSMichael Neumann table->ACPIState.levels[0].mclk.vMCLK_PWRMGT_CNTL = cpu_to_be32(mclk_pwrmgt_cntl); 
194157e252bfSMichael Neumann table->ACPIState.levels[0].mclk.vDLL_CNTL = cpu_to_be32(dll_cntl); 194257e252bfSMichael Neumann 194357e252bfSMichael Neumann table->ACPIState.levels[0].mclk.mclk_value = 0; 194457e252bfSMichael Neumann 194557e252bfSMichael Neumann table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL = cpu_to_be32(spll_func_cntl); 194657e252bfSMichael Neumann table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_2 = cpu_to_be32(spll_func_cntl_2); 194757e252bfSMichael Neumann table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_3 = cpu_to_be32(spll_func_cntl_3); 194857e252bfSMichael Neumann table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_4 = cpu_to_be32(spll_func_cntl_4); 194957e252bfSMichael Neumann 195057e252bfSMichael Neumann table->ACPIState.levels[0].sclk.sclk_value = 0; 195157e252bfSMichael Neumann 195257e252bfSMichael Neumann ni_populate_mvdd_value(rdev, 0, &table->ACPIState.levels[0].mvdd); 195357e252bfSMichael Neumann 195457e252bfSMichael Neumann if (eg_pi->dynamic_ac_timing) 195557e252bfSMichael Neumann table->ACPIState.levels[0].ACIndex = 1; 195657e252bfSMichael Neumann 195757e252bfSMichael Neumann table->ACPIState.levels[0].dpm2.MaxPS = 0; 195857e252bfSMichael Neumann table->ACPIState.levels[0].dpm2.NearTDPDec = 0; 195957e252bfSMichael Neumann table->ACPIState.levels[0].dpm2.AboveSafeInc = 0; 196057e252bfSMichael Neumann table->ACPIState.levels[0].dpm2.BelowSafeInc = 0; 196157e252bfSMichael Neumann 196257e252bfSMichael Neumann reg = MIN_POWER_MASK | MAX_POWER_MASK; 196357e252bfSMichael Neumann table->ACPIState.levels[0].SQPowerThrottle = cpu_to_be32(reg); 196457e252bfSMichael Neumann 196557e252bfSMichael Neumann reg = MAX_POWER_DELTA_MASK | STI_SIZE_MASK | LTI_RATIO_MASK; 196657e252bfSMichael Neumann table->ACPIState.levels[0].SQPowerThrottle_2 = cpu_to_be32(reg); 196757e252bfSMichael Neumann 196857e252bfSMichael Neumann return 0; 196957e252bfSMichael Neumann } 197057e252bfSMichael Neumann 197157e252bfSMichael Neumann static int 
ni_init_smc_table(struct radeon_device *rdev) 197257e252bfSMichael Neumann { 197357e252bfSMichael Neumann struct rv7xx_power_info *pi = rv770_get_pi(rdev); 197457e252bfSMichael Neumann struct ni_power_info *ni_pi = ni_get_pi(rdev); 197557e252bfSMichael Neumann int ret; 197657e252bfSMichael Neumann struct radeon_ps *radeon_boot_state = rdev->pm.dpm.boot_ps; 197757e252bfSMichael Neumann NISLANDS_SMC_STATETABLE *table = &ni_pi->smc_statetable; 197857e252bfSMichael Neumann 197957e252bfSMichael Neumann memset(table, 0, sizeof(NISLANDS_SMC_STATETABLE)); 198057e252bfSMichael Neumann 198157e252bfSMichael Neumann ni_populate_smc_voltage_tables(rdev, table); 198257e252bfSMichael Neumann 198357e252bfSMichael Neumann switch (rdev->pm.int_thermal_type) { 198457e252bfSMichael Neumann case THERMAL_TYPE_NI: 198557e252bfSMichael Neumann case THERMAL_TYPE_EMC2103_WITH_INTERNAL: 198657e252bfSMichael Neumann table->thermalProtectType = PPSMC_THERMAL_PROTECT_TYPE_INTERNAL; 198757e252bfSMichael Neumann break; 198857e252bfSMichael Neumann case THERMAL_TYPE_NONE: 198957e252bfSMichael Neumann table->thermalProtectType = PPSMC_THERMAL_PROTECT_TYPE_NONE; 199057e252bfSMichael Neumann break; 199157e252bfSMichael Neumann default: 199257e252bfSMichael Neumann table->thermalProtectType = PPSMC_THERMAL_PROTECT_TYPE_EXTERNAL; 199357e252bfSMichael Neumann break; 199457e252bfSMichael Neumann } 199557e252bfSMichael Neumann 199657e252bfSMichael Neumann if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_HARDWAREDC) 199757e252bfSMichael Neumann table->systemFlags |= PPSMC_SYSTEMFLAG_GPIO_DC; 199857e252bfSMichael Neumann 199957e252bfSMichael Neumann if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_REGULATOR_HOT) 200057e252bfSMichael Neumann table->systemFlags |= PPSMC_SYSTEMFLAG_REGULATOR_HOT; 200157e252bfSMichael Neumann 200257e252bfSMichael Neumann if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_STEPVDDC) 200357e252bfSMichael Neumann table->systemFlags |= PPSMC_SYSTEMFLAG_STEPVDDC; 
200457e252bfSMichael Neumann 200557e252bfSMichael Neumann if (pi->mem_gddr5) 200657e252bfSMichael Neumann table->systemFlags |= PPSMC_SYSTEMFLAG_GDDR5; 200757e252bfSMichael Neumann 200857e252bfSMichael Neumann ret = ni_populate_smc_initial_state(rdev, radeon_boot_state, table); 200957e252bfSMichael Neumann if (ret) 201057e252bfSMichael Neumann return ret; 201157e252bfSMichael Neumann 201257e252bfSMichael Neumann ret = ni_populate_smc_acpi_state(rdev, table); 201357e252bfSMichael Neumann if (ret) 201457e252bfSMichael Neumann return ret; 201557e252bfSMichael Neumann 201657e252bfSMichael Neumann table->driverState = table->initialState; 201757e252bfSMichael Neumann 201857e252bfSMichael Neumann table->ULVState = table->initialState; 201957e252bfSMichael Neumann 202057e252bfSMichael Neumann ret = ni_do_program_memory_timing_parameters(rdev, radeon_boot_state, 202157e252bfSMichael Neumann NISLANDS_INITIAL_STATE_ARB_INDEX); 202257e252bfSMichael Neumann if (ret) 202357e252bfSMichael Neumann return ret; 202457e252bfSMichael Neumann 202557e252bfSMichael Neumann return rv770_copy_bytes_to_smc(rdev, pi->state_table_start, (u8 *)table, 202657e252bfSMichael Neumann sizeof(NISLANDS_SMC_STATETABLE), pi->sram_end); 202757e252bfSMichael Neumann } 202857e252bfSMichael Neumann 202957e252bfSMichael Neumann static int ni_calculate_sclk_params(struct radeon_device *rdev, 203057e252bfSMichael Neumann u32 engine_clock, 203157e252bfSMichael Neumann NISLANDS_SMC_SCLK_VALUE *sclk) 203257e252bfSMichael Neumann { 203357e252bfSMichael Neumann struct rv7xx_power_info *pi = rv770_get_pi(rdev); 203457e252bfSMichael Neumann struct ni_power_info *ni_pi = ni_get_pi(rdev); 203557e252bfSMichael Neumann struct atom_clock_dividers dividers; 203657e252bfSMichael Neumann u32 spll_func_cntl = ni_pi->clock_registers.cg_spll_func_cntl; 203757e252bfSMichael Neumann u32 spll_func_cntl_2 = ni_pi->clock_registers.cg_spll_func_cntl_2; 203857e252bfSMichael Neumann u32 spll_func_cntl_3 = 
ni_pi->clock_registers.cg_spll_func_cntl_3; 203957e252bfSMichael Neumann u32 spll_func_cntl_4 = ni_pi->clock_registers.cg_spll_func_cntl_4; 204057e252bfSMichael Neumann u32 cg_spll_spread_spectrum = ni_pi->clock_registers.cg_spll_spread_spectrum; 204157e252bfSMichael Neumann u32 cg_spll_spread_spectrum_2 = ni_pi->clock_registers.cg_spll_spread_spectrum_2; 204257e252bfSMichael Neumann u64 tmp; 204357e252bfSMichael Neumann u32 reference_clock = rdev->clock.spll.reference_freq; 204457e252bfSMichael Neumann u32 reference_divider; 204557e252bfSMichael Neumann u32 fbdiv; 204657e252bfSMichael Neumann int ret; 204757e252bfSMichael Neumann 204857e252bfSMichael Neumann ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM, 204957e252bfSMichael Neumann engine_clock, false, ÷rs); 205057e252bfSMichael Neumann if (ret) 205157e252bfSMichael Neumann return ret; 205257e252bfSMichael Neumann 205357e252bfSMichael Neumann reference_divider = 1 + dividers.ref_div; 205457e252bfSMichael Neumann 205557e252bfSMichael Neumann 205657e252bfSMichael Neumann tmp = (u64) engine_clock * reference_divider * dividers.post_div * 16834; 205757e252bfSMichael Neumann do_div(tmp, reference_clock); 205857e252bfSMichael Neumann fbdiv = (u32) tmp; 205957e252bfSMichael Neumann 206057e252bfSMichael Neumann spll_func_cntl &= ~(SPLL_PDIV_A_MASK | SPLL_REF_DIV_MASK); 206157e252bfSMichael Neumann spll_func_cntl |= SPLL_REF_DIV(dividers.ref_div); 206257e252bfSMichael Neumann spll_func_cntl |= SPLL_PDIV_A(dividers.post_div); 206357e252bfSMichael Neumann 206457e252bfSMichael Neumann spll_func_cntl_2 &= ~SCLK_MUX_SEL_MASK; 206557e252bfSMichael Neumann spll_func_cntl_2 |= SCLK_MUX_SEL(2); 206657e252bfSMichael Neumann 206757e252bfSMichael Neumann spll_func_cntl_3 &= ~SPLL_FB_DIV_MASK; 206857e252bfSMichael Neumann spll_func_cntl_3 |= SPLL_FB_DIV(fbdiv); 206957e252bfSMichael Neumann spll_func_cntl_3 |= SPLL_DITHEN; 207057e252bfSMichael Neumann 207157e252bfSMichael Neumann if (pi->sclk_ss) { 
207257e252bfSMichael Neumann struct radeon_atom_ss ss; 207357e252bfSMichael Neumann u32 vco_freq = engine_clock * dividers.post_div; 207457e252bfSMichael Neumann 207557e252bfSMichael Neumann if (radeon_atombios_get_asic_ss_info(rdev, &ss, 207657e252bfSMichael Neumann ASIC_INTERNAL_ENGINE_SS, vco_freq)) { 207757e252bfSMichael Neumann u32 clk_s = reference_clock * 5 / (reference_divider * ss.rate); 207857e252bfSMichael Neumann u32 clk_v = 4 * ss.percentage * fbdiv / (clk_s * 10000); 207957e252bfSMichael Neumann 208057e252bfSMichael Neumann cg_spll_spread_spectrum &= ~CLK_S_MASK; 208157e252bfSMichael Neumann cg_spll_spread_spectrum |= CLK_S(clk_s); 208257e252bfSMichael Neumann cg_spll_spread_spectrum |= SSEN; 208357e252bfSMichael Neumann 208457e252bfSMichael Neumann cg_spll_spread_spectrum_2 &= ~CLK_V_MASK; 208557e252bfSMichael Neumann cg_spll_spread_spectrum_2 |= CLK_V(clk_v); 208657e252bfSMichael Neumann } 208757e252bfSMichael Neumann } 208857e252bfSMichael Neumann 208957e252bfSMichael Neumann sclk->sclk_value = engine_clock; 209057e252bfSMichael Neumann sclk->vCG_SPLL_FUNC_CNTL = spll_func_cntl; 209157e252bfSMichael Neumann sclk->vCG_SPLL_FUNC_CNTL_2 = spll_func_cntl_2; 209257e252bfSMichael Neumann sclk->vCG_SPLL_FUNC_CNTL_3 = spll_func_cntl_3; 209357e252bfSMichael Neumann sclk->vCG_SPLL_FUNC_CNTL_4 = spll_func_cntl_4; 209457e252bfSMichael Neumann sclk->vCG_SPLL_SPREAD_SPECTRUM = cg_spll_spread_spectrum; 209557e252bfSMichael Neumann sclk->vCG_SPLL_SPREAD_SPECTRUM_2 = cg_spll_spread_spectrum_2; 209657e252bfSMichael Neumann 209757e252bfSMichael Neumann return 0; 209857e252bfSMichael Neumann } 209957e252bfSMichael Neumann 210057e252bfSMichael Neumann static int ni_populate_sclk_value(struct radeon_device *rdev, 210157e252bfSMichael Neumann u32 engine_clock, 210257e252bfSMichael Neumann NISLANDS_SMC_SCLK_VALUE *sclk) 210357e252bfSMichael Neumann { 210457e252bfSMichael Neumann NISLANDS_SMC_SCLK_VALUE sclk_tmp; 210557e252bfSMichael Neumann int ret; 210657e252bfSMichael 
Neumann 210757e252bfSMichael Neumann ret = ni_calculate_sclk_params(rdev, engine_clock, &sclk_tmp); 210857e252bfSMichael Neumann if (!ret) { 210957e252bfSMichael Neumann sclk->sclk_value = cpu_to_be32(sclk_tmp.sclk_value); 211057e252bfSMichael Neumann sclk->vCG_SPLL_FUNC_CNTL = cpu_to_be32(sclk_tmp.vCG_SPLL_FUNC_CNTL); 211157e252bfSMichael Neumann sclk->vCG_SPLL_FUNC_CNTL_2 = cpu_to_be32(sclk_tmp.vCG_SPLL_FUNC_CNTL_2); 211257e252bfSMichael Neumann sclk->vCG_SPLL_FUNC_CNTL_3 = cpu_to_be32(sclk_tmp.vCG_SPLL_FUNC_CNTL_3); 211357e252bfSMichael Neumann sclk->vCG_SPLL_FUNC_CNTL_4 = cpu_to_be32(sclk_tmp.vCG_SPLL_FUNC_CNTL_4); 211457e252bfSMichael Neumann sclk->vCG_SPLL_SPREAD_SPECTRUM = cpu_to_be32(sclk_tmp.vCG_SPLL_SPREAD_SPECTRUM); 211557e252bfSMichael Neumann sclk->vCG_SPLL_SPREAD_SPECTRUM_2 = cpu_to_be32(sclk_tmp.vCG_SPLL_SPREAD_SPECTRUM_2); 211657e252bfSMichael Neumann } 211757e252bfSMichael Neumann 211857e252bfSMichael Neumann return ret; 211957e252bfSMichael Neumann } 212057e252bfSMichael Neumann 212157e252bfSMichael Neumann static int ni_init_smc_spll_table(struct radeon_device *rdev) 212257e252bfSMichael Neumann { 212357e252bfSMichael Neumann struct rv7xx_power_info *pi = rv770_get_pi(rdev); 212457e252bfSMichael Neumann struct ni_power_info *ni_pi = ni_get_pi(rdev); 212557e252bfSMichael Neumann SMC_NISLANDS_SPLL_DIV_TABLE *spll_table; 212657e252bfSMichael Neumann NISLANDS_SMC_SCLK_VALUE sclk_params; 212757e252bfSMichael Neumann u32 fb_div; 212857e252bfSMichael Neumann u32 p_div; 212957e252bfSMichael Neumann u32 clk_s; 213057e252bfSMichael Neumann u32 clk_v; 213157e252bfSMichael Neumann u32 sclk = 0; 213257e252bfSMichael Neumann int i, ret; 213357e252bfSMichael Neumann u32 tmp; 213457e252bfSMichael Neumann 213557e252bfSMichael Neumann if (ni_pi->spll_table_start == 0) 213657e252bfSMichael Neumann return -EINVAL; 213757e252bfSMichael Neumann 213857e252bfSMichael Neumann spll_table = kzalloc(sizeof(SMC_NISLANDS_SPLL_DIV_TABLE), GFP_KERNEL); 213957e252bfSMichael 
Neumann if (spll_table == NULL) 214057e252bfSMichael Neumann return -ENOMEM; 214157e252bfSMichael Neumann 214257e252bfSMichael Neumann for (i = 0; i < 256; i++) { 214357e252bfSMichael Neumann ret = ni_calculate_sclk_params(rdev, sclk, &sclk_params); 214457e252bfSMichael Neumann if (ret) 214557e252bfSMichael Neumann break; 214657e252bfSMichael Neumann 214757e252bfSMichael Neumann p_div = (sclk_params.vCG_SPLL_FUNC_CNTL & SPLL_PDIV_A_MASK) >> SPLL_PDIV_A_SHIFT; 214857e252bfSMichael Neumann fb_div = (sclk_params.vCG_SPLL_FUNC_CNTL_3 & SPLL_FB_DIV_MASK) >> SPLL_FB_DIV_SHIFT; 214957e252bfSMichael Neumann clk_s = (sclk_params.vCG_SPLL_SPREAD_SPECTRUM & CLK_S_MASK) >> CLK_S_SHIFT; 215057e252bfSMichael Neumann clk_v = (sclk_params.vCG_SPLL_SPREAD_SPECTRUM_2 & CLK_V_MASK) >> CLK_V_SHIFT; 215157e252bfSMichael Neumann 215257e252bfSMichael Neumann fb_div &= ~0x00001FFF; 215357e252bfSMichael Neumann fb_div >>= 1; 215457e252bfSMichael Neumann clk_v >>= 6; 215557e252bfSMichael Neumann 215657e252bfSMichael Neumann if (p_div & ~(SMC_NISLANDS_SPLL_DIV_TABLE_PDIV_MASK >> SMC_NISLANDS_SPLL_DIV_TABLE_PDIV_SHIFT)) 215757e252bfSMichael Neumann ret = -EINVAL; 215857e252bfSMichael Neumann 215957e252bfSMichael Neumann if (clk_s & ~(SMC_NISLANDS_SPLL_DIV_TABLE_CLKS_MASK >> SMC_NISLANDS_SPLL_DIV_TABLE_CLKS_SHIFT)) 216057e252bfSMichael Neumann ret = -EINVAL; 216157e252bfSMichael Neumann 216257e252bfSMichael Neumann if (clk_s & ~(SMC_NISLANDS_SPLL_DIV_TABLE_CLKS_MASK >> SMC_NISLANDS_SPLL_DIV_TABLE_CLKS_SHIFT)) 216357e252bfSMichael Neumann ret = -EINVAL; 216457e252bfSMichael Neumann 216557e252bfSMichael Neumann if (clk_v & ~(SMC_NISLANDS_SPLL_DIV_TABLE_CLKV_MASK >> SMC_NISLANDS_SPLL_DIV_TABLE_CLKV_SHIFT)) 216657e252bfSMichael Neumann ret = -EINVAL; 216757e252bfSMichael Neumann 216857e252bfSMichael Neumann if (ret) 216957e252bfSMichael Neumann break; 217057e252bfSMichael Neumann 217157e252bfSMichael Neumann tmp = ((fb_div << SMC_NISLANDS_SPLL_DIV_TABLE_FBDIV_SHIFT) & 
SMC_NISLANDS_SPLL_DIV_TABLE_FBDIV_MASK) | 217257e252bfSMichael Neumann ((p_div << SMC_NISLANDS_SPLL_DIV_TABLE_PDIV_SHIFT) & SMC_NISLANDS_SPLL_DIV_TABLE_PDIV_MASK); 217357e252bfSMichael Neumann spll_table->freq[i] = cpu_to_be32(tmp); 217457e252bfSMichael Neumann 217557e252bfSMichael Neumann tmp = ((clk_v << SMC_NISLANDS_SPLL_DIV_TABLE_CLKV_SHIFT) & SMC_NISLANDS_SPLL_DIV_TABLE_CLKV_MASK) | 217657e252bfSMichael Neumann ((clk_s << SMC_NISLANDS_SPLL_DIV_TABLE_CLKS_SHIFT) & SMC_NISLANDS_SPLL_DIV_TABLE_CLKS_MASK); 217757e252bfSMichael Neumann spll_table->ss[i] = cpu_to_be32(tmp); 217857e252bfSMichael Neumann 217957e252bfSMichael Neumann sclk += 512; 218057e252bfSMichael Neumann } 218157e252bfSMichael Neumann 218257e252bfSMichael Neumann if (!ret) 218357e252bfSMichael Neumann ret = rv770_copy_bytes_to_smc(rdev, ni_pi->spll_table_start, (u8 *)spll_table, 218457e252bfSMichael Neumann sizeof(SMC_NISLANDS_SPLL_DIV_TABLE), pi->sram_end); 218557e252bfSMichael Neumann 218657e252bfSMichael Neumann kfree(spll_table); 218757e252bfSMichael Neumann 218857e252bfSMichael Neumann return ret; 218957e252bfSMichael Neumann } 219057e252bfSMichael Neumann 219157e252bfSMichael Neumann static int ni_populate_mclk_value(struct radeon_device *rdev, 219257e252bfSMichael Neumann u32 engine_clock, 219357e252bfSMichael Neumann u32 memory_clock, 219457e252bfSMichael Neumann NISLANDS_SMC_MCLK_VALUE *mclk, 219557e252bfSMichael Neumann bool strobe_mode, 219657e252bfSMichael Neumann bool dll_state_on) 219757e252bfSMichael Neumann { 219857e252bfSMichael Neumann struct rv7xx_power_info *pi = rv770_get_pi(rdev); 219957e252bfSMichael Neumann struct ni_power_info *ni_pi = ni_get_pi(rdev); 220057e252bfSMichael Neumann u32 mpll_ad_func_cntl = ni_pi->clock_registers.mpll_ad_func_cntl; 220157e252bfSMichael Neumann u32 mpll_ad_func_cntl_2 = ni_pi->clock_registers.mpll_ad_func_cntl_2; 220257e252bfSMichael Neumann u32 mpll_dq_func_cntl = ni_pi->clock_registers.mpll_dq_func_cntl; 220357e252bfSMichael Neumann u32 
mpll_dq_func_cntl_2 = ni_pi->clock_registers.mpll_dq_func_cntl_2; 220457e252bfSMichael Neumann u32 mclk_pwrmgt_cntl = ni_pi->clock_registers.mclk_pwrmgt_cntl; 220557e252bfSMichael Neumann u32 dll_cntl = ni_pi->clock_registers.dll_cntl; 220657e252bfSMichael Neumann u32 mpll_ss1 = ni_pi->clock_registers.mpll_ss1; 220757e252bfSMichael Neumann u32 mpll_ss2 = ni_pi->clock_registers.mpll_ss2; 220857e252bfSMichael Neumann struct atom_clock_dividers dividers; 220957e252bfSMichael Neumann u32 ibias; 221057e252bfSMichael Neumann u32 dll_speed; 221157e252bfSMichael Neumann int ret; 221257e252bfSMichael Neumann u32 mc_seq_misc7; 221357e252bfSMichael Neumann 221457e252bfSMichael Neumann ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_MEMORY_PLL_PARAM, 221557e252bfSMichael Neumann memory_clock, strobe_mode, ÷rs); 221657e252bfSMichael Neumann if (ret) 221757e252bfSMichael Neumann return ret; 221857e252bfSMichael Neumann 221957e252bfSMichael Neumann if (!strobe_mode) { 222057e252bfSMichael Neumann mc_seq_misc7 = RREG32(MC_SEQ_MISC7); 222157e252bfSMichael Neumann 222257e252bfSMichael Neumann if (mc_seq_misc7 & 0x8000000) 222357e252bfSMichael Neumann dividers.post_div = 1; 222457e252bfSMichael Neumann } 222557e252bfSMichael Neumann 222657e252bfSMichael Neumann ibias = cypress_map_clkf_to_ibias(rdev, dividers.whole_fb_div); 222757e252bfSMichael Neumann 222857e252bfSMichael Neumann mpll_ad_func_cntl &= ~(CLKR_MASK | 222957e252bfSMichael Neumann YCLK_POST_DIV_MASK | 223057e252bfSMichael Neumann CLKF_MASK | 223157e252bfSMichael Neumann CLKFRAC_MASK | 223257e252bfSMichael Neumann IBIAS_MASK); 223357e252bfSMichael Neumann mpll_ad_func_cntl |= CLKR(dividers.ref_div); 223457e252bfSMichael Neumann mpll_ad_func_cntl |= YCLK_POST_DIV(dividers.post_div); 223557e252bfSMichael Neumann mpll_ad_func_cntl |= CLKF(dividers.whole_fb_div); 223657e252bfSMichael Neumann mpll_ad_func_cntl |= CLKFRAC(dividers.frac_fb_div); 223757e252bfSMichael Neumann mpll_ad_func_cntl |= IBIAS(ibias); 
223857e252bfSMichael Neumann 223957e252bfSMichael Neumann if (dividers.vco_mode) 224057e252bfSMichael Neumann mpll_ad_func_cntl_2 |= VCO_MODE; 224157e252bfSMichael Neumann else 224257e252bfSMichael Neumann mpll_ad_func_cntl_2 &= ~VCO_MODE; 224357e252bfSMichael Neumann 224457e252bfSMichael Neumann if (pi->mem_gddr5) { 224557e252bfSMichael Neumann mpll_dq_func_cntl &= ~(CLKR_MASK | 224657e252bfSMichael Neumann YCLK_POST_DIV_MASK | 224757e252bfSMichael Neumann CLKF_MASK | 224857e252bfSMichael Neumann CLKFRAC_MASK | 224957e252bfSMichael Neumann IBIAS_MASK); 225057e252bfSMichael Neumann mpll_dq_func_cntl |= CLKR(dividers.ref_div); 225157e252bfSMichael Neumann mpll_dq_func_cntl |= YCLK_POST_DIV(dividers.post_div); 225257e252bfSMichael Neumann mpll_dq_func_cntl |= CLKF(dividers.whole_fb_div); 225357e252bfSMichael Neumann mpll_dq_func_cntl |= CLKFRAC(dividers.frac_fb_div); 225457e252bfSMichael Neumann mpll_dq_func_cntl |= IBIAS(ibias); 225557e252bfSMichael Neumann 225657e252bfSMichael Neumann if (strobe_mode) 225757e252bfSMichael Neumann mpll_dq_func_cntl &= ~PDNB; 225857e252bfSMichael Neumann else 225957e252bfSMichael Neumann mpll_dq_func_cntl |= PDNB; 226057e252bfSMichael Neumann 226157e252bfSMichael Neumann if (dividers.vco_mode) 226257e252bfSMichael Neumann mpll_dq_func_cntl_2 |= VCO_MODE; 226357e252bfSMichael Neumann else 226457e252bfSMichael Neumann mpll_dq_func_cntl_2 &= ~VCO_MODE; 226557e252bfSMichael Neumann } 226657e252bfSMichael Neumann 226757e252bfSMichael Neumann if (pi->mclk_ss) { 226857e252bfSMichael Neumann struct radeon_atom_ss ss; 226957e252bfSMichael Neumann u32 vco_freq = memory_clock * dividers.post_div; 227057e252bfSMichael Neumann 227157e252bfSMichael Neumann if (radeon_atombios_get_asic_ss_info(rdev, &ss, 227257e252bfSMichael Neumann ASIC_INTERNAL_MEMORY_SS, vco_freq)) { 227357e252bfSMichael Neumann u32 reference_clock = rdev->clock.mpll.reference_freq; 227457e252bfSMichael Neumann u32 decoded_ref = 
rv740_get_decoded_reference_divider(dividers.ref_div); 227557e252bfSMichael Neumann u32 clk_s = reference_clock * 5 / (decoded_ref * ss.rate); 227657e252bfSMichael Neumann u32 clk_v = ss.percentage * 227757e252bfSMichael Neumann (0x4000 * dividers.whole_fb_div + 0x800 * dividers.frac_fb_div) / (clk_s * 625); 227857e252bfSMichael Neumann 227957e252bfSMichael Neumann mpll_ss1 &= ~CLKV_MASK; 228057e252bfSMichael Neumann mpll_ss1 |= CLKV(clk_v); 228157e252bfSMichael Neumann 228257e252bfSMichael Neumann mpll_ss2 &= ~CLKS_MASK; 228357e252bfSMichael Neumann mpll_ss2 |= CLKS(clk_s); 228457e252bfSMichael Neumann } 228557e252bfSMichael Neumann } 228657e252bfSMichael Neumann 228757e252bfSMichael Neumann dll_speed = rv740_get_dll_speed(pi->mem_gddr5, 228857e252bfSMichael Neumann memory_clock); 228957e252bfSMichael Neumann 229057e252bfSMichael Neumann mclk_pwrmgt_cntl &= ~DLL_SPEED_MASK; 229157e252bfSMichael Neumann mclk_pwrmgt_cntl |= DLL_SPEED(dll_speed); 229257e252bfSMichael Neumann if (dll_state_on) 229357e252bfSMichael Neumann mclk_pwrmgt_cntl |= (MRDCKA0_PDNB | 229457e252bfSMichael Neumann MRDCKA1_PDNB | 229557e252bfSMichael Neumann MRDCKB0_PDNB | 229657e252bfSMichael Neumann MRDCKB1_PDNB | 229757e252bfSMichael Neumann MRDCKC0_PDNB | 229857e252bfSMichael Neumann MRDCKC1_PDNB | 229957e252bfSMichael Neumann MRDCKD0_PDNB | 230057e252bfSMichael Neumann MRDCKD1_PDNB); 230157e252bfSMichael Neumann else 230257e252bfSMichael Neumann mclk_pwrmgt_cntl &= ~(MRDCKA0_PDNB | 230357e252bfSMichael Neumann MRDCKA1_PDNB | 230457e252bfSMichael Neumann MRDCKB0_PDNB | 230557e252bfSMichael Neumann MRDCKB1_PDNB | 230657e252bfSMichael Neumann MRDCKC0_PDNB | 230757e252bfSMichael Neumann MRDCKC1_PDNB | 230857e252bfSMichael Neumann MRDCKD0_PDNB | 230957e252bfSMichael Neumann MRDCKD1_PDNB); 231057e252bfSMichael Neumann 231157e252bfSMichael Neumann 231257e252bfSMichael Neumann mclk->mclk_value = cpu_to_be32(memory_clock); 231357e252bfSMichael Neumann mclk->vMPLL_AD_FUNC_CNTL = 
cpu_to_be32(mpll_ad_func_cntl); 231457e252bfSMichael Neumann mclk->vMPLL_AD_FUNC_CNTL_2 = cpu_to_be32(mpll_ad_func_cntl_2); 231557e252bfSMichael Neumann mclk->vMPLL_DQ_FUNC_CNTL = cpu_to_be32(mpll_dq_func_cntl); 231657e252bfSMichael Neumann mclk->vMPLL_DQ_FUNC_CNTL_2 = cpu_to_be32(mpll_dq_func_cntl_2); 231757e252bfSMichael Neumann mclk->vMCLK_PWRMGT_CNTL = cpu_to_be32(mclk_pwrmgt_cntl); 231857e252bfSMichael Neumann mclk->vDLL_CNTL = cpu_to_be32(dll_cntl); 231957e252bfSMichael Neumann mclk->vMPLL_SS = cpu_to_be32(mpll_ss1); 232057e252bfSMichael Neumann mclk->vMPLL_SS2 = cpu_to_be32(mpll_ss2); 232157e252bfSMichael Neumann 232257e252bfSMichael Neumann return 0; 232357e252bfSMichael Neumann } 232457e252bfSMichael Neumann 232557e252bfSMichael Neumann static void ni_populate_smc_sp(struct radeon_device *rdev, 232657e252bfSMichael Neumann struct radeon_ps *radeon_state, 232757e252bfSMichael Neumann NISLANDS_SMC_SWSTATE *smc_state) 232857e252bfSMichael Neumann { 232957e252bfSMichael Neumann struct ni_ps *ps = ni_get_ps(radeon_state); 233057e252bfSMichael Neumann struct rv7xx_power_info *pi = rv770_get_pi(rdev); 233157e252bfSMichael Neumann int i; 233257e252bfSMichael Neumann 233357e252bfSMichael Neumann for (i = 0; i < ps->performance_level_count - 1; i++) 233457e252bfSMichael Neumann smc_state->levels[i].bSP = cpu_to_be32(pi->dsp); 233557e252bfSMichael Neumann 233657e252bfSMichael Neumann smc_state->levels[ps->performance_level_count - 1].bSP = 233757e252bfSMichael Neumann cpu_to_be32(pi->psp); 233857e252bfSMichael Neumann } 233957e252bfSMichael Neumann 234057e252bfSMichael Neumann static int ni_convert_power_level_to_smc(struct radeon_device *rdev, 234157e252bfSMichael Neumann struct rv7xx_pl *pl, 234257e252bfSMichael Neumann NISLANDS_SMC_HW_PERFORMANCE_LEVEL *level) 234357e252bfSMichael Neumann { 234457e252bfSMichael Neumann struct rv7xx_power_info *pi = rv770_get_pi(rdev); 234557e252bfSMichael Neumann struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); 
234657e252bfSMichael Neumann struct ni_power_info *ni_pi = ni_get_pi(rdev); 234757e252bfSMichael Neumann int ret; 234857e252bfSMichael Neumann bool dll_state_on; 234957e252bfSMichael Neumann u16 std_vddc; 235057e252bfSMichael Neumann u32 tmp = RREG32(DC_STUTTER_CNTL); 235157e252bfSMichael Neumann 235257e252bfSMichael Neumann level->gen2PCIE = pi->pcie_gen2 ? 235357e252bfSMichael Neumann ((pl->flags & ATOM_PPLIB_R600_FLAGS_PCIEGEN2) ? 1 : 0) : 0; 235457e252bfSMichael Neumann 235557e252bfSMichael Neumann ret = ni_populate_sclk_value(rdev, pl->sclk, &level->sclk); 235657e252bfSMichael Neumann if (ret) 235757e252bfSMichael Neumann return ret; 235857e252bfSMichael Neumann 235957e252bfSMichael Neumann level->mcFlags = 0; 236057e252bfSMichael Neumann if (pi->mclk_stutter_mode_threshold && 236157e252bfSMichael Neumann (pl->mclk <= pi->mclk_stutter_mode_threshold) && 236257e252bfSMichael Neumann !eg_pi->uvd_enabled && 236357e252bfSMichael Neumann (tmp & DC_STUTTER_ENABLE_A) && 236457e252bfSMichael Neumann (tmp & DC_STUTTER_ENABLE_B)) 236557e252bfSMichael Neumann level->mcFlags |= NISLANDS_SMC_MC_STUTTER_EN; 236657e252bfSMichael Neumann 236757e252bfSMichael Neumann if (pi->mem_gddr5) { 236857e252bfSMichael Neumann if (pl->mclk > pi->mclk_edc_enable_threshold) 236957e252bfSMichael Neumann level->mcFlags |= NISLANDS_SMC_MC_EDC_RD_FLAG; 237057e252bfSMichael Neumann if (pl->mclk > eg_pi->mclk_edc_wr_enable_threshold) 237157e252bfSMichael Neumann level->mcFlags |= NISLANDS_SMC_MC_EDC_WR_FLAG; 237257e252bfSMichael Neumann 237357e252bfSMichael Neumann level->strobeMode = cypress_get_strobe_mode_settings(rdev, pl->mclk); 237457e252bfSMichael Neumann 237557e252bfSMichael Neumann if (level->strobeMode & NISLANDS_SMC_STROBE_ENABLE) { 237657e252bfSMichael Neumann if (cypress_get_mclk_frequency_ratio(rdev, pl->mclk, true) >= 237757e252bfSMichael Neumann ((RREG32(MC_SEQ_MISC7) >> 16) & 0xf)) 237857e252bfSMichael Neumann dll_state_on = ((RREG32(MC_SEQ_MISC5) >> 1) & 0x1) ? 
true : false; 237957e252bfSMichael Neumann else 238057e252bfSMichael Neumann dll_state_on = ((RREG32(MC_SEQ_MISC6) >> 1) & 0x1) ? true : false; 238157e252bfSMichael Neumann } else { 238257e252bfSMichael Neumann dll_state_on = false; 238357e252bfSMichael Neumann if (pl->mclk > ni_pi->mclk_rtt_mode_threshold) 238457e252bfSMichael Neumann level->mcFlags |= NISLANDS_SMC_MC_RTT_ENABLE; 238557e252bfSMichael Neumann } 238657e252bfSMichael Neumann 238757e252bfSMichael Neumann ret = ni_populate_mclk_value(rdev, pl->sclk, pl->mclk, 238857e252bfSMichael Neumann &level->mclk, 238957e252bfSMichael Neumann (level->strobeMode & NISLANDS_SMC_STROBE_ENABLE) != 0, 239057e252bfSMichael Neumann dll_state_on); 239157e252bfSMichael Neumann } else 239257e252bfSMichael Neumann ret = ni_populate_mclk_value(rdev, pl->sclk, pl->mclk, &level->mclk, 1, 1); 239357e252bfSMichael Neumann 239457e252bfSMichael Neumann if (ret) 239557e252bfSMichael Neumann return ret; 239657e252bfSMichael Neumann 239757e252bfSMichael Neumann ret = ni_populate_voltage_value(rdev, &eg_pi->vddc_voltage_table, 239857e252bfSMichael Neumann pl->vddc, &level->vddc); 239957e252bfSMichael Neumann if (ret) 240057e252bfSMichael Neumann return ret; 240157e252bfSMichael Neumann 240257e252bfSMichael Neumann ret = ni_get_std_voltage_value(rdev, &level->vddc, &std_vddc); 240357e252bfSMichael Neumann if (ret) 240457e252bfSMichael Neumann return ret; 240557e252bfSMichael Neumann 240657e252bfSMichael Neumann ni_populate_std_voltage_value(rdev, std_vddc, 240757e252bfSMichael Neumann level->vddc.index, &level->std_vddc); 240857e252bfSMichael Neumann 240957e252bfSMichael Neumann if (eg_pi->vddci_control) { 241057e252bfSMichael Neumann ret = ni_populate_voltage_value(rdev, &eg_pi->vddci_voltage_table, 241157e252bfSMichael Neumann pl->vddci, &level->vddci); 241257e252bfSMichael Neumann if (ret) 241357e252bfSMichael Neumann return ret; 241457e252bfSMichael Neumann } 241557e252bfSMichael Neumann 241657e252bfSMichael Neumann 
ni_populate_mvdd_value(rdev, pl->mclk, &level->mvdd); 241757e252bfSMichael Neumann 241857e252bfSMichael Neumann return ret; 241957e252bfSMichael Neumann } 242057e252bfSMichael Neumann 242157e252bfSMichael Neumann static int ni_populate_smc_t(struct radeon_device *rdev, 242257e252bfSMichael Neumann struct radeon_ps *radeon_state, 242357e252bfSMichael Neumann NISLANDS_SMC_SWSTATE *smc_state) 242457e252bfSMichael Neumann { 242557e252bfSMichael Neumann struct rv7xx_power_info *pi = rv770_get_pi(rdev); 242657e252bfSMichael Neumann struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); 242757e252bfSMichael Neumann struct ni_ps *state = ni_get_ps(radeon_state); 242857e252bfSMichael Neumann u32 a_t; 242957e252bfSMichael Neumann u32 t_l, t_h; 243057e252bfSMichael Neumann u32 high_bsp; 243157e252bfSMichael Neumann int i, ret; 243257e252bfSMichael Neumann 243357e252bfSMichael Neumann if (state->performance_level_count >= 9) 243457e252bfSMichael Neumann return -EINVAL; 243557e252bfSMichael Neumann 243657e252bfSMichael Neumann if (state->performance_level_count < 2) { 243757e252bfSMichael Neumann a_t = CG_R(0xffff) | CG_L(0); 243857e252bfSMichael Neumann smc_state->levels[0].aT = cpu_to_be32(a_t); 243957e252bfSMichael Neumann return 0; 244057e252bfSMichael Neumann } 244157e252bfSMichael Neumann 244257e252bfSMichael Neumann smc_state->levels[0].aT = cpu_to_be32(0); 244357e252bfSMichael Neumann 244457e252bfSMichael Neumann for (i = 0; i <= state->performance_level_count - 2; i++) { 244557e252bfSMichael Neumann if (eg_pi->uvd_enabled) 244657e252bfSMichael Neumann ret = r600_calculate_at( 244757e252bfSMichael Neumann 1000 * (i * (eg_pi->smu_uvd_hs ? 
2 : 8) + 2), 244857e252bfSMichael Neumann 100 * R600_AH_DFLT, 244957e252bfSMichael Neumann state->performance_levels[i + 1].sclk, 245057e252bfSMichael Neumann state->performance_levels[i].sclk, 245157e252bfSMichael Neumann &t_l, 245257e252bfSMichael Neumann &t_h); 245357e252bfSMichael Neumann else 245457e252bfSMichael Neumann ret = r600_calculate_at( 245557e252bfSMichael Neumann 1000 * (i + 1), 245657e252bfSMichael Neumann 100 * R600_AH_DFLT, 245757e252bfSMichael Neumann state->performance_levels[i + 1].sclk, 245857e252bfSMichael Neumann state->performance_levels[i].sclk, 245957e252bfSMichael Neumann &t_l, 246057e252bfSMichael Neumann &t_h); 246157e252bfSMichael Neumann 246257e252bfSMichael Neumann if (ret) { 246357e252bfSMichael Neumann t_h = (i + 1) * 1000 - 50 * R600_AH_DFLT; 246457e252bfSMichael Neumann t_l = (i + 1) * 1000 + 50 * R600_AH_DFLT; 246557e252bfSMichael Neumann } 246657e252bfSMichael Neumann 246757e252bfSMichael Neumann a_t = be32_to_cpu(smc_state->levels[i].aT) & ~CG_R_MASK; 246857e252bfSMichael Neumann a_t |= CG_R(t_l * pi->bsp / 20000); 246957e252bfSMichael Neumann smc_state->levels[i].aT = cpu_to_be32(a_t); 247057e252bfSMichael Neumann 247157e252bfSMichael Neumann high_bsp = (i == state->performance_level_count - 2) ? 
247257e252bfSMichael Neumann pi->pbsp : pi->bsp; 247357e252bfSMichael Neumann 247457e252bfSMichael Neumann a_t = CG_R(0xffff) | CG_L(t_h * high_bsp / 20000); 247557e252bfSMichael Neumann smc_state->levels[i + 1].aT = cpu_to_be32(a_t); 247657e252bfSMichael Neumann } 247757e252bfSMichael Neumann 247857e252bfSMichael Neumann return 0; 247957e252bfSMichael Neumann } 248057e252bfSMichael Neumann 248157e252bfSMichael Neumann static int ni_populate_power_containment_values(struct radeon_device *rdev, 248257e252bfSMichael Neumann struct radeon_ps *radeon_state, 248357e252bfSMichael Neumann NISLANDS_SMC_SWSTATE *smc_state) 248457e252bfSMichael Neumann { 248557e252bfSMichael Neumann struct rv7xx_power_info *pi = rv770_get_pi(rdev); 248657e252bfSMichael Neumann struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); 248757e252bfSMichael Neumann struct ni_power_info *ni_pi = ni_get_pi(rdev); 248857e252bfSMichael Neumann struct ni_ps *state = ni_get_ps(radeon_state); 248957e252bfSMichael Neumann u32 prev_sclk; 249057e252bfSMichael Neumann u32 max_sclk; 249157e252bfSMichael Neumann u32 min_sclk; 249257e252bfSMichael Neumann int i, ret; 249357e252bfSMichael Neumann u32 tdp_limit; 249457e252bfSMichael Neumann u32 near_tdp_limit; 249557e252bfSMichael Neumann u32 power_boost_limit; 249657e252bfSMichael Neumann u8 max_ps_percent; 249757e252bfSMichael Neumann 249857e252bfSMichael Neumann if (ni_pi->enable_power_containment == false) 249957e252bfSMichael Neumann return 0; 250057e252bfSMichael Neumann 250157e252bfSMichael Neumann if (state->performance_level_count == 0) 250257e252bfSMichael Neumann return -EINVAL; 250357e252bfSMichael Neumann 250457e252bfSMichael Neumann if (smc_state->levelCount != state->performance_level_count) 250557e252bfSMichael Neumann return -EINVAL; 250657e252bfSMichael Neumann 250757e252bfSMichael Neumann ret = ni_calculate_adjusted_tdp_limits(rdev, 250857e252bfSMichael Neumann false, /* ??? 
*/ 250957e252bfSMichael Neumann rdev->pm.dpm.tdp_adjustment, 251057e252bfSMichael Neumann &tdp_limit, 251157e252bfSMichael Neumann &near_tdp_limit); 251257e252bfSMichael Neumann if (ret) 251357e252bfSMichael Neumann return ret; 251457e252bfSMichael Neumann 251557e252bfSMichael Neumann power_boost_limit = ni_calculate_power_boost_limit(rdev, radeon_state, near_tdp_limit); 251657e252bfSMichael Neumann 251757e252bfSMichael Neumann ret = rv770_write_smc_sram_dword(rdev, 251857e252bfSMichael Neumann pi->state_table_start + 251957e252bfSMichael Neumann offsetof(NISLANDS_SMC_STATETABLE, dpm2Params) + 252057e252bfSMichael Neumann offsetof(PP_NIslands_DPM2Parameters, PowerBoostLimit), 252157e252bfSMichael Neumann ni_scale_power_for_smc(power_boost_limit, ni_get_smc_power_scaling_factor(rdev)), 252257e252bfSMichael Neumann pi->sram_end); 252357e252bfSMichael Neumann if (ret) 252457e252bfSMichael Neumann power_boost_limit = 0; 252557e252bfSMichael Neumann 252657e252bfSMichael Neumann smc_state->levels[0].dpm2.MaxPS = 0; 252757e252bfSMichael Neumann smc_state->levels[0].dpm2.NearTDPDec = 0; 252857e252bfSMichael Neumann smc_state->levels[0].dpm2.AboveSafeInc = 0; 252957e252bfSMichael Neumann smc_state->levels[0].dpm2.BelowSafeInc = 0; 253057e252bfSMichael Neumann smc_state->levels[0].stateFlags |= power_boost_limit ? PPSMC_STATEFLAG_POWERBOOST : 0; 253157e252bfSMichael Neumann 253257e252bfSMichael Neumann for (i = 1; i < state->performance_level_count; i++) { 253357e252bfSMichael Neumann prev_sclk = state->performance_levels[i-1].sclk; 253457e252bfSMichael Neumann max_sclk = state->performance_levels[i].sclk; 253557e252bfSMichael Neumann max_ps_percent = (i != (state->performance_level_count - 1)) ? 
253657e252bfSMichael Neumann NISLANDS_DPM2_MAXPS_PERCENT_M : NISLANDS_DPM2_MAXPS_PERCENT_H; 253757e252bfSMichael Neumann 253857e252bfSMichael Neumann if (max_sclk < prev_sclk) 253957e252bfSMichael Neumann return -EINVAL; 254057e252bfSMichael Neumann 254157e252bfSMichael Neumann if ((max_ps_percent == 0) || (prev_sclk == max_sclk) || eg_pi->uvd_enabled) 254257e252bfSMichael Neumann min_sclk = max_sclk; 254357e252bfSMichael Neumann else if (1 == i) 254457e252bfSMichael Neumann min_sclk = prev_sclk; 254557e252bfSMichael Neumann else 254657e252bfSMichael Neumann min_sclk = (prev_sclk * (u32)max_ps_percent) / 100; 254757e252bfSMichael Neumann 254857e252bfSMichael Neumann if (min_sclk < state->performance_levels[0].sclk) 254957e252bfSMichael Neumann min_sclk = state->performance_levels[0].sclk; 255057e252bfSMichael Neumann 255157e252bfSMichael Neumann if (min_sclk == 0) 255257e252bfSMichael Neumann return -EINVAL; 255357e252bfSMichael Neumann 255457e252bfSMichael Neumann smc_state->levels[i].dpm2.MaxPS = 255557e252bfSMichael Neumann (u8)((NISLANDS_DPM2_MAX_PULSE_SKIP * (max_sclk - min_sclk)) / max_sclk); 255657e252bfSMichael Neumann smc_state->levels[i].dpm2.NearTDPDec = NISLANDS_DPM2_NEAR_TDP_DEC; 255757e252bfSMichael Neumann smc_state->levels[i].dpm2.AboveSafeInc = NISLANDS_DPM2_ABOVE_SAFE_INC; 255857e252bfSMichael Neumann smc_state->levels[i].dpm2.BelowSafeInc = NISLANDS_DPM2_BELOW_SAFE_INC; 255957e252bfSMichael Neumann smc_state->levels[i].stateFlags |= 256057e252bfSMichael Neumann ((i != (state->performance_level_count - 1)) && power_boost_limit) ? 
256157e252bfSMichael Neumann PPSMC_STATEFLAG_POWERBOOST : 0; 256257e252bfSMichael Neumann } 256357e252bfSMichael Neumann 256457e252bfSMichael Neumann return 0; 256557e252bfSMichael Neumann } 256657e252bfSMichael Neumann 256757e252bfSMichael Neumann static int ni_populate_sq_ramping_values(struct radeon_device *rdev, 256857e252bfSMichael Neumann struct radeon_ps *radeon_state, 256957e252bfSMichael Neumann NISLANDS_SMC_SWSTATE *smc_state) 257057e252bfSMichael Neumann { 257157e252bfSMichael Neumann struct ni_power_info *ni_pi = ni_get_pi(rdev); 257257e252bfSMichael Neumann struct ni_ps *state = ni_get_ps(radeon_state); 257357e252bfSMichael Neumann u32 sq_power_throttle; 257457e252bfSMichael Neumann u32 sq_power_throttle2; 257557e252bfSMichael Neumann bool enable_sq_ramping = ni_pi->enable_sq_ramping; 257657e252bfSMichael Neumann int i; 257757e252bfSMichael Neumann 257857e252bfSMichael Neumann if (state->performance_level_count == 0) 257957e252bfSMichael Neumann return -EINVAL; 258057e252bfSMichael Neumann 258157e252bfSMichael Neumann if (smc_state->levelCount != state->performance_level_count) 258257e252bfSMichael Neumann return -EINVAL; 258357e252bfSMichael Neumann 258457e252bfSMichael Neumann if (rdev->pm.dpm.sq_ramping_threshold == 0) 258557e252bfSMichael Neumann return -EINVAL; 258657e252bfSMichael Neumann 258757e252bfSMichael Neumann if (NISLANDS_DPM2_SQ_RAMP_MAX_POWER > (MAX_POWER_MASK >> MAX_POWER_SHIFT)) 258857e252bfSMichael Neumann enable_sq_ramping = false; 258957e252bfSMichael Neumann 259057e252bfSMichael Neumann if (NISLANDS_DPM2_SQ_RAMP_MIN_POWER > (MIN_POWER_MASK >> MIN_POWER_SHIFT)) 259157e252bfSMichael Neumann enable_sq_ramping = false; 259257e252bfSMichael Neumann 259357e252bfSMichael Neumann if (NISLANDS_DPM2_SQ_RAMP_MAX_POWER_DELTA > (MAX_POWER_DELTA_MASK >> MAX_POWER_DELTA_SHIFT)) 259457e252bfSMichael Neumann enable_sq_ramping = false; 259557e252bfSMichael Neumann 259657e252bfSMichael Neumann if (NISLANDS_DPM2_SQ_RAMP_STI_SIZE > (STI_SIZE_MASK >> 
STI_SIZE_SHIFT)) 259757e252bfSMichael Neumann enable_sq_ramping = false; 259857e252bfSMichael Neumann 259957e252bfSMichael Neumann if (NISLANDS_DPM2_SQ_RAMP_LTI_RATIO <= (LTI_RATIO_MASK >> LTI_RATIO_SHIFT)) 260057e252bfSMichael Neumann enable_sq_ramping = false; 260157e252bfSMichael Neumann 260257e252bfSMichael Neumann for (i = 0; i < state->performance_level_count; i++) { 260357e252bfSMichael Neumann sq_power_throttle = 0; 260457e252bfSMichael Neumann sq_power_throttle2 = 0; 260557e252bfSMichael Neumann 260657e252bfSMichael Neumann if ((state->performance_levels[i].sclk >= rdev->pm.dpm.sq_ramping_threshold) && 260757e252bfSMichael Neumann enable_sq_ramping) { 260857e252bfSMichael Neumann sq_power_throttle |= MAX_POWER(NISLANDS_DPM2_SQ_RAMP_MAX_POWER); 260957e252bfSMichael Neumann sq_power_throttle |= MIN_POWER(NISLANDS_DPM2_SQ_RAMP_MIN_POWER); 261057e252bfSMichael Neumann sq_power_throttle2 |= MAX_POWER_DELTA(NISLANDS_DPM2_SQ_RAMP_MAX_POWER_DELTA); 261157e252bfSMichael Neumann sq_power_throttle2 |= STI_SIZE(NISLANDS_DPM2_SQ_RAMP_STI_SIZE); 261257e252bfSMichael Neumann sq_power_throttle2 |= LTI_RATIO(NISLANDS_DPM2_SQ_RAMP_LTI_RATIO); 261357e252bfSMichael Neumann } else { 261457e252bfSMichael Neumann sq_power_throttle |= MAX_POWER_MASK | MIN_POWER_MASK; 261557e252bfSMichael Neumann sq_power_throttle2 |= MAX_POWER_DELTA_MASK | STI_SIZE_MASK | LTI_RATIO_MASK; 261657e252bfSMichael Neumann } 261757e252bfSMichael Neumann 261857e252bfSMichael Neumann smc_state->levels[i].SQPowerThrottle = cpu_to_be32(sq_power_throttle); 261957e252bfSMichael Neumann smc_state->levels[i].SQPowerThrottle_2 = cpu_to_be32(sq_power_throttle2); 262057e252bfSMichael Neumann } 262157e252bfSMichael Neumann 262257e252bfSMichael Neumann return 0; 262357e252bfSMichael Neumann } 262457e252bfSMichael Neumann 262557e252bfSMichael Neumann static int ni_enable_power_containment(struct radeon_device *rdev, 262657e252bfSMichael Neumann struct radeon_ps *radeon_new_state, 262757e252bfSMichael Neumann bool 
enable) 262857e252bfSMichael Neumann { 262957e252bfSMichael Neumann struct ni_power_info *ni_pi = ni_get_pi(rdev); 263057e252bfSMichael Neumann PPSMC_Result smc_result; 263157e252bfSMichael Neumann int ret = 0; 263257e252bfSMichael Neumann 263357e252bfSMichael Neumann if (ni_pi->enable_power_containment) { 263457e252bfSMichael Neumann if (enable) { 263557e252bfSMichael Neumann if (!r600_is_uvd_state(radeon_new_state->class, radeon_new_state->class2)) { 263657e252bfSMichael Neumann smc_result = rv770_send_msg_to_smc(rdev, PPSMC_TDPClampingActive); 263757e252bfSMichael Neumann if (smc_result != PPSMC_Result_OK) { 263857e252bfSMichael Neumann ret = -EINVAL; 263957e252bfSMichael Neumann ni_pi->pc_enabled = false; 264057e252bfSMichael Neumann } else { 264157e252bfSMichael Neumann ni_pi->pc_enabled = true; 264257e252bfSMichael Neumann } 264357e252bfSMichael Neumann } 264457e252bfSMichael Neumann } else { 264557e252bfSMichael Neumann smc_result = rv770_send_msg_to_smc(rdev, PPSMC_TDPClampingInactive); 264657e252bfSMichael Neumann if (smc_result != PPSMC_Result_OK) 264757e252bfSMichael Neumann ret = -EINVAL; 264857e252bfSMichael Neumann ni_pi->pc_enabled = false; 264957e252bfSMichael Neumann } 265057e252bfSMichael Neumann } 265157e252bfSMichael Neumann 265257e252bfSMichael Neumann return ret; 265357e252bfSMichael Neumann } 265457e252bfSMichael Neumann 265557e252bfSMichael Neumann static int ni_convert_power_state_to_smc(struct radeon_device *rdev, 265657e252bfSMichael Neumann struct radeon_ps *radeon_state, 265757e252bfSMichael Neumann NISLANDS_SMC_SWSTATE *smc_state) 265857e252bfSMichael Neumann { 265957e252bfSMichael Neumann struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); 266057e252bfSMichael Neumann struct ni_power_info *ni_pi = ni_get_pi(rdev); 266157e252bfSMichael Neumann struct ni_ps *state = ni_get_ps(radeon_state); 266257e252bfSMichael Neumann int i, ret; 266357e252bfSMichael Neumann u32 threshold = 
state->performance_levels[state->performance_level_count - 1].sclk * 100 / 100; 266457e252bfSMichael Neumann 266557e252bfSMichael Neumann if (!(radeon_state->caps & ATOM_PPLIB_DISALLOW_ON_DC)) 266657e252bfSMichael Neumann smc_state->flags |= PPSMC_SWSTATE_FLAG_DC; 266757e252bfSMichael Neumann 266857e252bfSMichael Neumann smc_state->levelCount = 0; 266957e252bfSMichael Neumann 267057e252bfSMichael Neumann if (state->performance_level_count > NISLANDS_MAX_SMC_PERFORMANCE_LEVELS_PER_SWSTATE) 267157e252bfSMichael Neumann return -EINVAL; 267257e252bfSMichael Neumann 267357e252bfSMichael Neumann for (i = 0; i < state->performance_level_count; i++) { 267457e252bfSMichael Neumann ret = ni_convert_power_level_to_smc(rdev, &state->performance_levels[i], 267557e252bfSMichael Neumann &smc_state->levels[i]); 267657e252bfSMichael Neumann smc_state->levels[i].arbRefreshState = 267757e252bfSMichael Neumann (u8)(NISLANDS_DRIVER_STATE_ARB_INDEX + i); 267857e252bfSMichael Neumann 267957e252bfSMichael Neumann if (ret) 268057e252bfSMichael Neumann return ret; 268157e252bfSMichael Neumann 268257e252bfSMichael Neumann if (ni_pi->enable_power_containment) 268357e252bfSMichael Neumann smc_state->levels[i].displayWatermark = 268457e252bfSMichael Neumann (state->performance_levels[i].sclk < threshold) ? 268557e252bfSMichael Neumann PPSMC_DISPLAY_WATERMARK_LOW : PPSMC_DISPLAY_WATERMARK_HIGH; 268657e252bfSMichael Neumann else 268757e252bfSMichael Neumann smc_state->levels[i].displayWatermark = (i < 2) ? 
268857e252bfSMichael Neumann PPSMC_DISPLAY_WATERMARK_LOW : PPSMC_DISPLAY_WATERMARK_HIGH; 268957e252bfSMichael Neumann 269057e252bfSMichael Neumann if (eg_pi->dynamic_ac_timing) 269157e252bfSMichael Neumann smc_state->levels[i].ACIndex = NISLANDS_MCREGISTERTABLE_FIRST_DRIVERSTATE_SLOT + i; 269257e252bfSMichael Neumann else 269357e252bfSMichael Neumann smc_state->levels[i].ACIndex = 0; 269457e252bfSMichael Neumann 269557e252bfSMichael Neumann smc_state->levelCount++; 269657e252bfSMichael Neumann } 269757e252bfSMichael Neumann 269857e252bfSMichael Neumann rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_watermark_threshold, 269957e252bfSMichael Neumann cpu_to_be32(threshold / 512)); 270057e252bfSMichael Neumann 270157e252bfSMichael Neumann ni_populate_smc_sp(rdev, radeon_state, smc_state); 270257e252bfSMichael Neumann 270357e252bfSMichael Neumann ret = ni_populate_power_containment_values(rdev, radeon_state, smc_state); 270457e252bfSMichael Neumann if (ret) 270557e252bfSMichael Neumann ni_pi->enable_power_containment = false; 270657e252bfSMichael Neumann 270757e252bfSMichael Neumann ret = ni_populate_sq_ramping_values(rdev, radeon_state, smc_state); 270857e252bfSMichael Neumann if (ret) 270957e252bfSMichael Neumann ni_pi->enable_sq_ramping = false; 271057e252bfSMichael Neumann 271157e252bfSMichael Neumann return ni_populate_smc_t(rdev, radeon_state, smc_state); 271257e252bfSMichael Neumann } 271357e252bfSMichael Neumann 271457e252bfSMichael Neumann static int ni_upload_sw_state(struct radeon_device *rdev, 271557e252bfSMichael Neumann struct radeon_ps *radeon_new_state) 271657e252bfSMichael Neumann { 271757e252bfSMichael Neumann struct rv7xx_power_info *pi = rv770_get_pi(rdev); 271857e252bfSMichael Neumann u16 address = pi->state_table_start + 271957e252bfSMichael Neumann offsetof(NISLANDS_SMC_STATETABLE, driverState); 272057e252bfSMichael Neumann u16 state_size = sizeof(NISLANDS_SMC_SWSTATE) + 272157e252bfSMichael Neumann 
((NISLANDS_MAX_SMC_PERFORMANCE_LEVELS_PER_SWSTATE - 1) * sizeof(NISLANDS_SMC_HW_PERFORMANCE_LEVEL)); 272257e252bfSMichael Neumann int ret; 272357e252bfSMichael Neumann NISLANDS_SMC_SWSTATE *smc_state = kzalloc(state_size, GFP_KERNEL); 272457e252bfSMichael Neumann 272557e252bfSMichael Neumann if (smc_state == NULL) 272657e252bfSMichael Neumann return -ENOMEM; 272757e252bfSMichael Neumann 272857e252bfSMichael Neumann ret = ni_convert_power_state_to_smc(rdev, radeon_new_state, smc_state); 272957e252bfSMichael Neumann if (ret) 273057e252bfSMichael Neumann goto done; 273157e252bfSMichael Neumann 273257e252bfSMichael Neumann ret = rv770_copy_bytes_to_smc(rdev, address, (u8 *)smc_state, state_size, pi->sram_end); 273357e252bfSMichael Neumann 273457e252bfSMichael Neumann done: 273557e252bfSMichael Neumann kfree(smc_state); 273657e252bfSMichael Neumann 273757e252bfSMichael Neumann return ret; 273857e252bfSMichael Neumann } 273957e252bfSMichael Neumann 274057e252bfSMichael Neumann static int ni_set_mc_special_registers(struct radeon_device *rdev, 274157e252bfSMichael Neumann struct ni_mc_reg_table *table) 274257e252bfSMichael Neumann { 274357e252bfSMichael Neumann struct rv7xx_power_info *pi = rv770_get_pi(rdev); 274457e252bfSMichael Neumann u8 i, j, k; 274557e252bfSMichael Neumann u32 temp_reg; 274657e252bfSMichael Neumann 274757e252bfSMichael Neumann for (i = 0, j = table->last; i < table->last; i++) { 274857e252bfSMichael Neumann switch (table->mc_reg_address[i].s1) { 274957e252bfSMichael Neumann case MC_SEQ_MISC1 >> 2: 275057e252bfSMichael Neumann if (j >= SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE) 275157e252bfSMichael Neumann return -EINVAL; 275257e252bfSMichael Neumann temp_reg = RREG32(MC_PMG_CMD_EMRS); 275357e252bfSMichael Neumann table->mc_reg_address[j].s1 = MC_PMG_CMD_EMRS >> 2; 275457e252bfSMichael Neumann table->mc_reg_address[j].s0 = MC_SEQ_PMG_CMD_EMRS_LP >> 2; 275557e252bfSMichael Neumann for (k = 0; k < table->num_entries; k++) 275657e252bfSMichael Neumann 
table->mc_reg_table_entry[k].mc_data[j] = 275757e252bfSMichael Neumann ((temp_reg & 0xffff0000)) | 275857e252bfSMichael Neumann ((table->mc_reg_table_entry[k].mc_data[i] & 0xffff0000) >> 16); 275957e252bfSMichael Neumann j++; 276057e252bfSMichael Neumann if (j >= SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE) 276157e252bfSMichael Neumann return -EINVAL; 276257e252bfSMichael Neumann 276357e252bfSMichael Neumann temp_reg = RREG32(MC_PMG_CMD_MRS); 276457e252bfSMichael Neumann table->mc_reg_address[j].s1 = MC_PMG_CMD_MRS >> 2; 276557e252bfSMichael Neumann table->mc_reg_address[j].s0 = MC_SEQ_PMG_CMD_MRS_LP >> 2; 276657e252bfSMichael Neumann for(k = 0; k < table->num_entries; k++) { 276757e252bfSMichael Neumann table->mc_reg_table_entry[k].mc_data[j] = 276857e252bfSMichael Neumann (temp_reg & 0xffff0000) | 276957e252bfSMichael Neumann (table->mc_reg_table_entry[k].mc_data[i] & 0x0000ffff); 277057e252bfSMichael Neumann if (!pi->mem_gddr5) 277157e252bfSMichael Neumann table->mc_reg_table_entry[k].mc_data[j] |= 0x100; 277257e252bfSMichael Neumann } 277357e252bfSMichael Neumann j++; 277457e252bfSMichael Neumann if (j > SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE) 277557e252bfSMichael Neumann return -EINVAL; 277657e252bfSMichael Neumann break; 277757e252bfSMichael Neumann case MC_SEQ_RESERVE_M >> 2: 277857e252bfSMichael Neumann temp_reg = RREG32(MC_PMG_CMD_MRS1); 277957e252bfSMichael Neumann table->mc_reg_address[j].s1 = MC_PMG_CMD_MRS1 >> 2; 278057e252bfSMichael Neumann table->mc_reg_address[j].s0 = MC_SEQ_PMG_CMD_MRS1_LP >> 2; 278157e252bfSMichael Neumann for (k = 0; k < table->num_entries; k++) 278257e252bfSMichael Neumann table->mc_reg_table_entry[k].mc_data[j] = 278357e252bfSMichael Neumann (temp_reg & 0xffff0000) | 278457e252bfSMichael Neumann (table->mc_reg_table_entry[k].mc_data[i] & 0x0000ffff); 278557e252bfSMichael Neumann j++; 278657e252bfSMichael Neumann if (j > SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE) 278757e252bfSMichael Neumann return -EINVAL; 278857e252bfSMichael Neumann break; 
278957e252bfSMichael Neumann default: 279057e252bfSMichael Neumann break; 279157e252bfSMichael Neumann } 279257e252bfSMichael Neumann } 279357e252bfSMichael Neumann 279457e252bfSMichael Neumann table->last = j; 279557e252bfSMichael Neumann 279657e252bfSMichael Neumann return 0; 279757e252bfSMichael Neumann } 279857e252bfSMichael Neumann 279957e252bfSMichael Neumann static bool ni_check_s0_mc_reg_index(u16 in_reg, u16 *out_reg) 280057e252bfSMichael Neumann { 280157e252bfSMichael Neumann bool result = true; 280257e252bfSMichael Neumann 280357e252bfSMichael Neumann switch (in_reg) { 280457e252bfSMichael Neumann case MC_SEQ_RAS_TIMING >> 2: 280557e252bfSMichael Neumann *out_reg = MC_SEQ_RAS_TIMING_LP >> 2; 280657e252bfSMichael Neumann break; 280757e252bfSMichael Neumann case MC_SEQ_CAS_TIMING >> 2: 280857e252bfSMichael Neumann *out_reg = MC_SEQ_CAS_TIMING_LP >> 2; 280957e252bfSMichael Neumann break; 281057e252bfSMichael Neumann case MC_SEQ_MISC_TIMING >> 2: 281157e252bfSMichael Neumann *out_reg = MC_SEQ_MISC_TIMING_LP >> 2; 281257e252bfSMichael Neumann break; 281357e252bfSMichael Neumann case MC_SEQ_MISC_TIMING2 >> 2: 281457e252bfSMichael Neumann *out_reg = MC_SEQ_MISC_TIMING2_LP >> 2; 281557e252bfSMichael Neumann break; 281657e252bfSMichael Neumann case MC_SEQ_RD_CTL_D0 >> 2: 281757e252bfSMichael Neumann *out_reg = MC_SEQ_RD_CTL_D0_LP >> 2; 281857e252bfSMichael Neumann break; 281957e252bfSMichael Neumann case MC_SEQ_RD_CTL_D1 >> 2: 282057e252bfSMichael Neumann *out_reg = MC_SEQ_RD_CTL_D1_LP >> 2; 282157e252bfSMichael Neumann break; 282257e252bfSMichael Neumann case MC_SEQ_WR_CTL_D0 >> 2: 282357e252bfSMichael Neumann *out_reg = MC_SEQ_WR_CTL_D0_LP >> 2; 282457e252bfSMichael Neumann break; 282557e252bfSMichael Neumann case MC_SEQ_WR_CTL_D1 >> 2: 282657e252bfSMichael Neumann *out_reg = MC_SEQ_WR_CTL_D1_LP >> 2; 282757e252bfSMichael Neumann break; 282857e252bfSMichael Neumann case MC_PMG_CMD_EMRS >> 2: 282957e252bfSMichael Neumann *out_reg = MC_SEQ_PMG_CMD_EMRS_LP >> 2; 
283057e252bfSMichael Neumann break; 283157e252bfSMichael Neumann case MC_PMG_CMD_MRS >> 2: 283257e252bfSMichael Neumann *out_reg = MC_SEQ_PMG_CMD_MRS_LP >> 2; 283357e252bfSMichael Neumann break; 283457e252bfSMichael Neumann case MC_PMG_CMD_MRS1 >> 2: 283557e252bfSMichael Neumann *out_reg = MC_SEQ_PMG_CMD_MRS1_LP >> 2; 283657e252bfSMichael Neumann break; 283757e252bfSMichael Neumann case MC_SEQ_PMG_TIMING >> 2: 283857e252bfSMichael Neumann *out_reg = MC_SEQ_PMG_TIMING_LP >> 2; 283957e252bfSMichael Neumann break; 284057e252bfSMichael Neumann case MC_PMG_CMD_MRS2 >> 2: 284157e252bfSMichael Neumann *out_reg = MC_SEQ_PMG_CMD_MRS2_LP >> 2; 284257e252bfSMichael Neumann break; 284357e252bfSMichael Neumann default: 284457e252bfSMichael Neumann result = false; 284557e252bfSMichael Neumann break; 284657e252bfSMichael Neumann } 284757e252bfSMichael Neumann 284857e252bfSMichael Neumann return result; 284957e252bfSMichael Neumann } 285057e252bfSMichael Neumann 285157e252bfSMichael Neumann static void ni_set_valid_flag(struct ni_mc_reg_table *table) 285257e252bfSMichael Neumann { 285357e252bfSMichael Neumann u8 i, j; 285457e252bfSMichael Neumann 285557e252bfSMichael Neumann for (i = 0; i < table->last; i++) { 285657e252bfSMichael Neumann for (j = 1; j < table->num_entries; j++) { 285757e252bfSMichael Neumann if (table->mc_reg_table_entry[j-1].mc_data[i] != table->mc_reg_table_entry[j].mc_data[i]) { 285857e252bfSMichael Neumann table->valid_flag |= 1 << i; 285957e252bfSMichael Neumann break; 286057e252bfSMichael Neumann } 286157e252bfSMichael Neumann } 286257e252bfSMichael Neumann } 286357e252bfSMichael Neumann } 286457e252bfSMichael Neumann 286557e252bfSMichael Neumann static void ni_set_s0_mc_reg_index(struct ni_mc_reg_table *table) 286657e252bfSMichael Neumann { 286757e252bfSMichael Neumann u32 i; 286857e252bfSMichael Neumann u16 address; 286957e252bfSMichael Neumann 287057e252bfSMichael Neumann for (i = 0; i < table->last; i++) 287157e252bfSMichael Neumann 
table->mc_reg_address[i].s0 = 287257e252bfSMichael Neumann ni_check_s0_mc_reg_index(table->mc_reg_address[i].s1, &address) ? 287357e252bfSMichael Neumann address : table->mc_reg_address[i].s1; 287457e252bfSMichael Neumann } 287557e252bfSMichael Neumann 287657e252bfSMichael Neumann static int ni_copy_vbios_mc_reg_table(struct atom_mc_reg_table *table, 287757e252bfSMichael Neumann struct ni_mc_reg_table *ni_table) 287857e252bfSMichael Neumann { 287957e252bfSMichael Neumann u8 i, j; 288057e252bfSMichael Neumann 288157e252bfSMichael Neumann if (table->last > SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE) 288257e252bfSMichael Neumann return -EINVAL; 288357e252bfSMichael Neumann if (table->num_entries > MAX_AC_TIMING_ENTRIES) 288457e252bfSMichael Neumann return -EINVAL; 288557e252bfSMichael Neumann 288657e252bfSMichael Neumann for (i = 0; i < table->last; i++) 288757e252bfSMichael Neumann ni_table->mc_reg_address[i].s1 = table->mc_reg_address[i].s1; 288857e252bfSMichael Neumann ni_table->last = table->last; 288957e252bfSMichael Neumann 289057e252bfSMichael Neumann for (i = 0; i < table->num_entries; i++) { 289157e252bfSMichael Neumann ni_table->mc_reg_table_entry[i].mclk_max = 289257e252bfSMichael Neumann table->mc_reg_table_entry[i].mclk_max; 289357e252bfSMichael Neumann for (j = 0; j < table->last; j++) 289457e252bfSMichael Neumann ni_table->mc_reg_table_entry[i].mc_data[j] = 289557e252bfSMichael Neumann table->mc_reg_table_entry[i].mc_data[j]; 289657e252bfSMichael Neumann } 289757e252bfSMichael Neumann ni_table->num_entries = table->num_entries; 289857e252bfSMichael Neumann 289957e252bfSMichael Neumann return 0; 290057e252bfSMichael Neumann } 290157e252bfSMichael Neumann 290257e252bfSMichael Neumann static int ni_initialize_mc_reg_table(struct radeon_device *rdev) 290357e252bfSMichael Neumann { 290457e252bfSMichael Neumann struct ni_power_info *ni_pi = ni_get_pi(rdev); 290557e252bfSMichael Neumann int ret; 290657e252bfSMichael Neumann struct atom_mc_reg_table *table; 
290757e252bfSMichael Neumann struct ni_mc_reg_table *ni_table = &ni_pi->mc_reg_table; 290857e252bfSMichael Neumann u8 module_index = rv770_get_memory_module_index(rdev); 290957e252bfSMichael Neumann 291057e252bfSMichael Neumann table = kzalloc(sizeof(struct atom_mc_reg_table), GFP_KERNEL); 291157e252bfSMichael Neumann if (!table) 291257e252bfSMichael Neumann return -ENOMEM; 291357e252bfSMichael Neumann 291457e252bfSMichael Neumann WREG32(MC_SEQ_RAS_TIMING_LP, RREG32(MC_SEQ_RAS_TIMING)); 291557e252bfSMichael Neumann WREG32(MC_SEQ_CAS_TIMING_LP, RREG32(MC_SEQ_CAS_TIMING)); 291657e252bfSMichael Neumann WREG32(MC_SEQ_MISC_TIMING_LP, RREG32(MC_SEQ_MISC_TIMING)); 291757e252bfSMichael Neumann WREG32(MC_SEQ_MISC_TIMING2_LP, RREG32(MC_SEQ_MISC_TIMING2)); 291857e252bfSMichael Neumann WREG32(MC_SEQ_PMG_CMD_EMRS_LP, RREG32(MC_PMG_CMD_EMRS)); 291957e252bfSMichael Neumann WREG32(MC_SEQ_PMG_CMD_MRS_LP, RREG32(MC_PMG_CMD_MRS)); 292057e252bfSMichael Neumann WREG32(MC_SEQ_PMG_CMD_MRS1_LP, RREG32(MC_PMG_CMD_MRS1)); 292157e252bfSMichael Neumann WREG32(MC_SEQ_WR_CTL_D0_LP, RREG32(MC_SEQ_WR_CTL_D0)); 292257e252bfSMichael Neumann WREG32(MC_SEQ_WR_CTL_D1_LP, RREG32(MC_SEQ_WR_CTL_D1)); 292357e252bfSMichael Neumann WREG32(MC_SEQ_RD_CTL_D0_LP, RREG32(MC_SEQ_RD_CTL_D0)); 292457e252bfSMichael Neumann WREG32(MC_SEQ_RD_CTL_D1_LP, RREG32(MC_SEQ_RD_CTL_D1)); 292557e252bfSMichael Neumann WREG32(MC_SEQ_PMG_TIMING_LP, RREG32(MC_SEQ_PMG_TIMING)); 292657e252bfSMichael Neumann WREG32(MC_SEQ_PMG_CMD_MRS2_LP, RREG32(MC_PMG_CMD_MRS2)); 292757e252bfSMichael Neumann 292857e252bfSMichael Neumann ret = radeon_atom_init_mc_reg_table(rdev, module_index, table); 292957e252bfSMichael Neumann 293057e252bfSMichael Neumann if (ret) 293157e252bfSMichael Neumann goto init_mc_done; 293257e252bfSMichael Neumann 293357e252bfSMichael Neumann ret = ni_copy_vbios_mc_reg_table(table, ni_table); 293457e252bfSMichael Neumann 293557e252bfSMichael Neumann if (ret) 293657e252bfSMichael Neumann goto init_mc_done; 
293757e252bfSMichael Neumann 293857e252bfSMichael Neumann ni_set_s0_mc_reg_index(ni_table); 293957e252bfSMichael Neumann 294057e252bfSMichael Neumann ret = ni_set_mc_special_registers(rdev, ni_table); 294157e252bfSMichael Neumann 294257e252bfSMichael Neumann if (ret) 294357e252bfSMichael Neumann goto init_mc_done; 294457e252bfSMichael Neumann 294557e252bfSMichael Neumann ni_set_valid_flag(ni_table); 294657e252bfSMichael Neumann 294757e252bfSMichael Neumann init_mc_done: 294857e252bfSMichael Neumann kfree(table); 294957e252bfSMichael Neumann 295057e252bfSMichael Neumann return ret; 295157e252bfSMichael Neumann } 295257e252bfSMichael Neumann 295357e252bfSMichael Neumann static void ni_populate_mc_reg_addresses(struct radeon_device *rdev, 295457e252bfSMichael Neumann SMC_NIslands_MCRegisters *mc_reg_table) 295557e252bfSMichael Neumann { 295657e252bfSMichael Neumann struct ni_power_info *ni_pi = ni_get_pi(rdev); 295757e252bfSMichael Neumann u32 i, j; 295857e252bfSMichael Neumann 295957e252bfSMichael Neumann for (i = 0, j = 0; j < ni_pi->mc_reg_table.last; j++) { 296057e252bfSMichael Neumann if (ni_pi->mc_reg_table.valid_flag & (1 << j)) { 296157e252bfSMichael Neumann if (i >= SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE) 296257e252bfSMichael Neumann break; 296357e252bfSMichael Neumann mc_reg_table->address[i].s0 = 296457e252bfSMichael Neumann cpu_to_be16(ni_pi->mc_reg_table.mc_reg_address[j].s0); 296557e252bfSMichael Neumann mc_reg_table->address[i].s1 = 296657e252bfSMichael Neumann cpu_to_be16(ni_pi->mc_reg_table.mc_reg_address[j].s1); 296757e252bfSMichael Neumann i++; 296857e252bfSMichael Neumann } 296957e252bfSMichael Neumann } 297057e252bfSMichael Neumann mc_reg_table->last = (u8)i; 297157e252bfSMichael Neumann } 297257e252bfSMichael Neumann 297357e252bfSMichael Neumann 297457e252bfSMichael Neumann static void ni_convert_mc_registers(struct ni_mc_reg_entry *entry, 297557e252bfSMichael Neumann SMC_NIslands_MCRegisterSet *data, 297657e252bfSMichael Neumann u32 num_entries, 
u32 valid_flag)
{
	/*
	 * Tail of ni_convert_mc_registers() (its signature begins earlier in
	 * the file).  Packs every mc_data[] word whose bit is set in
	 * valid_flag into consecutive big-endian slots of the SMC register
	 * set: j walks the source entries, i the packed destination.
	 */
	u32 i, j;

	for (i = 0, j = 0; j < num_entries; j++) {
		if (valid_flag & (1 << j)) {
			data->value[i] = cpu_to_be32(entry->mc_data[j]);
			i++;
		}
	}
}

/*
 * Fill one SMC MC-register set for the given performance level: pick the
 * first MC reg-table entry whose mclk_max covers pl->mclk (falling back to
 * the last entry if none does) and convert it to SMC byte order.
 */
static void ni_convert_mc_reg_table_entry_to_smc(struct radeon_device *rdev,
						 struct rv7xx_pl *pl,
						 SMC_NIslands_MCRegisterSet *mc_reg_table_data)
{
	struct ni_power_info *ni_pi = ni_get_pi(rdev);
	u32 i = 0;

	/* find the first entry whose mclk ceiling covers the requested mclk */
	for (i = 0; i < ni_pi->mc_reg_table.num_entries; i++) {
		if (pl->mclk <= ni_pi->mc_reg_table.mc_reg_table_entry[i].mclk_max)
			break;
	}

	/* no match: clamp to the last valid entry */
	if ((i == ni_pi->mc_reg_table.num_entries) && (i > 0))
		--i;

	ni_convert_mc_registers(&ni_pi->mc_reg_table.mc_reg_table_entry[i],
				mc_reg_table_data,
				ni_pi->mc_reg_table.last,
				ni_pi->mc_reg_table.valid_flag);
}

/*
 * Convert every performance level of the state into its driver-state slot
 * of the SMC MC-register table.
 */
static void ni_convert_mc_reg_table_to_smc(struct radeon_device *rdev,
					   struct radeon_ps *radeon_state,
					   SMC_NIslands_MCRegisters *mc_reg_table)
{
	struct ni_ps *state = ni_get_ps(radeon_state);
	int i;

	for (i = 0; i < state->performance_level_count; i++) {
		ni_convert_mc_reg_table_entry_to_smc(rdev,
						     &state->performance_levels[i],
						     &mc_reg_table->data[NISLANDS_MCREGISTERTABLE_FIRST_DRIVERSTATE_SLOT + i]);
	}
}

/*
 * Build the initial SMC MC-register table from the boot state and copy it
 * into SMC RAM.  Slot 0 gets the boot performance level, slot 1 the raw
 * first MC reg-table entry; the driver-state slots are filled from the
 * boot state's performance levels (continues on the next source line).
 */
static int ni_populate_mc_reg_table(struct radeon_device *rdev,
				    struct radeon_ps *radeon_boot_state)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	struct ni_power_info *ni_pi = ni_get_pi(rdev);
	struct ni_ps *boot_state = ni_get_ps(radeon_boot_state);
	SMC_NIslands_MCRegisters *mc_reg_table = &ni_pi->smc_mc_reg_table;

	memset(mc_reg_table, 0, sizeof(SMC_NIslands_MCRegisters));

	rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_seq_index, 1);

	ni_populate_mc_reg_addresses(rdev, mc_reg_table);

	/* slot 0: boot performance level */
	ni_convert_mc_reg_table_entry_to_smc(rdev, &boot_state->performance_levels[0],
					     &mc_reg_table->data[0]);

	/* slot 1: first raw MC reg-table entry */
	ni_convert_mc_registers(&ni_pi->mc_reg_table.mc_reg_table_entry[0],
				&mc_reg_table->data[1],
				ni_pi->mc_reg_table.last,
				ni_pi->mc_reg_table.valid_flag);

ni_convert_mc_reg_table_to_smc(rdev, radeon_boot_state, mc_reg_table); 304757e252bfSMichael Neumann 304857e252bfSMichael Neumann return rv770_copy_bytes_to_smc(rdev, eg_pi->mc_reg_table_start, 304957e252bfSMichael Neumann (u8 *)mc_reg_table, 305057e252bfSMichael Neumann sizeof(SMC_NIslands_MCRegisters), 305157e252bfSMichael Neumann pi->sram_end); 305257e252bfSMichael Neumann } 305357e252bfSMichael Neumann 305457e252bfSMichael Neumann static int ni_upload_mc_reg_table(struct radeon_device *rdev, 305557e252bfSMichael Neumann struct radeon_ps *radeon_new_state) 305657e252bfSMichael Neumann { 305757e252bfSMichael Neumann struct rv7xx_power_info *pi = rv770_get_pi(rdev); 305857e252bfSMichael Neumann struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); 305957e252bfSMichael Neumann struct ni_power_info *ni_pi = ni_get_pi(rdev); 306057e252bfSMichael Neumann struct ni_ps *ni_new_state = ni_get_ps(radeon_new_state); 306157e252bfSMichael Neumann SMC_NIslands_MCRegisters *mc_reg_table = &ni_pi->smc_mc_reg_table; 306257e252bfSMichael Neumann u16 address; 306357e252bfSMichael Neumann 306457e252bfSMichael Neumann memset(mc_reg_table, 0, sizeof(SMC_NIslands_MCRegisters)); 306557e252bfSMichael Neumann 306657e252bfSMichael Neumann ni_convert_mc_reg_table_to_smc(rdev, radeon_new_state, mc_reg_table); 306757e252bfSMichael Neumann 306857e252bfSMichael Neumann address = eg_pi->mc_reg_table_start + 306957e252bfSMichael Neumann (u16)offsetof(SMC_NIslands_MCRegisters, data[NISLANDS_MCREGISTERTABLE_FIRST_DRIVERSTATE_SLOT]); 307057e252bfSMichael Neumann 307157e252bfSMichael Neumann return rv770_copy_bytes_to_smc(rdev, address, 307257e252bfSMichael Neumann (u8 *)&mc_reg_table->data[NISLANDS_MCREGISTERTABLE_FIRST_DRIVERSTATE_SLOT], 307357e252bfSMichael Neumann sizeof(SMC_NIslands_MCRegisterSet) * ni_new_state->performance_level_count, 307457e252bfSMichael Neumann pi->sram_end); 307557e252bfSMichael Neumann } 307657e252bfSMichael Neumann 307757e252bfSMichael Neumann static int 
ni_init_driver_calculated_leakage_table(struct radeon_device *rdev,
					PP_NIslands_CACTABLES *cac_tables)
{
	/*
	 * Fill the SMC leakage lookup table (temperature x voltage) using the
	 * driver-side leakage model.  For each (temperature, voltage) pair the
	 * leakage power is computed, scaled for the SMC, and stored big-endian;
	 * unused voltage columns are padded with the maximum value seen.
	 */
	struct ni_power_info *ni_pi = ni_get_pi(rdev);
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	u32 leakage = 0;
	unsigned int i, j, table_size;
	s32 t;
	u32 smc_leakage, max_leakage = 0;
	u32 scaling_factor;

	/* one column per supported VDDC step, capped at the LUT width */
	table_size = eg_pi->vddc_voltage_table.count;

	if (SMC_NISLANDS_LKGE_LUT_NUM_OF_VOLT_ENTRIES < table_size)
		table_size = SMC_NISLANDS_LKGE_LUT_NUM_OF_VOLT_ENTRIES;

	scaling_factor = ni_get_smc_power_scaling_factor(rdev);

	for (i = 0; i < SMC_NISLANDS_LKGE_LUT_NUM_OF_TEMP_ENTRIES; i++) {
		for (j = 0; j < table_size; j++) {
			/* temperature rows are 8-degree steps scaled by 1000
			 * (presumably m(deg)C) -- clamp to the model minimum */
			t = (1000 * ((i + 1) * 8));

			if (t < ni_pi->cac_data.leakage_minimum_temperature)
				t = ni_pi->cac_data.leakage_minimum_temperature;

			ni_calculate_leakage_for_v_and_t(rdev,
							 &ni_pi->cac_data.leakage_coefficients,
							 eg_pi->vddc_voltage_table.entries[j].value,
							 t,
							 ni_pi->cac_data.i_leakage,
							 &leakage);

			smc_leakage = ni_scale_power_for_smc(leakage, scaling_factor) / 1000;
			if (smc_leakage > max_leakage)
				max_leakage = smc_leakage;

			cac_tables->cac_lkge_lut[i][j] = cpu_to_be32(smc_leakage);
		}
	}

	/* pad any remaining voltage columns with the largest leakage value */
	for (j = table_size; j < SMC_NISLANDS_LKGE_LUT_NUM_OF_VOLT_ENTRIES; j++) {
		for (i = 0; i < SMC_NISLANDS_LKGE_LUT_NUM_OF_TEMP_ENTRIES; i++)
			cac_tables->cac_lkge_lut[i][j] = cpu_to_be32(max_leakage);
	}
	return 0;
}

/*
 * Fill the SMC leakage LUT from the BIOS-provided per-voltage leakage
 * table instead of the driver model; leakage here is temperature-
 * independent, so each voltage column gets one value replicated across
 * all temperature rows (continues on the next source line).
 */
static int ni_init_simplified_leakage_table(struct radeon_device *rdev,
					    PP_NIslands_CACTABLES *cac_tables)
{
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	struct radeon_cac_leakage_table *leakage_table =
		&rdev->pm.dpm.dyn_state.cac_leakage_table;
	u32 i, j, table_size;
	u32 smc_leakage, max_leakage = 0;
	u32 scaling_factor;

	/* NOTE(review): leakage_table is the address of an embedded struct,
	 * so this NULL check can never trigger */
	if (!leakage_table)
		return -EINVAL;

	table_size = leakage_table->count;

	/* use the smaller of the voltage-table and leakage-table sizes */
	if (eg_pi->vddc_voltage_table.count != table_size)
		table_size = (eg_pi->vddc_voltage_table.count < leakage_table->count) ?
314157e252bfSMichael Neumann eg_pi->vddc_voltage_table.count : leakage_table->count; 314257e252bfSMichael Neumann 314357e252bfSMichael Neumann if (SMC_NISLANDS_LKGE_LUT_NUM_OF_VOLT_ENTRIES < table_size) 314457e252bfSMichael Neumann table_size = SMC_NISLANDS_LKGE_LUT_NUM_OF_VOLT_ENTRIES; 314557e252bfSMichael Neumann 314657e252bfSMichael Neumann if (table_size == 0) 314757e252bfSMichael Neumann return -EINVAL; 314857e252bfSMichael Neumann 314957e252bfSMichael Neumann scaling_factor = ni_get_smc_power_scaling_factor(rdev); 315057e252bfSMichael Neumann 315157e252bfSMichael Neumann for (j = 0; j < table_size; j++) { 315257e252bfSMichael Neumann smc_leakage = leakage_table->entries[j].leakage; 315357e252bfSMichael Neumann 315457e252bfSMichael Neumann if (smc_leakage > max_leakage) 315557e252bfSMichael Neumann max_leakage = smc_leakage; 315657e252bfSMichael Neumann 315757e252bfSMichael Neumann for (i = 0; i < SMC_NISLANDS_LKGE_LUT_NUM_OF_TEMP_ENTRIES; i++) 315857e252bfSMichael Neumann cac_tables->cac_lkge_lut[i][j] = 315957e252bfSMichael Neumann cpu_to_be32(ni_scale_power_for_smc(smc_leakage, scaling_factor)); 316057e252bfSMichael Neumann } 316157e252bfSMichael Neumann 316257e252bfSMichael Neumann for (j = table_size; j < SMC_NISLANDS_LKGE_LUT_NUM_OF_VOLT_ENTRIES; j++) { 316357e252bfSMichael Neumann for (i = 0; i < SMC_NISLANDS_LKGE_LUT_NUM_OF_TEMP_ENTRIES; i++) 316457e252bfSMichael Neumann cac_tables->cac_lkge_lut[i][j] = 316557e252bfSMichael Neumann cpu_to_be32(ni_scale_power_for_smc(max_leakage, scaling_factor)); 316657e252bfSMichael Neumann } 316757e252bfSMichael Neumann return 0; 316857e252bfSMichael Neumann } 316957e252bfSMichael Neumann 317057e252bfSMichael Neumann static int ni_initialize_smc_cac_tables(struct radeon_device *rdev) 317157e252bfSMichael Neumann { 317257e252bfSMichael Neumann struct rv7xx_power_info *pi = rv770_get_pi(rdev); 317357e252bfSMichael Neumann struct ni_power_info *ni_pi = ni_get_pi(rdev); 317457e252bfSMichael Neumann PP_NIslands_CACTABLES 
*cac_tables = NULL;
	int i, ret;
	u32 reg;

	/* nothing to do when CAC is disabled */
	if (ni_pi->enable_cac == false)
		return 0;

	cac_tables = kzalloc(sizeof(PP_NIslands_CACTABLES), GFP_KERNEL);
	if (!cac_tables)
		return -ENOMEM;

	/* program the CAC timer count/unit from the per-chip weight table */
	reg = RREG32(CG_CAC_CTRL) & ~(TID_CNT_MASK | TID_UNIT_MASK);
	reg |= (TID_CNT(ni_pi->cac_weights->tid_cnt) |
		TID_UNIT(ni_pi->cac_weights->tid_unit));
	WREG32(CG_CAC_CTRL, reg);

	/* cache DC CAC levels and fill the PCIe/BIF lookup table */
	for (i = 0; i < NISLANDS_DCCAC_MAX_LEVELS; i++)
		ni_pi->dc_cac_table[i] = ni_pi->cac_weights->dc_cac[i];

	for (i = 0; i < SMC_NISLANDS_BIF_LUT_NUM_OF_ENTRIES; i++)
		cac_tables->cac_bif_lut[i] = ni_pi->cac_weights->pcie_cac[i];

	/* seed the CAC input data from the weight table / pm state */
	ni_pi->cac_data.i_leakage = rdev->pm.dpm.cac_leakage;
	ni_pi->cac_data.pwr_const = 0;
	ni_pi->cac_data.dc_cac_value = ni_pi->dc_cac_table[NISLANDS_DCCAC_LEVEL_0];
	ni_pi->cac_data.bif_cac_value = 0;
	ni_pi->cac_data.mc_wr_weight = ni_pi->cac_weights->mc_write_weight;
	ni_pi->cac_data.mc_rd_weight = ni_pi->cac_weights->mc_read_weight;
	ni_pi->cac_data.allow_ovrflw = 0;
	ni_pi->cac_data.l2num_win_tdp = ni_pi->lta_window_size;
	ni_pi->cac_data.num_win_tdp = 0;
	ni_pi->cac_data.lts_truncate_n = ni_pi->lts_truncate;

	/* fill the leakage LUT with whichever model this board uses */
	if (ni_pi->driver_calculate_cac_leakage)
		ret = ni_init_driver_calculated_leakage_table(rdev, cac_tables);
	else
		ret = ni_init_simplified_leakage_table(rdev, cac_tables);

	if (ret)
		goto done_free;

	cac_tables->pwr_const = cpu_to_be32(ni_pi->cac_data.pwr_const);
	cac_tables->dc_cacValue = cpu_to_be32(ni_pi->cac_data.dc_cac_value);
	cac_tables->bif_cacValue = cpu_to_be32(ni_pi->cac_data.bif_cac_value);
	cac_tables->AllowOvrflw = ni_pi->cac_data.allow_ovrflw;
	cac_tables->MCWrWeight = ni_pi->cac_data.mc_wr_weight;
	cac_tables->MCRdWeight = ni_pi->cac_data.mc_rd_weight;
	cac_tables->numWin_TDP = ni_pi->cac_data.num_win_tdp;
	cac_tables->l2numWin_TDP = ni_pi->cac_data.l2num_win_tdp;
	cac_tables->lts_truncate_n = ni_pi->cac_data.lts_truncate_n;

	ret = rv770_copy_bytes_to_smc(rdev, ni_pi->cac_table_start, (u8 *)cac_tables,
				      sizeof(PP_NIslands_CACTABLES), pi->sram_end);

done_free:
	/* NOTE(review): failures are absorbed here -- CAC and power
	 * containment are disabled on error and 0 is returned regardless */
	if (ret) {
		ni_pi->enable_cac = false;
		ni_pi->enable_power_containment = false;
	}

	kfree(cac_tables);

	return 0;
}

static int
ni_initialize_hardware_cac_manager(struct radeon_device *rdev)
{
	/*
	 * Program the hardware CAC weight registers (CG_CAC_REGION_*) from
	 * the per-chip weight table.  Each register is read-modify-written:
	 * the relevant weight fields are masked out and replaced with the
	 * values from ni_pi->cac_weights.  No-op unless CAC is enabled and
	 * this chip requires explicit CAC configuration.
	 */
	struct ni_power_info *ni_pi = ni_get_pi(rdev);
	u32 reg;

	if (!ni_pi->enable_cac ||
	    !ni_pi->cac_configuration_required)
		return 0;

	if (ni_pi->cac_weights == NULL)
		return -EINVAL;

	/* region 1: TCP/TA weights */
	reg = RREG32_CG(CG_CAC_REGION_1_WEIGHT_0) & ~(WEIGHT_TCP_SIG0_MASK |
						      WEIGHT_TCP_SIG1_MASK |
						      WEIGHT_TA_SIG_MASK);
	reg |= (WEIGHT_TCP_SIG0(ni_pi->cac_weights->weight_tcp_sig0) |
		WEIGHT_TCP_SIG1(ni_pi->cac_weights->weight_tcp_sig1) |
		WEIGHT_TA_SIG(ni_pi->cac_weights->weight_ta_sig));
	WREG32_CG(CG_CAC_REGION_1_WEIGHT_0, reg);

	reg = RREG32_CG(CG_CAC_REGION_1_WEIGHT_1) & ~(WEIGHT_TCC_EN0_MASK |
						      WEIGHT_TCC_EN1_MASK |
						      WEIGHT_TCC_EN2_MASK);
	reg |= (WEIGHT_TCC_EN0(ni_pi->cac_weights->weight_tcc_en0) |
		WEIGHT_TCC_EN1(ni_pi->cac_weights->weight_tcc_en1) |
		WEIGHT_TCC_EN2(ni_pi->cac_weights->weight_tcc_en2));
	WREG32_CG(CG_CAC_REGION_1_WEIGHT_1, reg);

	/* region 2: CB/DB/SX weights */
	reg = RREG32_CG(CG_CAC_REGION_2_WEIGHT_0) & ~(WEIGHT_CB_EN0_MASK |
						      WEIGHT_CB_EN1_MASK |
						      WEIGHT_CB_EN2_MASK |
						      WEIGHT_CB_EN3_MASK);
	reg |= (WEIGHT_CB_EN0(ni_pi->cac_weights->weight_cb_en0) |
		WEIGHT_CB_EN1(ni_pi->cac_weights->weight_cb_en1) |
		WEIGHT_CB_EN2(ni_pi->cac_weights->weight_cb_en2) |
		WEIGHT_CB_EN3(ni_pi->cac_weights->weight_cb_en3));
	WREG32_CG(CG_CAC_REGION_2_WEIGHT_0, reg);

	reg = RREG32_CG(CG_CAC_REGION_2_WEIGHT_1) & ~(WEIGHT_DB_SIG0_MASK |
						      WEIGHT_DB_SIG1_MASK |
						      WEIGHT_DB_SIG2_MASK |
						      WEIGHT_DB_SIG3_MASK);
	reg |= (WEIGHT_DB_SIG0(ni_pi->cac_weights->weight_db_sig0) |
		WEIGHT_DB_SIG1(ni_pi->cac_weights->weight_db_sig1) |
		WEIGHT_DB_SIG2(ni_pi->cac_weights->weight_db_sig2) |
		WEIGHT_DB_SIG3(ni_pi->cac_weights->weight_db_sig3));
	WREG32_CG(CG_CAC_REGION_2_WEIGHT_1, reg);

	reg = RREG32_CG(CG_CAC_REGION_2_WEIGHT_2) & ~(WEIGHT_SXM_SIG0_MASK |
						      WEIGHT_SXM_SIG1_MASK |
						      WEIGHT_SXM_SIG2_MASK |
						      WEIGHT_SXS_SIG0_MASK |
						      WEIGHT_SXS_SIG1_MASK);
	reg |= (WEIGHT_SXM_SIG0(ni_pi->cac_weights->weight_sxm_sig0) |
		WEIGHT_SXM_SIG1(ni_pi->cac_weights->weight_sxm_sig1) |
		WEIGHT_SXM_SIG2(ni_pi->cac_weights->weight_sxm_sig2) |
		WEIGHT_SXS_SIG0(ni_pi->cac_weights->weight_sxs_sig0) |
		WEIGHT_SXS_SIG1(ni_pi->cac_weights->weight_sxs_sig1));
	WREG32_CG(CG_CAC_REGION_2_WEIGHT_2, reg);

	/* region 3: crossbar/SPI weights */
	reg = RREG32_CG(CG_CAC_REGION_3_WEIGHT_0) & ~(WEIGHT_XBR_0_MASK |
						      WEIGHT_XBR_1_MASK |
						      WEIGHT_XBR_2_MASK |
						      WEIGHT_SPI_SIG0_MASK);
	reg |= (WEIGHT_XBR_0(ni_pi->cac_weights->weight_xbr_0) |
		WEIGHT_XBR_1(ni_pi->cac_weights->weight_xbr_1) |
		WEIGHT_XBR_2(ni_pi->cac_weights->weight_xbr_2) |
		WEIGHT_SPI_SIG0(ni_pi->cac_weights->weight_spi_sig0));
	WREG32_CG(CG_CAC_REGION_3_WEIGHT_0, reg);

	reg = RREG32_CG(CG_CAC_REGION_3_WEIGHT_1) & ~(WEIGHT_SPI_SIG1_MASK |
						      WEIGHT_SPI_SIG2_MASK |
						      WEIGHT_SPI_SIG3_MASK |
						      WEIGHT_SPI_SIG4_MASK |
						      WEIGHT_SPI_SIG5_MASK);
	reg |= (WEIGHT_SPI_SIG1(ni_pi->cac_weights->weight_spi_sig1) |
		WEIGHT_SPI_SIG2(ni_pi->cac_weights->weight_spi_sig2) |
		WEIGHT_SPI_SIG3(ni_pi->cac_weights->weight_spi_sig3) |
		WEIGHT_SPI_SIG4(ni_pi->cac_weights->weight_spi_sig4) |
		WEIGHT_SPI_SIG5(ni_pi->cac_weights->weight_spi_sig5));
	WREG32_CG(CG_CAC_REGION_3_WEIGHT_1, reg);

	/* region 4: LDS/SC/BIF/CP/PA/VGT/DC/UVD/spare weights */
	reg = RREG32_CG(CG_CAC_REGION_4_WEIGHT_0) & ~(WEIGHT_LDS_SIG0_MASK |
						      WEIGHT_LDS_SIG1_MASK |
						      WEIGHT_SC_MASK);
	reg |= (WEIGHT_LDS_SIG0(ni_pi->cac_weights->weight_lds_sig0) |
		WEIGHT_LDS_SIG1(ni_pi->cac_weights->weight_lds_sig1) |
		WEIGHT_SC(ni_pi->cac_weights->weight_sc));
	WREG32_CG(CG_CAC_REGION_4_WEIGHT_0, reg);

	reg = RREG32_CG(CG_CAC_REGION_4_WEIGHT_1) & ~(WEIGHT_BIF_MASK |
						      WEIGHT_CP_MASK |
						      WEIGHT_PA_SIG0_MASK |
						      WEIGHT_PA_SIG1_MASK |
						      WEIGHT_VGT_SIG0_MASK);
	reg |= (WEIGHT_BIF(ni_pi->cac_weights->weight_bif) |
		WEIGHT_CP(ni_pi->cac_weights->weight_cp) |
		WEIGHT_PA_SIG0(ni_pi->cac_weights->weight_pa_sig0) |
		WEIGHT_PA_SIG1(ni_pi->cac_weights->weight_pa_sig1) |
		WEIGHT_VGT_SIG0(ni_pi->cac_weights->weight_vgt_sig0));
	WREG32_CG(CG_CAC_REGION_4_WEIGHT_1, reg);

	reg = RREG32_CG(CG_CAC_REGION_4_WEIGHT_2) & ~(WEIGHT_VGT_SIG1_MASK |
						      WEIGHT_VGT_SIG2_MASK |
						      WEIGHT_DC_SIG0_MASK |
						      WEIGHT_DC_SIG1_MASK |
						      WEIGHT_DC_SIG2_MASK);
	reg |= (WEIGHT_VGT_SIG1(ni_pi->cac_weights->weight_vgt_sig1) |
		WEIGHT_VGT_SIG2(ni_pi->cac_weights->weight_vgt_sig2) |
		WEIGHT_DC_SIG0(ni_pi->cac_weights->weight_dc_sig0) |
		WEIGHT_DC_SIG1(ni_pi->cac_weights->weight_dc_sig1) |
		WEIGHT_DC_SIG2(ni_pi->cac_weights->weight_dc_sig2));
	WREG32_CG(CG_CAC_REGION_4_WEIGHT_2, reg);

	reg = RREG32_CG(CG_CAC_REGION_4_WEIGHT_3) & ~(WEIGHT_DC_SIG3_MASK |
						      WEIGHT_UVD_SIG0_MASK |
						      WEIGHT_UVD_SIG1_MASK |
						      WEIGHT_SPARE0_MASK |
						      WEIGHT_SPARE1_MASK);
	reg |= (WEIGHT_DC_SIG3(ni_pi->cac_weights->weight_dc_sig3) |
		WEIGHT_UVD_SIG0(ni_pi->cac_weights->weight_uvd_sig0) |
		WEIGHT_UVD_SIG1(ni_pi->cac_weights->weight_uvd_sig1) |
		WEIGHT_SPARE0(ni_pi->cac_weights->weight_spare0) |
		WEIGHT_SPARE1(ni_pi->cac_weights->weight_spare1));
	WREG32_CG(CG_CAC_REGION_4_WEIGHT_3, reg);

	/* region 5: SQ weights */
	reg = RREG32_CG(CG_CAC_REGION_5_WEIGHT_0) & ~(WEIGHT_SQ_VSP_MASK |
						      WEIGHT_SQ_VSP0_MASK);
	reg |= (WEIGHT_SQ_VSP(ni_pi->cac_weights->weight_sq_vsp) |
		WEIGHT_SQ_VSP0(ni_pi->cac_weights->weight_sq_vsp0));
	WREG32_CG(CG_CAC_REGION_5_WEIGHT_0, reg);

	reg = RREG32_CG(CG_CAC_REGION_5_WEIGHT_1) & ~(WEIGHT_SQ_GPR_MASK);
	reg |= WEIGHT_SQ_GPR(ni_pi->cac_weights->weight_sq_gpr);
	WREG32_CG(CG_CAC_REGION_5_WEIGHT_1, reg);

	/* region 4 spare-signal overrides */
	reg = RREG32_CG(CG_CAC_REGION_4_OVERRIDE_4) & ~(OVR_MODE_SPARE_0_MASK |
							OVR_VAL_SPARE_0_MASK |
							OVR_MODE_SPARE_1_MASK |
							OVR_VAL_SPARE_1_MASK);
	reg |= (OVR_MODE_SPARE_0(ni_pi->cac_weights->ovr_mode_spare_0) |
		OVR_VAL_SPARE_0(ni_pi->cac_weights->ovr_val_spare_0) |
		OVR_MODE_SPARE_1(ni_pi->cac_weights->ovr_mode_spare_1) |
		OVR_VAL_SPARE_1(ni_pi->cac_weights->ovr_val_spare_1));
	WREG32_CG(CG_CAC_REGION_4_OVERRIDE_4, reg);

	/* SQ CAC thresholds (continues on the next source line) */
	reg = RREG32(SQ_CAC_THRESHOLD) & ~(VSP_MASK |
					   VSP0_MASK |
					   GPR_MASK);
	/* tail of ni_initialize_hardware_cac_manager(): SQ thresholds and
	 * MC read/write weights */
	reg |= (VSP(ni_pi->cac_weights->vsp) |
		VSP0(ni_pi->cac_weights->vsp0) |
		GPR(ni_pi->cac_weights->gpr));
	WREG32(SQ_CAC_THRESHOLD, reg);

	/* enable writes to all four MC data ports at the given index */
	reg = (MCDW_WR_ENABLE |
	       MCDX_WR_ENABLE |
	       MCDY_WR_ENABLE |
	       MCDZ_WR_ENABLE |
	       INDEX(0x09D4));
	WREG32(MC_CG_CONFIG, reg);

	reg = (READ_WEIGHT(ni_pi->cac_weights->mc_read_weight) |
	       WRITE_WEIGHT(ni_pi->cac_weights->mc_write_weight) |
	       ALLOW_OVERFLOW);
	WREG32(MC_CG_DATAPORT, reg);

	return 0;
}

/*
 * Enable or disable CAC collection in the SMC via SMC messages.
 * On enable (non-UVD states only): request power-correlation collection,
 * optionally enable long-term averaging, then enable CAC.  On disable:
 * tear down in the opposite order.  Returns -EINVAL only if the EnableCac
 * message is rejected; other message failures just clear feature flags.
 */
static int ni_enable_smc_cac(struct radeon_device *rdev,
			     struct radeon_ps *radeon_new_state,
			     bool enable)
{
	struct ni_power_info *ni_pi = ni_get_pi(rdev);
	int ret = 0;
	PPSMC_Result smc_result;

	if (ni_pi->enable_cac) {
		if (enable) {
			if (!r600_is_uvd_state(radeon_new_state->class, radeon_new_state->class2)) {
				/* result of the correlation request is not checked */
				smc_result = rv770_send_msg_to_smc(rdev, PPSMC_MSG_CollectCAC_PowerCorreln);

				if (ni_pi->support_cac_long_term_average) {
					smc_result = rv770_send_msg_to_smc(rdev, PPSMC_CACLongTermAvgEnable);
					if (PPSMC_Result_OK != smc_result)
						ni_pi->support_cac_long_term_average = false;
				}

				smc_result = rv770_send_msg_to_smc(rdev, PPSMC_MSG_EnableCac);
				if (PPSMC_Result_OK != smc_result)
					ret = -EINVAL;

				ni_pi->cac_enabled = (PPSMC_Result_OK == smc_result) ? true : false;
			}
		} else if (ni_pi->cac_enabled) {
			smc_result = rv770_send_msg_to_smc(rdev, PPSMC_MSG_DisableCac);

			ni_pi->cac_enabled = false;

			if (ni_pi->support_cac_long_term_average) {
				smc_result = rv770_send_msg_to_smc(rdev, PPSMC_CACLongTermAvgDisable);
				if (PPSMC_Result_OK != smc_result)
					ni_pi->support_cac_long_term_average = false;
			}
		}
	}

	return ret;
}

/*
 * Forward a PCIe performance (link speed) request to the platform via
 * ACPI, tracking registration state; compiled out without CONFIG_ACPI
 * (continues on the next source line).
 */
static int ni_pcie_performance_request(struct radeon_device *rdev,
				       u8 perf_req, bool advertise)
{
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);

#if defined(CONFIG_ACPI)
	if ((perf_req == PCIE_PERF_REQ_PECI_GEN1) ||
	    (perf_req == PCIE_PERF_REQ_PECI_GEN2)) {
		if (eg_pi->pcie_performance_request_registered == false)
			radeon_acpi_pcie_notify_device_ready(rdev);

eg_pi->pcie_performance_request_registered = true; 346057e252bfSMichael Neumann return radeon_acpi_pcie_performance_request(rdev, perf_req, advertise); 346157e252bfSMichael Neumann } else if ((perf_req == PCIE_PERF_REQ_REMOVE_REGISTRY) && 346257e252bfSMichael Neumann eg_pi->pcie_performance_request_registered) { 346357e252bfSMichael Neumann eg_pi->pcie_performance_request_registered = false; 346457e252bfSMichael Neumann return radeon_acpi_pcie_performance_request(rdev, perf_req, advertise); 346557e252bfSMichael Neumann } 346657e252bfSMichael Neumann #endif 346757e252bfSMichael Neumann return 0; 346857e252bfSMichael Neumann } 346957e252bfSMichael Neumann 347057e252bfSMichael Neumann static int ni_advertise_gen2_capability(struct radeon_device *rdev) 347157e252bfSMichael Neumann { 347257e252bfSMichael Neumann struct rv7xx_power_info *pi = rv770_get_pi(rdev); 347357e252bfSMichael Neumann u32 tmp; 347457e252bfSMichael Neumann 347557e252bfSMichael Neumann tmp = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL); 347657e252bfSMichael Neumann 347757e252bfSMichael Neumann if ((tmp & LC_OTHER_SIDE_EVER_SENT_GEN2) && 347857e252bfSMichael Neumann (tmp & LC_OTHER_SIDE_SUPPORTS_GEN2)) 347957e252bfSMichael Neumann pi->pcie_gen2 = true; 348057e252bfSMichael Neumann else 348157e252bfSMichael Neumann pi->pcie_gen2 = false; 348257e252bfSMichael Neumann 348357e252bfSMichael Neumann if (!pi->pcie_gen2) 348457e252bfSMichael Neumann ni_pcie_performance_request(rdev, PCIE_PERF_REQ_PECI_GEN2, true); 348557e252bfSMichael Neumann 348657e252bfSMichael Neumann return 0; 348757e252bfSMichael Neumann } 348857e252bfSMichael Neumann 348957e252bfSMichael Neumann static void ni_enable_bif_dynamic_pcie_gen2(struct radeon_device *rdev, 349057e252bfSMichael Neumann bool enable) 349157e252bfSMichael Neumann { 349257e252bfSMichael Neumann struct rv7xx_power_info *pi = rv770_get_pi(rdev); 349357e252bfSMichael Neumann u32 tmp, bif; 349457e252bfSMichael Neumann 349557e252bfSMichael Neumann tmp = 
RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL); 349657e252bfSMichael Neumann 349757e252bfSMichael Neumann if ((tmp & LC_OTHER_SIDE_EVER_SENT_GEN2) && 349857e252bfSMichael Neumann (tmp & LC_OTHER_SIDE_SUPPORTS_GEN2)) { 349957e252bfSMichael Neumann if (enable) { 350057e252bfSMichael Neumann if (!pi->boot_in_gen2) { 350157e252bfSMichael Neumann bif = RREG32(CG_BIF_REQ_AND_RSP) & ~CG_CLIENT_REQ_MASK; 350257e252bfSMichael Neumann bif |= CG_CLIENT_REQ(0xd); 350357e252bfSMichael Neumann WREG32(CG_BIF_REQ_AND_RSP, bif); 350457e252bfSMichael Neumann } 350557e252bfSMichael Neumann tmp &= ~LC_HW_VOLTAGE_IF_CONTROL_MASK; 350657e252bfSMichael Neumann tmp |= LC_HW_VOLTAGE_IF_CONTROL(1); 350757e252bfSMichael Neumann tmp |= LC_GEN2_EN_STRAP; 350857e252bfSMichael Neumann 350957e252bfSMichael Neumann tmp |= LC_CLR_FAILED_SPD_CHANGE_CNT; 351057e252bfSMichael Neumann WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, tmp); 3511c4ef309bSzrj udelay(10); 351257e252bfSMichael Neumann tmp &= ~LC_CLR_FAILED_SPD_CHANGE_CNT; 351357e252bfSMichael Neumann WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, tmp); 351457e252bfSMichael Neumann } else { 351557e252bfSMichael Neumann if (!pi->boot_in_gen2) { 351657e252bfSMichael Neumann bif = RREG32(CG_BIF_REQ_AND_RSP) & ~CG_CLIENT_REQ_MASK; 351757e252bfSMichael Neumann bif |= CG_CLIENT_REQ(0xd); 351857e252bfSMichael Neumann WREG32(CG_BIF_REQ_AND_RSP, bif); 351957e252bfSMichael Neumann 352057e252bfSMichael Neumann tmp &= ~LC_HW_VOLTAGE_IF_CONTROL_MASK; 352157e252bfSMichael Neumann tmp &= ~LC_GEN2_EN_STRAP; 352257e252bfSMichael Neumann } 352357e252bfSMichael Neumann WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, tmp); 352457e252bfSMichael Neumann } 352557e252bfSMichael Neumann } 352657e252bfSMichael Neumann } 352757e252bfSMichael Neumann 352857e252bfSMichael Neumann static void ni_enable_dynamic_pcie_gen2(struct radeon_device *rdev, 352957e252bfSMichael Neumann bool enable) 353057e252bfSMichael Neumann { 353157e252bfSMichael Neumann ni_enable_bif_dynamic_pcie_gen2(rdev, enable); 353257e252bfSMichael 
Neumann 353357e252bfSMichael Neumann if (enable) 353457e252bfSMichael Neumann WREG32_P(GENERAL_PWRMGT, ENABLE_GEN2PCIE, ~ENABLE_GEN2PCIE); 353557e252bfSMichael Neumann else 353657e252bfSMichael Neumann WREG32_P(GENERAL_PWRMGT, 0, ~ENABLE_GEN2PCIE); 353757e252bfSMichael Neumann } 353857e252bfSMichael Neumann 353957e252bfSMichael Neumann void ni_set_uvd_clock_before_set_eng_clock(struct radeon_device *rdev, 354057e252bfSMichael Neumann struct radeon_ps *new_ps, 354157e252bfSMichael Neumann struct radeon_ps *old_ps) 354257e252bfSMichael Neumann { 354357e252bfSMichael Neumann struct ni_ps *new_state = ni_get_ps(new_ps); 354457e252bfSMichael Neumann struct ni_ps *current_state = ni_get_ps(old_ps); 354557e252bfSMichael Neumann 354657e252bfSMichael Neumann if ((new_ps->vclk == old_ps->vclk) && 354757e252bfSMichael Neumann (new_ps->dclk == old_ps->dclk)) 354857e252bfSMichael Neumann return; 354957e252bfSMichael Neumann 355057e252bfSMichael Neumann if (new_state->performance_levels[new_state->performance_level_count - 1].sclk >= 355157e252bfSMichael Neumann current_state->performance_levels[current_state->performance_level_count - 1].sclk) 355257e252bfSMichael Neumann return; 355357e252bfSMichael Neumann 355457e252bfSMichael Neumann radeon_set_uvd_clocks(rdev, new_ps->vclk, new_ps->dclk); 355557e252bfSMichael Neumann } 355657e252bfSMichael Neumann 355757e252bfSMichael Neumann void ni_set_uvd_clock_after_set_eng_clock(struct radeon_device *rdev, 355857e252bfSMichael Neumann struct radeon_ps *new_ps, 355957e252bfSMichael Neumann struct radeon_ps *old_ps) 356057e252bfSMichael Neumann { 356157e252bfSMichael Neumann struct ni_ps *new_state = ni_get_ps(new_ps); 356257e252bfSMichael Neumann struct ni_ps *current_state = ni_get_ps(old_ps); 356357e252bfSMichael Neumann 356457e252bfSMichael Neumann if ((new_ps->vclk == old_ps->vclk) && 356557e252bfSMichael Neumann (new_ps->dclk == old_ps->dclk)) 356657e252bfSMichael Neumann return; 356757e252bfSMichael Neumann 356857e252bfSMichael 
Neumann if (new_state->performance_levels[new_state->performance_level_count - 1].sclk < 356957e252bfSMichael Neumann current_state->performance_levels[current_state->performance_level_count - 1].sclk) 357057e252bfSMichael Neumann return; 357157e252bfSMichael Neumann 357257e252bfSMichael Neumann radeon_set_uvd_clocks(rdev, new_ps->vclk, new_ps->dclk); 357357e252bfSMichael Neumann } 357457e252bfSMichael Neumann 357557e252bfSMichael Neumann void ni_dpm_setup_asic(struct radeon_device *rdev) 357657e252bfSMichael Neumann { 357757e252bfSMichael Neumann struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); 357857e252bfSMichael Neumann 357957e252bfSMichael Neumann ni_read_clock_registers(rdev); 358057e252bfSMichael Neumann btc_read_arb_registers(rdev); 358157e252bfSMichael Neumann rv770_get_memory_type(rdev); 358257e252bfSMichael Neumann if (eg_pi->pcie_performance_request) 358357e252bfSMichael Neumann ni_advertise_gen2_capability(rdev); 358457e252bfSMichael Neumann rv770_get_pcie_gen2_status(rdev); 358557e252bfSMichael Neumann rv770_enable_acpi_pm(rdev); 358657e252bfSMichael Neumann } 358757e252bfSMichael Neumann 358857e252bfSMichael Neumann void ni_update_current_ps(struct radeon_device *rdev, 358957e252bfSMichael Neumann struct radeon_ps *rps) 359057e252bfSMichael Neumann { 359157e252bfSMichael Neumann struct ni_ps *new_ps = ni_get_ps(rps); 359257e252bfSMichael Neumann struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); 359357e252bfSMichael Neumann struct ni_power_info *ni_pi = ni_get_pi(rdev); 359457e252bfSMichael Neumann 359557e252bfSMichael Neumann eg_pi->current_rps = *rps; 359657e252bfSMichael Neumann ni_pi->current_ps = *new_ps; 359757e252bfSMichael Neumann eg_pi->current_rps.ps_priv = &ni_pi->current_ps; 359857e252bfSMichael Neumann } 359957e252bfSMichael Neumann 360057e252bfSMichael Neumann void ni_update_requested_ps(struct radeon_device *rdev, 360157e252bfSMichael Neumann struct radeon_ps *rps) 360257e252bfSMichael Neumann { 
/*
 * Bring up dynamic power management on Cayman/NI parts.
 *
 * Performs the full enable sequence: clock-gating defaults, voltage
 * tables, spread spectrum, SMC firmware upload and table initialization,
 * CAC setup, and finally starts the SMC and dpm using the boot state.
 * The statement order mirrors the hardware programming sequence and must
 * not be rearranged.
 *
 * Returns 0 on success, -EINVAL if dpm is already enabled, or the error
 * from the first failing initialization step.
 */
int ni_dpm_enable(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	struct radeon_ps *boot_ps = rdev->pm.dpm.boot_ps;
	int ret;

	/* program clock-gating defaults before anything else */
	if (pi->gfx_clock_gating)
		ni_cg_clockgating_default(rdev);
	/* refuse to double-enable dpm */
	if (btc_dpm_enabled(rdev))
		return -EINVAL;
	if (pi->mg_clock_gating)
		ni_mg_clockgating_default(rdev);
	if (eg_pi->ls_clock_gating)
		ni_ls_clockgating_default(rdev);
	if (pi->voltage_control) {
		rv770_enable_voltage_control(rdev, true);
		ret = cypress_construct_voltage_tables(rdev);
		if (ret) {
			DRM_ERROR("cypress_construct_voltage_tables failed\n");
			return ret;
		}
	}
	/* dynamic AC timing is optional: degrade gracefully on failure */
	if (eg_pi->dynamic_ac_timing) {
		ret = ni_initialize_mc_reg_table(rdev);
		if (ret)
			eg_pi->dynamic_ac_timing = false;
	}
	if (pi->dynamic_ss)
		cypress_enable_spread_spectrum(rdev, true);
	if (pi->thermal_protection)
		rv770_enable_thermal_protection(rdev, true);
	/* program the various state-transition timing parameters */
	rv770_setup_bsp(rdev);
	rv770_program_git(rdev);
	rv770_program_tp(rdev);
	rv770_program_tpp(rdev);
	rv770_program_sstp(rdev);
	cypress_enable_display_gap(rdev);
	rv770_program_vc(rdev);
	if (pi->dynamic_pcie_gen2)
		ni_enable_dynamic_pcie_gen2(rdev, true);
	/* upload and configure the SMC before starting it */
	ret = rv770_upload_firmware(rdev);
	if (ret) {
		DRM_ERROR("rv770_upload_firmware failed\n");
		return ret;
	}
	ret = ni_process_firmware_header(rdev);
	if (ret) {
		DRM_ERROR("ni_process_firmware_header failed\n");
		return ret;
	}
	ret = ni_initial_switch_from_arb_f0_to_f1(rdev);
	if (ret) {
		DRM_ERROR("ni_initial_switch_from_arb_f0_to_f1 failed\n");
		return ret;
	}
	ret = ni_init_smc_table(rdev);
	if (ret) {
		DRM_ERROR("ni_init_smc_table failed\n");
		return ret;
	}
	ret = ni_init_smc_spll_table(rdev);
	if (ret) {
		DRM_ERROR("ni_init_smc_spll_table failed\n");
		return ret;
	}
	ret = ni_init_arb_table_index(rdev);
	if (ret) {
		DRM_ERROR("ni_init_arb_table_index failed\n");
		return ret;
	}
	if (eg_pi->dynamic_ac_timing) {
		ret = ni_populate_mc_reg_table(rdev, boot_ps);
		if (ret) {
			DRM_ERROR("ni_populate_mc_reg_table failed\n");
			return ret;
		}
	}
	/* CAC (power containment) setup */
	ret = ni_initialize_smc_cac_tables(rdev);
	if (ret) {
		DRM_ERROR("ni_initialize_smc_cac_tables failed\n");
		return ret;
	}
	ret = ni_initialize_hardware_cac_manager(rdev);
	if (ret) {
		DRM_ERROR("ni_initialize_hardware_cac_manager failed\n");
		return ret;
	}
	ret = ni_populate_smc_tdp_limits(rdev, boot_ps);
	if (ret) {
		DRM_ERROR("ni_populate_smc_tdp_limits failed\n");
		return ret;
	}
	ni_program_response_times(rdev);
	/* start the SMC, then hand over clock control */
	r7xx_start_smc(rdev);
	ret = cypress_notify_smc_display_change(rdev, false);
	if (ret) {
		DRM_ERROR("cypress_notify_smc_display_change failed\n");
		return ret;
	}
	cypress_enable_sclk_control(rdev, true);
	if (eg_pi->memory_transition)
		cypress_enable_mclk_control(rdev, true);
	cypress_start_dpm(rdev);
	if (pi->gfx_clock_gating)
		ni_gfx_clockgating_enable(rdev, true);
	if (pi->mg_clock_gating)
		ni_mg_clockgating_enable(rdev, true);
	if (eg_pi->ls_clock_gating)
		ni_ls_clockgating_enable(rdev, true);

	/* hook up the thermal interrupt if an internal sensor is present */
	if (rdev->irq.installed &&
	    r600_is_internal_thermal_sensor(rdev->pm.int_thermal_type)) {
		PPSMC_Result result;

		ret = rv770_set_thermal_temperature_range(rdev, R600_TEMP_RANGE_MIN, 0xff * 1000);
		if (ret)
			return ret;
		rdev->irq.dpm_thermal = true;
		radeon_irq_set(rdev);
		result = rv770_send_msg_to_smc(rdev, PPSMC_MSG_EnableThermalInterrupt);

		/* non-fatal: dpm works without the thermal interrupt */
		if (result != PPSMC_Result_OK)
			DRM_DEBUG_KMS("Could not enable thermal interrupts.\n");
	}

	rv770_enable_auto_throttle_source(rdev, RADEON_DPM_AUTO_THROTTLE_SRC_THERMAL, true);

	ni_update_current_ps(rdev, boot_ps);

	return 0;
}
/*
 * Tear down dynamic power management: mirror of ni_dpm_enable().
 *
 * Disables power containment, CAC, spread spectrum and auto-throttling,
 * unhooks the thermal interrupt, stops dpm and the SMC, resets the MC
 * arbiter back to F0, and records the boot state as current.  A no-op
 * when dpm is not currently enabled.
 */
void ni_dpm_disable(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	struct radeon_ps *boot_ps = rdev->pm.dpm.boot_ps;

	if (!btc_dpm_enabled(rdev))
		return;
	rv770_clear_vc(rdev);
	if (pi->thermal_protection)
		rv770_enable_thermal_protection(rdev, false);
	ni_enable_power_containment(rdev, boot_ps, false);
	ni_enable_smc_cac(rdev, boot_ps, false);
	cypress_enable_spread_spectrum(rdev, false);
	rv770_enable_auto_throttle_source(rdev, RADEON_DPM_AUTO_THROTTLE_SRC_THERMAL, false);
	if (pi->dynamic_pcie_gen2)
		ni_enable_dynamic_pcie_gen2(rdev, false);

	/* unhook the thermal interrupt before stopping dpm */
	if (rdev->irq.installed &&
	    r600_is_internal_thermal_sensor(rdev->pm.int_thermal_type)) {
		rdev->irq.dpm_thermal = false;
		radeon_irq_set(rdev);
	}

	if (pi->gfx_clock_gating)
		ni_gfx_clockgating_enable(rdev, false);
	if (pi->mg_clock_gating)
		ni_mg_clockgating_enable(rdev, false);
	if (eg_pi->ls_clock_gating)
		ni_ls_clockgating_enable(rdev, false);
	ni_stop_dpm(rdev);
	btc_reset_to_default(rdev);
	ni_stop_smc(rdev);
	ni_force_switch_to_arb_f0(rdev);

	ni_update_current_ps(rdev, boot_ps);
}
/*
 * Re-send the TDP limits for the requested state to the SMC.
 *
 * The SMC must be halted while the limits are rewritten, then resumed
 * and told to apply the software state.  Returns 0 on success or the
 * error of the first failing step.
 */
static int ni_power_control_set_level(struct radeon_device *rdev)
{
	struct radeon_ps *new_ps = rdev->pm.dpm.requested_ps;
	int ret;

	ret = ni_restrict_performance_levels_before_switch(rdev);
	if (ret)
		return ret;
	ret = rv770_halt_smc(rdev);
	if (ret)
		return ret;
	ret = ni_populate_smc_tdp_limits(rdev, new_ps);
	if (ret)
		return ret;
	ret = rv770_resume_smc(rdev);
	if (ret)
		return ret;
	ret = rv770_set_sw_state(rdev);
	if (ret)
		return ret;

	return 0;
}

/*
 * Stage the requested power state: copy it into the driver-private
 * requested slot and apply the NI state-adjustment rules to the copy.
 * Always succeeds (returns 0).
 */
int ni_dpm_pre_set_power_state(struct radeon_device *rdev)
{
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	/* local copy so adjustment rules never touch the original table entry */
	struct radeon_ps requested_ps = *rdev->pm.dpm.requested_ps;
	struct radeon_ps *new_ps = &requested_ps;

	ni_update_requested_ps(rdev, new_ps);

	ni_apply_state_adjust_rules(rdev, &eg_pi->requested_rps);

	return 0;
}

/*
 * Switch the hardware from the current power state to the requested one.
 *
 * Sequence: restrict performance levels, pre-adjust UVD clocks, drop
 * power containment/CAC, halt the SMC, upload the new software state
 * (and MC register/memory timing tables), resume the SMC, apply the
 * state, post-adjust UVD clocks, re-enable CAC and power containment,
 * and finally update the TDP limits.  The SMC halt/resume bracketing is
 * order-critical.  Returns 0 on success or the first failing step's
 * error (logged via DRM_ERROR).
 */
int ni_dpm_set_power_state(struct radeon_device *rdev)
{
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	struct radeon_ps *new_ps = &eg_pi->requested_rps;
	struct radeon_ps *old_ps = &eg_pi->current_rps;
	int ret;

	ret = ni_restrict_performance_levels_before_switch(rdev);
	if (ret) {
		DRM_ERROR("ni_restrict_performance_levels_before_switch failed\n");
		return ret;
	}
	ni_set_uvd_clock_before_set_eng_clock(rdev, new_ps, old_ps);
	/* disable containment/CAC across the transition */
	ret = ni_enable_power_containment(rdev, new_ps, false);
	if (ret) {
		DRM_ERROR("ni_enable_power_containment failed\n");
		return ret;
	}
	ret = ni_enable_smc_cac(rdev, new_ps, false);
	if (ret) {
		DRM_ERROR("ni_enable_smc_cac failed\n");
		return ret;
	}
	ret = rv770_halt_smc(rdev);
	if (ret) {
		DRM_ERROR("rv770_halt_smc failed\n");
		return ret;
	}
	if (eg_pi->smu_uvd_hs)
		btc_notify_uvd_to_smc(rdev, new_ps);
	ret = ni_upload_sw_state(rdev, new_ps);
	if (ret) {
		DRM_ERROR("ni_upload_sw_state failed\n");
		return ret;
	}
	if (eg_pi->dynamic_ac_timing) {
		ret = ni_upload_mc_reg_table(rdev, new_ps);
		if (ret) {
			DRM_ERROR("ni_upload_mc_reg_table failed\n");
			return ret;
		}
	}
	ret = ni_program_memory_timing_parameters(rdev, new_ps);
	if (ret) {
		DRM_ERROR("ni_program_memory_timing_parameters failed\n");
		return ret;
	}
	ret = rv770_resume_smc(rdev);
	if (ret) {
		DRM_ERROR("rv770_resume_smc failed\n");
		return ret;
	}
	ret = rv770_set_sw_state(rdev);
	if (ret) {
		DRM_ERROR("rv770_set_sw_state failed\n");
		return ret;
	}
	ni_set_uvd_clock_after_set_eng_clock(rdev, new_ps, old_ps);
	/* re-enable CAC and containment for the new state */
	ret = ni_enable_smc_cac(rdev, new_ps, true);
	if (ret) {
		DRM_ERROR("ni_enable_smc_cac failed\n");
		return ret;
	}
	ret = ni_enable_power_containment(rdev, new_ps, true);
	if (ret) {
		DRM_ERROR("ni_enable_power_containment failed\n");
		return ret;
	}

	/* update tdp */
	ret = ni_power_control_set_level(rdev);
	if (ret) {
		DRM_ERROR("ni_power_control_set_level failed\n");
		return ret;
	}

	return 0;
}
/* After a successful state switch, promote the requested state to current. */
void ni_dpm_post_set_power_state(struct radeon_device *rdev)
{
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	struct radeon_ps *new_ps = &eg_pi->requested_rps;

	ni_update_current_ps(rdev, new_ps);
}

/* Drop to restricted performance levels and force the boot state
 * (used when resetting the ASIC). */
void ni_dpm_reset_asic(struct radeon_device *rdev)
{
	ni_restrict_performance_levels_before_switch(rdev);
	rv770_set_boot_state(rdev);
}

/* Overlay of all ATOM PowerPlay table revisions found in the VBIOS;
 * the actual layout is selected by the table's revision fields. */
union power_info {
	struct _ATOM_POWERPLAY_INFO info;
	struct _ATOM_POWERPLAY_INFO_V2 info_2;
	struct _ATOM_POWERPLAY_INFO_V3 info_3;
	struct _ATOM_PPLIB_POWERPLAYTABLE pplib;
	struct _ATOM_PPLIB_POWERPLAYTABLE2 pplib2;
	struct _ATOM_PPLIB_POWERPLAYTABLE3 pplib3;
};

/* Overlay of the per-ASIC-family clock info entries in the pplib table. */
union pplib_clock_info {
	struct _ATOM_PPLIB_R600_CLOCK_INFO r600;
	struct _ATOM_PPLIB_RS780_CLOCK_INFO rs780;
	struct _ATOM_PPLIB_EVERGREEN_CLOCK_INFO evergreen;
	struct _ATOM_PPLIB_SUMO_CLOCK_INFO sumo;
};

/* Overlay of the two pplib power-state layouts. */
union pplib_power_state {
	struct _ATOM_PPLIB_STATE v1;
	struct _ATOM_PPLIB_STATE_V2 v2;
};
/*
 * Fill in the generic radeon_ps fields (caps, class, UVD clocks) from a
 * pplib non-clock info entry, and register the state as the boot and/or
 * UVD state when its classification flags say so.
 */
static void ni_parse_pplib_non_clock_info(struct radeon_device *rdev,
					  struct radeon_ps *rps,
					  struct _ATOM_PPLIB_NONCLOCK_INFO *non_clock_info,
					  u8 table_rev)
{
	rps->caps = le32_to_cpu(non_clock_info->ulCapsAndSettings);
	rps->class = le16_to_cpu(non_clock_info->usClassification);
	rps->class2 = le16_to_cpu(non_clock_info->usClassification2);

	/* newer table revisions carry explicit UVD clocks */
	if (ATOM_PPLIB_NONCLOCKINFO_VER1 < table_rev) {
		rps->vclk = le32_to_cpu(non_clock_info->ulVCLK);
		rps->dclk = le32_to_cpu(non_clock_info->ulDCLK);
	} else if (r600_is_uvd_state(rps->class, rps->class2)) {
		/* old revision + UVD state: fall back to defaults */
		rps->vclk = RV770_DEFAULT_VCLK_FREQ;
		rps->dclk = RV770_DEFAULT_DCLK_FREQ;
	} else {
		rps->vclk = 0;
		rps->dclk = 0;
	}

	if (rps->class & ATOM_PPLIB_CLASSIFICATION_BOOT)
		rdev->pm.dpm.boot_ps = rps;
	if (rps->class & ATOM_PPLIB_CLASSIFICATION_UVDSTATE)
		rdev->pm.dpm.uvd_ps = rps;
}

/*
 * Decode one evergreen-layout clock info entry into performance level
 * @index of @rps, patching up voltages and noting ACPI/ULV/boot states
 * in the driver-private power info as a side effect.
 */
static void ni_parse_pplib_clock_info(struct radeon_device *rdev,
				      struct radeon_ps *rps, int index,
				      union pplib_clock_info *clock_info)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	struct ni_ps *ps = ni_get_ps(rps);
	u16 vddc;
	struct rv7xx_pl *pl = &ps->performance_levels[index];

	ps->performance_level_count = index + 1;

	/* clocks are split into a 16-bit low and 8-bit high part */
	pl->sclk = le16_to_cpu(clock_info->evergreen.usEngineClockLow);
	pl->sclk |= clock_info->evergreen.ucEngineClockHigh << 16;
	pl->mclk = le16_to_cpu(clock_info->evergreen.usMemoryClockLow);
	pl->mclk |= clock_info->evergreen.ucMemoryClockHigh << 16;

	pl->vddc = le16_to_cpu(clock_info->evergreen.usVDDC);
	pl->vddci = le16_to_cpu(clock_info->evergreen.usVDDCI);
	pl->flags = le32_to_cpu(clock_info->evergreen.ulFlags);

	/* patch up vddc if necessary (0xff01 is a sentinel meaning "max") */
	if (pl->vddc == 0xff01) {
		if (radeon_atom_get_max_vddc(rdev, 0, 0, &vddc) == 0)
			pl->vddc = vddc;
	}

	/* remember the ACPI-state voltages and PCIE capability */
	if (rps->class & ATOM_PPLIB_CLASSIFICATION_ACPI) {
		pi->acpi_vddc = pl->vddc;
		eg_pi->acpi_vddci = pl->vddci;
		if (ps->performance_levels[0].flags & ATOM_PPLIB_R600_FLAGS_PCIEGEN2)
			pi->acpi_pcie_gen2 = true;
		else
			pi->acpi_pcie_gen2 = false;
	}

	if (rps->class2 & ATOM_PPLIB_CLASSIFICATION2_ULV) {
		eg_pi->ulv.supported = true;
		eg_pi->ulv.pl = pl;
	}

	/* track the vddc range seen across the whole table */
	if (pi->min_vddc_in_table > pl->vddc)
		pi->min_vddc_in_table = pl->vddc;

	if (pi->max_vddc_in_table < pl->vddc)
		pi->max_vddc_in_table = pl->vddc;

	/* patch up boot state */
	if (rps->class & ATOM_PPLIB_CLASSIFICATION_BOOT) {
		u16 vddc, vddci, mvdd;
		radeon_atombios_get_default_voltages(rdev, &vddc, &vddci, &mvdd);
		pl->mclk = rdev->clock.default_mclk;
		pl->sclk = rdev->clock.default_sclk;
		pl->vddc = vddc;
		pl->vddci = vddci;
	}

	/* the "performance" UI state defines the max AC clocks/voltages */
	if ((rps->class & ATOM_PPLIB_CLASSIFICATION_UI_MASK) ==
	    ATOM_PPLIB_CLASSIFICATION_UI_PERFORMANCE) {
		rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.sclk = pl->sclk;
		rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.mclk = pl->mclk;
		rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.vddc = pl->vddc;
		rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.vddci = pl->vddci;
	}
}
= pl; 399957e252bfSMichael Neumann } 400057e252bfSMichael Neumann 400157e252bfSMichael Neumann if (pi->min_vddc_in_table > pl->vddc) 400257e252bfSMichael Neumann pi->min_vddc_in_table = pl->vddc; 400357e252bfSMichael Neumann 400457e252bfSMichael Neumann if (pi->max_vddc_in_table < pl->vddc) 400557e252bfSMichael Neumann pi->max_vddc_in_table = pl->vddc; 400657e252bfSMichael Neumann 400757e252bfSMichael Neumann /* patch up boot state */ 400857e252bfSMichael Neumann if (rps->class & ATOM_PPLIB_CLASSIFICATION_BOOT) { 400957e252bfSMichael Neumann u16 vddc, vddci, mvdd; 401057e252bfSMichael Neumann radeon_atombios_get_default_voltages(rdev, &vddc, &vddci, &mvdd); 401157e252bfSMichael Neumann pl->mclk = rdev->clock.default_mclk; 401257e252bfSMichael Neumann pl->sclk = rdev->clock.default_sclk; 401357e252bfSMichael Neumann pl->vddc = vddc; 401457e252bfSMichael Neumann pl->vddci = vddci; 401557e252bfSMichael Neumann } 401657e252bfSMichael Neumann 401757e252bfSMichael Neumann if ((rps->class & ATOM_PPLIB_CLASSIFICATION_UI_MASK) == 401857e252bfSMichael Neumann ATOM_PPLIB_CLASSIFICATION_UI_PERFORMANCE) { 401957e252bfSMichael Neumann rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.sclk = pl->sclk; 402057e252bfSMichael Neumann rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.mclk = pl->mclk; 402157e252bfSMichael Neumann rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.vddc = pl->vddc; 402257e252bfSMichael Neumann rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.vddci = pl->vddci; 402357e252bfSMichael Neumann } 402457e252bfSMichael Neumann } 402557e252bfSMichael Neumann 402657e252bfSMichael Neumann static int ni_parse_power_table(struct radeon_device *rdev) 402757e252bfSMichael Neumann { 402857e252bfSMichael Neumann struct radeon_mode_info *mode_info = &rdev->mode_info; 402957e252bfSMichael Neumann struct _ATOM_PPLIB_NONCLOCK_INFO *non_clock_info; 403057e252bfSMichael Neumann union pplib_power_state *power_state; 403157e252bfSMichael Neumann int i, j; 403257e252bfSMichael Neumann union 
pplib_clock_info *clock_info; 403357e252bfSMichael Neumann union power_info *power_info; 403457e252bfSMichael Neumann int index = GetIndexIntoMasterTable(DATA, PowerPlayInfo); 403557e252bfSMichael Neumann u16 data_offset; 403657e252bfSMichael Neumann u8 frev, crev; 403757e252bfSMichael Neumann struct ni_ps *ps; 403857e252bfSMichael Neumann 403957e252bfSMichael Neumann if (!atom_parse_data_header(mode_info->atom_context, index, NULL, 404057e252bfSMichael Neumann &frev, &crev, &data_offset)) 404157e252bfSMichael Neumann return -EINVAL; 404257e252bfSMichael Neumann power_info = (union power_info *)((uint8_t*)mode_info->atom_context->bios + data_offset); 404357e252bfSMichael Neumann 404457e252bfSMichael Neumann rdev->pm.dpm.ps = kzalloc(sizeof(struct radeon_ps) * 404557e252bfSMichael Neumann power_info->pplib.ucNumStates, GFP_KERNEL); 404657e252bfSMichael Neumann if (!rdev->pm.dpm.ps) 404757e252bfSMichael Neumann return -ENOMEM; 404857e252bfSMichael Neumann rdev->pm.dpm.platform_caps = le32_to_cpu(power_info->pplib.ulPlatformCaps); 404957e252bfSMichael Neumann rdev->pm.dpm.backbias_response_time = le16_to_cpu(power_info->pplib.usBackbiasTime); 405057e252bfSMichael Neumann rdev->pm.dpm.voltage_response_time = le16_to_cpu(power_info->pplib.usVoltageTime); 405157e252bfSMichael Neumann 405257e252bfSMichael Neumann for (i = 0; i < power_info->pplib.ucNumStates; i++) { 405357e252bfSMichael Neumann power_state = (union pplib_power_state *) 405457e252bfSMichael Neumann ((uint8_t*)mode_info->atom_context->bios + data_offset + 405557e252bfSMichael Neumann le16_to_cpu(power_info->pplib.usStateArrayOffset) + 405657e252bfSMichael Neumann i * power_info->pplib.ucStateEntrySize); 405757e252bfSMichael Neumann non_clock_info = (struct _ATOM_PPLIB_NONCLOCK_INFO *) 405857e252bfSMichael Neumann ((uint8_t*)mode_info->atom_context->bios + data_offset + 405957e252bfSMichael Neumann le16_to_cpu(power_info->pplib.usNonClockInfoArrayOffset) + 406057e252bfSMichael Neumann 
(power_state->v1.ucNonClockStateIndex * 406157e252bfSMichael Neumann power_info->pplib.ucNonClockSize)); 406257e252bfSMichael Neumann if (power_info->pplib.ucStateEntrySize - 1) { 4063*4cd92098Szrj u8 *idx; 406457e252bfSMichael Neumann ps = kzalloc(sizeof(struct ni_ps), GFP_KERNEL); 406557e252bfSMichael Neumann if (ps == NULL) { 406657e252bfSMichael Neumann kfree(rdev->pm.dpm.ps); 406757e252bfSMichael Neumann return -ENOMEM; 406857e252bfSMichael Neumann } 406957e252bfSMichael Neumann rdev->pm.dpm.ps[i].ps_priv = ps; 407057e252bfSMichael Neumann ni_parse_pplib_non_clock_info(rdev, &rdev->pm.dpm.ps[i], 407157e252bfSMichael Neumann non_clock_info, 407257e252bfSMichael Neumann power_info->pplib.ucNonClockSize); 4073*4cd92098Szrj idx = (u8 *)&power_state->v1.ucClockStateIndices[0]; 407457e252bfSMichael Neumann for (j = 0; j < (power_info->pplib.ucStateEntrySize - 1); j++) { 407557e252bfSMichael Neumann clock_info = (union pplib_clock_info *) 407657e252bfSMichael Neumann ((uint8_t*)mode_info->atom_context->bios + data_offset + 407757e252bfSMichael Neumann le16_to_cpu(power_info->pplib.usClockInfoArrayOffset) + 4078*4cd92098Szrj (idx[j] * power_info->pplib.ucClockInfoSize)); 407957e252bfSMichael Neumann ni_parse_pplib_clock_info(rdev, 408057e252bfSMichael Neumann &rdev->pm.dpm.ps[i], j, 408157e252bfSMichael Neumann clock_info); 408257e252bfSMichael Neumann } 408357e252bfSMichael Neumann } 408457e252bfSMichael Neumann } 408557e252bfSMichael Neumann rdev->pm.dpm.num_ps = power_info->pplib.ucNumStates; 408657e252bfSMichael Neumann return 0; 408757e252bfSMichael Neumann } 408857e252bfSMichael Neumann 408957e252bfSMichael Neumann int ni_dpm_init(struct radeon_device *rdev) 409057e252bfSMichael Neumann { 409157e252bfSMichael Neumann struct rv7xx_power_info *pi; 409257e252bfSMichael Neumann struct evergreen_power_info *eg_pi; 409357e252bfSMichael Neumann struct ni_power_info *ni_pi; 409457e252bfSMichael Neumann struct atom_clock_dividers dividers; 409557e252bfSMichael Neumann 
int ret; 409657e252bfSMichael Neumann 409757e252bfSMichael Neumann ni_pi = kzalloc(sizeof(struct ni_power_info), GFP_KERNEL); 409857e252bfSMichael Neumann if (ni_pi == NULL) 409957e252bfSMichael Neumann return -ENOMEM; 410057e252bfSMichael Neumann rdev->pm.dpm.priv = ni_pi; 410157e252bfSMichael Neumann eg_pi = &ni_pi->eg; 410257e252bfSMichael Neumann pi = &eg_pi->rv7xx; 410357e252bfSMichael Neumann 410457e252bfSMichael Neumann rv770_get_max_vddc(rdev); 410557e252bfSMichael Neumann 410657e252bfSMichael Neumann eg_pi->ulv.supported = false; 410757e252bfSMichael Neumann pi->acpi_vddc = 0; 410857e252bfSMichael Neumann eg_pi->acpi_vddci = 0; 410957e252bfSMichael Neumann pi->min_vddc_in_table = 0; 411057e252bfSMichael Neumann pi->max_vddc_in_table = 0; 411157e252bfSMichael Neumann 411257e252bfSMichael Neumann ret = ni_parse_power_table(rdev); 411357e252bfSMichael Neumann if (ret) 411457e252bfSMichael Neumann return ret; 411557e252bfSMichael Neumann ret = r600_parse_extended_power_table(rdev); 411657e252bfSMichael Neumann if (ret) 411757e252bfSMichael Neumann return ret; 411857e252bfSMichael Neumann 411957e252bfSMichael Neumann rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries = 412057e252bfSMichael Neumann kzalloc(4 * sizeof(struct radeon_clock_voltage_dependency_entry), GFP_KERNEL); 412157e252bfSMichael Neumann if (!rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries) { 412257e252bfSMichael Neumann r600_free_extended_power_table(rdev); 412357e252bfSMichael Neumann return -ENOMEM; 412457e252bfSMichael Neumann } 412557e252bfSMichael Neumann rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.count = 4; 412657e252bfSMichael Neumann rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[0].clk = 0; 412757e252bfSMichael Neumann rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[0].v = 0; 412857e252bfSMichael Neumann rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[1].clk = 36000; 412957e252bfSMichael Neumann 
rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[1].v = 720; 413057e252bfSMichael Neumann rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[2].clk = 54000; 413157e252bfSMichael Neumann rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[2].v = 810; 413257e252bfSMichael Neumann rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[3].clk = 72000; 413357e252bfSMichael Neumann rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[3].v = 900; 413457e252bfSMichael Neumann 413557e252bfSMichael Neumann ni_patch_dependency_tables_based_on_leakage(rdev); 413657e252bfSMichael Neumann 413757e252bfSMichael Neumann if (rdev->pm.dpm.voltage_response_time == 0) 413857e252bfSMichael Neumann rdev->pm.dpm.voltage_response_time = R600_VOLTAGERESPONSETIME_DFLT; 413957e252bfSMichael Neumann if (rdev->pm.dpm.backbias_response_time == 0) 414057e252bfSMichael Neumann rdev->pm.dpm.backbias_response_time = R600_BACKBIASRESPONSETIME_DFLT; 414157e252bfSMichael Neumann 414257e252bfSMichael Neumann ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM, 414357e252bfSMichael Neumann 0, false, ÷rs); 414457e252bfSMichael Neumann if (ret) 414557e252bfSMichael Neumann pi->ref_div = dividers.ref_div + 1; 414657e252bfSMichael Neumann else 414757e252bfSMichael Neumann pi->ref_div = R600_REFERENCEDIVIDER_DFLT; 414857e252bfSMichael Neumann 414957e252bfSMichael Neumann pi->rlp = RV770_RLP_DFLT; 415057e252bfSMichael Neumann pi->rmp = RV770_RMP_DFLT; 415157e252bfSMichael Neumann pi->lhp = RV770_LHP_DFLT; 415257e252bfSMichael Neumann pi->lmp = RV770_LMP_DFLT; 415357e252bfSMichael Neumann 415457e252bfSMichael Neumann eg_pi->ats[0].rlp = RV770_RLP_DFLT; 415557e252bfSMichael Neumann eg_pi->ats[0].rmp = RV770_RMP_DFLT; 415657e252bfSMichael Neumann eg_pi->ats[0].lhp = RV770_LHP_DFLT; 415757e252bfSMichael Neumann eg_pi->ats[0].lmp = RV770_LMP_DFLT; 415857e252bfSMichael Neumann 415957e252bfSMichael Neumann eg_pi->ats[1].rlp = BTC_RLP_UVD_DFLT; 416057e252bfSMichael Neumann 
eg_pi->ats[1].rmp = BTC_RMP_UVD_DFLT; 416157e252bfSMichael Neumann eg_pi->ats[1].lhp = BTC_LHP_UVD_DFLT; 416257e252bfSMichael Neumann eg_pi->ats[1].lmp = BTC_LMP_UVD_DFLT; 416357e252bfSMichael Neumann 416457e252bfSMichael Neumann eg_pi->smu_uvd_hs = true; 416557e252bfSMichael Neumann 416657e252bfSMichael Neumann if (rdev->ddev->pci_device == 0x6707) { 416757e252bfSMichael Neumann pi->mclk_strobe_mode_threshold = 55000; 416857e252bfSMichael Neumann pi->mclk_edc_enable_threshold = 55000; 416957e252bfSMichael Neumann eg_pi->mclk_edc_wr_enable_threshold = 55000; 417057e252bfSMichael Neumann } else { 417157e252bfSMichael Neumann pi->mclk_strobe_mode_threshold = 40000; 417257e252bfSMichael Neumann pi->mclk_edc_enable_threshold = 40000; 417357e252bfSMichael Neumann eg_pi->mclk_edc_wr_enable_threshold = 40000; 417457e252bfSMichael Neumann } 417557e252bfSMichael Neumann ni_pi->mclk_rtt_mode_threshold = eg_pi->mclk_edc_wr_enable_threshold; 417657e252bfSMichael Neumann 417757e252bfSMichael Neumann pi->voltage_control = 417857e252bfSMichael Neumann radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_VDDC, 0); 417957e252bfSMichael Neumann 418057e252bfSMichael Neumann pi->mvdd_control = 418157e252bfSMichael Neumann radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_MVDDC, 0); 418257e252bfSMichael Neumann 418357e252bfSMichael Neumann eg_pi->vddci_control = 418457e252bfSMichael Neumann radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_VDDCI, 0); 418557e252bfSMichael Neumann 418657e252bfSMichael Neumann rv770_get_engine_memory_ss(rdev); 418757e252bfSMichael Neumann 418857e252bfSMichael Neumann pi->asi = RV770_ASI_DFLT; 418957e252bfSMichael Neumann pi->pasi = CYPRESS_HASI_DFLT; 419057e252bfSMichael Neumann pi->vrc = CYPRESS_VRC_DFLT; 419157e252bfSMichael Neumann 419257e252bfSMichael Neumann pi->power_gating = false; 419357e252bfSMichael Neumann 419457e252bfSMichael Neumann pi->gfx_clock_gating = true; 419557e252bfSMichael Neumann 419657e252bfSMichael Neumann 
pi->mg_clock_gating = true; 419757e252bfSMichael Neumann pi->mgcgtssm = true; 419857e252bfSMichael Neumann eg_pi->ls_clock_gating = false; 419957e252bfSMichael Neumann eg_pi->sclk_deep_sleep = false; 420057e252bfSMichael Neumann 420157e252bfSMichael Neumann pi->dynamic_pcie_gen2 = true; 420257e252bfSMichael Neumann 420357e252bfSMichael Neumann if (rdev->pm.int_thermal_type != THERMAL_TYPE_NONE) 420457e252bfSMichael Neumann pi->thermal_protection = true; 420557e252bfSMichael Neumann else 420657e252bfSMichael Neumann pi->thermal_protection = false; 420757e252bfSMichael Neumann 420857e252bfSMichael Neumann pi->display_gap = true; 420957e252bfSMichael Neumann 421057e252bfSMichael Neumann pi->dcodt = true; 421157e252bfSMichael Neumann 421257e252bfSMichael Neumann pi->ulps = true; 421357e252bfSMichael Neumann 421457e252bfSMichael Neumann eg_pi->dynamic_ac_timing = true; 421557e252bfSMichael Neumann eg_pi->abm = true; 421657e252bfSMichael Neumann eg_pi->mcls = true; 421757e252bfSMichael Neumann eg_pi->light_sleep = true; 421857e252bfSMichael Neumann eg_pi->memory_transition = true; 421957e252bfSMichael Neumann #if defined(CONFIG_ACPI) 422057e252bfSMichael Neumann eg_pi->pcie_performance_request = 422157e252bfSMichael Neumann radeon_acpi_is_pcie_performance_request_supported(rdev); 422257e252bfSMichael Neumann #else 422357e252bfSMichael Neumann eg_pi->pcie_performance_request = false; 422457e252bfSMichael Neumann #endif 422557e252bfSMichael Neumann 422657e252bfSMichael Neumann eg_pi->dll_default_on = false; 422757e252bfSMichael Neumann 422857e252bfSMichael Neumann eg_pi->sclk_deep_sleep = false; 422957e252bfSMichael Neumann 423057e252bfSMichael Neumann pi->mclk_stutter_mode_threshold = 0; 423157e252bfSMichael Neumann 423257e252bfSMichael Neumann pi->sram_end = SMC_RAM_END; 423357e252bfSMichael Neumann 423457e252bfSMichael Neumann rdev->pm.dpm.dyn_state.mclk_sclk_ratio = 3; 423557e252bfSMichael Neumann rdev->pm.dpm.dyn_state.vddc_vddci_delta = 200; 423657e252bfSMichael 
Neumann rdev->pm.dpm.dyn_state.min_vddc_for_pcie_gen2 = 900; 423757e252bfSMichael Neumann rdev->pm.dpm.dyn_state.valid_sclk_values.count = ARRAY_SIZE(btc_valid_sclk); 423857e252bfSMichael Neumann rdev->pm.dpm.dyn_state.valid_sclk_values.values = btc_valid_sclk; 423957e252bfSMichael Neumann rdev->pm.dpm.dyn_state.valid_mclk_values.count = 0; 424057e252bfSMichael Neumann rdev->pm.dpm.dyn_state.valid_mclk_values.values = NULL; 424157e252bfSMichael Neumann rdev->pm.dpm.dyn_state.sclk_mclk_delta = 12500; 424257e252bfSMichael Neumann 424357e252bfSMichael Neumann ni_pi->cac_data.leakage_coefficients.at = 516; 424457e252bfSMichael Neumann ni_pi->cac_data.leakage_coefficients.bt = 18; 424557e252bfSMichael Neumann ni_pi->cac_data.leakage_coefficients.av = 51; 424657e252bfSMichael Neumann ni_pi->cac_data.leakage_coefficients.bv = 2957; 424757e252bfSMichael Neumann 424857e252bfSMichael Neumann switch (rdev->ddev->pci_device) { 424957e252bfSMichael Neumann case 0x6700: 425057e252bfSMichael Neumann case 0x6701: 425157e252bfSMichael Neumann case 0x6702: 425257e252bfSMichael Neumann case 0x6703: 425357e252bfSMichael Neumann case 0x6718: 425457e252bfSMichael Neumann ni_pi->cac_weights = &cac_weights_cayman_xt; 425557e252bfSMichael Neumann break; 425657e252bfSMichael Neumann case 0x6705: 425757e252bfSMichael Neumann case 0x6719: 425857e252bfSMichael Neumann case 0x671D: 425957e252bfSMichael Neumann case 0x671C: 426057e252bfSMichael Neumann default: 426157e252bfSMichael Neumann ni_pi->cac_weights = &cac_weights_cayman_pro; 426257e252bfSMichael Neumann break; 426357e252bfSMichael Neumann case 0x6704: 426457e252bfSMichael Neumann case 0x6706: 426557e252bfSMichael Neumann case 0x6707: 426657e252bfSMichael Neumann case 0x6708: 426757e252bfSMichael Neumann case 0x6709: 426857e252bfSMichael Neumann ni_pi->cac_weights = &cac_weights_cayman_le; 426957e252bfSMichael Neumann break; 427057e252bfSMichael Neumann } 427157e252bfSMichael Neumann 427257e252bfSMichael Neumann if 
(ni_pi->cac_weights->enable_power_containment_by_default) { 427357e252bfSMichael Neumann ni_pi->enable_power_containment = true; 427457e252bfSMichael Neumann ni_pi->enable_cac = true; 427557e252bfSMichael Neumann ni_pi->enable_sq_ramping = true; 427657e252bfSMichael Neumann } else { 427757e252bfSMichael Neumann ni_pi->enable_power_containment = false; 427857e252bfSMichael Neumann ni_pi->enable_cac = false; 427957e252bfSMichael Neumann ni_pi->enable_sq_ramping = false; 428057e252bfSMichael Neumann } 428157e252bfSMichael Neumann 428257e252bfSMichael Neumann ni_pi->driver_calculate_cac_leakage = false; 428357e252bfSMichael Neumann ni_pi->cac_configuration_required = true; 428457e252bfSMichael Neumann 428557e252bfSMichael Neumann if (ni_pi->cac_configuration_required) { 428657e252bfSMichael Neumann ni_pi->support_cac_long_term_average = true; 428757e252bfSMichael Neumann ni_pi->lta_window_size = ni_pi->cac_weights->l2_lta_window_size; 428857e252bfSMichael Neumann ni_pi->lts_truncate = ni_pi->cac_weights->lts_truncate; 428957e252bfSMichael Neumann } else { 429057e252bfSMichael Neumann ni_pi->support_cac_long_term_average = false; 429157e252bfSMichael Neumann ni_pi->lta_window_size = 0; 429257e252bfSMichael Neumann ni_pi->lts_truncate = 0; 429357e252bfSMichael Neumann } 429457e252bfSMichael Neumann 429557e252bfSMichael Neumann ni_pi->use_power_boost_limit = true; 429657e252bfSMichael Neumann 4297*4cd92098Szrj /* make sure dc limits are valid */ 4298*4cd92098Szrj if ((rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc.sclk == 0) || 4299*4cd92098Szrj (rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc.mclk == 0)) 4300*4cd92098Szrj rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc = 4301*4cd92098Szrj rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac; 4302*4cd92098Szrj 430357e252bfSMichael Neumann return 0; 430457e252bfSMichael Neumann } 430557e252bfSMichael Neumann 430657e252bfSMichael Neumann void ni_dpm_fini(struct radeon_device *rdev) 430757e252bfSMichael Neumann { 
430857e252bfSMichael Neumann int i; 430957e252bfSMichael Neumann 431057e252bfSMichael Neumann for (i = 0; i < rdev->pm.dpm.num_ps; i++) { 431157e252bfSMichael Neumann kfree(rdev->pm.dpm.ps[i].ps_priv); 431257e252bfSMichael Neumann } 431357e252bfSMichael Neumann kfree(rdev->pm.dpm.ps); 431457e252bfSMichael Neumann kfree(rdev->pm.dpm.priv); 431557e252bfSMichael Neumann kfree(rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries); 431657e252bfSMichael Neumann r600_free_extended_power_table(rdev); 431757e252bfSMichael Neumann } 431857e252bfSMichael Neumann 431957e252bfSMichael Neumann void ni_dpm_print_power_state(struct radeon_device *rdev, 432057e252bfSMichael Neumann struct radeon_ps *rps) 432157e252bfSMichael Neumann { 432257e252bfSMichael Neumann struct ni_ps *ps = ni_get_ps(rps); 432357e252bfSMichael Neumann struct rv7xx_pl *pl; 432457e252bfSMichael Neumann int i; 432557e252bfSMichael Neumann 432657e252bfSMichael Neumann r600_dpm_print_class_info(rps->class, rps->class2); 432757e252bfSMichael Neumann r600_dpm_print_cap_info(rps->caps); 432857e252bfSMichael Neumann printk("\tuvd vclk: %d dclk: %d\n", rps->vclk, rps->dclk); 432957e252bfSMichael Neumann for (i = 0; i < ps->performance_level_count; i++) { 433057e252bfSMichael Neumann pl = &ps->performance_levels[i]; 433157e252bfSMichael Neumann if (rdev->family >= CHIP_TAHITI) 433257e252bfSMichael Neumann printk("\t\tpower level %d sclk: %u mclk: %u vddc: %u vddci: %u pcie gen: %u\n", 433357e252bfSMichael Neumann i, pl->sclk, pl->mclk, pl->vddc, pl->vddci, pl->pcie_gen + 1); 433457e252bfSMichael Neumann else 433557e252bfSMichael Neumann printk("\t\tpower level %d sclk: %u mclk: %u vddc: %u vddci: %u\n", 433657e252bfSMichael Neumann i, pl->sclk, pl->mclk, pl->vddc, pl->vddci); 433757e252bfSMichael Neumann } 433857e252bfSMichael Neumann r600_dpm_print_ps_status(rdev, rps); 433957e252bfSMichael Neumann } 434057e252bfSMichael Neumann 434157e252bfSMichael Neumann void 
ni_dpm_debugfs_print_current_performance_level(struct radeon_device *rdev, 434257e252bfSMichael Neumann struct seq_file *m) 434357e252bfSMichael Neumann { 434457e252bfSMichael Neumann struct radeon_ps *rps = rdev->pm.dpm.current_ps; 434557e252bfSMichael Neumann struct ni_ps *ps = ni_get_ps(rps); 434657e252bfSMichael Neumann struct rv7xx_pl *pl; 434757e252bfSMichael Neumann u32 current_index = 434857e252bfSMichael Neumann (RREG32(TARGET_AND_CURRENT_PROFILE_INDEX) & CURRENT_STATE_INDEX_MASK) >> 434957e252bfSMichael Neumann CURRENT_STATE_INDEX_SHIFT; 435057e252bfSMichael Neumann 435157e252bfSMichael Neumann if (current_index >= ps->performance_level_count) { 435257e252bfSMichael Neumann seq_printf(m, "invalid dpm profile %d\n", current_index); 435357e252bfSMichael Neumann } else { 435457e252bfSMichael Neumann pl = &ps->performance_levels[current_index]; 435557e252bfSMichael Neumann seq_printf(m, "uvd vclk: %d dclk: %d\n", rps->vclk, rps->dclk); 435657e252bfSMichael Neumann seq_printf(m, "power level %d sclk: %u mclk: %u vddc: %u vddci: %u\n", 435757e252bfSMichael Neumann current_index, pl->sclk, pl->mclk, pl->vddc, pl->vddci); 435857e252bfSMichael Neumann } 435957e252bfSMichael Neumann } 436057e252bfSMichael Neumann 436157e252bfSMichael Neumann u32 ni_dpm_get_sclk(struct radeon_device *rdev, bool low) 436257e252bfSMichael Neumann { 436357e252bfSMichael Neumann struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); 436457e252bfSMichael Neumann struct ni_ps *requested_state = ni_get_ps(&eg_pi->requested_rps); 436557e252bfSMichael Neumann 436657e252bfSMichael Neumann if (low) 436757e252bfSMichael Neumann return requested_state->performance_levels[0].sclk; 436857e252bfSMichael Neumann else 436957e252bfSMichael Neumann return requested_state->performance_levels[requested_state->performance_level_count - 1].sclk; 437057e252bfSMichael Neumann } 437157e252bfSMichael Neumann 437257e252bfSMichael Neumann u32 ni_dpm_get_mclk(struct radeon_device *rdev, bool low) 
437357e252bfSMichael Neumann { 437457e252bfSMichael Neumann struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev); 437557e252bfSMichael Neumann struct ni_ps *requested_state = ni_get_ps(&eg_pi->requested_rps); 437657e252bfSMichael Neumann 437757e252bfSMichael Neumann if (low) 437857e252bfSMichael Neumann return requested_state->performance_levels[0].mclk; 437957e252bfSMichael Neumann else 438057e252bfSMichael Neumann return requested_state->performance_levels[requested_state->performance_level_count - 1].mclk; 438157e252bfSMichael Neumann } 4382