File drivers/gpu/alga/amd/si/dyn_pm/ctx.c changed (mode: 100644) (index 95c6865..8931550)
@@ -211,17 +211,24 @@ static void u_and_p_compute(u32 i, u32 ref_clk, u32 *p, u32 *u)
 }
 
 #define ASI_DEFAULT 1000 /* value from rv770 family */
-#define PASI_DEFAULT 400000 /* value from cypress family */
+#define P_ASI_DEFAULT 400000 /* value from cypress family */
 static void ctx_b_sp_init(struct ctx *ctx)
 {
         u32 bsp;
         u32 bsu;
+        u32 p_bsp;
+        u32 p_bsu;
 
         u_and_p_compute(ASI_DEFAULT, ctx->gpu_aux_clk, &bsp, &bsu);
+        u_and_p_compute(P_ASI_DEFAULT, ctx->gpu_aux_clk, &p_bsp, &p_bsu);
+
 
-        LOG("b_sp parameters:gpu_aux_clk=%u kHz, bsp=0x%08x, bsu=0x%08x",
+        LOG("d_sp parameters:gpu_aux_clk=%u kHz, bsp=0x%08x, bsu=0x%08x",
                         ctx->gpu_aux_clk * 10, bsp, bsu);
+        LOG("p_sp parameters:gpu_aux_clk=%u kHz, p_bsp=0x%08x, p_bsu=0x%08x",
+                        ctx->gpu_aux_clk * 10, p_bsp, p_bsu);
         ctx->d_sp = set(CB_BSP, bsp) | set (CB_BSU, bsu);
+        ctx->p_sp = set(CB_BSP, p_bsp) | set (CB_BSU, p_bsu);
 }
 
 static u32 cus_n_max_compute(struct ctx *ctx)
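Note on the bsp/bsu (and the new p_bsp/p_bsu) pairs: u_and_p_compute() splits a sampling interval (ASI_DEFAULT or P_ASI_DEFAULT, measured against the auxiliary clock) into a period field and a unit/shift field, which are then packed with set(CB_BSP, ...) | set(CB_BSU, ...). The helper's body is not part of this diff; the user-space model below assumes it behaves like r600_calculate_u_and_p() in the mainline radeon driver, with the period-field bit budget (p_b) fixed instead of passed in. The function name, the p_b value and the clock value are illustrative only.

/*
 * Hypothetical model of a u_and_p_compute()-style split (assumption: it
 * mirrors r600_calculate_u_and_p() from the mainline radeon driver, with
 * the period bit budget p_b hardcoded).  All values are illustrative.
 */
#include <stdint.h>
#include <stdio.h>

static void u_and_p_model(uint32_t interval, uint32_t ref_clk, uint32_t p_b,
                          uint32_t *p, uint32_t *u)
{
        uint32_t i_c = (interval * ref_clk) / 100; /* interval in ref clock ticks */
        uint32_t tmp = i_c >> p_b;                 /* part that overflows p_b bits */
        uint32_t b_c = 0;

        while (tmp) {                              /* count the overflowing bits */
                b_c++;
                tmp >>= 1;
        }

        *u = (b_c + 1) / 2;                        /* unit field: each step rescales by 4 */
        *p = i_c >> (2 * (*u));                    /* period field, rescaled by that unit */
}

int main(void)
{
        uint32_t p, u;

        /* e.g. ASI_DEFAULT = 1000 against a made-up gpu_aux_clk value */
        u_and_p_model(1000, 2700, 4, &p, &u);
        printf("p=0x%08x u=0x%08x\n", p, u);
        return 0;
}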
File drivers/gpu/alga/amd/si/dyn_pm/driver.c changed (mode: 100644) (index 325cd5a..6d19e77)
@@ -35,6 +35,184 @@
 
 #include "ctx.h"
 #include "private.h"
+#include "smc_lvl.h"
+#include "smc_volt.h"
+
+static void smc_sw_state_init(struct ctx *ctx, struct smc_sw_state *state)
+{
+        state->flgs = SMC_SW_STATE_FLGS_DC;
+        state->lvls_n = ctx->atb_performance.lvls_n;
+}
+
+static u16 pwr_efficiency_ratio_compute(struct ctx *ctx,
+                struct smc_volt *prev_std_vddc, struct smc_volt *cur_std_vddc)
+{
+        //TODO
+        return 0;
+}
+
+#define NEAR_TDP_DEC 10
+#define ABOVE_SAFE_INC 5
+#define BELOW_SAFE_INC 20
+static long smc_pp_dpm_to_perf_lvl_init(struct ctx *ctx, u8 lvl_idx,
+                struct smc_pp_dpm_to_perf_lvl *dpm_to_perf_lvl)
+{
+        long r;
+        struct atb_pp_lvl *prev_atb_lvl;
+        struct atb_pp_lvl *cur_atb_lvl;
+        struct smc_volt prev_vddc;
+        struct smc_volt cur_vddc;
+        struct smc_volt prev_std_vddc;
+        struct smc_volt cur_std_vddc;
+        u16 pwr_efficiency_ratio;
+
+        if (lvl_idx == 0) {
+                dpm_to_perf_lvl->max_ps = 0;
+                dpm_to_perf_lvl->near_tdp_dec = 0;
+                dpm_to_perf_lvl->above_safe_inc = 0;
+                dpm_to_perf_lvl->below_safe_inc = 0;
+                put_unaligned_be16(0, &dpm_to_perf_lvl->pwr_efficiency_ratio);
+                return 0;
+        }
+
+        /* from here lvl_idx >= 1 */
+
+        /*--------------------------------------------------------------------*/
+
+        dpm_to_perf_lvl->max_ps = 0; /* because no uvd */
+        dpm_to_perf_lvl->near_tdp_dec = NEAR_TDP_DEC;
+        dpm_to_perf_lvl->above_safe_inc = ABOVE_SAFE_INC;
+        dpm_to_perf_lvl->below_safe_inc = BELOW_SAFE_INC;
+
+        /*--------------------------------------------------------------------*/
+
+        prev_atb_lvl = &ctx->atb_performance.lvls[lvl_idx - 1];
+        r = smc_volt_vddc_set_from_atb_id(ctx, &prev_vddc,
+                                                        prev_atb_lvl->vddc_id);
+        if (r == -SI_ERR) {
+                dev_err(&ctx->dev->dev, "dyn_pm:unable to find/set previous vddc_id\n");
+                return -SI_ERR;
+        }
+        smc_volt_std_vddc_compute(ctx, &prev_std_vddc, &prev_vddc);
+
+        cur_atb_lvl = &ctx->atb_performance.lvls[lvl_idx];
+        r = smc_volt_vddc_set_from_atb_id(ctx, &cur_vddc, cur_atb_lvl->vddc_id);
+        if (r == -SI_ERR) {
+                dev_err(&ctx->dev->dev, "dyn_pm:unable to find/set current vddc_id\n");
+                return -SI_ERR;
+        }
+        smc_volt_std_vddc_compute(ctx, &cur_std_vddc, &cur_vddc);
+
+        pwr_efficiency_ratio = pwr_efficiency_ratio_compute(ctx, &prev_std_vddc,
+                                                                &cur_std_vddc);
+
+        put_unaligned_be16(pwr_efficiency_ratio,
+                                        &dpm_to_perf_lvl->pwr_efficiency_ratio);
+        return 0;
+}
+
+static long smc_lvls_init(struct ctx *ctx, struct smc_lvl *lvls)
+{
+        u8 lvl_idx;
+        u8 lvl_idx_last;
+
+        /* the first lvl of the pwr state has some special ds setting */
+        lvls[0].state_flgs = (u8)SMC_STATE_FLGS_DEEPSLEEP_BYPASS;
+
+        /*
+         * the last lvl of the pwr state will get some special settings in the
+         * following loop
+         */
+        lvl_idx_last = ctx->atb_performance.lvls_n - 1;
+
+        for (lvl_idx = 0; lvl_idx < ctx->atb_performance.lvls_n; ++lvl_idx) {
+                struct smc_lvl *lvl;
+                long r;
+
+                lvl = &lvls[lvl_idx];
+
+                r = smc_lvl_from_atb(ctx, &ctx->atb_performance.lvls[lvl_idx],
+                                                        &lvls[lvl_idx]);
+                if (r == -SI_ERR)
+                        goto err;
+
+                if (lvl_idx == lvl_idx_last) {
+                        lvl->disp_watermark = SMC_DISP_WATERMARK_HIGH;
+                        put_unaligned_be32(ctx->p_sp, &lvl->b_sp);
+                } else {
+                        lvl->disp_watermark = SMC_DISP_WATERMARK_LOW;
+                        put_unaligned_be32(ctx->d_sp, &lvl->b_sp);
+                }
+
+                lvl->mc_reg_set_idx = MC_REG_SET_IDX_DRIVER + lvl_idx;
+                lvl->mc_arb_set_idx = MC_ARB_SET_IDX_DRIVER + lvl_idx;
+
+                smc_pp_dpm_to_perf_lvl_init(ctx, lvl_idx,
+                                                &lvl->dpm_to_perf_lvl);
+                //TODO
+        }
+        return 0;
+err:
+        return -SI_ERR;
+}
+
+
+static long smc_tbls_init(struct ctx *ctx)
+{
+        struct smc_sw_state *smc_sw_state;
+        struct smc_lvl *smc_lvls;
+        long r;
+
+        /*--------------------------------------------------------------------*/
+
+        smc_sw_state = kzalloc(sizeof(*smc_sw_state), GFP_KERNEL);
+        if (!smc_sw_state) {
+                dev_err(&ctx->dev->dev, "dyn_pm:driver:unable to alloc the smc_sw_tbl\n");
+                goto err;
+        }
+
+        smc_sw_state_init(ctx, smc_sw_state);
+
+        /*--------------------------------------------------------------------*/
+
+        smc_lvls = kzalloc(sizeof(*smc_lvls) * ctx->atb_performance.lvls_n,
+                                                                GFP_KERNEL);
+        if (!smc_lvls) {
+                dev_err(&ctx->dev->dev, "dyn_pm:driver:unable to alloc the smc_lvls\n");
+                goto err_free_smc_sw_state;
+        }
+
+        r = smc_lvls_init(ctx, smc_lvls);
+        if (r == -SI_ERR)
+                goto err_free_smc_lvls;
+
+        /*--------------------------------------------------------------------*/
+
+        //TODO:mc_regs, mc_arb...
+
+        kfree(smc_lvls);
+        kfree(smc_sw_state);
+        return 0;
+
+err_free_smc_lvls:
+        kfree(smc_lvls);
+
+err_free_smc_sw_state:
+        kfree(smc_sw_state);
+
+err:
+        return -SI_ERR;
+}
+
+static void smc_sw_regs_init(struct ctx *ctx)
+{
+        u8 lvl_idx_last;
+        u32 eng_clk_max;
+
+        lvl_idx_last = ctx->atb_performance.lvls_n - 1;
+        eng_clk_max = ctx->atb_performance.lvls[lvl_idx_last].eng_clk;
+        smc_sw_wr32(ctx->dev, eng_clk_max / 512, SMC_SW_WATERMARK_THRESHOLD);
+}
 
 long driver_set_performance(struct ctx *ctx)
 {
@@ -85,6 +263,12 @@ long driver_set_performance(struct ctx *ctx)
         LOG("smc:waiting to halt");
         smc_halt_wait(ctx->dev);
 
+        r = smc_tbls_init(ctx);
+        if (r == -SI_ERR)
+                goto err;
+
+        smc_sw_regs_init(ctx);
+
         //TODO
 
         return 0;
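A note on the put_unaligned_be16()/put_unaligned_be32() calls used for the new SMC tables: the SMC consumes its state tables in big-endian byte order, and the packed table structs give no alignment guarantee, so fields are stored byte by byte rather than assigned directly. The stand-alone sketch below models that idiom in user space; the struct layout and field names are made up for illustration and are not the driver's.

/*
 * Hypothetical model of the big-endian, unaligned table-field stores.
 * put_be16()/put_be32() stand in for the kernel's put_unaligned_be16()
 * and put_unaligned_be32(); the struct is illustrative only.
 */
#include <stdint.h>
#include <stdio.h>
#include <string.h>

static void put_be16(uint16_t v, void *dst)
{
        uint8_t b[2] = { v >> 8, v & 0xff };

        memcpy(dst, b, sizeof(b));
}

static void put_be32(uint32_t v, void *dst)
{
        uint8_t b[4] = { v >> 24, (v >> 16) & 0xff, (v >> 8) & 0xff, v & 0xff };

        memcpy(dst, b, sizeof(b));
}

struct fake_smc_lvl {                  /* illustrative layout only */
        uint8_t flgs;
        uint16_t pwr_efficiency_ratio; /* big-endian on the SMC side */
        uint32_t b_sp;                 /* big-endian on the SMC side */
} __attribute__((packed));

int main(void)
{
        struct fake_smc_lvl lvl;
        const uint8_t *raw = (const uint8_t *)&lvl;

        memset(&lvl, 0, sizeof(lvl));
        put_be16(0x1234, &lvl.pwr_efficiency_ratio);
        put_be32(0x00c0ffee, &lvl.b_sp);

        /* most significant byte comes first: prints 12 and 00 */
        printf("ratio[0]=%02x b_sp[0]=%02x\n", raw[1], raw[3]);
        return 0;
}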