Line data Source code
1 : /*
2 : * Copyright 2012 Advanced Micro Devices, Inc.
3 : *
4 : * Permission is hereby granted, free of charge, to any person obtaining a
5 : * copy of this software and associated documentation files (the "Software"),
6 : * to deal in the Software without restriction, including without limitation
7 : * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 : * and/or sell copies of the Software, and to permit persons to whom the
9 : * Software is furnished to do so, subject to the following conditions:
10 : *
11 : * The above copyright notice and this permission notice shall be included in
12 : * all copies or substantial portions of the Software.
13 : *
14 : * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15 : * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16 : * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
17 : * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18 : * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19 : * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20 : * OTHER DEALINGS IN THE SOFTWARE.
21 : *
22 : */
23 :
24 : #include <dev/pci/drm/drmP.h>
25 : #include "radeon.h"
26 : #include "radeon_asic.h"
27 : #include "nid.h"
28 : #include "r600_dpm.h"
29 : #include "ni_dpm.h"
30 : #include "atom.h"
31 :
32 : #define MC_CG_ARB_FREQ_F0 0x0a
33 : #define MC_CG_ARB_FREQ_F1 0x0b
34 : #define MC_CG_ARB_FREQ_F2 0x0c
35 : #define MC_CG_ARB_FREQ_F3 0x0d
36 :
37 : #define SMC_RAM_END 0xC000
38 :
39 : static const struct ni_cac_weights cac_weights_cayman_xt =
40 : {
41 : 0x15,
42 : 0x2,
43 : 0x19,
44 : 0x2,
45 : 0x8,
46 : 0x14,
47 : 0x2,
48 : 0x16,
49 : 0xE,
50 : 0x17,
51 : 0x13,
52 : 0x2B,
53 : 0x10,
54 : 0x7,
55 : 0x5,
56 : 0x5,
57 : 0x5,
58 : 0x2,
59 : 0x3,
60 : 0x9,
61 : 0x10,
62 : 0x10,
63 : 0x2B,
64 : 0xA,
65 : 0x9,
66 : 0x4,
67 : 0xD,
68 : 0xD,
69 : 0x3E,
70 : 0x18,
71 : 0x14,
72 : 0,
73 : 0x3,
74 : 0x3,
75 : 0x5,
76 : 0,
77 : 0x2,
78 : 0,
79 : 0,
80 : 0,
81 : 0,
82 : 0,
83 : 0,
84 : 0,
85 : 0,
86 : 0,
87 : 0x1CC,
88 : 0,
89 : 0x164,
90 : 1,
91 : 1,
92 : 1,
93 : 1,
94 : 12,
95 : 12,
96 : 12,
97 : 0x12,
98 : 0x1F,
99 : 132,
100 : 5,
101 : 7,
102 : 0,
103 : { 0, 0, 0, 0, 0, 0, 0, 0 },
104 : { 0, 0, 0, 0 },
105 : true
106 : };
107 :
108 : static const struct ni_cac_weights cac_weights_cayman_pro =
109 : {
110 : 0x16,
111 : 0x4,
112 : 0x10,
113 : 0x2,
114 : 0xA,
115 : 0x16,
116 : 0x2,
117 : 0x18,
118 : 0x10,
119 : 0x1A,
120 : 0x16,
121 : 0x2D,
122 : 0x12,
123 : 0xA,
124 : 0x6,
125 : 0x6,
126 : 0x6,
127 : 0x2,
128 : 0x4,
129 : 0xB,
130 : 0x11,
131 : 0x11,
132 : 0x2D,
133 : 0xC,
134 : 0xC,
135 : 0x7,
136 : 0x10,
137 : 0x10,
138 : 0x3F,
139 : 0x1A,
140 : 0x16,
141 : 0,
142 : 0x7,
143 : 0x4,
144 : 0x6,
145 : 1,
146 : 0x2,
147 : 0x1,
148 : 0,
149 : 0,
150 : 0,
151 : 0,
152 : 0,
153 : 0,
154 : 0x30,
155 : 0,
156 : 0x1CF,
157 : 0,
158 : 0x166,
159 : 1,
160 : 1,
161 : 1,
162 : 1,
163 : 12,
164 : 12,
165 : 12,
166 : 0x15,
167 : 0x1F,
168 : 132,
169 : 6,
170 : 6,
171 : 0,
172 : { 0, 0, 0, 0, 0, 0, 0, 0 },
173 : { 0, 0, 0, 0 },
174 : true
175 : };
176 :
177 : static const struct ni_cac_weights cac_weights_cayman_le =
178 : {
179 : 0x7,
180 : 0xE,
181 : 0x1,
182 : 0xA,
183 : 0x1,
184 : 0x3F,
185 : 0x2,
186 : 0x18,
187 : 0x10,
188 : 0x1A,
189 : 0x1,
190 : 0x3F,
191 : 0x1,
192 : 0xE,
193 : 0x6,
194 : 0x6,
195 : 0x6,
196 : 0x2,
197 : 0x4,
198 : 0x9,
199 : 0x1A,
200 : 0x1A,
201 : 0x2C,
202 : 0xA,
203 : 0x11,
204 : 0x8,
205 : 0x19,
206 : 0x19,
207 : 0x1,
208 : 0x1,
209 : 0x1A,
210 : 0,
211 : 0x8,
212 : 0x5,
213 : 0x8,
214 : 0x1,
215 : 0x3,
216 : 0x1,
217 : 0,
218 : 0,
219 : 0,
220 : 0,
221 : 0,
222 : 0,
223 : 0x38,
224 : 0x38,
225 : 0x239,
226 : 0x3,
227 : 0x18A,
228 : 1,
229 : 1,
230 : 1,
231 : 1,
232 : 12,
233 : 12,
234 : 12,
235 : 0x15,
236 : 0x22,
237 : 132,
238 : 6,
239 : 6,
240 : 0,
241 : { 0, 0, 0, 0, 0, 0, 0, 0 },
242 : { 0, 0, 0, 0 },
243 : true
244 : };
245 :
246 : #define NISLANDS_MGCG_SEQUENCE 300
247 :
248 : static const u32 cayman_cgcg_cgls_default[] =
249 : {
250 : 0x000008f8, 0x00000010, 0xffffffff,
251 : 0x000008fc, 0x00000000, 0xffffffff,
252 : 0x000008f8, 0x00000011, 0xffffffff,
253 : 0x000008fc, 0x00000000, 0xffffffff,
254 : 0x000008f8, 0x00000012, 0xffffffff,
255 : 0x000008fc, 0x00000000, 0xffffffff,
256 : 0x000008f8, 0x00000013, 0xffffffff,
257 : 0x000008fc, 0x00000000, 0xffffffff,
258 : 0x000008f8, 0x00000014, 0xffffffff,
259 : 0x000008fc, 0x00000000, 0xffffffff,
260 : 0x000008f8, 0x00000015, 0xffffffff,
261 : 0x000008fc, 0x00000000, 0xffffffff,
262 : 0x000008f8, 0x00000016, 0xffffffff,
263 : 0x000008fc, 0x00000000, 0xffffffff,
264 : 0x000008f8, 0x00000017, 0xffffffff,
265 : 0x000008fc, 0x00000000, 0xffffffff,
266 : 0x000008f8, 0x00000018, 0xffffffff,
267 : 0x000008fc, 0x00000000, 0xffffffff,
268 : 0x000008f8, 0x00000019, 0xffffffff,
269 : 0x000008fc, 0x00000000, 0xffffffff,
270 : 0x000008f8, 0x0000001a, 0xffffffff,
271 : 0x000008fc, 0x00000000, 0xffffffff,
272 : 0x000008f8, 0x0000001b, 0xffffffff,
273 : 0x000008fc, 0x00000000, 0xffffffff,
274 : 0x000008f8, 0x00000020, 0xffffffff,
275 : 0x000008fc, 0x00000000, 0xffffffff,
276 : 0x000008f8, 0x00000021, 0xffffffff,
277 : 0x000008fc, 0x00000000, 0xffffffff,
278 : 0x000008f8, 0x00000022, 0xffffffff,
279 : 0x000008fc, 0x00000000, 0xffffffff,
280 : 0x000008f8, 0x00000023, 0xffffffff,
281 : 0x000008fc, 0x00000000, 0xffffffff,
282 : 0x000008f8, 0x00000024, 0xffffffff,
283 : 0x000008fc, 0x00000000, 0xffffffff,
284 : 0x000008f8, 0x00000025, 0xffffffff,
285 : 0x000008fc, 0x00000000, 0xffffffff,
286 : 0x000008f8, 0x00000026, 0xffffffff,
287 : 0x000008fc, 0x00000000, 0xffffffff,
288 : 0x000008f8, 0x00000027, 0xffffffff,
289 : 0x000008fc, 0x00000000, 0xffffffff,
290 : 0x000008f8, 0x00000028, 0xffffffff,
291 : 0x000008fc, 0x00000000, 0xffffffff,
292 : 0x000008f8, 0x00000029, 0xffffffff,
293 : 0x000008fc, 0x00000000, 0xffffffff,
294 : 0x000008f8, 0x0000002a, 0xffffffff,
295 : 0x000008fc, 0x00000000, 0xffffffff,
296 : 0x000008f8, 0x0000002b, 0xffffffff,
297 : 0x000008fc, 0x00000000, 0xffffffff
298 : };
299 : #define CAYMAN_CGCG_CGLS_DEFAULT_LENGTH sizeof(cayman_cgcg_cgls_default) / (3 * sizeof(u32))
300 :
301 : static const u32 cayman_cgcg_cgls_disable[] =
302 : {
303 : 0x000008f8, 0x00000010, 0xffffffff,
304 : 0x000008fc, 0xffffffff, 0xffffffff,
305 : 0x000008f8, 0x00000011, 0xffffffff,
306 : 0x000008fc, 0xffffffff, 0xffffffff,
307 : 0x000008f8, 0x00000012, 0xffffffff,
308 : 0x000008fc, 0xffffffff, 0xffffffff,
309 : 0x000008f8, 0x00000013, 0xffffffff,
310 : 0x000008fc, 0xffffffff, 0xffffffff,
311 : 0x000008f8, 0x00000014, 0xffffffff,
312 : 0x000008fc, 0xffffffff, 0xffffffff,
313 : 0x000008f8, 0x00000015, 0xffffffff,
314 : 0x000008fc, 0xffffffff, 0xffffffff,
315 : 0x000008f8, 0x00000016, 0xffffffff,
316 : 0x000008fc, 0xffffffff, 0xffffffff,
317 : 0x000008f8, 0x00000017, 0xffffffff,
318 : 0x000008fc, 0xffffffff, 0xffffffff,
319 : 0x000008f8, 0x00000018, 0xffffffff,
320 : 0x000008fc, 0xffffffff, 0xffffffff,
321 : 0x000008f8, 0x00000019, 0xffffffff,
322 : 0x000008fc, 0xffffffff, 0xffffffff,
323 : 0x000008f8, 0x0000001a, 0xffffffff,
324 : 0x000008fc, 0xffffffff, 0xffffffff,
325 : 0x000008f8, 0x0000001b, 0xffffffff,
326 : 0x000008fc, 0xffffffff, 0xffffffff,
327 : 0x000008f8, 0x00000020, 0xffffffff,
328 : 0x000008fc, 0x00000000, 0xffffffff,
329 : 0x000008f8, 0x00000021, 0xffffffff,
330 : 0x000008fc, 0x00000000, 0xffffffff,
331 : 0x000008f8, 0x00000022, 0xffffffff,
332 : 0x000008fc, 0x00000000, 0xffffffff,
333 : 0x000008f8, 0x00000023, 0xffffffff,
334 : 0x000008fc, 0x00000000, 0xffffffff,
335 : 0x000008f8, 0x00000024, 0xffffffff,
336 : 0x000008fc, 0x00000000, 0xffffffff,
337 : 0x000008f8, 0x00000025, 0xffffffff,
338 : 0x000008fc, 0x00000000, 0xffffffff,
339 : 0x000008f8, 0x00000026, 0xffffffff,
340 : 0x000008fc, 0x00000000, 0xffffffff,
341 : 0x000008f8, 0x00000027, 0xffffffff,
342 : 0x000008fc, 0x00000000, 0xffffffff,
343 : 0x000008f8, 0x00000028, 0xffffffff,
344 : 0x000008fc, 0x00000000, 0xffffffff,
345 : 0x000008f8, 0x00000029, 0xffffffff,
346 : 0x000008fc, 0x00000000, 0xffffffff,
347 : 0x000008f8, 0x0000002a, 0xffffffff,
348 : 0x000008fc, 0x00000000, 0xffffffff,
349 : 0x000008f8, 0x0000002b, 0xffffffff,
350 : 0x000008fc, 0x00000000, 0xffffffff,
351 : 0x00000644, 0x000f7902, 0x001f4180,
352 : 0x00000644, 0x000f3802, 0x001f4180
353 : };
354 : #define CAYMAN_CGCG_CGLS_DISABLE_LENGTH sizeof(cayman_cgcg_cgls_disable) / (3 * sizeof(u32))
355 :
356 : static const u32 cayman_cgcg_cgls_enable[] =
357 : {
358 : 0x00000644, 0x000f7882, 0x001f4080,
359 : 0x000008f8, 0x00000010, 0xffffffff,
360 : 0x000008fc, 0x00000000, 0xffffffff,
361 : 0x000008f8, 0x00000011, 0xffffffff,
362 : 0x000008fc, 0x00000000, 0xffffffff,
363 : 0x000008f8, 0x00000012, 0xffffffff,
364 : 0x000008fc, 0x00000000, 0xffffffff,
365 : 0x000008f8, 0x00000013, 0xffffffff,
366 : 0x000008fc, 0x00000000, 0xffffffff,
367 : 0x000008f8, 0x00000014, 0xffffffff,
368 : 0x000008fc, 0x00000000, 0xffffffff,
369 : 0x000008f8, 0x00000015, 0xffffffff,
370 : 0x000008fc, 0x00000000, 0xffffffff,
371 : 0x000008f8, 0x00000016, 0xffffffff,
372 : 0x000008fc, 0x00000000, 0xffffffff,
373 : 0x000008f8, 0x00000017, 0xffffffff,
374 : 0x000008fc, 0x00000000, 0xffffffff,
375 : 0x000008f8, 0x00000018, 0xffffffff,
376 : 0x000008fc, 0x00000000, 0xffffffff,
377 : 0x000008f8, 0x00000019, 0xffffffff,
378 : 0x000008fc, 0x00000000, 0xffffffff,
379 : 0x000008f8, 0x0000001a, 0xffffffff,
380 : 0x000008fc, 0x00000000, 0xffffffff,
381 : 0x000008f8, 0x0000001b, 0xffffffff,
382 : 0x000008fc, 0x00000000, 0xffffffff,
383 : 0x000008f8, 0x00000020, 0xffffffff,
384 : 0x000008fc, 0xffffffff, 0xffffffff,
385 : 0x000008f8, 0x00000021, 0xffffffff,
386 : 0x000008fc, 0xffffffff, 0xffffffff,
387 : 0x000008f8, 0x00000022, 0xffffffff,
388 : 0x000008fc, 0xffffffff, 0xffffffff,
389 : 0x000008f8, 0x00000023, 0xffffffff,
390 : 0x000008fc, 0xffffffff, 0xffffffff,
391 : 0x000008f8, 0x00000024, 0xffffffff,
392 : 0x000008fc, 0xffffffff, 0xffffffff,
393 : 0x000008f8, 0x00000025, 0xffffffff,
394 : 0x000008fc, 0xffffffff, 0xffffffff,
395 : 0x000008f8, 0x00000026, 0xffffffff,
396 : 0x000008fc, 0xffffffff, 0xffffffff,
397 : 0x000008f8, 0x00000027, 0xffffffff,
398 : 0x000008fc, 0xffffffff, 0xffffffff,
399 : 0x000008f8, 0x00000028, 0xffffffff,
400 : 0x000008fc, 0xffffffff, 0xffffffff,
401 : 0x000008f8, 0x00000029, 0xffffffff,
402 : 0x000008fc, 0xffffffff, 0xffffffff,
403 : 0x000008f8, 0x0000002a, 0xffffffff,
404 : 0x000008fc, 0xffffffff, 0xffffffff,
405 : 0x000008f8, 0x0000002b, 0xffffffff,
406 : 0x000008fc, 0xffffffff, 0xffffffff
407 : };
408 : #define CAYMAN_CGCG_CGLS_ENABLE_LENGTH sizeof(cayman_cgcg_cgls_enable) / (3 * sizeof(u32))
409 :
410 : static const u32 cayman_mgcg_default[] =
411 : {
412 : 0x0000802c, 0xc0000000, 0xffffffff,
413 : 0x00003fc4, 0xc0000000, 0xffffffff,
414 : 0x00005448, 0x00000100, 0xffffffff,
415 : 0x000055e4, 0x00000100, 0xffffffff,
416 : 0x0000160c, 0x00000100, 0xffffffff,
417 : 0x00008984, 0x06000100, 0xffffffff,
418 : 0x0000c164, 0x00000100, 0xffffffff,
419 : 0x00008a18, 0x00000100, 0xffffffff,
420 : 0x0000897c, 0x06000100, 0xffffffff,
421 : 0x00008b28, 0x00000100, 0xffffffff,
422 : 0x00009144, 0x00800200, 0xffffffff,
423 : 0x00009a60, 0x00000100, 0xffffffff,
424 : 0x00009868, 0x00000100, 0xffffffff,
425 : 0x00008d58, 0x00000100, 0xffffffff,
426 : 0x00009510, 0x00000100, 0xffffffff,
427 : 0x0000949c, 0x00000100, 0xffffffff,
428 : 0x00009654, 0x00000100, 0xffffffff,
429 : 0x00009030, 0x00000100, 0xffffffff,
430 : 0x00009034, 0x00000100, 0xffffffff,
431 : 0x00009038, 0x00000100, 0xffffffff,
432 : 0x0000903c, 0x00000100, 0xffffffff,
433 : 0x00009040, 0x00000100, 0xffffffff,
434 : 0x0000a200, 0x00000100, 0xffffffff,
435 : 0x0000a204, 0x00000100, 0xffffffff,
436 : 0x0000a208, 0x00000100, 0xffffffff,
437 : 0x0000a20c, 0x00000100, 0xffffffff,
438 : 0x00009744, 0x00000100, 0xffffffff,
439 : 0x00003f80, 0x00000100, 0xffffffff,
440 : 0x0000a210, 0x00000100, 0xffffffff,
441 : 0x0000a214, 0x00000100, 0xffffffff,
442 : 0x000004d8, 0x00000100, 0xffffffff,
443 : 0x00009664, 0x00000100, 0xffffffff,
444 : 0x00009698, 0x00000100, 0xffffffff,
445 : 0x000004d4, 0x00000200, 0xffffffff,
446 : 0x000004d0, 0x00000000, 0xffffffff,
447 : 0x000030cc, 0x00000104, 0xffffffff,
448 : 0x0000d0c0, 0x00000100, 0xffffffff,
449 : 0x0000d8c0, 0x00000100, 0xffffffff,
450 : 0x0000802c, 0x40000000, 0xffffffff,
451 : 0x00003fc4, 0x40000000, 0xffffffff,
452 : 0x0000915c, 0x00010000, 0xffffffff,
453 : 0x00009160, 0x00030002, 0xffffffff,
454 : 0x00009164, 0x00050004, 0xffffffff,
455 : 0x00009168, 0x00070006, 0xffffffff,
456 : 0x00009178, 0x00070000, 0xffffffff,
457 : 0x0000917c, 0x00030002, 0xffffffff,
458 : 0x00009180, 0x00050004, 0xffffffff,
459 : 0x0000918c, 0x00010006, 0xffffffff,
460 : 0x00009190, 0x00090008, 0xffffffff,
461 : 0x00009194, 0x00070000, 0xffffffff,
462 : 0x00009198, 0x00030002, 0xffffffff,
463 : 0x0000919c, 0x00050004, 0xffffffff,
464 : 0x000091a8, 0x00010006, 0xffffffff,
465 : 0x000091ac, 0x00090008, 0xffffffff,
466 : 0x000091b0, 0x00070000, 0xffffffff,
467 : 0x000091b4, 0x00030002, 0xffffffff,
468 : 0x000091b8, 0x00050004, 0xffffffff,
469 : 0x000091c4, 0x00010006, 0xffffffff,
470 : 0x000091c8, 0x00090008, 0xffffffff,
471 : 0x000091cc, 0x00070000, 0xffffffff,
472 : 0x000091d0, 0x00030002, 0xffffffff,
473 : 0x000091d4, 0x00050004, 0xffffffff,
474 : 0x000091e0, 0x00010006, 0xffffffff,
475 : 0x000091e4, 0x00090008, 0xffffffff,
476 : 0x000091e8, 0x00000000, 0xffffffff,
477 : 0x000091ec, 0x00070000, 0xffffffff,
478 : 0x000091f0, 0x00030002, 0xffffffff,
479 : 0x000091f4, 0x00050004, 0xffffffff,
480 : 0x00009200, 0x00010006, 0xffffffff,
481 : 0x00009204, 0x00090008, 0xffffffff,
482 : 0x00009208, 0x00070000, 0xffffffff,
483 : 0x0000920c, 0x00030002, 0xffffffff,
484 : 0x00009210, 0x00050004, 0xffffffff,
485 : 0x0000921c, 0x00010006, 0xffffffff,
486 : 0x00009220, 0x00090008, 0xffffffff,
487 : 0x00009224, 0x00070000, 0xffffffff,
488 : 0x00009228, 0x00030002, 0xffffffff,
489 : 0x0000922c, 0x00050004, 0xffffffff,
490 : 0x00009238, 0x00010006, 0xffffffff,
491 : 0x0000923c, 0x00090008, 0xffffffff,
492 : 0x00009240, 0x00070000, 0xffffffff,
493 : 0x00009244, 0x00030002, 0xffffffff,
494 : 0x00009248, 0x00050004, 0xffffffff,
495 : 0x00009254, 0x00010006, 0xffffffff,
496 : 0x00009258, 0x00090008, 0xffffffff,
497 : 0x0000925c, 0x00070000, 0xffffffff,
498 : 0x00009260, 0x00030002, 0xffffffff,
499 : 0x00009264, 0x00050004, 0xffffffff,
500 : 0x00009270, 0x00010006, 0xffffffff,
501 : 0x00009274, 0x00090008, 0xffffffff,
502 : 0x00009278, 0x00070000, 0xffffffff,
503 : 0x0000927c, 0x00030002, 0xffffffff,
504 : 0x00009280, 0x00050004, 0xffffffff,
505 : 0x0000928c, 0x00010006, 0xffffffff,
506 : 0x00009290, 0x00090008, 0xffffffff,
507 : 0x000092a8, 0x00070000, 0xffffffff,
508 : 0x000092ac, 0x00030002, 0xffffffff,
509 : 0x000092b0, 0x00050004, 0xffffffff,
510 : 0x000092bc, 0x00010006, 0xffffffff,
511 : 0x000092c0, 0x00090008, 0xffffffff,
512 : 0x000092c4, 0x00070000, 0xffffffff,
513 : 0x000092c8, 0x00030002, 0xffffffff,
514 : 0x000092cc, 0x00050004, 0xffffffff,
515 : 0x000092d8, 0x00010006, 0xffffffff,
516 : 0x000092dc, 0x00090008, 0xffffffff,
517 : 0x00009294, 0x00000000, 0xffffffff,
518 : 0x0000802c, 0x40010000, 0xffffffff,
519 : 0x00003fc4, 0x40010000, 0xffffffff,
520 : 0x0000915c, 0x00010000, 0xffffffff,
521 : 0x00009160, 0x00030002, 0xffffffff,
522 : 0x00009164, 0x00050004, 0xffffffff,
523 : 0x00009168, 0x00070006, 0xffffffff,
524 : 0x00009178, 0x00070000, 0xffffffff,
525 : 0x0000917c, 0x00030002, 0xffffffff,
526 : 0x00009180, 0x00050004, 0xffffffff,
527 : 0x0000918c, 0x00010006, 0xffffffff,
528 : 0x00009190, 0x00090008, 0xffffffff,
529 : 0x00009194, 0x00070000, 0xffffffff,
530 : 0x00009198, 0x00030002, 0xffffffff,
531 : 0x0000919c, 0x00050004, 0xffffffff,
532 : 0x000091a8, 0x00010006, 0xffffffff,
533 : 0x000091ac, 0x00090008, 0xffffffff,
534 : 0x000091b0, 0x00070000, 0xffffffff,
535 : 0x000091b4, 0x00030002, 0xffffffff,
536 : 0x000091b8, 0x00050004, 0xffffffff,
537 : 0x000091c4, 0x00010006, 0xffffffff,
538 : 0x000091c8, 0x00090008, 0xffffffff,
539 : 0x000091cc, 0x00070000, 0xffffffff,
540 : 0x000091d0, 0x00030002, 0xffffffff,
541 : 0x000091d4, 0x00050004, 0xffffffff,
542 : 0x000091e0, 0x00010006, 0xffffffff,
543 : 0x000091e4, 0x00090008, 0xffffffff,
544 : 0x000091e8, 0x00000000, 0xffffffff,
545 : 0x000091ec, 0x00070000, 0xffffffff,
546 : 0x000091f0, 0x00030002, 0xffffffff,
547 : 0x000091f4, 0x00050004, 0xffffffff,
548 : 0x00009200, 0x00010006, 0xffffffff,
549 : 0x00009204, 0x00090008, 0xffffffff,
550 : 0x00009208, 0x00070000, 0xffffffff,
551 : 0x0000920c, 0x00030002, 0xffffffff,
552 : 0x00009210, 0x00050004, 0xffffffff,
553 : 0x0000921c, 0x00010006, 0xffffffff,
554 : 0x00009220, 0x00090008, 0xffffffff,
555 : 0x00009224, 0x00070000, 0xffffffff,
556 : 0x00009228, 0x00030002, 0xffffffff,
557 : 0x0000922c, 0x00050004, 0xffffffff,
558 : 0x00009238, 0x00010006, 0xffffffff,
559 : 0x0000923c, 0x00090008, 0xffffffff,
560 : 0x00009240, 0x00070000, 0xffffffff,
561 : 0x00009244, 0x00030002, 0xffffffff,
562 : 0x00009248, 0x00050004, 0xffffffff,
563 : 0x00009254, 0x00010006, 0xffffffff,
564 : 0x00009258, 0x00090008, 0xffffffff,
565 : 0x0000925c, 0x00070000, 0xffffffff,
566 : 0x00009260, 0x00030002, 0xffffffff,
567 : 0x00009264, 0x00050004, 0xffffffff,
568 : 0x00009270, 0x00010006, 0xffffffff,
569 : 0x00009274, 0x00090008, 0xffffffff,
570 : 0x00009278, 0x00070000, 0xffffffff,
571 : 0x0000927c, 0x00030002, 0xffffffff,
572 : 0x00009280, 0x00050004, 0xffffffff,
573 : 0x0000928c, 0x00010006, 0xffffffff,
574 : 0x00009290, 0x00090008, 0xffffffff,
575 : 0x000092a8, 0x00070000, 0xffffffff,
576 : 0x000092ac, 0x00030002, 0xffffffff,
577 : 0x000092b0, 0x00050004, 0xffffffff,
578 : 0x000092bc, 0x00010006, 0xffffffff,
579 : 0x000092c0, 0x00090008, 0xffffffff,
580 : 0x000092c4, 0x00070000, 0xffffffff,
581 : 0x000092c8, 0x00030002, 0xffffffff,
582 : 0x000092cc, 0x00050004, 0xffffffff,
583 : 0x000092d8, 0x00010006, 0xffffffff,
584 : 0x000092dc, 0x00090008, 0xffffffff,
585 : 0x00009294, 0x00000000, 0xffffffff,
586 : 0x0000802c, 0xc0000000, 0xffffffff,
587 : 0x00003fc4, 0xc0000000, 0xffffffff,
588 : 0x000008f8, 0x00000010, 0xffffffff,
589 : 0x000008fc, 0x00000000, 0xffffffff,
590 : 0x000008f8, 0x00000011, 0xffffffff,
591 : 0x000008fc, 0x00000000, 0xffffffff,
592 : 0x000008f8, 0x00000012, 0xffffffff,
593 : 0x000008fc, 0x00000000, 0xffffffff,
594 : 0x000008f8, 0x00000013, 0xffffffff,
595 : 0x000008fc, 0x00000000, 0xffffffff,
596 : 0x000008f8, 0x00000014, 0xffffffff,
597 : 0x000008fc, 0x00000000, 0xffffffff,
598 : 0x000008f8, 0x00000015, 0xffffffff,
599 : 0x000008fc, 0x00000000, 0xffffffff,
600 : 0x000008f8, 0x00000016, 0xffffffff,
601 : 0x000008fc, 0x00000000, 0xffffffff,
602 : 0x000008f8, 0x00000017, 0xffffffff,
603 : 0x000008fc, 0x00000000, 0xffffffff,
604 : 0x000008f8, 0x00000018, 0xffffffff,
605 : 0x000008fc, 0x00000000, 0xffffffff,
606 : 0x000008f8, 0x00000019, 0xffffffff,
607 : 0x000008fc, 0x00000000, 0xffffffff,
608 : 0x000008f8, 0x0000001a, 0xffffffff,
609 : 0x000008fc, 0x00000000, 0xffffffff,
610 : 0x000008f8, 0x0000001b, 0xffffffff,
611 : 0x000008fc, 0x00000000, 0xffffffff
612 : };
613 : #define CAYMAN_MGCG_DEFAULT_LENGTH sizeof(cayman_mgcg_default) / (3 * sizeof(u32))
614 :
615 : static const u32 cayman_mgcg_disable[] =
616 : {
617 : 0x0000802c, 0xc0000000, 0xffffffff,
618 : 0x000008f8, 0x00000000, 0xffffffff,
619 : 0x000008fc, 0xffffffff, 0xffffffff,
620 : 0x000008f8, 0x00000001, 0xffffffff,
621 : 0x000008fc, 0xffffffff, 0xffffffff,
622 : 0x000008f8, 0x00000002, 0xffffffff,
623 : 0x000008fc, 0xffffffff, 0xffffffff,
624 : 0x000008f8, 0x00000003, 0xffffffff,
625 : 0x000008fc, 0xffffffff, 0xffffffff,
626 : 0x00009150, 0x00600000, 0xffffffff
627 : };
628 : #define CAYMAN_MGCG_DISABLE_LENGTH sizeof(cayman_mgcg_disable) / (3 * sizeof(u32))
629 :
630 : static const u32 cayman_mgcg_enable[] =
631 : {
632 : 0x0000802c, 0xc0000000, 0xffffffff,
633 : 0x000008f8, 0x00000000, 0xffffffff,
634 : 0x000008fc, 0x00000000, 0xffffffff,
635 : 0x000008f8, 0x00000001, 0xffffffff,
636 : 0x000008fc, 0x00000000, 0xffffffff,
637 : 0x000008f8, 0x00000002, 0xffffffff,
638 : 0x000008fc, 0x00600000, 0xffffffff,
639 : 0x000008f8, 0x00000003, 0xffffffff,
640 : 0x000008fc, 0x00000000, 0xffffffff,
641 : 0x00009150, 0x96944200, 0xffffffff
642 : };
643 :
644 : #define CAYMAN_MGCG_ENABLE_LENGTH sizeof(cayman_mgcg_enable) / (3 * sizeof(u32))
645 :
646 : #define NISLANDS_SYSLS_SEQUENCE 100
647 :
648 : static const u32 cayman_sysls_default[] =
649 : {
650 : /* Register, Value, Mask bits */
651 : 0x000055e8, 0x00000000, 0xffffffff,
652 : 0x0000d0bc, 0x00000000, 0xffffffff,
653 : 0x0000d8bc, 0x00000000, 0xffffffff,
654 : 0x000015c0, 0x000c1401, 0xffffffff,
655 : 0x0000264c, 0x000c0400, 0xffffffff,
656 : 0x00002648, 0x000c0400, 0xffffffff,
657 : 0x00002650, 0x000c0400, 0xffffffff,
658 : 0x000020b8, 0x000c0400, 0xffffffff,
659 : 0x000020bc, 0x000c0400, 0xffffffff,
660 : 0x000020c0, 0x000c0c80, 0xffffffff,
661 : 0x0000f4a0, 0x000000c0, 0xffffffff,
662 : 0x0000f4a4, 0x00680fff, 0xffffffff,
663 : 0x00002f50, 0x00000404, 0xffffffff,
664 : 0x000004c8, 0x00000001, 0xffffffff,
665 : 0x000064ec, 0x00000000, 0xffffffff,
666 : 0x00000c7c, 0x00000000, 0xffffffff,
667 : 0x00008dfc, 0x00000000, 0xffffffff
668 : };
669 : #define CAYMAN_SYSLS_DEFAULT_LENGTH sizeof(cayman_sysls_default) / (3 * sizeof(u32))
670 :
671 : static const u32 cayman_sysls_disable[] =
672 : {
673 : /* Register, Value, Mask bits */
674 : 0x0000d0c0, 0x00000000, 0xffffffff,
675 : 0x0000d8c0, 0x00000000, 0xffffffff,
676 : 0x000055e8, 0x00000000, 0xffffffff,
677 : 0x0000d0bc, 0x00000000, 0xffffffff,
678 : 0x0000d8bc, 0x00000000, 0xffffffff,
679 : 0x000015c0, 0x00041401, 0xffffffff,
680 : 0x0000264c, 0x00040400, 0xffffffff,
681 : 0x00002648, 0x00040400, 0xffffffff,
682 : 0x00002650, 0x00040400, 0xffffffff,
683 : 0x000020b8, 0x00040400, 0xffffffff,
684 : 0x000020bc, 0x00040400, 0xffffffff,
685 : 0x000020c0, 0x00040c80, 0xffffffff,
686 : 0x0000f4a0, 0x000000c0, 0xffffffff,
687 : 0x0000f4a4, 0x00680000, 0xffffffff,
688 : 0x00002f50, 0x00000404, 0xffffffff,
689 : 0x000004c8, 0x00000001, 0xffffffff,
690 : 0x000064ec, 0x00007ffd, 0xffffffff,
691 : 0x00000c7c, 0x0000ff00, 0xffffffff,
692 : 0x00008dfc, 0x0000007f, 0xffffffff
693 : };
694 : #define CAYMAN_SYSLS_DISABLE_LENGTH sizeof(cayman_sysls_disable) / (3 * sizeof(u32))
695 :
696 : static const u32 cayman_sysls_enable[] =
697 : {
698 : /* Register, Value, Mask bits */
699 : 0x000055e8, 0x00000001, 0xffffffff,
700 : 0x0000d0bc, 0x00000100, 0xffffffff,
701 : 0x0000d8bc, 0x00000100, 0xffffffff,
702 : 0x000015c0, 0x000c1401, 0xffffffff,
703 : 0x0000264c, 0x000c0400, 0xffffffff,
704 : 0x00002648, 0x000c0400, 0xffffffff,
705 : 0x00002650, 0x000c0400, 0xffffffff,
706 : 0x000020b8, 0x000c0400, 0xffffffff,
707 : 0x000020bc, 0x000c0400, 0xffffffff,
708 : 0x000020c0, 0x000c0c80, 0xffffffff,
709 : 0x0000f4a0, 0x000000c0, 0xffffffff,
710 : 0x0000f4a4, 0x00680fff, 0xffffffff,
711 : 0x00002f50, 0x00000903, 0xffffffff,
712 : 0x000004c8, 0x00000000, 0xffffffff,
713 : 0x000064ec, 0x00000000, 0xffffffff,
714 : 0x00000c7c, 0x00000000, 0xffffffff,
715 : 0x00008dfc, 0x00000000, 0xffffffff
716 : };
717 : #define CAYMAN_SYSLS_ENABLE_LENGTH sizeof(cayman_sysls_enable) / (3 * sizeof(u32))
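/*
 * The cayman_* tables above are flat arrays of (register offset, value,
 * mask) triples, and each *_LENGTH macro is the number of triples
 * (array size divided by 3 * sizeof(u32)).  A minimal sketch of how such
 * a sequence could be applied with masked read-modify-write cycles is
 * shown below; it is illustrative only, and the real consumer,
 * btc_program_mgcg_hw_sequence() in btc_dpm.c, may differ in detail.
 */
static void example_apply_reg_mask_sequence(struct radeon_device *rdev,
					    const u32 *seq, u32 count)
{
	u32 i, tmp;

	for (i = 0; i < count * 3; i += 3) {
		tmp = RREG32(seq[i]);			/* register offset */
		tmp &= ~seq[i + 2];			/* clear the masked bits */
		tmp |= seq[i + 1] & seq[i + 2];		/* apply new value under the mask */
		WREG32(seq[i], tmp);
	}
}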
718 :
719 : struct rv7xx_power_info *rv770_get_pi(struct radeon_device *rdev);
720 : struct evergreen_power_info *evergreen_get_pi(struct radeon_device *rdev);
721 :
722 : extern int ni_mc_load_microcode(struct radeon_device *rdev);
723 :
724 0 : struct ni_power_info *ni_get_pi(struct radeon_device *rdev)
725 : {
726 0 : struct ni_power_info *pi = rdev->pm.dpm.priv;
727 :
728 0 : return pi;
729 : }
730 :
731 0 : struct ni_ps *ni_get_ps(struct radeon_ps *rps)
732 : {
733 0 : struct ni_ps *ps = rps->ps_priv;
734 :
735 0 : return ps;
736 : }
737 :
738 0 : static void ni_calculate_leakage_for_v_and_t_formula(const struct ni_leakage_coeffients *coeff,
739 : u16 v, s32 t,
740 : u32 ileakage,
741 : u32 *leakage)
742 : {
743 : s64 kt, kv, leakage_w, i_leakage, vddc, temperature;
744 :
745 0 : i_leakage = div64_s64(drm_int2fixp(ileakage), 1000);
746 0 : vddc = div64_s64(drm_int2fixp(v), 1000);
747 0 : temperature = div64_s64(drm_int2fixp(t), 1000);
748 :
749 0 : kt = drm_fixp_mul(div64_s64(drm_int2fixp(coeff->at), 1000),
750 0 : drm_fixp_exp(drm_fixp_mul(div64_s64(drm_int2fixp(coeff->bt), 1000), temperature)));
751 0 : kv = drm_fixp_mul(div64_s64(drm_int2fixp(coeff->av), 1000),
752 0 : drm_fixp_exp(drm_fixp_mul(div64_s64(drm_int2fixp(coeff->bv), 1000), vddc)));
753 :
754 0 : leakage_w = drm_fixp_mul(drm_fixp_mul(drm_fixp_mul(i_leakage, kt), kv), vddc);
755 :
756 0 : *leakage = drm_fixp2int(leakage_w * 1000);
757 0 : }
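/*
 * With I = ileakage / 1000, V = v / 1000 and T = t / 1000, all evaluated
 * with the drm 64-bit fixed-point helpers, the formula above works out to
 *
 *   kt        = (at / 1000) * exp((bt / 1000) * T)
 *   kv        = (av / 1000) * exp((bv / 1000) * V)
 *   leakage_w = I * kt * kv * V
 *
 * and the stored result is drm_fixp2int(leakage_w * 1000), i.e. the
 * leakage power scaled back up by 1000 to match the pre-scaled inputs.
 */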
758 :
759 0 : static void ni_calculate_leakage_for_v_and_t(struct radeon_device *rdev,
760 : const struct ni_leakage_coeffients *coeff,
761 : u16 v,
762 : s32 t,
763 : u32 i_leakage,
764 : u32 *leakage)
765 : {
766 0 : ni_calculate_leakage_for_v_and_t_formula(coeff, v, t, i_leakage, leakage);
767 0 : }
768 :
769 0 : bool ni_dpm_vblank_too_short(struct radeon_device *rdev)
770 : {
771 0 : struct rv7xx_power_info *pi = rv770_get_pi(rdev);
772 0 : u32 vblank_time = r600_dpm_get_vblank_time(rdev);
773 : /* we never hit the non-gddr5 limit so disable it */
774 0 : u32 switch_limit = pi->mem_gddr5 ? 450 : 0;
775 :
776 0 : if (vblank_time < switch_limit)
777 0 : return true;
778 : else
779 0 : return false;
780 :
781 0 : }
782 :
783 0 : static void ni_apply_state_adjust_rules(struct radeon_device *rdev,
784 : struct radeon_ps *rps)
785 : {
786 0 : struct ni_ps *ps = ni_get_ps(rps);
787 : struct radeon_clock_and_voltage_limits *max_limits;
788 : bool disable_mclk_switching;
789 : u32 mclk;
790 : u16 vddci;
791 : int i;
792 :
793 0 : if ((rdev->pm.dpm.new_active_crtc_count > 1) ||
794 0 : ni_dpm_vblank_too_short(rdev))
795 0 : disable_mclk_switching = true;
796 : else
797 : disable_mclk_switching = false;
798 :
799 0 : if (rdev->pm.dpm.ac_power)
800 0 : max_limits = &rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac;
801 : else
802 0 : max_limits = &rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc;
803 :
804 0 : if (rdev->pm.dpm.ac_power == false) {
805 0 : for (i = 0; i < ps->performance_level_count; i++) {
806 0 : if (ps->performance_levels[i].mclk > max_limits->mclk)
807 0 : ps->performance_levels[i].mclk = max_limits->mclk;
808 0 : if (ps->performance_levels[i].sclk > max_limits->sclk)
809 0 : ps->performance_levels[i].sclk = max_limits->sclk;
810 0 : if (ps->performance_levels[i].vddc > max_limits->vddc)
811 0 : ps->performance_levels[i].vddc = max_limits->vddc;
812 0 : if (ps->performance_levels[i].vddci > max_limits->vddci)
813 0 : ps->performance_levels[i].vddci = max_limits->vddci;
814 : }
815 : }
816 :
817 : /* XXX validate the min clocks required for display */
818 :
819 : /* adjust low state */
820 0 : if (disable_mclk_switching) {
821 0 : ps->performance_levels[0].mclk =
822 0 : ps->performance_levels[ps->performance_level_count - 1].mclk;
823 0 : ps->performance_levels[0].vddci =
824 0 : ps->performance_levels[ps->performance_level_count - 1].vddci;
825 0 : }
826 :
827 0 : btc_skip_blacklist_clocks(rdev, max_limits->sclk, max_limits->mclk,
828 0 : &ps->performance_levels[0].sclk,
829 0 : &ps->performance_levels[0].mclk);
830 :
831 0 : for (i = 1; i < ps->performance_level_count; i++) {
832 0 : if (ps->performance_levels[i].sclk < ps->performance_levels[i - 1].sclk)
833 0 : ps->performance_levels[i].sclk = ps->performance_levels[i - 1].sclk;
834 0 : if (ps->performance_levels[i].vddc < ps->performance_levels[i - 1].vddc)
835 0 : ps->performance_levels[i].vddc = ps->performance_levels[i - 1].vddc;
836 : }
837 :
838 : /* adjust remaining states */
839 0 : if (disable_mclk_switching) {
840 0 : mclk = ps->performance_levels[0].mclk;
841 0 : vddci = ps->performance_levels[0].vddci;
842 0 : for (i = 1; i < ps->performance_level_count; i++) {
843 0 : if (mclk < ps->performance_levels[i].mclk)
844 0 : mclk = ps->performance_levels[i].mclk;
845 0 : if (vddci < ps->performance_levels[i].vddci)
846 0 : vddci = ps->performance_levels[i].vddci;
847 : }
848 0 : for (i = 0; i < ps->performance_level_count; i++) {
849 0 : ps->performance_levels[i].mclk = mclk;
850 0 : ps->performance_levels[i].vddci = vddci;
851 : }
852 : } else {
853 0 : for (i = 1; i < ps->performance_level_count; i++) {
854 0 : if (ps->performance_levels[i].mclk < ps->performance_levels[i - 1].mclk)
855 0 : ps->performance_levels[i].mclk = ps->performance_levels[i - 1].mclk;
856 0 : if (ps->performance_levels[i].vddci < ps->performance_levels[i - 1].vddci)
857 0 : ps->performance_levels[i].vddci = ps->performance_levels[i - 1].vddci;
858 : }
859 : }
860 :
861 0 : for (i = 1; i < ps->performance_level_count; i++)
862 0 : btc_skip_blacklist_clocks(rdev, max_limits->sclk, max_limits->mclk,
863 0 : &ps->performance_levels[i].sclk,
864 0 : &ps->performance_levels[i].mclk);
865 :
866 0 : for (i = 0; i < ps->performance_level_count; i++)
867 0 : btc_adjust_clock_combinations(rdev, max_limits,
868 0 : &ps->performance_levels[i]);
869 :
870 0 : for (i = 0; i < ps->performance_level_count; i++) {
871 0 : btc_apply_voltage_dependency_rules(&rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk,
872 0 : ps->performance_levels[i].sclk,
873 0 : max_limits->vddc, &ps->performance_levels[i].vddc);
874 0 : btc_apply_voltage_dependency_rules(&rdev->pm.dpm.dyn_state.vddci_dependency_on_mclk,
875 0 : ps->performance_levels[i].mclk,
876 0 : max_limits->vddci, &ps->performance_levels[i].vddci);
877 0 : btc_apply_voltage_dependency_rules(&rdev->pm.dpm.dyn_state.vddc_dependency_on_mclk,
878 0 : ps->performance_levels[i].mclk,
879 0 : max_limits->vddc, &ps->performance_levels[i].vddc);
880 0 : btc_apply_voltage_dependency_rules(&rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk,
881 0 : rdev->clock.current_dispclk,
882 0 : max_limits->vddc, &ps->performance_levels[i].vddc);
883 : }
884 :
885 0 : for (i = 0; i < ps->performance_level_count; i++) {
886 0 : btc_apply_voltage_delta_rules(rdev,
887 0 : max_limits->vddc, max_limits->vddci,
888 0 : &ps->performance_levels[i].vddc,
889 0 : &ps->performance_levels[i].vddci);
890 : }
891 :
892 0 : ps->dc_compatible = true;
893 0 : for (i = 0; i < ps->performance_level_count; i++) {
894 0 : if (ps->performance_levels[i].vddc > rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc.vddc)
895 0 : ps->dc_compatible = false;
896 :
897 0 : if (ps->performance_levels[i].vddc < rdev->pm.dpm.dyn_state.min_vddc_for_pcie_gen2)
898 0 : ps->performance_levels[i].flags &= ~ATOM_PPLIB_R600_FLAGS_PCIEGEN2;
899 : }
900 0 : }
901 :
902 0 : static void ni_cg_clockgating_default(struct radeon_device *rdev)
903 : {
904 : u32 count;
905 : const u32 *ps = NULL;
906 :
907 : ps = (const u32 *)&cayman_cgcg_cgls_default;
908 : count = CAYMAN_CGCG_CGLS_DEFAULT_LENGTH;
909 :
910 0 : btc_program_mgcg_hw_sequence(rdev, ps, count);
911 0 : }
912 :
913 0 : static void ni_gfx_clockgating_enable(struct radeon_device *rdev,
914 : bool enable)
915 : {
916 : u32 count;
917 : const u32 *ps = NULL;
918 :
919 0 : if (enable) {
920 : ps = (const u32 *)&cayman_cgcg_cgls_enable;
921 : count = CAYMAN_CGCG_CGLS_ENABLE_LENGTH;
922 0 : } else {
923 : ps = (const u32 *)&cayman_cgcg_cgls_disable;
924 : count = CAYMAN_CGCG_CGLS_DISABLE_LENGTH;
925 : }
926 :
927 0 : btc_program_mgcg_hw_sequence(rdev, ps, count);
928 0 : }
929 :
930 0 : static void ni_mg_clockgating_default(struct radeon_device *rdev)
931 : {
932 : u32 count;
933 : const u32 *ps = NULL;
934 :
935 : ps = (const u32 *)&cayman_mgcg_default;
936 : count = CAYMAN_MGCG_DEFAULT_LENGTH;
937 :
938 0 : btc_program_mgcg_hw_sequence(rdev, ps, count);
939 0 : }
940 :
941 0 : static void ni_mg_clockgating_enable(struct radeon_device *rdev,
942 : bool enable)
943 : {
944 : u32 count;
945 : const u32 *ps = NULL;
946 :
947 0 : if (enable) {
948 : ps = (const u32 *)&cayman_mgcg_enable;
949 : count = CAYMAN_MGCG_ENABLE_LENGTH;
950 0 : } else {
951 : ps = (const u32 *)&cayman_mgcg_disable;
952 : count = CAYMAN_MGCG_DISABLE_LENGTH;
953 : }
954 :
955 0 : btc_program_mgcg_hw_sequence(rdev, ps, count);
956 0 : }
957 :
958 0 : static void ni_ls_clockgating_default(struct radeon_device *rdev)
959 : {
960 : u32 count;
961 : const u32 *ps = NULL;
962 :
963 : ps = (const u32 *)&cayman_sysls_default;
964 : count = CAYMAN_SYSLS_DEFAULT_LENGTH;
965 :
966 0 : btc_program_mgcg_hw_sequence(rdev, ps, count);
967 0 : }
968 :
969 0 : static void ni_ls_clockgating_enable(struct radeon_device *rdev,
970 : bool enable)
971 : {
972 : u32 count;
973 : const u32 *ps = NULL;
974 :
975 0 : if (enable) {
976 : ps = (const u32 *)&cayman_sysls_enable;
977 : count = CAYMAN_SYSLS_ENABLE_LENGTH;
978 0 : } else {
979 : ps = (const u32 *)&cayman_sysls_disable;
980 : count = CAYMAN_SYSLS_DISABLE_LENGTH;
981 : }
982 :
983 0 : btc_program_mgcg_hw_sequence(rdev, ps, count);
984 :
985 0 : }
986 :
987 0 : static int ni_patch_single_dependency_table_based_on_leakage(struct radeon_device *rdev,
988 : struct radeon_clock_voltage_dependency_table *table)
989 : {
990 0 : struct rv7xx_power_info *pi = rv770_get_pi(rdev);
991 : u32 i;
992 :
993 0 : if (table) {
994 0 : for (i = 0; i < table->count; i++) {
995 0 : if (0xff01 == table->entries[i].v) {
996 0 : if (pi->max_vddc == 0)
997 0 : return -EINVAL;
998 0 : table->entries[i].v = pi->max_vddc;
999 0 : }
1000 : }
1001 : }
1002 0 : return 0;
1003 0 : }
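/*
 * A voltage of 0xff01 in a clock/voltage dependency entry is evidently a
 * "leakage voltage" placeholder rather than a real level: the routine
 * above substitutes the board's maximum VDDC for it (and fails if that
 * maximum is unknown).  A hypothetical entry, purely for illustration:
 *
 *   before patching: { .clk = 72000, .v = 0xff01 }
 *   after  patching: { .clk = 72000, .v = pi->max_vddc }   (e.g. 1100)
 */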
1004 :
1005 0 : static int ni_patch_dependency_tables_based_on_leakage(struct radeon_device *rdev)
1006 : {
1007 : int ret = 0;
1008 :
1009 0 : ret = ni_patch_single_dependency_table_based_on_leakage(rdev,
1010 0 : &rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk);
1011 :
1012 0 : ret = ni_patch_single_dependency_table_based_on_leakage(rdev,
1013 0 : &rdev->pm.dpm.dyn_state.vddc_dependency_on_mclk);
1014 0 : return ret;
1015 : }
1016 :
1017 0 : static void ni_stop_dpm(struct radeon_device *rdev)
1018 : {
1019 0 : WREG32_P(GENERAL_PWRMGT, 0, ~GLOBAL_PWRMGT_EN);
1020 0 : }
1021 :
1022 : #if 0
1023 : static int ni_notify_hw_of_power_source(struct radeon_device *rdev,
1024 : bool ac_power)
1025 : {
1026 : if (ac_power)
1027 : return (rv770_send_msg_to_smc(rdev, PPSMC_MSG_RunningOnAC) == PPSMC_Result_OK) ?
1028 : 0 : -EINVAL;
1029 :
1030 : return 0;
1031 : }
1032 : #endif
1033 :
1034 0 : static PPSMC_Result ni_send_msg_to_smc_with_parameter(struct radeon_device *rdev,
1035 : PPSMC_Msg msg, u32 parameter)
1036 : {
1037 0 : WREG32(SMC_SCRATCH0, parameter);
1038 0 : return rv770_send_msg_to_smc(rdev, msg);
1039 : }
1040 :
1041 0 : static int ni_restrict_performance_levels_before_switch(struct radeon_device *rdev)
1042 : {
1043 0 : if (rv770_send_msg_to_smc(rdev, PPSMC_MSG_NoForcedLevel) != PPSMC_Result_OK)
1044 0 : return -EINVAL;
1045 :
1046 0 : return (ni_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetEnabledLevels, 1) == PPSMC_Result_OK) ?
1047 : 0 : -EINVAL;
1048 0 : }
1049 :
1050 0 : int ni_dpm_force_performance_level(struct radeon_device *rdev,
1051 : enum radeon_dpm_forced_level level)
1052 : {
1053 0 : if (level == RADEON_DPM_FORCED_LEVEL_HIGH) {
1054 0 : if (ni_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetEnabledLevels, 0) != PPSMC_Result_OK)
1055 0 : return -EINVAL;
1056 :
1057 0 : if (ni_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetForcedLevels, 1) != PPSMC_Result_OK)
1058 0 : return -EINVAL;
1059 0 : } else if (level == RADEON_DPM_FORCED_LEVEL_LOW) {
1060 0 : if (ni_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetForcedLevels, 0) != PPSMC_Result_OK)
1061 0 : return -EINVAL;
1062 :
1063 0 : if (ni_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetEnabledLevels, 1) != PPSMC_Result_OK)
1064 0 : return -EINVAL;
1065 0 : } else if (level == RADEON_DPM_FORCED_LEVEL_AUTO) {
1066 0 : if (ni_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetForcedLevels, 0) != PPSMC_Result_OK)
1067 0 : return -EINVAL;
1068 :
1069 0 : if (ni_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetEnabledLevels, 0) != PPSMC_Result_OK)
1070 0 : return -EINVAL;
1071 : }
1072 :
1073 0 : rdev->pm.dpm.forced_level = level;
1074 :
1075 0 : return 0;
1076 0 : }
1077 :
1078 0 : static void ni_stop_smc(struct radeon_device *rdev)
1079 : {
1080 : u32 tmp;
1081 : int i;
1082 :
1083 0 : for (i = 0; i < rdev->usec_timeout; i++) {
1084 0 : tmp = RREG32(LB_SYNC_RESET_SEL) & LB_SYNC_RESET_SEL_MASK;
1085 0 : if (tmp != 1)
1086 : break;
1087 0 : udelay(1);
1088 : }
1089 :
1090 0 : udelay(100);
1091 :
1092 0 : r7xx_stop_smc(rdev);
1093 0 : }
1094 :
1095 0 : static int ni_process_firmware_header(struct radeon_device *rdev)
1096 : {
1097 0 : struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1098 0 : struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1099 0 : struct ni_power_info *ni_pi = ni_get_pi(rdev);
1100 0 : u32 tmp;
1101 : int ret;
1102 :
1103 0 : ret = rv770_read_smc_sram_dword(rdev,
1104 : NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
1105 : NISLANDS_SMC_FIRMWARE_HEADER_stateTable,
1106 0 : &tmp, pi->sram_end);
1107 :
1108 0 : if (ret)
1109 0 : return ret;
1110 :
1111 0 : pi->state_table_start = (u16)tmp;
1112 :
1113 0 : ret = rv770_read_smc_sram_dword(rdev,
1114 : NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
1115 : NISLANDS_SMC_FIRMWARE_HEADER_softRegisters,
1116 0 : &tmp, pi->sram_end);
1117 :
1118 0 : if (ret)
1119 0 : return ret;
1120 :
1121 0 : pi->soft_regs_start = (u16)tmp;
1122 :
1123 0 : ret = rv770_read_smc_sram_dword(rdev,
1124 : NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
1125 : NISLANDS_SMC_FIRMWARE_HEADER_mcRegisterTable,
1126 0 : &tmp, pi->sram_end);
1127 :
1128 0 : if (ret)
1129 0 : return ret;
1130 :
1131 0 : eg_pi->mc_reg_table_start = (u16)tmp;
1132 :
1133 0 : ret = rv770_read_smc_sram_dword(rdev,
1134 : NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
1135 : NISLANDS_SMC_FIRMWARE_HEADER_fanTable,
1136 0 : &tmp, pi->sram_end);
1137 :
1138 0 : if (ret)
1139 0 : return ret;
1140 :
1141 0 : ni_pi->fan_table_start = (u16)tmp;
1142 :
1143 0 : ret = rv770_read_smc_sram_dword(rdev,
1144 : NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
1145 : NISLANDS_SMC_FIRMWARE_HEADER_mcArbDramAutoRefreshTable,
1146 0 : &tmp, pi->sram_end);
1147 :
1148 0 : if (ret)
1149 0 : return ret;
1150 :
1151 0 : ni_pi->arb_table_start = (u16)tmp;
1152 :
1153 0 : ret = rv770_read_smc_sram_dword(rdev,
1154 : NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
1155 : NISLANDS_SMC_FIRMWARE_HEADER_cacTable,
1156 0 : &tmp, pi->sram_end);
1157 :
1158 0 : if (ret)
1159 0 : return ret;
1160 :
1161 0 : ni_pi->cac_table_start = (u16)tmp;
1162 :
1163 0 : ret = rv770_read_smc_sram_dword(rdev,
1164 : NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
1165 : NISLANDS_SMC_FIRMWARE_HEADER_spllTable,
1166 0 : &tmp, pi->sram_end);
1167 :
1168 0 : if (ret)
1169 0 : return ret;
1170 :
1171 0 : ni_pi->spll_table_start = (u16)tmp;
1172 :
1173 :
1174 0 : return ret;
1175 0 : }
1176 :
1177 0 : static void ni_read_clock_registers(struct radeon_device *rdev)
1178 : {
1179 0 : struct ni_power_info *ni_pi = ni_get_pi(rdev);
1180 :
1181 0 : ni_pi->clock_registers.cg_spll_func_cntl = RREG32(CG_SPLL_FUNC_CNTL);
1182 0 : ni_pi->clock_registers.cg_spll_func_cntl_2 = RREG32(CG_SPLL_FUNC_CNTL_2);
1183 0 : ni_pi->clock_registers.cg_spll_func_cntl_3 = RREG32(CG_SPLL_FUNC_CNTL_3);
1184 0 : ni_pi->clock_registers.cg_spll_func_cntl_4 = RREG32(CG_SPLL_FUNC_CNTL_4);
1185 0 : ni_pi->clock_registers.cg_spll_spread_spectrum = RREG32(CG_SPLL_SPREAD_SPECTRUM);
1186 0 : ni_pi->clock_registers.cg_spll_spread_spectrum_2 = RREG32(CG_SPLL_SPREAD_SPECTRUM_2);
1187 0 : ni_pi->clock_registers.mpll_ad_func_cntl = RREG32(MPLL_AD_FUNC_CNTL);
1188 0 : ni_pi->clock_registers.mpll_ad_func_cntl_2 = RREG32(MPLL_AD_FUNC_CNTL_2);
1189 0 : ni_pi->clock_registers.mpll_dq_func_cntl = RREG32(MPLL_DQ_FUNC_CNTL);
1190 0 : ni_pi->clock_registers.mpll_dq_func_cntl_2 = RREG32(MPLL_DQ_FUNC_CNTL_2);
1191 0 : ni_pi->clock_registers.mclk_pwrmgt_cntl = RREG32(MCLK_PWRMGT_CNTL);
1192 0 : ni_pi->clock_registers.dll_cntl = RREG32(DLL_CNTL);
1193 0 : ni_pi->clock_registers.mpll_ss1 = RREG32(MPLL_SS1);
1194 0 : ni_pi->clock_registers.mpll_ss2 = RREG32(MPLL_SS2);
1195 0 : }
1196 :
1197 : #if 0
1198 : static int ni_enter_ulp_state(struct radeon_device *rdev)
1199 : {
1200 : struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1201 :
1202 : if (pi->gfx_clock_gating) {
1203 : WREG32_P(SCLK_PWRMGT_CNTL, 0, ~DYN_GFX_CLK_OFF_EN);
1204 : WREG32_P(SCLK_PWRMGT_CNTL, GFX_CLK_FORCE_ON, ~GFX_CLK_FORCE_ON);
1205 : WREG32_P(SCLK_PWRMGT_CNTL, 0, ~GFX_CLK_FORCE_ON);
1206 : RREG32(GB_ADDR_CONFIG);
1207 : }
1208 :
1209 : WREG32_P(SMC_MSG, HOST_SMC_MSG(PPSMC_MSG_SwitchToMinimumPower),
1210 : ~HOST_SMC_MSG_MASK);
1211 :
1212 : udelay(25000);
1213 :
1214 : return 0;
1215 : }
1216 : #endif
1217 :
1218 0 : static void ni_program_response_times(struct radeon_device *rdev)
1219 : {
1220 : u32 voltage_response_time, backbias_response_time, acpi_delay_time, vbi_time_out;
1221 : u32 vddc_dly, bb_dly, acpi_dly, vbi_dly, mclk_switch_limit;
1222 : u32 reference_clock;
1223 :
1224 0 : rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_mvdd_chg_time, 1);
1225 :
1226 0 : voltage_response_time = (u32)rdev->pm.dpm.voltage_response_time;
1227 0 : backbias_response_time = (u32)rdev->pm.dpm.backbias_response_time;
1228 :
1229 0 : if (voltage_response_time == 0)
1230 0 : voltage_response_time = 1000;
1231 :
1232 0 : if (backbias_response_time == 0)
1233 0 : backbias_response_time = 1000;
1234 :
1235 : acpi_delay_time = 15000;
1236 : vbi_time_out = 100000;
1237 :
1238 0 : reference_clock = radeon_get_xclk(rdev);
1239 :
1240 0 : vddc_dly = (voltage_response_time * reference_clock) / 1600;
1241 0 : bb_dly = (backbias_response_time * reference_clock) / 1600;
1242 0 : acpi_dly = (acpi_delay_time * reference_clock) / 1600;
1243 0 : vbi_dly = (vbi_time_out * reference_clock) / 1600;
1244 :
1245 0 : mclk_switch_limit = (460 * reference_clock) / 100;
1246 :
1247 0 : rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_delay_vreg, vddc_dly);
1248 0 : rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_delay_bbias, bb_dly);
1249 0 : rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_delay_acpi, acpi_dly);
1250 0 : rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_mclk_chg_timeout, vbi_dly);
1251 0 : rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_mc_block_delay, 0xAA);
1252 0 : rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_mclk_switch_lim, mclk_switch_limit);
1253 0 : }
1254 :
1255 0 : static void ni_populate_smc_voltage_table(struct radeon_device *rdev,
1256 : struct atom_voltage_table *voltage_table,
1257 : NISLANDS_SMC_STATETABLE *table)
1258 : {
1259 : unsigned int i;
1260 :
1261 0 : for (i = 0; i < voltage_table->count; i++) {
1262 0 : table->highSMIO[i] = 0;
1263 0 : table->lowSMIO[i] |= cpu_to_be32(voltage_table->entries[i].smio_low);
1264 : }
1265 0 : }
1266 :
1267 0 : static void ni_populate_smc_voltage_tables(struct radeon_device *rdev,
1268 : NISLANDS_SMC_STATETABLE *table)
1269 : {
1270 0 : struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1271 0 : struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1272 : unsigned char i;
1273 :
1274 0 : if (eg_pi->vddc_voltage_table.count) {
1275 0 : ni_populate_smc_voltage_table(rdev, &eg_pi->vddc_voltage_table, table);
1276 0 : table->voltageMaskTable.highMask[NISLANDS_SMC_VOLTAGEMASK_VDDC] = 0;
1277 0 : table->voltageMaskTable.lowMask[NISLANDS_SMC_VOLTAGEMASK_VDDC] =
1278 0 : cpu_to_be32(eg_pi->vddc_voltage_table.mask_low);
1279 :
1280 0 : for (i = 0; i < eg_pi->vddc_voltage_table.count; i++) {
1281 0 : if (pi->max_vddc_in_table <= eg_pi->vddc_voltage_table.entries[i].value) {
1282 0 : table->maxVDDCIndexInPPTable = i;
1283 0 : break;
1284 : }
1285 : }
1286 : }
1287 :
1288 0 : if (eg_pi->vddci_voltage_table.count) {
1289 0 : ni_populate_smc_voltage_table(rdev, &eg_pi->vddci_voltage_table, table);
1290 :
1291 0 : table->voltageMaskTable.highMask[NISLANDS_SMC_VOLTAGEMASK_VDDCI] = 0;
1292 0 : table->voltageMaskTable.lowMask[NISLANDS_SMC_VOLTAGEMASK_VDDCI] =
1293 0 : cpu_to_be32(eg_pi->vddci_voltage_table.mask_low);
1294 0 : }
1295 0 : }
1296 :
1297 0 : static int ni_populate_voltage_value(struct radeon_device *rdev,
1298 : struct atom_voltage_table *table,
1299 : u16 value,
1300 : NISLANDS_SMC_VOLTAGE_VALUE *voltage)
1301 : {
1302 : unsigned int i;
1303 :
1304 0 : for (i = 0; i < table->count; i++) {
1305 0 : if (value <= table->entries[i].value) {
1306 0 : voltage->index = (u8)i;
1307 0 : voltage->value = cpu_to_be16(table->entries[i].value);
1308 0 : break;
1309 : }
1310 : }
1311 :
1312 0 : if (i >= table->count)
1313 0 : return -EINVAL;
1314 :
1315 0 : return 0;
1316 0 : }
1317 :
1318 0 : static void ni_populate_mvdd_value(struct radeon_device *rdev,
1319 : u32 mclk,
1320 : NISLANDS_SMC_VOLTAGE_VALUE *voltage)
1321 : {
1322 0 : struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1323 0 : struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1324 :
1325 0 : if (!pi->mvdd_control) {
1326 0 : voltage->index = eg_pi->mvdd_high_index;
1327 0 : voltage->value = cpu_to_be16(MVDD_HIGH_VALUE);
1328 0 : return;
1329 : }
1330 :
1331 0 : if (mclk <= pi->mvdd_split_frequency) {
1332 0 : voltage->index = eg_pi->mvdd_low_index;
1333 0 : voltage->value = cpu_to_be16(MVDD_LOW_VALUE);
1334 0 : } else {
1335 0 : voltage->index = eg_pi->mvdd_high_index;
1336 0 : voltage->value = cpu_to_be16(MVDD_HIGH_VALUE);
1337 : }
1338 0 : }
1339 :
1340 0 : static int ni_get_std_voltage_value(struct radeon_device *rdev,
1341 : NISLANDS_SMC_VOLTAGE_VALUE *voltage,
1342 : u16 *std_voltage)
1343 : {
1344 0 : if (rdev->pm.dpm.dyn_state.cac_leakage_table.entries &&
1345 0 : ((u32)voltage->index < rdev->pm.dpm.dyn_state.cac_leakage_table.count))
1346 0 : *std_voltage = rdev->pm.dpm.dyn_state.cac_leakage_table.entries[voltage->index].vddc;
1347 : else
1348 0 : *std_voltage = be16_to_cpu(voltage->value);
1349 :
1350 0 : return 0;
1351 : }
1352 :
1353 0 : static void ni_populate_std_voltage_value(struct radeon_device *rdev,
1354 : u16 value, u8 index,
1355 : NISLANDS_SMC_VOLTAGE_VALUE *voltage)
1356 : {
1357 0 : voltage->index = index;
1358 0 : voltage->value = cpu_to_be16(value);
1359 0 : }
1360 :
1361 0 : static u32 ni_get_smc_power_scaling_factor(struct radeon_device *rdev)
1362 : {
1363 : u32 xclk_period;
1364 0 : u32 xclk = radeon_get_xclk(rdev);
1365 0 : u32 tmp = RREG32(CG_CAC_CTRL) & TID_CNT_MASK;
1366 :
1367 0 : xclk_period = (1000000000UL / xclk);
1368 0 : xclk_period /= 10000UL;
1369 :
1370 0 : return tmp * xclk_period;
1371 : }
1372 :
1373 0 : static u32 ni_scale_power_for_smc(u32 power_in_watts, u32 scaling_factor)
1374 : {
1375 0 : return (power_in_watts * scaling_factor) << 2;
1376 : }
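/*
 * radeon_get_xclk() reports the reference clock in 10 kHz units (as used
 * elsewhere in this driver's DPM code), so (1000000000 / xclk) / 10000
 * reduces to the reference clock period in nanoseconds; e.g. a
 * hypothetical xclk of 2700 (27 MHz) gives (1000000000 / 2700) / 10000 = 37.
 * The scaling factor is that period multiplied by the TID count read from
 * CG_CAC_CTRL, and ni_scale_power_for_smc() just above then multiplies a
 * power limit in watts by this factor and by 4 (the << 2) before it is
 * handed to the SMC.
 */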
1377 :
1378 0 : static u32 ni_calculate_power_boost_limit(struct radeon_device *rdev,
1379 : struct radeon_ps *radeon_state,
1380 : u32 near_tdp_limit)
1381 : {
1382 0 : struct ni_ps *state = ni_get_ps(radeon_state);
1383 0 : struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1384 0 : struct ni_power_info *ni_pi = ni_get_pi(rdev);
1385 : u32 power_boost_limit = 0;
1386 : int ret;
1387 :
1388 0 : if (ni_pi->enable_power_containment &&
1389 0 : ni_pi->use_power_boost_limit) {
1390 0 : NISLANDS_SMC_VOLTAGE_VALUE vddc;
1391 0 : u16 std_vddc_med;
1392 0 : u16 std_vddc_high;
1393 : u64 tmp, n, d;
1394 :
1395 0 : if (state->performance_level_count < 3)
1396 0 : return 0;
1397 :
1398 0 : ret = ni_populate_voltage_value(rdev, &eg_pi->vddc_voltage_table,
1399 0 : state->performance_levels[state->performance_level_count - 2].vddc,
1400 : &vddc);
1401 0 : if (ret)
1402 0 : return 0;
1403 :
1404 0 : ret = ni_get_std_voltage_value(rdev, &vddc, &std_vddc_med);
1405 0 : if (ret)
1406 0 : return 0;
1407 :
1408 0 : ret = ni_populate_voltage_value(rdev, &eg_pi->vddc_voltage_table,
1409 0 : state->performance_levels[state->performance_level_count - 1].vddc,
1410 : &vddc);
1411 0 : if (ret)
1412 0 : return 0;
1413 :
1414 0 : ret = ni_get_std_voltage_value(rdev, &vddc, &std_vddc_high);
1415 0 : if (ret)
1416 0 : return 0;
1417 :
1418 0 : n = ((u64)near_tdp_limit * ((u64)std_vddc_med * (u64)std_vddc_med) * 90);
1419 0 : d = ((u64)std_vddc_high * (u64)std_vddc_high * 100);
1420 0 : tmp = div64_u64(n, d);
1421 :
1422 0 : if (tmp >> 32)
1423 0 : return 0;
1424 0 : power_boost_limit = (u32)tmp;
1425 0 : }
1426 :
1427 0 : return power_boost_limit;
1428 0 : }
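/*
 * When power containment and the boost limit are enabled, the value
 * computed above reduces to
 *
 *   boost = near_tdp_limit * (std_vddc_med / std_vddc_high)^2 * 90 / 100
 *
 * i.e. the near-TDP limit scaled by the squared ratio of the second-highest
 * to highest standardized VDDC, derated to 90%.  Hypothetical numbers,
 * purely for illustration: near_tdp_limit = 100, std_vddc_med = 1050 and
 * std_vddc_high = 1100 give (100 * 1050^2 * 90) / (1100^2 * 100) = 82.
 */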
1429 :
1430 0 : static int ni_calculate_adjusted_tdp_limits(struct radeon_device *rdev,
1431 : bool adjust_polarity,
1432 : u32 tdp_adjustment,
1433 : u32 *tdp_limit,
1434 : u32 *near_tdp_limit)
1435 : {
1436 0 : if (tdp_adjustment > (u32)rdev->pm.dpm.tdp_od_limit)
1437 0 : return -EINVAL;
1438 :
1439 0 : if (adjust_polarity) {
1440 0 : *tdp_limit = ((100 + tdp_adjustment) * rdev->pm.dpm.tdp_limit) / 100;
1441 0 : *near_tdp_limit = rdev->pm.dpm.near_tdp_limit + (*tdp_limit - rdev->pm.dpm.tdp_limit);
1442 0 : } else {
1443 0 : *tdp_limit = ((100 - tdp_adjustment) * rdev->pm.dpm.tdp_limit) / 100;
1444 0 : *near_tdp_limit = rdev->pm.dpm.near_tdp_limit - (rdev->pm.dpm.tdp_limit - *tdp_limit);
1445 : }
1446 :
1447 0 : return 0;
1448 0 : }
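/*
 * tdp_adjustment is a percentage of the stock TDP limit: with
 * adjust_polarity true the limit is raised to (100 + N)% and
 * near_tdp_limit is shifted up by the same absolute amount; with
 * adjust_polarity false both are lowered symmetrically.  Hypothetical
 * numbers, for illustration only: tdp_limit = 200, near_tdp_limit = 180
 * and a +10% adjustment yield tdp_limit = 220, near_tdp_limit = 200.
 */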
1449 :
1450 0 : static int ni_populate_smc_tdp_limits(struct radeon_device *rdev,
1451 : struct radeon_ps *radeon_state)
1452 : {
1453 0 : struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1454 0 : struct ni_power_info *ni_pi = ni_get_pi(rdev);
1455 :
1456 0 : if (ni_pi->enable_power_containment) {
1457 0 : NISLANDS_SMC_STATETABLE *smc_table = &ni_pi->smc_statetable;
1458 0 : u32 scaling_factor = ni_get_smc_power_scaling_factor(rdev);
1459 0 : u32 tdp_limit;
1460 0 : u32 near_tdp_limit;
1461 : u32 power_boost_limit;
1462 : int ret;
1463 :
1464 0 : if (scaling_factor == 0)
1465 0 : return -EINVAL;
1466 :
1467 0 : memset(smc_table, 0, sizeof(NISLANDS_SMC_STATETABLE));
1468 :
1469 0 : ret = ni_calculate_adjusted_tdp_limits(rdev,
1470 : false, /* ??? */
1471 0 : rdev->pm.dpm.tdp_adjustment,
1472 : &tdp_limit,
1473 : &near_tdp_limit);
1474 0 : if (ret)
1475 0 : return ret;
1476 :
1477 0 : power_boost_limit = ni_calculate_power_boost_limit(rdev, radeon_state,
1478 0 : near_tdp_limit);
1479 :
1480 0 : smc_table->dpm2Params.TDPLimit =
1481 0 : cpu_to_be32(ni_scale_power_for_smc(tdp_limit, scaling_factor));
1482 0 : smc_table->dpm2Params.NearTDPLimit =
1483 0 : cpu_to_be32(ni_scale_power_for_smc(near_tdp_limit, scaling_factor));
1484 0 : smc_table->dpm2Params.SafePowerLimit =
1485 0 : cpu_to_be32(ni_scale_power_for_smc((near_tdp_limit * NISLANDS_DPM2_TDP_SAFE_LIMIT_PERCENT) / 100,
1486 : scaling_factor));
1487 0 : smc_table->dpm2Params.PowerBoostLimit =
1488 0 : cpu_to_be32(ni_scale_power_for_smc(power_boost_limit, scaling_factor));
1489 :
1490 0 : ret = rv770_copy_bytes_to_smc(rdev,
1491 0 : (u16)(pi->state_table_start + offsetof(NISLANDS_SMC_STATETABLE, dpm2Params) +
1492 : offsetof(PP_NIslands_DPM2Parameters, TDPLimit)),
1493 0 : (u8 *)(&smc_table->dpm2Params.TDPLimit),
1494 0 : sizeof(u32) * 4, pi->sram_end);
1495 0 : if (ret)
1496 0 : return ret;
1497 0 : }
1498 :
1499 0 : return 0;
1500 0 : }
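/*
 * Note that the single rv770_copy_bytes_to_smc() call above pushes four
 * consecutive big-endian dwords - TDPLimit, NearTDPLimit, SafePowerLimit
 * and PowerBoostLimit - starting at the TDPLimit offset inside
 * dpm2Params; this relies on those four fields sitting back to back in
 * PP_NIslands_DPM2Parameters.
 */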
1501 :
1502 0 : int ni_copy_and_switch_arb_sets(struct radeon_device *rdev,
1503 : u32 arb_freq_src, u32 arb_freq_dest)
1504 : {
1505 : u32 mc_arb_dram_timing;
1506 : u32 mc_arb_dram_timing2;
1507 : u32 burst_time;
1508 : u32 mc_cg_config;
1509 :
1510 0 : switch (arb_freq_src) {
1511 : case MC_CG_ARB_FREQ_F0:
1512 0 : mc_arb_dram_timing = RREG32(MC_ARB_DRAM_TIMING);
1513 0 : mc_arb_dram_timing2 = RREG32(MC_ARB_DRAM_TIMING2);
1514 0 : burst_time = (RREG32(MC_ARB_BURST_TIME) & STATE0_MASK) >> STATE0_SHIFT;
1515 0 : break;
1516 : case MC_CG_ARB_FREQ_F1:
1517 0 : mc_arb_dram_timing = RREG32(MC_ARB_DRAM_TIMING_1);
1518 0 : mc_arb_dram_timing2 = RREG32(MC_ARB_DRAM_TIMING2_1);
1519 0 : burst_time = (RREG32(MC_ARB_BURST_TIME) & STATE1_MASK) >> STATE1_SHIFT;
1520 0 : break;
1521 : case MC_CG_ARB_FREQ_F2:
1522 0 : mc_arb_dram_timing = RREG32(MC_ARB_DRAM_TIMING_2);
1523 0 : mc_arb_dram_timing2 = RREG32(MC_ARB_DRAM_TIMING2_2);
1524 0 : burst_time = (RREG32(MC_ARB_BURST_TIME) & STATE2_MASK) >> STATE2_SHIFT;
1525 0 : break;
1526 : case MC_CG_ARB_FREQ_F3:
1527 0 : mc_arb_dram_timing = RREG32(MC_ARB_DRAM_TIMING_3);
1528 0 : mc_arb_dram_timing2 = RREG32(MC_ARB_DRAM_TIMING2_3);
1529 0 : burst_time = (RREG32(MC_ARB_BURST_TIME) & STATE3_MASK) >> STATE3_SHIFT;
1530 0 : break;
1531 : default:
1532 0 : return -EINVAL;
1533 : }
1534 :
1535 0 : switch (arb_freq_dest) {
1536 : case MC_CG_ARB_FREQ_F0:
1537 0 : WREG32(MC_ARB_DRAM_TIMING, mc_arb_dram_timing);
1538 0 : WREG32(MC_ARB_DRAM_TIMING2, mc_arb_dram_timing2);
1539 0 : WREG32_P(MC_ARB_BURST_TIME, STATE0(burst_time), ~STATE0_MASK);
1540 0 : break;
1541 : case MC_CG_ARB_FREQ_F1:
1542 0 : WREG32(MC_ARB_DRAM_TIMING_1, mc_arb_dram_timing);
1543 0 : WREG32(MC_ARB_DRAM_TIMING2_1, mc_arb_dram_timing2);
1544 0 : WREG32_P(MC_ARB_BURST_TIME, STATE1(burst_time), ~STATE1_MASK);
1545 0 : break;
1546 : case MC_CG_ARB_FREQ_F2:
1547 0 : WREG32(MC_ARB_DRAM_TIMING_2, mc_arb_dram_timing);
1548 0 : WREG32(MC_ARB_DRAM_TIMING2_2, mc_arb_dram_timing2);
1549 0 : WREG32_P(MC_ARB_BURST_TIME, STATE2(burst_time), ~STATE2_MASK);
1550 0 : break;
1551 : case MC_CG_ARB_FREQ_F3:
1552 0 : WREG32(MC_ARB_DRAM_TIMING_3, mc_arb_dram_timing);
1553 0 : WREG32(MC_ARB_DRAM_TIMING2_3, mc_arb_dram_timing2);
1554 0 : WREG32_P(MC_ARB_BURST_TIME, STATE3(burst_time), ~STATE3_MASK);
1555 0 : break;
1556 : default:
1557 0 : return -EINVAL;
1558 : }
1559 :
1560 0 : mc_cg_config = RREG32(MC_CG_CONFIG) | 0x0000000F;
1561 0 : WREG32(MC_CG_CONFIG, mc_cg_config);
1562 0 : WREG32_P(MC_ARB_CG, CG_ARB_REQ(arb_freq_dest), ~CG_ARB_REQ_MASK);
1563 :
1564 0 : return 0;
1565 0 : }
1566 :
1567 0 : static int ni_init_arb_table_index(struct radeon_device *rdev)
1568 : {
1569 0 : struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1570 0 : struct ni_power_info *ni_pi = ni_get_pi(rdev);
1571 0 : u32 tmp;
1572 : int ret;
1573 :
1574 0 : ret = rv770_read_smc_sram_dword(rdev, ni_pi->arb_table_start,
1575 0 : &tmp, pi->sram_end);
1576 0 : if (ret)
1577 0 : return ret;
1578 :
1579 0 : tmp &= 0x00FFFFFF;
1580 0 : tmp |= ((u32)MC_CG_ARB_FREQ_F1) << 24;
1581 :
1582 0 : return rv770_write_smc_sram_dword(rdev, ni_pi->arb_table_start,
1583 0 : tmp, pi->sram_end);
1584 0 : }
1585 :
1586 0 : static int ni_initial_switch_from_arb_f0_to_f1(struct radeon_device *rdev)
1587 : {
1588 0 : return ni_copy_and_switch_arb_sets(rdev, MC_CG_ARB_FREQ_F0, MC_CG_ARB_FREQ_F1);
1589 : }
1590 :
1591 0 : static int ni_force_switch_to_arb_f0(struct radeon_device *rdev)
1592 : {
1593 0 : struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1594 0 : struct ni_power_info *ni_pi = ni_get_pi(rdev);
1595 0 : u32 tmp;
1596 : int ret;
1597 :
1598 0 : ret = rv770_read_smc_sram_dword(rdev, ni_pi->arb_table_start,
1599 0 : &tmp, pi->sram_end);
1600 0 : if (ret)
1601 0 : return ret;
1602 :
1603 0 : tmp = (tmp >> 24) & 0xff;
1604 :
1605 0 : if (tmp == MC_CG_ARB_FREQ_F0)
1606 0 : return 0;
1607 :
1608 0 : return ni_copy_and_switch_arb_sets(rdev, tmp, MC_CG_ARB_FREQ_F0);
1609 0 : }
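/*
 * The currently selected MC arbitration set (MC_CG_ARB_FREQ_F0..F3) is
 * kept in the top byte of the first dword of the SMC arb table:
 * ni_init_arb_table_index() seeds it with MC_CG_ARB_FREQ_F1, and
 * ni_force_switch_to_arb_f0() above reads it back with
 * (tmp >> 24) & 0xff before copying that set back onto F0.
 */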
1610 :
1611 0 : static int ni_populate_memory_timing_parameters(struct radeon_device *rdev,
1612 : struct rv7xx_pl *pl,
1613 : SMC_NIslands_MCArbDramTimingRegisterSet *arb_regs)
1614 : {
1615 : u32 dram_timing;
1616 : u32 dram_timing2;
1617 :
1618 0 : arb_regs->mc_arb_rfsh_rate =
1619 0 : (u8)rv770_calculate_memory_refresh_rate(rdev, pl->sclk);
1620 :
1621 :
1622 0 : radeon_atom_set_engine_dram_timings(rdev,
1623 0 : pl->sclk,
1624 0 : pl->mclk);
1625 :
1626 0 : dram_timing = RREG32(MC_ARB_DRAM_TIMING);
1627 0 : dram_timing2 = RREG32(MC_ARB_DRAM_TIMING2);
1628 :
1629 0 : arb_regs->mc_arb_dram_timing = cpu_to_be32(dram_timing);
1630 0 : arb_regs->mc_arb_dram_timing2 = cpu_to_be32(dram_timing2);
1631 :
1632 0 : return 0;
1633 : }
1634 :
1635 0 : static int ni_do_program_memory_timing_parameters(struct radeon_device *rdev,
1636 : struct radeon_ps *radeon_state,
1637 : unsigned int first_arb_set)
1638 : {
1639 0 : struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1640 0 : struct ni_power_info *ni_pi = ni_get_pi(rdev);
1641 0 : struct ni_ps *state = ni_get_ps(radeon_state);
1642 0 : SMC_NIslands_MCArbDramTimingRegisterSet arb_regs = { 0 };
1643 : int i, ret = 0;
1644 :
1645 0 : for (i = 0; i < state->performance_level_count; i++) {
1646 0 : ret = ni_populate_memory_timing_parameters(rdev, &state->performance_levels[i], &arb_regs);
1647 0 : if (ret)
1648 : break;
1649 :
1650 0 : ret = rv770_copy_bytes_to_smc(rdev,
1651 0 : (u16)(ni_pi->arb_table_start +
1652 0 : offsetof(SMC_NIslands_MCArbDramTimingRegisters, data) +
1653 0 : sizeof(SMC_NIslands_MCArbDramTimingRegisterSet) * (first_arb_set + i)),
1654 : (u8 *)&arb_regs,
1655 : (u16)sizeof(SMC_NIslands_MCArbDramTimingRegisterSet),
1656 0 : pi->sram_end);
1657 0 : if (ret)
1658 : break;
1659 : }
1660 0 : return ret;
1661 0 : }
1662 :
1663 0 : static int ni_program_memory_timing_parameters(struct radeon_device *rdev,
1664 : struct radeon_ps *radeon_new_state)
1665 : {
1666 0 : return ni_do_program_memory_timing_parameters(rdev, radeon_new_state,
1667 : NISLANDS_DRIVER_STATE_ARB_INDEX);
1668 : }
1669 :
1670 0 : static void ni_populate_initial_mvdd_value(struct radeon_device *rdev,
1671 : struct NISLANDS_SMC_VOLTAGE_VALUE *voltage)
1672 : {
1673 0 : struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1674 :
1675 0 : voltage->index = eg_pi->mvdd_high_index;
1676 0 : voltage->value = cpu_to_be16(MVDD_HIGH_VALUE);
1677 0 : }
1678 :
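     : /*
     :  * Build the "initial" (boot) entry of the SMC state table from the
     :  * cached clock registers and the boot state's first performance level:
     :  * mclk/sclk PLL settings, VDDC/VDDCI/MVDD voltages, the PCIe gen2 flag,
     :  * GDDR5 strobe/EDC flags, and conservative DPM2 parameters.
     :  */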
1679 0 : static int ni_populate_smc_initial_state(struct radeon_device *rdev,
1680 : struct radeon_ps *radeon_initial_state,
1681 : NISLANDS_SMC_STATETABLE *table)
1682 : {
1683 0 : struct ni_ps *initial_state = ni_get_ps(radeon_initial_state);
1684 0 : struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1685 0 : struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1686 0 : struct ni_power_info *ni_pi = ni_get_pi(rdev);
1687 : u32 reg;
1688 : int ret;
1689 :
1690 0 : table->initialState.levels[0].mclk.vMPLL_AD_FUNC_CNTL =
1691 0 : cpu_to_be32(ni_pi->clock_registers.mpll_ad_func_cntl);
1692 0 : table->initialState.levels[0].mclk.vMPLL_AD_FUNC_CNTL_2 =
1693 0 : cpu_to_be32(ni_pi->clock_registers.mpll_ad_func_cntl_2);
1694 0 : table->initialState.levels[0].mclk.vMPLL_DQ_FUNC_CNTL =
1695 0 : cpu_to_be32(ni_pi->clock_registers.mpll_dq_func_cntl);
1696 0 : table->initialState.levels[0].mclk.vMPLL_DQ_FUNC_CNTL_2 =
1697 0 : cpu_to_be32(ni_pi->clock_registers.mpll_dq_func_cntl_2);
1698 0 : table->initialState.levels[0].mclk.vMCLK_PWRMGT_CNTL =
1699 0 : cpu_to_be32(ni_pi->clock_registers.mclk_pwrmgt_cntl);
1700 0 : table->initialState.levels[0].mclk.vDLL_CNTL =
1701 0 : cpu_to_be32(ni_pi->clock_registers.dll_cntl);
1702 0 : table->initialState.levels[0].mclk.vMPLL_SS =
1703 0 : cpu_to_be32(ni_pi->clock_registers.mpll_ss1);
1704 0 : table->initialState.levels[0].mclk.vMPLL_SS2 =
1705 0 : cpu_to_be32(ni_pi->clock_registers.mpll_ss2);
1706 0 : table->initialState.levels[0].mclk.mclk_value =
1707 0 : cpu_to_be32(initial_state->performance_levels[0].mclk);
1708 :
1709 0 : table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL =
1710 0 : cpu_to_be32(ni_pi->clock_registers.cg_spll_func_cntl);
1711 0 : table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_2 =
1712 0 : cpu_to_be32(ni_pi->clock_registers.cg_spll_func_cntl_2);
1713 0 : table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_3 =
1714 0 : cpu_to_be32(ni_pi->clock_registers.cg_spll_func_cntl_3);
1715 0 : table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_4 =
1716 0 : cpu_to_be32(ni_pi->clock_registers.cg_spll_func_cntl_4);
1717 0 : table->initialState.levels[0].sclk.vCG_SPLL_SPREAD_SPECTRUM =
1718 0 : cpu_to_be32(ni_pi->clock_registers.cg_spll_spread_spectrum);
1719 0 : table->initialState.levels[0].sclk.vCG_SPLL_SPREAD_SPECTRUM_2 =
1720 0 : cpu_to_be32(ni_pi->clock_registers.cg_spll_spread_spectrum_2);
1721 0 : table->initialState.levels[0].sclk.sclk_value =
1722 0 : cpu_to_be32(initial_state->performance_levels[0].sclk);
1723 0 : table->initialState.levels[0].arbRefreshState =
1724 : NISLANDS_INITIAL_STATE_ARB_INDEX;
1725 :
1726 0 : table->initialState.levels[0].ACIndex = 0;
1727 :
1728 0 : ret = ni_populate_voltage_value(rdev, &eg_pi->vddc_voltage_table,
1729 0 : initial_state->performance_levels[0].vddc,
1730 0 : &table->initialState.levels[0].vddc);
1731 0 : if (!ret) {
1732 0 : u16 std_vddc;
1733 :
1734 0 : ret = ni_get_std_voltage_value(rdev,
1735 : &table->initialState.levels[0].vddc,
1736 : &std_vddc);
1737 0 : if (!ret)
1738 0 : ni_populate_std_voltage_value(rdev, std_vddc,
1739 0 : table->initialState.levels[0].vddc.index,
1740 0 : &table->initialState.levels[0].std_vddc);
1741 0 : }
1742 :
1743 0 : if (eg_pi->vddci_control)
1744 0 : ni_populate_voltage_value(rdev,
1745 0 : &eg_pi->vddci_voltage_table,
1746 0 : initial_state->performance_levels[0].vddci,
1747 0 : &table->initialState.levels[0].vddci);
1748 :
1749 0 : ni_populate_initial_mvdd_value(rdev, &table->initialState.levels[0].mvdd);
1750 :
1751 : reg = CG_R(0xffff) | CG_L(0);
1752 0 : table->initialState.levels[0].aT = cpu_to_be32(reg);
1753 :
1754 0 : table->initialState.levels[0].bSP = cpu_to_be32(pi->dsp);
1755 :
1756 0 : if (pi->boot_in_gen2)
1757 0 : table->initialState.levels[0].gen2PCIE = 1;
1758 : else
1759 0 : table->initialState.levels[0].gen2PCIE = 0;
1760 :
1761 0 : if (pi->mem_gddr5) {
1762 0 : table->initialState.levels[0].strobeMode =
1763 0 : cypress_get_strobe_mode_settings(rdev,
1764 0 : initial_state->performance_levels[0].mclk);
1765 :
1766 0 : if (initial_state->performance_levels[0].mclk > pi->mclk_edc_enable_threshold)
1767 0 : table->initialState.levels[0].mcFlags = NISLANDS_SMC_MC_EDC_RD_FLAG | NISLANDS_SMC_MC_EDC_WR_FLAG;
1768 : else
1769 0 : table->initialState.levels[0].mcFlags = 0;
1770 : }
1771 :
1772 0 : table->initialState.levelCount = 1;
1773 :
1774 0 : table->initialState.flags |= PPSMC_SWSTATE_FLAG_DC;
1775 :
1776 0 : table->initialState.levels[0].dpm2.MaxPS = 0;
1777 0 : table->initialState.levels[0].dpm2.NearTDPDec = 0;
1778 0 : table->initialState.levels[0].dpm2.AboveSafeInc = 0;
1779 0 : table->initialState.levels[0].dpm2.BelowSafeInc = 0;
1780 :
1781 : reg = MIN_POWER_MASK | MAX_POWER_MASK;
1782 0 : table->initialState.levels[0].SQPowerThrottle = cpu_to_be32(reg);
1783 :
1784 : reg = MAX_POWER_DELTA_MASK | STI_SIZE_MASK | LTI_RATIO_MASK;
1785 0 : table->initialState.levels[0].SQPowerThrottle_2 = cpu_to_be32(reg);
1786 :
1787 0 : return 0;
1788 : }
1789 :
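     : /*
     :  * Build the ACPI (lowest-power) state: copy the initial state, switch
     :  * VDDC to the ACPI or minimum table voltage, power down the MPLL and
     :  * memory DLLs, re-route the SCLK mux, and zero the sclk/mclk targets.
     :  */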
1790 0 : static int ni_populate_smc_acpi_state(struct radeon_device *rdev,
1791 : NISLANDS_SMC_STATETABLE *table)
1792 : {
1793 0 : struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1794 0 : struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1795 0 : struct ni_power_info *ni_pi = ni_get_pi(rdev);
1796 0 : u32 mpll_ad_func_cntl = ni_pi->clock_registers.mpll_ad_func_cntl;
1797 0 : u32 mpll_ad_func_cntl_2 = ni_pi->clock_registers.mpll_ad_func_cntl_2;
1798 0 : u32 mpll_dq_func_cntl = ni_pi->clock_registers.mpll_dq_func_cntl;
1799 0 : u32 mpll_dq_func_cntl_2 = ni_pi->clock_registers.mpll_dq_func_cntl_2;
1800 0 : u32 spll_func_cntl = ni_pi->clock_registers.cg_spll_func_cntl;
1801 0 : u32 spll_func_cntl_2 = ni_pi->clock_registers.cg_spll_func_cntl_2;
1802 0 : u32 spll_func_cntl_3 = ni_pi->clock_registers.cg_spll_func_cntl_3;
1803 0 : u32 spll_func_cntl_4 = ni_pi->clock_registers.cg_spll_func_cntl_4;
1804 0 : u32 mclk_pwrmgt_cntl = ni_pi->clock_registers.mclk_pwrmgt_cntl;
1805 0 : u32 dll_cntl = ni_pi->clock_registers.dll_cntl;
1806 : u32 reg;
1807 : int ret;
1808 :
1809 0 : table->ACPIState = table->initialState;
1810 :
1811 0 : table->ACPIState.flags &= ~PPSMC_SWSTATE_FLAG_DC;
1812 :
1813 0 : if (pi->acpi_vddc) {
1814 0 : ret = ni_populate_voltage_value(rdev,
1815 : &eg_pi->vddc_voltage_table,
1816 0 : pi->acpi_vddc, &table->ACPIState.levels[0].vddc);
1817 0 : if (!ret) {
1818 0 : u16 std_vddc;
1819 :
1820 0 : ret = ni_get_std_voltage_value(rdev,
1821 : &table->ACPIState.levels[0].vddc, &std_vddc);
1822 0 : if (!ret)
1823 0 : ni_populate_std_voltage_value(rdev, std_vddc,
1824 0 : table->ACPIState.levels[0].vddc.index,
1825 0 : &table->ACPIState.levels[0].std_vddc);
1826 0 : }
1827 :
1828 0 : if (pi->pcie_gen2) {
1829 0 : if (pi->acpi_pcie_gen2)
1830 0 : table->ACPIState.levels[0].gen2PCIE = 1;
1831 : else
1832 0 : table->ACPIState.levels[0].gen2PCIE = 0;
1833 : } else {
1834 0 : table->ACPIState.levels[0].gen2PCIE = 0;
1835 : }
1836 : } else {
1837 0 : ret = ni_populate_voltage_value(rdev,
1838 : &eg_pi->vddc_voltage_table,
1839 0 : pi->min_vddc_in_table,
1840 0 : &table->ACPIState.levels[0].vddc);
1841 0 : if (!ret) {
1842 0 : u16 std_vddc;
1843 :
1844 0 : ret = ni_get_std_voltage_value(rdev,
1845 : &table->ACPIState.levels[0].vddc,
1846 : &std_vddc);
1847 0 : if (!ret)
1848 0 : ni_populate_std_voltage_value(rdev, std_vddc,
1849 0 : table->ACPIState.levels[0].vddc.index,
1850 0 : &table->ACPIState.levels[0].std_vddc);
1851 0 : }
1852 0 : table->ACPIState.levels[0].gen2PCIE = 0;
1853 : }
1854 :
1855 0 : if (eg_pi->acpi_vddci) {
1856 0 : if (eg_pi->vddci_control)
1857 0 : ni_populate_voltage_value(rdev,
1858 0 : &eg_pi->vddci_voltage_table,
1859 : eg_pi->acpi_vddci,
1860 0 : &table->ACPIState.levels[0].vddci);
1861 : }
1862 :
1863 :
1864 0 : mpll_ad_func_cntl &= ~PDNB;
1865 :
1866 0 : mpll_ad_func_cntl_2 |= BIAS_GEN_PDNB | RESET_EN;
1867 :
1868 0 : if (pi->mem_gddr5)
1869 0 : mpll_dq_func_cntl &= ~PDNB;
1870 0 : mpll_dq_func_cntl_2 |= BIAS_GEN_PDNB | RESET_EN | BYPASS;
1871 :
1872 :
1873 0 : mclk_pwrmgt_cntl |= (MRDCKA0_RESET |
1874 : MRDCKA1_RESET |
1875 : MRDCKB0_RESET |
1876 : MRDCKB1_RESET |
1877 : MRDCKC0_RESET |
1878 : MRDCKC1_RESET |
1879 : MRDCKD0_RESET |
1880 : MRDCKD1_RESET);
1881 :
1882 0 : mclk_pwrmgt_cntl &= ~(MRDCKA0_PDNB |
1883 : MRDCKA1_PDNB |
1884 : MRDCKB0_PDNB |
1885 : MRDCKB1_PDNB |
1886 : MRDCKC0_PDNB |
1887 : MRDCKC1_PDNB |
1888 : MRDCKD0_PDNB |
1889 : MRDCKD1_PDNB);
1890 :
1891 0 : dll_cntl |= (MRDCKA0_BYPASS |
1892 : MRDCKA1_BYPASS |
1893 : MRDCKB0_BYPASS |
1894 : MRDCKB1_BYPASS |
1895 : MRDCKC0_BYPASS |
1896 : MRDCKC1_BYPASS |
1897 : MRDCKD0_BYPASS |
1898 : MRDCKD1_BYPASS);
1899 :
1900 0 : spll_func_cntl_2 &= ~SCLK_MUX_SEL_MASK;
1901 0 : spll_func_cntl_2 |= SCLK_MUX_SEL(4);
1902 :
1903 0 : table->ACPIState.levels[0].mclk.vMPLL_AD_FUNC_CNTL = cpu_to_be32(mpll_ad_func_cntl);
1904 0 : table->ACPIState.levels[0].mclk.vMPLL_AD_FUNC_CNTL_2 = cpu_to_be32(mpll_ad_func_cntl_2);
1905 0 : table->ACPIState.levels[0].mclk.vMPLL_DQ_FUNC_CNTL = cpu_to_be32(mpll_dq_func_cntl);
1906 0 : table->ACPIState.levels[0].mclk.vMPLL_DQ_FUNC_CNTL_2 = cpu_to_be32(mpll_dq_func_cntl_2);
1907 0 : table->ACPIState.levels[0].mclk.vMCLK_PWRMGT_CNTL = cpu_to_be32(mclk_pwrmgt_cntl);
1908 0 : table->ACPIState.levels[0].mclk.vDLL_CNTL = cpu_to_be32(dll_cntl);
1909 :
1910 0 : table->ACPIState.levels[0].mclk.mclk_value = 0;
1911 :
1912 0 : table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL = cpu_to_be32(spll_func_cntl);
1913 0 : table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_2 = cpu_to_be32(spll_func_cntl_2);
1914 0 : table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_3 = cpu_to_be32(spll_func_cntl_3);
1915 0 : table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_4 = cpu_to_be32(spll_func_cntl_4);
1916 :
1917 0 : table->ACPIState.levels[0].sclk.sclk_value = 0;
1918 :
1919 0 : ni_populate_mvdd_value(rdev, 0, &table->ACPIState.levels[0].mvdd);
1920 :
1921 0 : if (eg_pi->dynamic_ac_timing)
1922 0 : table->ACPIState.levels[0].ACIndex = 1;
1923 :
1924 0 : table->ACPIState.levels[0].dpm2.MaxPS = 0;
1925 0 : table->ACPIState.levels[0].dpm2.NearTDPDec = 0;
1926 0 : table->ACPIState.levels[0].dpm2.AboveSafeInc = 0;
1927 0 : table->ACPIState.levels[0].dpm2.BelowSafeInc = 0;
1928 :
1929 : reg = MIN_POWER_MASK | MAX_POWER_MASK;
1930 0 : table->ACPIState.levels[0].SQPowerThrottle = cpu_to_be32(reg);
1931 :
1932 : reg = MAX_POWER_DELTA_MASK | STI_SIZE_MASK | LTI_RATIO_MASK;
1933 0 : table->ACPIState.levels[0].SQPowerThrottle_2 = cpu_to_be32(reg);
1934 :
1935 0 : return 0;
1936 : }
1937 :
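     : /*
     :  * Assemble the full SMC state table: voltage tables, thermal-protection
     :  * and platform flags, the initial/boot state and the ACPI state (the
     :  * driver and ULV states are copies of the initial state), program the
     :  * boot-state ARB set, then upload the table to SMC SRAM.
     :  */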
1938 0 : static int ni_init_smc_table(struct radeon_device *rdev)
1939 : {
1940 0 : struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1941 0 : struct ni_power_info *ni_pi = ni_get_pi(rdev);
1942 : int ret;
1943 0 : struct radeon_ps *radeon_boot_state = rdev->pm.dpm.boot_ps;
1944 0 : NISLANDS_SMC_STATETABLE *table = &ni_pi->smc_statetable;
1945 :
1946 0 : memset(table, 0, sizeof(NISLANDS_SMC_STATETABLE));
1947 :
1948 0 : ni_populate_smc_voltage_tables(rdev, table);
1949 :
1950 0 : switch (rdev->pm.int_thermal_type) {
1951 : case THERMAL_TYPE_NI:
1952 : case THERMAL_TYPE_EMC2103_WITH_INTERNAL:
1953 0 : table->thermalProtectType = PPSMC_THERMAL_PROTECT_TYPE_INTERNAL;
1954 0 : break;
1955 : case THERMAL_TYPE_NONE:
1956 0 : table->thermalProtectType = PPSMC_THERMAL_PROTECT_TYPE_NONE;
1957 0 : break;
1958 : default:
1959 0 : table->thermalProtectType = PPSMC_THERMAL_PROTECT_TYPE_EXTERNAL;
1960 0 : break;
1961 : }
1962 :
1963 0 : if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_HARDWAREDC)
1964 0 : table->systemFlags |= PPSMC_SYSTEMFLAG_GPIO_DC;
1965 :
1966 0 : if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_REGULATOR_HOT)
1967 0 : table->systemFlags |= PPSMC_SYSTEMFLAG_REGULATOR_HOT;
1968 :
1969 0 : if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_STEPVDDC)
1970 0 : table->systemFlags |= PPSMC_SYSTEMFLAG_STEPVDDC;
1971 :
1972 0 : if (pi->mem_gddr5)
1973 0 : table->systemFlags |= PPSMC_SYSTEMFLAG_GDDR5;
1974 :
1975 0 : ret = ni_populate_smc_initial_state(rdev, radeon_boot_state, table);
1976 0 : if (ret)
1977 0 : return ret;
1978 :
1979 0 : ret = ni_populate_smc_acpi_state(rdev, table);
1980 0 : if (ret)
1981 0 : return ret;
1982 :
1983 0 : table->driverState = table->initialState;
1984 :
1985 0 : table->ULVState = table->initialState;
1986 :
1987 0 : ret = ni_do_program_memory_timing_parameters(rdev, radeon_boot_state,
1988 : NISLANDS_INITIAL_STATE_ARB_INDEX);
1989 0 : if (ret)
1990 0 : return ret;
1991 :
1992 0 : return rv770_copy_bytes_to_smc(rdev, pi->state_table_start, (u8 *)table,
1993 0 : sizeof(NISLANDS_SMC_STATETABLE), pi->sram_end);
1994 0 : }
1995 :
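     : /*
     :  * Compute the SPLL register values for a target engine clock: fetch the
     :  * reference/post dividers from the vbios, derive the fractional feedback
     :  * divider, and program engine spread spectrum when it is enabled.
     :  */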
1996 0 : static int ni_calculate_sclk_params(struct radeon_device *rdev,
1997 : u32 engine_clock,
1998 : NISLANDS_SMC_SCLK_VALUE *sclk)
1999 : {
2000 0 : struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2001 0 : struct ni_power_info *ni_pi = ni_get_pi(rdev);
2002 0 : struct atom_clock_dividers dividers;
2003 0 : u32 spll_func_cntl = ni_pi->clock_registers.cg_spll_func_cntl;
2004 0 : u32 spll_func_cntl_2 = ni_pi->clock_registers.cg_spll_func_cntl_2;
2005 0 : u32 spll_func_cntl_3 = ni_pi->clock_registers.cg_spll_func_cntl_3;
2006 0 : u32 spll_func_cntl_4 = ni_pi->clock_registers.cg_spll_func_cntl_4;
2007 0 : u32 cg_spll_spread_spectrum = ni_pi->clock_registers.cg_spll_spread_spectrum;
2008 0 : u32 cg_spll_spread_spectrum_2 = ni_pi->clock_registers.cg_spll_spread_spectrum_2;
2009 : u64 tmp;
2010 0 : u32 reference_clock = rdev->clock.spll.reference_freq;
2011 : u32 reference_divider;
2012 : u32 fbdiv;
2013 : int ret;
2014 :
2015 0 : ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
2016 : engine_clock, false, &dividers);
2017 0 : if (ret)
2018 0 : return ret;
2019 :
2020 0 : reference_divider = 1 + dividers.ref_div;
2021 :
2022 :
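     : /*
     :  * Feedback divider in fixed-point form.  Note the 16834 multiplier:
     :  * equivalent code elsewhere in the driver uses 16384 (2^14), so this
     :  * constant looks like a long-standing typo; it is kept as-is here.
     :  */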
2023 0 : tmp = (u64) engine_clock * reference_divider * dividers.post_div * 16834;
2024 0 : do_div(tmp, reference_clock);
2025 0 : fbdiv = (u32) tmp;
2026 :
2027 0 : spll_func_cntl &= ~(SPLL_PDIV_A_MASK | SPLL_REF_DIV_MASK);
2028 0 : spll_func_cntl |= SPLL_REF_DIV(dividers.ref_div);
2029 0 : spll_func_cntl |= SPLL_PDIV_A(dividers.post_div);
2030 :
2031 0 : spll_func_cntl_2 &= ~SCLK_MUX_SEL_MASK;
2032 0 : spll_func_cntl_2 |= SCLK_MUX_SEL(2);
2033 :
2034 0 : spll_func_cntl_3 &= ~SPLL_FB_DIV_MASK;
2035 0 : spll_func_cntl_3 |= SPLL_FB_DIV(fbdiv);
2036 0 : spll_func_cntl_3 |= SPLL_DITHEN;
2037 :
2038 0 : if (pi->sclk_ss) {
2039 0 : struct radeon_atom_ss ss;
2040 0 : u32 vco_freq = engine_clock * dividers.post_div;
2041 :
2042 0 : if (radeon_atombios_get_asic_ss_info(rdev, &ss,
2043 : ASIC_INTERNAL_ENGINE_SS, vco_freq)) {
2044 0 : u32 clk_s = reference_clock * 5 / (reference_divider * ss.rate);
2045 0 : u32 clk_v = 4 * ss.percentage * fbdiv / (clk_s * 10000);
2046 :
2047 0 : cg_spll_spread_spectrum &= ~CLK_S_MASK;
2048 0 : cg_spll_spread_spectrum |= CLK_S(clk_s);
2049 0 : cg_spll_spread_spectrum |= SSEN;
2050 :
2051 0 : cg_spll_spread_spectrum_2 &= ~CLK_V_MASK;
2052 0 : cg_spll_spread_spectrum_2 |= CLK_V(clk_v);
2053 0 : }
2054 0 : }
2055 :
2056 0 : sclk->sclk_value = engine_clock;
2057 0 : sclk->vCG_SPLL_FUNC_CNTL = spll_func_cntl;
2058 0 : sclk->vCG_SPLL_FUNC_CNTL_2 = spll_func_cntl_2;
2059 0 : sclk->vCG_SPLL_FUNC_CNTL_3 = spll_func_cntl_3;
2060 0 : sclk->vCG_SPLL_FUNC_CNTL_4 = spll_func_cntl_4;
2061 0 : sclk->vCG_SPLL_SPREAD_SPECTRUM = cg_spll_spread_spectrum;
2062 0 : sclk->vCG_SPLL_SPREAD_SPECTRUM_2 = cg_spll_spread_spectrum_2;
2063 :
2064 0 : return 0;
2065 0 : }
2066 :
2067 0 : static int ni_populate_sclk_value(struct radeon_device *rdev,
2068 : u32 engine_clock,
2069 : NISLANDS_SMC_SCLK_VALUE *sclk)
2070 : {
2071 0 : NISLANDS_SMC_SCLK_VALUE sclk_tmp;
2072 : int ret;
2073 :
2074 0 : ret = ni_calculate_sclk_params(rdev, engine_clock, &sclk_tmp);
2075 0 : if (!ret) {
2076 0 : sclk->sclk_value = cpu_to_be32(sclk_tmp.sclk_value);
2077 0 : sclk->vCG_SPLL_FUNC_CNTL = cpu_to_be32(sclk_tmp.vCG_SPLL_FUNC_CNTL);
2078 0 : sclk->vCG_SPLL_FUNC_CNTL_2 = cpu_to_be32(sclk_tmp.vCG_SPLL_FUNC_CNTL_2);
2079 0 : sclk->vCG_SPLL_FUNC_CNTL_3 = cpu_to_be32(sclk_tmp.vCG_SPLL_FUNC_CNTL_3);
2080 0 : sclk->vCG_SPLL_FUNC_CNTL_4 = cpu_to_be32(sclk_tmp.vCG_SPLL_FUNC_CNTL_4);
2081 0 : sclk->vCG_SPLL_SPREAD_SPECTRUM = cpu_to_be32(sclk_tmp.vCG_SPLL_SPREAD_SPECTRUM);
2082 0 : sclk->vCG_SPLL_SPREAD_SPECTRUM_2 = cpu_to_be32(sclk_tmp.vCG_SPLL_SPREAD_SPECTRUM_2);
2083 0 : }
2084 :
2085 0 : return ret;
2086 0 : }
2087 :
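     : /*
     :  * Build the 256-entry SPLL divider lookup table, stepping the engine
     :  * clock by 512 units per entry, pack the dividers and spread-spectrum
     :  * fields into the SMC format, and upload the table to SMC SRAM.
     :  */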
2088 0 : static int ni_init_smc_spll_table(struct radeon_device *rdev)
2089 : {
2090 0 : struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2091 0 : struct ni_power_info *ni_pi = ni_get_pi(rdev);
2092 : SMC_NISLANDS_SPLL_DIV_TABLE *spll_table;
2093 0 : NISLANDS_SMC_SCLK_VALUE sclk_params;
2094 : u32 fb_div;
2095 : u32 p_div;
2096 : u32 clk_s;
2097 : u32 clk_v;
2098 : u32 sclk = 0;
2099 : int i, ret;
2100 : u32 tmp;
2101 :
2102 0 : if (ni_pi->spll_table_start == 0)
2103 0 : return -EINVAL;
2104 :
2105 0 : spll_table = kzalloc(sizeof(SMC_NISLANDS_SPLL_DIV_TABLE), GFP_KERNEL);
2106 0 : if (spll_table == NULL)
2107 0 : return -ENOMEM;
2108 :
2109 0 : for (i = 0; i < 256; i++) {
2110 0 : ret = ni_calculate_sclk_params(rdev, sclk, &sclk_params);
2111 0 : if (ret)
2112 : break;
2113 :
2114 0 : p_div = (sclk_params.vCG_SPLL_FUNC_CNTL & SPLL_PDIV_A_MASK) >> SPLL_PDIV_A_SHIFT;
2115 0 : fb_div = (sclk_params.vCG_SPLL_FUNC_CNTL_3 & SPLL_FB_DIV_MASK) >> SPLL_FB_DIV_SHIFT;
2116 0 : clk_s = (sclk_params.vCG_SPLL_SPREAD_SPECTRUM & CLK_S_MASK) >> CLK_S_SHIFT;
2117 0 : clk_v = (sclk_params.vCG_SPLL_SPREAD_SPECTRUM_2 & CLK_V_MASK) >> CLK_V_SHIFT;
2118 :
2119 0 : fb_div &= ~0x00001FFF;
2120 0 : fb_div >>= 1;
2121 0 : clk_v >>= 6;
2122 :
2123 0 : if (p_div & ~(SMC_NISLANDS_SPLL_DIV_TABLE_PDIV_MASK >> SMC_NISLANDS_SPLL_DIV_TABLE_PDIV_SHIFT))
2124 0 : ret = -EINVAL;
2125 :
2126 0 : if (clk_s & ~(SMC_NISLANDS_SPLL_DIV_TABLE_CLKS_MASK >> SMC_NISLANDS_SPLL_DIV_TABLE_CLKS_SHIFT))
2127 0 : ret = -EINVAL;
2128 :
2129 0 : if (fb_div & ~(SMC_NISLANDS_SPLL_DIV_TABLE_FBDIV_MASK >> SMC_NISLANDS_SPLL_DIV_TABLE_FBDIV_SHIFT))
2130 0 : ret = -EINVAL;
2131 :
2132 0 : if (clk_v & ~(SMC_NISLANDS_SPLL_DIV_TABLE_CLKV_MASK >> SMC_NISLANDS_SPLL_DIV_TABLE_CLKV_SHIFT))
2133 0 : ret = -EINVAL;
2134 :
2135 0 : if (ret)
2136 : break;
2137 :
2138 0 : tmp = ((fb_div << SMC_NISLANDS_SPLL_DIV_TABLE_FBDIV_SHIFT) & SMC_NISLANDS_SPLL_DIV_TABLE_FBDIV_MASK) |
2139 0 : ((p_div << SMC_NISLANDS_SPLL_DIV_TABLE_PDIV_SHIFT) & SMC_NISLANDS_SPLL_DIV_TABLE_PDIV_MASK);
2140 0 : spll_table->freq[i] = cpu_to_be32(tmp);
2141 :
2142 0 : tmp = ((clk_v << SMC_NISLANDS_SPLL_DIV_TABLE_CLKV_SHIFT) & SMC_NISLANDS_SPLL_DIV_TABLE_CLKV_MASK) |
2143 0 : ((clk_s << SMC_NISLANDS_SPLL_DIV_TABLE_CLKS_SHIFT) & SMC_NISLANDS_SPLL_DIV_TABLE_CLKS_MASK);
2144 0 : spll_table->ss[i] = cpu_to_be32(tmp);
2145 :
2146 0 : sclk += 512;
2147 : }
2148 :
2149 0 : if (!ret)
2150 0 : ret = rv770_copy_bytes_to_smc(rdev, ni_pi->spll_table_start, (u8 *)spll_table,
2151 0 : sizeof(SMC_NISLANDS_SPLL_DIV_TABLE), pi->sram_end);
2152 :
2153 0 : kfree(spll_table);
2154 :
2155 0 : return ret;
2156 0 : }
2157 :
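     : /*
     :  * Compute the MPLL/DLL register values for a target memory clock: get
     :  * the dividers from the vbios, program the AD (and, for GDDR5, DQ) PLL
     :  * fields, optionally apply memory spread spectrum, and set the DLL
     :  * speed and power-down bits according to dll_state_on.
     :  */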
2158 0 : static int ni_populate_mclk_value(struct radeon_device *rdev,
2159 : u32 engine_clock,
2160 : u32 memory_clock,
2161 : NISLANDS_SMC_MCLK_VALUE *mclk,
2162 : bool strobe_mode,
2163 : bool dll_state_on)
2164 : {
2165 0 : struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2166 0 : struct ni_power_info *ni_pi = ni_get_pi(rdev);
2167 0 : u32 mpll_ad_func_cntl = ni_pi->clock_registers.mpll_ad_func_cntl;
2168 0 : u32 mpll_ad_func_cntl_2 = ni_pi->clock_registers.mpll_ad_func_cntl_2;
2169 0 : u32 mpll_dq_func_cntl = ni_pi->clock_registers.mpll_dq_func_cntl;
2170 0 : u32 mpll_dq_func_cntl_2 = ni_pi->clock_registers.mpll_dq_func_cntl_2;
2171 0 : u32 mclk_pwrmgt_cntl = ni_pi->clock_registers.mclk_pwrmgt_cntl;
2172 0 : u32 dll_cntl = ni_pi->clock_registers.dll_cntl;
2173 0 : u32 mpll_ss1 = ni_pi->clock_registers.mpll_ss1;
2174 0 : u32 mpll_ss2 = ni_pi->clock_registers.mpll_ss2;
2175 0 : struct atom_clock_dividers dividers;
2176 : u32 ibias;
2177 : u32 dll_speed;
2178 : int ret;
2179 : u32 mc_seq_misc7;
2180 :
2181 0 : ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_MEMORY_PLL_PARAM,
2182 : memory_clock, strobe_mode, &dividers);
2183 0 : if (ret)
2184 0 : return ret;
2185 :
2186 0 : if (!strobe_mode) {
2187 0 : mc_seq_misc7 = RREG32(MC_SEQ_MISC7);
2188 :
2189 0 : if (mc_seq_misc7 & 0x8000000)
2190 0 : dividers.post_div = 1;
2191 : }
2192 :
2193 0 : ibias = cypress_map_clkf_to_ibias(rdev, dividers.whole_fb_div);
2194 :
2195 0 : mpll_ad_func_cntl &= ~(CLKR_MASK |
2196 : YCLK_POST_DIV_MASK |
2197 : CLKF_MASK |
2198 : CLKFRAC_MASK |
2199 : IBIAS_MASK);
2200 0 : mpll_ad_func_cntl |= CLKR(dividers.ref_div);
2201 0 : mpll_ad_func_cntl |= YCLK_POST_DIV(dividers.post_div);
2202 0 : mpll_ad_func_cntl |= CLKF(dividers.whole_fb_div);
2203 0 : mpll_ad_func_cntl |= CLKFRAC(dividers.frac_fb_div);
2204 0 : mpll_ad_func_cntl |= IBIAS(ibias);
2205 :
2206 0 : if (dividers.vco_mode)
2207 0 : mpll_ad_func_cntl_2 |= VCO_MODE;
2208 : else
2209 0 : mpll_ad_func_cntl_2 &= ~VCO_MODE;
2210 :
2211 0 : if (pi->mem_gddr5) {
2212 0 : mpll_dq_func_cntl &= ~(CLKR_MASK |
2213 : YCLK_POST_DIV_MASK |
2214 : CLKF_MASK |
2215 : CLKFRAC_MASK |
2216 : IBIAS_MASK);
2217 0 : mpll_dq_func_cntl |= CLKR(dividers.ref_div);
2218 0 : mpll_dq_func_cntl |= YCLK_POST_DIV(dividers.post_div);
2219 0 : mpll_dq_func_cntl |= CLKF(dividers.whole_fb_div);
2220 0 : mpll_dq_func_cntl |= CLKFRAC(dividers.frac_fb_div);
2221 0 : mpll_dq_func_cntl |= IBIAS(ibias);
2222 :
2223 0 : if (strobe_mode)
2224 0 : mpll_dq_func_cntl &= ~PDNB;
2225 : else
2226 0 : mpll_dq_func_cntl |= PDNB;
2227 :
2228 0 : if (dividers.vco_mode)
2229 0 : mpll_dq_func_cntl_2 |= VCO_MODE;
2230 : else
2231 0 : mpll_dq_func_cntl_2 &= ~VCO_MODE;
2232 : }
2233 :
2234 0 : if (pi->mclk_ss) {
2235 0 : struct radeon_atom_ss ss;
2236 0 : u32 vco_freq = memory_clock * dividers.post_div;
2237 :
2238 0 : if (radeon_atombios_get_asic_ss_info(rdev, &ss,
2239 : ASIC_INTERNAL_MEMORY_SS, vco_freq)) {
2240 0 : u32 reference_clock = rdev->clock.mpll.reference_freq;
2241 0 : u32 decoded_ref = rv740_get_decoded_reference_divider(dividers.ref_div);
2242 0 : u32 clk_s = reference_clock * 5 / (decoded_ref * ss.rate);
2243 0 : u32 clk_v = ss.percentage *
2244 0 : (0x4000 * dividers.whole_fb_div + 0x800 * dividers.frac_fb_div) / (clk_s * 625);
2245 :
2246 0 : mpll_ss1 &= ~CLKV_MASK;
2247 0 : mpll_ss1 |= CLKV(clk_v);
2248 :
2249 0 : mpll_ss2 &= ~CLKS_MASK;
2250 0 : mpll_ss2 |= CLKS(clk_s);
2251 0 : }
2252 0 : }
2253 :
2254 0 : dll_speed = rv740_get_dll_speed(pi->mem_gddr5,
2255 : memory_clock);
2256 :
2257 0 : mclk_pwrmgt_cntl &= ~DLL_SPEED_MASK;
2258 0 : mclk_pwrmgt_cntl |= DLL_SPEED(dll_speed);
2259 0 : if (dll_state_on)
2260 0 : mclk_pwrmgt_cntl |= (MRDCKA0_PDNB |
2261 : MRDCKA1_PDNB |
2262 : MRDCKB0_PDNB |
2263 : MRDCKB1_PDNB |
2264 : MRDCKC0_PDNB |
2265 : MRDCKC1_PDNB |
2266 : MRDCKD0_PDNB |
2267 : MRDCKD1_PDNB);
2268 : else
2269 0 : mclk_pwrmgt_cntl &= ~(MRDCKA0_PDNB |
2270 : MRDCKA1_PDNB |
2271 : MRDCKB0_PDNB |
2272 : MRDCKB1_PDNB |
2273 : MRDCKC0_PDNB |
2274 : MRDCKC1_PDNB |
2275 : MRDCKD0_PDNB |
2276 : MRDCKD1_PDNB);
2277 :
2278 :
2279 0 : mclk->mclk_value = cpu_to_be32(memory_clock);
2280 0 : mclk->vMPLL_AD_FUNC_CNTL = cpu_to_be32(mpll_ad_func_cntl);
2281 0 : mclk->vMPLL_AD_FUNC_CNTL_2 = cpu_to_be32(mpll_ad_func_cntl_2);
2282 0 : mclk->vMPLL_DQ_FUNC_CNTL = cpu_to_be32(mpll_dq_func_cntl);
2283 0 : mclk->vMPLL_DQ_FUNC_CNTL_2 = cpu_to_be32(mpll_dq_func_cntl_2);
2284 0 : mclk->vMCLK_PWRMGT_CNTL = cpu_to_be32(mclk_pwrmgt_cntl);
2285 0 : mclk->vDLL_CNTL = cpu_to_be32(dll_cntl);
2286 0 : mclk->vMPLL_SS = cpu_to_be32(mpll_ss1);
2287 0 : mclk->vMPLL_SS2 = cpu_to_be32(mpll_ss2);
2288 :
2289 0 : return 0;
2290 0 : }
2291 :
2292 0 : static void ni_populate_smc_sp(struct radeon_device *rdev,
2293 : struct radeon_ps *radeon_state,
2294 : NISLANDS_SMC_SWSTATE *smc_state)
2295 : {
2296 0 : struct ni_ps *ps = ni_get_ps(radeon_state);
2297 0 : struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2298 : int i;
2299 :
2300 0 : for (i = 0; i < ps->performance_level_count - 1; i++)
2301 0 : smc_state->levels[i].bSP = cpu_to_be32(pi->dsp);
2302 :
2303 0 : smc_state->levels[ps->performance_level_count - 1].bSP =
2304 0 : cpu_to_be32(pi->psp);
2305 0 : }
2306 :
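     : /*
     :  * Translate one driver performance level into an SMC hardware level:
     :  * PCIe gen2 flag, sclk/mclk PLL settings, stutter/EDC/RTT memory flags
     :  * for GDDR5, and the VDDC/VDDCI/MVDD voltage entries.
     :  */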
2307 0 : static int ni_convert_power_level_to_smc(struct radeon_device *rdev,
2308 : struct rv7xx_pl *pl,
2309 : NISLANDS_SMC_HW_PERFORMANCE_LEVEL *level)
2310 : {
2311 0 : struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2312 0 : struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
2313 0 : struct ni_power_info *ni_pi = ni_get_pi(rdev);
2314 : int ret;
2315 : bool dll_state_on;
2316 0 : u16 std_vddc;
2317 0 : u32 tmp = RREG32(DC_STUTTER_CNTL);
2318 :
2319 0 : level->gen2PCIE = pi->pcie_gen2 ?
2320 0 : ((pl->flags & ATOM_PPLIB_R600_FLAGS_PCIEGEN2) ? 1 : 0) : 0;
2321 :
2322 0 : ret = ni_populate_sclk_value(rdev, pl->sclk, &level->sclk);
2323 0 : if (ret)
2324 0 : return ret;
2325 :
2326 0 : level->mcFlags = 0;
2327 0 : if (pi->mclk_stutter_mode_threshold &&
2328 0 : (pl->mclk <= pi->mclk_stutter_mode_threshold) &&
2329 0 : !eg_pi->uvd_enabled &&
2330 0 : (tmp & DC_STUTTER_ENABLE_A) &&
2331 0 : (tmp & DC_STUTTER_ENABLE_B))
2332 0 : level->mcFlags |= NISLANDS_SMC_MC_STUTTER_EN;
2333 :
2334 0 : if (pi->mem_gddr5) {
2335 0 : if (pl->mclk > pi->mclk_edc_enable_threshold)
2336 0 : level->mcFlags |= NISLANDS_SMC_MC_EDC_RD_FLAG;
2337 0 : if (pl->mclk > eg_pi->mclk_edc_wr_enable_threshold)
2338 0 : level->mcFlags |= NISLANDS_SMC_MC_EDC_WR_FLAG;
2339 :
2340 0 : level->strobeMode = cypress_get_strobe_mode_settings(rdev, pl->mclk);
2341 :
2342 0 : if (level->strobeMode & NISLANDS_SMC_STROBE_ENABLE) {
2343 0 : if (cypress_get_mclk_frequency_ratio(rdev, pl->mclk, true) >=
2344 0 : ((RREG32(MC_SEQ_MISC7) >> 16) & 0xf))
2345 0 : dll_state_on = ((RREG32(MC_SEQ_MISC5) >> 1) & 0x1) ? true : false;
2346 : else
2347 0 : dll_state_on = ((RREG32(MC_SEQ_MISC6) >> 1) & 0x1) ? true : false;
2348 : } else {
2349 : dll_state_on = false;
2350 0 : if (pl->mclk > ni_pi->mclk_rtt_mode_threshold)
2351 0 : level->mcFlags |= NISLANDS_SMC_MC_RTT_ENABLE;
2352 : }
2353 :
2354 0 : ret = ni_populate_mclk_value(rdev, pl->sclk, pl->mclk,
2355 0 : &level->mclk,
2356 0 : (level->strobeMode & NISLANDS_SMC_STROBE_ENABLE) != 0,
2357 0 : dll_state_on);
2358 0 : } else
2359 0 : ret = ni_populate_mclk_value(rdev, pl->sclk, pl->mclk, &level->mclk, true, true);
2360 :
2361 0 : if (ret)
2362 0 : return ret;
2363 :
2364 0 : ret = ni_populate_voltage_value(rdev, &eg_pi->vddc_voltage_table,
2365 0 : pl->vddc, &level->vddc);
2366 0 : if (ret)
2367 0 : return ret;
2368 :
2369 0 : ret = ni_get_std_voltage_value(rdev, &level->vddc, &std_vddc);
2370 0 : if (ret)
2371 0 : return ret;
2372 :
2373 0 : ni_populate_std_voltage_value(rdev, std_vddc,
2374 0 : level->vddc.index, &level->std_vddc);
2375 :
2376 0 : if (eg_pi->vddci_control) {
2377 0 : ret = ni_populate_voltage_value(rdev, &eg_pi->vddci_voltage_table,
2378 0 : pl->vddci, &level->vddci);
2379 0 : if (ret)
2380 0 : return ret;
2381 : }
2382 :
2383 0 : ni_populate_mvdd_value(rdev, pl->mclk, &level->mvdd);
2384 :
2385 0 : return ret;
2386 0 : }
2387 :
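     : /*
     :  * Fill the per-level aT fields that pace transitions between adjacent
     :  * performance levels, using r600_calculate_at() (with fixed fallback
     :  * thresholds if the calculation fails), scaled by bsp/pbsp.
     :  */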
2388 0 : static int ni_populate_smc_t(struct radeon_device *rdev,
2389 : struct radeon_ps *radeon_state,
2390 : NISLANDS_SMC_SWSTATE *smc_state)
2391 : {
2392 0 : struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2393 0 : struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
2394 0 : struct ni_ps *state = ni_get_ps(radeon_state);
2395 : u32 a_t;
2396 0 : u32 t_l, t_h;
2397 : u32 high_bsp;
2398 : int i, ret;
2399 :
2400 0 : if (state->performance_level_count >= 9)
2401 0 : return -EINVAL;
2402 :
2403 0 : if (state->performance_level_count < 2) {
2404 : a_t = CG_R(0xffff) | CG_L(0);
2405 0 : smc_state->levels[0].aT = cpu_to_be32(a_t);
2406 0 : return 0;
2407 : }
2408 :
2409 0 : smc_state->levels[0].aT = cpu_to_be32(0);
2410 :
2411 0 : for (i = 0; i <= state->performance_level_count - 2; i++) {
2412 0 : if (eg_pi->uvd_enabled)
2413 0 : ret = r600_calculate_at(
2414 0 : 1000 * (i * (eg_pi->smu_uvd_hs ? 2 : 8) + 2),
2415 : 100 * R600_AH_DFLT,
2416 0 : state->performance_levels[i + 1].sclk,
2417 0 : state->performance_levels[i].sclk,
2418 : &t_l,
2419 : &t_h);
2420 : else
2421 0 : ret = r600_calculate_at(
2422 0 : 1000 * (i + 1),
2423 : 100 * R600_AH_DFLT,
2424 0 : state->performance_levels[i + 1].sclk,
2425 0 : state->performance_levels[i].sclk,
2426 : &t_l,
2427 : &t_h);
2428 :
2429 0 : if (ret) {
2430 0 : t_h = (i + 1) * 1000 - 50 * R600_AH_DFLT;
2431 0 : t_l = (i + 1) * 1000 + 50 * R600_AH_DFLT;
2432 0 : }
2433 :
2434 0 : a_t = be32_to_cpu(smc_state->levels[i].aT) & ~CG_R_MASK;
2435 0 : a_t |= CG_R(t_l * pi->bsp / 20000);
2436 0 : smc_state->levels[i].aT = cpu_to_be32(a_t);
2437 :
2438 0 : high_bsp = (i == state->performance_level_count - 2) ?
2439 0 : pi->pbsp : pi->bsp;
2440 :
2441 0 : a_t = CG_R(0xffff) | CG_L(t_h * high_bsp / 20000);
2442 0 : smc_state->levels[i + 1].aT = cpu_to_be32(a_t);
2443 : }
2444 :
2445 0 : return 0;
2446 0 : }
2447 :
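     : /*
     :  * Populate the DPM2 power-containment parameters: compute the adjusted
     :  * TDP and power-boost limits, write the boost limit to SMC SRAM, and
     :  * derive a per-level MaxPS pulse-skip value from the sclk range.
     :  */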
2448 0 : static int ni_populate_power_containment_values(struct radeon_device *rdev,
2449 : struct radeon_ps *radeon_state,
2450 : NISLANDS_SMC_SWSTATE *smc_state)
2451 : {
2452 0 : struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2453 0 : struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
2454 0 : struct ni_power_info *ni_pi = ni_get_pi(rdev);
2455 0 : struct ni_ps *state = ni_get_ps(radeon_state);
2456 : u32 prev_sclk;
2457 : u32 max_sclk;
2458 : u32 min_sclk;
2459 : int i, ret;
2460 0 : u32 tdp_limit;
2461 0 : u32 near_tdp_limit;
2462 : u32 power_boost_limit;
2463 : u8 max_ps_percent;
2464 :
2465 0 : if (ni_pi->enable_power_containment == false)
2466 0 : return 0;
2467 :
2468 0 : if (state->performance_level_count == 0)
2469 0 : return -EINVAL;
2470 :
2471 0 : if (smc_state->levelCount != state->performance_level_count)
2472 0 : return -EINVAL;
2473 :
2474 0 : ret = ni_calculate_adjusted_tdp_limits(rdev,
2475 : false, /* ??? */
2476 0 : rdev->pm.dpm.tdp_adjustment,
2477 : &tdp_limit,
2478 : &near_tdp_limit);
2479 0 : if (ret)
2480 0 : return ret;
2481 :
2482 0 : power_boost_limit = ni_calculate_power_boost_limit(rdev, radeon_state, near_tdp_limit);
2483 :
2484 0 : ret = rv770_write_smc_sram_dword(rdev,
2485 0 : pi->state_table_start +
2486 0 : offsetof(NISLANDS_SMC_STATETABLE, dpm2Params) +
2487 : offsetof(PP_NIslands_DPM2Parameters, PowerBoostLimit),
2488 0 : ni_scale_power_for_smc(power_boost_limit, ni_get_smc_power_scaling_factor(rdev)),
2489 0 : pi->sram_end);
2490 0 : if (ret)
2491 0 : power_boost_limit = 0;
2492 :
2493 0 : smc_state->levels[0].dpm2.MaxPS = 0;
2494 0 : smc_state->levels[0].dpm2.NearTDPDec = 0;
2495 0 : smc_state->levels[0].dpm2.AboveSafeInc = 0;
2496 0 : smc_state->levels[0].dpm2.BelowSafeInc = 0;
2497 0 : smc_state->levels[0].stateFlags |= power_boost_limit ? PPSMC_STATEFLAG_POWERBOOST : 0;
2498 :
2499 0 : for (i = 1; i < state->performance_level_count; i++) {
2500 0 : prev_sclk = state->performance_levels[i-1].sclk;
2501 0 : max_sclk = state->performance_levels[i].sclk;
2502 0 : max_ps_percent = (i != (state->performance_level_count - 1)) ?
2503 : NISLANDS_DPM2_MAXPS_PERCENT_M : NISLANDS_DPM2_MAXPS_PERCENT_H;
2504 :
2505 0 : if (max_sclk < prev_sclk)
2506 0 : return -EINVAL;
2507 :
2508 0 : if ((max_ps_percent == 0) || (prev_sclk == max_sclk) || eg_pi->uvd_enabled)
2509 0 : min_sclk = max_sclk;
2510 0 : else if (1 == i)
2511 0 : min_sclk = prev_sclk;
2512 : else
2513 0 : min_sclk = (prev_sclk * (u32)max_ps_percent) / 100;
2514 :
2515 0 : if (min_sclk < state->performance_levels[0].sclk)
2516 0 : min_sclk = state->performance_levels[0].sclk;
2517 :
2518 0 : if (min_sclk == 0)
2519 0 : return -EINVAL;
2520 :
2521 0 : smc_state->levels[i].dpm2.MaxPS =
2522 0 : (u8)((NISLANDS_DPM2_MAX_PULSE_SKIP * (max_sclk - min_sclk)) / max_sclk);
2523 0 : smc_state->levels[i].dpm2.NearTDPDec = NISLANDS_DPM2_NEAR_TDP_DEC;
2524 0 : smc_state->levels[i].dpm2.AboveSafeInc = NISLANDS_DPM2_ABOVE_SAFE_INC;
2525 0 : smc_state->levels[i].dpm2.BelowSafeInc = NISLANDS_DPM2_BELOW_SAFE_INC;
2526 0 : smc_state->levels[i].stateFlags |=
2527 0 : ((i != (state->performance_level_count - 1)) && power_boost_limit) ?
2528 : PPSMC_STATEFLAG_POWERBOOST : 0;
2529 : }
2530 :
2531 0 : return 0;
2532 0 : }
2533 :
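     : /*
     :  * Program per-level SQ power-ramping thresholds; if any of the
     :  * compile-time DPM2 constants would overflow their register fields,
     :  * ramping is disabled and the fields are set to their masks instead.
     :  */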
2534 0 : static int ni_populate_sq_ramping_values(struct radeon_device *rdev,
2535 : struct radeon_ps *radeon_state,
2536 : NISLANDS_SMC_SWSTATE *smc_state)
2537 : {
2538 0 : struct ni_power_info *ni_pi = ni_get_pi(rdev);
2539 0 : struct ni_ps *state = ni_get_ps(radeon_state);
2540 : u32 sq_power_throttle;
2541 : u32 sq_power_throttle2;
2542 0 : bool enable_sq_ramping = ni_pi->enable_sq_ramping;
2543 : int i;
2544 :
2545 0 : if (state->performance_level_count == 0)
2546 0 : return -EINVAL;
2547 :
2548 0 : if (smc_state->levelCount != state->performance_level_count)
2549 0 : return -EINVAL;
2550 :
2551 0 : if (rdev->pm.dpm.sq_ramping_threshold == 0)
2552 0 : return -EINVAL;
2553 :
2554 : if (NISLANDS_DPM2_SQ_RAMP_MAX_POWER > (MAX_POWER_MASK >> MAX_POWER_SHIFT))
2555 : enable_sq_ramping = false;
2556 :
2557 : if (NISLANDS_DPM2_SQ_RAMP_MIN_POWER > (MIN_POWER_MASK >> MIN_POWER_SHIFT))
2558 : enable_sq_ramping = false;
2559 :
2560 : if (NISLANDS_DPM2_SQ_RAMP_MAX_POWER_DELTA > (MAX_POWER_DELTA_MASK >> MAX_POWER_DELTA_SHIFT))
2561 : enable_sq_ramping = false;
2562 :
2563 : if (NISLANDS_DPM2_SQ_RAMP_STI_SIZE > (STI_SIZE_MASK >> STI_SIZE_SHIFT))
2564 : enable_sq_ramping = false;
2565 :
2566 : if (NISLANDS_DPM2_SQ_RAMP_LTI_RATIO > (LTI_RATIO_MASK >> LTI_RATIO_SHIFT))
2567 : enable_sq_ramping = false;
2568 :
2569 0 : for (i = 0; i < state->performance_level_count; i++) {
2570 : sq_power_throttle = 0;
2571 : sq_power_throttle2 = 0;
2572 :
2573 0 : if ((state->performance_levels[i].sclk >= rdev->pm.dpm.sq_ramping_threshold) &&
2574 : enable_sq_ramping) {
2575 : sq_power_throttle |= MAX_POWER(NISLANDS_DPM2_SQ_RAMP_MAX_POWER);
2576 : sq_power_throttle |= MIN_POWER(NISLANDS_DPM2_SQ_RAMP_MIN_POWER);
2577 : sq_power_throttle2 |= MAX_POWER_DELTA(NISLANDS_DPM2_SQ_RAMP_MAX_POWER_DELTA);
2578 : sq_power_throttle2 |= STI_SIZE(NISLANDS_DPM2_SQ_RAMP_STI_SIZE);
2579 : sq_power_throttle2 |= LTI_RATIO(NISLANDS_DPM2_SQ_RAMP_LTI_RATIO);
2580 0 : } else {
2581 : sq_power_throttle |= MAX_POWER_MASK | MIN_POWER_MASK;
2582 : sq_power_throttle2 |= MAX_POWER_DELTA_MASK | STI_SIZE_MASK | LTI_RATIO_MASK;
2583 : }
2584 :
2585 0 : smc_state->levels[i].SQPowerThrottle = cpu_to_be32(sq_power_throttle);
2586 0 : smc_state->levels[i].SQPowerThrottle_2 = cpu_to_be32(sq_power_throttle2);
2587 : }
2588 :
2589 0 : return 0;
2590 0 : }
2591 :
2592 0 : static int ni_enable_power_containment(struct radeon_device *rdev,
2593 : struct radeon_ps *radeon_new_state,
2594 : bool enable)
2595 : {
2596 0 : struct ni_power_info *ni_pi = ni_get_pi(rdev);
2597 : PPSMC_Result smc_result;
2598 : int ret = 0;
2599 :
2600 0 : if (ni_pi->enable_power_containment) {
2601 0 : if (enable) {
2602 0 : if (!r600_is_uvd_state(radeon_new_state->class, radeon_new_state->class2)) {
2603 0 : smc_result = rv770_send_msg_to_smc(rdev, PPSMC_TDPClampingActive);
2604 0 : if (smc_result != PPSMC_Result_OK) {
2605 : ret = -EINVAL;
2606 0 : ni_pi->pc_enabled = false;
2607 0 : } else {
2608 0 : ni_pi->pc_enabled = true;
2609 : }
2610 : }
2611 : } else {
2612 0 : smc_result = rv770_send_msg_to_smc(rdev, PPSMC_TDPClampingInactive);
2613 0 : if (smc_result != PPSMC_Result_OK)
2614 0 : ret = -EINVAL;
2615 0 : ni_pi->pc_enabled = false;
2616 : }
2617 : }
2618 :
2619 0 : return ret;
2620 : }
2621 :
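     : /*
     :  * Convert a full driver power state into an SMC software state: one SMC
     :  * level per performance level (with ARB/AC indices and display
     :  * watermarks), then the bSP, power-containment, SQ-ramping and aT
     :  * parameters.
     :  */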
2622 0 : static int ni_convert_power_state_to_smc(struct radeon_device *rdev,
2623 : struct radeon_ps *radeon_state,
2624 : NISLANDS_SMC_SWSTATE *smc_state)
2625 : {
2626 0 : struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
2627 0 : struct ni_power_info *ni_pi = ni_get_pi(rdev);
2628 0 : struct ni_ps *state = ni_get_ps(radeon_state);
2629 : int i, ret;
2630 0 : u32 threshold = state->performance_levels[state->performance_level_count - 1].sclk * 100 / 100;
2631 :
2632 0 : if (!(radeon_state->caps & ATOM_PPLIB_DISALLOW_ON_DC))
2633 0 : smc_state->flags |= PPSMC_SWSTATE_FLAG_DC;
2634 :
2635 0 : smc_state->levelCount = 0;
2636 :
2637 0 : if (state->performance_level_count > NISLANDS_MAX_SMC_PERFORMANCE_LEVELS_PER_SWSTATE)
2638 0 : return -EINVAL;
2639 :
2640 0 : for (i = 0; i < state->performance_level_count; i++) {
2641 0 : ret = ni_convert_power_level_to_smc(rdev, &state->performance_levels[i],
2642 0 : &smc_state->levels[i]);
2643 0 : smc_state->levels[i].arbRefreshState =
2644 0 : (u8)(NISLANDS_DRIVER_STATE_ARB_INDEX + i);
2645 :
2646 0 : if (ret)
2647 0 : return ret;
2648 :
2649 0 : if (ni_pi->enable_power_containment)
2650 0 : smc_state->levels[i].displayWatermark =
2651 0 : (state->performance_levels[i].sclk < threshold) ?
2652 : PPSMC_DISPLAY_WATERMARK_LOW : PPSMC_DISPLAY_WATERMARK_HIGH;
2653 : else
2654 0 : smc_state->levels[i].displayWatermark = (i < 2) ?
2655 : PPSMC_DISPLAY_WATERMARK_LOW : PPSMC_DISPLAY_WATERMARK_HIGH;
2656 :
2657 0 : if (eg_pi->dynamic_ac_timing)
2658 0 : smc_state->levels[i].ACIndex = NISLANDS_MCREGISTERTABLE_FIRST_DRIVERSTATE_SLOT + i;
2659 : else
2660 0 : smc_state->levels[i].ACIndex = 0;
2661 :
2662 0 : smc_state->levelCount++;
2663 : }
2664 :
2665 0 : rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_watermark_threshold,
2666 0 : cpu_to_be32(threshold / 512));
2667 :
2668 0 : ni_populate_smc_sp(rdev, radeon_state, smc_state);
2669 :
2670 0 : ret = ni_populate_power_containment_values(rdev, radeon_state, smc_state);
2671 0 : if (ret)
2672 0 : ni_pi->enable_power_containment = false;
2673 :
2674 0 : ret = ni_populate_sq_ramping_values(rdev, radeon_state, smc_state);
2675 0 : if (ret)
2676 0 : ni_pi->enable_sq_ramping = false;
2677 :
2678 0 : return ni_populate_smc_t(rdev, radeon_state, smc_state);
2679 0 : }
2680 :
2681 0 : static int ni_upload_sw_state(struct radeon_device *rdev,
2682 : struct radeon_ps *radeon_new_state)
2683 : {
2684 0 : struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2685 0 : u16 address = pi->state_table_start +
2686 : offsetof(NISLANDS_SMC_STATETABLE, driverState);
2687 : u16 state_size = sizeof(NISLANDS_SMC_SWSTATE) +
2688 : ((NISLANDS_MAX_SMC_PERFORMANCE_LEVELS_PER_SWSTATE - 1) * sizeof(NISLANDS_SMC_HW_PERFORMANCE_LEVEL));
2689 : int ret;
2690 0 : NISLANDS_SMC_SWSTATE *smc_state = kzalloc(state_size, GFP_KERNEL);
2691 :
2692 0 : if (smc_state == NULL)
2693 0 : return -ENOMEM;
2694 :
2695 0 : ret = ni_convert_power_state_to_smc(rdev, radeon_new_state, smc_state);
2696 0 : if (ret)
2697 : goto done;
2698 :
2699 0 : ret = rv770_copy_bytes_to_smc(rdev, address, (u8 *)smc_state, state_size, pi->sram_end);
2700 :
2701 : done:
2702 0 : kfree(smc_state);
2703 :
2704 0 : return ret;
2705 0 : }
2706 :
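     : /*
     :  * Append derived MC register entries: for MC_SEQ_MISC1, generate
     :  * matching MC_PMG_CMD_EMRS/MRS values; for MC_SEQ_RESERVE_M, generate
     :  * MC_PMG_CMD_MRS1 values, bounds-checking the table as it grows.
     :  */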
2707 0 : static int ni_set_mc_special_registers(struct radeon_device *rdev,
2708 : struct ni_mc_reg_table *table)
2709 : {
2710 0 : struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2711 : u8 i, j, k;
2712 : u32 temp_reg;
2713 :
2714 0 : for (i = 0, j = table->last; i < table->last; i++) {
2715 0 : switch (table->mc_reg_address[i].s1) {
2716 : case MC_SEQ_MISC1 >> 2:
2717 0 : if (j >= SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE)
2718 0 : return -EINVAL;
2719 0 : temp_reg = RREG32(MC_PMG_CMD_EMRS);
2720 0 : table->mc_reg_address[j].s1 = MC_PMG_CMD_EMRS >> 2;
2721 0 : table->mc_reg_address[j].s0 = MC_SEQ_PMG_CMD_EMRS_LP >> 2;
2722 0 : for (k = 0; k < table->num_entries; k++)
2723 0 : table->mc_reg_table_entry[k].mc_data[j] =
2724 0 : ((temp_reg & 0xffff0000)) |
2725 0 : ((table->mc_reg_table_entry[k].mc_data[i] & 0xffff0000) >> 16);
2726 0 : j++;
2727 0 : if (j >= SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE)
2728 0 : return -EINVAL;
2729 :
2730 0 : temp_reg = RREG32(MC_PMG_CMD_MRS);
2731 0 : table->mc_reg_address[j].s1 = MC_PMG_CMD_MRS >> 2;
2732 0 : table->mc_reg_address[j].s0 = MC_SEQ_PMG_CMD_MRS_LP >> 2;
2733 0 : for(k = 0; k < table->num_entries; k++) {
2734 0 : table->mc_reg_table_entry[k].mc_data[j] =
2735 0 : (temp_reg & 0xffff0000) |
2736 0 : (table->mc_reg_table_entry[k].mc_data[i] & 0x0000ffff);
2737 0 : if (!pi->mem_gddr5)
2738 0 : table->mc_reg_table_entry[k].mc_data[j] |= 0x100;
2739 : }
2740 0 : j++;
2741 0 : if (j > SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE)
2742 0 : return -EINVAL;
2743 : break;
2744 : case MC_SEQ_RESERVE_M >> 2:
2745 0 : temp_reg = RREG32(MC_PMG_CMD_MRS1);
2746 0 : table->mc_reg_address[j].s1 = MC_PMG_CMD_MRS1 >> 2;
2747 0 : table->mc_reg_address[j].s0 = MC_SEQ_PMG_CMD_MRS1_LP >> 2;
2748 0 : for (k = 0; k < table->num_entries; k++)
2749 0 : table->mc_reg_table_entry[k].mc_data[j] =
2750 0 : (temp_reg & 0xffff0000) |
2751 0 : (table->mc_reg_table_entry[k].mc_data[i] & 0x0000ffff);
2752 0 : j++;
2753 0 : if (j > SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE)
2754 0 : return -EINVAL;
2755 : break;
2756 : default:
2757 : break;
2758 : }
2759 : }
2760 :
2761 0 : table->last = j;
2762 :
2763 0 : return 0;
2764 0 : }
2765 :
2766 0 : static bool ni_check_s0_mc_reg_index(u16 in_reg, u16 *out_reg)
2767 : {
2768 : bool result = true;
2769 :
2770 0 : switch (in_reg) {
2771 : case MC_SEQ_RAS_TIMING >> 2:
2772 0 : *out_reg = MC_SEQ_RAS_TIMING_LP >> 2;
2773 0 : break;
2774 : case MC_SEQ_CAS_TIMING >> 2:
2775 0 : *out_reg = MC_SEQ_CAS_TIMING_LP >> 2;
2776 0 : break;
2777 : case MC_SEQ_MISC_TIMING >> 2:
2778 0 : *out_reg = MC_SEQ_MISC_TIMING_LP >> 2;
2779 0 : break;
2780 : case MC_SEQ_MISC_TIMING2 >> 2:
2781 0 : *out_reg = MC_SEQ_MISC_TIMING2_LP >> 2;
2782 0 : break;
2783 : case MC_SEQ_RD_CTL_D0 >> 2:
2784 0 : *out_reg = MC_SEQ_RD_CTL_D0_LP >> 2;
2785 0 : break;
2786 : case MC_SEQ_RD_CTL_D1 >> 2:
2787 0 : *out_reg = MC_SEQ_RD_CTL_D1_LP >> 2;
2788 0 : break;
2789 : case MC_SEQ_WR_CTL_D0 >> 2:
2790 0 : *out_reg = MC_SEQ_WR_CTL_D0_LP >> 2;
2791 0 : break;
2792 : case MC_SEQ_WR_CTL_D1 >> 2:
2793 0 : *out_reg = MC_SEQ_WR_CTL_D1_LP >> 2;
2794 0 : break;
2795 : case MC_PMG_CMD_EMRS >> 2:
2796 0 : *out_reg = MC_SEQ_PMG_CMD_EMRS_LP >> 2;
2797 0 : break;
2798 : case MC_PMG_CMD_MRS >> 2:
2799 0 : *out_reg = MC_SEQ_PMG_CMD_MRS_LP >> 2;
2800 0 : break;
2801 : case MC_PMG_CMD_MRS1 >> 2:
2802 0 : *out_reg = MC_SEQ_PMG_CMD_MRS1_LP >> 2;
2803 0 : break;
2804 : case MC_SEQ_PMG_TIMING >> 2:
2805 0 : *out_reg = MC_SEQ_PMG_TIMING_LP >> 2;
2806 0 : break;
2807 : case MC_PMG_CMD_MRS2 >> 2:
2808 0 : *out_reg = MC_SEQ_PMG_CMD_MRS2_LP >> 2;
2809 0 : break;
2810 : default:
2811 : result = false;
2812 0 : break;
2813 : }
2814 :
2815 0 : return result;
2816 : }
2817 :
2818 0 : static void ni_set_valid_flag(struct ni_mc_reg_table *table)
2819 : {
2820 : u8 i, j;
2821 :
2822 0 : for (i = 0; i < table->last; i++) {
2823 0 : for (j = 1; j < table->num_entries; j++) {
2824 0 : if (table->mc_reg_table_entry[j-1].mc_data[i] != table->mc_reg_table_entry[j].mc_data[i]) {
2825 0 : table->valid_flag |= 1 << i;
2826 0 : break;
2827 : }
2828 : }
2829 : }
2830 0 : }
2831 :
2832 0 : static void ni_set_s0_mc_reg_index(struct ni_mc_reg_table *table)
2833 : {
2834 : u32 i;
2835 0 : u16 address;
2836 :
2837 0 : for (i = 0; i < table->last; i++)
2838 0 : table->mc_reg_address[i].s0 =
2839 0 : ni_check_s0_mc_reg_index(table->mc_reg_address[i].s1, &address) ?
2840 0 : address : table->mc_reg_address[i].s1;
2841 0 : }
2842 :
2843 0 : static int ni_copy_vbios_mc_reg_table(struct atom_mc_reg_table *table,
2844 : struct ni_mc_reg_table *ni_table)
2845 : {
2846 : u8 i, j;
2847 :
2848 0 : if (table->last > SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE)
2849 0 : return -EINVAL;
2850 0 : if (table->num_entries > MAX_AC_TIMING_ENTRIES)
2851 0 : return -EINVAL;
2852 :
2853 0 : for (i = 0; i < table->last; i++)
2854 0 : ni_table->mc_reg_address[i].s1 = table->mc_reg_address[i].s1;
2855 0 : ni_table->last = table->last;
2856 :
2857 0 : for (i = 0; i < table->num_entries; i++) {
2858 0 : ni_table->mc_reg_table_entry[i].mclk_max =
2859 0 : table->mc_reg_table_entry[i].mclk_max;
2860 0 : for (j = 0; j < table->last; j++)
2861 0 : ni_table->mc_reg_table_entry[i].mc_data[j] =
2862 0 : table->mc_reg_table_entry[i].mc_data[j];
2863 : }
2864 0 : ni_table->num_entries = table->num_entries;
2865 :
2866 0 : return 0;
2867 0 : }
2868 :
2869 0 : static int ni_initialize_mc_reg_table(struct radeon_device *rdev)
2870 : {
2871 0 : struct ni_power_info *ni_pi = ni_get_pi(rdev);
2872 : int ret;
2873 : struct atom_mc_reg_table *table;
2874 0 : struct ni_mc_reg_table *ni_table = &ni_pi->mc_reg_table;
2875 0 : u8 module_index = rv770_get_memory_module_index(rdev);
2876 :
2877 0 : table = kzalloc(sizeof(struct atom_mc_reg_table), GFP_KERNEL);
2878 0 : if (!table)
2879 0 : return -ENOMEM;
2880 :
2881 0 : WREG32(MC_SEQ_RAS_TIMING_LP, RREG32(MC_SEQ_RAS_TIMING));
2882 0 : WREG32(MC_SEQ_CAS_TIMING_LP, RREG32(MC_SEQ_CAS_TIMING));
2883 0 : WREG32(MC_SEQ_MISC_TIMING_LP, RREG32(MC_SEQ_MISC_TIMING));
2884 0 : WREG32(MC_SEQ_MISC_TIMING2_LP, RREG32(MC_SEQ_MISC_TIMING2));
2885 0 : WREG32(MC_SEQ_PMG_CMD_EMRS_LP, RREG32(MC_PMG_CMD_EMRS));
2886 0 : WREG32(MC_SEQ_PMG_CMD_MRS_LP, RREG32(MC_PMG_CMD_MRS));
2887 0 : WREG32(MC_SEQ_PMG_CMD_MRS1_LP, RREG32(MC_PMG_CMD_MRS1));
2888 0 : WREG32(MC_SEQ_WR_CTL_D0_LP, RREG32(MC_SEQ_WR_CTL_D0));
2889 0 : WREG32(MC_SEQ_WR_CTL_D1_LP, RREG32(MC_SEQ_WR_CTL_D1));
2890 0 : WREG32(MC_SEQ_RD_CTL_D0_LP, RREG32(MC_SEQ_RD_CTL_D0));
2891 0 : WREG32(MC_SEQ_RD_CTL_D1_LP, RREG32(MC_SEQ_RD_CTL_D1));
2892 0 : WREG32(MC_SEQ_PMG_TIMING_LP, RREG32(MC_SEQ_PMG_TIMING));
2893 0 : WREG32(MC_SEQ_PMG_CMD_MRS2_LP, RREG32(MC_PMG_CMD_MRS2));
2894 :
2895 0 : ret = radeon_atom_init_mc_reg_table(rdev, module_index, table);
2896 :
2897 0 : if (ret)
2898 : goto init_mc_done;
2899 :
2900 0 : ret = ni_copy_vbios_mc_reg_table(table, ni_table);
2901 :
2902 0 : if (ret)
2903 : goto init_mc_done;
2904 :
2905 0 : ni_set_s0_mc_reg_index(ni_table);
2906 :
2907 0 : ret = ni_set_mc_special_registers(rdev, ni_table);
2908 :
2909 0 : if (ret)
2910 : goto init_mc_done;
2911 :
2912 0 : ni_set_valid_flag(ni_table);
2913 :
2914 : init_mc_done:
2915 0 : kfree(table);
2916 :
2917 0 : return ret;
2918 0 : }
2919 :
2920 0 : static void ni_populate_mc_reg_addresses(struct radeon_device *rdev,
2921 : SMC_NIslands_MCRegisters *mc_reg_table)
2922 : {
2923 0 : struct ni_power_info *ni_pi = ni_get_pi(rdev);
2924 : u32 i, j;
2925 :
2926 0 : for (i = 0, j = 0; j < ni_pi->mc_reg_table.last; j++) {
2927 0 : if (ni_pi->mc_reg_table.valid_flag & (1 << j)) {
2928 0 : if (i >= SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE)
2929 : break;
2930 0 : mc_reg_table->address[i].s0 =
2931 0 : cpu_to_be16(ni_pi->mc_reg_table.mc_reg_address[j].s0);
2932 0 : mc_reg_table->address[i].s1 =
2933 0 : cpu_to_be16(ni_pi->mc_reg_table.mc_reg_address[j].s1);
2934 0 : i++;
2935 0 : }
2936 : }
2937 0 : mc_reg_table->last = (u8)i;
2938 0 : }
2939 :
2940 :
2941 0 : static void ni_convert_mc_registers(struct ni_mc_reg_entry *entry,
2942 : SMC_NIslands_MCRegisterSet *data,
2943 : u32 num_entries, u32 valid_flag)
2944 : {
2945 : u32 i, j;
2946 :
2947 0 : for (i = 0, j = 0; j < num_entries; j++) {
2948 0 : if (valid_flag & (1 << j)) {
2949 0 : data->value[i] = cpu_to_be32(entry->mc_data[j]);
2950 0 : i++;
2951 0 : }
2952 : }
2953 0 : }
2954 :
2955 0 : static void ni_convert_mc_reg_table_entry_to_smc(struct radeon_device *rdev,
2956 : struct rv7xx_pl *pl,
2957 : SMC_NIslands_MCRegisterSet *mc_reg_table_data)
2958 : {
2959 0 : struct ni_power_info *ni_pi = ni_get_pi(rdev);
2960 : u32 i = 0;
2961 :
2962 0 : for (i = 0; i < ni_pi->mc_reg_table.num_entries; i++) {
2963 0 : if (pl->mclk <= ni_pi->mc_reg_table.mc_reg_table_entry[i].mclk_max)
2964 : break;
2965 : }
2966 :
2967 0 : if ((i == ni_pi->mc_reg_table.num_entries) && (i > 0))
2968 0 : --i;
2969 :
2970 0 : ni_convert_mc_registers(&ni_pi->mc_reg_table.mc_reg_table_entry[i],
2971 : mc_reg_table_data,
2972 0 : ni_pi->mc_reg_table.last,
2973 0 : ni_pi->mc_reg_table.valid_flag);
2974 0 : }
2975 :
2976 0 : static void ni_convert_mc_reg_table_to_smc(struct radeon_device *rdev,
2977 : struct radeon_ps *radeon_state,
2978 : SMC_NIslands_MCRegisters *mc_reg_table)
2979 : {
2980 0 : struct ni_ps *state = ni_get_ps(radeon_state);
2981 : int i;
2982 :
2983 0 : for (i = 0; i < state->performance_level_count; i++) {
2984 0 : ni_convert_mc_reg_table_entry_to_smc(rdev,
2985 0 : &state->performance_levels[i],
2986 0 : &mc_reg_table->data[NISLANDS_MCREGISTERTABLE_FIRST_DRIVERSTATE_SLOT + i]);
2987 : }
2988 0 : }
2989 :
2990 0 : static int ni_populate_mc_reg_table(struct radeon_device *rdev,
2991 : struct radeon_ps *radeon_boot_state)
2992 : {
2993 0 : struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2994 0 : struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
2995 0 : struct ni_power_info *ni_pi = ni_get_pi(rdev);
2996 0 : struct ni_ps *boot_state = ni_get_ps(radeon_boot_state);
2997 0 : SMC_NIslands_MCRegisters *mc_reg_table = &ni_pi->smc_mc_reg_table;
2998 :
2999 0 : memset(mc_reg_table, 0, sizeof(SMC_NIslands_MCRegisters));
3000 :
3001 0 : rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_seq_index, 1);
3002 :
3003 0 : ni_populate_mc_reg_addresses(rdev, mc_reg_table);
3004 :
3005 0 : ni_convert_mc_reg_table_entry_to_smc(rdev, &boot_state->performance_levels[0],
3006 0 : &mc_reg_table->data[0]);
3007 :
3008 0 : ni_convert_mc_registers(&ni_pi->mc_reg_table.mc_reg_table_entry[0],
3009 0 : &mc_reg_table->data[1],
3010 0 : ni_pi->mc_reg_table.last,
3011 0 : ni_pi->mc_reg_table.valid_flag);
3012 :
3013 0 : ni_convert_mc_reg_table_to_smc(rdev, radeon_boot_state, mc_reg_table);
3014 :
3015 0 : return rv770_copy_bytes_to_smc(rdev, eg_pi->mc_reg_table_start,
3016 : (u8 *)mc_reg_table,
3017 : sizeof(SMC_NIslands_MCRegisters),
3018 0 : pi->sram_end);
3019 : }
3020 :
3021 0 : static int ni_upload_mc_reg_table(struct radeon_device *rdev,
3022 : struct radeon_ps *radeon_new_state)
3023 : {
3024 0 : struct rv7xx_power_info *pi = rv770_get_pi(rdev);
3025 0 : struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3026 0 : struct ni_power_info *ni_pi = ni_get_pi(rdev);
3027 0 : struct ni_ps *ni_new_state = ni_get_ps(radeon_new_state);
3028 0 : SMC_NIslands_MCRegisters *mc_reg_table = &ni_pi->smc_mc_reg_table;
3029 : u16 address;
3030 :
3031 0 : memset(mc_reg_table, 0, sizeof(SMC_NIslands_MCRegisters));
3032 :
3033 0 : ni_convert_mc_reg_table_to_smc(rdev, radeon_new_state, mc_reg_table);
3034 :
3035 0 : address = eg_pi->mc_reg_table_start +
3036 : (u16)offsetof(SMC_NIslands_MCRegisters, data[NISLANDS_MCREGISTERTABLE_FIRST_DRIVERSTATE_SLOT]);
3037 :
3038 0 : return rv770_copy_bytes_to_smc(rdev, address,
3039 0 : (u8 *)&mc_reg_table->data[NISLANDS_MCREGISTERTABLE_FIRST_DRIVERSTATE_SLOT],
3040 0 : sizeof(SMC_NIslands_MCRegisterSet) * ni_new_state->performance_level_count,
3041 0 : pi->sram_end);
3042 : }
3043 :
3044 0 : static int ni_init_driver_calculated_leakage_table(struct radeon_device *rdev,
3045 : PP_NIslands_CACTABLES *cac_tables)
3046 : {
3047 0 : struct ni_power_info *ni_pi = ni_get_pi(rdev);
3048 0 : struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3049 0 : u32 leakage = 0;
3050 : unsigned int i, j, table_size;
3051 : s32 t;
3052 : u32 smc_leakage, max_leakage = 0;
3053 : u32 scaling_factor;
3054 :
3055 0 : table_size = eg_pi->vddc_voltage_table.count;
3056 :
3057 0 : if (SMC_NISLANDS_LKGE_LUT_NUM_OF_VOLT_ENTRIES < table_size)
3058 : table_size = SMC_NISLANDS_LKGE_LUT_NUM_OF_VOLT_ENTRIES;
3059 :
3060 0 : scaling_factor = ni_get_smc_power_scaling_factor(rdev);
3061 :
3062 0 : for (i = 0; i < SMC_NISLANDS_LKGE_LUT_NUM_OF_TEMP_ENTRIES; i++) {
3063 0 : for (j = 0; j < table_size; j++) {
3064 0 : t = (1000 * ((i + 1) * 8));
3065 :
3066 0 : if (t < ni_pi->cac_data.leakage_minimum_temperature)
3067 0 : t = ni_pi->cac_data.leakage_minimum_temperature;
3068 :
3069 0 : ni_calculate_leakage_for_v_and_t(rdev,
3070 0 : &ni_pi->cac_data.leakage_coefficients,
3071 0 : eg_pi->vddc_voltage_table.entries[j].value,
3072 : t,
3073 0 : ni_pi->cac_data.i_leakage,
3074 : &leakage);
3075 :
3076 0 : smc_leakage = ni_scale_power_for_smc(leakage, scaling_factor) / 1000;
3077 0 : if (smc_leakage > max_leakage)
3078 0 : max_leakage = smc_leakage;
3079 :
3080 0 : cac_tables->cac_lkge_lut[i][j] = cpu_to_be32(smc_leakage);
3081 : }
3082 : }
3083 :
3084 0 : for (j = table_size; j < SMC_NISLANDS_LKGE_LUT_NUM_OF_VOLT_ENTRIES; j++) {
3085 0 : for (i = 0; i < SMC_NISLANDS_LKGE_LUT_NUM_OF_TEMP_ENTRIES; i++)
3086 0 : cac_tables->cac_lkge_lut[i][j] = cpu_to_be32(max_leakage);
3087 : }
3088 0 : return 0;
3089 0 : }
3090 :
3091 0 : static int ni_init_simplified_leakage_table(struct radeon_device *rdev,
3092 : PP_NIslands_CACTABLES *cac_tables)
3093 : {
3094 0 : struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3095 : struct radeon_cac_leakage_table *leakage_table =
3096 0 : &rdev->pm.dpm.dyn_state.cac_leakage_table;
3097 : u32 i, j, table_size;
3098 : u32 smc_leakage, max_leakage = 0;
3099 : u32 scaling_factor;
3100 :
3101 0 : if (!leakage_table)
3102 0 : return -EINVAL;
3103 :
3104 0 : table_size = leakage_table->count;
3105 :
3106 0 : if (eg_pi->vddc_voltage_table.count != table_size)
3107 0 : table_size = (eg_pi->vddc_voltage_table.count < leakage_table->count) ?
3108 : eg_pi->vddc_voltage_table.count : leakage_table->count;
3109 :
3110 0 : if (SMC_NISLANDS_LKGE_LUT_NUM_OF_VOLT_ENTRIES < table_size)
3111 0 : table_size = SMC_NISLANDS_LKGE_LUT_NUM_OF_VOLT_ENTRIES;
3112 :
3113 0 : if (table_size == 0)
3114 0 : return -EINVAL;
3115 :
3116 0 : scaling_factor = ni_get_smc_power_scaling_factor(rdev);
3117 :
3118 0 : for (j = 0; j < table_size; j++) {
3119 0 : smc_leakage = leakage_table->entries[j].leakage;
3120 :
3121 0 : if (smc_leakage > max_leakage)
3122 0 : max_leakage = smc_leakage;
3123 :
3124 0 : for (i = 0; i < SMC_NISLANDS_LKGE_LUT_NUM_OF_TEMP_ENTRIES; i++)
3125 0 : cac_tables->cac_lkge_lut[i][j] =
3126 0 : cpu_to_be32(ni_scale_power_for_smc(smc_leakage, scaling_factor));
3127 : }
3128 :
3129 0 : for (j = table_size; j < SMC_NISLANDS_LKGE_LUT_NUM_OF_VOLT_ENTRIES; j++) {
3130 0 : for (i = 0; i < SMC_NISLANDS_LKGE_LUT_NUM_OF_TEMP_ENTRIES; i++)
3131 0 : cac_tables->cac_lkge_lut[i][j] =
3132 0 : cpu_to_be32(ni_scale_power_for_smc(max_leakage, scaling_factor));
3133 : }
3134 0 : return 0;
3135 0 : }
3136 :
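     : /*
     :  * Build and upload the CAC (leakage) tables: program the TID weights,
     :  * fill the leakage LUT either from driver-calculated coefficients or
     :  * from the simplified vbios leakage table, and copy the result to SMC
     :  * SRAM.  CAC and power containment are disabled on failure.
     :  */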
3137 0 : static int ni_initialize_smc_cac_tables(struct radeon_device *rdev)
3138 : {
3139 0 : struct rv7xx_power_info *pi = rv770_get_pi(rdev);
3140 0 : struct ni_power_info *ni_pi = ni_get_pi(rdev);
3141 : PP_NIslands_CACTABLES *cac_tables = NULL;
3142 : int i, ret;
3143 : u32 reg;
3144 :
3145 0 : if (ni_pi->enable_cac == false)
3146 0 : return 0;
3147 :
3148 0 : cac_tables = kzalloc(sizeof(PP_NIslands_CACTABLES), GFP_KERNEL);
3149 0 : if (!cac_tables)
3150 0 : return -ENOMEM;
3151 :
3152 0 : reg = RREG32(CG_CAC_CTRL) & ~(TID_CNT_MASK | TID_UNIT_MASK);
3153 0 : reg |= (TID_CNT(ni_pi->cac_weights->tid_cnt) |
3154 0 : TID_UNIT(ni_pi->cac_weights->tid_unit));
3155 0 : WREG32(CG_CAC_CTRL, reg);
3156 :
3157 0 : for (i = 0; i < NISLANDS_DCCAC_MAX_LEVELS; i++)
3158 0 : ni_pi->dc_cac_table[i] = ni_pi->cac_weights->dc_cac[i];
3159 :
3160 0 : for (i = 0; i < SMC_NISLANDS_BIF_LUT_NUM_OF_ENTRIES; i++)
3161 0 : cac_tables->cac_bif_lut[i] = ni_pi->cac_weights->pcie_cac[i];
3162 :
3163 0 : ni_pi->cac_data.i_leakage = rdev->pm.dpm.cac_leakage;
3164 0 : ni_pi->cac_data.pwr_const = 0;
3165 0 : ni_pi->cac_data.dc_cac_value = ni_pi->dc_cac_table[NISLANDS_DCCAC_LEVEL_0];
3166 0 : ni_pi->cac_data.bif_cac_value = 0;
3167 0 : ni_pi->cac_data.mc_wr_weight = ni_pi->cac_weights->mc_write_weight;
3168 0 : ni_pi->cac_data.mc_rd_weight = ni_pi->cac_weights->mc_read_weight;
3169 0 : ni_pi->cac_data.allow_ovrflw = 0;
3170 0 : ni_pi->cac_data.l2num_win_tdp = ni_pi->lta_window_size;
3171 0 : ni_pi->cac_data.num_win_tdp = 0;
3172 0 : ni_pi->cac_data.lts_truncate_n = ni_pi->lts_truncate;
3173 :
3174 0 : if (ni_pi->driver_calculate_cac_leakage)
3175 0 : ret = ni_init_driver_calculated_leakage_table(rdev, cac_tables);
3176 : else
3177 0 : ret = ni_init_simplified_leakage_table(rdev, cac_tables);
3178 :
3179 0 : if (ret)
3180 : goto done_free;
3181 :
3182 0 : cac_tables->pwr_const = cpu_to_be32(ni_pi->cac_data.pwr_const);
3183 0 : cac_tables->dc_cacValue = cpu_to_be32(ni_pi->cac_data.dc_cac_value);
3184 0 : cac_tables->bif_cacValue = cpu_to_be32(ni_pi->cac_data.bif_cac_value);
3185 0 : cac_tables->AllowOvrflw = ni_pi->cac_data.allow_ovrflw;
3186 0 : cac_tables->MCWrWeight = ni_pi->cac_data.mc_wr_weight;
3187 0 : cac_tables->MCRdWeight = ni_pi->cac_data.mc_rd_weight;
3188 0 : cac_tables->numWin_TDP = ni_pi->cac_data.num_win_tdp;
3189 0 : cac_tables->l2numWin_TDP = ni_pi->cac_data.l2num_win_tdp;
3190 0 : cac_tables->lts_truncate_n = ni_pi->cac_data.lts_truncate_n;
3191 :
3192 0 : ret = rv770_copy_bytes_to_smc(rdev, ni_pi->cac_table_start, (u8 *)cac_tables,
3193 0 : sizeof(PP_NIslands_CACTABLES), pi->sram_end);
3194 :
3195 : done_free:
3196 0 : if (ret) {
3197 0 : ni_pi->enable_cac = false;
3198 0 : ni_pi->enable_power_containment = false;
3199 0 : }
3200 :
3201 0 : kfree(cac_tables);
3202 :
3203 0 : return 0;
3204 0 : }
3205 :
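/* Program the hardware CAC manager: write the per-block activity weights
 * (TCP/TA, TCC, CB, DB, SXM/SXS, XBR/SPI, LDS/SC, BIF/CP/PA/VGT, DC, UVD,
 * SQ) into the CG_CAC_REGION_*_WEIGHT registers, set the SQ CAC thresholds,
 * and push the MC read/write weights through MC_CG_CONFIG/MC_CG_DATAPORT. */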
3206 0 : static int ni_initialize_hardware_cac_manager(struct radeon_device *rdev)
3207 : {
3208 0 : struct ni_power_info *ni_pi = ni_get_pi(rdev);
3209 : u32 reg;
3210 :
3211 0 : if (!ni_pi->enable_cac ||
3212 0 : !ni_pi->cac_configuration_required)
3213 0 : return 0;
3214 :
3215 0 : if (ni_pi->cac_weights == NULL)
3216 0 : return -EINVAL;
3217 :
3218 0 : reg = RREG32_CG(CG_CAC_REGION_1_WEIGHT_0) & ~(WEIGHT_TCP_SIG0_MASK |
3219 : WEIGHT_TCP_SIG1_MASK |
3220 : WEIGHT_TA_SIG_MASK);
3221 0 : reg |= (WEIGHT_TCP_SIG0(ni_pi->cac_weights->weight_tcp_sig0) |
3222 0 : WEIGHT_TCP_SIG1(ni_pi->cac_weights->weight_tcp_sig1) |
3223 0 : WEIGHT_TA_SIG(ni_pi->cac_weights->weight_ta_sig));
3224 0 : WREG32_CG(CG_CAC_REGION_1_WEIGHT_0, reg);
3225 :
3226 0 : reg = RREG32_CG(CG_CAC_REGION_1_WEIGHT_1) & ~(WEIGHT_TCC_EN0_MASK |
3227 : WEIGHT_TCC_EN1_MASK |
3228 : WEIGHT_TCC_EN2_MASK);
3229 0 : reg |= (WEIGHT_TCC_EN0(ni_pi->cac_weights->weight_tcc_en0) |
3230 0 : WEIGHT_TCC_EN1(ni_pi->cac_weights->weight_tcc_en1) |
3231 0 : WEIGHT_TCC_EN2(ni_pi->cac_weights->weight_tcc_en2));
3232 0 : WREG32_CG(CG_CAC_REGION_1_WEIGHT_1, reg);
3233 :
3234 0 : reg = RREG32_CG(CG_CAC_REGION_2_WEIGHT_0) & ~(WEIGHT_CB_EN0_MASK |
3235 : WEIGHT_CB_EN1_MASK |
3236 : WEIGHT_CB_EN2_MASK |
3237 : WEIGHT_CB_EN3_MASK);
3238 0 : reg |= (WEIGHT_CB_EN0(ni_pi->cac_weights->weight_cb_en0) |
3239 0 : WEIGHT_CB_EN1(ni_pi->cac_weights->weight_cb_en1) |
3240 0 : WEIGHT_CB_EN2(ni_pi->cac_weights->weight_cb_en2) |
3241 0 : WEIGHT_CB_EN3(ni_pi->cac_weights->weight_cb_en3));
3242 0 : WREG32_CG(CG_CAC_REGION_2_WEIGHT_0, reg);
3243 :
3244 0 : reg = RREG32_CG(CG_CAC_REGION_2_WEIGHT_1) & ~(WEIGHT_DB_SIG0_MASK |
3245 : WEIGHT_DB_SIG1_MASK |
3246 : WEIGHT_DB_SIG2_MASK |
3247 : WEIGHT_DB_SIG3_MASK);
3248 0 : reg |= (WEIGHT_DB_SIG0(ni_pi->cac_weights->weight_db_sig0) |
3249 0 : WEIGHT_DB_SIG1(ni_pi->cac_weights->weight_db_sig1) |
3250 0 : WEIGHT_DB_SIG2(ni_pi->cac_weights->weight_db_sig2) |
3251 0 : WEIGHT_DB_SIG3(ni_pi->cac_weights->weight_db_sig3));
3252 0 : WREG32_CG(CG_CAC_REGION_2_WEIGHT_1, reg);
3253 :
3254 0 : reg = RREG32_CG(CG_CAC_REGION_2_WEIGHT_2) & ~(WEIGHT_SXM_SIG0_MASK |
3255 : WEIGHT_SXM_SIG1_MASK |
3256 : WEIGHT_SXM_SIG2_MASK |
3257 : WEIGHT_SXS_SIG0_MASK |
3258 : WEIGHT_SXS_SIG1_MASK);
3259 0 : reg |= (WEIGHT_SXM_SIG0(ni_pi->cac_weights->weight_sxm_sig0) |
3260 0 : WEIGHT_SXM_SIG1(ni_pi->cac_weights->weight_sxm_sig1) |
3261 0 : WEIGHT_SXM_SIG2(ni_pi->cac_weights->weight_sxm_sig2) |
3262 0 : WEIGHT_SXS_SIG0(ni_pi->cac_weights->weight_sxs_sig0) |
3263 0 : WEIGHT_SXS_SIG1(ni_pi->cac_weights->weight_sxs_sig1));
3264 0 : WREG32_CG(CG_CAC_REGION_2_WEIGHT_2, reg);
3265 :
3266 0 : reg = RREG32_CG(CG_CAC_REGION_3_WEIGHT_0) & ~(WEIGHT_XBR_0_MASK |
3267 : WEIGHT_XBR_1_MASK |
3268 : WEIGHT_XBR_2_MASK |
3269 : WEIGHT_SPI_SIG0_MASK);
3270 0 : reg |= (WEIGHT_XBR_0(ni_pi->cac_weights->weight_xbr_0) |
3271 0 : WEIGHT_XBR_1(ni_pi->cac_weights->weight_xbr_1) |
3272 0 : WEIGHT_XBR_2(ni_pi->cac_weights->weight_xbr_2) |
3273 0 : WEIGHT_SPI_SIG0(ni_pi->cac_weights->weight_spi_sig0));
3274 0 : WREG32_CG(CG_CAC_REGION_3_WEIGHT_0, reg);
3275 :
3276 0 : reg = RREG32_CG(CG_CAC_REGION_3_WEIGHT_1) & ~(WEIGHT_SPI_SIG1_MASK |
3277 : WEIGHT_SPI_SIG2_MASK |
3278 : WEIGHT_SPI_SIG3_MASK |
3279 : WEIGHT_SPI_SIG4_MASK |
3280 : WEIGHT_SPI_SIG5_MASK);
3281 0 : reg |= (WEIGHT_SPI_SIG1(ni_pi->cac_weights->weight_spi_sig1) |
3282 0 : WEIGHT_SPI_SIG2(ni_pi->cac_weights->weight_spi_sig2) |
3283 0 : WEIGHT_SPI_SIG3(ni_pi->cac_weights->weight_spi_sig3) |
3284 0 : WEIGHT_SPI_SIG4(ni_pi->cac_weights->weight_spi_sig4) |
3285 0 : WEIGHT_SPI_SIG5(ni_pi->cac_weights->weight_spi_sig5));
3286 0 : WREG32_CG(CG_CAC_REGION_3_WEIGHT_1, reg);
3287 :
3288 0 : reg = RREG32_CG(CG_CAC_REGION_4_WEIGHT_0) & ~(WEIGHT_LDS_SIG0_MASK |
3289 : WEIGHT_LDS_SIG1_MASK |
3290 : WEIGHT_SC_MASK);
3291 0 : reg |= (WEIGHT_LDS_SIG0(ni_pi->cac_weights->weight_lds_sig0) |
3292 0 : WEIGHT_LDS_SIG1(ni_pi->cac_weights->weight_lds_sig1) |
3293 0 : WEIGHT_SC(ni_pi->cac_weights->weight_sc));
3294 0 : WREG32_CG(CG_CAC_REGION_4_WEIGHT_0, reg);
3295 :
3296 0 : reg = RREG32_CG(CG_CAC_REGION_4_WEIGHT_1) & ~(WEIGHT_BIF_MASK |
3297 : WEIGHT_CP_MASK |
3298 : WEIGHT_PA_SIG0_MASK |
3299 : WEIGHT_PA_SIG1_MASK |
3300 : WEIGHT_VGT_SIG0_MASK);
3301 0 : reg |= (WEIGHT_BIF(ni_pi->cac_weights->weight_bif) |
3302 0 : WEIGHT_CP(ni_pi->cac_weights->weight_cp) |
3303 0 : WEIGHT_PA_SIG0(ni_pi->cac_weights->weight_pa_sig0) |
3304 0 : WEIGHT_PA_SIG1(ni_pi->cac_weights->weight_pa_sig1) |
3305 0 : WEIGHT_VGT_SIG0(ni_pi->cac_weights->weight_vgt_sig0));
3306 0 : WREG32_CG(CG_CAC_REGION_4_WEIGHT_1, reg);
3307 :
3308 0 : reg = RREG32_CG(CG_CAC_REGION_4_WEIGHT_2) & ~(WEIGHT_VGT_SIG1_MASK |
3309 : WEIGHT_VGT_SIG2_MASK |
3310 : WEIGHT_DC_SIG0_MASK |
3311 : WEIGHT_DC_SIG1_MASK |
3312 : WEIGHT_DC_SIG2_MASK);
3313 0 : reg |= (WEIGHT_VGT_SIG1(ni_pi->cac_weights->weight_vgt_sig1) |
3314 0 : WEIGHT_VGT_SIG2(ni_pi->cac_weights->weight_vgt_sig2) |
3315 0 : WEIGHT_DC_SIG0(ni_pi->cac_weights->weight_dc_sig0) |
3316 0 : WEIGHT_DC_SIG1(ni_pi->cac_weights->weight_dc_sig1) |
3317 0 : WEIGHT_DC_SIG2(ni_pi->cac_weights->weight_dc_sig2));
3318 0 : WREG32_CG(CG_CAC_REGION_4_WEIGHT_2, reg);
3319 :
3320 0 : reg = RREG32_CG(CG_CAC_REGION_4_WEIGHT_3) & ~(WEIGHT_DC_SIG3_MASK |
3321 : WEIGHT_UVD_SIG0_MASK |
3322 : WEIGHT_UVD_SIG1_MASK |
3323 : WEIGHT_SPARE0_MASK |
3324 : WEIGHT_SPARE1_MASK);
3325 0 : reg |= (WEIGHT_DC_SIG3(ni_pi->cac_weights->weight_dc_sig3) |
3326 0 : WEIGHT_UVD_SIG0(ni_pi->cac_weights->weight_uvd_sig0) |
3327 0 : WEIGHT_UVD_SIG1(ni_pi->cac_weights->weight_uvd_sig1) |
3328 0 : WEIGHT_SPARE0(ni_pi->cac_weights->weight_spare0) |
3329 0 : WEIGHT_SPARE1(ni_pi->cac_weights->weight_spare1));
3330 0 : WREG32_CG(CG_CAC_REGION_4_WEIGHT_3, reg);
3331 :
3332 0 : reg = RREG32_CG(CG_CAC_REGION_5_WEIGHT_0) & ~(WEIGHT_SQ_VSP_MASK |
3333 : WEIGHT_SQ_VSP0_MASK);
3334 0 : reg |= (WEIGHT_SQ_VSP(ni_pi->cac_weights->weight_sq_vsp) |
3335 0 : WEIGHT_SQ_VSP0(ni_pi->cac_weights->weight_sq_vsp0));
3336 0 : WREG32_CG(CG_CAC_REGION_5_WEIGHT_0, reg);
3337 :
3338 0 : reg = RREG32_CG(CG_CAC_REGION_5_WEIGHT_1) & ~(WEIGHT_SQ_GPR_MASK);
3339 0 : reg |= WEIGHT_SQ_GPR(ni_pi->cac_weights->weight_sq_gpr);
3340 0 : WREG32_CG(CG_CAC_REGION_5_WEIGHT_1, reg);
3341 :
3342 0 : reg = RREG32_CG(CG_CAC_REGION_4_OVERRIDE_4) & ~(OVR_MODE_SPARE_0_MASK |
3343 : OVR_VAL_SPARE_0_MASK |
3344 : OVR_MODE_SPARE_1_MASK |
3345 : OVR_VAL_SPARE_1_MASK);
3346 0 : reg |= (OVR_MODE_SPARE_0(ni_pi->cac_weights->ovr_mode_spare_0) |
3347 0 : OVR_VAL_SPARE_0(ni_pi->cac_weights->ovr_val_spare_0) |
3348 0 : OVR_MODE_SPARE_1(ni_pi->cac_weights->ovr_mode_spare_1) |
3349 0 : OVR_VAL_SPARE_1(ni_pi->cac_weights->ovr_val_spare_1));
3350 0 : WREG32_CG(CG_CAC_REGION_4_OVERRIDE_4, reg);
3351 :
3352 0 : reg = RREG32(SQ_CAC_THRESHOLD) & ~(VSP_MASK |
3353 : VSP0_MASK |
3354 : GPR_MASK);
3355 0 : reg |= (VSP(ni_pi->cac_weights->vsp) |
3356 0 : VSP0(ni_pi->cac_weights->vsp0) |
3357 0 : GPR(ni_pi->cac_weights->gpr));
3358 0 : WREG32(SQ_CAC_THRESHOLD, reg);
3359 :
3360 : reg = (MCDW_WR_ENABLE |
3361 : MCDX_WR_ENABLE |
3362 : MCDY_WR_ENABLE |
3363 : MCDZ_WR_ENABLE |
3364 : INDEX(0x09D4));
3365 0 : WREG32(MC_CG_CONFIG, reg);
3366 :
3367 0 : reg = (READ_WEIGHT(ni_pi->cac_weights->mc_read_weight) |
3368 0 : WRITE_WEIGHT(ni_pi->cac_weights->mc_write_weight) |
3369 : ALLOW_OVERFLOW);
3370 0 : WREG32(MC_CG_DATAPORT, reg);
3371 :
3372 0 : return 0;
3373 0 : }
3374 :
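/* Ask the SMC to start or stop CAC collection.  Enabling is skipped for UVD
 * states; long-term-average support is dropped if the SMC rejects it, and
 * the disable path also turns long-term averaging off. */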
3375 0 : static int ni_enable_smc_cac(struct radeon_device *rdev,
3376 : struct radeon_ps *radeon_new_state,
3377 : bool enable)
3378 : {
3379 0 : struct ni_power_info *ni_pi = ni_get_pi(rdev);
3380 : int ret = 0;
3381 : PPSMC_Result smc_result;
3382 :
3383 0 : if (ni_pi->enable_cac) {
3384 0 : if (enable) {
3385 0 : if (!r600_is_uvd_state(radeon_new_state->class, radeon_new_state->class2)) {
3386 0 : smc_result = rv770_send_msg_to_smc(rdev, PPSMC_MSG_CollectCAC_PowerCorreln);
3387 :
3388 0 : if (ni_pi->support_cac_long_term_average) {
3389 0 : smc_result = rv770_send_msg_to_smc(rdev, PPSMC_CACLongTermAvgEnable);
3390 0 : if (PPSMC_Result_OK != smc_result)
3391 0 : ni_pi->support_cac_long_term_average = false;
3392 : }
3393 :
3394 0 : smc_result = rv770_send_msg_to_smc(rdev, PPSMC_MSG_EnableCac);
3395 0 : if (PPSMC_Result_OK != smc_result)
3396 0 : ret = -EINVAL;
3397 :
3398 0 : ni_pi->cac_enabled = (PPSMC_Result_OK == smc_result) ? true : false;
3399 0 : }
3400 0 : } else if (ni_pi->cac_enabled) {
3401 0 : smc_result = rv770_send_msg_to_smc(rdev, PPSMC_MSG_DisableCac);
3402 :
3403 0 : ni_pi->cac_enabled = false;
3404 :
3405 0 : if (ni_pi->support_cac_long_term_average) {
3406 0 : smc_result = rv770_send_msg_to_smc(rdev, PPSMC_CACLongTermAvgDisable);
3407 0 : if (PPSMC_Result_OK != smc_result)
3408 0 : ni_pi->support_cac_long_term_average = false;
3409 : }
3410 : }
3411 : }
3412 :
3413 0 : return ret;
3414 : }
3415 :
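/* Forward a PCIe performance request to the platform via ACPI (a no-op when
 * CONFIG_ACPI is not set).  The first PECI gen1/gen2 request also notifies
 * the platform that the device is ready. */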
3416 0 : static int ni_pcie_performance_request(struct radeon_device *rdev,
3417 : u8 perf_req, bool advertise)
3418 : {
3419 : #if defined(CONFIG_ACPI)
3420 : struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3421 :
3422 : if ((perf_req == PCIE_PERF_REQ_PECI_GEN1) ||
3423 : (perf_req == PCIE_PERF_REQ_PECI_GEN2)) {
3424 : if (eg_pi->pcie_performance_request_registered == false)
3425 : radeon_acpi_pcie_notify_device_ready(rdev);
3426 : eg_pi->pcie_performance_request_registered = true;
3427 : return radeon_acpi_pcie_performance_request(rdev, perf_req, advertise);
3428 : } else if ((perf_req == PCIE_PERF_REQ_REMOVE_REGISTRY) &&
3429 : eg_pi->pcie_performance_request_registered) {
3430 : eg_pi->pcie_performance_request_registered = false;
3431 : return radeon_acpi_pcie_performance_request(rdev, perf_req, advertise);
3432 : }
3433 : #endif
3434 0 : return 0;
3435 : }
3436 :
3437 0 : static int ni_advertise_gen2_capability(struct radeon_device *rdev)
3438 : {
3439 0 : struct rv7xx_power_info *pi = rv770_get_pi(rdev);
3440 : u32 tmp;
3441 :
3442 0 : tmp = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
3443 :
3444 0 : if ((tmp & LC_OTHER_SIDE_EVER_SENT_GEN2) &&
3445 0 : (tmp & LC_OTHER_SIDE_SUPPORTS_GEN2))
3446 0 : pi->pcie_gen2 = true;
3447 : else
3448 0 : pi->pcie_gen2 = false;
3449 :
3450 0 : if (!pi->pcie_gen2)
3451 0 : ni_pcie_performance_request(rdev, PCIE_PERF_REQ_PECI_GEN2, true);
3452 :
3453 0 : return 0;
3454 : }
3455 :
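/* When the link partner supports gen2, enable or disable dynamic PCIe gen2:
 * reprogram the BIF client request if the board did not boot in gen2, then
 * set or clear the gen2 strap and hardware voltage control in
 * PCIE_LC_SPEED_CNTL (pulsing the failed-speed-change counter on enable). */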
3456 0 : static void ni_enable_bif_dynamic_pcie_gen2(struct radeon_device *rdev,
3457 : bool enable)
3458 : {
3459 0 : struct rv7xx_power_info *pi = rv770_get_pi(rdev);
3460 : u32 tmp, bif;
3461 :
3462 0 : tmp = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
3463 :
3464 0 : if ((tmp & LC_OTHER_SIDE_EVER_SENT_GEN2) &&
3465 0 : (tmp & LC_OTHER_SIDE_SUPPORTS_GEN2)) {
3466 0 : if (enable) {
3467 0 : if (!pi->boot_in_gen2) {
3468 0 : bif = RREG32(CG_BIF_REQ_AND_RSP) & ~CG_CLIENT_REQ_MASK;
3469 0 : bif |= CG_CLIENT_REQ(0xd);
3470 0 : WREG32(CG_BIF_REQ_AND_RSP, bif);
3471 0 : }
3472 0 : tmp &= ~LC_HW_VOLTAGE_IF_CONTROL_MASK;
3473 0 : tmp |= LC_HW_VOLTAGE_IF_CONTROL(1);
3474 0 : tmp |= LC_GEN2_EN_STRAP;
3475 :
3476 0 : tmp |= LC_CLR_FAILED_SPD_CHANGE_CNT;
3477 0 : WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, tmp);
3478 0 : udelay(10);
3479 0 : tmp &= ~LC_CLR_FAILED_SPD_CHANGE_CNT;
3480 0 : WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, tmp);
3481 0 : } else {
3482 0 : if (!pi->boot_in_gen2) {
3483 0 : bif = RREG32(CG_BIF_REQ_AND_RSP) & ~CG_CLIENT_REQ_MASK;
3484 0 : bif |= CG_CLIENT_REQ(0xd);
3485 0 : WREG32(CG_BIF_REQ_AND_RSP, bif);
3486 :
3487 0 : tmp &= ~LC_HW_VOLTAGE_IF_CONTROL_MASK;
3488 0 : tmp &= ~LC_GEN2_EN_STRAP;
3489 0 : }
3490 0 : WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, tmp);
3491 : }
3492 : }
3493 0 : }
3494 :
3495 0 : static void ni_enable_dynamic_pcie_gen2(struct radeon_device *rdev,
3496 : bool enable)
3497 : {
3498 0 : ni_enable_bif_dynamic_pcie_gen2(rdev, enable);
3499 :
3500 0 : if (enable)
3501 0 : WREG32_P(GENERAL_PWRMGT, ENABLE_GEN2PCIE, ~ENABLE_GEN2PCIE);
3502 : else
3503 0 : WREG32_P(GENERAL_PWRMGT, 0, ~ENABLE_GEN2PCIE);
3504 0 : }
3505 :
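/* Raise the UVD clocks before the engine clock only when the new state's top
 * sclk is lower than the current one; the _after_ variant below handles the
 * opposite case once the engine clock has been changed. */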
3506 0 : void ni_set_uvd_clock_before_set_eng_clock(struct radeon_device *rdev,
3507 : struct radeon_ps *new_ps,
3508 : struct radeon_ps *old_ps)
3509 : {
3510 0 : struct ni_ps *new_state = ni_get_ps(new_ps);
3511 0 : struct ni_ps *current_state = ni_get_ps(old_ps);
3512 :
3513 0 : if ((new_ps->vclk == old_ps->vclk) &&
3514 0 : (new_ps->dclk == old_ps->dclk))
3515 0 : return;
3516 :
3517 0 : if (new_state->performance_levels[new_state->performance_level_count - 1].sclk >=
3518 0 : current_state->performance_levels[current_state->performance_level_count - 1].sclk)
3519 0 : return;
3520 :
3521 0 : radeon_set_uvd_clocks(rdev, new_ps->vclk, new_ps->dclk);
3522 0 : }
3523 :
3524 0 : void ni_set_uvd_clock_after_set_eng_clock(struct radeon_device *rdev,
3525 : struct radeon_ps *new_ps,
3526 : struct radeon_ps *old_ps)
3527 : {
3528 0 : struct ni_ps *new_state = ni_get_ps(new_ps);
3529 0 : struct ni_ps *current_state = ni_get_ps(old_ps);
3530 :
3531 0 : if ((new_ps->vclk == old_ps->vclk) &&
3532 0 : (new_ps->dclk == old_ps->dclk))
3533 0 : return;
3534 :
3535 0 : if (new_state->performance_levels[new_state->performance_level_count - 1].sclk <
3536 0 : current_state->performance_levels[current_state->performance_level_count - 1].sclk)
3537 0 : return;
3538 :
3539 0 : radeon_set_uvd_clocks(rdev, new_ps->vclk, new_ps->dclk);
3540 0 : }
3541 :
3542 0 : void ni_dpm_setup_asic(struct radeon_device *rdev)
3543 : {
3544 0 : struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3545 : int r;
3546 :
3547 0 : r = ni_mc_load_microcode(rdev);
3548 0 : if (r)
3549 0 : DRM_ERROR("Failed to load MC firmware!\n");
3550 0 : ni_read_clock_registers(rdev);
3551 0 : btc_read_arb_registers(rdev);
3552 0 : rv770_get_memory_type(rdev);
3553 0 : if (eg_pi->pcie_performance_request)
3554 0 : ni_advertise_gen2_capability(rdev);
3555 0 : rv770_get_pcie_gen2_status(rdev);
3556 0 : rv770_enable_acpi_pm(rdev);
3557 0 : }
3558 :
3559 0 : void ni_update_current_ps(struct radeon_device *rdev,
3560 : struct radeon_ps *rps)
3561 : {
3562 0 : struct ni_ps *new_ps = ni_get_ps(rps);
3563 0 : struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3564 0 : struct ni_power_info *ni_pi = ni_get_pi(rdev);
3565 :
3566 0 : eg_pi->current_rps = *rps;
3567 0 : ni_pi->current_ps = *new_ps;
3568 0 : eg_pi->current_rps.ps_priv = &ni_pi->current_ps;
3569 0 : }
3570 :
3571 0 : void ni_update_requested_ps(struct radeon_device *rdev,
3572 : struct radeon_ps *rps)
3573 : {
3574 0 : struct ni_ps *new_ps = ni_get_ps(rps);
3575 0 : struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3576 0 : struct ni_power_info *ni_pi = ni_get_pi(rdev);
3577 :
3578 0 : eg_pi->requested_rps = *rps;
3579 0 : ni_pi->requested_ps = *new_ps;
3580 0 : eg_pi->requested_rps.ps_priv = &ni_pi->requested_ps;
3581 0 : }
3582 :
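/* Bring up dynamic power management: apply clock-gating defaults, construct
 * the voltage and MC register tables, upload the SMC firmware and its tables
 * (SMC state table, SPLL table, arb index, MC registers, CAC tables, TDP
 * limits), start the SMC and DPM, then enable the requested clock gating and
 * the thermal auto-throttle source. */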
3583 0 : int ni_dpm_enable(struct radeon_device *rdev)
3584 : {
3585 0 : struct rv7xx_power_info *pi = rv770_get_pi(rdev);
3586 0 : struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3587 0 : struct radeon_ps *boot_ps = rdev->pm.dpm.boot_ps;
3588 : int ret;
3589 :
3590 0 : if (pi->gfx_clock_gating)
3591 0 : ni_cg_clockgating_default(rdev);
3592 0 : if (btc_dpm_enabled(rdev))
3593 0 : return -EINVAL;
3594 0 : if (pi->mg_clock_gating)
3595 0 : ni_mg_clockgating_default(rdev);
3596 0 : if (eg_pi->ls_clock_gating)
3597 0 : ni_ls_clockgating_default(rdev);
3598 0 : if (pi->voltage_control) {
3599 0 : rv770_enable_voltage_control(rdev, true);
3600 0 : ret = cypress_construct_voltage_tables(rdev);
3601 0 : if (ret) {
3602 0 : DRM_ERROR("cypress_construct_voltage_tables failed\n");
3603 0 : return ret;
3604 : }
3605 : }
3606 0 : if (eg_pi->dynamic_ac_timing) {
3607 0 : ret = ni_initialize_mc_reg_table(rdev);
3608 0 : if (ret)
3609 0 : eg_pi->dynamic_ac_timing = false;
3610 : }
3611 0 : if (pi->dynamic_ss)
3612 0 : cypress_enable_spread_spectrum(rdev, true);
3613 0 : if (pi->thermal_protection)
3614 0 : rv770_enable_thermal_protection(rdev, true);
3615 0 : rv770_setup_bsp(rdev);
3616 0 : rv770_program_git(rdev);
3617 0 : rv770_program_tp(rdev);
3618 0 : rv770_program_tpp(rdev);
3619 0 : rv770_program_sstp(rdev);
3620 0 : cypress_enable_display_gap(rdev);
3621 0 : rv770_program_vc(rdev);
3622 0 : if (pi->dynamic_pcie_gen2)
3623 0 : ni_enable_dynamic_pcie_gen2(rdev, true);
3624 0 : ret = rv770_upload_firmware(rdev);
3625 0 : if (ret) {
3626 0 : DRM_ERROR("rv770_upload_firmware failed\n");
3627 0 : return ret;
3628 : }
3629 0 : ret = ni_process_firmware_header(rdev);
3630 0 : if (ret) {
3631 0 : DRM_ERROR("ni_process_firmware_header failed\n");
3632 0 : return ret;
3633 : }
3634 0 : ret = ni_initial_switch_from_arb_f0_to_f1(rdev);
3635 0 : if (ret) {
3636 0 : DRM_ERROR("ni_initial_switch_from_arb_f0_to_f1 failed\n");
3637 0 : return ret;
3638 : }
3639 0 : ret = ni_init_smc_table(rdev);
3640 0 : if (ret) {
3641 0 : DRM_ERROR("ni_init_smc_table failed\n");
3642 0 : return ret;
3643 : }
3644 0 : ret = ni_init_smc_spll_table(rdev);
3645 0 : if (ret) {
3646 0 : DRM_ERROR("ni_init_smc_spll_table failed\n");
3647 0 : return ret;
3648 : }
3649 0 : ret = ni_init_arb_table_index(rdev);
3650 0 : if (ret) {
3651 0 : DRM_ERROR("ni_init_arb_table_index failed\n");
3652 0 : return ret;
3653 : }
3654 0 : if (eg_pi->dynamic_ac_timing) {
3655 0 : ret = ni_populate_mc_reg_table(rdev, boot_ps);
3656 0 : if (ret) {
3657 0 : DRM_ERROR("ni_populate_mc_reg_table failed\n");
3658 0 : return ret;
3659 : }
3660 : }
3661 0 : ret = ni_initialize_smc_cac_tables(rdev);
3662 0 : if (ret) {
3663 0 : DRM_ERROR("ni_initialize_smc_cac_tables failed\n");
3664 0 : return ret;
3665 : }
3666 0 : ret = ni_initialize_hardware_cac_manager(rdev);
3667 0 : if (ret) {
3668 0 : DRM_ERROR("ni_initialize_hardware_cac_manager failed\n");
3669 0 : return ret;
3670 : }
3671 0 : ret = ni_populate_smc_tdp_limits(rdev, boot_ps);
3672 0 : if (ret) {
3673 0 : DRM_ERROR("ni_populate_smc_tdp_limits failed\n");
3674 0 : return ret;
3675 : }
3676 0 : ni_program_response_times(rdev);
3677 0 : r7xx_start_smc(rdev);
3678 0 : ret = cypress_notify_smc_display_change(rdev, false);
3679 0 : if (ret) {
3680 0 : DRM_ERROR("cypress_notify_smc_display_change failed\n");
3681 0 : return ret;
3682 : }
3683 0 : cypress_enable_sclk_control(rdev, true);
3684 0 : if (eg_pi->memory_transition)
3685 0 : cypress_enable_mclk_control(rdev, true);
3686 0 : cypress_start_dpm(rdev);
3687 0 : if (pi->gfx_clock_gating)
3688 0 : ni_gfx_clockgating_enable(rdev, true);
3689 0 : if (pi->mg_clock_gating)
3690 0 : ni_mg_clockgating_enable(rdev, true);
3691 0 : if (eg_pi->ls_clock_gating)
3692 0 : ni_ls_clockgating_enable(rdev, true);
3693 :
3694 0 : rv770_enable_auto_throttle_source(rdev, RADEON_DPM_AUTO_THROTTLE_SRC_THERMAL, true);
3695 :
3696 0 : ni_update_current_ps(rdev, boot_ps);
3697 :
3698 0 : return 0;
3699 0 : }
3700 :
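/* Tear down DPM: drop thermal protection, power containment, CAC and spread
 * spectrum, disable the thermal interrupt, undo clock gating, stop DPM and
 * the SMC, force the arbiter back to F0 and record the boot state as the
 * current state. */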
3701 0 : void ni_dpm_disable(struct radeon_device *rdev)
3702 : {
3703 0 : struct rv7xx_power_info *pi = rv770_get_pi(rdev);
3704 0 : struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3705 0 : struct radeon_ps *boot_ps = rdev->pm.dpm.boot_ps;
3706 :
3707 0 : if (!btc_dpm_enabled(rdev))
3708 0 : return;
3709 0 : rv770_clear_vc(rdev);
3710 0 : if (pi->thermal_protection)
3711 0 : rv770_enable_thermal_protection(rdev, false);
3712 0 : ni_enable_power_containment(rdev, boot_ps, false);
3713 0 : ni_enable_smc_cac(rdev, boot_ps, false);
3714 0 : cypress_enable_spread_spectrum(rdev, false);
3715 0 : rv770_enable_auto_throttle_source(rdev, RADEON_DPM_AUTO_THROTTLE_SRC_THERMAL, false);
3716 0 : if (pi->dynamic_pcie_gen2)
3717 0 : ni_enable_dynamic_pcie_gen2(rdev, false);
3718 :
3719 0 : if (rdev->irq.installed &&
3720 0 : r600_is_internal_thermal_sensor(rdev->pm.int_thermal_type)) {
3721 0 : rdev->irq.dpm_thermal = false;
3722 0 : radeon_irq_set(rdev);
3723 0 : }
3724 :
3725 0 : if (pi->gfx_clock_gating)
3726 0 : ni_gfx_clockgating_enable(rdev, false);
3727 0 : if (pi->mg_clock_gating)
3728 0 : ni_mg_clockgating_enable(rdev, false);
3729 0 : if (eg_pi->ls_clock_gating)
3730 0 : ni_ls_clockgating_enable(rdev, false);
3731 0 : ni_stop_dpm(rdev);
3732 0 : btc_reset_to_default(rdev);
3733 0 : ni_stop_smc(rdev);
3734 0 : ni_force_switch_to_arb_f0(rdev);
3735 :
3736 0 : ni_update_current_ps(rdev, boot_ps);
3737 0 : }
3738 :
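/* Refresh the TDP limits for the requested state: restrict the performance
 * levels, halt the SMC, repopulate the limits, then resume the SMC and
 * re-commit the software state. */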
3739 0 : static int ni_power_control_set_level(struct radeon_device *rdev)
3740 : {
3741 0 : struct radeon_ps *new_ps = rdev->pm.dpm.requested_ps;
3742 : int ret;
3743 :
3744 0 : ret = ni_restrict_performance_levels_before_switch(rdev);
3745 0 : if (ret)
3746 0 : return ret;
3747 0 : ret = rv770_halt_smc(rdev);
3748 0 : if (ret)
3749 0 : return ret;
3750 0 : ret = ni_populate_smc_tdp_limits(rdev, new_ps);
3751 0 : if (ret)
3752 0 : return ret;
3753 0 : ret = rv770_resume_smc(rdev);
3754 0 : if (ret)
3755 0 : return ret;
3756 0 : ret = rv770_set_sw_state(rdev);
3757 0 : if (ret)
3758 0 : return ret;
3759 :
3760 0 : return 0;
3761 0 : }
3762 :
3763 0 : int ni_dpm_pre_set_power_state(struct radeon_device *rdev)
3764 : {
3765 0 : struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3766 0 : struct radeon_ps requested_ps = *rdev->pm.dpm.requested_ps;
3767 : struct radeon_ps *new_ps = &requested_ps;
3768 :
3769 0 : ni_update_requested_ps(rdev, new_ps);
3770 :
3771 0 : ni_apply_state_adjust_rules(rdev, &eg_pi->requested_rps);
3772 :
3773 0 : return 0;
3774 0 : }
3775 :
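/* Perform a state switch: cap the performance levels, drop power containment
 * and CAC around the transition, halt the SMC while the new software state,
 * MC registers and memory timings are uploaded, resume and commit the state,
 * then re-enable CAC/power containment and update the TDP limits. */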
3776 0 : int ni_dpm_set_power_state(struct radeon_device *rdev)
3777 : {
3778 0 : struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3779 0 : struct radeon_ps *new_ps = &eg_pi->requested_rps;
3780 0 : struct radeon_ps *old_ps = &eg_pi->current_rps;
3781 : int ret;
3782 :
3783 0 : ret = ni_restrict_performance_levels_before_switch(rdev);
3784 0 : if (ret) {
3785 0 : DRM_ERROR("ni_restrict_performance_levels_before_switch failed\n");
3786 0 : return ret;
3787 : }
3788 0 : ni_set_uvd_clock_before_set_eng_clock(rdev, new_ps, old_ps);
3789 0 : ret = ni_enable_power_containment(rdev, new_ps, false);
3790 0 : if (ret) {
3791 0 : DRM_ERROR("ni_enable_power_containment failed\n");
3792 0 : return ret;
3793 : }
3794 0 : ret = ni_enable_smc_cac(rdev, new_ps, false);
3795 0 : if (ret) {
3796 0 : DRM_ERROR("ni_enable_smc_cac failed\n");
3797 0 : return ret;
3798 : }
3799 0 : ret = rv770_halt_smc(rdev);
3800 0 : if (ret) {
3801 0 : DRM_ERROR("rv770_halt_smc failed\n");
3802 0 : return ret;
3803 : }
3804 0 : if (eg_pi->smu_uvd_hs)
3805 0 : btc_notify_uvd_to_smc(rdev, new_ps);
3806 0 : ret = ni_upload_sw_state(rdev, new_ps);
3807 0 : if (ret) {
3808 0 : DRM_ERROR("ni_upload_sw_state failed\n");
3809 0 : return ret;
3810 : }
3811 0 : if (eg_pi->dynamic_ac_timing) {
3812 0 : ret = ni_upload_mc_reg_table(rdev, new_ps);
3813 0 : if (ret) {
3814 0 : DRM_ERROR("ni_upload_mc_reg_table failed\n");
3815 0 : return ret;
3816 : }
3817 : }
3818 0 : ret = ni_program_memory_timing_parameters(rdev, new_ps);
3819 0 : if (ret) {
3820 0 : DRM_ERROR("ni_program_memory_timing_parameters failed\n");
3821 0 : return ret;
3822 : }
3823 0 : ret = rv770_resume_smc(rdev);
3824 0 : if (ret) {
3825 0 : DRM_ERROR("rv770_resume_smc failed\n");
3826 0 : return ret;
3827 : }
3828 0 : ret = rv770_set_sw_state(rdev);
3829 0 : if (ret) {
3830 0 : DRM_ERROR("rv770_set_sw_state failed\n");
3831 0 : return ret;
3832 : }
3833 0 : ni_set_uvd_clock_after_set_eng_clock(rdev, new_ps, old_ps);
3834 0 : ret = ni_enable_smc_cac(rdev, new_ps, true);
3835 0 : if (ret) {
3836 0 : DRM_ERROR("ni_enable_smc_cac failed\n");
3837 0 : return ret;
3838 : }
3839 0 : ret = ni_enable_power_containment(rdev, new_ps, true);
3840 0 : if (ret) {
3841 0 : DRM_ERROR("ni_enable_power_containment failed\n");
3842 0 : return ret;
3843 : }
3844 :
3845 : /* update tdp */
3846 0 : ret = ni_power_control_set_level(rdev);
3847 0 : if (ret) {
3848 0 : DRM_ERROR("ni_power_control_set_level failed\n");
3849 0 : return ret;
3850 : }
3851 :
3852 0 : return 0;
3853 0 : }
3854 :
3855 0 : void ni_dpm_post_set_power_state(struct radeon_device *rdev)
3856 : {
3857 0 : struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3858 0 : struct radeon_ps *new_ps = &eg_pi->requested_rps;
3859 :
3860 0 : ni_update_current_ps(rdev, new_ps);
3861 0 : }
3862 :
3863 : #if 0
3864 : void ni_dpm_reset_asic(struct radeon_device *rdev)
3865 : {
3866 : ni_restrict_performance_levels_before_switch(rdev);
3867 : rv770_set_boot_state(rdev);
3868 : }
3869 : #endif
3870 :
3871 : union power_info {
3872 : struct _ATOM_POWERPLAY_INFO info;
3873 : struct _ATOM_POWERPLAY_INFO_V2 info_2;
3874 : struct _ATOM_POWERPLAY_INFO_V3 info_3;
3875 : struct _ATOM_PPLIB_POWERPLAYTABLE pplib;
3876 : struct _ATOM_PPLIB_POWERPLAYTABLE2 pplib2;
3877 : struct _ATOM_PPLIB_POWERPLAYTABLE3 pplib3;
3878 : };
3879 :
3880 : union pplib_clock_info {
3881 : struct _ATOM_PPLIB_R600_CLOCK_INFO r600;
3882 : struct _ATOM_PPLIB_RS780_CLOCK_INFO rs780;
3883 : struct _ATOM_PPLIB_EVERGREEN_CLOCK_INFO evergreen;
3884 : struct _ATOM_PPLIB_SUMO_CLOCK_INFO sumo;
3885 : };
3886 :
3887 : union pplib_power_state {
3888 : struct _ATOM_PPLIB_STATE v1;
3889 : struct _ATOM_PPLIB_STATE_V2 v2;
3890 : };
3891 :
3892 0 : static void ni_parse_pplib_non_clock_info(struct radeon_device *rdev,
3893 : struct radeon_ps *rps,
3894 : struct _ATOM_PPLIB_NONCLOCK_INFO *non_clock_info,
3895 : u8 table_rev)
3896 : {
3897 0 : rps->caps = le32_to_cpu(non_clock_info->ulCapsAndSettings);
3898 0 : rps->class = le16_to_cpu(non_clock_info->usClassification);
3899 0 : rps->class2 = le16_to_cpu(non_clock_info->usClassification2);
3900 :
3901 0 : if (ATOM_PPLIB_NONCLOCKINFO_VER1 < table_rev) {
3902 0 : rps->vclk = le32_to_cpu(non_clock_info->ulVCLK);
3903 0 : rps->dclk = le32_to_cpu(non_clock_info->ulDCLK);
3904 0 : } else if (r600_is_uvd_state(rps->class, rps->class2)) {
3905 0 : rps->vclk = RV770_DEFAULT_VCLK_FREQ;
3906 0 : rps->dclk = RV770_DEFAULT_DCLK_FREQ;
3907 0 : } else {
3908 0 : rps->vclk = 0;
3909 0 : rps->dclk = 0;
3910 : }
3911 :
3912 0 : if (rps->class & ATOM_PPLIB_CLASSIFICATION_BOOT)
3913 0 : rdev->pm.dpm.boot_ps = rps;
3914 0 : if (rps->class & ATOM_PPLIB_CLASSIFICATION_UVDSTATE)
3915 0 : rdev->pm.dpm.uvd_ps = rps;
3916 0 : }
3917 :
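/* Fill one performance level from an ATOM clock-info record, patching the
 * VDDC leakage marker, recording the ACPI and ULV levels, and overriding the
 * boot state's clocks and voltages with the firmware defaults. */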
3918 0 : static void ni_parse_pplib_clock_info(struct radeon_device *rdev,
3919 : struct radeon_ps *rps, int index,
3920 : union pplib_clock_info *clock_info)
3921 : {
3922 0 : struct rv7xx_power_info *pi = rv770_get_pi(rdev);
3923 0 : struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3924 0 : struct ni_ps *ps = ni_get_ps(rps);
3925 0 : struct rv7xx_pl *pl = &ps->performance_levels[index];
3926 :
3927 0 : ps->performance_level_count = index + 1;
3928 :
3929 0 : pl->sclk = le16_to_cpu(clock_info->evergreen.usEngineClockLow);
3930 0 : pl->sclk |= clock_info->evergreen.ucEngineClockHigh << 16;
3931 0 : pl->mclk = le16_to_cpu(clock_info->evergreen.usMemoryClockLow);
3932 0 : pl->mclk |= clock_info->evergreen.ucMemoryClockHigh << 16;
3933 :
3934 0 : pl->vddc = le16_to_cpu(clock_info->evergreen.usVDDC);
3935 0 : pl->vddci = le16_to_cpu(clock_info->evergreen.usVDDCI);
3936 0 : pl->flags = le32_to_cpu(clock_info->evergreen.ulFlags);
3937 :
3938 : /* patch up vddc if necessary */
3939 0 : if (pl->vddc == 0xff01) {
3940 0 : if (pi->max_vddc)
3941 0 : pl->vddc = pi->max_vddc;
3942 : }
3943 :
3944 0 : if (rps->class & ATOM_PPLIB_CLASSIFICATION_ACPI) {
3945 0 : pi->acpi_vddc = pl->vddc;
3946 0 : eg_pi->acpi_vddci = pl->vddci;
3947 0 : if (ps->performance_levels[0].flags & ATOM_PPLIB_R600_FLAGS_PCIEGEN2)
3948 0 : pi->acpi_pcie_gen2 = true;
3949 : else
3950 0 : pi->acpi_pcie_gen2 = false;
3951 : }
3952 :
3953 0 : if (rps->class2 & ATOM_PPLIB_CLASSIFICATION2_ULV) {
3954 0 : eg_pi->ulv.supported = true;
3955 0 : eg_pi->ulv.pl = pl;
3956 0 : }
3957 :
3958 0 : if (pi->min_vddc_in_table > pl->vddc)
3959 0 : pi->min_vddc_in_table = pl->vddc;
3960 :
3961 0 : if (pi->max_vddc_in_table < pl->vddc)
3962 0 : pi->max_vddc_in_table = pl->vddc;
3963 :
3964 : /* patch up boot state */
3965 0 : if (rps->class & ATOM_PPLIB_CLASSIFICATION_BOOT) {
3966 0 : u16 vddc, vddci, mvdd;
3967 0 : radeon_atombios_get_default_voltages(rdev, &vddc, &vddci, &mvdd);
3968 0 : pl->mclk = rdev->clock.default_mclk;
3969 0 : pl->sclk = rdev->clock.default_sclk;
3970 0 : pl->vddc = vddc;
3971 0 : pl->vddci = vddci;
3972 0 : }
3973 :
3974 0 : if ((rps->class & ATOM_PPLIB_CLASSIFICATION_UI_MASK) ==
3975 : ATOM_PPLIB_CLASSIFICATION_UI_PERFORMANCE) {
3976 0 : rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.sclk = pl->sclk;
3977 0 : rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.mclk = pl->mclk;
3978 0 : rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.vddc = pl->vddc;
3979 0 : rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.vddci = pl->vddci;
3980 0 : }
3981 0 : }
3982 :
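/* Parse the ATOM PowerPlay table: walk the state array, allocate an ni_ps
 * for each entry and fill it from the non-clock info and the per-level
 * clock-info records. */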
3983 0 : static int ni_parse_power_table(struct radeon_device *rdev)
3984 : {
3985 0 : struct radeon_mode_info *mode_info = &rdev->mode_info;
3986 : struct _ATOM_PPLIB_NONCLOCK_INFO *non_clock_info;
3987 : union pplib_power_state *power_state;
3988 : int i, j;
3989 : union pplib_clock_info *clock_info;
3990 : union power_info *power_info;
3991 : int index = GetIndexIntoMasterTable(DATA, PowerPlayInfo);
3992 0 : u16 data_offset;
3993 0 : u8 frev, crev;
3994 : struct ni_ps *ps;
3995 :
3996 0 : if (!atom_parse_data_header(mode_info->atom_context, index, NULL,
3997 : &frev, &crev, &data_offset))
3998 0 : return -EINVAL;
3999 0 : power_info = (union power_info *)(mode_info->atom_context->bios + data_offset);
4000 :
4001 0 : rdev->pm.dpm.ps = kzalloc(sizeof(struct radeon_ps) *
4002 0 : power_info->pplib.ucNumStates, GFP_KERNEL);
4003 0 : if (!rdev->pm.dpm.ps)
4004 0 : return -ENOMEM;
4005 :
4006 0 : for (i = 0; i < power_info->pplib.ucNumStates; i++) {
4007 0 : power_state = (union pplib_power_state *)
4008 0 : (mode_info->atom_context->bios + data_offset +
4009 0 : le16_to_cpu(power_info->pplib.usStateArrayOffset) +
4010 0 : i * power_info->pplib.ucStateEntrySize);
4011 0 : non_clock_info = (struct _ATOM_PPLIB_NONCLOCK_INFO *)
4012 0 : (mode_info->atom_context->bios + data_offset +
4013 0 : le16_to_cpu(power_info->pplib.usNonClockInfoArrayOffset) +
4014 0 : (power_state->v1.ucNonClockStateIndex *
4015 0 : power_info->pplib.ucNonClockSize));
4016 0 : if (power_info->pplib.ucStateEntrySize - 1) {
4017 : u8 *idx;
4018 0 : ps = kzalloc(sizeof(struct ni_ps), GFP_KERNEL);
4019 0 : if (ps == NULL) {
4020 0 : kfree(rdev->pm.dpm.ps);
4021 0 : return -ENOMEM;
4022 : }
4023 0 : rdev->pm.dpm.ps[i].ps_priv = ps;
4024 0 : ni_parse_pplib_non_clock_info(rdev, &rdev->pm.dpm.ps[i],
4025 : non_clock_info,
4026 0 : power_info->pplib.ucNonClockSize);
4027 0 : idx = (u8 *)&power_state->v1.ucClockStateIndices[0];
4028 0 : for (j = 0; j < (power_info->pplib.ucStateEntrySize - 1); j++) {
4029 0 : clock_info = (union pplib_clock_info *)
4030 0 : (mode_info->atom_context->bios + data_offset +
4031 0 : le16_to_cpu(power_info->pplib.usClockInfoArrayOffset) +
4032 0 : (idx[j] * power_info->pplib.ucClockInfoSize));
4033 0 : ni_parse_pplib_clock_info(rdev,
4034 0 : &rdev->pm.dpm.ps[i], j,
4035 : clock_info);
4036 : }
4037 0 : }
4038 : }
4039 0 : rdev->pm.dpm.num_ps = power_info->pplib.ucNumStates;
4040 0 : return 0;
4041 0 : }
4042 :
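/* One-time DPM initialization: allocate the power-info structures, parse the
 * ATOM power and extended power tables, seed the dispclk/VDDC dependency
 * table, select the per-SKU CAC weight table by PCI device ID, and fill in
 * the default feature flags, thresholds and leakage coefficients. */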
4043 0 : int ni_dpm_init(struct radeon_device *rdev)
4044 : {
4045 : struct rv7xx_power_info *pi;
4046 : struct evergreen_power_info *eg_pi;
4047 : struct ni_power_info *ni_pi;
4048 0 : struct atom_clock_dividers dividers;
4049 : int ret;
4050 :
4051 0 : ni_pi = kzalloc(sizeof(struct ni_power_info), GFP_KERNEL);
4052 0 : if (ni_pi == NULL)
4053 0 : return -ENOMEM;
4054 0 : rdev->pm.dpm.priv = ni_pi;
4055 0 : eg_pi = &ni_pi->eg;
4056 0 : pi = &eg_pi->rv7xx;
4057 :
4058 0 : rv770_get_max_vddc(rdev);
4059 :
4060 0 : eg_pi->ulv.supported = false;
4061 0 : pi->acpi_vddc = 0;
4062 0 : eg_pi->acpi_vddci = 0;
4063 0 : pi->min_vddc_in_table = 0;
4064 0 : pi->max_vddc_in_table = 0;
4065 :
4066 0 : ret = r600_get_platform_caps(rdev);
4067 0 : if (ret)
4068 0 : return ret;
4069 :
4070 0 : ret = ni_parse_power_table(rdev);
4071 0 : if (ret)
4072 0 : return ret;
4073 0 : ret = r600_parse_extended_power_table(rdev);
4074 0 : if (ret)
4075 0 : return ret;
4076 :
4077 0 : rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries =
4078 0 : kzalloc(4 * sizeof(struct radeon_clock_voltage_dependency_entry), GFP_KERNEL);
4079 0 : if (!rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries) {
4080 0 : r600_free_extended_power_table(rdev);
4081 0 : return -ENOMEM;
4082 : }
4083 0 : rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.count = 4;
4084 0 : rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[0].clk = 0;
4085 0 : rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[0].v = 0;
4086 0 : rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[1].clk = 36000;
4087 0 : rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[1].v = 720;
4088 0 : rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[2].clk = 54000;
4089 0 : rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[2].v = 810;
4090 0 : rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[3].clk = 72000;
4091 0 : rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[3].v = 900;
4092 :
4093 0 : ni_patch_dependency_tables_based_on_leakage(rdev);
4094 :
4095 0 : if (rdev->pm.dpm.voltage_response_time == 0)
4096 0 : rdev->pm.dpm.voltage_response_time = R600_VOLTAGERESPONSETIME_DFLT;
4097 0 : if (rdev->pm.dpm.backbias_response_time == 0)
4098 0 : rdev->pm.dpm.backbias_response_time = R600_BACKBIASRESPONSETIME_DFLT;
4099 :
4100 0 : ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
4101 :                                              0, false, &dividers);
4102 0 : if (ret)
4103 0 : pi->ref_div = dividers.ref_div + 1;
4104 : else
4105 0 : pi->ref_div = R600_REFERENCEDIVIDER_DFLT;
4106 :
4107 0 : pi->rlp = RV770_RLP_DFLT;
4108 0 : pi->rmp = RV770_RMP_DFLT;
4109 0 : pi->lhp = RV770_LHP_DFLT;
4110 0 : pi->lmp = RV770_LMP_DFLT;
4111 :
4112 0 : eg_pi->ats[0].rlp = RV770_RLP_DFLT;
4113 0 : eg_pi->ats[0].rmp = RV770_RMP_DFLT;
4114 0 : eg_pi->ats[0].lhp = RV770_LHP_DFLT;
4115 0 : eg_pi->ats[0].lmp = RV770_LMP_DFLT;
4116 :
4117 0 : eg_pi->ats[1].rlp = BTC_RLP_UVD_DFLT;
4118 0 : eg_pi->ats[1].rmp = BTC_RMP_UVD_DFLT;
4119 0 : eg_pi->ats[1].lhp = BTC_LHP_UVD_DFLT;
4120 0 : eg_pi->ats[1].lmp = BTC_LMP_UVD_DFLT;
4121 :
4122 0 : eg_pi->smu_uvd_hs = true;
4123 :
4124 0 : if (rdev->pdev->device == 0x6707) {
4125 0 : pi->mclk_strobe_mode_threshold = 55000;
4126 0 : pi->mclk_edc_enable_threshold = 55000;
4127 0 : eg_pi->mclk_edc_wr_enable_threshold = 55000;
4128 0 : } else {
4129 0 : pi->mclk_strobe_mode_threshold = 40000;
4130 0 : pi->mclk_edc_enable_threshold = 40000;
4131 0 : eg_pi->mclk_edc_wr_enable_threshold = 40000;
4132 : }
4133 0 : ni_pi->mclk_rtt_mode_threshold = eg_pi->mclk_edc_wr_enable_threshold;
4134 :
4135 0 : pi->voltage_control =
4136 0 : radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_VDDC, 0);
4137 :
4138 0 : pi->mvdd_control =
4139 0 : radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_MVDDC, 0);
4140 :
4141 0 : eg_pi->vddci_control =
4142 0 : radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_VDDCI, 0);
4143 :
4144 0 : rv770_get_engine_memory_ss(rdev);
4145 :
4146 0 : pi->asi = RV770_ASI_DFLT;
4147 0 : pi->pasi = CYPRESS_HASI_DFLT;
4148 0 : pi->vrc = CYPRESS_VRC_DFLT;
4149 :
4150 0 : pi->power_gating = false;
4151 :
4152 0 : pi->gfx_clock_gating = true;
4153 :
4154 0 : pi->mg_clock_gating = true;
4155 0 : pi->mgcgtssm = true;
4156 0 : eg_pi->ls_clock_gating = false;
4157 0 : eg_pi->sclk_deep_sleep = false;
4158 :
4159 0 : pi->dynamic_pcie_gen2 = true;
4160 :
4161 0 : if (rdev->pm.int_thermal_type != THERMAL_TYPE_NONE)
4162 0 : pi->thermal_protection = true;
4163 : else
4164 0 : pi->thermal_protection = false;
4165 :
4166 0 : pi->display_gap = true;
4167 :
4168 0 : pi->dcodt = true;
4169 :
4170 0 : pi->ulps = true;
4171 :
4172 0 : eg_pi->dynamic_ac_timing = true;
4173 0 : eg_pi->abm = true;
4174 0 : eg_pi->mcls = true;
4175 0 : eg_pi->light_sleep = true;
4176 0 : eg_pi->memory_transition = true;
4177 : #if defined(CONFIG_ACPI)
4178 : eg_pi->pcie_performance_request =
4179 : radeon_acpi_is_pcie_performance_request_supported(rdev);
4180 : #else
4181 0 : eg_pi->pcie_performance_request = false;
4182 : #endif
4183 :
4184 0 : eg_pi->dll_default_on = false;
4185 :
4186 0 : eg_pi->sclk_deep_sleep = false;
4187 :
4188 0 : pi->mclk_stutter_mode_threshold = 0;
4189 :
4190 0 : pi->sram_end = SMC_RAM_END;
4191 :
4192 0 : rdev->pm.dpm.dyn_state.mclk_sclk_ratio = 3;
4193 0 : rdev->pm.dpm.dyn_state.vddc_vddci_delta = 200;
4194 0 : rdev->pm.dpm.dyn_state.min_vddc_for_pcie_gen2 = 900;
4195 0 : rdev->pm.dpm.dyn_state.valid_sclk_values.count = ARRAY_SIZE(btc_valid_sclk);
4196 0 : rdev->pm.dpm.dyn_state.valid_sclk_values.values = btc_valid_sclk;
4197 0 : rdev->pm.dpm.dyn_state.valid_mclk_values.count = 0;
4198 0 : rdev->pm.dpm.dyn_state.valid_mclk_values.values = NULL;
4199 0 : rdev->pm.dpm.dyn_state.sclk_mclk_delta = 12500;
4200 :
4201 0 : ni_pi->cac_data.leakage_coefficients.at = 516;
4202 0 : ni_pi->cac_data.leakage_coefficients.bt = 18;
4203 0 : ni_pi->cac_data.leakage_coefficients.av = 51;
4204 0 : ni_pi->cac_data.leakage_coefficients.bv = 2957;
4205 :
4206 0 : switch (rdev->pdev->device) {
4207 : case 0x6700:
4208 : case 0x6701:
4209 : case 0x6702:
4210 : case 0x6703:
4211 : case 0x6718:
4212 0 : ni_pi->cac_weights = &cac_weights_cayman_xt;
4213 0 : break;
4214 : case 0x6705:
4215 : case 0x6719:
4216 : case 0x671D:
4217 : case 0x671C:
4218 : default:
4219 0 : ni_pi->cac_weights = &cac_weights_cayman_pro;
4220 0 : break;
4221 : case 0x6704:
4222 : case 0x6706:
4223 : case 0x6707:
4224 : case 0x6708:
4225 : case 0x6709:
4226 0 : ni_pi->cac_weights = &cac_weights_cayman_le;
4227 0 : break;
4228 : }
4229 :
4230 0 : if (ni_pi->cac_weights->enable_power_containment_by_default) {
4231 0 : ni_pi->enable_power_containment = true;
4232 0 : ni_pi->enable_cac = true;
4233 0 : ni_pi->enable_sq_ramping = true;
4234 0 : } else {
4235 0 : ni_pi->enable_power_containment = false;
4236 0 : ni_pi->enable_cac = false;
4237 0 : ni_pi->enable_sq_ramping = false;
4238 : }
4239 :
4240 0 : ni_pi->driver_calculate_cac_leakage = false;
4241 0 : ni_pi->cac_configuration_required = true;
4242 :
4243 0 : if (ni_pi->cac_configuration_required) {
4244 0 : ni_pi->support_cac_long_term_average = true;
4245 0 : ni_pi->lta_window_size = ni_pi->cac_weights->l2_lta_window_size;
4246 0 : ni_pi->lts_truncate = ni_pi->cac_weights->lts_truncate;
4247 0 : } else {
4248 0 : ni_pi->support_cac_long_term_average = false;
4249 0 : ni_pi->lta_window_size = 0;
4250 0 : ni_pi->lts_truncate = 0;
4251 : }
4252 :
4253 0 : ni_pi->use_power_boost_limit = true;
4254 :
4255 : /* make sure dc limits are valid */
4256 0 : if ((rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc.sclk == 0) ||
4257 0 : (rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc.mclk == 0))
4258 0 : rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc =
4259 0 : rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac;
4260 :
4261 0 : return 0;
4262 0 : }
4263 :
4264 0 : void ni_dpm_fini(struct radeon_device *rdev)
4265 : {
4266 : int i;
4267 :
4268 0 : for (i = 0; i < rdev->pm.dpm.num_ps; i++) {
4269 0 : kfree(rdev->pm.dpm.ps[i].ps_priv);
4270 : }
4271 0 : kfree(rdev->pm.dpm.ps);
4272 0 : kfree(rdev->pm.dpm.priv);
4273 0 : kfree(rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries);
4274 0 : r600_free_extended_power_table(rdev);
4275 0 : }
4276 :
4277 0 : void ni_dpm_print_power_state(struct radeon_device *rdev,
4278 : struct radeon_ps *rps)
4279 : {
4280 0 : struct ni_ps *ps = ni_get_ps(rps);
4281 : struct rv7xx_pl *pl;
4282 : int i;
4283 :
4284 0 : r600_dpm_print_class_info(rps->class, rps->class2);
4285 0 : r600_dpm_print_cap_info(rps->caps);
4286 0 : printk("\tuvd vclk: %d dclk: %d\n", rps->vclk, rps->dclk);
4287 0 : for (i = 0; i < ps->performance_level_count; i++) {
4288 0 : pl = &ps->performance_levels[i];
4289 0 : if (rdev->family >= CHIP_TAHITI)
4290 0 : printk("\t\tpower level %d sclk: %u mclk: %u vddc: %u vddci: %u pcie gen: %u\n",
4291 : i, pl->sclk, pl->mclk, pl->vddc, pl->vddci, pl->pcie_gen + 1);
4292 : else
4293 0 : printk("\t\tpower level %d sclk: %u mclk: %u vddc: %u vddci: %u\n",
4294 : i, pl->sclk, pl->mclk, pl->vddc, pl->vddci);
4295 : }
4296 0 : r600_dpm_print_ps_status(rdev, rps);
4297 0 : }
4298 :
4299 0 : void ni_dpm_debugfs_print_current_performance_level(struct radeon_device *rdev,
4300 : struct seq_file *m)
4301 : {
4302 0 : struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
4303 0 : struct radeon_ps *rps = &eg_pi->current_rps;
4304 0 : struct ni_ps *ps = ni_get_ps(rps);
4305 : struct rv7xx_pl *pl;
4306 : u32 current_index =
4307 0 : (RREG32(TARGET_AND_CURRENT_PROFILE_INDEX) & CURRENT_STATE_INDEX_MASK) >>
4308 : CURRENT_STATE_INDEX_SHIFT;
4309 :
4310 0 : if (current_index >= ps->performance_level_count) {
4311 0 : seq_printf(m, "invalid dpm profile %d\n", current_index);
4312 0 : } else {
4313 0 : pl = &ps->performance_levels[current_index];
4314 0 : seq_printf(m, "uvd vclk: %d dclk: %d\n", rps->vclk, rps->dclk);
4315 0 : seq_printf(m, "power level %d sclk: %u mclk: %u vddc: %u vddci: %u\n",
4316 0 : current_index, pl->sclk, pl->mclk, pl->vddc, pl->vddci);
4317 : }
4318 0 : }
4319 :
4320 0 : u32 ni_dpm_get_current_sclk(struct radeon_device *rdev)
4321 : {
4322 0 : struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
4323 0 : struct radeon_ps *rps = &eg_pi->current_rps;
4324 0 : struct ni_ps *ps = ni_get_ps(rps);
4325 : struct rv7xx_pl *pl;
4326 : u32 current_index =
4327 0 : (RREG32(TARGET_AND_CURRENT_PROFILE_INDEX) & CURRENT_STATE_INDEX_MASK) >>
4328 : CURRENT_STATE_INDEX_SHIFT;
4329 :
4330 0 : if (current_index >= ps->performance_level_count) {
4331 0 : return 0;
4332 : } else {
4333 0 : pl = &ps->performance_levels[current_index];
4334 0 : return pl->sclk;
4335 : }
4336 0 : }
4337 :
4338 0 : u32 ni_dpm_get_current_mclk(struct radeon_device *rdev)
4339 : {
4340 0 : struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
4341 0 : struct radeon_ps *rps = &eg_pi->current_rps;
4342 0 : struct ni_ps *ps = ni_get_ps(rps);
4343 : struct rv7xx_pl *pl;
4344 : u32 current_index =
4345 0 : (RREG32(TARGET_AND_CURRENT_PROFILE_INDEX) & CURRENT_STATE_INDEX_MASK) >>
4346 : CURRENT_STATE_INDEX_SHIFT;
4347 :
4348 0 : if (current_index >= ps->performance_level_count) {
4349 0 : return 0;
4350 : } else {
4351 0 : pl = &ps->performance_levels[current_index];
4352 0 : return pl->mclk;
4353 : }
4354 0 : }
4355 :
4356 0 : u32 ni_dpm_get_sclk(struct radeon_device *rdev, bool low)
4357 : {
4358 0 : struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
4359 0 : struct ni_ps *requested_state = ni_get_ps(&eg_pi->requested_rps);
4360 :
4361 0 : if (low)
4362 0 : return requested_state->performance_levels[0].sclk;
4363 : else
4364 0 : return requested_state->performance_levels[requested_state->performance_level_count - 1].sclk;
4365 0 : }
4366 :
4367 0 : u32 ni_dpm_get_mclk(struct radeon_device *rdev, bool low)
4368 : {
4369 0 : struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
4370 0 : struct ni_ps *requested_state = ni_get_ps(&eg_pi->requested_rps);
4371 :
4372 0 : if (low)
4373 0 : return requested_state->performance_levels[0].mclk;
4374 : else
4375 0 : return requested_state->performance_levels[requested_state->performance_level_count - 1].mclk;
4376 0 : }
4377 :
|