drm/radeon/dpm: switch on new late_enable callback
drivers/gpu/drm/radeon/ni_dpm.c  (firefly-linux-kernel-4.4.55.git)
1 /*
2  * Copyright 2012 Advanced Micro Devices, Inc.
3  *
4  * Permission is hereby granted, free of charge, to any person obtaining a
5  * copy of this software and associated documentation files (the "Software"),
6  * to deal in the Software without restriction, including without limitation
7  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8  * and/or sell copies of the Software, and to permit persons to whom the
9  * Software is furnished to do so, subject to the following conditions:
10  *
11  * The above copyright notice and this permission notice shall be included in
12  * all copies or substantial portions of the Software.
13  *
14  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
17  * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18  * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19  * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20  * OTHER DEALINGS IN THE SOFTWARE.
21  *
22  */
23
24 #include "drmP.h"
25 #include "radeon.h"
26 #include "nid.h"
27 #include "r600_dpm.h"
28 #include "ni_dpm.h"
29 #include "atom.h"
30 #include <linux/math64.h>
31 #include <linux/seq_file.h>
32
33 #define MC_CG_ARB_FREQ_F0           0x0a
34 #define MC_CG_ARB_FREQ_F1           0x0b
35 #define MC_CG_ARB_FREQ_F2           0x0c
36 #define MC_CG_ARB_FREQ_F3           0x0d
37
38 #define SMC_RAM_END 0xC000
39
40 static const struct ni_cac_weights cac_weights_cayman_xt =
41 {
42         0x15,
43         0x2,
44         0x19,
45         0x2,
46         0x8,
47         0x14,
48         0x2,
49         0x16,
50         0xE,
51         0x17,
52         0x13,
53         0x2B,
54         0x10,
55         0x7,
56         0x5,
57         0x5,
58         0x5,
59         0x2,
60         0x3,
61         0x9,
62         0x10,
63         0x10,
64         0x2B,
65         0xA,
66         0x9,
67         0x4,
68         0xD,
69         0xD,
70         0x3E,
71         0x18,
72         0x14,
73         0,
74         0x3,
75         0x3,
76         0x5,
77         0,
78         0x2,
79         0,
80         0,
81         0,
82         0,
83         0,
84         0,
85         0,
86         0,
87         0,
88         0x1CC,
89         0,
90         0x164,
91         1,
92         1,
93         1,
94         1,
95         12,
96         12,
97         12,
98         0x12,
99         0x1F,
100         132,
101         5,
102         7,
103         0,
104         { 0, 0, 0, 0, 0, 0, 0, 0 },
105         { 0, 0, 0, 0 },
106         true
107 };
108
109 static const struct ni_cac_weights cac_weights_cayman_pro =
110 {
111         0x16,
112         0x4,
113         0x10,
114         0x2,
115         0xA,
116         0x16,
117         0x2,
118         0x18,
119         0x10,
120         0x1A,
121         0x16,
122         0x2D,
123         0x12,
124         0xA,
125         0x6,
126         0x6,
127         0x6,
128         0x2,
129         0x4,
130         0xB,
131         0x11,
132         0x11,
133         0x2D,
134         0xC,
135         0xC,
136         0x7,
137         0x10,
138         0x10,
139         0x3F,
140         0x1A,
141         0x16,
142         0,
143         0x7,
144         0x4,
145         0x6,
146         1,
147         0x2,
148         0x1,
149         0,
150         0,
151         0,
152         0,
153         0,
154         0,
155         0x30,
156         0,
157         0x1CF,
158         0,
159         0x166,
160         1,
161         1,
162         1,
163         1,
164         12,
165         12,
166         12,
167         0x15,
168         0x1F,
169         132,
170         6,
171         6,
172         0,
173         { 0, 0, 0, 0, 0, 0, 0, 0 },
174         { 0, 0, 0, 0 },
175         true
176 };
177
178 static const struct ni_cac_weights cac_weights_cayman_le =
179 {
180         0x7,
181         0xE,
182         0x1,
183         0xA,
184         0x1,
185         0x3F,
186         0x2,
187         0x18,
188         0x10,
189         0x1A,
190         0x1,
191         0x3F,
192         0x1,
193         0xE,
194         0x6,
195         0x6,
196         0x6,
197         0x2,
198         0x4,
199         0x9,
200         0x1A,
201         0x1A,
202         0x2C,
203         0xA,
204         0x11,
205         0x8,
206         0x19,
207         0x19,
208         0x1,
209         0x1,
210         0x1A,
211         0,
212         0x8,
213         0x5,
214         0x8,
215         0x1,
216         0x3,
217         0x1,
218         0,
219         0,
220         0,
221         0,
222         0,
223         0,
224         0x38,
225         0x38,
226         0x239,
227         0x3,
228         0x18A,
229         1,
230         1,
231         1,
232         1,
233         12,
234         12,
235         12,
236         0x15,
237         0x22,
238         132,
239         6,
240         6,
241         0,
242         { 0, 0, 0, 0, 0, 0, 0, 0 },
243         { 0, 0, 0, 0 },
244         true
245 };
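/*
 * Note: the three ni_cac_weights tables above (XT, Pro, LE) hold the
 * per-block CAC weighting factors for the different Cayman SKUs; the
 * initializers are positional against struct ni_cac_weights in
 * ni_dpm.h.  One of the three is selected later, in ni_dpm_init(),
 * based on the board's PCI device ID; this listing only defines the
 * data.
 */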
246
247 #define NISLANDS_MGCG_SEQUENCE  300
248
249 static const u32 cayman_cgcg_cgls_default[] =
250 {
251         0x000008f8, 0x00000010, 0xffffffff,
252         0x000008fc, 0x00000000, 0xffffffff,
253         0x000008f8, 0x00000011, 0xffffffff,
254         0x000008fc, 0x00000000, 0xffffffff,
255         0x000008f8, 0x00000012, 0xffffffff,
256         0x000008fc, 0x00000000, 0xffffffff,
257         0x000008f8, 0x00000013, 0xffffffff,
258         0x000008fc, 0x00000000, 0xffffffff,
259         0x000008f8, 0x00000014, 0xffffffff,
260         0x000008fc, 0x00000000, 0xffffffff,
261         0x000008f8, 0x00000015, 0xffffffff,
262         0x000008fc, 0x00000000, 0xffffffff,
263         0x000008f8, 0x00000016, 0xffffffff,
264         0x000008fc, 0x00000000, 0xffffffff,
265         0x000008f8, 0x00000017, 0xffffffff,
266         0x000008fc, 0x00000000, 0xffffffff,
267         0x000008f8, 0x00000018, 0xffffffff,
268         0x000008fc, 0x00000000, 0xffffffff,
269         0x000008f8, 0x00000019, 0xffffffff,
270         0x000008fc, 0x00000000, 0xffffffff,
271         0x000008f8, 0x0000001a, 0xffffffff,
272         0x000008fc, 0x00000000, 0xffffffff,
273         0x000008f8, 0x0000001b, 0xffffffff,
274         0x000008fc, 0x00000000, 0xffffffff,
275         0x000008f8, 0x00000020, 0xffffffff,
276         0x000008fc, 0x00000000, 0xffffffff,
277         0x000008f8, 0x00000021, 0xffffffff,
278         0x000008fc, 0x00000000, 0xffffffff,
279         0x000008f8, 0x00000022, 0xffffffff,
280         0x000008fc, 0x00000000, 0xffffffff,
281         0x000008f8, 0x00000023, 0xffffffff,
282         0x000008fc, 0x00000000, 0xffffffff,
283         0x000008f8, 0x00000024, 0xffffffff,
284         0x000008fc, 0x00000000, 0xffffffff,
285         0x000008f8, 0x00000025, 0xffffffff,
286         0x000008fc, 0x00000000, 0xffffffff,
287         0x000008f8, 0x00000026, 0xffffffff,
288         0x000008fc, 0x00000000, 0xffffffff,
289         0x000008f8, 0x00000027, 0xffffffff,
290         0x000008fc, 0x00000000, 0xffffffff,
291         0x000008f8, 0x00000028, 0xffffffff,
292         0x000008fc, 0x00000000, 0xffffffff,
293         0x000008f8, 0x00000029, 0xffffffff,
294         0x000008fc, 0x00000000, 0xffffffff,
295         0x000008f8, 0x0000002a, 0xffffffff,
296         0x000008fc, 0x00000000, 0xffffffff,
297         0x000008f8, 0x0000002b, 0xffffffff,
298         0x000008fc, 0x00000000, 0xffffffff
299 };
300 #define CAYMAN_CGCG_CGLS_DEFAULT_LENGTH sizeof(cayman_cgcg_cgls_default) / (3 * sizeof(u32))
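/*
 * The cayman_*_default/enable/disable arrays in this file are flat
 * (register, value, mask) triplets, which is why the *_LENGTH macros
 * divide the array size by 3 * sizeof(u32).  They are consumed by
 * btc_program_mgcg_hw_sequence() (btc_dpm.c), whose body is not part
 * of this file.  The disabled sketch below only illustrates the
 * expected masked read-modify-write pattern for such a sequence; the
 * helper name and body are illustrative, not the actual btc_dpm.c
 * implementation.
 */
#if 0
static void ni_program_reg_sequence_sketch(struct radeon_device *rdev,
                                           const u32 *seq, u32 count)
{
        u32 i;

        for (i = 0; i < count * 3; i += 3) {
                u32 reg  = seq[i + 0];
                u32 val  = seq[i + 1];
                u32 mask = seq[i + 2];
                u32 tmp  = RREG32(reg);

                tmp &= ~mask;
                tmp |= val & mask;
                WREG32(reg, tmp);
        }
}
#endif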
301
302 static const u32 cayman_cgcg_cgls_disable[] =
303 {
304         0x000008f8, 0x00000010, 0xffffffff,
305         0x000008fc, 0xffffffff, 0xffffffff,
306         0x000008f8, 0x00000011, 0xffffffff,
307         0x000008fc, 0xffffffff, 0xffffffff,
308         0x000008f8, 0x00000012, 0xffffffff,
309         0x000008fc, 0xffffffff, 0xffffffff,
310         0x000008f8, 0x00000013, 0xffffffff,
311         0x000008fc, 0xffffffff, 0xffffffff,
312         0x000008f8, 0x00000014, 0xffffffff,
313         0x000008fc, 0xffffffff, 0xffffffff,
314         0x000008f8, 0x00000015, 0xffffffff,
315         0x000008fc, 0xffffffff, 0xffffffff,
316         0x000008f8, 0x00000016, 0xffffffff,
317         0x000008fc, 0xffffffff, 0xffffffff,
318         0x000008f8, 0x00000017, 0xffffffff,
319         0x000008fc, 0xffffffff, 0xffffffff,
320         0x000008f8, 0x00000018, 0xffffffff,
321         0x000008fc, 0xffffffff, 0xffffffff,
322         0x000008f8, 0x00000019, 0xffffffff,
323         0x000008fc, 0xffffffff, 0xffffffff,
324         0x000008f8, 0x0000001a, 0xffffffff,
325         0x000008fc, 0xffffffff, 0xffffffff,
326         0x000008f8, 0x0000001b, 0xffffffff,
327         0x000008fc, 0xffffffff, 0xffffffff,
328         0x000008f8, 0x00000020, 0xffffffff,
329         0x000008fc, 0x00000000, 0xffffffff,
330         0x000008f8, 0x00000021, 0xffffffff,
331         0x000008fc, 0x00000000, 0xffffffff,
332         0x000008f8, 0x00000022, 0xffffffff,
333         0x000008fc, 0x00000000, 0xffffffff,
334         0x000008f8, 0x00000023, 0xffffffff,
335         0x000008fc, 0x00000000, 0xffffffff,
336         0x000008f8, 0x00000024, 0xffffffff,
337         0x000008fc, 0x00000000, 0xffffffff,
338         0x000008f8, 0x00000025, 0xffffffff,
339         0x000008fc, 0x00000000, 0xffffffff,
340         0x000008f8, 0x00000026, 0xffffffff,
341         0x000008fc, 0x00000000, 0xffffffff,
342         0x000008f8, 0x00000027, 0xffffffff,
343         0x000008fc, 0x00000000, 0xffffffff,
344         0x000008f8, 0x00000028, 0xffffffff,
345         0x000008fc, 0x00000000, 0xffffffff,
346         0x000008f8, 0x00000029, 0xffffffff,
347         0x000008fc, 0x00000000, 0xffffffff,
348         0x000008f8, 0x0000002a, 0xffffffff,
349         0x000008fc, 0x00000000, 0xffffffff,
350         0x000008f8, 0x0000002b, 0xffffffff,
351         0x000008fc, 0x00000000, 0xffffffff,
352         0x00000644, 0x000f7902, 0x001f4180,
353         0x00000644, 0x000f3802, 0x001f4180
354 };
355 #define CAYMAN_CGCG_CGLS_DISABLE_LENGTH sizeof(cayman_cgcg_cgls_disable) / (3 * sizeof(u32))
356
357 static const u32 cayman_cgcg_cgls_enable[] =
358 {
359         0x00000644, 0x000f7882, 0x001f4080,
360         0x000008f8, 0x00000010, 0xffffffff,
361         0x000008fc, 0x00000000, 0xffffffff,
362         0x000008f8, 0x00000011, 0xffffffff,
363         0x000008fc, 0x00000000, 0xffffffff,
364         0x000008f8, 0x00000012, 0xffffffff,
365         0x000008fc, 0x00000000, 0xffffffff,
366         0x000008f8, 0x00000013, 0xffffffff,
367         0x000008fc, 0x00000000, 0xffffffff,
368         0x000008f8, 0x00000014, 0xffffffff,
369         0x000008fc, 0x00000000, 0xffffffff,
370         0x000008f8, 0x00000015, 0xffffffff,
371         0x000008fc, 0x00000000, 0xffffffff,
372         0x000008f8, 0x00000016, 0xffffffff,
373         0x000008fc, 0x00000000, 0xffffffff,
374         0x000008f8, 0x00000017, 0xffffffff,
375         0x000008fc, 0x00000000, 0xffffffff,
376         0x000008f8, 0x00000018, 0xffffffff,
377         0x000008fc, 0x00000000, 0xffffffff,
378         0x000008f8, 0x00000019, 0xffffffff,
379         0x000008fc, 0x00000000, 0xffffffff,
380         0x000008f8, 0x0000001a, 0xffffffff,
381         0x000008fc, 0x00000000, 0xffffffff,
382         0x000008f8, 0x0000001b, 0xffffffff,
383         0x000008fc, 0x00000000, 0xffffffff,
384         0x000008f8, 0x00000020, 0xffffffff,
385         0x000008fc, 0xffffffff, 0xffffffff,
386         0x000008f8, 0x00000021, 0xffffffff,
387         0x000008fc, 0xffffffff, 0xffffffff,
388         0x000008f8, 0x00000022, 0xffffffff,
389         0x000008fc, 0xffffffff, 0xffffffff,
390         0x000008f8, 0x00000023, 0xffffffff,
391         0x000008fc, 0xffffffff, 0xffffffff,
392         0x000008f8, 0x00000024, 0xffffffff,
393         0x000008fc, 0xffffffff, 0xffffffff,
394         0x000008f8, 0x00000025, 0xffffffff,
395         0x000008fc, 0xffffffff, 0xffffffff,
396         0x000008f8, 0x00000026, 0xffffffff,
397         0x000008fc, 0xffffffff, 0xffffffff,
398         0x000008f8, 0x00000027, 0xffffffff,
399         0x000008fc, 0xffffffff, 0xffffffff,
400         0x000008f8, 0x00000028, 0xffffffff,
401         0x000008fc, 0xffffffff, 0xffffffff,
402         0x000008f8, 0x00000029, 0xffffffff,
403         0x000008fc, 0xffffffff, 0xffffffff,
404         0x000008f8, 0x0000002a, 0xffffffff,
405         0x000008fc, 0xffffffff, 0xffffffff,
406         0x000008f8, 0x0000002b, 0xffffffff,
407         0x000008fc, 0xffffffff, 0xffffffff
408 };
409 #define CAYMAN_CGCG_CGLS_ENABLE_LENGTH  sizeof(cayman_cgcg_cgls_enable) / (3 * sizeof(u32))
410
411 static const u32 cayman_mgcg_default[] =
412 {
413         0x0000802c, 0xc0000000, 0xffffffff,
414         0x00003fc4, 0xc0000000, 0xffffffff,
415         0x00005448, 0x00000100, 0xffffffff,
416         0x000055e4, 0x00000100, 0xffffffff,
417         0x0000160c, 0x00000100, 0xffffffff,
418         0x00008984, 0x06000100, 0xffffffff,
419         0x0000c164, 0x00000100, 0xffffffff,
420         0x00008a18, 0x00000100, 0xffffffff,
421         0x0000897c, 0x06000100, 0xffffffff,
422         0x00008b28, 0x00000100, 0xffffffff,
423         0x00009144, 0x00800200, 0xffffffff,
424         0x00009a60, 0x00000100, 0xffffffff,
425         0x00009868, 0x00000100, 0xffffffff,
426         0x00008d58, 0x00000100, 0xffffffff,
427         0x00009510, 0x00000100, 0xffffffff,
428         0x0000949c, 0x00000100, 0xffffffff,
429         0x00009654, 0x00000100, 0xffffffff,
430         0x00009030, 0x00000100, 0xffffffff,
431         0x00009034, 0x00000100, 0xffffffff,
432         0x00009038, 0x00000100, 0xffffffff,
433         0x0000903c, 0x00000100, 0xffffffff,
434         0x00009040, 0x00000100, 0xffffffff,
435         0x0000a200, 0x00000100, 0xffffffff,
436         0x0000a204, 0x00000100, 0xffffffff,
437         0x0000a208, 0x00000100, 0xffffffff,
438         0x0000a20c, 0x00000100, 0xffffffff,
439         0x00009744, 0x00000100, 0xffffffff,
440         0x00003f80, 0x00000100, 0xffffffff,
441         0x0000a210, 0x00000100, 0xffffffff,
442         0x0000a214, 0x00000100, 0xffffffff,
443         0x000004d8, 0x00000100, 0xffffffff,
444         0x00009664, 0x00000100, 0xffffffff,
445         0x00009698, 0x00000100, 0xffffffff,
446         0x000004d4, 0x00000200, 0xffffffff,
447         0x000004d0, 0x00000000, 0xffffffff,
448         0x000030cc, 0x00000104, 0xffffffff,
449         0x0000d0c0, 0x00000100, 0xffffffff,
450         0x0000d8c0, 0x00000100, 0xffffffff,
451         0x0000802c, 0x40000000, 0xffffffff,
452         0x00003fc4, 0x40000000, 0xffffffff,
453         0x0000915c, 0x00010000, 0xffffffff,
454         0x00009160, 0x00030002, 0xffffffff,
455         0x00009164, 0x00050004, 0xffffffff,
456         0x00009168, 0x00070006, 0xffffffff,
457         0x00009178, 0x00070000, 0xffffffff,
458         0x0000917c, 0x00030002, 0xffffffff,
459         0x00009180, 0x00050004, 0xffffffff,
460         0x0000918c, 0x00010006, 0xffffffff,
461         0x00009190, 0x00090008, 0xffffffff,
462         0x00009194, 0x00070000, 0xffffffff,
463         0x00009198, 0x00030002, 0xffffffff,
464         0x0000919c, 0x00050004, 0xffffffff,
465         0x000091a8, 0x00010006, 0xffffffff,
466         0x000091ac, 0x00090008, 0xffffffff,
467         0x000091b0, 0x00070000, 0xffffffff,
468         0x000091b4, 0x00030002, 0xffffffff,
469         0x000091b8, 0x00050004, 0xffffffff,
470         0x000091c4, 0x00010006, 0xffffffff,
471         0x000091c8, 0x00090008, 0xffffffff,
472         0x000091cc, 0x00070000, 0xffffffff,
473         0x000091d0, 0x00030002, 0xffffffff,
474         0x000091d4, 0x00050004, 0xffffffff,
475         0x000091e0, 0x00010006, 0xffffffff,
476         0x000091e4, 0x00090008, 0xffffffff,
477         0x000091e8, 0x00000000, 0xffffffff,
478         0x000091ec, 0x00070000, 0xffffffff,
479         0x000091f0, 0x00030002, 0xffffffff,
480         0x000091f4, 0x00050004, 0xffffffff,
481         0x00009200, 0x00010006, 0xffffffff,
482         0x00009204, 0x00090008, 0xffffffff,
483         0x00009208, 0x00070000, 0xffffffff,
484         0x0000920c, 0x00030002, 0xffffffff,
485         0x00009210, 0x00050004, 0xffffffff,
486         0x0000921c, 0x00010006, 0xffffffff,
487         0x00009220, 0x00090008, 0xffffffff,
488         0x00009224, 0x00070000, 0xffffffff,
489         0x00009228, 0x00030002, 0xffffffff,
490         0x0000922c, 0x00050004, 0xffffffff,
491         0x00009238, 0x00010006, 0xffffffff,
492         0x0000923c, 0x00090008, 0xffffffff,
493         0x00009240, 0x00070000, 0xffffffff,
494         0x00009244, 0x00030002, 0xffffffff,
495         0x00009248, 0x00050004, 0xffffffff,
496         0x00009254, 0x00010006, 0xffffffff,
497         0x00009258, 0x00090008, 0xffffffff,
498         0x0000925c, 0x00070000, 0xffffffff,
499         0x00009260, 0x00030002, 0xffffffff,
500         0x00009264, 0x00050004, 0xffffffff,
501         0x00009270, 0x00010006, 0xffffffff,
502         0x00009274, 0x00090008, 0xffffffff,
503         0x00009278, 0x00070000, 0xffffffff,
504         0x0000927c, 0x00030002, 0xffffffff,
505         0x00009280, 0x00050004, 0xffffffff,
506         0x0000928c, 0x00010006, 0xffffffff,
507         0x00009290, 0x00090008, 0xffffffff,
508         0x000092a8, 0x00070000, 0xffffffff,
509         0x000092ac, 0x00030002, 0xffffffff,
510         0x000092b0, 0x00050004, 0xffffffff,
511         0x000092bc, 0x00010006, 0xffffffff,
512         0x000092c0, 0x00090008, 0xffffffff,
513         0x000092c4, 0x00070000, 0xffffffff,
514         0x000092c8, 0x00030002, 0xffffffff,
515         0x000092cc, 0x00050004, 0xffffffff,
516         0x000092d8, 0x00010006, 0xffffffff,
517         0x000092dc, 0x00090008, 0xffffffff,
518         0x00009294, 0x00000000, 0xffffffff,
519         0x0000802c, 0x40010000, 0xffffffff,
520         0x00003fc4, 0x40010000, 0xffffffff,
521         0x0000915c, 0x00010000, 0xffffffff,
522         0x00009160, 0x00030002, 0xffffffff,
523         0x00009164, 0x00050004, 0xffffffff,
524         0x00009168, 0x00070006, 0xffffffff,
525         0x00009178, 0x00070000, 0xffffffff,
526         0x0000917c, 0x00030002, 0xffffffff,
527         0x00009180, 0x00050004, 0xffffffff,
528         0x0000918c, 0x00010006, 0xffffffff,
529         0x00009190, 0x00090008, 0xffffffff,
530         0x00009194, 0x00070000, 0xffffffff,
531         0x00009198, 0x00030002, 0xffffffff,
532         0x0000919c, 0x00050004, 0xffffffff,
533         0x000091a8, 0x00010006, 0xffffffff,
534         0x000091ac, 0x00090008, 0xffffffff,
535         0x000091b0, 0x00070000, 0xffffffff,
536         0x000091b4, 0x00030002, 0xffffffff,
537         0x000091b8, 0x00050004, 0xffffffff,
538         0x000091c4, 0x00010006, 0xffffffff,
539         0x000091c8, 0x00090008, 0xffffffff,
540         0x000091cc, 0x00070000, 0xffffffff,
541         0x000091d0, 0x00030002, 0xffffffff,
542         0x000091d4, 0x00050004, 0xffffffff,
543         0x000091e0, 0x00010006, 0xffffffff,
544         0x000091e4, 0x00090008, 0xffffffff,
545         0x000091e8, 0x00000000, 0xffffffff,
546         0x000091ec, 0x00070000, 0xffffffff,
547         0x000091f0, 0x00030002, 0xffffffff,
548         0x000091f4, 0x00050004, 0xffffffff,
549         0x00009200, 0x00010006, 0xffffffff,
550         0x00009204, 0x00090008, 0xffffffff,
551         0x00009208, 0x00070000, 0xffffffff,
552         0x0000920c, 0x00030002, 0xffffffff,
553         0x00009210, 0x00050004, 0xffffffff,
554         0x0000921c, 0x00010006, 0xffffffff,
555         0x00009220, 0x00090008, 0xffffffff,
556         0x00009224, 0x00070000, 0xffffffff,
557         0x00009228, 0x00030002, 0xffffffff,
558         0x0000922c, 0x00050004, 0xffffffff,
559         0x00009238, 0x00010006, 0xffffffff,
560         0x0000923c, 0x00090008, 0xffffffff,
561         0x00009240, 0x00070000, 0xffffffff,
562         0x00009244, 0x00030002, 0xffffffff,
563         0x00009248, 0x00050004, 0xffffffff,
564         0x00009254, 0x00010006, 0xffffffff,
565         0x00009258, 0x00090008, 0xffffffff,
566         0x0000925c, 0x00070000, 0xffffffff,
567         0x00009260, 0x00030002, 0xffffffff,
568         0x00009264, 0x00050004, 0xffffffff,
569         0x00009270, 0x00010006, 0xffffffff,
570         0x00009274, 0x00090008, 0xffffffff,
571         0x00009278, 0x00070000, 0xffffffff,
572         0x0000927c, 0x00030002, 0xffffffff,
573         0x00009280, 0x00050004, 0xffffffff,
574         0x0000928c, 0x00010006, 0xffffffff,
575         0x00009290, 0x00090008, 0xffffffff,
576         0x000092a8, 0x00070000, 0xffffffff,
577         0x000092ac, 0x00030002, 0xffffffff,
578         0x000092b0, 0x00050004, 0xffffffff,
579         0x000092bc, 0x00010006, 0xffffffff,
580         0x000092c0, 0x00090008, 0xffffffff,
581         0x000092c4, 0x00070000, 0xffffffff,
582         0x000092c8, 0x00030002, 0xffffffff,
583         0x000092cc, 0x00050004, 0xffffffff,
584         0x000092d8, 0x00010006, 0xffffffff,
585         0x000092dc, 0x00090008, 0xffffffff,
586         0x00009294, 0x00000000, 0xffffffff,
587         0x0000802c, 0xc0000000, 0xffffffff,
588         0x00003fc4, 0xc0000000, 0xffffffff,
589         0x000008f8, 0x00000010, 0xffffffff,
590         0x000008fc, 0x00000000, 0xffffffff,
591         0x000008f8, 0x00000011, 0xffffffff,
592         0x000008fc, 0x00000000, 0xffffffff,
593         0x000008f8, 0x00000012, 0xffffffff,
594         0x000008fc, 0x00000000, 0xffffffff,
595         0x000008f8, 0x00000013, 0xffffffff,
596         0x000008fc, 0x00000000, 0xffffffff,
597         0x000008f8, 0x00000014, 0xffffffff,
598         0x000008fc, 0x00000000, 0xffffffff,
599         0x000008f8, 0x00000015, 0xffffffff,
600         0x000008fc, 0x00000000, 0xffffffff,
601         0x000008f8, 0x00000016, 0xffffffff,
602         0x000008fc, 0x00000000, 0xffffffff,
603         0x000008f8, 0x00000017, 0xffffffff,
604         0x000008fc, 0x00000000, 0xffffffff,
605         0x000008f8, 0x00000018, 0xffffffff,
606         0x000008fc, 0x00000000, 0xffffffff,
607         0x000008f8, 0x00000019, 0xffffffff,
608         0x000008fc, 0x00000000, 0xffffffff,
609         0x000008f8, 0x0000001a, 0xffffffff,
610         0x000008fc, 0x00000000, 0xffffffff,
611         0x000008f8, 0x0000001b, 0xffffffff,
612         0x000008fc, 0x00000000, 0xffffffff
613 };
614 #define CAYMAN_MGCG_DEFAULT_LENGTH sizeof(cayman_mgcg_default) / (3 * sizeof(u32))
615
616 static const u32 cayman_mgcg_disable[] =
617 {
618         0x0000802c, 0xc0000000, 0xffffffff,
619         0x000008f8, 0x00000000, 0xffffffff,
620         0x000008fc, 0xffffffff, 0xffffffff,
621         0x000008f8, 0x00000001, 0xffffffff,
622         0x000008fc, 0xffffffff, 0xffffffff,
623         0x000008f8, 0x00000002, 0xffffffff,
624         0x000008fc, 0xffffffff, 0xffffffff,
625         0x000008f8, 0x00000003, 0xffffffff,
626         0x000008fc, 0xffffffff, 0xffffffff,
627         0x00009150, 0x00600000, 0xffffffff
628 };
629 #define CAYMAN_MGCG_DISABLE_LENGTH   sizeof(cayman_mgcg_disable) / (3 * sizeof(u32))
630
631 static const u32 cayman_mgcg_enable[] =
632 {
633         0x0000802c, 0xc0000000, 0xffffffff,
634         0x000008f8, 0x00000000, 0xffffffff,
635         0x000008fc, 0x00000000, 0xffffffff,
636         0x000008f8, 0x00000001, 0xffffffff,
637         0x000008fc, 0x00000000, 0xffffffff,
638         0x000008f8, 0x00000002, 0xffffffff,
639         0x000008fc, 0x00600000, 0xffffffff,
640         0x000008f8, 0x00000003, 0xffffffff,
641         0x000008fc, 0x00000000, 0xffffffff,
642         0x00009150, 0x96944200, 0xffffffff
643 };
644
645 #define CAYMAN_MGCG_ENABLE_LENGTH   sizeof(cayman_mgcg_enable) / (3 * sizeof(u32))
646
647 #define NISLANDS_SYSLS_SEQUENCE  100
648
649 static const u32 cayman_sysls_default[] =
650 {
651         /* Register,   Value,     Mask bits */
652         0x000055e8, 0x00000000, 0xffffffff,
653         0x0000d0bc, 0x00000000, 0xffffffff,
654         0x0000d8bc, 0x00000000, 0xffffffff,
655         0x000015c0, 0x000c1401, 0xffffffff,
656         0x0000264c, 0x000c0400, 0xffffffff,
657         0x00002648, 0x000c0400, 0xffffffff,
658         0x00002650, 0x000c0400, 0xffffffff,
659         0x000020b8, 0x000c0400, 0xffffffff,
660         0x000020bc, 0x000c0400, 0xffffffff,
661         0x000020c0, 0x000c0c80, 0xffffffff,
662         0x0000f4a0, 0x000000c0, 0xffffffff,
663         0x0000f4a4, 0x00680fff, 0xffffffff,
664         0x00002f50, 0x00000404, 0xffffffff,
665         0x000004c8, 0x00000001, 0xffffffff,
666         0x000064ec, 0x00000000, 0xffffffff,
667         0x00000c7c, 0x00000000, 0xffffffff,
668         0x00008dfc, 0x00000000, 0xffffffff
669 };
670 #define CAYMAN_SYSLS_DEFAULT_LENGTH sizeof(cayman_sysls_default) / (3 * sizeof(u32))
671
672 static const u32 cayman_sysls_disable[] =
673 {
674         /* Register,   Value,     Mask bits */
675         0x0000d0c0, 0x00000000, 0xffffffff,
676         0x0000d8c0, 0x00000000, 0xffffffff,
677         0x000055e8, 0x00000000, 0xffffffff,
678         0x0000d0bc, 0x00000000, 0xffffffff,
679         0x0000d8bc, 0x00000000, 0xffffffff,
680         0x000015c0, 0x00041401, 0xffffffff,
681         0x0000264c, 0x00040400, 0xffffffff,
682         0x00002648, 0x00040400, 0xffffffff,
683         0x00002650, 0x00040400, 0xffffffff,
684         0x000020b8, 0x00040400, 0xffffffff,
685         0x000020bc, 0x00040400, 0xffffffff,
686         0x000020c0, 0x00040c80, 0xffffffff,
687         0x0000f4a0, 0x000000c0, 0xffffffff,
688         0x0000f4a4, 0x00680000, 0xffffffff,
689         0x00002f50, 0x00000404, 0xffffffff,
690         0x000004c8, 0x00000001, 0xffffffff,
691         0x000064ec, 0x00007ffd, 0xffffffff,
692         0x00000c7c, 0x0000ff00, 0xffffffff,
693         0x00008dfc, 0x0000007f, 0xffffffff
694 };
695 #define CAYMAN_SYSLS_DISABLE_LENGTH sizeof(cayman_sysls_disable) / (3 * sizeof(u32))
696
697 static const u32 cayman_sysls_enable[] =
698 {
699         /* Register,   Value,     Mask bits */
700         0x000055e8, 0x00000001, 0xffffffff,
701         0x0000d0bc, 0x00000100, 0xffffffff,
702         0x0000d8bc, 0x00000100, 0xffffffff,
703         0x000015c0, 0x000c1401, 0xffffffff,
704         0x0000264c, 0x000c0400, 0xffffffff,
705         0x00002648, 0x000c0400, 0xffffffff,
706         0x00002650, 0x000c0400, 0xffffffff,
707         0x000020b8, 0x000c0400, 0xffffffff,
708         0x000020bc, 0x000c0400, 0xffffffff,
709         0x000020c0, 0x000c0c80, 0xffffffff,
710         0x0000f4a0, 0x000000c0, 0xffffffff,
711         0x0000f4a4, 0x00680fff, 0xffffffff,
712         0x00002f50, 0x00000903, 0xffffffff,
713         0x000004c8, 0x00000000, 0xffffffff,
714         0x000064ec, 0x00000000, 0xffffffff,
715         0x00000c7c, 0x00000000, 0xffffffff,
716         0x00008dfc, 0x00000000, 0xffffffff
717 };
718 #define CAYMAN_SYSLS_ENABLE_LENGTH sizeof(cayman_sysls_enable) / (3 * sizeof(u32))
719
720 struct rv7xx_power_info *rv770_get_pi(struct radeon_device *rdev);
721 struct evergreen_power_info *evergreen_get_pi(struct radeon_device *rdev);
722
723 struct ni_power_info *ni_get_pi(struct radeon_device *rdev)
724 {
725         struct ni_power_info *pi = rdev->pm.dpm.priv;
726
727         return pi;
728 }
729
730 struct ni_ps *ni_get_ps(struct radeon_ps *rps)
731 {
732         struct ni_ps *ps = rps->ps_priv;
733
734         return ps;
735 }
736
737 static void ni_calculate_leakage_for_v_and_t_formula(const struct ni_leakage_coeffients *coeff,
738                                                      u16 v, s32 t,
739                                                      u32 ileakage,
740                                                      u32 *leakage)
741 {
742         s64 kt, kv, leakage_w, i_leakage, vddc, temperature;
743
744         i_leakage = div64_s64(drm_int2fixp(ileakage), 1000);
745         vddc = div64_s64(drm_int2fixp(v), 1000);
746         temperature = div64_s64(drm_int2fixp(t), 1000);
747
748         kt = drm_fixp_mul(div64_s64(drm_int2fixp(coeff->at), 1000),
749                           drm_fixp_exp(drm_fixp_mul(div64_s64(drm_int2fixp(coeff->bt), 1000), temperature)));
750         kv = drm_fixp_mul(div64_s64(drm_int2fixp(coeff->av), 1000),
751                           drm_fixp_exp(drm_fixp_mul(div64_s64(drm_int2fixp(coeff->bv), 1000), vddc)));
752
753         leakage_w = drm_fixp_mul(drm_fixp_mul(drm_fixp_mul(i_leakage, kt), kv), vddc);
754
755         *leakage = drm_fixp2int(leakage_w * 1000);
756 }
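/*
 * In plain terms, the fixed-point math above evaluates:
 *
 *   V = v / 1000,  T = t / 1000,  I = ileakage / 1000
 *   kt = (at / 1000) * exp((bt / 1000) * T)
 *   kv = (av / 1000) * exp((bv / 1000) * V)
 *   P  = I * kt * kv * V
 *   *leakage = P * 1000
 *
 * i.e. the leakage power scaled back up by 1000, which comes out in
 * milliwatts if the inputs are mA and mV as appears to be the intent.
 */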
757
758 static void ni_calculate_leakage_for_v_and_t(struct radeon_device *rdev,
759                                              const struct ni_leakage_coeffients *coeff,
760                                              u16 v,
761                                              s32 t,
762                                              u32 i_leakage,
763                                              u32 *leakage)
764 {
765         ni_calculate_leakage_for_v_and_t_formula(coeff, v, t, i_leakage, leakage);
766 }
767
768 bool ni_dpm_vblank_too_short(struct radeon_device *rdev)
769 {
770         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
771         u32 vblank_time = r600_dpm_get_vblank_time(rdev);
772         /* we never hit the non-gddr5 limit so disable it */
773         u32 switch_limit = pi->mem_gddr5 ? 450 : 0;
774
775         if (vblank_time < switch_limit)
776                 return true;
777         else
778                 return false;
779
780 }
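/*
 * Illustrative check of the 450 us GDDR5 limit: for a single CEA-861
 * 1080p60 display (148.5 MHz pixel clock, 2200 x 1125 total), the
 * vertical blank is 45 lines * 2200 px / 148.5 MHz ~= 667 us, so mclk
 * switching remains allowed; tighter timings with shorter blanking can
 * fall under the limit and force a fixed mclk instead.
 */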
781
782 static void ni_apply_state_adjust_rules(struct radeon_device *rdev,
783                                         struct radeon_ps *rps)
784 {
785         struct ni_ps *ps = ni_get_ps(rps);
786         struct radeon_clock_and_voltage_limits *max_limits;
787         bool disable_mclk_switching;
788         u32 mclk;
789         u16 vddci;
790         u32 max_sclk_vddc, max_mclk_vddci, max_mclk_vddc;
791         int i;
792
793         if ((rdev->pm.dpm.new_active_crtc_count > 1) ||
794             ni_dpm_vblank_too_short(rdev))
795                 disable_mclk_switching = true;
796         else
797                 disable_mclk_switching = false;
798
799         if (rdev->pm.dpm.ac_power)
800                 max_limits = &rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac;
801         else
802                 max_limits = &rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc;
803
804         if (rdev->pm.dpm.ac_power == false) {
805                 for (i = 0; i < ps->performance_level_count; i++) {
806                         if (ps->performance_levels[i].mclk > max_limits->mclk)
807                                 ps->performance_levels[i].mclk = max_limits->mclk;
808                         if (ps->performance_levels[i].sclk > max_limits->sclk)
809                                 ps->performance_levels[i].sclk = max_limits->sclk;
810                         if (ps->performance_levels[i].vddc > max_limits->vddc)
811                                 ps->performance_levels[i].vddc = max_limits->vddc;
812                         if (ps->performance_levels[i].vddci > max_limits->vddci)
813                                 ps->performance_levels[i].vddci = max_limits->vddci;
814                 }
815         }
816
817         /* limit clocks to max supported clocks based on voltage dependency tables */
818         btc_get_max_clock_from_voltage_dependency_table(&rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk,
819                                                         &max_sclk_vddc);
820         btc_get_max_clock_from_voltage_dependency_table(&rdev->pm.dpm.dyn_state.vddci_dependency_on_mclk,
821                                                         &max_mclk_vddci);
822         btc_get_max_clock_from_voltage_dependency_table(&rdev->pm.dpm.dyn_state.vddc_dependency_on_mclk,
823                                                         &max_mclk_vddc);
824
825         for (i = 0; i < ps->performance_level_count; i++) {
826                 if (max_sclk_vddc) {
827                         if (ps->performance_levels[i].sclk > max_sclk_vddc)
828                                 ps->performance_levels[i].sclk = max_sclk_vddc;
829                 }
830                 if (max_mclk_vddci) {
831                         if (ps->performance_levels[i].mclk > max_mclk_vddci)
832                                 ps->performance_levels[i].mclk = max_mclk_vddci;
833                 }
834                 if (max_mclk_vddc) {
835                         if (ps->performance_levels[i].mclk > max_mclk_vddc)
836                                 ps->performance_levels[i].mclk = max_mclk_vddc;
837                 }
838         }
839
840         /* XXX validate the min clocks required for display */
841
842         /* adjust low state */
843         if (disable_mclk_switching) {
844                 ps->performance_levels[0].mclk =
845                         ps->performance_levels[ps->performance_level_count - 1].mclk;
846                 ps->performance_levels[0].vddci =
847                         ps->performance_levels[ps->performance_level_count - 1].vddci;
848         }
849
850         btc_skip_blacklist_clocks(rdev, max_limits->sclk, max_limits->mclk,
851                                   &ps->performance_levels[0].sclk,
852                                   &ps->performance_levels[0].mclk);
853
854         for (i = 1; i < ps->performance_level_count; i++) {
855                 if (ps->performance_levels[i].sclk < ps->performance_levels[i - 1].sclk)
856                         ps->performance_levels[i].sclk = ps->performance_levels[i - 1].sclk;
857                 if (ps->performance_levels[i].vddc < ps->performance_levels[i - 1].vddc)
858                         ps->performance_levels[i].vddc = ps->performance_levels[i - 1].vddc;
859         }
860
861         /* adjust remaining states */
862         if (disable_mclk_switching) {
863                 mclk = ps->performance_levels[0].mclk;
864                 vddci = ps->performance_levels[0].vddci;
865                 for (i = 1; i < ps->performance_level_count; i++) {
866                         if (mclk < ps->performance_levels[i].mclk)
867                                 mclk = ps->performance_levels[i].mclk;
868                         if (vddci < ps->performance_levels[i].vddci)
869                                 vddci = ps->performance_levels[i].vddci;
870                 }
871                 for (i = 0; i < ps->performance_level_count; i++) {
872                         ps->performance_levels[i].mclk = mclk;
873                         ps->performance_levels[i].vddci = vddci;
874                 }
875         } else {
876                 for (i = 1; i < ps->performance_level_count; i++) {
877                         if (ps->performance_levels[i].mclk < ps->performance_levels[i - 1].mclk)
878                                 ps->performance_levels[i].mclk = ps->performance_levels[i - 1].mclk;
879                         if (ps->performance_levels[i].vddci < ps->performance_levels[i - 1].vddci)
880                                 ps->performance_levels[i].vddci = ps->performance_levels[i - 1].vddci;
881                 }
882         }
883
884         for (i = 1; i < ps->performance_level_count; i++)
885                 btc_skip_blacklist_clocks(rdev, max_limits->sclk, max_limits->mclk,
886                                           &ps->performance_levels[i].sclk,
887                                           &ps->performance_levels[i].mclk);
888
889         for (i = 0; i < ps->performance_level_count; i++)
890                 btc_adjust_clock_combinations(rdev, max_limits,
891                                               &ps->performance_levels[i]);
892
893         for (i = 0; i < ps->performance_level_count; i++) {
894                 btc_apply_voltage_dependency_rules(&rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk,
895                                                    ps->performance_levels[i].sclk,
896                                                    max_limits->vddc,  &ps->performance_levels[i].vddc);
897                 btc_apply_voltage_dependency_rules(&rdev->pm.dpm.dyn_state.vddci_dependency_on_mclk,
898                                                    ps->performance_levels[i].mclk,
899                                                    max_limits->vddci, &ps->performance_levels[i].vddci);
900                 btc_apply_voltage_dependency_rules(&rdev->pm.dpm.dyn_state.vddc_dependency_on_mclk,
901                                                    ps->performance_levels[i].mclk,
902                                                    max_limits->vddc,  &ps->performance_levels[i].vddc);
903                 btc_apply_voltage_dependency_rules(&rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk,
904                                                    rdev->clock.current_dispclk,
905                                                    max_limits->vddc,  &ps->performance_levels[i].vddc);
906         }
907
908         for (i = 0; i < ps->performance_level_count; i++) {
909                 btc_apply_voltage_delta_rules(rdev,
910                                               max_limits->vddc, max_limits->vddci,
911                                               &ps->performance_levels[i].vddc,
912                                               &ps->performance_levels[i].vddci);
913         }
914
915         ps->dc_compatible = true;
916         for (i = 0; i < ps->performance_level_count; i++) {
917                 if (ps->performance_levels[i].vddc > rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc.vddc)
918                         ps->dc_compatible = false;
919
920                 if (ps->performance_levels[i].vddc < rdev->pm.dpm.dyn_state.min_vddc_for_pcie_gen2)
921                         ps->performance_levels[i].flags &= ~ATOM_PPLIB_R600_FLAGS_PCIEGEN2;
922         }
923 }
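/*
 * Summary of the adjustment pipeline above: decide whether mclk
 * switching must be disabled (multiple active CRTCs or a short
 * vblank); on DC power, clamp every performance level to the DC
 * limits; clamp clocks to the per-voltage maximums from the dependency
 * tables; when mclk switching is disabled, pin all levels to the
 * highest mclk/vddci found in the state; enforce non-decreasing
 * sclk/vddc (and mclk/vddci) across levels; skip blacklisted clock
 * combinations; apply the voltage dependency and delta rules; and
 * finally mark whether the state is still DC-compatible and drop the
 * PCIe gen2 flag on levels whose vddc is too low for it.
 */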
924
925 static void ni_cg_clockgating_default(struct radeon_device *rdev)
926 {
927         u32 count;
928         const u32 *ps = NULL;
929
930         ps = (const u32 *)&cayman_cgcg_cgls_default;
931         count = CAYMAN_CGCG_CGLS_DEFAULT_LENGTH;
932
933         btc_program_mgcg_hw_sequence(rdev, ps, count);
934 }
935
936 static void ni_gfx_clockgating_enable(struct radeon_device *rdev,
937                                       bool enable)
938 {
939         u32 count;
940         const u32 *ps = NULL;
941
942         if (enable) {
943                 ps = (const u32 *)&cayman_cgcg_cgls_enable;
944                 count = CAYMAN_CGCG_CGLS_ENABLE_LENGTH;
945         } else {
946                 ps = (const u32 *)&cayman_cgcg_cgls_disable;
947                 count = CAYMAN_CGCG_CGLS_DISABLE_LENGTH;
948         }
949
950         btc_program_mgcg_hw_sequence(rdev, ps, count);
951 }
952
953 static void ni_mg_clockgating_default(struct radeon_device *rdev)
954 {
955         u32 count;
956         const u32 *ps = NULL;
957
958         ps = (const u32 *)&cayman_mgcg_default;
959         count = CAYMAN_MGCG_DEFAULT_LENGTH;
960
961         btc_program_mgcg_hw_sequence(rdev, ps, count);
962 }
963
964 static void ni_mg_clockgating_enable(struct radeon_device *rdev,
965                                      bool enable)
966 {
967         u32 count;
968         const u32 *ps = NULL;
969
970         if (enable) {
971                 ps = (const u32 *)&cayman_mgcg_enable;
972                 count = CAYMAN_MGCG_ENABLE_LENGTH;
973         } else {
974                 ps = (const u32 *)&cayman_mgcg_disable;
975                 count = CAYMAN_MGCG_DISABLE_LENGTH;
976         }
977
978         btc_program_mgcg_hw_sequence(rdev, ps, count);
979 }
980
981 static void ni_ls_clockgating_default(struct radeon_device *rdev)
982 {
983         u32 count;
984         const u32 *ps = NULL;
985
986         ps = (const u32 *)&cayman_sysls_default;
987         count = CAYMAN_SYSLS_DEFAULT_LENGTH;
988
989         btc_program_mgcg_hw_sequence(rdev, ps, count);
990 }
991
992 static void ni_ls_clockgating_enable(struct radeon_device *rdev,
993                                      bool enable)
994 {
995         u32 count;
996         const u32 *ps = NULL;
997
998         if (enable) {
999                 ps = (const u32 *)&cayman_sysls_enable;
1000                 count = CAYMAN_SYSLS_ENABLE_LENGTH;
1001         } else {
1002                 ps = (const u32 *)&cayman_sysls_disable;
1003                 count = CAYMAN_SYSLS_DISABLE_LENGTH;
1004         }
1005
1006         btc_program_mgcg_hw_sequence(rdev, ps, count);
1007
1008 }
1009
1010 static int ni_patch_single_dependency_table_based_on_leakage(struct radeon_device *rdev,
1011                                                              struct radeon_clock_voltage_dependency_table *table)
1012 {
1013         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1014         u32 i;
1015
1016         if (table) {
1017                 for (i = 0; i < table->count; i++) {
1018                         if (0xff01 == table->entries[i].v) {
1019                                 if (pi->max_vddc == 0)
1020                                         return -EINVAL;
1021                                 table->entries[i].v = pi->max_vddc;
1022                         }
1023                 }
1024         }
1025         return 0;
1026 }
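/*
 * 0xff01 in a dependency-table entry is used as a placeholder (virtual
 * voltage ID) for a board-specific leakage voltage; the patch above
 * replaces it with the real maximum VDDC read from the voltage table,
 * and fails with -EINVAL if that value is not known.
 */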
1027
1028 static int ni_patch_dependency_tables_based_on_leakage(struct radeon_device *rdev)
1029 {
1030         int ret = 0;
1031
1032         ret = ni_patch_single_dependency_table_based_on_leakage(rdev,
1033                                                                 &rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk);
1034
1035         ret = ni_patch_single_dependency_table_based_on_leakage(rdev,
1036                                                                 &rdev->pm.dpm.dyn_state.vddc_dependency_on_mclk);
1037         return ret;
1038 }
1039
1040 static void ni_stop_dpm(struct radeon_device *rdev)
1041 {
1042         WREG32_P(GENERAL_PWRMGT, 0, ~GLOBAL_PWRMGT_EN);
1043 }
1044
1045 #if 0
1046 static int ni_notify_hw_of_power_source(struct radeon_device *rdev,
1047                                         bool ac_power)
1048 {
1049         if (ac_power)
1050                 return (rv770_send_msg_to_smc(rdev, PPSMC_MSG_RunningOnAC) == PPSMC_Result_OK) ?
1051                         0 : -EINVAL;
1052
1053         return 0;
1054 }
1055 #endif
1056
1057 static PPSMC_Result ni_send_msg_to_smc_with_parameter(struct radeon_device *rdev,
1058                                                       PPSMC_Msg msg, u32 parameter)
1059 {
1060         WREG32(SMC_SCRATCH0, parameter);
1061         return rv770_send_msg_to_smc(rdev, msg);
1062 }
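/*
 * The SMC message protocol used here takes a single u32 argument: the
 * parameter is written to the SMC_SCRATCH0 mailbox register before the
 * message itself is issued via rv770_send_msg_to_smc(), from where the
 * SMC firmware is expected to pick it up.  The helpers below use this
 * to pass level counts/indices with the PPSMC_MSG_Set*Levels messages.
 */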
1063
1064 static int ni_restrict_performance_levels_before_switch(struct radeon_device *rdev)
1065 {
1066         if (rv770_send_msg_to_smc(rdev, PPSMC_MSG_NoForcedLevel) != PPSMC_Result_OK)
1067                 return -EINVAL;
1068
1069         return (ni_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetEnabledLevels, 1) == PPSMC_Result_OK) ?
1070                 0 : -EINVAL;
1071 }
1072
1073 int ni_dpm_force_performance_level(struct radeon_device *rdev,
1074                                    enum radeon_dpm_forced_level level)
1075 {
1076         if (level == RADEON_DPM_FORCED_LEVEL_HIGH) {
1077                 if (ni_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetEnabledLevels, 0) != PPSMC_Result_OK)
1078                         return -EINVAL;
1079
1080                 if (ni_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetForcedLevels, 1) != PPSMC_Result_OK)
1081                         return -EINVAL;
1082         } else if (level == RADEON_DPM_FORCED_LEVEL_LOW) {
1083                 if (ni_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetForcedLevels, 0) != PPSMC_Result_OK)
1084                         return -EINVAL;
1085
1086                 if (ni_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetEnabledLevels, 1) != PPSMC_Result_OK)
1087                         return -EINVAL;
1088         } else if (level == RADEON_DPM_FORCED_LEVEL_AUTO) {
1089                 if (ni_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetForcedLevels, 0) != PPSMC_Result_OK)
1090                         return -EINVAL;
1091
1092                 if (ni_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetEnabledLevels, 0) != PPSMC_Result_OK)
1093                         return -EINVAL;
1094         }
1095
1096         rdev->pm.dpm.forced_level = level;
1097
1098         return 0;
1099 }
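/*
 * Reading of the message arguments above (inferred from usage rather
 * than SMC documentation): SetEnabledLevels 0 appears to mean "all
 * levels enabled" while 1 restricts the SMC to a single (lowest)
 * level, and SetForcedLevels 1 forces the highest level while 0
 * releases the force.  HIGH therefore enables everything and forces
 * the top level, LOW releases the force and restricts to one level,
 * and AUTO releases both restrictions.
 */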
1100
1101 static void ni_stop_smc(struct radeon_device *rdev)
1102 {
1103         u32 tmp;
1104         int i;
1105
1106         for (i = 0; i < rdev->usec_timeout; i++) {
1107                 tmp = RREG32(LB_SYNC_RESET_SEL) & LB_SYNC_RESET_SEL_MASK;
1108                 if (tmp != 1)
1109                         break;
1110                 udelay(1);
1111         }
1112
1113         udelay(100);
1114
1115         r7xx_stop_smc(rdev);
1116 }
1117
1118 static int ni_process_firmware_header(struct radeon_device *rdev)
1119 {
1120         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1121         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1122         struct ni_power_info *ni_pi = ni_get_pi(rdev);
1123         u32 tmp;
1124         int ret;
1125
1126         ret = rv770_read_smc_sram_dword(rdev,
1127                                         NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
1128                                         NISLANDS_SMC_FIRMWARE_HEADER_stateTable,
1129                                         &tmp, pi->sram_end);
1130
1131         if (ret)
1132                 return ret;
1133
1134         pi->state_table_start = (u16)tmp;
1135
1136         ret = rv770_read_smc_sram_dword(rdev,
1137                                         NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
1138                                         NISLANDS_SMC_FIRMWARE_HEADER_softRegisters,
1139                                         &tmp, pi->sram_end);
1140
1141         if (ret)
1142                 return ret;
1143
1144         pi->soft_regs_start = (u16)tmp;
1145
1146         ret = rv770_read_smc_sram_dword(rdev,
1147                                         NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
1148                                         NISLANDS_SMC_FIRMWARE_HEADER_mcRegisterTable,
1149                                         &tmp, pi->sram_end);
1150
1151         if (ret)
1152                 return ret;
1153
1154         eg_pi->mc_reg_table_start = (u16)tmp;
1155
1156         ret = rv770_read_smc_sram_dword(rdev,
1157                                         NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
1158                                         NISLANDS_SMC_FIRMWARE_HEADER_fanTable,
1159                                         &tmp, pi->sram_end);
1160
1161         if (ret)
1162                 return ret;
1163
1164         ni_pi->fan_table_start = (u16)tmp;
1165
1166         ret = rv770_read_smc_sram_dword(rdev,
1167                                         NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
1168                                         NISLANDS_SMC_FIRMWARE_HEADER_mcArbDramAutoRefreshTable,
1169                                         &tmp, pi->sram_end);
1170
1171         if (ret)
1172                 return ret;
1173
1174         ni_pi->arb_table_start = (u16)tmp;
1175
1176         ret = rv770_read_smc_sram_dword(rdev,
1177                                         NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
1178                                         NISLANDS_SMC_FIRMWARE_HEADER_cacTable,
1179                                         &tmp, pi->sram_end);
1180
1181         if (ret)
1182                 return ret;
1183
1184         ni_pi->cac_table_start = (u16)tmp;
1185
1186         ret = rv770_read_smc_sram_dword(rdev,
1187                                         NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
1188                                         NISLANDS_SMC_FIRMWARE_HEADER_spllTable,
1189                                         &tmp, pi->sram_end);
1190
1191         if (ret)
1192                 return ret;
1193
1194         ni_pi->spll_table_start = (u16)tmp;
1195
1196
1197         return ret;
1198 }
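/*
 * All of the offsets gathered above are 16-bit locations inside SMC
 * SRAM, read from the firmware header at
 * NISLANDS_SMC_FIRMWARE_HEADER_LOCATION; they tell the driver where to
 * upload the state table, soft registers, MC register table, fan
 * table, ARB/DRAM auto-refresh table, CAC table and SPLL table for
 * this particular SMC firmware build.
 */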
1199
1200 static void ni_read_clock_registers(struct radeon_device *rdev)
1201 {
1202         struct ni_power_info *ni_pi = ni_get_pi(rdev);
1203
1204         ni_pi->clock_registers.cg_spll_func_cntl = RREG32(CG_SPLL_FUNC_CNTL);
1205         ni_pi->clock_registers.cg_spll_func_cntl_2 = RREG32(CG_SPLL_FUNC_CNTL_2);
1206         ni_pi->clock_registers.cg_spll_func_cntl_3 = RREG32(CG_SPLL_FUNC_CNTL_3);
1207         ni_pi->clock_registers.cg_spll_func_cntl_4 = RREG32(CG_SPLL_FUNC_CNTL_4);
1208         ni_pi->clock_registers.cg_spll_spread_spectrum = RREG32(CG_SPLL_SPREAD_SPECTRUM);
1209         ni_pi->clock_registers.cg_spll_spread_spectrum_2 = RREG32(CG_SPLL_SPREAD_SPECTRUM_2);
1210         ni_pi->clock_registers.mpll_ad_func_cntl = RREG32(MPLL_AD_FUNC_CNTL);
1211         ni_pi->clock_registers.mpll_ad_func_cntl_2 = RREG32(MPLL_AD_FUNC_CNTL_2);
1212         ni_pi->clock_registers.mpll_dq_func_cntl = RREG32(MPLL_DQ_FUNC_CNTL);
1213         ni_pi->clock_registers.mpll_dq_func_cntl_2 = RREG32(MPLL_DQ_FUNC_CNTL_2);
1214         ni_pi->clock_registers.mclk_pwrmgt_cntl = RREG32(MCLK_PWRMGT_CNTL);
1215         ni_pi->clock_registers.dll_cntl = RREG32(DLL_CNTL);
1216         ni_pi->clock_registers.mpll_ss1 = RREG32(MPLL_SS1);
1217         ni_pi->clock_registers.mpll_ss2 = RREG32(MPLL_SS2);
1218 }
1219
1220 #if 0
1221 static int ni_enter_ulp_state(struct radeon_device *rdev)
1222 {
1223         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1224
1225         if (pi->gfx_clock_gating) {
1226                 WREG32_P(SCLK_PWRMGT_CNTL, 0, ~DYN_GFX_CLK_OFF_EN);
1227                 WREG32_P(SCLK_PWRMGT_CNTL, GFX_CLK_FORCE_ON, ~GFX_CLK_FORCE_ON);
1228                 WREG32_P(SCLK_PWRMGT_CNTL, 0, ~GFX_CLK_FORCE_ON);
1229                 RREG32(GB_ADDR_CONFIG);
1230         }
1231
1232         WREG32_P(SMC_MSG, HOST_SMC_MSG(PPSMC_MSG_SwitchToMinimumPower),
1233                  ~HOST_SMC_MSG_MASK);
1234
1235         udelay(25000);
1236
1237         return 0;
1238 }
1239 #endif
1240
1241 static void ni_program_response_times(struct radeon_device *rdev)
1242 {
1243         u32 voltage_response_time, backbias_response_time, acpi_delay_time, vbi_time_out;
1244         u32 vddc_dly, bb_dly, acpi_dly, vbi_dly, mclk_switch_limit;
1245         u32 reference_clock;
1246
1247         rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_mvdd_chg_time, 1);
1248
1249         voltage_response_time = (u32)rdev->pm.dpm.voltage_response_time;
1250         backbias_response_time = (u32)rdev->pm.dpm.backbias_response_time;
1251
1252         if (voltage_response_time == 0)
1253                 voltage_response_time = 1000;
1254
1255         if (backbias_response_time == 0)
1256                 backbias_response_time = 1000;
1257
1258         acpi_delay_time = 15000;
1259         vbi_time_out = 100000;
1260
1261         reference_clock = radeon_get_xclk(rdev);
1262
1263         vddc_dly = (voltage_response_time  * reference_clock) / 1600;
1264         bb_dly   = (backbias_response_time * reference_clock) / 1600;
1265         acpi_dly = (acpi_delay_time * reference_clock) / 1600;
1266         vbi_dly  = (vbi_time_out * reference_clock) / 1600;
1267
1268         mclk_switch_limit = (460 * reference_clock) / 100;
1269
1270         rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_delay_vreg,  vddc_dly);
1271         rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_delay_bbias, bb_dly);
1272         rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_delay_acpi,  acpi_dly);
1273         rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_mclk_chg_timeout, vbi_dly);
1274         rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_mc_block_delay, 0xAA);
1275         rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_mclk_switch_lim, mclk_switch_limit);
1276 }
1277
1278 static void ni_populate_smc_voltage_table(struct radeon_device *rdev,
1279                                           struct atom_voltage_table *voltage_table,
1280                                           NISLANDS_SMC_STATETABLE *table)
1281 {
1282         unsigned int i;
1283
1284         for (i = 0; i < voltage_table->count; i++) {
1285                 table->highSMIO[i] = 0;
1286                 table->lowSMIO[i] |= cpu_to_be32(voltage_table->entries[i].smio_low);
1287         }
1288 }
1289
1290 static void ni_populate_smc_voltage_tables(struct radeon_device *rdev,
1291                                            NISLANDS_SMC_STATETABLE *table)
1292 {
1293         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1294         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1295         unsigned char i;
1296
1297         if (eg_pi->vddc_voltage_table.count) {
1298                 ni_populate_smc_voltage_table(rdev, &eg_pi->vddc_voltage_table, table);
1299                 table->voltageMaskTable.highMask[NISLANDS_SMC_VOLTAGEMASK_VDDC] = 0;
1300                 table->voltageMaskTable.lowMask[NISLANDS_SMC_VOLTAGEMASK_VDDC] =
1301                         cpu_to_be32(eg_pi->vddc_voltage_table.mask_low);
1302
1303                 for (i = 0; i < eg_pi->vddc_voltage_table.count; i++) {
1304                         if (pi->max_vddc_in_table <= eg_pi->vddc_voltage_table.entries[i].value) {
1305                                 table->maxVDDCIndexInPPTable = i;
1306                                 break;
1307                         }
1308                 }
1309         }
1310
1311         if (eg_pi->vddci_voltage_table.count) {
1312                 ni_populate_smc_voltage_table(rdev, &eg_pi->vddci_voltage_table, table);
1313
1314                 table->voltageMaskTable.highMask[NISLANDS_SMC_VOLTAGEMASK_VDDCI] = 0;
1315                 table->voltageMaskTable.lowMask[NISLANDS_SMC_VOLTAGEMASK_VDDCI] =
1316                         cpu_to_be32(eg_pi->vddci_voltage_table.mask_low);
1317         }
1318 }
1319
1320 static int ni_populate_voltage_value(struct radeon_device *rdev,
1321                                      struct atom_voltage_table *table,
1322                                      u16 value,
1323                                      NISLANDS_SMC_VOLTAGE_VALUE *voltage)
1324 {
1325         unsigned int i;
1326
1327         for (i = 0; i < table->count; i++) {
1328                 if (value <= table->entries[i].value) {
1329                         voltage->index = (u8)i;
1330                         voltage->value = cpu_to_be16(table->entries[i].value);
1331                         break;
1332                 }
1333         }
1334
1335         if (i >= table->count)
1336                 return -EINVAL;
1337
1338         return 0;
1339 }
1340
1341 static void ni_populate_mvdd_value(struct radeon_device *rdev,
1342                                    u32 mclk,
1343                                    NISLANDS_SMC_VOLTAGE_VALUE *voltage)
1344 {
1345         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1346         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1347
1348         if (!pi->mvdd_control) {
1349                 voltage->index = eg_pi->mvdd_high_index;
1350                 voltage->value = cpu_to_be16(MVDD_HIGH_VALUE);
1351                 return;
1352         }
1353
1354         if (mclk <= pi->mvdd_split_frequency) {
1355                 voltage->index = eg_pi->mvdd_low_index;
1356                 voltage->value = cpu_to_be16(MVDD_LOW_VALUE);
1357         } else {
1358                 voltage->index = eg_pi->mvdd_high_index;
1359                 voltage->value = cpu_to_be16(MVDD_HIGH_VALUE);
1360         }
1361 }
1362
1363 static int ni_get_std_voltage_value(struct radeon_device *rdev,
1364                                     NISLANDS_SMC_VOLTAGE_VALUE *voltage,
1365                                     u16 *std_voltage)
1366 {
1367         if (rdev->pm.dpm.dyn_state.cac_leakage_table.entries &&
1368             ((u32)voltage->index < rdev->pm.dpm.dyn_state.cac_leakage_table.count))
1369                 *std_voltage = rdev->pm.dpm.dyn_state.cac_leakage_table.entries[voltage->index].vddc;
1370         else
1371                 *std_voltage = be16_to_cpu(voltage->value);
1372
1373         return 0;
1374 }
1375
1376 static void ni_populate_std_voltage_value(struct radeon_device *rdev,
1377                                           u16 value, u8 index,
1378                                           NISLANDS_SMC_VOLTAGE_VALUE *voltage)
1379 {
1380         voltage->index = index;
1381         voltage->value = cpu_to_be16(value);
1382 }
1383
1384 static u32 ni_get_smc_power_scaling_factor(struct radeon_device *rdev)
1385 {
1386         u32 xclk_period;
1387         u32 xclk = radeon_get_xclk(rdev);
1388         u32 tmp = RREG32(CG_CAC_CTRL) & TID_CNT_MASK;
1389
1390         xclk_period = (1000000000UL / xclk);
1391         xclk_period /= 10000UL;
1392
1393         return tmp * xclk_period;
1394 }
1395
1396 static u32 ni_scale_power_for_smc(u32 power_in_watts, u32 scaling_factor)
1397 {
1398         return (power_in_watts * scaling_factor) << 2;
1399 }
1400
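     /*
      * DPM2 power boost limit for states with at least three performance
      * levels:
      *
      *   boost_limit = near_tdp_limit * (Vmed / Vhigh)^2 * 90 / 100
      *
      * where Vmed and Vhigh are the standard VDDC values of the second
      * highest and highest levels.  Any lookup failure or 32-bit overflow
      * disables the boost limit (returns 0).
      */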
1401 static u32 ni_calculate_power_boost_limit(struct radeon_device *rdev,
1402                                           struct radeon_ps *radeon_state,
1403                                           u32 near_tdp_limit)
1404 {
1405         struct ni_ps *state = ni_get_ps(radeon_state);
1406         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1407         struct ni_power_info *ni_pi = ni_get_pi(rdev);
1408         u32 power_boost_limit = 0;
1409         int ret;
1410
1411         if (ni_pi->enable_power_containment &&
1412             ni_pi->use_power_boost_limit) {
1413                 NISLANDS_SMC_VOLTAGE_VALUE vddc;
1414                 u16 std_vddc_med;
1415                 u16 std_vddc_high;
1416                 u64 tmp, n, d;
1417
1418                 if (state->performance_level_count < 3)
1419                         return 0;
1420
1421                 ret = ni_populate_voltage_value(rdev, &eg_pi->vddc_voltage_table,
1422                                                 state->performance_levels[state->performance_level_count - 2].vddc,
1423                                                 &vddc);
1424                 if (ret)
1425                         return 0;
1426
1427                 ret = ni_get_std_voltage_value(rdev, &vddc, &std_vddc_med);
1428                 if (ret)
1429                         return 0;
1430
1431                 ret = ni_populate_voltage_value(rdev, &eg_pi->vddc_voltage_table,
1432                                                 state->performance_levels[state->performance_level_count - 1].vddc,
1433                                                 &vddc);
1434                 if (ret)
1435                         return 0;
1436
1437                 ret = ni_get_std_voltage_value(rdev, &vddc, &std_vddc_high);
1438                 if (ret)
1439                         return 0;
1440
1441                 n = ((u64)near_tdp_limit * ((u64)std_vddc_med * (u64)std_vddc_med) * 90);
1442                 d = ((u64)std_vddc_high * (u64)std_vddc_high * 100);
1443                 tmp = div64_u64(n, d);
1444
1445                 if (tmp >> 32)
1446                         return 0;
1447                 power_boost_limit = (u32)tmp;
1448         }
1449
1450         return power_boost_limit;
1451 }
1452
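     /*
      * Apply the percentage TDP adjustment (bounded by tdp_od_limit) to
      * the TDP limit and shift the near-TDP limit by the same absolute
      * amount, in the direction given by adjust_polarity.
      */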
1453 static int ni_calculate_adjusted_tdp_limits(struct radeon_device *rdev,
1454                                             bool adjust_polarity,
1455                                             u32 tdp_adjustment,
1456                                             u32 *tdp_limit,
1457                                             u32 *near_tdp_limit)
1458 {
1459         if (tdp_adjustment > (u32)rdev->pm.dpm.tdp_od_limit)
1460                 return -EINVAL;
1461
1462         if (adjust_polarity) {
1463                 *tdp_limit = ((100 + tdp_adjustment) * rdev->pm.dpm.tdp_limit) / 100;
1464                 *near_tdp_limit = rdev->pm.dpm.near_tdp_limit + (*tdp_limit - rdev->pm.dpm.tdp_limit);
1465         } else {
1466                 *tdp_limit = ((100 - tdp_adjustment) * rdev->pm.dpm.tdp_limit) / 100;
1467                 *near_tdp_limit = rdev->pm.dpm.near_tdp_limit - (rdev->pm.dpm.tdp_limit - *tdp_limit);
1468         }
1469
1470         return 0;
1471 }
1472
1473 static int ni_populate_smc_tdp_limits(struct radeon_device *rdev,
1474                                       struct radeon_ps *radeon_state)
1475 {
1476         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1477         struct ni_power_info *ni_pi = ni_get_pi(rdev);
1478
1479         if (ni_pi->enable_power_containment) {
1480                 NISLANDS_SMC_STATETABLE *smc_table = &ni_pi->smc_statetable;
1481                 u32 scaling_factor = ni_get_smc_power_scaling_factor(rdev);
1482                 u32 tdp_limit;
1483                 u32 near_tdp_limit;
1484                 u32 power_boost_limit;
1485                 int ret;
1486
1487                 if (scaling_factor == 0)
1488                         return -EINVAL;
1489
1490                 memset(smc_table, 0, sizeof(NISLANDS_SMC_STATETABLE));
1491
1492                 ret = ni_calculate_adjusted_tdp_limits(rdev,
1493                                                        false, /* ??? */
1494                                                        rdev->pm.dpm.tdp_adjustment,
1495                                                        &tdp_limit,
1496                                                        &near_tdp_limit);
1497                 if (ret)
1498                         return ret;
1499
1500                 power_boost_limit = ni_calculate_power_boost_limit(rdev, radeon_state,
1501                                                                    near_tdp_limit);
1502
1503                 smc_table->dpm2Params.TDPLimit =
1504                         cpu_to_be32(ni_scale_power_for_smc(tdp_limit, scaling_factor));
1505                 smc_table->dpm2Params.NearTDPLimit =
1506                         cpu_to_be32(ni_scale_power_for_smc(near_tdp_limit, scaling_factor));
1507                 smc_table->dpm2Params.SafePowerLimit =
1508                         cpu_to_be32(ni_scale_power_for_smc((near_tdp_limit * NISLANDS_DPM2_TDP_SAFE_LIMIT_PERCENT) / 100,
1509                                                            scaling_factor));
1510                 smc_table->dpm2Params.PowerBoostLimit =
1511                         cpu_to_be32(ni_scale_power_for_smc(power_boost_limit, scaling_factor));
1512
1513                 ret = rv770_copy_bytes_to_smc(rdev,
1514                                               (u16)(pi->state_table_start + offsetof(NISLANDS_SMC_STATETABLE, dpm2Params) +
1515                                                     offsetof(PP_NIslands_DPM2Parameters, TDPLimit)),
1516                                               (u8 *)(&smc_table->dpm2Params.TDPLimit),
1517                                               sizeof(u32) * 4, pi->sram_end);
1518                 if (ret)
1519                         return ret;
1520         }
1521
1522         return 0;
1523 }
1524
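     /*
      * Copy the MC arbiter DRAM timing registers and burst time field from
      * one arbiter set (F0-F3) to another, then request a switch to the
      * destination set through MC_ARB_CG.
      */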
1525 int ni_copy_and_switch_arb_sets(struct radeon_device *rdev,
1526                                 u32 arb_freq_src, u32 arb_freq_dest)
1527 {
1528         u32 mc_arb_dram_timing;
1529         u32 mc_arb_dram_timing2;
1530         u32 burst_time;
1531         u32 mc_cg_config;
1532
1533         switch (arb_freq_src) {
1534         case MC_CG_ARB_FREQ_F0:
1535                 mc_arb_dram_timing  = RREG32(MC_ARB_DRAM_TIMING);
1536                 mc_arb_dram_timing2 = RREG32(MC_ARB_DRAM_TIMING2);
1537                 burst_time = (RREG32(MC_ARB_BURST_TIME) & STATE0_MASK) >> STATE0_SHIFT;
1538                 break;
1539         case MC_CG_ARB_FREQ_F1:
1540                 mc_arb_dram_timing  = RREG32(MC_ARB_DRAM_TIMING_1);
1541                 mc_arb_dram_timing2 = RREG32(MC_ARB_DRAM_TIMING2_1);
1542                 burst_time = (RREG32(MC_ARB_BURST_TIME) & STATE1_MASK) >> STATE1_SHIFT;
1543                 break;
1544         case MC_CG_ARB_FREQ_F2:
1545                 mc_arb_dram_timing  = RREG32(MC_ARB_DRAM_TIMING_2);
1546                 mc_arb_dram_timing2 = RREG32(MC_ARB_DRAM_TIMING2_2);
1547                 burst_time = (RREG32(MC_ARB_BURST_TIME) & STATE2_MASK) >> STATE2_SHIFT;
1548                 break;
1549         case MC_CG_ARB_FREQ_F3:
1550                 mc_arb_dram_timing  = RREG32(MC_ARB_DRAM_TIMING_3);
1551                 mc_arb_dram_timing2 = RREG32(MC_ARB_DRAM_TIMING2_3);
1552                 burst_time = (RREG32(MC_ARB_BURST_TIME) & STATE3_MASK) >> STATE3_SHIFT;
1553                 break;
1554         default:
1555                 return -EINVAL;
1556         }
1557
1558         switch (arb_freq_dest) {
1559         case MC_CG_ARB_FREQ_F0:
1560                 WREG32(MC_ARB_DRAM_TIMING, mc_arb_dram_timing);
1561                 WREG32(MC_ARB_DRAM_TIMING2, mc_arb_dram_timing2);
1562                 WREG32_P(MC_ARB_BURST_TIME, STATE0(burst_time), ~STATE0_MASK);
1563                 break;
1564         case MC_CG_ARB_FREQ_F1:
1565                 WREG32(MC_ARB_DRAM_TIMING_1, mc_arb_dram_timing);
1566                 WREG32(MC_ARB_DRAM_TIMING2_1, mc_arb_dram_timing2);
1567                 WREG32_P(MC_ARB_BURST_TIME, STATE1(burst_time), ~STATE1_MASK);
1568                 break;
1569         case MC_CG_ARB_FREQ_F2:
1570                 WREG32(MC_ARB_DRAM_TIMING_2, mc_arb_dram_timing);
1571                 WREG32(MC_ARB_DRAM_TIMING2_2, mc_arb_dram_timing2);
1572                 WREG32_P(MC_ARB_BURST_TIME, STATE2(burst_time), ~STATE2_MASK);
1573                 break;
1574         case MC_CG_ARB_FREQ_F3:
1575                 WREG32(MC_ARB_DRAM_TIMING_3, mc_arb_dram_timing);
1576                 WREG32(MC_ARB_DRAM_TIMING2_3, mc_arb_dram_timing2);
1577                 WREG32_P(MC_ARB_BURST_TIME, STATE3(burst_time), ~STATE3_MASK);
1578                 break;
1579         default:
1580                 return -EINVAL;
1581         }
1582
1583         mc_cg_config = RREG32(MC_CG_CONFIG) | 0x0000000F;
1584         WREG32(MC_CG_CONFIG, mc_cg_config);
1585         WREG32_P(MC_ARB_CG, CG_ARB_REQ(arb_freq_dest), ~CG_ARB_REQ_MASK);
1586
1587         return 0;
1588 }
1589
1590 static int ni_init_arb_table_index(struct radeon_device *rdev)
1591 {
1592         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1593         struct ni_power_info *ni_pi = ni_get_pi(rdev);
1594         u32 tmp;
1595         int ret;
1596
1597         ret = rv770_read_smc_sram_dword(rdev, ni_pi->arb_table_start,
1598                                         &tmp, pi->sram_end);
1599         if (ret)
1600                 return ret;
1601
1602         tmp &= 0x00FFFFFF;
1603         tmp |= ((u32)MC_CG_ARB_FREQ_F1) << 24;
1604
1605         return rv770_write_smc_sram_dword(rdev, ni_pi->arb_table_start,
1606                                           tmp, pi->sram_end);
1607 }
1608
1609 static int ni_initial_switch_from_arb_f0_to_f1(struct radeon_device *rdev)
1610 {
1611         return ni_copy_and_switch_arb_sets(rdev, MC_CG_ARB_FREQ_F0, MC_CG_ARB_FREQ_F1);
1612 }
1613
1614 static int ni_force_switch_to_arb_f0(struct radeon_device *rdev)
1615 {
1616         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1617         struct ni_power_info *ni_pi = ni_get_pi(rdev);
1618         u32 tmp;
1619         int ret;
1620
1621         ret = rv770_read_smc_sram_dword(rdev, ni_pi->arb_table_start,
1622                                         &tmp, pi->sram_end);
1623         if (ret)
1624                 return ret;
1625
1626         tmp = (tmp >> 24) & 0xff;
1627
1628         if (tmp == MC_CG_ARB_FREQ_F0)
1629                 return 0;
1630
1631         return ni_copy_and_switch_arb_sets(rdev, tmp, MC_CG_ARB_FREQ_F0);
1632 }
1633
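     /*
      * Fill one SMC arbiter register set for a performance level: the
      * refresh rate derived from sclk, plus the DRAM timing registers the
      * ATOM tables program for this sclk/mclk pair.
      */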
1634 static int ni_populate_memory_timing_parameters(struct radeon_device *rdev,
1635                                                 struct rv7xx_pl *pl,
1636                                                 SMC_NIslands_MCArbDramTimingRegisterSet *arb_regs)
1637 {
1638         u32 dram_timing;
1639         u32 dram_timing2;
1640
1641         arb_regs->mc_arb_rfsh_rate =
1642                 (u8)rv770_calculate_memory_refresh_rate(rdev, pl->sclk);
1643
1644
1645         radeon_atom_set_engine_dram_timings(rdev,
1646                                             pl->sclk,
1647                                             pl->mclk);
1648
1649         dram_timing = RREG32(MC_ARB_DRAM_TIMING);
1650         dram_timing2 = RREG32(MC_ARB_DRAM_TIMING2);
1651
1652         arb_regs->mc_arb_dram_timing  = cpu_to_be32(dram_timing);
1653         arb_regs->mc_arb_dram_timing2 = cpu_to_be32(dram_timing2);
1654
1655         return 0;
1656 }
1657
1658 static int ni_do_program_memory_timing_parameters(struct radeon_device *rdev,
1659                                                   struct radeon_ps *radeon_state,
1660                                                   unsigned int first_arb_set)
1661 {
1662         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1663         struct ni_power_info *ni_pi = ni_get_pi(rdev);
1664         struct ni_ps *state = ni_get_ps(radeon_state);
1665         SMC_NIslands_MCArbDramTimingRegisterSet arb_regs = { 0 };
1666         int i, ret = 0;
1667
1668         for (i = 0; i < state->performance_level_count; i++) {
1669                 ret = ni_populate_memory_timing_parameters(rdev, &state->performance_levels[i], &arb_regs);
1670                 if (ret)
1671                         break;
1672
1673                 ret = rv770_copy_bytes_to_smc(rdev,
1674                                               (u16)(ni_pi->arb_table_start +
1675                                                     offsetof(SMC_NIslands_MCArbDramTimingRegisters, data) +
1676                                                     sizeof(SMC_NIslands_MCArbDramTimingRegisterSet) * (first_arb_set + i)),
1677                                               (u8 *)&arb_regs,
1678                                               (u16)sizeof(SMC_NIslands_MCArbDramTimingRegisterSet),
1679                                               pi->sram_end);
1680                 if (ret)
1681                         break;
1682         }
1683         return ret;
1684 }
1685
1686 static int ni_program_memory_timing_parameters(struct radeon_device *rdev,
1687                                                struct radeon_ps *radeon_new_state)
1688 {
1689         return ni_do_program_memory_timing_parameters(rdev, radeon_new_state,
1690                                                       NISLANDS_DRIVER_STATE_ARB_INDEX);
1691 }
1692
1693 static void ni_populate_initial_mvdd_value(struct radeon_device *rdev,
1694                                            struct NISLANDS_SMC_VOLTAGE_VALUE *voltage)
1695 {
1696         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1697
1698         voltage->index = eg_pi->mvdd_high_index;
1699         voltage->value = cpu_to_be16(MVDD_HIGH_VALUE);
1700 }
1701
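     /*
      * Build the single-level initial (boot) state in the SMC state table
      * from the saved clock registers and the boot performance level:
      * MPLL/SPLL settings, VDDC/VDDCI/MVDD, arbiter index, PCIe gen2 flag,
      * GDDR5 strobe/EDC flags and neutral DPM2/SQ throttle values.
      */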
1702 static int ni_populate_smc_initial_state(struct radeon_device *rdev,
1703                                          struct radeon_ps *radeon_initial_state,
1704                                          NISLANDS_SMC_STATETABLE *table)
1705 {
1706         struct ni_ps *initial_state = ni_get_ps(radeon_initial_state);
1707         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1708         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1709         struct ni_power_info *ni_pi = ni_get_pi(rdev);
1710         u32 reg;
1711         int ret;
1712
1713         table->initialState.levels[0].mclk.vMPLL_AD_FUNC_CNTL =
1714                 cpu_to_be32(ni_pi->clock_registers.mpll_ad_func_cntl);
1715         table->initialState.levels[0].mclk.vMPLL_AD_FUNC_CNTL_2 =
1716                 cpu_to_be32(ni_pi->clock_registers.mpll_ad_func_cntl_2);
1717         table->initialState.levels[0].mclk.vMPLL_DQ_FUNC_CNTL =
1718                 cpu_to_be32(ni_pi->clock_registers.mpll_dq_func_cntl);
1719         table->initialState.levels[0].mclk.vMPLL_DQ_FUNC_CNTL_2 =
1720                 cpu_to_be32(ni_pi->clock_registers.mpll_dq_func_cntl_2);
1721         table->initialState.levels[0].mclk.vMCLK_PWRMGT_CNTL =
1722                 cpu_to_be32(ni_pi->clock_registers.mclk_pwrmgt_cntl);
1723         table->initialState.levels[0].mclk.vDLL_CNTL =
1724                 cpu_to_be32(ni_pi->clock_registers.dll_cntl);
1725         table->initialState.levels[0].mclk.vMPLL_SS =
1726                 cpu_to_be32(ni_pi->clock_registers.mpll_ss1);
1727         table->initialState.levels[0].mclk.vMPLL_SS2 =
1728                 cpu_to_be32(ni_pi->clock_registers.mpll_ss2);
1729         table->initialState.levels[0].mclk.mclk_value =
1730                 cpu_to_be32(initial_state->performance_levels[0].mclk);
1731
1732         table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL =
1733                 cpu_to_be32(ni_pi->clock_registers.cg_spll_func_cntl);
1734         table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_2 =
1735                 cpu_to_be32(ni_pi->clock_registers.cg_spll_func_cntl_2);
1736         table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_3 =
1737                 cpu_to_be32(ni_pi->clock_registers.cg_spll_func_cntl_3);
1738         table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_4 =
1739                 cpu_to_be32(ni_pi->clock_registers.cg_spll_func_cntl_4);
1740         table->initialState.levels[0].sclk.vCG_SPLL_SPREAD_SPECTRUM =
1741                 cpu_to_be32(ni_pi->clock_registers.cg_spll_spread_spectrum);
1742         table->initialState.levels[0].sclk.vCG_SPLL_SPREAD_SPECTRUM_2 =
1743                 cpu_to_be32(ni_pi->clock_registers.cg_spll_spread_spectrum_2);
1744         table->initialState.levels[0].sclk.sclk_value =
1745                 cpu_to_be32(initial_state->performance_levels[0].sclk);
1746         table->initialState.levels[0].arbRefreshState =
1747                 NISLANDS_INITIAL_STATE_ARB_INDEX;
1748
1749         table->initialState.levels[0].ACIndex = 0;
1750
1751         ret = ni_populate_voltage_value(rdev, &eg_pi->vddc_voltage_table,
1752                                         initial_state->performance_levels[0].vddc,
1753                                         &table->initialState.levels[0].vddc);
1754         if (!ret) {
1755                 u16 std_vddc;
1756
1757                 ret = ni_get_std_voltage_value(rdev,
1758                                                &table->initialState.levels[0].vddc,
1759                                                &std_vddc);
1760                 if (!ret)
1761                         ni_populate_std_voltage_value(rdev, std_vddc,
1762                                                       table->initialState.levels[0].vddc.index,
1763                                                       &table->initialState.levels[0].std_vddc);
1764         }
1765
1766         if (eg_pi->vddci_control)
1767                 ni_populate_voltage_value(rdev,
1768                                           &eg_pi->vddci_voltage_table,
1769                                           initial_state->performance_levels[0].vddci,
1770                                           &table->initialState.levels[0].vddci);
1771
1772         ni_populate_initial_mvdd_value(rdev, &table->initialState.levels[0].mvdd);
1773
1774         reg = CG_R(0xffff) | CG_L(0);
1775         table->initialState.levels[0].aT = cpu_to_be32(reg);
1776
1777         table->initialState.levels[0].bSP = cpu_to_be32(pi->dsp);
1778
1779         if (pi->boot_in_gen2)
1780                 table->initialState.levels[0].gen2PCIE = 1;
1781         else
1782                 table->initialState.levels[0].gen2PCIE = 0;
1783
1784         if (pi->mem_gddr5) {
1785                 table->initialState.levels[0].strobeMode =
1786                         cypress_get_strobe_mode_settings(rdev,
1787                                                          initial_state->performance_levels[0].mclk);
1788
1789                 if (initial_state->performance_levels[0].mclk > pi->mclk_edc_enable_threshold)
1790                         table->initialState.levels[0].mcFlags = NISLANDS_SMC_MC_EDC_RD_FLAG | NISLANDS_SMC_MC_EDC_WR_FLAG;
1791                 else
1792                         table->initialState.levels[0].mcFlags = 0;
1793         }
1794
1795         table->initialState.levelCount = 1;
1796
1797         table->initialState.flags |= PPSMC_SWSTATE_FLAG_DC;
1798
1799         table->initialState.levels[0].dpm2.MaxPS = 0;
1800         table->initialState.levels[0].dpm2.NearTDPDec = 0;
1801         table->initialState.levels[0].dpm2.AboveSafeInc = 0;
1802         table->initialState.levels[0].dpm2.BelowSafeInc = 0;
1803
1804         reg = MIN_POWER_MASK | MAX_POWER_MASK;
1805         table->initialState.levels[0].SQPowerThrottle = cpu_to_be32(reg);
1806
1807         reg = MAX_POWER_DELTA_MASK | STI_SIZE_MASK | LTI_RATIO_MASK;
1808         table->initialState.levels[0].SQPowerThrottle_2 = cpu_to_be32(reg);
1809
1810         return 0;
1811 }
1812
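     /*
      * Derive the ACPI (lowest power) state from the initial state: use
      * the ACPI or minimum VDDC, zero the sclk/mclk values, reset, bypass
      * and power down the memory DLLs, change the SCLK mux selection and
      * keep neutral DPM2/SQ throttle settings.
      */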
1813 static int ni_populate_smc_acpi_state(struct radeon_device *rdev,
1814                                       NISLANDS_SMC_STATETABLE *table)
1815 {
1816         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1817         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1818         struct ni_power_info *ni_pi = ni_get_pi(rdev);
1819         u32 mpll_ad_func_cntl   = ni_pi->clock_registers.mpll_ad_func_cntl;
1820         u32 mpll_ad_func_cntl_2 = ni_pi->clock_registers.mpll_ad_func_cntl_2;
1821         u32 mpll_dq_func_cntl   = ni_pi->clock_registers.mpll_dq_func_cntl;
1822         u32 mpll_dq_func_cntl_2 = ni_pi->clock_registers.mpll_dq_func_cntl_2;
1823         u32 spll_func_cntl      = ni_pi->clock_registers.cg_spll_func_cntl;
1824         u32 spll_func_cntl_2    = ni_pi->clock_registers.cg_spll_func_cntl_2;
1825         u32 spll_func_cntl_3    = ni_pi->clock_registers.cg_spll_func_cntl_3;
1826         u32 spll_func_cntl_4    = ni_pi->clock_registers.cg_spll_func_cntl_4;
1827         u32 mclk_pwrmgt_cntl    = ni_pi->clock_registers.mclk_pwrmgt_cntl;
1828         u32 dll_cntl            = ni_pi->clock_registers.dll_cntl;
1829         u32 reg;
1830         int ret;
1831
1832         table->ACPIState = table->initialState;
1833
1834         table->ACPIState.flags &= ~PPSMC_SWSTATE_FLAG_DC;
1835
1836         if (pi->acpi_vddc) {
1837                 ret = ni_populate_voltage_value(rdev,
1838                                                 &eg_pi->vddc_voltage_table,
1839                                                 pi->acpi_vddc, &table->ACPIState.levels[0].vddc);
1840                 if (!ret) {
1841                         u16 std_vddc;
1842
1843                         ret = ni_get_std_voltage_value(rdev,
1844                                                        &table->ACPIState.levels[0].vddc, &std_vddc);
1845                         if (!ret)
1846                                 ni_populate_std_voltage_value(rdev, std_vddc,
1847                                                               table->ACPIState.levels[0].vddc.index,
1848                                                               &table->ACPIState.levels[0].std_vddc);
1849                 }
1850
1851                 if (pi->pcie_gen2) {
1852                         if (pi->acpi_pcie_gen2)
1853                                 table->ACPIState.levels[0].gen2PCIE = 1;
1854                         else
1855                                 table->ACPIState.levels[0].gen2PCIE = 0;
1856                 } else {
1857                         table->ACPIState.levels[0].gen2PCIE = 0;
1858                 }
1859         } else {
1860                 ret = ni_populate_voltage_value(rdev,
1861                                                 &eg_pi->vddc_voltage_table,
1862                                                 pi->min_vddc_in_table,
1863                                                 &table->ACPIState.levels[0].vddc);
1864                 if (!ret) {
1865                         u16 std_vddc;
1866
1867                         ret = ni_get_std_voltage_value(rdev,
1868                                                        &table->ACPIState.levels[0].vddc,
1869                                                        &std_vddc);
1870                         if (!ret)
1871                                 ni_populate_std_voltage_value(rdev, std_vddc,
1872                                                               table->ACPIState.levels[0].vddc.index,
1873                                                               &table->ACPIState.levels[0].std_vddc);
1874                 }
1875                 table->ACPIState.levels[0].gen2PCIE = 0;
1876         }
1877
1878         if (eg_pi->acpi_vddci) {
1879                 if (eg_pi->vddci_control)
1880                         ni_populate_voltage_value(rdev,
1881                                                   &eg_pi->vddci_voltage_table,
1882                                                   eg_pi->acpi_vddci,
1883                                                   &table->ACPIState.levels[0].vddci);
1884         }
1885
1886
1887         mpll_ad_func_cntl &= ~PDNB;
1888
1889         mpll_ad_func_cntl_2 |= BIAS_GEN_PDNB | RESET_EN;
1890
1891         if (pi->mem_gddr5)
1892                 mpll_dq_func_cntl &= ~PDNB;
1893         mpll_dq_func_cntl_2 |= BIAS_GEN_PDNB | RESET_EN | BYPASS;
1894
1895
1896         mclk_pwrmgt_cntl |= (MRDCKA0_RESET |
1897                              MRDCKA1_RESET |
1898                              MRDCKB0_RESET |
1899                              MRDCKB1_RESET |
1900                              MRDCKC0_RESET |
1901                              MRDCKC1_RESET |
1902                              MRDCKD0_RESET |
1903                              MRDCKD1_RESET);
1904
1905         mclk_pwrmgt_cntl &= ~(MRDCKA0_PDNB |
1906                               MRDCKA1_PDNB |
1907                               MRDCKB0_PDNB |
1908                               MRDCKB1_PDNB |
1909                               MRDCKC0_PDNB |
1910                               MRDCKC1_PDNB |
1911                               MRDCKD0_PDNB |
1912                               MRDCKD1_PDNB);
1913
1914         dll_cntl |= (MRDCKA0_BYPASS |
1915                      MRDCKA1_BYPASS |
1916                      MRDCKB0_BYPASS |
1917                      MRDCKB1_BYPASS |
1918                      MRDCKC0_BYPASS |
1919                      MRDCKC1_BYPASS |
1920                      MRDCKD0_BYPASS |
1921                      MRDCKD1_BYPASS);
1922
1923         spll_func_cntl_2 &= ~SCLK_MUX_SEL_MASK;
1924         spll_func_cntl_2 |= SCLK_MUX_SEL(4);
1925
1926         table->ACPIState.levels[0].mclk.vMPLL_AD_FUNC_CNTL = cpu_to_be32(mpll_ad_func_cntl);
1927         table->ACPIState.levels[0].mclk.vMPLL_AD_FUNC_CNTL_2 = cpu_to_be32(mpll_ad_func_cntl_2);
1928         table->ACPIState.levels[0].mclk.vMPLL_DQ_FUNC_CNTL = cpu_to_be32(mpll_dq_func_cntl);
1929         table->ACPIState.levels[0].mclk.vMPLL_DQ_FUNC_CNTL_2 = cpu_to_be32(mpll_dq_func_cntl_2);
1930         table->ACPIState.levels[0].mclk.vMCLK_PWRMGT_CNTL = cpu_to_be32(mclk_pwrmgt_cntl);
1931         table->ACPIState.levels[0].mclk.vDLL_CNTL = cpu_to_be32(dll_cntl);
1932
1933         table->ACPIState.levels[0].mclk.mclk_value = 0;
1934
1935         table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL = cpu_to_be32(spll_func_cntl);
1936         table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_2 = cpu_to_be32(spll_func_cntl_2);
1937         table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_3 = cpu_to_be32(spll_func_cntl_3);
1938         table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_4 = cpu_to_be32(spll_func_cntl_4);
1939
1940         table->ACPIState.levels[0].sclk.sclk_value = 0;
1941
1942         ni_populate_mvdd_value(rdev, 0, &table->ACPIState.levels[0].mvdd);
1943
1944         if (eg_pi->dynamic_ac_timing)
1945                 table->ACPIState.levels[0].ACIndex = 1;
1946
1947         table->ACPIState.levels[0].dpm2.MaxPS = 0;
1948         table->ACPIState.levels[0].dpm2.NearTDPDec = 0;
1949         table->ACPIState.levels[0].dpm2.AboveSafeInc = 0;
1950         table->ACPIState.levels[0].dpm2.BelowSafeInc = 0;
1951
1952         reg = MIN_POWER_MASK | MAX_POWER_MASK;
1953         table->ACPIState.levels[0].SQPowerThrottle = cpu_to_be32(reg);
1954
1955         reg = MAX_POWER_DELTA_MASK | STI_SIZE_MASK | LTI_RATIO_MASK;
1956         table->ACPIState.levels[0].SQPowerThrottle_2 = cpu_to_be32(reg);
1957
1958         return 0;
1959 }
1960
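     /*
      * Build the full SMC state table for the boot state: voltage tables,
      * thermal protection type, platform capability flags, the initial and
      * ACPI states (the driver and ULV states start as copies of the
      * initial state) and the initial arbiter timing set, then upload it
      * to SMC SRAM.
      */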
1961 static int ni_init_smc_table(struct radeon_device *rdev)
1962 {
1963         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1964         struct ni_power_info *ni_pi = ni_get_pi(rdev);
1965         int ret;
1966         struct radeon_ps *radeon_boot_state = rdev->pm.dpm.boot_ps;
1967         NISLANDS_SMC_STATETABLE *table = &ni_pi->smc_statetable;
1968
1969         memset(table, 0, sizeof(NISLANDS_SMC_STATETABLE));
1970
1971         ni_populate_smc_voltage_tables(rdev, table);
1972
1973         switch (rdev->pm.int_thermal_type) {
1974         case THERMAL_TYPE_NI:
1975         case THERMAL_TYPE_EMC2103_WITH_INTERNAL:
1976                 table->thermalProtectType = PPSMC_THERMAL_PROTECT_TYPE_INTERNAL;
1977                 break;
1978         case THERMAL_TYPE_NONE:
1979                 table->thermalProtectType = PPSMC_THERMAL_PROTECT_TYPE_NONE;
1980                 break;
1981         default:
1982                 table->thermalProtectType = PPSMC_THERMAL_PROTECT_TYPE_EXTERNAL;
1983                 break;
1984         }
1985
1986         if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_HARDWAREDC)
1987                 table->systemFlags |= PPSMC_SYSTEMFLAG_GPIO_DC;
1988
1989         if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_REGULATOR_HOT)
1990                 table->systemFlags |= PPSMC_SYSTEMFLAG_REGULATOR_HOT;
1991
1992         if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_STEPVDDC)
1993                 table->systemFlags |= PPSMC_SYSTEMFLAG_STEPVDDC;
1994
1995         if (pi->mem_gddr5)
1996                 table->systemFlags |= PPSMC_SYSTEMFLAG_GDDR5;
1997
1998         ret = ni_populate_smc_initial_state(rdev, radeon_boot_state, table);
1999         if (ret)
2000                 return ret;
2001
2002         ret = ni_populate_smc_acpi_state(rdev, table);
2003         if (ret)
2004                 return ret;
2005
2006         table->driverState = table->initialState;
2007
2008         table->ULVState = table->initialState;
2009
2010         ret = ni_do_program_memory_timing_parameters(rdev, radeon_boot_state,
2011                                                      NISLANDS_INITIAL_STATE_ARB_INDEX);
2012         if (ret)
2013                 return ret;
2014
2015         return rv770_copy_bytes_to_smc(rdev, pi->state_table_start, (u8 *)table,
2016                                        sizeof(NISLANDS_SMC_STATETABLE), pi->sram_end);
2017 }
2018
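     /*
      * Compute the SPLL register values for an engine clock.  The feedback
      * divider presumably carries 14 fractional bits:
      *
      *   fbdiv = engine_clock * ref_div * post_div * 16384 / reference_clock
      *
      * with both clocks in the same (10 kHz) units.  Spread spectrum is
      * applied if the ATOM tables provide parameters for the resulting VCO
      * frequency.
      */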
2019 static int ni_calculate_sclk_params(struct radeon_device *rdev,
2020                                     u32 engine_clock,
2021                                     NISLANDS_SMC_SCLK_VALUE *sclk)
2022 {
2023         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2024         struct ni_power_info *ni_pi = ni_get_pi(rdev);
2025         struct atom_clock_dividers dividers;
2026         u32 spll_func_cntl = ni_pi->clock_registers.cg_spll_func_cntl;
2027         u32 spll_func_cntl_2 = ni_pi->clock_registers.cg_spll_func_cntl_2;
2028         u32 spll_func_cntl_3 = ni_pi->clock_registers.cg_spll_func_cntl_3;
2029         u32 spll_func_cntl_4 = ni_pi->clock_registers.cg_spll_func_cntl_4;
2030         u32 cg_spll_spread_spectrum = ni_pi->clock_registers.cg_spll_spread_spectrum;
2031         u32 cg_spll_spread_spectrum_2 = ni_pi->clock_registers.cg_spll_spread_spectrum_2;
2032         u64 tmp;
2033         u32 reference_clock = rdev->clock.spll.reference_freq;
2034         u32 reference_divider;
2035         u32 fbdiv;
2036         int ret;
2037
2038         ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
2039                                              engine_clock, false, &dividers);
2040         if (ret)
2041                 return ret;
2042
2043         reference_divider = 1 + dividers.ref_div;
2044
2045
2046         tmp = (u64) engine_clock * reference_divider * dividers.post_div * 16384;
2047         do_div(tmp, reference_clock);
2048         fbdiv = (u32) tmp;
2049
2050         spll_func_cntl &= ~(SPLL_PDIV_A_MASK | SPLL_REF_DIV_MASK);
2051         spll_func_cntl |= SPLL_REF_DIV(dividers.ref_div);
2052         spll_func_cntl |= SPLL_PDIV_A(dividers.post_div);
2053
2054         spll_func_cntl_2 &= ~SCLK_MUX_SEL_MASK;
2055         spll_func_cntl_2 |= SCLK_MUX_SEL(2);
2056
2057         spll_func_cntl_3 &= ~SPLL_FB_DIV_MASK;
2058         spll_func_cntl_3 |= SPLL_FB_DIV(fbdiv);
2059         spll_func_cntl_3 |= SPLL_DITHEN;
2060
2061         if (pi->sclk_ss) {
2062                 struct radeon_atom_ss ss;
2063                 u32 vco_freq = engine_clock * dividers.post_div;
2064
2065                 if (radeon_atombios_get_asic_ss_info(rdev, &ss,
2066                                                      ASIC_INTERNAL_ENGINE_SS, vco_freq)) {
2067                         u32 clk_s = reference_clock * 5 / (reference_divider * ss.rate);
2068                         u32 clk_v = 4 * ss.percentage * fbdiv / (clk_s * 10000);
2069
2070                         cg_spll_spread_spectrum &= ~CLK_S_MASK;
2071                         cg_spll_spread_spectrum |= CLK_S(clk_s);
2072                         cg_spll_spread_spectrum |= SSEN;
2073
2074                         cg_spll_spread_spectrum_2 &= ~CLK_V_MASK;
2075                         cg_spll_spread_spectrum_2 |= CLK_V(clk_v);
2076                 }
2077         }
2078
2079         sclk->sclk_value = engine_clock;
2080         sclk->vCG_SPLL_FUNC_CNTL = spll_func_cntl;
2081         sclk->vCG_SPLL_FUNC_CNTL_2 = spll_func_cntl_2;
2082         sclk->vCG_SPLL_FUNC_CNTL_3 = spll_func_cntl_3;
2083         sclk->vCG_SPLL_FUNC_CNTL_4 = spll_func_cntl_4;
2084         sclk->vCG_SPLL_SPREAD_SPECTRUM = cg_spll_spread_spectrum;
2085         sclk->vCG_SPLL_SPREAD_SPECTRUM_2 = cg_spll_spread_spectrum_2;
2086
2087         return 0;
2088 }
2089
2090 static int ni_populate_sclk_value(struct radeon_device *rdev,
2091                                   u32 engine_clock,
2092                                   NISLANDS_SMC_SCLK_VALUE *sclk)
2093 {
2094         NISLANDS_SMC_SCLK_VALUE sclk_tmp;
2095         int ret;
2096
2097         ret = ni_calculate_sclk_params(rdev, engine_clock, &sclk_tmp);
2098         if (!ret) {
2099                 sclk->sclk_value = cpu_to_be32(sclk_tmp.sclk_value);
2100                 sclk->vCG_SPLL_FUNC_CNTL = cpu_to_be32(sclk_tmp.vCG_SPLL_FUNC_CNTL);
2101                 sclk->vCG_SPLL_FUNC_CNTL_2 = cpu_to_be32(sclk_tmp.vCG_SPLL_FUNC_CNTL_2);
2102                 sclk->vCG_SPLL_FUNC_CNTL_3 = cpu_to_be32(sclk_tmp.vCG_SPLL_FUNC_CNTL_3);
2103                 sclk->vCG_SPLL_FUNC_CNTL_4 = cpu_to_be32(sclk_tmp.vCG_SPLL_FUNC_CNTL_4);
2104                 sclk->vCG_SPLL_SPREAD_SPECTRUM = cpu_to_be32(sclk_tmp.vCG_SPLL_SPREAD_SPECTRUM);
2105                 sclk->vCG_SPLL_SPREAD_SPECTRUM_2 = cpu_to_be32(sclk_tmp.vCG_SPLL_SPREAD_SPECTRUM_2);
2106         }
2107
2108         return ret;
2109 }
2110
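     /*
      * Precompute a 256-entry SPLL divider table in steps of 512 sclk
      * units (5.12 MHz, assuming 10 kHz units) and upload it to SMC SRAM,
      * packing the post/feedback dividers and spread spectrum clk_s/clk_v
      * fields into the SMC table format.
      */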
2111 static int ni_init_smc_spll_table(struct radeon_device *rdev)
2112 {
2113         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2114         struct ni_power_info *ni_pi = ni_get_pi(rdev);
2115         SMC_NISLANDS_SPLL_DIV_TABLE *spll_table;
2116         NISLANDS_SMC_SCLK_VALUE sclk_params;
2117         u32 fb_div;
2118         u32 p_div;
2119         u32 clk_s;
2120         u32 clk_v;
2121         u32 sclk = 0;
2122         int i, ret;
2123         u32 tmp;
2124
2125         if (ni_pi->spll_table_start == 0)
2126                 return -EINVAL;
2127
2128         spll_table = kzalloc(sizeof(SMC_NISLANDS_SPLL_DIV_TABLE), GFP_KERNEL);
2129         if (spll_table == NULL)
2130                 return -ENOMEM;
2131
2132         for (i = 0; i < 256; i++) {
2133                 ret = ni_calculate_sclk_params(rdev, sclk, &sclk_params);
2134                 if (ret)
2135                         break;
2136
2137                 p_div = (sclk_params.vCG_SPLL_FUNC_CNTL & SPLL_PDIV_A_MASK) >> SPLL_PDIV_A_SHIFT;
2138                 fb_div = (sclk_params.vCG_SPLL_FUNC_CNTL_3 & SPLL_FB_DIV_MASK) >> SPLL_FB_DIV_SHIFT;
2139                 clk_s = (sclk_params.vCG_SPLL_SPREAD_SPECTRUM & CLK_S_MASK) >> CLK_S_SHIFT;
2140                 clk_v = (sclk_params.vCG_SPLL_SPREAD_SPECTRUM_2 & CLK_V_MASK) >> CLK_V_SHIFT;
2141
2142                 fb_div &= ~0x00001FFF;
2143                 fb_div >>= 1;
2144                 clk_v >>= 6;
2145
2146                 if (p_div & ~(SMC_NISLANDS_SPLL_DIV_TABLE_PDIV_MASK >> SMC_NISLANDS_SPLL_DIV_TABLE_PDIV_SHIFT))
2147                         ret = -EINVAL;
2148
2149                 if (fb_div & ~(SMC_NISLANDS_SPLL_DIV_TABLE_FBDIV_MASK >> SMC_NISLANDS_SPLL_DIV_TABLE_FBDIV_SHIFT))
2150                         ret = -EINVAL;
2151
2152                 if (clk_s & ~(SMC_NISLANDS_SPLL_DIV_TABLE_CLKS_MASK >> SMC_NISLANDS_SPLL_DIV_TABLE_CLKS_SHIFT))
2153                         ret = -EINVAL;
2154
2155                 if (clk_v & ~(SMC_NISLANDS_SPLL_DIV_TABLE_CLKV_MASK >> SMC_NISLANDS_SPLL_DIV_TABLE_CLKV_SHIFT))
2156                         ret = -EINVAL;
2157
2158                 if (ret)
2159                         break;
2160
2161                 tmp = ((fb_div << SMC_NISLANDS_SPLL_DIV_TABLE_FBDIV_SHIFT) & SMC_NISLANDS_SPLL_DIV_TABLE_FBDIV_MASK) |
2162                         ((p_div << SMC_NISLANDS_SPLL_DIV_TABLE_PDIV_SHIFT) & SMC_NISLANDS_SPLL_DIV_TABLE_PDIV_MASK);
2163                 spll_table->freq[i] = cpu_to_be32(tmp);
2164
2165                 tmp = ((clk_v << SMC_NISLANDS_SPLL_DIV_TABLE_CLKV_SHIFT) & SMC_NISLANDS_SPLL_DIV_TABLE_CLKV_MASK) |
2166                         ((clk_s << SMC_NISLANDS_SPLL_DIV_TABLE_CLKS_SHIFT) & SMC_NISLANDS_SPLL_DIV_TABLE_CLKS_MASK);
2167                 spll_table->ss[i] = cpu_to_be32(tmp);
2168
2169                 sclk += 512;
2170         }
2171
2172         if (!ret)
2173                 ret = rv770_copy_bytes_to_smc(rdev, ni_pi->spll_table_start, (u8 *)spll_table,
2174                                               sizeof(SMC_NISLANDS_SPLL_DIV_TABLE), pi->sram_end);
2175
2176         kfree(spll_table);
2177
2178         return ret;
2179 }
2180
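     /*
      * Compute the MPLL register values for a memory clock: AD/DQ PLL
      * dividers and ibias from the ATOM divider info, optional memory
      * spread spectrum, the DLL speed, and the per-channel DLL power-down
      * bits depending on dll_state_on.
      */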
2181 static int ni_populate_mclk_value(struct radeon_device *rdev,
2182                                   u32 engine_clock,
2183                                   u32 memory_clock,
2184                                   NISLANDS_SMC_MCLK_VALUE *mclk,
2185                                   bool strobe_mode,
2186                                   bool dll_state_on)
2187 {
2188         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2189         struct ni_power_info *ni_pi = ni_get_pi(rdev);
2190         u32 mpll_ad_func_cntl = ni_pi->clock_registers.mpll_ad_func_cntl;
2191         u32 mpll_ad_func_cntl_2 = ni_pi->clock_registers.mpll_ad_func_cntl_2;
2192         u32 mpll_dq_func_cntl = ni_pi->clock_registers.mpll_dq_func_cntl;
2193         u32 mpll_dq_func_cntl_2 = ni_pi->clock_registers.mpll_dq_func_cntl_2;
2194         u32 mclk_pwrmgt_cntl = ni_pi->clock_registers.mclk_pwrmgt_cntl;
2195         u32 dll_cntl = ni_pi->clock_registers.dll_cntl;
2196         u32 mpll_ss1 = ni_pi->clock_registers.mpll_ss1;
2197         u32 mpll_ss2 = ni_pi->clock_registers.mpll_ss2;
2198         struct atom_clock_dividers dividers;
2199         u32 ibias;
2200         u32 dll_speed;
2201         int ret;
2202         u32 mc_seq_misc7;
2203
2204         ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_MEMORY_PLL_PARAM,
2205                                              memory_clock, strobe_mode, &dividers);
2206         if (ret)
2207                 return ret;
2208
2209         if (!strobe_mode) {
2210                 mc_seq_misc7 = RREG32(MC_SEQ_MISC7);
2211
2212                 if (mc_seq_misc7 & 0x8000000)
2213                         dividers.post_div = 1;
2214         }
2215
2216         ibias = cypress_map_clkf_to_ibias(rdev, dividers.whole_fb_div);
2217
2218         mpll_ad_func_cntl &= ~(CLKR_MASK |
2219                                YCLK_POST_DIV_MASK |
2220                                CLKF_MASK |
2221                                CLKFRAC_MASK |
2222                                IBIAS_MASK);
2223         mpll_ad_func_cntl |= CLKR(dividers.ref_div);
2224         mpll_ad_func_cntl |= YCLK_POST_DIV(dividers.post_div);
2225         mpll_ad_func_cntl |= CLKF(dividers.whole_fb_div);
2226         mpll_ad_func_cntl |= CLKFRAC(dividers.frac_fb_div);
2227         mpll_ad_func_cntl |= IBIAS(ibias);
2228
2229         if (dividers.vco_mode)
2230                 mpll_ad_func_cntl_2 |= VCO_MODE;
2231         else
2232                 mpll_ad_func_cntl_2 &= ~VCO_MODE;
2233
2234         if (pi->mem_gddr5) {
2235                 mpll_dq_func_cntl &= ~(CLKR_MASK |
2236                                        YCLK_POST_DIV_MASK |
2237                                        CLKF_MASK |
2238                                        CLKFRAC_MASK |
2239                                        IBIAS_MASK);
2240                 mpll_dq_func_cntl |= CLKR(dividers.ref_div);
2241                 mpll_dq_func_cntl |= YCLK_POST_DIV(dividers.post_div);
2242                 mpll_dq_func_cntl |= CLKF(dividers.whole_fb_div);
2243                 mpll_dq_func_cntl |= CLKFRAC(dividers.frac_fb_div);
2244                 mpll_dq_func_cntl |= IBIAS(ibias);
2245
2246                 if (strobe_mode)
2247                         mpll_dq_func_cntl &= ~PDNB;
2248                 else
2249                         mpll_dq_func_cntl |= PDNB;
2250
2251                 if (dividers.vco_mode)
2252                         mpll_dq_func_cntl_2 |= VCO_MODE;
2253                 else
2254                         mpll_dq_func_cntl_2 &= ~VCO_MODE;
2255         }
2256
2257         if (pi->mclk_ss) {
2258                 struct radeon_atom_ss ss;
2259                 u32 vco_freq = memory_clock * dividers.post_div;
2260
2261                 if (radeon_atombios_get_asic_ss_info(rdev, &ss,
2262                                                      ASIC_INTERNAL_MEMORY_SS, vco_freq)) {
2263                         u32 reference_clock = rdev->clock.mpll.reference_freq;
2264                         u32 decoded_ref = rv740_get_decoded_reference_divider(dividers.ref_div);
2265                         u32 clk_s = reference_clock * 5 / (decoded_ref * ss.rate);
2266                         u32 clk_v = ss.percentage *
2267                                 (0x4000 * dividers.whole_fb_div + 0x800 * dividers.frac_fb_div) / (clk_s * 625);
2268
2269                         mpll_ss1 &= ~CLKV_MASK;
2270                         mpll_ss1 |= CLKV(clk_v);
2271
2272                         mpll_ss2 &= ~CLKS_MASK;
2273                         mpll_ss2 |= CLKS(clk_s);
2274                 }
2275         }
2276
2277         dll_speed = rv740_get_dll_speed(pi->mem_gddr5,
2278                                         memory_clock);
2279
2280         mclk_pwrmgt_cntl &= ~DLL_SPEED_MASK;
2281         mclk_pwrmgt_cntl |= DLL_SPEED(dll_speed);
2282         if (dll_state_on)
2283                 mclk_pwrmgt_cntl |= (MRDCKA0_PDNB |
2284                                      MRDCKA1_PDNB |
2285                                      MRDCKB0_PDNB |
2286                                      MRDCKB1_PDNB |
2287                                      MRDCKC0_PDNB |
2288                                      MRDCKC1_PDNB |
2289                                      MRDCKD0_PDNB |
2290                                      MRDCKD1_PDNB);
2291         else
2292                 mclk_pwrmgt_cntl &= ~(MRDCKA0_PDNB |
2293                                       MRDCKA1_PDNB |
2294                                       MRDCKB0_PDNB |
2295                                       MRDCKB1_PDNB |
2296                                       MRDCKC0_PDNB |
2297                                       MRDCKC1_PDNB |
2298                                       MRDCKD0_PDNB |
2299                                       MRDCKD1_PDNB);
2300
2301
2302         mclk->mclk_value = cpu_to_be32(memory_clock);
2303         mclk->vMPLL_AD_FUNC_CNTL = cpu_to_be32(mpll_ad_func_cntl);
2304         mclk->vMPLL_AD_FUNC_CNTL_2 = cpu_to_be32(mpll_ad_func_cntl_2);
2305         mclk->vMPLL_DQ_FUNC_CNTL = cpu_to_be32(mpll_dq_func_cntl);
2306         mclk->vMPLL_DQ_FUNC_CNTL_2 = cpu_to_be32(mpll_dq_func_cntl_2);
2307         mclk->vMCLK_PWRMGT_CNTL = cpu_to_be32(mclk_pwrmgt_cntl);
2308         mclk->vDLL_CNTL = cpu_to_be32(dll_cntl);
2309         mclk->vMPLL_SS = cpu_to_be32(mpll_ss1);
2310         mclk->vMPLL_SS2 = cpu_to_be32(mpll_ss2);
2311
2312         return 0;
2313 }
2314
2315 static void ni_populate_smc_sp(struct radeon_device *rdev,
2316                                struct radeon_ps *radeon_state,
2317                                NISLANDS_SMC_SWSTATE *smc_state)
2318 {
2319         struct ni_ps *ps = ni_get_ps(radeon_state);
2320         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2321         int i;
2322
2323         for (i = 0; i < ps->performance_level_count - 1; i++)
2324                 smc_state->levels[i].bSP = cpu_to_be32(pi->dsp);
2325
2326         smc_state->levels[ps->performance_level_count - 1].bSP =
2327                 cpu_to_be32(pi->psp);
2328 }
2329
2330 static int ni_convert_power_level_to_smc(struct radeon_device *rdev,
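     /*
      * Translate one driver performance level into SMC form: sclk/mclk
      * register sets, VDDC/VDDCI/MVDD values, the PCIe gen2 flag, and the
      * memory controller flags (stutter, EDC, RTT, strobe mode) derived
      * from the configured thresholds and current MC_SEQ state.
      */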
2331                                          struct rv7xx_pl *pl,
2332                                          NISLANDS_SMC_HW_PERFORMANCE_LEVEL *level)
2333 {
2334         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2335         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
2336         struct ni_power_info *ni_pi = ni_get_pi(rdev);
2337         int ret;
2338         bool dll_state_on;
2339         u16 std_vddc;
2340         u32 tmp = RREG32(DC_STUTTER_CNTL);
2341
2342         level->gen2PCIE = pi->pcie_gen2 ?
2343                 ((pl->flags & ATOM_PPLIB_R600_FLAGS_PCIEGEN2) ? 1 : 0) : 0;
2344
2345         ret = ni_populate_sclk_value(rdev, pl->sclk, &level->sclk);
2346         if (ret)
2347                 return ret;
2348
2349         level->mcFlags = 0;
2350         if (pi->mclk_stutter_mode_threshold &&
2351             (pl->mclk <= pi->mclk_stutter_mode_threshold) &&
2352             !eg_pi->uvd_enabled &&
2353             (tmp & DC_STUTTER_ENABLE_A) &&
2354             (tmp & DC_STUTTER_ENABLE_B))
2355                 level->mcFlags |= NISLANDS_SMC_MC_STUTTER_EN;
2356
2357         if (pi->mem_gddr5) {
2358                 if (pl->mclk > pi->mclk_edc_enable_threshold)
2359                         level->mcFlags |= NISLANDS_SMC_MC_EDC_RD_FLAG;
2360                 if (pl->mclk > eg_pi->mclk_edc_wr_enable_threshold)
2361                         level->mcFlags |= NISLANDS_SMC_MC_EDC_WR_FLAG;
2362
2363                 level->strobeMode = cypress_get_strobe_mode_settings(rdev, pl->mclk);
2364
2365                 if (level->strobeMode & NISLANDS_SMC_STROBE_ENABLE) {
2366                         if (cypress_get_mclk_frequency_ratio(rdev, pl->mclk, true) >=
2367                             ((RREG32(MC_SEQ_MISC7) >> 16) & 0xf))
2368                                 dll_state_on = ((RREG32(MC_SEQ_MISC5) >> 1) & 0x1) ? true : false;
2369                         else
2370                                 dll_state_on = ((RREG32(MC_SEQ_MISC6) >> 1) & 0x1) ? true : false;
2371                 } else {
2372                         dll_state_on = false;
2373                         if (pl->mclk > ni_pi->mclk_rtt_mode_threshold)
2374                                 level->mcFlags |= NISLANDS_SMC_MC_RTT_ENABLE;
2375                 }
2376
2377                 ret = ni_populate_mclk_value(rdev, pl->sclk, pl->mclk,
2378                                              &level->mclk,
2379                                              (level->strobeMode & NISLANDS_SMC_STROBE_ENABLE) != 0,
2380                                              dll_state_on);
2381         } else
2382                 ret = ni_populate_mclk_value(rdev, pl->sclk, pl->mclk, &level->mclk, 1, 1);
2383
2384         if (ret)
2385                 return ret;
2386
2387         ret = ni_populate_voltage_value(rdev, &eg_pi->vddc_voltage_table,
2388                                         pl->vddc, &level->vddc);
2389         if (ret)
2390                 return ret;
2391
2392         ret = ni_get_std_voltage_value(rdev, &level->vddc, &std_vddc);
2393         if (ret)
2394                 return ret;
2395
2396         ni_populate_std_voltage_value(rdev, std_vddc,
2397                                       level->vddc.index, &level->std_vddc);
2398
2399         if (eg_pi->vddci_control) {
2400                 ret = ni_populate_voltage_value(rdev, &eg_pi->vddci_voltage_table,
2401                                                 pl->vddci, &level->vddci);
2402                 if (ret)
2403                         return ret;
2404         }
2405
2406         ni_populate_mvdd_value(rdev, pl->mclk, &level->mvdd);
2407
2408         return ret;
2409 }
2410
2411 static int ni_populate_smc_t(struct radeon_device *rdev,
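     /*
      * Fill in the aT (activity threshold) words that control switching
      * between adjacent performance levels, using r600_calculate_at() for
      * each sclk pair; the low threshold is scaled by bsp and the high
      * threshold by bsp (pbsp for the topmost transition).
      */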
2412                              struct radeon_ps *radeon_state,
2413                              NISLANDS_SMC_SWSTATE *smc_state)
2414 {
2415         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2416         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
2417         struct ni_ps *state = ni_get_ps(radeon_state);
2418         u32 a_t;
2419         u32 t_l, t_h;
2420         u32 high_bsp;
2421         int i, ret;
2422
2423         if (state->performance_level_count >= 9)
2424                 return -EINVAL;
2425
2426         if (state->performance_level_count < 2) {
2427                 a_t = CG_R(0xffff) | CG_L(0);
2428                 smc_state->levels[0].aT = cpu_to_be32(a_t);
2429                 return 0;
2430         }
2431
2432         smc_state->levels[0].aT = cpu_to_be32(0);
2433
2434         for (i = 0; i <= state->performance_level_count - 2; i++) {
2435                 if (eg_pi->uvd_enabled)
2436                         ret = r600_calculate_at(
2437                                 1000 * (i * (eg_pi->smu_uvd_hs ? 2 : 8) + 2),
2438                                 100 * R600_AH_DFLT,
2439                                 state->performance_levels[i + 1].sclk,
2440                                 state->performance_levels[i].sclk,
2441                                 &t_l,
2442                                 &t_h);
2443                 else
2444                         ret = r600_calculate_at(
2445                                 1000 * (i + 1),
2446                                 100 * R600_AH_DFLT,
2447                                 state->performance_levels[i + 1].sclk,
2448                                 state->performance_levels[i].sclk,
2449                                 &t_l,
2450                                 &t_h);
2451
2452                 if (ret) {
2453                         t_h = (i + 1) * 1000 - 50 * R600_AH_DFLT;
2454                         t_l = (i + 1) * 1000 + 50 * R600_AH_DFLT;
2455                 }
2456
2457                 a_t = be32_to_cpu(smc_state->levels[i].aT) & ~CG_R_MASK;
2458                 a_t |= CG_R(t_l * pi->bsp / 20000);
2459                 smc_state->levels[i].aT = cpu_to_be32(a_t);
2460
2461                 high_bsp = (i == state->performance_level_count - 2) ?
2462                         pi->pbsp : pi->bsp;
2463
2464                 a_t = CG_R(0xffff) | CG_L(t_h * high_bsp / 20000);
2465                 smc_state->levels[i + 1].aT = cpu_to_be32(a_t);
2466         }
2467
2468         return 0;
2469 }
2470
2471 static int ni_populate_power_containment_values(struct radeon_device *rdev,
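     /*
      * Program the DPM2 power containment parameters: write the power
      * boost limit to SMC SRAM, then set each level's MaxPS (maximum pulse
      * skipping) from the permitted sclk drop relative to the previous
      * level, together with the near-TDP and safe-range inc/dec constants
      * and the power boost state flag.
      */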
2472                                                 struct radeon_ps *radeon_state,
2473                                                 NISLANDS_SMC_SWSTATE *smc_state)
2474 {
2475         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2476         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
2477         struct ni_power_info *ni_pi = ni_get_pi(rdev);
2478         struct ni_ps *state = ni_get_ps(radeon_state);
2479         u32 prev_sclk;
2480         u32 max_sclk;
2481         u32 min_sclk;
2482         int i, ret;
2483         u32 tdp_limit;
2484         u32 near_tdp_limit;
2485         u32 power_boost_limit;
2486         u8 max_ps_percent;
2487
2488         if (ni_pi->enable_power_containment == false)
2489                 return 0;
2490
2491         if (state->performance_level_count == 0)
2492                 return -EINVAL;
2493
2494         if (smc_state->levelCount != state->performance_level_count)
2495                 return -EINVAL;
2496
2497         ret = ni_calculate_adjusted_tdp_limits(rdev,
2498                                                false, /* ??? */
2499                                                rdev->pm.dpm.tdp_adjustment,
2500                                                &tdp_limit,
2501                                                &near_tdp_limit);
2502         if (ret)
2503                 return ret;
2504
2505         power_boost_limit = ni_calculate_power_boost_limit(rdev, radeon_state, near_tdp_limit);
2506
2507         ret = rv770_write_smc_sram_dword(rdev,
2508                                          pi->state_table_start +
2509                                          offsetof(NISLANDS_SMC_STATETABLE, dpm2Params) +
2510                                          offsetof(PP_NIslands_DPM2Parameters, PowerBoostLimit),
2511                                          ni_scale_power_for_smc(power_boost_limit, ni_get_smc_power_scaling_factor(rdev)),
2512                                          pi->sram_end);
2513         if (ret)
2514                 power_boost_limit = 0;
2515
2516         smc_state->levels[0].dpm2.MaxPS = 0;
2517         smc_state->levels[0].dpm2.NearTDPDec = 0;
2518         smc_state->levels[0].dpm2.AboveSafeInc = 0;
2519         smc_state->levels[0].dpm2.BelowSafeInc = 0;
2520         smc_state->levels[0].stateFlags |= power_boost_limit ? PPSMC_STATEFLAG_POWERBOOST : 0;
2521
2522         for (i = 1; i < state->performance_level_count; i++) {
2523                 prev_sclk = state->performance_levels[i-1].sclk;
2524                 max_sclk  = state->performance_levels[i].sclk;
2525                 max_ps_percent = (i != (state->performance_level_count - 1)) ?
2526                         NISLANDS_DPM2_MAXPS_PERCENT_M : NISLANDS_DPM2_MAXPS_PERCENT_H;
2527
2528                 if (max_sclk < prev_sclk)
2529                         return -EINVAL;
2530
2531                 if ((max_ps_percent == 0) || (prev_sclk == max_sclk) || eg_pi->uvd_enabled)
2532                         min_sclk = max_sclk;
2533                 else if (1 == i)
2534                         min_sclk = prev_sclk;
2535                 else
2536                         min_sclk = (prev_sclk * (u32)max_ps_percent) / 100;
2537
2538                 if (min_sclk < state->performance_levels[0].sclk)
2539                         min_sclk = state->performance_levels[0].sclk;
2540
2541                 if (min_sclk == 0)
2542                         return -EINVAL;
2543
2544                 smc_state->levels[i].dpm2.MaxPS =
2545                         (u8)((NISLANDS_DPM2_MAX_PULSE_SKIP * (max_sclk - min_sclk)) / max_sclk);
2546                 smc_state->levels[i].dpm2.NearTDPDec = NISLANDS_DPM2_NEAR_TDP_DEC;
2547                 smc_state->levels[i].dpm2.AboveSafeInc = NISLANDS_DPM2_ABOVE_SAFE_INC;
2548                 smc_state->levels[i].dpm2.BelowSafeInc = NISLANDS_DPM2_BELOW_SAFE_INC;
2549                 smc_state->levels[i].stateFlags |=
2550                         ((i != (state->performance_level_count - 1)) && power_boost_limit) ?
2551                         PPSMC_STATEFLAG_POWERBOOST : 0;
2552         }
2553
2554         return 0;
2555 }
2556
2557 static int ni_populate_sq_ramping_values(struct radeon_device *rdev,
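     /*
      * Set the per-level SQ power throttle registers: when SQ ramping is
      * enabled and the level's sclk is at or above the ramping threshold,
      * program the min/max power, max power delta, STI size and LTI ratio
      * limits; otherwise leave the fields at their mask values so that
      * throttling is effectively disabled.
      */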
2558                                          struct radeon_ps *radeon_state,
2559                                          NISLANDS_SMC_SWSTATE *smc_state)
2560 {
2561         struct ni_power_info *ni_pi = ni_get_pi(rdev);
2562         struct ni_ps *state = ni_get_ps(radeon_state);
2563         u32 sq_power_throttle;
2564         u32 sq_power_throttle2;
2565         bool enable_sq_ramping = ni_pi->enable_sq_ramping;
2566         int i;
2567
2568         if (state->performance_level_count == 0)
2569                 return -EINVAL;
2570
2571         if (smc_state->levelCount != state->performance_level_count)
2572                 return -EINVAL;
2573
2574         if (rdev->pm.dpm.sq_ramping_threshold == 0)
2575                 return -EINVAL;
2576
2577         if (NISLANDS_DPM2_SQ_RAMP_MAX_POWER > (MAX_POWER_MASK >> MAX_POWER_SHIFT))
2578                 enable_sq_ramping = false;
2579
2580         if (NISLANDS_DPM2_SQ_RAMP_MIN_POWER > (MIN_POWER_MASK >> MIN_POWER_SHIFT))
2581                 enable_sq_ramping = false;
2582
2583         if (NISLANDS_DPM2_SQ_RAMP_MAX_POWER_DELTA > (MAX_POWER_DELTA_MASK >> MAX_POWER_DELTA_SHIFT))
2584                 enable_sq_ramping = false;
2585
2586         if (NISLANDS_DPM2_SQ_RAMP_STI_SIZE > (STI_SIZE_MASK >> STI_SIZE_SHIFT))
2587                 enable_sq_ramping = false;
2588
2589         if (NISLANDS_DPM2_SQ_RAMP_LTI_RATIO <= (LTI_RATIO_MASK >> LTI_RATIO_SHIFT))
2590                 enable_sq_ramping = false;
2591
2592         for (i = 0; i < state->performance_level_count; i++) {
2593                 sq_power_throttle  = 0;
2594                 sq_power_throttle2 = 0;
2595
2596                 if ((state->performance_levels[i].sclk >= rdev->pm.dpm.sq_ramping_threshold) &&
2597                     enable_sq_ramping) {
2598                         sq_power_throttle |= MAX_POWER(NISLANDS_DPM2_SQ_RAMP_MAX_POWER);
2599                         sq_power_throttle |= MIN_POWER(NISLANDS_DPM2_SQ_RAMP_MIN_POWER);
2600                         sq_power_throttle2 |= MAX_POWER_DELTA(NISLANDS_DPM2_SQ_RAMP_MAX_POWER_DELTA);
2601                         sq_power_throttle2 |= STI_SIZE(NISLANDS_DPM2_SQ_RAMP_STI_SIZE);
2602                         sq_power_throttle2 |= LTI_RATIO(NISLANDS_DPM2_SQ_RAMP_LTI_RATIO);
2603                 } else {
2604                         sq_power_throttle |= MAX_POWER_MASK | MIN_POWER_MASK;
2605                         sq_power_throttle2 |= MAX_POWER_DELTA_MASK | STI_SIZE_MASK | LTI_RATIO_MASK;
2606                 }
2607
2608                 smc_state->levels[i].SQPowerThrottle   = cpu_to_be32(sq_power_throttle);
2609                 smc_state->levels[i].SQPowerThrottle_2 = cpu_to_be32(sq_power_throttle2);
2610         }
2611
2612         return 0;
2613 }
2614
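/*
 * ni_enable_power_containment() - ask the SMC to activate or deactivate TDP
 * clamping.  Clamping is never activated for UVD states; pc_enabled tracks
 * whether the SMC accepted the request.
 */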
2615 static int ni_enable_power_containment(struct radeon_device *rdev,
2616                                        struct radeon_ps *radeon_new_state,
2617                                        bool enable)
2618 {
2619         struct ni_power_info *ni_pi = ni_get_pi(rdev);
2620         PPSMC_Result smc_result;
2621         int ret = 0;
2622
2623         if (ni_pi->enable_power_containment) {
2624                 if (enable) {
2625                         if (!r600_is_uvd_state(radeon_new_state->class, radeon_new_state->class2)) {
2626                                 smc_result = rv770_send_msg_to_smc(rdev, PPSMC_TDPClampingActive);
2627                                 if (smc_result != PPSMC_Result_OK) {
2628                                         ret = -EINVAL;
2629                                         ni_pi->pc_enabled = false;
2630                                 } else {
2631                                         ni_pi->pc_enabled = true;
2632                                 }
2633                         }
2634                 } else {
2635                         smc_result = rv770_send_msg_to_smc(rdev, PPSMC_TDPClampingInactive);
2636                         if (smc_result != PPSMC_Result_OK)
2637                                 ret = -EINVAL;
2638                         ni_pi->pc_enabled = false;
2639                 }
2640         }
2641
2642         return ret;
2643 }
2644
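/*
 * ni_convert_power_state_to_smc() - translate a driver power state into the
 * SMC software-state layout: per-level clocks and voltages, arbitration and
 * AC-timing indices, display watermarks, DPM2 power-containment and SQ
 * ramping parameters.  Failures in the optional power-containment or SQ
 * ramping tables only disable those features.
 */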
2645 static int ni_convert_power_state_to_smc(struct radeon_device *rdev,
2646                                          struct radeon_ps *radeon_state,
2647                                          NISLANDS_SMC_SWSTATE *smc_state)
2648 {
2649         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
2650         struct ni_power_info *ni_pi = ni_get_pi(rdev);
2651         struct ni_ps *state = ni_get_ps(radeon_state);
2652         int i, ret;
2653         u32 threshold = state->performance_levels[state->performance_level_count - 1].sclk * 100 / 100;
2654
2655         if (!(radeon_state->caps & ATOM_PPLIB_DISALLOW_ON_DC))
2656                 smc_state->flags |= PPSMC_SWSTATE_FLAG_DC;
2657
2658         smc_state->levelCount = 0;
2659
2660         if (state->performance_level_count > NISLANDS_MAX_SMC_PERFORMANCE_LEVELS_PER_SWSTATE)
2661                 return -EINVAL;
2662
2663         for (i = 0; i < state->performance_level_count; i++) {
2664                 ret = ni_convert_power_level_to_smc(rdev, &state->performance_levels[i],
2665                                                     &smc_state->levels[i]);
2666                 smc_state->levels[i].arbRefreshState =
2667                         (u8)(NISLANDS_DRIVER_STATE_ARB_INDEX + i);
2668
2669                 if (ret)
2670                         return ret;
2671
2672                 if (ni_pi->enable_power_containment)
2673                         smc_state->levels[i].displayWatermark =
2674                                 (state->performance_levels[i].sclk < threshold) ?
2675                                 PPSMC_DISPLAY_WATERMARK_LOW : PPSMC_DISPLAY_WATERMARK_HIGH;
2676                 else
2677                         smc_state->levels[i].displayWatermark = (i < 2) ?
2678                                 PPSMC_DISPLAY_WATERMARK_LOW : PPSMC_DISPLAY_WATERMARK_HIGH;
2679
2680                 if (eg_pi->dynamic_ac_timing)
2681                         smc_state->levels[i].ACIndex = NISLANDS_MCREGISTERTABLE_FIRST_DRIVERSTATE_SLOT + i;
2682                 else
2683                         smc_state->levels[i].ACIndex = 0;
2684
2685                 smc_state->levelCount++;
2686         }
2687
2688         rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_watermark_threshold,
2689                                       cpu_to_be32(threshold / 512));
2690
2691         ni_populate_smc_sp(rdev, radeon_state, smc_state);
2692
2693         ret = ni_populate_power_containment_values(rdev, radeon_state, smc_state);
2694         if (ret)
2695                 ni_pi->enable_power_containment = false;
2696
2697         ret = ni_populate_sq_ramping_values(rdev, radeon_state, smc_state);
2698         if (ret)
2699                 ni_pi->enable_sq_ramping = false;
2700
2701         return ni_populate_smc_t(rdev, radeon_state, smc_state);
2702 }
2703
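/*
 * ni_upload_sw_state() - build the SMC software state for the requested
 * power state and copy it into SMC RAM at the driverState offset of the
 * state table.
 */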
2704 static int ni_upload_sw_state(struct radeon_device *rdev,
2705                               struct radeon_ps *radeon_new_state)
2706 {
2707         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2708         u16 address = pi->state_table_start +
2709                 offsetof(NISLANDS_SMC_STATETABLE, driverState);
2710         u16 state_size = sizeof(NISLANDS_SMC_SWSTATE) +
2711                 ((NISLANDS_MAX_SMC_PERFORMANCE_LEVELS_PER_SWSTATE - 1) * sizeof(NISLANDS_SMC_HW_PERFORMANCE_LEVEL));
2712         int ret;
2713         NISLANDS_SMC_SWSTATE *smc_state = kzalloc(state_size, GFP_KERNEL);
2714
2715         if (smc_state == NULL)
2716                 return -ENOMEM;
2717
2718         ret = ni_convert_power_state_to_smc(rdev, radeon_new_state, smc_state);
2719         if (ret)
2720                 goto done;
2721
2722         ret = rv770_copy_bytes_to_smc(rdev, address, (u8 *)smc_state, state_size, pi->sram_end);
2723
2724 done:
2725         kfree(smc_state);
2726
2727         return ret;
2728 }
2729
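/*
 * ni_set_mc_special_registers() - append derived entries to the MC register
 * table: for MC_SEQ_MISC1 and MC_SEQ_RESERVE_M sources, shadow EMRS/MRS/MRS1
 * entries are generated from the live MC_PMG_CMD_* registers combined with
 * the per-entry timing data.
 */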
2730 static int ni_set_mc_special_registers(struct radeon_device *rdev,
2731                                        struct ni_mc_reg_table *table)
2732 {
2733         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2734         u8 i, j, k;
2735         u32 temp_reg;
2736
2737         for (i = 0, j = table->last; i < table->last; i++) {
2738                 switch (table->mc_reg_address[i].s1) {
2739                 case MC_SEQ_MISC1 >> 2:
2740                         if (j >= SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE)
2741                                 return -EINVAL;
2742                         temp_reg = RREG32(MC_PMG_CMD_EMRS);
2743                         table->mc_reg_address[j].s1 = MC_PMG_CMD_EMRS >> 2;
2744                         table->mc_reg_address[j].s0 = MC_SEQ_PMG_CMD_EMRS_LP >> 2;
2745                         for (k = 0; k < table->num_entries; k++)
2746                                 table->mc_reg_table_entry[k].mc_data[j] =
2747                                         ((temp_reg & 0xffff0000)) |
2748                                         ((table->mc_reg_table_entry[k].mc_data[i] & 0xffff0000) >> 16);
2749                         j++;
2750                         if (j >= SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE)
2751                                 return -EINVAL;
2752
2753                         temp_reg = RREG32(MC_PMG_CMD_MRS);
2754                         table->mc_reg_address[j].s1 = MC_PMG_CMD_MRS >> 2;
2755                         table->mc_reg_address[j].s0 = MC_SEQ_PMG_CMD_MRS_LP >> 2;
2756                         for (k = 0; k < table->num_entries; k++) {
2757                                 table->mc_reg_table_entry[k].mc_data[j] =
2758                                         (temp_reg & 0xffff0000) |
2759                                         (table->mc_reg_table_entry[k].mc_data[i] & 0x0000ffff);
2760                                 if (!pi->mem_gddr5)
2761                                         table->mc_reg_table_entry[k].mc_data[j] |= 0x100;
2762                         }
2763                         j++;
2764                         if (j > SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE)
2765                                 return -EINVAL;
2766                         break;
2767                 case MC_SEQ_RESERVE_M >> 2:
                        /* guard the derived register slot before writing;
                         * j may already equal SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE here
                         */
                        if (j >= SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE)
                                return -EINVAL;
2768                         temp_reg = RREG32(MC_PMG_CMD_MRS1);
2769                         table->mc_reg_address[j].s1 = MC_PMG_CMD_MRS1 >> 2;
2770                         table->mc_reg_address[j].s0 = MC_SEQ_PMG_CMD_MRS1_LP >> 2;
2771                         for (k = 0; k < table->num_entries; k++)
2772                                 table->mc_reg_table_entry[k].mc_data[j] =
2773                                         (temp_reg & 0xffff0000) |
2774                                         (table->mc_reg_table_entry[k].mc_data[i] & 0x0000ffff);
2775                         j++;
2776                         if (j > SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE)
2777                                 return -EINVAL;
2778                         break;
2779                 default:
2780                         break;
2781                 }
2782         }
2783
2784         table->last = j;
2785
2786         return 0;
2787 }
2788
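/*
 * ni_check_s0_mc_reg_index() - map an MC register offset to its _LP shadow
 * register; returns false if the register has no shadow.
 */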
2789 static bool ni_check_s0_mc_reg_index(u16 in_reg, u16 *out_reg)
2790 {
2791         bool result = true;
2792
2793         switch (in_reg) {
2794         case  MC_SEQ_RAS_TIMING >> 2:
2795                 *out_reg = MC_SEQ_RAS_TIMING_LP >> 2;
2796                 break;
2797         case MC_SEQ_CAS_TIMING >> 2:
2798                 *out_reg = MC_SEQ_CAS_TIMING_LP >> 2;
2799                 break;
2800         case MC_SEQ_MISC_TIMING >> 2:
2801                 *out_reg = MC_SEQ_MISC_TIMING_LP >> 2;
2802                 break;
2803         case MC_SEQ_MISC_TIMING2 >> 2:
2804                 *out_reg = MC_SEQ_MISC_TIMING2_LP >> 2;
2805                 break;
2806         case MC_SEQ_RD_CTL_D0 >> 2:
2807                 *out_reg = MC_SEQ_RD_CTL_D0_LP >> 2;
2808                 break;
2809         case MC_SEQ_RD_CTL_D1 >> 2:
2810                 *out_reg = MC_SEQ_RD_CTL_D1_LP >> 2;
2811                 break;
2812         case MC_SEQ_WR_CTL_D0 >> 2:
2813                 *out_reg = MC_SEQ_WR_CTL_D0_LP >> 2;
2814                 break;
2815         case MC_SEQ_WR_CTL_D1 >> 2:
2816                 *out_reg = MC_SEQ_WR_CTL_D1_LP >> 2;
2817                 break;
2818         case MC_PMG_CMD_EMRS >> 2:
2819                 *out_reg = MC_SEQ_PMG_CMD_EMRS_LP >> 2;
2820                 break;
2821         case MC_PMG_CMD_MRS >> 2:
2822                 *out_reg = MC_SEQ_PMG_CMD_MRS_LP >> 2;
2823                 break;
2824         case MC_PMG_CMD_MRS1 >> 2:
2825                 *out_reg = MC_SEQ_PMG_CMD_MRS1_LP >> 2;
2826                 break;
2827         case MC_SEQ_PMG_TIMING >> 2:
2828                 *out_reg = MC_SEQ_PMG_TIMING_LP >> 2;
2829                 break;
2830         case MC_PMG_CMD_MRS2 >> 2:
2831                 *out_reg = MC_SEQ_PMG_CMD_MRS2_LP >> 2;
2832                 break;
2833         default:
2834                 result = false;
2835                 break;
2836         }
2837
2838         return result;
2839 }
2840
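/*
 * ni_set_valid_flag() - mark which MC registers actually change between
 * table entries so only those columns are uploaded to the SMC.
 */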
2841 static void ni_set_valid_flag(struct ni_mc_reg_table *table)
2842 {
2843         u8 i, j;
2844
2845         for (i = 0; i < table->last; i++) {
2846                 for (j = 1; j < table->num_entries; j++) {
2847                         if (table->mc_reg_table_entry[j-1].mc_data[i] != table->mc_reg_table_entry[j].mc_data[i]) {
2848                                 table->valid_flag |= 1 << i;
2849                                 break;
2850                         }
2851                 }
2852         }
2853 }
2854
2855 static void ni_set_s0_mc_reg_index(struct ni_mc_reg_table *table)
2856 {
2857         u32 i;
2858         u16 address;
2859
2860         for (i = 0; i < table->last; i++)
2861                 table->mc_reg_address[i].s0 =
2862                         ni_check_s0_mc_reg_index(table->mc_reg_address[i].s1, &address) ?
2863                         address : table->mc_reg_address[i].s1;
2864 }
2865
2866 static int ni_copy_vbios_mc_reg_table(struct atom_mc_reg_table *table,
2867                                       struct ni_mc_reg_table *ni_table)
2868 {
2869         u8 i, j;
2870
2871         if (table->last > SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE)
2872                 return -EINVAL;
2873         if (table->num_entries > MAX_AC_TIMING_ENTRIES)
2874                 return -EINVAL;
2875
2876         for (i = 0; i < table->last; i++)
2877                 ni_table->mc_reg_address[i].s1 = table->mc_reg_address[i].s1;
2878         ni_table->last = table->last;
2879
2880         for (i = 0; i < table->num_entries; i++) {
2881                 ni_table->mc_reg_table_entry[i].mclk_max =
2882                         table->mc_reg_table_entry[i].mclk_max;
2883                 for (j = 0; j < table->last; j++)
2884                         ni_table->mc_reg_table_entry[i].mc_data[j] =
2885                                 table->mc_reg_table_entry[i].mc_data[j];
2886         }
2887         ni_table->num_entries = table->num_entries;
2888
2889         return 0;
2890 }
2891
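/*
 * ni_initialize_mc_reg_table() - copy the live MC sequencer registers into
 * their _LP shadows, fetch the AC-timing table for the installed memory
 * module from the VBIOS, and post-process it into the driver's MC register
 * table (s0 indices, derived entries, valid-column flags).
 */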
2892 static int ni_initialize_mc_reg_table(struct radeon_device *rdev)
2893 {
2894         struct ni_power_info *ni_pi = ni_get_pi(rdev);
2895         int ret;
2896         struct atom_mc_reg_table *table;
2897         struct ni_mc_reg_table *ni_table = &ni_pi->mc_reg_table;
2898         u8 module_index = rv770_get_memory_module_index(rdev);
2899
2900         table = kzalloc(sizeof(struct atom_mc_reg_table), GFP_KERNEL);
2901         if (!table)
2902                 return -ENOMEM;
2903
2904         WREG32(MC_SEQ_RAS_TIMING_LP, RREG32(MC_SEQ_RAS_TIMING));
2905         WREG32(MC_SEQ_CAS_TIMING_LP, RREG32(MC_SEQ_CAS_TIMING));
2906         WREG32(MC_SEQ_MISC_TIMING_LP, RREG32(MC_SEQ_MISC_TIMING));
2907         WREG32(MC_SEQ_MISC_TIMING2_LP, RREG32(MC_SEQ_MISC_TIMING2));
2908         WREG32(MC_SEQ_PMG_CMD_EMRS_LP, RREG32(MC_PMG_CMD_EMRS));
2909         WREG32(MC_SEQ_PMG_CMD_MRS_LP, RREG32(MC_PMG_CMD_MRS));
2910         WREG32(MC_SEQ_PMG_CMD_MRS1_LP, RREG32(MC_PMG_CMD_MRS1));
2911         WREG32(MC_SEQ_WR_CTL_D0_LP, RREG32(MC_SEQ_WR_CTL_D0));
2912         WREG32(MC_SEQ_WR_CTL_D1_LP, RREG32(MC_SEQ_WR_CTL_D1));
2913         WREG32(MC_SEQ_RD_CTL_D0_LP, RREG32(MC_SEQ_RD_CTL_D0));
2914         WREG32(MC_SEQ_RD_CTL_D1_LP, RREG32(MC_SEQ_RD_CTL_D1));
2915         WREG32(MC_SEQ_PMG_TIMING_LP, RREG32(MC_SEQ_PMG_TIMING));
2916         WREG32(MC_SEQ_PMG_CMD_MRS2_LP, RREG32(MC_PMG_CMD_MRS2));
2917
2918         ret = radeon_atom_init_mc_reg_table(rdev, module_index, table);
2919
2920         if (ret)
2921                 goto init_mc_done;
2922
2923         ret = ni_copy_vbios_mc_reg_table(table, ni_table);
2924
2925         if (ret)
2926                 goto init_mc_done;
2927
2928         ni_set_s0_mc_reg_index(ni_table);
2929
2930         ret = ni_set_mc_special_registers(rdev, ni_table);
2931
2932         if (ret)
2933                 goto init_mc_done;
2934
2935         ni_set_valid_flag(ni_table);
2936
2937 init_mc_done:
2938         kfree(table);
2939
2940         return ret;
2941 }
2942
2943 static void ni_populate_mc_reg_addresses(struct radeon_device *rdev,
2944                                          SMC_NIslands_MCRegisters *mc_reg_table)
2945 {
2946         struct ni_power_info *ni_pi = ni_get_pi(rdev);
2947         u32 i, j;
2948
2949         for (i = 0, j = 0; j < ni_pi->mc_reg_table.last; j++) {
2950                 if (ni_pi->mc_reg_table.valid_flag & (1 << j)) {
2951                         if (i >= SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE)
2952                                 break;
2953                         mc_reg_table->address[i].s0 =
2954                                 cpu_to_be16(ni_pi->mc_reg_table.mc_reg_address[j].s0);
2955                         mc_reg_table->address[i].s1 =
2956                                 cpu_to_be16(ni_pi->mc_reg_table.mc_reg_address[j].s1);
2957                         i++;
2958                 }
2959         }
2960         mc_reg_table->last = (u8)i;
2961 }
2962
2963
2964 static void ni_convert_mc_registers(struct ni_mc_reg_entry *entry,
2965                                     SMC_NIslands_MCRegisterSet *data,
2966                                     u32 num_entries, u32 valid_flag)
2967 {
2968         u32 i, j;
2969
2970         for (i = 0, j = 0; j < num_entries; j++) {
2971                 if (valid_flag & (1 << j)) {
2972                         data->value[i] = cpu_to_be32(entry->mc_data[j]);
2973                         i++;
2974                 }
2975         }
2976 }
2977
2978 static void ni_convert_mc_reg_table_entry_to_smc(struct radeon_device *rdev,
2979                                                  struct rv7xx_pl *pl,
2980                                                  SMC_NIslands_MCRegisterSet *mc_reg_table_data)
2981 {
2982         struct ni_power_info *ni_pi = ni_get_pi(rdev);
2983         u32 i = 0;
2984
2985         for (i = 0; i < ni_pi->mc_reg_table.num_entries; i++) {
2986                 if (pl->mclk <= ni_pi->mc_reg_table.mc_reg_table_entry[i].mclk_max)
2987                         break;
2988         }
2989
2990         if ((i == ni_pi->mc_reg_table.num_entries) && (i > 0))
2991                 --i;
2992
2993         ni_convert_mc_registers(&ni_pi->mc_reg_table.mc_reg_table_entry[i],
2994                                 mc_reg_table_data,
2995                                 ni_pi->mc_reg_table.last,
2996                                 ni_pi->mc_reg_table.valid_flag);
2997 }
2998
2999 static void ni_convert_mc_reg_table_to_smc(struct radeon_device *rdev,
3000                                            struct radeon_ps *radeon_state,
3001                                            SMC_NIslands_MCRegisters *mc_reg_table)
3002 {
3003         struct ni_ps *state = ni_get_ps(radeon_state);
3004         int i;
3005
3006         for (i = 0; i < state->performance_level_count; i++) {
3007                 ni_convert_mc_reg_table_entry_to_smc(rdev,
3008                                                      &state->performance_levels[i],
3009                                                      &mc_reg_table->data[NISLANDS_MCREGISTERTABLE_FIRST_DRIVERSTATE_SLOT + i]);
3010         }
3011 }
3012
3013 static int ni_populate_mc_reg_table(struct radeon_device *rdev,
3014                                     struct radeon_ps *radeon_boot_state)
3015 {
3016         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
3017         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3018         struct ni_power_info *ni_pi = ni_get_pi(rdev);
3019         struct ni_ps *boot_state = ni_get_ps(radeon_boot_state);
3020         SMC_NIslands_MCRegisters *mc_reg_table = &ni_pi->smc_mc_reg_table;
3021
3022         memset(mc_reg_table, 0, sizeof(SMC_NIslands_MCRegisters));
3023
3024         rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_seq_index, 1);
3025
3026         ni_populate_mc_reg_addresses(rdev, mc_reg_table);
3027
3028         ni_convert_mc_reg_table_entry_to_smc(rdev, &boot_state->performance_levels[0],
3029                                              &mc_reg_table->data[0]);
3030
3031         ni_convert_mc_registers(&ni_pi->mc_reg_table.mc_reg_table_entry[0],
3032                                 &mc_reg_table->data[1],
3033                                 ni_pi->mc_reg_table.last,
3034                                 ni_pi->mc_reg_table.valid_flag);
3035
3036         ni_convert_mc_reg_table_to_smc(rdev, radeon_boot_state, mc_reg_table);
3037
3038         return rv770_copy_bytes_to_smc(rdev, eg_pi->mc_reg_table_start,
3039                                        (u8 *)mc_reg_table,
3040                                        sizeof(SMC_NIslands_MCRegisters),
3041                                        pi->sram_end);
3042 }
3043
3044 static int ni_upload_mc_reg_table(struct radeon_device *rdev,
3045                                   struct radeon_ps *radeon_new_state)
3046 {
3047         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
3048         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3049         struct ni_power_info *ni_pi = ni_get_pi(rdev);
3050         struct ni_ps *ni_new_state = ni_get_ps(radeon_new_state);
3051         SMC_NIslands_MCRegisters *mc_reg_table = &ni_pi->smc_mc_reg_table;
3052         u16 address;
3053
3054         memset(mc_reg_table, 0, sizeof(SMC_NIslands_MCRegisters));
3055
3056         ni_convert_mc_reg_table_to_smc(rdev, radeon_new_state, mc_reg_table);
3057
3058         address = eg_pi->mc_reg_table_start +
3059                 (u16)offsetof(SMC_NIslands_MCRegisters, data[NISLANDS_MCREGISTERTABLE_FIRST_DRIVERSTATE_SLOT]);
3060
3061         return rv770_copy_bytes_to_smc(rdev, address,
3062                                        (u8 *)&mc_reg_table->data[NISLANDS_MCREGISTERTABLE_FIRST_DRIVERSTATE_SLOT],
3063                                        sizeof(SMC_NIslands_MCRegisterSet) * ni_new_state->performance_level_count,
3064                                        pi->sram_end);
3065 }
3066
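/*
 * ni_init_driver_calculated_leakage_table() - fill the SMC leakage LUT by
 * evaluating the leakage coefficients for each supported VDDC level over
 * 8-degree temperature steps (t is in millidegrees C); unused voltage
 * columns are padded with the largest computed value.
 */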
3067 static int ni_init_driver_calculated_leakage_table(struct radeon_device *rdev,
3068                                                    PP_NIslands_CACTABLES *cac_tables)
3069 {
3070         struct ni_power_info *ni_pi = ni_get_pi(rdev);
3071         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3072         u32 leakage = 0;
3073         unsigned int i, j, table_size;
3074         s32 t;
3075         u32 smc_leakage, max_leakage = 0;
3076         u32 scaling_factor;
3077
3078         table_size = eg_pi->vddc_voltage_table.count;
3079
3080         if (SMC_NISLANDS_LKGE_LUT_NUM_OF_VOLT_ENTRIES < table_size)
3081                 table_size = SMC_NISLANDS_LKGE_LUT_NUM_OF_VOLT_ENTRIES;
3082
3083         scaling_factor = ni_get_smc_power_scaling_factor(rdev);
3084
3085         for (i = 0; i < SMC_NISLANDS_LKGE_LUT_NUM_OF_TEMP_ENTRIES; i++) {
3086                 for (j = 0; j < table_size; j++) {
3087                         t = (1000 * ((i + 1) * 8));
3088
3089                         if (t < ni_pi->cac_data.leakage_minimum_temperature)
3090                                 t = ni_pi->cac_data.leakage_minimum_temperature;
3091
3092                         ni_calculate_leakage_for_v_and_t(rdev,
3093                                                          &ni_pi->cac_data.leakage_coefficients,
3094                                                          eg_pi->vddc_voltage_table.entries[j].value,
3095                                                          t,
3096                                                          ni_pi->cac_data.i_leakage,
3097                                                          &leakage);
3098
3099                         smc_leakage = ni_scale_power_for_smc(leakage, scaling_factor) / 1000;
3100                         if (smc_leakage > max_leakage)
3101                                 max_leakage = smc_leakage;
3102
3103                         cac_tables->cac_lkge_lut[i][j] = cpu_to_be32(smc_leakage);
3104                 }
3105         }
3106
3107         for (j = table_size; j < SMC_NISLANDS_LKGE_LUT_NUM_OF_VOLT_ENTRIES; j++) {
3108                 for (i = 0; i < SMC_NISLANDS_LKGE_LUT_NUM_OF_TEMP_ENTRIES; i++)
3109                         cac_tables->cac_lkge_lut[i][j] = cpu_to_be32(max_leakage);
3110         }
3111         return 0;
3112 }
3113
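/*
 * ni_init_simplified_leakage_table() - fill the SMC leakage LUT straight
 * from the platform CAC leakage table, with no temperature dependence;
 * unused voltage columns are padded with the largest entry.
 */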
3114 static int ni_init_simplified_leakage_table(struct radeon_device *rdev,
3115                                             PP_NIslands_CACTABLES *cac_tables)
3116 {
3117         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3118         struct radeon_cac_leakage_table *leakage_table =
3119                 &rdev->pm.dpm.dyn_state.cac_leakage_table;
3120         u32 i, j, table_size;
3121         u32 smc_leakage, max_leakage = 0;
3122         u32 scaling_factor;
3123
3124         if (!leakage_table)
3125                 return -EINVAL;
3126
3127         table_size = leakage_table->count;
3128
3129         if (eg_pi->vddc_voltage_table.count != table_size)
3130                 table_size = (eg_pi->vddc_voltage_table.count < leakage_table->count) ?
3131                         eg_pi->vddc_voltage_table.count : leakage_table->count;
3132
3133         if (SMC_NISLANDS_LKGE_LUT_NUM_OF_VOLT_ENTRIES < table_size)
3134                 table_size = SMC_NISLANDS_LKGE_LUT_NUM_OF_VOLT_ENTRIES;
3135
3136         if (table_size == 0)
3137                 return -EINVAL;
3138
3139         scaling_factor = ni_get_smc_power_scaling_factor(rdev);
3140
3141         for (j = 0; j < table_size; j++) {
3142                 smc_leakage = leakage_table->entries[j].leakage;
3143
3144                 if (smc_leakage > max_leakage)
3145                         max_leakage = smc_leakage;
3146
3147                 for (i = 0; i < SMC_NISLANDS_LKGE_LUT_NUM_OF_TEMP_ENTRIES; i++)
3148                         cac_tables->cac_lkge_lut[i][j] =
3149                                 cpu_to_be32(ni_scale_power_for_smc(smc_leakage, scaling_factor));
3150         }
3151
3152         for (j = table_size; j < SMC_NISLANDS_LKGE_LUT_NUM_OF_VOLT_ENTRIES; j++) {
3153                 for (i = 0; i < SMC_NISLANDS_LKGE_LUT_NUM_OF_TEMP_ENTRIES; i++)
3154                         cac_tables->cac_lkge_lut[i][j] =
3155                                 cpu_to_be32(ni_scale_power_for_smc(max_leakage, scaling_factor));
3156         }
3157         return 0;
3158 }
3159
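/*
 * ni_initialize_smc_cac_tables() - program the CAC control weights and
 * upload the leakage/CAC tables to the SMC.  Failure is not fatal to DPM:
 * CAC and power containment are simply disabled and 0 is returned.
 */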
3160 static int ni_initialize_smc_cac_tables(struct radeon_device *rdev)
3161 {
3162         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
3163         struct ni_power_info *ni_pi = ni_get_pi(rdev);
3164         PP_NIslands_CACTABLES *cac_tables = NULL;
3165         int i, ret;
3166         u32 reg;
3167
3168         if (ni_pi->enable_cac == false)
3169                 return 0;
3170
3171         cac_tables = kzalloc(sizeof(PP_NIslands_CACTABLES), GFP_KERNEL);
3172         if (!cac_tables)
3173                 return -ENOMEM;
3174
3175         reg = RREG32(CG_CAC_CTRL) & ~(TID_CNT_MASK | TID_UNIT_MASK);
3176         reg |= (TID_CNT(ni_pi->cac_weights->tid_cnt) |
3177                 TID_UNIT(ni_pi->cac_weights->tid_unit));
3178         WREG32(CG_CAC_CTRL, reg);
3179
3180         for (i = 0; i < NISLANDS_DCCAC_MAX_LEVELS; i++)
3181                 ni_pi->dc_cac_table[i] = ni_pi->cac_weights->dc_cac[i];
3182
3183         for (i = 0; i < SMC_NISLANDS_BIF_LUT_NUM_OF_ENTRIES; i++)
3184                 cac_tables->cac_bif_lut[i] = ni_pi->cac_weights->pcie_cac[i];
3185
3186         ni_pi->cac_data.i_leakage = rdev->pm.dpm.cac_leakage;
3187         ni_pi->cac_data.pwr_const = 0;
3188         ni_pi->cac_data.dc_cac_value = ni_pi->dc_cac_table[NISLANDS_DCCAC_LEVEL_0];
3189         ni_pi->cac_data.bif_cac_value = 0;
3190         ni_pi->cac_data.mc_wr_weight = ni_pi->cac_weights->mc_write_weight;
3191         ni_pi->cac_data.mc_rd_weight = ni_pi->cac_weights->mc_read_weight;
3192         ni_pi->cac_data.allow_ovrflw = 0;
3193         ni_pi->cac_data.l2num_win_tdp = ni_pi->lta_window_size;
3194         ni_pi->cac_data.num_win_tdp = 0;
3195         ni_pi->cac_data.lts_truncate_n = ni_pi->lts_truncate;
3196
3197         if (ni_pi->driver_calculate_cac_leakage)
3198                 ret = ni_init_driver_calculated_leakage_table(rdev, cac_tables);
3199         else
3200                 ret = ni_init_simplified_leakage_table(rdev, cac_tables);
3201
3202         if (ret)
3203                 goto done_free;
3204
3205         cac_tables->pwr_const      = cpu_to_be32(ni_pi->cac_data.pwr_const);
3206         cac_tables->dc_cacValue    = cpu_to_be32(ni_pi->cac_data.dc_cac_value);
3207         cac_tables->bif_cacValue   = cpu_to_be32(ni_pi->cac_data.bif_cac_value);
3208         cac_tables->AllowOvrflw    = ni_pi->cac_data.allow_ovrflw;
3209         cac_tables->MCWrWeight     = ni_pi->cac_data.mc_wr_weight;
3210         cac_tables->MCRdWeight     = ni_pi->cac_data.mc_rd_weight;
3211         cac_tables->numWin_TDP     = ni_pi->cac_data.num_win_tdp;
3212         cac_tables->l2numWin_TDP   = ni_pi->cac_data.l2num_win_tdp;
3213         cac_tables->lts_truncate_n = ni_pi->cac_data.lts_truncate_n;
3214
3215         ret = rv770_copy_bytes_to_smc(rdev, ni_pi->cac_table_start, (u8 *)cac_tables,
3216                                       sizeof(PP_NIslands_CACTABLES), pi->sram_end);
3217
3218 done_free:
3219         if (ret) {
3220                 ni_pi->enable_cac = false;
3221                 ni_pi->enable_power_containment = false;
3222         }
3223
3224         kfree(cac_tables);
3225
3226         return 0;
3227 }
3228
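/*
 * ni_initialize_hardware_cac_manager() - load the per-block CAC weights
 * (TCP/TCC/CB/DB/SX/SPI/LDS/SC/BIF/CP/PA/VGT/DC/UVD/SQ plus the MC
 * read/write weights) into the CG_CAC_REGION_* and MC config registers.
 */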
3229 static int ni_initialize_hardware_cac_manager(struct radeon_device *rdev)
3230 {
3231         struct ni_power_info *ni_pi = ni_get_pi(rdev);
3232         u32 reg;
3233
3234         if (!ni_pi->enable_cac ||
3235             !ni_pi->cac_configuration_required)
3236                 return 0;
3237
3238         if (ni_pi->cac_weights == NULL)
3239                 return -EINVAL;
3240
3241         reg = RREG32_CG(CG_CAC_REGION_1_WEIGHT_0) & ~(WEIGHT_TCP_SIG0_MASK |
3242                                                       WEIGHT_TCP_SIG1_MASK |
3243                                                       WEIGHT_TA_SIG_MASK);
3244         reg |= (WEIGHT_TCP_SIG0(ni_pi->cac_weights->weight_tcp_sig0) |
3245                 WEIGHT_TCP_SIG1(ni_pi->cac_weights->weight_tcp_sig1) |
3246                 WEIGHT_TA_SIG(ni_pi->cac_weights->weight_ta_sig));
3247         WREG32_CG(CG_CAC_REGION_1_WEIGHT_0, reg);
3248
3249         reg = RREG32_CG(CG_CAC_REGION_1_WEIGHT_1) & ~(WEIGHT_TCC_EN0_MASK |
3250                                                       WEIGHT_TCC_EN1_MASK |
3251                                                       WEIGHT_TCC_EN2_MASK);
3252         reg |= (WEIGHT_TCC_EN0(ni_pi->cac_weights->weight_tcc_en0) |
3253                 WEIGHT_TCC_EN1(ni_pi->cac_weights->weight_tcc_en1) |
3254                 WEIGHT_TCC_EN2(ni_pi->cac_weights->weight_tcc_en2));
3255         WREG32_CG(CG_CAC_REGION_1_WEIGHT_1, reg);
3256
3257         reg = RREG32_CG(CG_CAC_REGION_2_WEIGHT_0) & ~(WEIGHT_CB_EN0_MASK |
3258                                                       WEIGHT_CB_EN1_MASK |
3259                                                       WEIGHT_CB_EN2_MASK |
3260                                                       WEIGHT_CB_EN3_MASK);
3261         reg |= (WEIGHT_CB_EN0(ni_pi->cac_weights->weight_cb_en0) |
3262                 WEIGHT_CB_EN1(ni_pi->cac_weights->weight_cb_en1) |
3263                 WEIGHT_CB_EN2(ni_pi->cac_weights->weight_cb_en2) |
3264                 WEIGHT_CB_EN3(ni_pi->cac_weights->weight_cb_en3));
3265         WREG32_CG(CG_CAC_REGION_2_WEIGHT_0, reg);
3266
3267         reg = RREG32_CG(CG_CAC_REGION_2_WEIGHT_1) & ~(WEIGHT_DB_SIG0_MASK |
3268                                                       WEIGHT_DB_SIG1_MASK |
3269                                                       WEIGHT_DB_SIG2_MASK |
3270                                                       WEIGHT_DB_SIG3_MASK);
3271         reg |= (WEIGHT_DB_SIG0(ni_pi->cac_weights->weight_db_sig0) |
3272                 WEIGHT_DB_SIG1(ni_pi->cac_weights->weight_db_sig1) |
3273                 WEIGHT_DB_SIG2(ni_pi->cac_weights->weight_db_sig2) |
3274                 WEIGHT_DB_SIG3(ni_pi->cac_weights->weight_db_sig3));
3275         WREG32_CG(CG_CAC_REGION_2_WEIGHT_1, reg);
3276
3277         reg = RREG32_CG(CG_CAC_REGION_2_WEIGHT_2) & ~(WEIGHT_SXM_SIG0_MASK |
3278                                                       WEIGHT_SXM_SIG1_MASK |
3279                                                       WEIGHT_SXM_SIG2_MASK |
3280                                                       WEIGHT_SXS_SIG0_MASK |
3281                                                       WEIGHT_SXS_SIG1_MASK);
3282         reg |= (WEIGHT_SXM_SIG0(ni_pi->cac_weights->weight_sxm_sig0) |
3283                 WEIGHT_SXM_SIG1(ni_pi->cac_weights->weight_sxm_sig1) |
3284                 WEIGHT_SXM_SIG2(ni_pi->cac_weights->weight_sxm_sig2) |
3285                 WEIGHT_SXS_SIG0(ni_pi->cac_weights->weight_sxs_sig0) |
3286                 WEIGHT_SXS_SIG1(ni_pi->cac_weights->weight_sxs_sig1));
3287         WREG32_CG(CG_CAC_REGION_2_WEIGHT_2, reg);
3288
3289         reg = RREG32_CG(CG_CAC_REGION_3_WEIGHT_0) & ~(WEIGHT_XBR_0_MASK |
3290                                                       WEIGHT_XBR_1_MASK |
3291                                                       WEIGHT_XBR_2_MASK |
3292                                                       WEIGHT_SPI_SIG0_MASK);
3293         reg |= (WEIGHT_XBR_0(ni_pi->cac_weights->weight_xbr_0) |
3294                 WEIGHT_XBR_1(ni_pi->cac_weights->weight_xbr_1) |
3295                 WEIGHT_XBR_2(ni_pi->cac_weights->weight_xbr_2) |
3296                 WEIGHT_SPI_SIG0(ni_pi->cac_weights->weight_spi_sig0));
3297         WREG32_CG(CG_CAC_REGION_3_WEIGHT_0, reg);
3298
3299         reg = RREG32_CG(CG_CAC_REGION_3_WEIGHT_1) & ~(WEIGHT_SPI_SIG1_MASK |
3300                                                       WEIGHT_SPI_SIG2_MASK |
3301                                                       WEIGHT_SPI_SIG3_MASK |
3302                                                       WEIGHT_SPI_SIG4_MASK |
3303                                                       WEIGHT_SPI_SIG5_MASK);
3304         reg |= (WEIGHT_SPI_SIG1(ni_pi->cac_weights->weight_spi_sig1) |
3305                 WEIGHT_SPI_SIG2(ni_pi->cac_weights->weight_spi_sig2) |
3306                 WEIGHT_SPI_SIG3(ni_pi->cac_weights->weight_spi_sig3) |
3307                 WEIGHT_SPI_SIG4(ni_pi->cac_weights->weight_spi_sig4) |
3308                 WEIGHT_SPI_SIG5(ni_pi->cac_weights->weight_spi_sig5));
3309         WREG32_CG(CG_CAC_REGION_3_WEIGHT_1, reg);
3310
3311         reg = RREG32_CG(CG_CAC_REGION_4_WEIGHT_0) & ~(WEIGHT_LDS_SIG0_MASK |
3312                                                       WEIGHT_LDS_SIG1_MASK |
3313                                                       WEIGHT_SC_MASK);
3314         reg |= (WEIGHT_LDS_SIG0(ni_pi->cac_weights->weight_lds_sig0) |
3315                 WEIGHT_LDS_SIG1(ni_pi->cac_weights->weight_lds_sig1) |
3316                 WEIGHT_SC(ni_pi->cac_weights->weight_sc));
3317         WREG32_CG(CG_CAC_REGION_4_WEIGHT_0, reg);
3318
3319         reg = RREG32_CG(CG_CAC_REGION_4_WEIGHT_1) & ~(WEIGHT_BIF_MASK |
3320                                                       WEIGHT_CP_MASK |
3321                                                       WEIGHT_PA_SIG0_MASK |
3322                                                       WEIGHT_PA_SIG1_MASK |
3323                                                       WEIGHT_VGT_SIG0_MASK);
3324         reg |= (WEIGHT_BIF(ni_pi->cac_weights->weight_bif) |
3325                 WEIGHT_CP(ni_pi->cac_weights->weight_cp) |
3326                 WEIGHT_PA_SIG0(ni_pi->cac_weights->weight_pa_sig0) |
3327                 WEIGHT_PA_SIG1(ni_pi->cac_weights->weight_pa_sig1) |
3328                 WEIGHT_VGT_SIG0(ni_pi->cac_weights->weight_vgt_sig0));
3329         WREG32_CG(CG_CAC_REGION_4_WEIGHT_1, reg);
3330
3331         reg = RREG32_CG(CG_CAC_REGION_4_WEIGHT_2) & ~(WEIGHT_VGT_SIG1_MASK |
3332                                                       WEIGHT_VGT_SIG2_MASK |
3333                                                       WEIGHT_DC_SIG0_MASK |
3334                                                       WEIGHT_DC_SIG1_MASK |
3335                                                       WEIGHT_DC_SIG2_MASK);
3336         reg |= (WEIGHT_VGT_SIG1(ni_pi->cac_weights->weight_vgt_sig1) |
3337                 WEIGHT_VGT_SIG2(ni_pi->cac_weights->weight_vgt_sig2) |
3338                 WEIGHT_DC_SIG0(ni_pi->cac_weights->weight_dc_sig0) |
3339                 WEIGHT_DC_SIG1(ni_pi->cac_weights->weight_dc_sig1) |
3340                 WEIGHT_DC_SIG2(ni_pi->cac_weights->weight_dc_sig2));
3341         WREG32_CG(CG_CAC_REGION_4_WEIGHT_2, reg);
3342
3343         reg = RREG32_CG(CG_CAC_REGION_4_WEIGHT_3) & ~(WEIGHT_DC_SIG3_MASK |
3344                                                       WEIGHT_UVD_SIG0_MASK |
3345                                                       WEIGHT_UVD_SIG1_MASK |
3346                                                       WEIGHT_SPARE0_MASK |
3347                                                       WEIGHT_SPARE1_MASK);
3348         reg |= (WEIGHT_DC_SIG3(ni_pi->cac_weights->weight_dc_sig3) |
3349                 WEIGHT_UVD_SIG0(ni_pi->cac_weights->weight_uvd_sig0) |
3350                 WEIGHT_UVD_SIG1(ni_pi->cac_weights->weight_uvd_sig1) |
3351                 WEIGHT_SPARE0(ni_pi->cac_weights->weight_spare0) |
3352                 WEIGHT_SPARE1(ni_pi->cac_weights->weight_spare1));
3353         WREG32_CG(CG_CAC_REGION_4_WEIGHT_3, reg);
3354
3355         reg = RREG32_CG(CG_CAC_REGION_5_WEIGHT_0) & ~(WEIGHT_SQ_VSP_MASK |
3356                                                       WEIGHT_SQ_VSP0_MASK);
3357         reg |= (WEIGHT_SQ_VSP(ni_pi->cac_weights->weight_sq_vsp) |
3358                 WEIGHT_SQ_VSP0(ni_pi->cac_weights->weight_sq_vsp0));
3359         WREG32_CG(CG_CAC_REGION_5_WEIGHT_0, reg);
3360
3361         reg = RREG32_CG(CG_CAC_REGION_5_WEIGHT_1) & ~(WEIGHT_SQ_GPR_MASK);
3362         reg |= WEIGHT_SQ_GPR(ni_pi->cac_weights->weight_sq_gpr);
3363         WREG32_CG(CG_CAC_REGION_5_WEIGHT_1, reg);
3364
3365         reg = RREG32_CG(CG_CAC_REGION_4_OVERRIDE_4) & ~(OVR_MODE_SPARE_0_MASK |
3366                                                         OVR_VAL_SPARE_0_MASK |
3367                                                         OVR_MODE_SPARE_1_MASK |
3368                                                         OVR_VAL_SPARE_1_MASK);
3369         reg |= (OVR_MODE_SPARE_0(ni_pi->cac_weights->ovr_mode_spare_0) |
3370                 OVR_VAL_SPARE_0(ni_pi->cac_weights->ovr_val_spare_0) |
3371                 OVR_MODE_SPARE_1(ni_pi->cac_weights->ovr_mode_spare_1) |
3372                 OVR_VAL_SPARE_1(ni_pi->cac_weights->ovr_val_spare_1));
3373         WREG32_CG(CG_CAC_REGION_4_OVERRIDE_4, reg);
3374
3375         reg = RREG32(SQ_CAC_THRESHOLD) & ~(VSP_MASK |
3376                                            VSP0_MASK |
3377                                            GPR_MASK);
3378         reg |= (VSP(ni_pi->cac_weights->vsp) |
3379                 VSP0(ni_pi->cac_weights->vsp0) |
3380                 GPR(ni_pi->cac_weights->gpr));
3381         WREG32(SQ_CAC_THRESHOLD, reg);
3382
3383         reg = (MCDW_WR_ENABLE |
3384                MCDX_WR_ENABLE |
3385                MCDY_WR_ENABLE |
3386                MCDZ_WR_ENABLE |
3387                INDEX(0x09D4));
3388         WREG32(MC_CG_CONFIG, reg);
3389
3390         reg = (READ_WEIGHT(ni_pi->cac_weights->mc_read_weight) |
3391                WRITE_WEIGHT(ni_pi->cac_weights->mc_write_weight) |
3392                ALLOW_OVERFLOW);
3393         WREG32(MC_CG_DATAPORT, reg);
3394
3395         return 0;
3396 }
3397
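/*
 * ni_enable_smc_cac() - enable or disable CAC collection in the SMC.  CAC
 * is never enabled for UVD states, and long-term-average support is dropped
 * if the SMC rejects the corresponding message.
 */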
3398 static int ni_enable_smc_cac(struct radeon_device *rdev,
3399                              struct radeon_ps *radeon_new_state,
3400                              bool enable)
3401 {
3402         struct ni_power_info *ni_pi = ni_get_pi(rdev);
3403         int ret = 0;
3404         PPSMC_Result smc_result;
3405
3406         if (ni_pi->enable_cac) {
3407                 if (enable) {
3408                         if (!r600_is_uvd_state(radeon_new_state->class, radeon_new_state->class2)) {
3409                                 smc_result = rv770_send_msg_to_smc(rdev, PPSMC_MSG_CollectCAC_PowerCorreln);
3410
3411                                 if (ni_pi->support_cac_long_term_average) {
3412                                         smc_result = rv770_send_msg_to_smc(rdev, PPSMC_CACLongTermAvgEnable);
3413                                         if (PPSMC_Result_OK != smc_result)
3414                                                 ni_pi->support_cac_long_term_average = false;
3415                                 }
3416
3417                                 smc_result = rv770_send_msg_to_smc(rdev, PPSMC_MSG_EnableCac);
3418                                 if (PPSMC_Result_OK != smc_result)
3419                                         ret = -EINVAL;
3420
3421                                 ni_pi->cac_enabled = (PPSMC_Result_OK == smc_result) ? true : false;
3422                         }
3423                 } else if (ni_pi->cac_enabled) {
3424                         smc_result = rv770_send_msg_to_smc(rdev, PPSMC_MSG_DisableCac);
3425
3426                         ni_pi->cac_enabled = false;
3427
3428                         if (ni_pi->support_cac_long_term_average) {
3429                                 smc_result = rv770_send_msg_to_smc(rdev, PPSMC_CACLongTermAvgDisable);
3430                                 if (PPSMC_Result_OK != smc_result)
3431                                         ni_pi->support_cac_long_term_average = false;
3432                         }
3433                 }
3434         }
3435
3436         return ret;
3437 }
3438
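/*
 * ni_pcie_performance_request() - forward a PCIe link-speed request to the
 * platform via ACPI (a no-op without CONFIG_ACPI), announcing the device to
 * the platform on first use.
 */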
3439 static int ni_pcie_performance_request(struct radeon_device *rdev,
3440                                        u8 perf_req, bool advertise)
3441 {
3442 #if defined(CONFIG_ACPI)
3443         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3444
3445         if ((perf_req == PCIE_PERF_REQ_PECI_GEN1) ||
3446             (perf_req == PCIE_PERF_REQ_PECI_GEN2)) {
3447                 if (eg_pi->pcie_performance_request_registered == false)
3448                         radeon_acpi_pcie_notify_device_ready(rdev);
3449                 eg_pi->pcie_performance_request_registered = true;
3450                 return radeon_acpi_pcie_performance_request(rdev, perf_req, advertise);
3451         } else if ((perf_req == PCIE_PERF_REQ_REMOVE_REGISTRY) &&
3452                    eg_pi->pcie_performance_request_registered) {
3453                 eg_pi->pcie_performance_request_registered = false;
3454                 return radeon_acpi_pcie_performance_request(rdev, perf_req, advertise);
3455         }
3456 #endif
3457         return 0;
3458 }
3459
3460 static int ni_advertise_gen2_capability(struct radeon_device *rdev)
3461 {
3462         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
3463         u32 tmp;
3464
3465         tmp = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
3466
3467         if ((tmp & LC_OTHER_SIDE_EVER_SENT_GEN2) &&
3468             (tmp & LC_OTHER_SIDE_SUPPORTS_GEN2))
3469                 pi->pcie_gen2 = true;
3470         else
3471                 pi->pcie_gen2 = false;
3472
3473         if (!pi->pcie_gen2)
3474                 ni_pcie_performance_request(rdev, PCIE_PERF_REQ_PECI_GEN2, true);
3475
3476         return 0;
3477 }
3478
3479 static void ni_enable_bif_dynamic_pcie_gen2(struct radeon_device *rdev,
3480                                             bool enable)
3481 {
3482         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
3483         u32 tmp, bif;
3484
3485         tmp = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
3486
3487         if ((tmp & LC_OTHER_SIDE_EVER_SENT_GEN2) &&
3488             (tmp & LC_OTHER_SIDE_SUPPORTS_GEN2)) {
3489                 if (enable) {
3490                         if (!pi->boot_in_gen2) {
3491                                 bif = RREG32(CG_BIF_REQ_AND_RSP) & ~CG_CLIENT_REQ_MASK;
3492                                 bif |= CG_CLIENT_REQ(0xd);
3493                                 WREG32(CG_BIF_REQ_AND_RSP, bif);
3494                         }
3495                         tmp &= ~LC_HW_VOLTAGE_IF_CONTROL_MASK;
3496                         tmp |= LC_HW_VOLTAGE_IF_CONTROL(1);
3497                         tmp |= LC_GEN2_EN_STRAP;
3498
3499                         tmp |= LC_CLR_FAILED_SPD_CHANGE_CNT;
3500                         WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, tmp);
3501                         udelay(10);
3502                         tmp &= ~LC_CLR_FAILED_SPD_CHANGE_CNT;
3503                         WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, tmp);
3504                 } else {
3505                         if (!pi->boot_in_gen2) {
3506                                 bif = RREG32(CG_BIF_REQ_AND_RSP) & ~CG_CLIENT_REQ_MASK;
3507                                 bif |= CG_CLIENT_REQ(0xd);
3508                                 WREG32(CG_BIF_REQ_AND_RSP, bif);
3509
3510                                 tmp &= ~LC_HW_VOLTAGE_IF_CONTROL_MASK;
3511                                 tmp &= ~LC_GEN2_EN_STRAP;
3512                         }
3513                         WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, tmp);
3514                 }
3515         }
3516 }
3517
3518 static void ni_enable_dynamic_pcie_gen2(struct radeon_device *rdev,
3519                                         bool enable)
3520 {
3521         ni_enable_bif_dynamic_pcie_gen2(rdev, enable);
3522
3523         if (enable)
3524                 WREG32_P(GENERAL_PWRMGT, ENABLE_GEN2PCIE, ~ENABLE_GEN2PCIE);
3525         else
3526                 WREG32_P(GENERAL_PWRMGT, 0, ~ENABLE_GEN2PCIE);
3527 }
3528
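/*
 * Reprogram the UVD clocks before the engine clock change only when the new
 * state lowers the top sclk; the mirror-image check in
 * ni_set_uvd_clock_after_set_eng_clock() handles the opposite case.
 */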
3529 void ni_set_uvd_clock_before_set_eng_clock(struct radeon_device *rdev,
3530                                            struct radeon_ps *new_ps,
3531                                            struct radeon_ps *old_ps)
3532 {
3533         struct ni_ps *new_state = ni_get_ps(new_ps);
3534         struct ni_ps *current_state = ni_get_ps(old_ps);
3535
3536         if ((new_ps->vclk == old_ps->vclk) &&
3537             (new_ps->dclk == old_ps->dclk))
3538                 return;
3539
3540         if (new_state->performance_levels[new_state->performance_level_count - 1].sclk >=
3541             current_state->performance_levels[current_state->performance_level_count - 1].sclk)
3542                 return;
3543
3544         radeon_set_uvd_clocks(rdev, new_ps->vclk, new_ps->dclk);
3545 }
3546
3547 void ni_set_uvd_clock_after_set_eng_clock(struct radeon_device *rdev,
3548                                           struct radeon_ps *new_ps,
3549                                           struct radeon_ps *old_ps)
3550 {
3551         struct ni_ps *new_state = ni_get_ps(new_ps);
3552         struct ni_ps *current_state = ni_get_ps(old_ps);
3553
3554         if ((new_ps->vclk == old_ps->vclk) &&
3555             (new_ps->dclk == old_ps->dclk))
3556                 return;
3557
3558         if (new_state->performance_levels[new_state->performance_level_count - 1].sclk <
3559             current_state->performance_levels[current_state->performance_level_count - 1].sclk)
3560                 return;
3561
3562         radeon_set_uvd_clocks(rdev, new_ps->vclk, new_ps->dclk);
3563 }
3564
3565 void ni_dpm_setup_asic(struct radeon_device *rdev)
3566 {
3567         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3568
3569         ni_read_clock_registers(rdev);
3570         btc_read_arb_registers(rdev);
3571         rv770_get_memory_type(rdev);
3572         if (eg_pi->pcie_performance_request)
3573                 ni_advertise_gen2_capability(rdev);
3574         rv770_get_pcie_gen2_status(rdev);
3575         rv770_enable_acpi_pm(rdev);
3576 }
3577
3578 void ni_update_current_ps(struct radeon_device *rdev,
3579                           struct radeon_ps *rps)
3580 {
3581         struct ni_ps *new_ps = ni_get_ps(rps);
3582         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3583         struct ni_power_info *ni_pi = ni_get_pi(rdev);
3584
3585         eg_pi->current_rps = *rps;
3586         ni_pi->current_ps = *new_ps;
3587         eg_pi->current_rps.ps_priv = &ni_pi->current_ps;
3588 }
3589
3590 void ni_update_requested_ps(struct radeon_device *rdev,
3591                             struct radeon_ps *rps)
3592 {
3593         struct ni_ps *new_ps = ni_get_ps(rps);
3594         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3595         struct ni_power_info *ni_pi = ni_get_pi(rdev);
3596
3597         eg_pi->requested_rps = *rps;
3598         ni_pi->requested_ps = *new_ps;
3599         eg_pi->requested_rps.ps_priv = &ni_pi->requested_ps;
3600 }
3601
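/*
 * ni_dpm_enable() - full DPM bring-up: default clock gating, voltage tables,
 * spread spectrum and thermal protection, SMC firmware upload, the state/
 * SPLL/ARB/MC/CAC/TDP tables, then start the SMC and DPM.
 */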
3602 int ni_dpm_enable(struct radeon_device *rdev)
3603 {
3604         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
3605         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3606         struct radeon_ps *boot_ps = rdev->pm.dpm.boot_ps;
3607         int ret;
3608
3609         if (pi->gfx_clock_gating)
3610                 ni_cg_clockgating_default(rdev);
3611         if (btc_dpm_enabled(rdev))
3612                 return -EINVAL;
3613         if (pi->mg_clock_gating)
3614                 ni_mg_clockgating_default(rdev);
3615         if (eg_pi->ls_clock_gating)
3616                 ni_ls_clockgating_default(rdev);
3617         if (pi->voltage_control) {
3618                 rv770_enable_voltage_control(rdev, true);
3619                 ret = cypress_construct_voltage_tables(rdev);
3620                 if (ret) {
3621                         DRM_ERROR("cypress_construct_voltage_tables failed\n");
3622                         return ret;
3623                 }
3624         }
3625         if (eg_pi->dynamic_ac_timing) {
3626                 ret = ni_initialize_mc_reg_table(rdev);
3627                 if (ret)
3628                         eg_pi->dynamic_ac_timing = false;
3629         }
3630         if (pi->dynamic_ss)
3631                 cypress_enable_spread_spectrum(rdev, true);
3632         if (pi->thermal_protection)
3633                 rv770_enable_thermal_protection(rdev, true);
3634         rv770_setup_bsp(rdev);
3635         rv770_program_git(rdev);
3636         rv770_program_tp(rdev);
3637         rv770_program_tpp(rdev);
3638         rv770_program_sstp(rdev);
3639         cypress_enable_display_gap(rdev);
3640         rv770_program_vc(rdev);
3641         if (pi->dynamic_pcie_gen2)
3642                 ni_enable_dynamic_pcie_gen2(rdev, true);
3643         ret = rv770_upload_firmware(rdev);
3644         if (ret) {
3645                 DRM_ERROR("rv770_upload_firmware failed\n");
3646                 return ret;
3647         }
3648         ret = ni_process_firmware_header(rdev);
3649         if (ret) {
3650                 DRM_ERROR("ni_process_firmware_header failed\n");
3651                 return ret;
3652         }
3653         ret = ni_initial_switch_from_arb_f0_to_f1(rdev);
3654         if (ret) {
3655                 DRM_ERROR("ni_initial_switch_from_arb_f0_to_f1 failed\n");
3656                 return ret;
3657         }
3658         ret = ni_init_smc_table(rdev);
3659         if (ret) {
3660                 DRM_ERROR("ni_init_smc_table failed\n");
3661                 return ret;
3662         }
3663         ret = ni_init_smc_spll_table(rdev);
3664         if (ret) {
3665                 DRM_ERROR("ni_init_smc_spll_table failed\n");
3666                 return ret;
3667         }
3668         ret = ni_init_arb_table_index(rdev);
3669         if (ret) {
3670                 DRM_ERROR("ni_init_arb_table_index failed\n");
3671                 return ret;
3672         }
3673         if (eg_pi->dynamic_ac_timing) {
3674                 ret = ni_populate_mc_reg_table(rdev, boot_ps);
3675                 if (ret) {
3676                         DRM_ERROR("ni_populate_mc_reg_table failed\n");
3677                         return ret;
3678                 }
3679         }
3680         ret = ni_initialize_smc_cac_tables(rdev);
3681         if (ret) {
3682                 DRM_ERROR("ni_initialize_smc_cac_tables failed\n");
3683                 return ret;
3684         }
3685         ret = ni_initialize_hardware_cac_manager(rdev);
3686         if (ret) {
3687                 DRM_ERROR("ni_initialize_hardware_cac_manager failed\n");
3688                 return ret;
3689         }
3690         ret = ni_populate_smc_tdp_limits(rdev, boot_ps);
3691         if (ret) {
3692                 DRM_ERROR("ni_populate_smc_tdp_limits failed\n");
3693                 return ret;
3694         }
3695         ni_program_response_times(rdev);
3696         r7xx_start_smc(rdev);
3697         ret = cypress_notify_smc_display_change(rdev, false);
3698         if (ret) {
3699                 DRM_ERROR("cypress_notify_smc_display_change failed\n");
3700                 return ret;
3701         }
3702         cypress_enable_sclk_control(rdev, true);
3703         if (eg_pi->memory_transition)
3704                 cypress_enable_mclk_control(rdev, true);
3705         cypress_start_dpm(rdev);
3706         if (pi->gfx_clock_gating)
3707                 ni_gfx_clockgating_enable(rdev, true);
3708         if (pi->mg_clock_gating)
3709                 ni_mg_clockgating_enable(rdev, true);
3710         if (eg_pi->ls_clock_gating)
3711                 ni_ls_clockgating_enable(rdev, true);
3712
3713         rv770_enable_auto_throttle_source(rdev, RADEON_DPM_AUTO_THROTTLE_SRC_THERMAL, true);
3714
3715         ni_update_current_ps(rdev, boot_ps);
3716
3717         return 0;
3718 }
3719
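/*
 * ni_dpm_disable() - tear down DPM: disable power containment, CAC, spread
 * spectrum, dynamic PCIe gen2 and clock gating, stop the SMC, reset to
 * defaults and fall back to the boot power state.
 */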
3720 void ni_dpm_disable(struct radeon_device *rdev)
3721 {
3722         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
3723         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3724         struct radeon_ps *boot_ps = rdev->pm.dpm.boot_ps;
3725
3726         if (!btc_dpm_enabled(rdev))
3727                 return;
3728         rv770_clear_vc(rdev);
3729         if (pi->thermal_protection)
3730                 rv770_enable_thermal_protection(rdev, false);
3731         ni_enable_power_containment(rdev, boot_ps, false);
3732         ni_enable_smc_cac(rdev, boot_ps, false);
3733         cypress_enable_spread_spectrum(rdev, false);
3734         rv770_enable_auto_throttle_source(rdev, RADEON_DPM_AUTO_THROTTLE_SRC_THERMAL, false);
3735         if (pi->dynamic_pcie_gen2)
3736                 ni_enable_dynamic_pcie_gen2(rdev, false);
3737
3738         if (rdev->irq.installed &&
3739             r600_is_internal_thermal_sensor(rdev->pm.int_thermal_type)) {
3740                 rdev->irq.dpm_thermal = false;
3741                 radeon_irq_set(rdev);
3742         }
3743
3744         if (pi->gfx_clock_gating)
3745                 ni_gfx_clockgating_enable(rdev, false);
3746         if (pi->mg_clock_gating)
3747                 ni_mg_clockgating_enable(rdev, false);
3748         if (eg_pi->ls_clock_gating)
3749                 ni_ls_clockgating_enable(rdev, false);
3750         ni_stop_dpm(rdev);
3751         btc_reset_to_default(rdev);
3752         ni_stop_smc(rdev);
3753         ni_force_switch_to_arb_f0(rdev);
3754
3755         ni_update_current_ps(rdev, boot_ps);
3756 }
3757
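/*
 * ni_power_control_set_level() - refresh the SMC TDP limits for the
 * requested state while the SMC is halted, then re-apply the sw state.
 */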
3758 static int ni_power_control_set_level(struct radeon_device *rdev)
3759 {
3760         struct radeon_ps *new_ps = rdev->pm.dpm.requested_ps;
3761         int ret;
3762
3763         ret = ni_restrict_performance_levels_before_switch(rdev);
3764         if (ret)
3765                 return ret;
3766         ret = rv770_halt_smc(rdev);
3767         if (ret)
3768                 return ret;
3769         ret = ni_populate_smc_tdp_limits(rdev, new_ps);
3770         if (ret)
3771                 return ret;
3772         ret = rv770_resume_smc(rdev);
3773         if (ret)
3774                 return ret;
3775         ret = rv770_set_sw_state(rdev);
3776         if (ret)
3777                 return ret;
3778
3779         return 0;
3780 }
3781
3782 int ni_dpm_pre_set_power_state(struct radeon_device *rdev)
3783 {
3784         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3785         struct radeon_ps requested_ps = *rdev->pm.dpm.requested_ps;
3786         struct radeon_ps *new_ps = &requested_ps;
3787
3788         ni_update_requested_ps(rdev, new_ps);
3789
3790         ni_apply_state_adjust_rules(rdev, &eg_pi->requested_rps);
3791
3792         return 0;
3793 }
3794
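/*
 * ni_dpm_set_power_state() - full state-switch sequence: cap performance
 * levels, drop CAC and power containment, halt the SMC, upload the new sw
 * state and MC/AC-timing tables, reprogram memory timings, resume the SMC,
 * switch states, re-enable CAC and power containment, and update the TDP
 * limits.
 */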
3795 int ni_dpm_set_power_state(struct radeon_device *rdev)
3796 {
3797         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3798         struct radeon_ps *new_ps = &eg_pi->requested_rps;
3799         struct radeon_ps *old_ps = &eg_pi->current_rps;
3800         int ret;
3801
3802         ret = ni_restrict_performance_levels_before_switch(rdev);
3803         if (ret) {
3804                 DRM_ERROR("ni_restrict_performance_levels_before_switch failed\n");
3805                 return ret;
3806         }
3807         ni_set_uvd_clock_before_set_eng_clock(rdev, new_ps, old_ps);
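        /* disable power containment and cac while the new state is programmed */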
3808         ret = ni_enable_power_containment(rdev, new_ps, false);
3809         if (ret) {
3810                 DRM_ERROR("ni_enable_power_containment failed\n");
3811                 return ret;
3812         }
3813         ret = ni_enable_smc_cac(rdev, new_ps, false);
3814         if (ret) {
3815                 DRM_ERROR("ni_enable_smc_cac failed\n");
3816                 return ret;
3817         }
3818         ret = rv770_halt_smc(rdev);
3819         if (ret) {
3820                 DRM_ERROR("rv770_halt_smc failed\n");
3821                 return ret;
3822         }
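        /* upload the new state tables while the SMC is halted */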
3823         if (eg_pi->smu_uvd_hs)
3824                 btc_notify_uvd_to_smc(rdev, new_ps);
3825         ret = ni_upload_sw_state(rdev, new_ps);
3826         if (ret) {
3827                 DRM_ERROR("ni_upload_sw_state failed\n");
3828                 return ret;
3829         }
3830         if (eg_pi->dynamic_ac_timing) {
3831                 ret = ni_upload_mc_reg_table(rdev, new_ps);
3832                 if (ret) {
3833                         DRM_ERROR("ni_upload_mc_reg_table failed\n");
3834                         return ret;
3835                 }
3836         }
3837         ret = ni_program_memory_timing_parameters(rdev, new_ps);
3838         if (ret) {
3839                 DRM_ERROR("ni_program_memory_timing_parameters failed\n");
3840                 return ret;
3841         }
3842         ret = rv770_resume_smc(rdev);
3843         if (ret) {
3844                 DRM_ERROR("rv770_resume_smc failed\n");
3845                 return ret;
3846         }
3847         ret = rv770_set_sw_state(rdev);
3848         if (ret) {
3849                 DRM_ERROR("rv770_set_sw_state failed\n");
3850                 return ret;
3851         }
3852         ni_set_uvd_clock_after_set_eng_clock(rdev, new_ps, old_ps);
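        /* re-enable cac and power containment for the new state */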
3853         ret = ni_enable_smc_cac(rdev, new_ps, true);
3854         if (ret) {
3855                 DRM_ERROR("ni_enable_smc_cac failed\n");
3856                 return ret;
3857         }
3858         ret = ni_enable_power_containment(rdev, new_ps, true);
3859         if (ret) {
3860                 DRM_ERROR("ni_enable_power_containment failed\n");
3861                 return ret;
3862         }
3863
3864         /* update tdp */
3865         ret = ni_power_control_set_level(rdev);
3866         if (ret) {
3867                 DRM_ERROR("ni_power_control_set_level failed\n");
3868                 return ret;
3869         }
3870
3871         return 0;
3872 }
3873
3874 void ni_dpm_post_set_power_state(struct radeon_device *rdev)
3875 {
3876         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3877         struct radeon_ps *new_ps = &eg_pi->requested_rps;
3878
3879         ni_update_current_ps(rdev, new_ps);
3880 }
3881
3882 void ni_dpm_reset_asic(struct radeon_device *rdev)
3883 {
3884         ni_restrict_performance_levels_before_switch(rdev);
3885         rv770_set_boot_state(rdev);
3886 }
3887
3888 union power_info {
3889         struct _ATOM_POWERPLAY_INFO info;
3890         struct _ATOM_POWERPLAY_INFO_V2 info_2;
3891         struct _ATOM_POWERPLAY_INFO_V3 info_3;
3892         struct _ATOM_PPLIB_POWERPLAYTABLE pplib;
3893         struct _ATOM_PPLIB_POWERPLAYTABLE2 pplib2;
3894         struct _ATOM_PPLIB_POWERPLAYTABLE3 pplib3;
3895 };
3896
3897 union pplib_clock_info {
3898         struct _ATOM_PPLIB_R600_CLOCK_INFO r600;
3899         struct _ATOM_PPLIB_RS780_CLOCK_INFO rs780;
3900         struct _ATOM_PPLIB_EVERGREEN_CLOCK_INFO evergreen;
3901         struct _ATOM_PPLIB_SUMO_CLOCK_INFO sumo;
3902 };
3903
3904 union pplib_power_state {
3905         struct _ATOM_PPLIB_STATE v1;
3906         struct _ATOM_PPLIB_STATE_V2 v2;
3907 };
3908
3909 static void ni_parse_pplib_non_clock_info(struct radeon_device *rdev,
3910                                           struct radeon_ps *rps,
3911                                           struct _ATOM_PPLIB_NONCLOCK_INFO *non_clock_info,
3912                                           u8 table_rev)
3913 {
3914         rps->caps = le32_to_cpu(non_clock_info->ulCapsAndSettings);
3915         rps->class = le16_to_cpu(non_clock_info->usClassification);
3916         rps->class2 = le16_to_cpu(non_clock_info->usClassification2);
3917
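        /* newer non-clock info tables carry explicit uvd clocks */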
3918         if (ATOM_PPLIB_NONCLOCKINFO_VER1 < table_rev) {
3919                 rps->vclk = le32_to_cpu(non_clock_info->ulVCLK);
3920                 rps->dclk = le32_to_cpu(non_clock_info->ulDCLK);
3921         } else if (r600_is_uvd_state(rps->class, rps->class2)) {
3922                 rps->vclk = RV770_DEFAULT_VCLK_FREQ;
3923                 rps->dclk = RV770_DEFAULT_DCLK_FREQ;
3924         } else {
3925                 rps->vclk = 0;
3926                 rps->dclk = 0;
3927         }
3928
3929         if (rps->class & ATOM_PPLIB_CLASSIFICATION_BOOT)
3930                 rdev->pm.dpm.boot_ps = rps;
3931         if (rps->class & ATOM_PPLIB_CLASSIFICATION_UVDSTATE)
3932                 rdev->pm.dpm.uvd_ps = rps;
3933 }
3934
3935 static void ni_parse_pplib_clock_info(struct radeon_device *rdev,
3936                                       struct radeon_ps *rps, int index,
3937                                       union pplib_clock_info *clock_info)
3938 {
3939         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
3940         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3941         struct ni_ps *ps = ni_get_ps(rps);
3942         u16 vddc;
3943         struct rv7xx_pl *pl = &ps->performance_levels[index];
3944
3945         ps->performance_level_count = index + 1;
3946
3947         pl->sclk = le16_to_cpu(clock_info->evergreen.usEngineClockLow);
3948         pl->sclk |= clock_info->evergreen.ucEngineClockHigh << 16;
3949         pl->mclk = le16_to_cpu(clock_info->evergreen.usMemoryClockLow);
3950         pl->mclk |= clock_info->evergreen.ucMemoryClockHigh << 16;
3951
3952         pl->vddc = le16_to_cpu(clock_info->evergreen.usVDDC);
3953         pl->vddci = le16_to_cpu(clock_info->evergreen.usVDDCI);
3954         pl->flags = le32_to_cpu(clock_info->evergreen.ulFlags);
3955
3956         /* patch up vddc if necessary */
3957         if (pl->vddc == 0xff01) {
3958                 if (radeon_atom_get_max_vddc(rdev, 0, 0, &vddc) == 0)
3959                         pl->vddc = vddc;
3960         }
3961
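        /* remember the voltages and pcie gen2 capability of the ACPI state */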
3962         if (rps->class & ATOM_PPLIB_CLASSIFICATION_ACPI) {
3963                 pi->acpi_vddc = pl->vddc;
3964                 eg_pi->acpi_vddci = pl->vddci;
3965                 if (ps->performance_levels[0].flags & ATOM_PPLIB_R600_FLAGS_PCIEGEN2)
3966                         pi->acpi_pcie_gen2 = true;
3967                 else
3968                         pi->acpi_pcie_gen2 = false;
3969         }
3970
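        /* note the performance level to use for ULV, if the table provides one */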
3971         if (rps->class2 & ATOM_PPLIB_CLASSIFICATION2_ULV) {
3972                 eg_pi->ulv.supported = true;
3973                 eg_pi->ulv.pl = pl;
3974         }
3975
3976         if (pi->min_vddc_in_table > pl->vddc)
3977                 pi->min_vddc_in_table = pl->vddc;
3978
3979         if (pi->max_vddc_in_table < pl->vddc)
3980                 pi->max_vddc_in_table = pl->vddc;
3981
3982         /* patch up boot state */
3983         if (rps->class & ATOM_PPLIB_CLASSIFICATION_BOOT) {
3984                 u16 vddc, vddci, mvdd;
3985                 radeon_atombios_get_default_voltages(rdev, &vddc, &vddci, &mvdd);
3986                 pl->mclk = rdev->clock.default_mclk;
3987                 pl->sclk = rdev->clock.default_sclk;
3988                 pl->vddc = vddc;
3989                 pl->vddci = vddci;
3990         }
3991
3992         if ((rps->class & ATOM_PPLIB_CLASSIFICATION_UI_MASK) ==
3993             ATOM_PPLIB_CLASSIFICATION_UI_PERFORMANCE) {
3994                 rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.sclk = pl->sclk;
3995                 rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.mclk = pl->mclk;
3996                 rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.vddc = pl->vddc;
3997                 rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.vddci = pl->vddci;
3998         }
3999 }
4000
4001 static int ni_parse_power_table(struct radeon_device *rdev)
4002 {
4003         struct radeon_mode_info *mode_info = &rdev->mode_info;
4004         struct _ATOM_PPLIB_NONCLOCK_INFO *non_clock_info;
4005         union pplib_power_state *power_state;
4006         int i, j;
4007         union pplib_clock_info *clock_info;
4008         union power_info *power_info;
4009         int index = GetIndexIntoMasterTable(DATA, PowerPlayInfo);
4010         u16 data_offset;
4011         u8 frev, crev;
4012         struct ni_ps *ps;
4013
4014         if (!atom_parse_data_header(mode_info->atom_context, index, NULL,
4015                                    &frev, &crev, &data_offset))
4016                 return -EINVAL;
4017         power_info = (union power_info *)(mode_info->atom_context->bios + data_offset);
4018
4019         rdev->pm.dpm.ps = kzalloc(sizeof(struct radeon_ps) *
4020                                   power_info->pplib.ucNumStates, GFP_KERNEL);
4021         if (!rdev->pm.dpm.ps)
4022                 return -ENOMEM;
4023         rdev->pm.dpm.platform_caps = le32_to_cpu(power_info->pplib.ulPlatformCaps);
4024         rdev->pm.dpm.backbias_response_time = le16_to_cpu(power_info->pplib.usBackbiasTime);
4025         rdev->pm.dpm.voltage_response_time = le16_to_cpu(power_info->pplib.usVoltageTime);
4026
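        /* translate each bios power state into a driver power state */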
4027         for (i = 0; i < power_info->pplib.ucNumStates; i++) {
4028                 power_state = (union pplib_power_state *)
4029                         (mode_info->atom_context->bios + data_offset +
4030                          le16_to_cpu(power_info->pplib.usStateArrayOffset) +
4031                          i * power_info->pplib.ucStateEntrySize);
4032                 non_clock_info = (struct _ATOM_PPLIB_NONCLOCK_INFO *)
4033                         (mode_info->atom_context->bios + data_offset +
4034                          le16_to_cpu(power_info->pplib.usNonClockInfoArrayOffset) +
4035                          (power_state->v1.ucNonClockStateIndex *
4036                           power_info->pplib.ucNonClockSize));
4037                 if (power_info->pplib.ucStateEntrySize - 1) {
4038                         u8 *idx;
4039                         ps = kzalloc(sizeof(struct ni_ps), GFP_KERNEL);
                        if (ps == NULL) {
                                /* free the states allocated so far */
                                while (--i >= 0)
                                        kfree(rdev->pm.dpm.ps[i].ps_priv);
                                kfree(rdev->pm.dpm.ps);
                                rdev->pm.dpm.ps = NULL;
                                return -ENOMEM;
                        }
4044                         rdev->pm.dpm.ps[i].ps_priv = ps;
4045                         ni_parse_pplib_non_clock_info(rdev, &rdev->pm.dpm.ps[i],
4046                                                          non_clock_info,
4047                                                          power_info->pplib.ucNonClockSize);
4048                         idx = (u8 *)&power_state->v1.ucClockStateIndices[0];
4049                         for (j = 0; j < (power_info->pplib.ucStateEntrySize - 1); j++) {
4050                                 clock_info = (union pplib_clock_info *)
4051                                         (mode_info->atom_context->bios + data_offset +
4052                                          le16_to_cpu(power_info->pplib.usClockInfoArrayOffset) +
4053                                          (idx[j] * power_info->pplib.ucClockInfoSize));
4054                                 ni_parse_pplib_clock_info(rdev,
4055                                                           &rdev->pm.dpm.ps[i], j,
4056                                                           clock_info);
4057                         }
4058                 }
4059         }
4060         rdev->pm.dpm.num_ps = power_info->pplib.ucNumStates;
4061         return 0;
4062 }
4063
/*
 * ni_dpm_fini() is defined later in this file; declare it here so the
 * init error paths below can use it to unwind partial allocations.
 */
void ni_dpm_fini(struct radeon_device *rdev);

int ni_dpm_init(struct radeon_device *rdev)
4065 {
4066         struct rv7xx_power_info *pi;
4067         struct evergreen_power_info *eg_pi;
4068         struct ni_power_info *ni_pi;
4069         struct atom_clock_dividers dividers;
4070         int ret;
4071
4072         ni_pi = kzalloc(sizeof(struct ni_power_info), GFP_KERNEL);
4073         if (ni_pi == NULL)
4074                 return -ENOMEM;
4075         rdev->pm.dpm.priv = ni_pi;
4076         eg_pi = &ni_pi->eg;
4077         pi = &eg_pi->rv7xx;
4078
4079         rv770_get_max_vddc(rdev);
4080
4081         eg_pi->ulv.supported = false;
4082         pi->acpi_vddc = 0;
4083         eg_pi->acpi_vddci = 0;
4084         pi->min_vddc_in_table = 0;
4085         pi->max_vddc_in_table = 0;
4086
4087         ret = ni_parse_power_table(rdev);
        if (ret) {
                ni_dpm_fini(rdev);
                return ret;
        }
4090         ret = r600_parse_extended_power_table(rdev);
        if (ret) {
                ni_dpm_fini(rdev);
                return ret;
        }
4093
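        /* hardcoded vddc vs. dispclk dependency table */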
4094         rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries =
4095                 kzalloc(4 * sizeof(struct radeon_clock_voltage_dependency_entry), GFP_KERNEL);
4096         if (!rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries) {
                ni_dpm_fini(rdev);
4098                 return -ENOMEM;
4099         }
4100         rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.count = 4;
4101         rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[0].clk = 0;
4102         rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[0].v = 0;
4103         rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[1].clk = 36000;
4104         rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[1].v = 720;
4105         rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[2].clk = 54000;
4106         rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[2].v = 810;
4107         rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[3].clk = 72000;
4108         rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[3].v = 900;
4109
4110         ni_patch_dependency_tables_based_on_leakage(rdev);
4111
4112         if (rdev->pm.dpm.voltage_response_time == 0)
4113                 rdev->pm.dpm.voltage_response_time = R600_VOLTAGERESPONSETIME_DFLT;
4114         if (rdev->pm.dpm.backbias_response_time == 0)
4115                 rdev->pm.dpm.backbias_response_time = R600_BACKBIASRESPONSETIME_DFLT;
4116
4117         ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
4118                                              0, false, &dividers);
4119         if (ret)
4120                 pi->ref_div = dividers.ref_div + 1;
4121         else
4122                 pi->ref_div = R600_REFERENCEDIVIDER_DFLT;
4123
4124         pi->rlp = RV770_RLP_DFLT;
4125         pi->rmp = RV770_RMP_DFLT;
4126         pi->lhp = RV770_LHP_DFLT;
4127         pi->lmp = RV770_LMP_DFLT;
4128
4129         eg_pi->ats[0].rlp = RV770_RLP_DFLT;
4130         eg_pi->ats[0].rmp = RV770_RMP_DFLT;
4131         eg_pi->ats[0].lhp = RV770_LHP_DFLT;
4132         eg_pi->ats[0].lmp = RV770_LMP_DFLT;
4133
4134         eg_pi->ats[1].rlp = BTC_RLP_UVD_DFLT;
4135         eg_pi->ats[1].rmp = BTC_RMP_UVD_DFLT;
4136         eg_pi->ats[1].lhp = BTC_LHP_UVD_DFLT;
4137         eg_pi->ats[1].lmp = BTC_LMP_UVD_DFLT;
4138
4139         eg_pi->smu_uvd_hs = true;
4140
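        /* mclk switching thresholds; device 0x6707 uses higher values */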
4141         if (rdev->pdev->device == 0x6707) {
4142                 pi->mclk_strobe_mode_threshold = 55000;
4143                 pi->mclk_edc_enable_threshold = 55000;
4144                 eg_pi->mclk_edc_wr_enable_threshold = 55000;
4145         } else {
4146                 pi->mclk_strobe_mode_threshold = 40000;
4147                 pi->mclk_edc_enable_threshold = 40000;
4148                 eg_pi->mclk_edc_wr_enable_threshold = 40000;
4149         }
4150         ni_pi->mclk_rtt_mode_threshold = eg_pi->mclk_edc_wr_enable_threshold;
4151
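        /* check which voltage rails are gpio controlled */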
4152         pi->voltage_control =
4153                 radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_VDDC, 0);
4154
4155         pi->mvdd_control =
4156                 radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_MVDDC, 0);
4157
4158         eg_pi->vddci_control =
4159                 radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_VDDCI, 0);
4160
4161         rv770_get_engine_memory_ss(rdev);
4162
4163         pi->asi = RV770_ASI_DFLT;
4164         pi->pasi = CYPRESS_HASI_DFLT;
4165         pi->vrc = CYPRESS_VRC_DFLT;
4166
4167         pi->power_gating = false;
4168
4169         pi->gfx_clock_gating = true;
4170
4171         pi->mg_clock_gating = true;
4172         pi->mgcgtssm = true;
4173         eg_pi->ls_clock_gating = false;
4174         eg_pi->sclk_deep_sleep = false;
4175
4176         pi->dynamic_pcie_gen2 = true;
4177
4178         if (rdev->pm.int_thermal_type != THERMAL_TYPE_NONE)
4179                 pi->thermal_protection = true;
4180         else
4181                 pi->thermal_protection = false;
4182
4183         pi->display_gap = true;
4184
4185         pi->dcodt = true;
4186
4187         pi->ulps = true;
4188
4189         eg_pi->dynamic_ac_timing = true;
4190         eg_pi->abm = true;
4191         eg_pi->mcls = true;
4192         eg_pi->light_sleep = true;
4193         eg_pi->memory_transition = true;
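        /* pcie performance requests require ACPI support */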
4194 #if defined(CONFIG_ACPI)
4195         eg_pi->pcie_performance_request =
4196                 radeon_acpi_is_pcie_performance_request_supported(rdev);
4197 #else
4198         eg_pi->pcie_performance_request = false;
4199 #endif
4200
4201         eg_pi->dll_default_on = false;
4202
4203         eg_pi->sclk_deep_sleep = false;
4204
4205         pi->mclk_stutter_mode_threshold = 0;
4206
4207         pi->sram_end = SMC_RAM_END;
4208
4209         rdev->pm.dpm.dyn_state.mclk_sclk_ratio = 3;
4210         rdev->pm.dpm.dyn_state.vddc_vddci_delta = 200;
4211         rdev->pm.dpm.dyn_state.min_vddc_for_pcie_gen2 = 900;
4212         rdev->pm.dpm.dyn_state.valid_sclk_values.count = ARRAY_SIZE(btc_valid_sclk);
4213         rdev->pm.dpm.dyn_state.valid_sclk_values.values = btc_valid_sclk;
4214         rdev->pm.dpm.dyn_state.valid_mclk_values.count = 0;
4215         rdev->pm.dpm.dyn_state.valid_mclk_values.values = NULL;
4216         rdev->pm.dpm.dyn_state.sclk_mclk_delta = 12500;
4217
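        /* cac leakage coefficients */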
4218         ni_pi->cac_data.leakage_coefficients.at = 516;
4219         ni_pi->cac_data.leakage_coefficients.bt = 18;
4220         ni_pi->cac_data.leakage_coefficients.av = 51;
4221         ni_pi->cac_data.leakage_coefficients.bv = 2957;
4222
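        /* pick the cac weight table based on the device id */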
4223         switch (rdev->pdev->device) {
4224         case 0x6700:
4225         case 0x6701:
4226         case 0x6702:
4227         case 0x6703:
4228         case 0x6718:
4229                 ni_pi->cac_weights = &cac_weights_cayman_xt;
4230                 break;
4231         case 0x6705:
4232         case 0x6719:
4233         case 0x671D:
4234         case 0x671C:
4235         default:
4236                 ni_pi->cac_weights = &cac_weights_cayman_pro;
4237                 break;
4238         case 0x6704:
4239         case 0x6706:
4240         case 0x6707:
4241         case 0x6708:
4242         case 0x6709:
4243                 ni_pi->cac_weights = &cac_weights_cayman_le;
4244                 break;
4245         }
4246
4247         if (ni_pi->cac_weights->enable_power_containment_by_default) {
4248                 ni_pi->enable_power_containment = true;
4249                 ni_pi->enable_cac = true;
4250                 ni_pi->enable_sq_ramping = true;
4251         } else {
4252                 ni_pi->enable_power_containment = false;
4253                 ni_pi->enable_cac = false;
4254                 ni_pi->enable_sq_ramping = false;
4255         }
4256
4257         ni_pi->driver_calculate_cac_leakage = false;
4258         ni_pi->cac_configuration_required = true;
4259
4260         if (ni_pi->cac_configuration_required) {
4261                 ni_pi->support_cac_long_term_average = true;
4262                 ni_pi->lta_window_size = ni_pi->cac_weights->l2_lta_window_size;
4263                 ni_pi->lts_truncate = ni_pi->cac_weights->lts_truncate;
4264         } else {
4265                 ni_pi->support_cac_long_term_average = false;
4266                 ni_pi->lta_window_size = 0;
4267                 ni_pi->lts_truncate = 0;
4268         }
4269
4270         ni_pi->use_power_boost_limit = true;
4271
4272         /* make sure dc limits are valid */
4273         if ((rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc.sclk == 0) ||
4274             (rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc.mclk == 0))
4275                 rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc =
4276                         rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac;
4277
4278         return 0;
4279 }
4280
4281 void ni_dpm_fini(struct radeon_device *rdev)
4282 {
4283         int i;
4284
4285         for (i = 0; i < rdev->pm.dpm.num_ps; i++) {
4286                 kfree(rdev->pm.dpm.ps[i].ps_priv);
4287         }
4288         kfree(rdev->pm.dpm.ps);
4289         kfree(rdev->pm.dpm.priv);
4290         kfree(rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries);
4291         r600_free_extended_power_table(rdev);
4292 }
4293
4294 void ni_dpm_print_power_state(struct radeon_device *rdev,
4295                               struct radeon_ps *rps)
4296 {
4297         struct ni_ps *ps = ni_get_ps(rps);
4298         struct rv7xx_pl *pl;
4299         int i;
4300
4301         r600_dpm_print_class_info(rps->class, rps->class2);
4302         r600_dpm_print_cap_info(rps->caps);
4303         printk("\tuvd    vclk: %d dclk: %d\n", rps->vclk, rps->dclk);
4304         for (i = 0; i < ps->performance_level_count; i++) {
4305                 pl = &ps->performance_levels[i];
4306                 if (rdev->family >= CHIP_TAHITI)
4307                         printk("\t\tpower level %d    sclk: %u mclk: %u vddc: %u vddci: %u pcie gen: %u\n",
4308                                i, pl->sclk, pl->mclk, pl->vddc, pl->vddci, pl->pcie_gen + 1);
4309                 else
4310                         printk("\t\tpower level %d    sclk: %u mclk: %u vddc: %u vddci: %u\n",
4311                                i, pl->sclk, pl->mclk, pl->vddc, pl->vddci);
4312         }
4313         r600_dpm_print_ps_status(rdev, rps);
4314 }
4315
4316 void ni_dpm_debugfs_print_current_performance_level(struct radeon_device *rdev,
4317                                                     struct seq_file *m)
4318 {
4319         struct radeon_ps *rps = rdev->pm.dpm.current_ps;
4320         struct ni_ps *ps = ni_get_ps(rps);
4321         struct rv7xx_pl *pl;
4322         u32 current_index =
4323                 (RREG32(TARGET_AND_CURRENT_PROFILE_INDEX) & CURRENT_STATE_INDEX_MASK) >>
4324                 CURRENT_STATE_INDEX_SHIFT;
4325
4326         if (current_index >= ps->performance_level_count) {
4327                 seq_printf(m, "invalid dpm profile %d\n", current_index);
4328         } else {
4329                 pl = &ps->performance_levels[current_index];
4330                 seq_printf(m, "uvd    vclk: %d dclk: %d\n", rps->vclk, rps->dclk);
4331                 seq_printf(m, "power level %d    sclk: %u mclk: %u vddc: %u vddci: %u\n",
4332                            current_index, pl->sclk, pl->mclk, pl->vddc, pl->vddci);
4333         }
4334 }
4335
4336 u32 ni_dpm_get_sclk(struct radeon_device *rdev, bool low)
4337 {
4338         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
4339         struct ni_ps *requested_state = ni_get_ps(&eg_pi->requested_rps);
4340
4341         if (low)
4342                 return requested_state->performance_levels[0].sclk;
4343         else
4344                 return requested_state->performance_levels[requested_state->performance_level_count - 1].sclk;
4345 }
4346
4347 u32 ni_dpm_get_mclk(struct radeon_device *rdev, bool low)
4348 {
4349         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
4350         struct ni_ps *requested_state = ni_get_ps(&eg_pi->requested_rps);
4351
4352         if (low)
4353                 return requested_state->performance_levels[0].mclk;
4354         else
4355                 return requested_state->performance_levels[requested_state->performance_level_count - 1].mclk;
4356 }
4357