ec31a82208367c74c0fcdf9c8df394283cda1af7
[firefly-linux-kernel-4.4.55.git] / drivers / gpu / drm / radeon / evergreen.c
1 /*
2  * Copyright 2010 Advanced Micro Devices, Inc.
3  *
4  * Permission is hereby granted, free of charge, to any person obtaining a
5  * copy of this software and associated documentation files (the "Software"),
6  * to deal in the Software without restriction, including without limitation
7  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8  * and/or sell copies of the Software, and to permit persons to whom the
9  * Software is furnished to do so, subject to the following conditions:
10  *
11  * The above copyright notice and this permission notice shall be included in
12  * all copies or substantial portions of the Software.
13  *
14  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
17  * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18  * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19  * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20  * OTHER DEALINGS IN THE SOFTWARE.
21  *
22  * Authors: Alex Deucher
23  */
24 #include <linux/firmware.h>
25 #include <linux/platform_device.h>
26 #include <linux/slab.h>
27 #include <drm/drmP.h>
28 #include "radeon.h"
29 #include "radeon_asic.h"
30 #include <drm/radeon_drm.h>
31 #include "evergreend.h"
32 #include "atom.h"
33 #include "avivod.h"
34 #include "evergreen_reg.h"
35 #include "evergreen_blit_shaders.h"
36 #include "radeon_ucode.h"
37
/* MMIO register-block base offset for each of the 6 display controllers
 * (CRTCs), indexed by CRTC id.  Per-CRTC registers are accessed as
 * base register + crtc_offsets[crtc].
 */
static const u32 crtc_offsets[6] =
{
        EVERGREEN_CRTC0_REGISTER_OFFSET,
        EVERGREEN_CRTC1_REGISTER_OFFSET,
        EVERGREEN_CRTC2_REGISTER_OFFSET,
        EVERGREEN_CRTC3_REGISTER_OFFSET,
        EVERGREEN_CRTC4_REGISTER_OFFSET,
        EVERGREEN_CRTC5_REGISTER_OFFSET
};
47
48 #include "clearstate_evergreen.h"
49
/* List of GPU register offsets that the RLC microcontroller saves and
 * restores (e.g. around power gating) on Sumo-class parts.
 * NOTE(review): presumably raw MMIO offsets consumed when building the RLC
 * save/restore buffer — confirm against the RLC init code that walks it.
 */
static const u32 sumo_rlc_save_restore_register_list[] =
{
        0x98fc,
        0x9830,
        0x9834,
        0x9838,
        0x9870,
        0x9874,
        0x8a14,
        0x8b24,
        0x8bcc,
        0x8b10,
        0x8d00,
        0x8d04,
        0x8c00,
        0x8c04,
        0x8c08,
        0x8c0c,
        0x8d8c,
        0x8c20,
        0x8c24,
        0x8c28,
        0x8c18,
        0x8c1c,
        0x8cf0,
        0x8e2c,
        0x8e38,
        0x8c30,
        0x9508,
        0x9688,
        0x9608,
        0x960c,
        0x9610,
        0x9614,
        0x88c4,
        0x88d4,
        0xa008,
        0x900c,
        0x9100,
        0x913c,
        0x98f8,
        0x98f4,
        0x9b7c,
        0x3f8c,
        0x8950,
        0x8954,
        0x8a18,
        0x8b28,
        0x9144,
        0x9148,
        0x914c,
        0x3f90,
        0x3f94,
        0x915c,
        0x9160,
        0x9178,
        0x917c,
        0x9180,
        0x918c,
        0x9190,
        0x9194,
        0x9198,
        0x919c,
        0x91a8,
        0x91ac,
        0x91b0,
        0x91b4,
        0x91b8,
        0x91c4,
        0x91c8,
        0x91cc,
        0x91d0,
        0x91d4,
        0x91e0,
        0x91e4,
        0x91ec,
        0x91f0,
        0x91f4,
        0x9200,
        0x9204,
        0x929c,
        0x9150,
        0x802c,
};
134
/* Forward declarations for evergreen helpers defined later in this file,
 * plus externs for cross-ASIC helpers (Cayman/CIK/SI/RV770) shared with
 * other radeon ASIC files.
 */
static void evergreen_gpu_init(struct radeon_device *rdev);
void evergreen_fini(struct radeon_device *rdev);
void evergreen_pcie_gen2_enable(struct radeon_device *rdev);
void evergreen_program_aspm(struct radeon_device *rdev);
extern void cayman_cp_int_cntl_setup(struct radeon_device *rdev,
                                     int ring, u32 cp_int_cntl);
extern void cayman_vm_decode_fault(struct radeon_device *rdev,
                                   u32 status, u32 addr);
void cik_init_cp_pg_table(struct radeon_device *rdev);

extern u32 si_get_csb_size(struct radeon_device *rdev);
extern void si_get_csb_buffer(struct radeon_device *rdev, volatile u32 *buffer);
extern u32 cik_get_csb_size(struct radeon_device *rdev);
extern void cik_get_csb_buffer(struct radeon_device *rdev, volatile u32 *buffer);
extern void rv770_set_clk_bypass_mode(struct radeon_device *rdev);
150
/* "Golden" register settings for Evergreen-family ASICs, applied during
 * asic init.  Each row is a {MMIO offset, AND mask, value} triple.
 * NOTE(review): presumably programmed as reg = (reg & ~mask) | (value & mask)
 * by the common register-sequence helper — confirm there.  Values come from
 * AMD hardware bring-up; do not change without hardware documentation.
 */
static const u32 evergreen_golden_registers[] =
{
        0x3f90, 0xffff0000, 0xff000000,
        0x9148, 0xffff0000, 0xff000000,
        0x3f94, 0xffff0000, 0xff000000,
        0x914c, 0xffff0000, 0xff000000,
        0x9b7c, 0xffffffff, 0x00000000,
        0x8a14, 0xffffffff, 0x00000007,
        0x8b10, 0xffffffff, 0x00000000,
        0x960c, 0xffffffff, 0x54763210,
        0x88c4, 0xffffffff, 0x000000c2,
        0x88d4, 0xffffffff, 0x00000010,
        0x8974, 0xffffffff, 0x00000000,
        0xc78, 0x00000080, 0x00000080,
        0x5eb4, 0xffffffff, 0x00000002,
        0x5e78, 0xffffffff, 0x001000f0,
        0x6104, 0x01000300, 0x00000000,
        0x5bc0, 0x00300000, 0x00000000,
        0x7030, 0xffffffff, 0x00000011,
        0x7c30, 0xffffffff, 0x00000011,
        0x10830, 0xffffffff, 0x00000011,
        0x11430, 0xffffffff, 0x00000011,
        0x12030, 0xffffffff, 0x00000011,
        0x12c30, 0xffffffff, 0x00000011,
        0xd02c, 0xffffffff, 0x08421000,
        0x240c, 0xffffffff, 0x00000380,
        0x8b24, 0xffffffff, 0x00ff0fff,
        0x28a4c, 0x06000000, 0x06000000,
        0x10c, 0x00000001, 0x00000001,
        0x8d00, 0xffffffff, 0x100e4848,
        0x8d04, 0xffffffff, 0x00164745,
        0x8c00, 0xffffffff, 0xe4000003,
        0x8c04, 0xffffffff, 0x40600060,
        0x8c08, 0xffffffff, 0x001c001c,
        0x8cf0, 0xffffffff, 0x08e00620,
        0x8c20, 0xffffffff, 0x00800080,
        0x8c24, 0xffffffff, 0x00800080,
        0x8c18, 0xffffffff, 0x20202078,
        0x8c1c, 0xffffffff, 0x00001010,
        0x28350, 0xffffffff, 0x00000000,
        0xa008, 0xffffffff, 0x00010000,
        0x5cc, 0xffffffff, 0x00000001,
        0x9508, 0xffffffff, 0x00000002,
        0x913c, 0x0000000f, 0x0000000a
};
196
/* Second bank of Evergreen golden register settings ({offset, mask, value}
 * triples); all entries here clear the full register to 0.
 */
static const u32 evergreen_golden_registers2[] =
{
        0x2f4c, 0xffffffff, 0x00000000,
        0x54f4, 0xffffffff, 0x00000000,
        0x54f0, 0xffffffff, 0x00000000,
        0x5498, 0xffffffff, 0x00000000,
        0x549c, 0xffffffff, 0x00000000,
        0x5494, 0xffffffff, 0x00000000,
        0x53cc, 0xffffffff, 0x00000000,
        0x53c8, 0xffffffff, 0x00000000,
        0x53c4, 0xffffffff, 0x00000000,
        0x53c0, 0xffffffff, 0x00000000,
        0x53bc, 0xffffffff, 0x00000000,
        0x53b8, 0xffffffff, 0x00000000,
        0x53b4, 0xffffffff, 0x00000000,
        0x53b0, 0xffffffff, 0x00000000
};
214
/* Clock-gating init sequence for Cypress ({offset, mask, value} triples).
 * "mgcg" presumably stands for medium grain clock gating — TODO confirm
 * against AMD docs.  Note the sequence writes 0x802c several times
 * (0xc0000000 / 0x40000000 / 0x40010000): it appears to select a bank
 * before programming the 0x915c-0x929c range twice — ordering matters,
 * do not sort or deduplicate.
 */
static const u32 cypress_mgcg_init[] =
{
        0x802c, 0xffffffff, 0xc0000000,
        0x5448, 0xffffffff, 0x00000100,
        0x55e4, 0xffffffff, 0x00000100,
        0x160c, 0xffffffff, 0x00000100,
        0x5644, 0xffffffff, 0x00000100,
        0xc164, 0xffffffff, 0x00000100,
        0x8a18, 0xffffffff, 0x00000100,
        0x897c, 0xffffffff, 0x06000100,
        0x8b28, 0xffffffff, 0x00000100,
        0x9144, 0xffffffff, 0x00000100,
        0x9a60, 0xffffffff, 0x00000100,
        0x9868, 0xffffffff, 0x00000100,
        0x8d58, 0xffffffff, 0x00000100,
        0x9510, 0xffffffff, 0x00000100,
        0x949c, 0xffffffff, 0x00000100,
        0x9654, 0xffffffff, 0x00000100,
        0x9030, 0xffffffff, 0x00000100,
        0x9034, 0xffffffff, 0x00000100,
        0x9038, 0xffffffff, 0x00000100,
        0x903c, 0xffffffff, 0x00000100,
        0x9040, 0xffffffff, 0x00000100,
        0xa200, 0xffffffff, 0x00000100,
        0xa204, 0xffffffff, 0x00000100,
        0xa208, 0xffffffff, 0x00000100,
        0xa20c, 0xffffffff, 0x00000100,
        0x971c, 0xffffffff, 0x00000100,
        0x977c, 0xffffffff, 0x00000100,
        0x3f80, 0xffffffff, 0x00000100,
        0xa210, 0xffffffff, 0x00000100,
        0xa214, 0xffffffff, 0x00000100,
        0x4d8, 0xffffffff, 0x00000100,
        0x9784, 0xffffffff, 0x00000100,
        0x9698, 0xffffffff, 0x00000100,
        0x4d4, 0xffffffff, 0x00000200,
        0x30cc, 0xffffffff, 0x00000100,
        0xd0c0, 0xffffffff, 0xff000100,
        0x802c, 0xffffffff, 0x40000000,
        0x915c, 0xffffffff, 0x00010000,
        0x9160, 0xffffffff, 0x00030002,
        0x9178, 0xffffffff, 0x00070000,
        0x917c, 0xffffffff, 0x00030002,
        0x9180, 0xffffffff, 0x00050004,
        0x918c, 0xffffffff, 0x00010006,
        0x9190, 0xffffffff, 0x00090008,
        0x9194, 0xffffffff, 0x00070000,
        0x9198, 0xffffffff, 0x00030002,
        0x919c, 0xffffffff, 0x00050004,
        0x91a8, 0xffffffff, 0x00010006,
        0x91ac, 0xffffffff, 0x00090008,
        0x91b0, 0xffffffff, 0x00070000,
        0x91b4, 0xffffffff, 0x00030002,
        0x91b8, 0xffffffff, 0x00050004,
        0x91c4, 0xffffffff, 0x00010006,
        0x91c8, 0xffffffff, 0x00090008,
        0x91cc, 0xffffffff, 0x00070000,
        0x91d0, 0xffffffff, 0x00030002,
        0x91d4, 0xffffffff, 0x00050004,
        0x91e0, 0xffffffff, 0x00010006,
        0x91e4, 0xffffffff, 0x00090008,
        0x91e8, 0xffffffff, 0x00000000,
        0x91ec, 0xffffffff, 0x00070000,
        0x91f0, 0xffffffff, 0x00030002,
        0x91f4, 0xffffffff, 0x00050004,
        0x9200, 0xffffffff, 0x00010006,
        0x9204, 0xffffffff, 0x00090008,
        0x9208, 0xffffffff, 0x00070000,
        0x920c, 0xffffffff, 0x00030002,
        0x9210, 0xffffffff, 0x00050004,
        0x921c, 0xffffffff, 0x00010006,
        0x9220, 0xffffffff, 0x00090008,
        0x9224, 0xffffffff, 0x00070000,
        0x9228, 0xffffffff, 0x00030002,
        0x922c, 0xffffffff, 0x00050004,
        0x9238, 0xffffffff, 0x00010006,
        0x923c, 0xffffffff, 0x00090008,
        0x9240, 0xffffffff, 0x00070000,
        0x9244, 0xffffffff, 0x00030002,
        0x9248, 0xffffffff, 0x00050004,
        0x9254, 0xffffffff, 0x00010006,
        0x9258, 0xffffffff, 0x00090008,
        0x925c, 0xffffffff, 0x00070000,
        0x9260, 0xffffffff, 0x00030002,
        0x9264, 0xffffffff, 0x00050004,
        0x9270, 0xffffffff, 0x00010006,
        0x9274, 0xffffffff, 0x00090008,
        0x9278, 0xffffffff, 0x00070000,
        0x927c, 0xffffffff, 0x00030002,
        0x9280, 0xffffffff, 0x00050004,
        0x928c, 0xffffffff, 0x00010006,
        0x9290, 0xffffffff, 0x00090008,
        0x9294, 0xffffffff, 0x00000000,
        0x929c, 0xffffffff, 0x00000001,
        0x802c, 0xffffffff, 0x40010000,
        0x915c, 0xffffffff, 0x00010000,
        0x9160, 0xffffffff, 0x00030002,
        0x9178, 0xffffffff, 0x00070000,
        0x917c, 0xffffffff, 0x00030002,
        0x9180, 0xffffffff, 0x00050004,
        0x918c, 0xffffffff, 0x00010006,
        0x9190, 0xffffffff, 0x00090008,
        0x9194, 0xffffffff, 0x00070000,
        0x9198, 0xffffffff, 0x00030002,
        0x919c, 0xffffffff, 0x00050004,
        0x91a8, 0xffffffff, 0x00010006,
        0x91ac, 0xffffffff, 0x00090008,
        0x91b0, 0xffffffff, 0x00070000,
        0x91b4, 0xffffffff, 0x00030002,
        0x91b8, 0xffffffff, 0x00050004,
        0x91c4, 0xffffffff, 0x00010006,
        0x91c8, 0xffffffff, 0x00090008,
        0x91cc, 0xffffffff, 0x00070000,
        0x91d0, 0xffffffff, 0x00030002,
        0x91d4, 0xffffffff, 0x00050004,
        0x91e0, 0xffffffff, 0x00010006,
        0x91e4, 0xffffffff, 0x00090008,
        0x91e8, 0xffffffff, 0x00000000,
        0x91ec, 0xffffffff, 0x00070000,
        0x91f0, 0xffffffff, 0x00030002,
        0x91f4, 0xffffffff, 0x00050004,
        0x9200, 0xffffffff, 0x00010006,
        0x9204, 0xffffffff, 0x00090008,
        0x9208, 0xffffffff, 0x00070000,
        0x920c, 0xffffffff, 0x00030002,
        0x9210, 0xffffffff, 0x00050004,
        0x921c, 0xffffffff, 0x00010006,
        0x9220, 0xffffffff, 0x00090008,
        0x9224, 0xffffffff, 0x00070000,
        0x9228, 0xffffffff, 0x00030002,
        0x922c, 0xffffffff, 0x00050004,
        0x9238, 0xffffffff, 0x00010006,
        0x923c, 0xffffffff, 0x00090008,
        0x9240, 0xffffffff, 0x00070000,
        0x9244, 0xffffffff, 0x00030002,
        0x9248, 0xffffffff, 0x00050004,
        0x9254, 0xffffffff, 0x00010006,
        0x9258, 0xffffffff, 0x00090008,
        0x925c, 0xffffffff, 0x00070000,
        0x9260, 0xffffffff, 0x00030002,
        0x9264, 0xffffffff, 0x00050004,
        0x9270, 0xffffffff, 0x00010006,
        0x9274, 0xffffffff, 0x00090008,
        0x9278, 0xffffffff, 0x00070000,
        0x927c, 0xffffffff, 0x00030002,
        0x9280, 0xffffffff, 0x00050004,
        0x928c, 0xffffffff, 0x00010006,
        0x9290, 0xffffffff, 0x00090008,
        0x9294, 0xffffffff, 0x00000000,
        0x929c, 0xffffffff, 0x00000001,
        0x802c, 0xffffffff, 0xc0000000
};
367
/* Clock-gating init sequence for Redwood ({offset, mask, value} triples).
 * Same structure as the Cypress sequence but with a single 0x915c-0x9204
 * programming pass.  The repeated 0x802c writes appear to frame the
 * sequence — ordering matters, do not sort or deduplicate.
 */
static const u32 redwood_mgcg_init[] =
{
        0x802c, 0xffffffff, 0xc0000000,
        0x5448, 0xffffffff, 0x00000100,
        0x55e4, 0xffffffff, 0x00000100,
        0x160c, 0xffffffff, 0x00000100,
        0x5644, 0xffffffff, 0x00000100,
        0xc164, 0xffffffff, 0x00000100,
        0x8a18, 0xffffffff, 0x00000100,
        0x897c, 0xffffffff, 0x06000100,
        0x8b28, 0xffffffff, 0x00000100,
        0x9144, 0xffffffff, 0x00000100,
        0x9a60, 0xffffffff, 0x00000100,
        0x9868, 0xffffffff, 0x00000100,
        0x8d58, 0xffffffff, 0x00000100,
        0x9510, 0xffffffff, 0x00000100,
        0x949c, 0xffffffff, 0x00000100,
        0x9654, 0xffffffff, 0x00000100,
        0x9030, 0xffffffff, 0x00000100,
        0x9034, 0xffffffff, 0x00000100,
        0x9038, 0xffffffff, 0x00000100,
        0x903c, 0xffffffff, 0x00000100,
        0x9040, 0xffffffff, 0x00000100,
        0xa200, 0xffffffff, 0x00000100,
        0xa204, 0xffffffff, 0x00000100,
        0xa208, 0xffffffff, 0x00000100,
        0xa20c, 0xffffffff, 0x00000100,
        0x971c, 0xffffffff, 0x00000100,
        0x977c, 0xffffffff, 0x00000100,
        0x3f80, 0xffffffff, 0x00000100,
        0xa210, 0xffffffff, 0x00000100,
        0xa214, 0xffffffff, 0x00000100,
        0x4d8, 0xffffffff, 0x00000100,
        0x9784, 0xffffffff, 0x00000100,
        0x9698, 0xffffffff, 0x00000100,
        0x4d4, 0xffffffff, 0x00000200,
        0x30cc, 0xffffffff, 0x00000100,
        0xd0c0, 0xffffffff, 0xff000100,
        0x802c, 0xffffffff, 0x40000000,
        0x915c, 0xffffffff, 0x00010000,
        0x9160, 0xffffffff, 0x00030002,
        0x9178, 0xffffffff, 0x00070000,
        0x917c, 0xffffffff, 0x00030002,
        0x9180, 0xffffffff, 0x00050004,
        0x918c, 0xffffffff, 0x00010006,
        0x9190, 0xffffffff, 0x00090008,
        0x9194, 0xffffffff, 0x00070000,
        0x9198, 0xffffffff, 0x00030002,
        0x919c, 0xffffffff, 0x00050004,
        0x91a8, 0xffffffff, 0x00010006,
        0x91ac, 0xffffffff, 0x00090008,
        0x91b0, 0xffffffff, 0x00070000,
        0x91b4, 0xffffffff, 0x00030002,
        0x91b8, 0xffffffff, 0x00050004,
        0x91c4, 0xffffffff, 0x00010006,
        0x91c8, 0xffffffff, 0x00090008,
        0x91cc, 0xffffffff, 0x00070000,
        0x91d0, 0xffffffff, 0x00030002,
        0x91d4, 0xffffffff, 0x00050004,
        0x91e0, 0xffffffff, 0x00010006,
        0x91e4, 0xffffffff, 0x00090008,
        0x91e8, 0xffffffff, 0x00000000,
        0x91ec, 0xffffffff, 0x00070000,
        0x91f0, 0xffffffff, 0x00030002,
        0x91f4, 0xffffffff, 0x00050004,
        0x9200, 0xffffffff, 0x00010006,
        0x9204, 0xffffffff, 0x00090008,
        0x9294, 0xffffffff, 0x00000000,
        0x929c, 0xffffffff, 0x00000001,
        0x802c, 0xffffffff, 0xc0000000
};
439
/* Golden register settings for Cedar ({offset, mask, value} triples).
 * Mostly matches evergreen_golden_registers but with Cedar-specific values
 * (e.g. 0x88d4 = 0, 0x8cf0 = 0x08e00410) and without the extra display
 * entries (0x12030/0x12c30) of the bigger parts.
 */
static const u32 cedar_golden_registers[] =
{
        0x3f90, 0xffff0000, 0xff000000,
        0x9148, 0xffff0000, 0xff000000,
        0x3f94, 0xffff0000, 0xff000000,
        0x914c, 0xffff0000, 0xff000000,
        0x9b7c, 0xffffffff, 0x00000000,
        0x8a14, 0xffffffff, 0x00000007,
        0x8b10, 0xffffffff, 0x00000000,
        0x960c, 0xffffffff, 0x54763210,
        0x88c4, 0xffffffff, 0x000000c2,
        0x88d4, 0xffffffff, 0x00000000,
        0x8974, 0xffffffff, 0x00000000,
        0xc78, 0x00000080, 0x00000080,
        0x5eb4, 0xffffffff, 0x00000002,
        0x5e78, 0xffffffff, 0x001000f0,
        0x6104, 0x01000300, 0x00000000,
        0x5bc0, 0x00300000, 0x00000000,
        0x7030, 0xffffffff, 0x00000011,
        0x7c30, 0xffffffff, 0x00000011,
        0x10830, 0xffffffff, 0x00000011,
        0x11430, 0xffffffff, 0x00000011,
        0xd02c, 0xffffffff, 0x08421000,
        0x240c, 0xffffffff, 0x00000380,
        0x8b24, 0xffffffff, 0x00ff0fff,
        0x28a4c, 0x06000000, 0x06000000,
        0x10c, 0x00000001, 0x00000001,
        0x8d00, 0xffffffff, 0x100e4848,
        0x8d04, 0xffffffff, 0x00164745,
        0x8c00, 0xffffffff, 0xe4000003,
        0x8c04, 0xffffffff, 0x40600060,
        0x8c08, 0xffffffff, 0x001c001c,
        0x8cf0, 0xffffffff, 0x08e00410,
        0x8c20, 0xffffffff, 0x00800080,
        0x8c24, 0xffffffff, 0x00800080,
        0x8c18, 0xffffffff, 0x20202078,
        0x8c1c, 0xffffffff, 0x00001010,
        0x28350, 0xffffffff, 0x00000000,
        0xa008, 0xffffffff, 0x00010000,
        0x5cc, 0xffffffff, 0x00000001,
        0x9508, 0xffffffff, 0x00000002
};
482
/* Clock-gating init sequence for Cedar ({offset, mask, value} triples).
 * Shorter 0x915c-0x91ac programming pass than the Cypress/Redwood
 * sequences, reflecting the smaller part.  Ordering matters — do not
 * sort or deduplicate (0x802c frames the sequence).
 */
static const u32 cedar_mgcg_init[] =
{
        0x802c, 0xffffffff, 0xc0000000,
        0x5448, 0xffffffff, 0x00000100,
        0x55e4, 0xffffffff, 0x00000100,
        0x160c, 0xffffffff, 0x00000100,
        0x5644, 0xffffffff, 0x00000100,
        0xc164, 0xffffffff, 0x00000100,
        0x8a18, 0xffffffff, 0x00000100,
        0x897c, 0xffffffff, 0x06000100,
        0x8b28, 0xffffffff, 0x00000100,
        0x9144, 0xffffffff, 0x00000100,
        0x9a60, 0xffffffff, 0x00000100,
        0x9868, 0xffffffff, 0x00000100,
        0x8d58, 0xffffffff, 0x00000100,
        0x9510, 0xffffffff, 0x00000100,
        0x949c, 0xffffffff, 0x00000100,
        0x9654, 0xffffffff, 0x00000100,
        0x9030, 0xffffffff, 0x00000100,
        0x9034, 0xffffffff, 0x00000100,
        0x9038, 0xffffffff, 0x00000100,
        0x903c, 0xffffffff, 0x00000100,
        0x9040, 0xffffffff, 0x00000100,
        0xa200, 0xffffffff, 0x00000100,
        0xa204, 0xffffffff, 0x00000100,
        0xa208, 0xffffffff, 0x00000100,
        0xa20c, 0xffffffff, 0x00000100,
        0x971c, 0xffffffff, 0x00000100,
        0x977c, 0xffffffff, 0x00000100,
        0x3f80, 0xffffffff, 0x00000100,
        0xa210, 0xffffffff, 0x00000100,
        0xa214, 0xffffffff, 0x00000100,
        0x4d8, 0xffffffff, 0x00000100,
        0x9784, 0xffffffff, 0x00000100,
        0x9698, 0xffffffff, 0x00000100,
        0x4d4, 0xffffffff, 0x00000200,
        0x30cc, 0xffffffff, 0x00000100,
        0xd0c0, 0xffffffff, 0xff000100,
        0x802c, 0xffffffff, 0x40000000,
        0x915c, 0xffffffff, 0x00010000,
        0x9178, 0xffffffff, 0x00050000,
        0x917c, 0xffffffff, 0x00030002,
        0x918c, 0xffffffff, 0x00010004,
        0x9190, 0xffffffff, 0x00070006,
        0x9194, 0xffffffff, 0x00050000,
        0x9198, 0xffffffff, 0x00030002,
        0x91a8, 0xffffffff, 0x00010004,
        0x91ac, 0xffffffff, 0x00070006,
        0x91e8, 0xffffffff, 0x00000000,
        0x9294, 0xffffffff, 0x00000000,
        0x929c, 0xffffffff, 0x00000001,
        0x802c, 0xffffffff, 0xc0000000
};
536
/* Clock-gating init sequence for Juniper ({offset, mask, value} triples).
 * Differs from Cypress in that the 0x977c/0x3f80/0xa210... group is
 * programmed after the 0x915c-0x929c pass rather than before it.
 * Ordering matters — do not sort or deduplicate.
 */
static const u32 juniper_mgcg_init[] =
{
        0x802c, 0xffffffff, 0xc0000000,
        0x5448, 0xffffffff, 0x00000100,
        0x55e4, 0xffffffff, 0x00000100,
        0x160c, 0xffffffff, 0x00000100,
        0x5644, 0xffffffff, 0x00000100,
        0xc164, 0xffffffff, 0x00000100,
        0x8a18, 0xffffffff, 0x00000100,
        0x897c, 0xffffffff, 0x06000100,
        0x8b28, 0xffffffff, 0x00000100,
        0x9144, 0xffffffff, 0x00000100,
        0x9a60, 0xffffffff, 0x00000100,
        0x9868, 0xffffffff, 0x00000100,
        0x8d58, 0xffffffff, 0x00000100,
        0x9510, 0xffffffff, 0x00000100,
        0x949c, 0xffffffff, 0x00000100,
        0x9654, 0xffffffff, 0x00000100,
        0x9030, 0xffffffff, 0x00000100,
        0x9034, 0xffffffff, 0x00000100,
        0x9038, 0xffffffff, 0x00000100,
        0x903c, 0xffffffff, 0x00000100,
        0x9040, 0xffffffff, 0x00000100,
        0xa200, 0xffffffff, 0x00000100,
        0xa204, 0xffffffff, 0x00000100,
        0xa208, 0xffffffff, 0x00000100,
        0xa20c, 0xffffffff, 0x00000100,
        0x971c, 0xffffffff, 0x00000100,
        0xd0c0, 0xffffffff, 0xff000100,
        0x802c, 0xffffffff, 0x40000000,
        0x915c, 0xffffffff, 0x00010000,
        0x9160, 0xffffffff, 0x00030002,
        0x9178, 0xffffffff, 0x00070000,
        0x917c, 0xffffffff, 0x00030002,
        0x9180, 0xffffffff, 0x00050004,
        0x918c, 0xffffffff, 0x00010006,
        0x9190, 0xffffffff, 0x00090008,
        0x9194, 0xffffffff, 0x00070000,
        0x9198, 0xffffffff, 0x00030002,
        0x919c, 0xffffffff, 0x00050004,
        0x91a8, 0xffffffff, 0x00010006,
        0x91ac, 0xffffffff, 0x00090008,
        0x91b0, 0xffffffff, 0x00070000,
        0x91b4, 0xffffffff, 0x00030002,
        0x91b8, 0xffffffff, 0x00050004,
        0x91c4, 0xffffffff, 0x00010006,
        0x91c8, 0xffffffff, 0x00090008,
        0x91cc, 0xffffffff, 0x00070000,
        0x91d0, 0xffffffff, 0x00030002,
        0x91d4, 0xffffffff, 0x00050004,
        0x91e0, 0xffffffff, 0x00010006,
        0x91e4, 0xffffffff, 0x00090008,
        0x91e8, 0xffffffff, 0x00000000,
        0x91ec, 0xffffffff, 0x00070000,
        0x91f0, 0xffffffff, 0x00030002,
        0x91f4, 0xffffffff, 0x00050004,
        0x9200, 0xffffffff, 0x00010006,
        0x9204, 0xffffffff, 0x00090008,
        0x9208, 0xffffffff, 0x00070000,
        0x920c, 0xffffffff, 0x00030002,
        0x9210, 0xffffffff, 0x00050004,
        0x921c, 0xffffffff, 0x00010006,
        0x9220, 0xffffffff, 0x00090008,
        0x9224, 0xffffffff, 0x00070000,
        0x9228, 0xffffffff, 0x00030002,
        0x922c, 0xffffffff, 0x00050004,
        0x9238, 0xffffffff, 0x00010006,
        0x923c, 0xffffffff, 0x00090008,
        0x9240, 0xffffffff, 0x00070000,
        0x9244, 0xffffffff, 0x00030002,
        0x9248, 0xffffffff, 0x00050004,
        0x9254, 0xffffffff, 0x00010006,
        0x9258, 0xffffffff, 0x00090008,
        0x925c, 0xffffffff, 0x00070000,
        0x9260, 0xffffffff, 0x00030002,
        0x9264, 0xffffffff, 0x00050004,
        0x9270, 0xffffffff, 0x00010006,
        0x9274, 0xffffffff, 0x00090008,
        0x9278, 0xffffffff, 0x00070000,
        0x927c, 0xffffffff, 0x00030002,
        0x9280, 0xffffffff, 0x00050004,
        0x928c, 0xffffffff, 0x00010006,
        0x9290, 0xffffffff, 0x00090008,
        0x9294, 0xffffffff, 0x00000000,
        0x929c, 0xffffffff, 0x00000001,
        0x802c, 0xffffffff, 0xc0000000,
        0x977c, 0xffffffff, 0x00000100,
        0x3f80, 0xffffffff, 0x00000100,
        0xa210, 0xffffffff, 0x00000100,
        0xa214, 0xffffffff, 0x00000100,
        0x4d8, 0xffffffff, 0x00000100,
        0x9784, 0xffffffff, 0x00000100,
        0x9698, 0xffffffff, 0x00000100,
        0x4d4, 0xffffffff, 0x00000200,
        0x30cc, 0xffffffff, 0x00000100,
        0x802c, 0xffffffff, 0xc0000000
};
634
/* Golden register settings for SuperSumo APUs ({offset, mask, value}
 * triples).  Values from AMD hardware bring-up; do not change without
 * hardware documentation.
 */
static const u32 supersumo_golden_registers[] =
{
        0x5eb4, 0xffffffff, 0x00000002,
        0x5cc, 0xffffffff, 0x00000001,
        0x7030, 0xffffffff, 0x00000011,
        0x7c30, 0xffffffff, 0x00000011,
        0x6104, 0x01000300, 0x00000000,
        0x5bc0, 0x00300000, 0x00000000,
        0x8c04, 0xffffffff, 0x40600060,
        0x8c08, 0xffffffff, 0x001c001c,
        0x8c20, 0xffffffff, 0x00800080,
        0x8c24, 0xffffffff, 0x00800080,
        0x8c18, 0xffffffff, 0x20202078,
        0x8c1c, 0xffffffff, 0x00001010,
        0x918c, 0xffffffff, 0x00010006,
        0x91a8, 0xffffffff, 0x00010006,
        0x91c4, 0xffffffff, 0x00010006,
        0x91e0, 0xffffffff, 0x00010006,
        0x9200, 0xffffffff, 0x00010006,
        0x9150, 0xffffffff, 0x6e944040,
        0x917c, 0xffffffff, 0x00030002,
        0x9180, 0xffffffff, 0x00050004,
        0x9198, 0xffffffff, 0x00030002,
        0x919c, 0xffffffff, 0x00050004,
        0x91b4, 0xffffffff, 0x00030002,
        0x91b8, 0xffffffff, 0x00050004,
        0x91d0, 0xffffffff, 0x00030002,
        0x91d4, 0xffffffff, 0x00050004,
        0x91f0, 0xffffffff, 0x00030002,
        0x91f4, 0xffffffff, 0x00050004,
        0x915c, 0xffffffff, 0x00010000,
        0x9160, 0xffffffff, 0x00030002,
        0x3f90, 0xffff0000, 0xff000000,
        0x9178, 0xffffffff, 0x00070000,
        0x9194, 0xffffffff, 0x00070000,
        0x91b0, 0xffffffff, 0x00070000,
        0x91cc, 0xffffffff, 0x00070000,
        0x91ec, 0xffffffff, 0x00070000,
        0x9148, 0xffff0000, 0xff000000,
        0x9190, 0xffffffff, 0x00090008,
        0x91ac, 0xffffffff, 0x00090008,
        0x91c8, 0xffffffff, 0x00090008,
        0x91e4, 0xffffffff, 0x00090008,
        0x9204, 0xffffffff, 0x00090008,
        0x3f94, 0xffff0000, 0xff000000,
        0x914c, 0xffff0000, 0xff000000,
        0x929c, 0xffffffff, 0x00000001,
        0x8a18, 0xffffffff, 0x00000100,
        0x8b28, 0xffffffff, 0x00000100,
        0x9144, 0xffffffff, 0x00000100,
        0x5644, 0xffffffff, 0x00000100,
        0x9b7c, 0xffffffff, 0x00000000,
        0x8030, 0xffffffff, 0x0000100a,
        0x8a14, 0xffffffff, 0x00000007,
        0x8b24, 0xffffffff, 0x00ff0fff,
        0x8b10, 0xffffffff, 0x00000000,
        0x28a4c, 0x06000000, 0x06000000,
        0x4d8, 0xffffffff, 0x00000100,
        0x913c, 0xffff000f, 0x0100000a,
        0x960c, 0xffffffff, 0x54763210,
        0x88c4, 0xffffffff, 0x000000c2,
        0x88d4, 0xffffffff, 0x00000010,
        0x8974, 0xffffffff, 0x00000000,
        0xc78, 0x00000080, 0x00000080,
        0x5e78, 0xffffffff, 0x001000f0,
        0xd02c, 0xffffffff, 0x08421000,
        0xa008, 0xffffffff, 0x00010000,
        0x8d00, 0xffffffff, 0x100e4848,
        0x8d04, 0xffffffff, 0x00164745,
        0x8c00, 0xffffffff, 0xe4000003,
        0x8cf0, 0x1fffffff, 0x08e00620,
        0x28350, 0xffffffff, 0x00000000,
        0x9508, 0xffffffff, 0x00000002
};
709
/* Additional golden register settings applied on Sumo proper, on top of
 * the shared supersumo table ({offset, mask, value} triples).
 */
static const u32 sumo_golden_registers[] =
{
        0x900c, 0x00ffffff, 0x0017071f,
        0x8c18, 0xffffffff, 0x10101060,
        0x8c1c, 0xffffffff, 0x00001010,
        0x8c30, 0x0000000f, 0x00000005,
        0x9688, 0x0000000f, 0x00000007
};
718
/* Golden register settings for Wrestler APUs ({offset, mask, value}
 * triples).  A reduced variant of the supersumo table (fewer shader-array
 * entries, 0x8a14 = 1 instead of 7, 0x8cf0 = 0x08e00410) plus the
 * Sumo-style 0x900c/0x8c18/0x8c1c values at the end.
 */
static const u32 wrestler_golden_registers[] =
{
        0x5eb4, 0xffffffff, 0x00000002,
        0x5cc, 0xffffffff, 0x00000001,
        0x7030, 0xffffffff, 0x00000011,
        0x7c30, 0xffffffff, 0x00000011,
        0x6104, 0x01000300, 0x00000000,
        0x5bc0, 0x00300000, 0x00000000,
        0x918c, 0xffffffff, 0x00010006,
        0x91a8, 0xffffffff, 0x00010006,
        0x9150, 0xffffffff, 0x6e944040,
        0x917c, 0xffffffff, 0x00030002,
        0x9198, 0xffffffff, 0x00030002,
        0x915c, 0xffffffff, 0x00010000,
        0x3f90, 0xffff0000, 0xff000000,
        0x9178, 0xffffffff, 0x00070000,
        0x9194, 0xffffffff, 0x00070000,
        0x9148, 0xffff0000, 0xff000000,
        0x9190, 0xffffffff, 0x00090008,
        0x91ac, 0xffffffff, 0x00090008,
        0x3f94, 0xffff0000, 0xff000000,
        0x914c, 0xffff0000, 0xff000000,
        0x929c, 0xffffffff, 0x00000001,
        0x8a18, 0xffffffff, 0x00000100,
        0x8b28, 0xffffffff, 0x00000100,
        0x9144, 0xffffffff, 0x00000100,
        0x9b7c, 0xffffffff, 0x00000000,
        0x8030, 0xffffffff, 0x0000100a,
        0x8a14, 0xffffffff, 0x00000001,
        0x8b24, 0xffffffff, 0x00ff0fff,
        0x8b10, 0xffffffff, 0x00000000,
        0x28a4c, 0x06000000, 0x06000000,
        0x4d8, 0xffffffff, 0x00000100,
        0x913c, 0xffff000f, 0x0100000a,
        0x960c, 0xffffffff, 0x54763210,
        0x88c4, 0xffffffff, 0x000000c2,
        0x88d4, 0xffffffff, 0x00000010,
        0x8974, 0xffffffff, 0x00000000,
        0xc78, 0x00000080, 0x00000080,
        0x5e78, 0xffffffff, 0x001000f0,
        0xd02c, 0xffffffff, 0x08421000,
        0xa008, 0xffffffff, 0x00010000,
        0x8d00, 0xffffffff, 0x100e4848,
        0x8d04, 0xffffffff, 0x00164745,
        0x8c00, 0xffffffff, 0xe4000003,
        0x8cf0, 0x1fffffff, 0x08e00410,
        0x28350, 0xffffffff, 0x00000000,
        0x9508, 0xffffffff, 0x00000002,
        0x900c, 0xffffffff, 0x0017071f,
        0x8c18, 0xffffffff, 0x10101060,
        0x8c1c, 0xffffffff, 0x00001010
};
771
/* Golden register settings for Barts (Northern Islands) ({offset, mask,
 * value} triples).  Unlike the Evergreen tables, most entries here use
 * partial masks, preserving the unmasked bits of each register.
 */
static const u32 barts_golden_registers[] =
{
        0x5eb4, 0xffffffff, 0x00000002,
        0x5e78, 0x8f311ff1, 0x001000f0,
        0x3f90, 0xffff0000, 0xff000000,
        0x9148, 0xffff0000, 0xff000000,
        0x3f94, 0xffff0000, 0xff000000,
        0x914c, 0xffff0000, 0xff000000,
        0xc78, 0x00000080, 0x00000080,
        0xbd4, 0x70073777, 0x00010001,
        0xd02c, 0xbfffff1f, 0x08421000,
        0xd0b8, 0x03773777, 0x02011003,
        0x5bc0, 0x00200000, 0x50100000,
        0x98f8, 0x33773777, 0x02011003,
        0x98fc, 0xffffffff, 0x76543210,
        0x7030, 0x31000311, 0x00000011,
        0x2f48, 0x00000007, 0x02011003,
        0x6b28, 0x00000010, 0x00000012,
        0x7728, 0x00000010, 0x00000012,
        0x10328, 0x00000010, 0x00000012,
        0x10f28, 0x00000010, 0x00000012,
        0x11b28, 0x00000010, 0x00000012,
        0x12728, 0x00000010, 0x00000012,
        0x240c, 0x000007ff, 0x00000380,
        0x8a14, 0xf000001f, 0x00000007,
        0x8b24, 0x3fff3fff, 0x00ff0fff,
        0x8b10, 0x0000ff0f, 0x00000000,
        0x28a4c, 0x07ffffff, 0x06000000,
        0x10c, 0x00000001, 0x00010003,
        0xa02c, 0xffffffff, 0x0000009b,
        0x913c, 0x0000000f, 0x0100000a,
        0x8d00, 0xffff7f7f, 0x100e4848,
        0x8d04, 0x00ffffff, 0x00164745,
        0x8c00, 0xfffc0003, 0xe4000003,
        0x8c04, 0xf8ff00ff, 0x40600060,
        0x8c08, 0x00ff00ff, 0x001c001c,
        0x8cf0, 0x1fff1fff, 0x08e00620,
        0x8c20, 0x0fff0fff, 0x00800080,
        0x8c24, 0x0fff0fff, 0x00800080,
        0x8c18, 0xffffffff, 0x20202078,
        0x8c1c, 0x0000ffff, 0x00001010,
        0x28350, 0x00000f01, 0x00000000,
        0x9508, 0x3700001f, 0x00000002,
        0x960c, 0xffffffff, 0x54763210,
        0x88c4, 0x001f3ae3, 0x000000c2,
        0x88d4, 0x0000001f, 0x00000010,
        0x8974, 0xffffffff, 0x00000000
};
820
/*
 * Golden register fix-ups for TURKS (Northern Islands).
 * Same layout as the other golden tables; consumed by
 * radeon_program_register_sequence() from evergreen_init_golden_registers().
 */
static const u32 turks_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0x8f311ff1, 0x001000f0,
	0x8c8, 0x00003000, 0x00001070,
	0x8cc, 0x000fffff, 0x00040035,
	0x3f90, 0xffff0000, 0xfff00000,
	0x9148, 0xffff0000, 0xfff00000,
	0x3f94, 0xffff0000, 0xfff00000,
	0x914c, 0xffff0000, 0xfff00000,
	0xc78, 0x00000080, 0x00000080,
	0xbd4, 0x00073007, 0x00010002,
	0xd02c, 0xbfffff1f, 0x08421000,
	0xd0b8, 0x03773777, 0x02010002,
	0x5bc0, 0x00200000, 0x50100000,
	0x98f8, 0x33773777, 0x00010002,
	0x98fc, 0xffffffff, 0x33221100,
	0x7030, 0x31000311, 0x00000011,
	0x2f48, 0x33773777, 0x00010002,
	0x6b28, 0x00000010, 0x00000012,
	0x7728, 0x00000010, 0x00000012,
	0x10328, 0x00000010, 0x00000012,
	0x10f28, 0x00000010, 0x00000012,
	0x11b28, 0x00000010, 0x00000012,
	0x12728, 0x00000010, 0x00000012,
	0x240c, 0x000007ff, 0x00000380,
	0x8a14, 0xf000001f, 0x00000007,
	0x8b24, 0x3fff3fff, 0x00ff0fff,
	0x8b10, 0x0000ff0f, 0x00000000,
	0x28a4c, 0x07ffffff, 0x06000000,
	0x10c, 0x00000001, 0x00010003,
	0xa02c, 0xffffffff, 0x0000009b,
	0x913c, 0x0000000f, 0x0100000a,
	0x8d00, 0xffff7f7f, 0x100e4848,
	0x8d04, 0x00ffffff, 0x00164745,
	0x8c00, 0xfffc0003, 0xe4000003,
	0x8c04, 0xf8ff00ff, 0x40600060,
	0x8c08, 0x00ff00ff, 0x001c001c,
	0x8cf0, 0x1fff1fff, 0x08e00410,
	0x8c20, 0x0fff0fff, 0x00800080,
	0x8c24, 0x0fff0fff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0x0000ffff, 0x00001010,
	0x28350, 0x00000f01, 0x00000000,
	0x9508, 0x3700001f, 0x00000002,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0x001f3ae3, 0x000000c2,
	0x88d4, 0x0000001f, 0x00000010,
	0x8974, 0xffffffff, 0x00000000
};
871
/*
 * Golden register fix-ups for CAICOS (Northern Islands).
 * Same layout as the other golden tables; consumed by
 * radeon_program_register_sequence() from evergreen_init_golden_registers().
 */
static const u32 caicos_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0x8f311ff1, 0x001000f0,
	0x8c8, 0x00003420, 0x00001450,
	0x8cc, 0x000fffff, 0x00040035,
	0x3f90, 0xffff0000, 0xfffc0000,
	0x9148, 0xffff0000, 0xfffc0000,
	0x3f94, 0xffff0000, 0xfffc0000,
	0x914c, 0xffff0000, 0xfffc0000,
	0xc78, 0x00000080, 0x00000080,
	0xbd4, 0x00073007, 0x00010001,
	0xd02c, 0xbfffff1f, 0x08421000,
	0xd0b8, 0x03773777, 0x02010001,
	0x5bc0, 0x00200000, 0x50100000,
	0x98f8, 0x33773777, 0x02010001,
	0x98fc, 0xffffffff, 0x33221100,
	0x7030, 0x31000311, 0x00000011,
	0x2f48, 0x33773777, 0x02010001,
	0x6b28, 0x00000010, 0x00000012,
	0x7728, 0x00000010, 0x00000012,
	0x10328, 0x00000010, 0x00000012,
	0x10f28, 0x00000010, 0x00000012,
	0x11b28, 0x00000010, 0x00000012,
	0x12728, 0x00000010, 0x00000012,
	0x240c, 0x000007ff, 0x00000380,
	0x8a14, 0xf000001f, 0x00000001,
	0x8b24, 0x3fff3fff, 0x00ff0fff,
	0x8b10, 0x0000ff0f, 0x00000000,
	0x28a4c, 0x07ffffff, 0x06000000,
	0x10c, 0x00000001, 0x00010003,
	0xa02c, 0xffffffff, 0x0000009b,
	0x913c, 0x0000000f, 0x0100000a,
	0x8d00, 0xffff7f7f, 0x100e4848,
	0x8d04, 0x00ffffff, 0x00164745,
	0x8c00, 0xfffc0003, 0xe4000003,
	0x8c04, 0xf8ff00ff, 0x40600060,
	0x8c08, 0x00ff00ff, 0x001c001c,
	0x8cf0, 0x1fff1fff, 0x08e00410,
	0x8c20, 0x0fff0fff, 0x00800080,
	0x8c24, 0x0fff0fff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0x0000ffff, 0x00001010,
	0x28350, 0x00000f01, 0x00000000,
	0x9508, 0x3700001f, 0x00000002,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0x001f3ae3, 0x000000c2,
	0x88d4, 0x0000001f, 0x00000010,
	0x8974, 0xffffffff, 0x00000000
};
922
923 static void evergreen_init_golden_registers(struct radeon_device *rdev)
924 {
925         switch (rdev->family) {
926         case CHIP_CYPRESS:
927         case CHIP_HEMLOCK:
928                 radeon_program_register_sequence(rdev,
929                                                  evergreen_golden_registers,
930                                                  (const u32)ARRAY_SIZE(evergreen_golden_registers));
931                 radeon_program_register_sequence(rdev,
932                                                  evergreen_golden_registers2,
933                                                  (const u32)ARRAY_SIZE(evergreen_golden_registers2));
934                 radeon_program_register_sequence(rdev,
935                                                  cypress_mgcg_init,
936                                                  (const u32)ARRAY_SIZE(cypress_mgcg_init));
937                 break;
938         case CHIP_JUNIPER:
939                 radeon_program_register_sequence(rdev,
940                                                  evergreen_golden_registers,
941                                                  (const u32)ARRAY_SIZE(evergreen_golden_registers));
942                 radeon_program_register_sequence(rdev,
943                                                  evergreen_golden_registers2,
944                                                  (const u32)ARRAY_SIZE(evergreen_golden_registers2));
945                 radeon_program_register_sequence(rdev,
946                                                  juniper_mgcg_init,
947                                                  (const u32)ARRAY_SIZE(juniper_mgcg_init));
948                 break;
949         case CHIP_REDWOOD:
950                 radeon_program_register_sequence(rdev,
951                                                  evergreen_golden_registers,
952                                                  (const u32)ARRAY_SIZE(evergreen_golden_registers));
953                 radeon_program_register_sequence(rdev,
954                                                  evergreen_golden_registers2,
955                                                  (const u32)ARRAY_SIZE(evergreen_golden_registers2));
956                 radeon_program_register_sequence(rdev,
957                                                  redwood_mgcg_init,
958                                                  (const u32)ARRAY_SIZE(redwood_mgcg_init));
959                 break;
960         case CHIP_CEDAR:
961                 radeon_program_register_sequence(rdev,
962                                                  cedar_golden_registers,
963                                                  (const u32)ARRAY_SIZE(cedar_golden_registers));
964                 radeon_program_register_sequence(rdev,
965                                                  evergreen_golden_registers2,
966                                                  (const u32)ARRAY_SIZE(evergreen_golden_registers2));
967                 radeon_program_register_sequence(rdev,
968                                                  cedar_mgcg_init,
969                                                  (const u32)ARRAY_SIZE(cedar_mgcg_init));
970                 break;
971         case CHIP_PALM:
972                 radeon_program_register_sequence(rdev,
973                                                  wrestler_golden_registers,
974                                                  (const u32)ARRAY_SIZE(wrestler_golden_registers));
975                 break;
976         case CHIP_SUMO:
977                 radeon_program_register_sequence(rdev,
978                                                  supersumo_golden_registers,
979                                                  (const u32)ARRAY_SIZE(supersumo_golden_registers));
980                 break;
981         case CHIP_SUMO2:
982                 radeon_program_register_sequence(rdev,
983                                                  supersumo_golden_registers,
984                                                  (const u32)ARRAY_SIZE(supersumo_golden_registers));
985                 radeon_program_register_sequence(rdev,
986                                                  sumo_golden_registers,
987                                                  (const u32)ARRAY_SIZE(sumo_golden_registers));
988                 break;
989         case CHIP_BARTS:
990                 radeon_program_register_sequence(rdev,
991                                                  barts_golden_registers,
992                                                  (const u32)ARRAY_SIZE(barts_golden_registers));
993                 break;
994         case CHIP_TURKS:
995                 radeon_program_register_sequence(rdev,
996                                                  turks_golden_registers,
997                                                  (const u32)ARRAY_SIZE(turks_golden_registers));
998                 break;
999         case CHIP_CAICOS:
1000                 radeon_program_register_sequence(rdev,
1001                                                  caicos_golden_registers,
1002                                                  (const u32)ARRAY_SIZE(caicos_golden_registers));
1003                 break;
1004         default:
1005                 break;
1006         }
1007 }
1008
1009 void evergreen_tiling_fields(unsigned tiling_flags, unsigned *bankw,
1010                              unsigned *bankh, unsigned *mtaspect,
1011                              unsigned *tile_split)
1012 {
1013         *bankw = (tiling_flags >> RADEON_TILING_EG_BANKW_SHIFT) & RADEON_TILING_EG_BANKW_MASK;
1014         *bankh = (tiling_flags >> RADEON_TILING_EG_BANKH_SHIFT) & RADEON_TILING_EG_BANKH_MASK;
1015         *mtaspect = (tiling_flags >> RADEON_TILING_EG_MACRO_TILE_ASPECT_SHIFT) & RADEON_TILING_EG_MACRO_TILE_ASPECT_MASK;
1016         *tile_split = (tiling_flags >> RADEON_TILING_EG_TILE_SPLIT_SHIFT) & RADEON_TILING_EG_TILE_SPLIT_MASK;
1017         switch (*bankw) {
1018         default:
1019         case 1: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_1; break;
1020         case 2: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_2; break;
1021         case 4: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_4; break;
1022         case 8: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_8; break;
1023         }
1024         switch (*bankh) {
1025         default:
1026         case 1: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_1; break;
1027         case 2: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_2; break;
1028         case 4: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_4; break;
1029         case 8: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_8; break;
1030         }
1031         switch (*mtaspect) {
1032         default:
1033         case 1: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_1; break;
1034         case 2: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_2; break;
1035         case 4: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_4; break;
1036         case 8: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_8; break;
1037         }
1038 }
1039
/**
 * sumo_set_uvd_clock - program a single UVD clock (VCLK or DCLK)
 *
 * @rdev: radeon_device pointer
 * @clock: target clock frequency
 * @cntl_reg: control register that receives the post divider
 * @status_reg: status register polled for the DCLK_STATUS bit
 *
 * Looks up divider settings for @clock from the ATOM tables, writes the
 * post divider into @cntl_reg and then polls @status_reg until the
 * hardware signals the clock is stable (up to 100 x 10 ms).
 *
 * Returns 0 on success, a negative error from
 * radeon_atom_get_clock_dividers(), or -ETIMEDOUT if the status bit
 * never rises.
 */
static int sumo_set_uvd_clock(struct radeon_device *rdev, u32 clock,
			      u32 cntl_reg, u32 status_reg)
{
	int r, i;
	struct atom_clock_dividers dividers;

	/* ask the ATOM BIOS for dividers matching the requested clock */
	r = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
					   clock, false, &dividers);
	if (r)
		return r;

	WREG32_P(cntl_reg, dividers.post_div, ~(DCLK_DIR_CNTL_EN|DCLK_DIVIDER_MASK));

	/* wait for the hardware to acknowledge the new setting */
	for (i = 0; i < 100; i++) {
		if (RREG32(status_reg) & DCLK_STATUS)
			break;
		mdelay(10);
	}
	if (i == 100)
		return -ETIMEDOUT;

	return 0;
}
1063
1064 int sumo_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
1065 {
1066         int r = 0;
1067         u32 cg_scratch = RREG32(CG_SCRATCH1);
1068
1069         r = sumo_set_uvd_clock(rdev, vclk, CG_VCLK_CNTL, CG_VCLK_STATUS);
1070         if (r)
1071                 goto done;
1072         cg_scratch &= 0xffff0000;
1073         cg_scratch |= vclk / 100; /* Mhz */
1074
1075         r = sumo_set_uvd_clock(rdev, dclk, CG_DCLK_CNTL, CG_DCLK_STATUS);
1076         if (r)
1077                 goto done;
1078         cg_scratch &= 0x0000ffff;
1079         cg_scratch |= (dclk / 100) << 16; /* Mhz */
1080
1081 done:
1082         WREG32(CG_SCRATCH1, cg_scratch);
1083
1084         return r;
1085 }
1086
/**
 * evergreen_set_uvd_clocks - reprogram the UVD PLL (UPLL) for vclk/dclk
 *
 * @rdev: radeon_device pointer
 * @vclk: target UVD video clock; 0 leaves the PLL bypassed and asleep
 * @dclk: target UVD decode clock; 0 leaves the PLL bypassed and asleep
 *
 * Switches vclk/dclk onto the bypass source, puts the UPLL into bypass,
 * computes feedback and post dividers, reprograms and relocks the PLL,
 * and finally switches the clocks back onto the PLL outputs.
 *
 * Returns 0 on success or a negative error code from the divider
 * calculation or the PLL control requests.
 */
int evergreen_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
{
	/* start off with something large */
	unsigned fb_div = 0, vclk_div = 0, dclk_div = 0;
	int r;

	/* bypass vclk and dclk with bclk */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		VCLK_SRC_SEL(1) | DCLK_SRC_SEL(1),
		~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));

	/* put PLL in bypass mode */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_BYPASS_EN_MASK, ~UPLL_BYPASS_EN_MASK);

	if (!vclk || !dclk) {
		/* keep the Bypass mode, put PLL to sleep */
		WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
		return 0;
	}

	/* compute feedback and post dividers for the requested clocks */
	r = radeon_uvd_calc_upll_dividers(rdev, vclk, dclk, 125000, 250000,
					  16384, 0x03FFFFFF, 0, 128, 5,
					  &fb_div, &vclk_div, &dclk_div);
	if (r)
		return r;

	/* set VCO_MODE to 1 */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_VCO_MODE_MASK, ~UPLL_VCO_MODE_MASK);

	/* toggle UPLL_SLEEP to 1 then back to 0 */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_SLEEP_MASK);

	/* deassert UPLL_RESET */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);

	mdelay(1);

	r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
	if (r)
		return r;

	/* assert UPLL_RESET again */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_RESET_MASK, ~UPLL_RESET_MASK);

	/* disable spread spectrum. */
	WREG32_P(CG_UPLL_SPREAD_SPECTRUM, 0, ~SSEN_MASK);

	/* set feedback divider */
	WREG32_P(CG_UPLL_FUNC_CNTL_3, UPLL_FB_DIV(fb_div), ~UPLL_FB_DIV_MASK);

	/* set ref divider to 0 */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_REF_DIV_MASK);

	if (fb_div < 307200)
		WREG32_P(CG_UPLL_FUNC_CNTL_4, 0, ~UPLL_SPARE_ISPARE9);
	else
		WREG32_P(CG_UPLL_FUNC_CNTL_4, UPLL_SPARE_ISPARE9, ~UPLL_SPARE_ISPARE9);

	/* set PDIV_A and PDIV_B */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		UPLL_PDIV_A(vclk_div) | UPLL_PDIV_B(dclk_div),
		~(UPLL_PDIV_A_MASK | UPLL_PDIV_B_MASK));

	/* give the PLL some time to settle */
	mdelay(15);

	/* deassert PLL_RESET */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);

	mdelay(15);

	/* switch from bypass mode to normal mode */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_BYPASS_EN_MASK);

	r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
	if (r)
		return r;

	/* switch VCLK and DCLK selection */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		VCLK_SRC_SEL(2) | DCLK_SRC_SEL(2),
		~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));

	mdelay(100);

	return 0;
}
1175
1176 void evergreen_fix_pci_max_read_req_size(struct radeon_device *rdev)
1177 {
1178         int readrq;
1179         u16 v;
1180
1181         readrq = pcie_get_readrq(rdev->pdev);
1182         v = ffs(readrq) - 8;
1183         /* if bios or OS sets MAX_READ_REQUEST_SIZE to an invalid value, fix it
1184          * to avoid hangs or perfomance issues
1185          */
1186         if ((v == 0) || (v == 6) || (v == 7))
1187                 pcie_set_readrq(rdev->pdev, 512);
1188 }
1189
/**
 * dce4_program_fmt - program the FMT bit-depth reduction block
 *
 * @encoder: drm encoder being configured
 *
 * Selects truncation or dithering bits for the FMT block based on the
 * monitor's bpc and the connector's dither preference, then writes
 * FMT_BIT_DEPTH_CONTROL for the encoder's CRTC.  LVDS/eDP (configured
 * by atom) and analog DAC encoders are skipped.
 */
void dce4_program_fmt(struct drm_encoder *encoder)
{
	struct drm_device *dev = encoder->dev;
	struct radeon_device *rdev = dev->dev_private;
	struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(encoder->crtc);
	struct drm_connector *connector = radeon_get_connector_for_encoder(encoder);
	int bpc = 0;
	u32 tmp = 0;
	enum radeon_connector_dither dither = RADEON_FMT_DITHER_DISABLE;

	if (connector) {
		struct radeon_connector *radeon_connector = to_radeon_connector(connector);
		bpc = radeon_get_monitor_bpc(connector);
		dither = radeon_connector->dither;
	}

	/* LVDS/eDP FMT is set up by atom */
	if (radeon_encoder->devices & ATOM_DEVICE_LCD_SUPPORT)
		return;

	/* not needed for analog */
	if ((radeon_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1) ||
	    (radeon_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2))
		return;

	/* no bpc information available; leave the FMT block untouched */
	if (bpc == 0)
		return;

	switch (bpc) {
	case 6:
		if (dither == RADEON_FMT_DITHER_ENABLE)
			/* XXX sort out optimal dither settings */
			tmp |= (FMT_FRAME_RANDOM_ENABLE | FMT_HIGHPASS_RANDOM_ENABLE |
				FMT_SPATIAL_DITHER_EN);
		else
			tmp |= FMT_TRUNCATE_EN;
		break;
	case 8:
		if (dither == RADEON_FMT_DITHER_ENABLE)
			/* XXX sort out optimal dither settings */
			tmp |= (FMT_FRAME_RANDOM_ENABLE | FMT_HIGHPASS_RANDOM_ENABLE |
				FMT_RGB_RANDOM_ENABLE |
				FMT_SPATIAL_DITHER_EN | FMT_SPATIAL_DITHER_DEPTH);
		else
			tmp |= (FMT_TRUNCATE_EN | FMT_TRUNCATE_DEPTH);
		break;
	case 10:
	default:
		/* not needed */
		break;
	}

	WREG32(FMT_BIT_DEPTH_CONTROL + radeon_crtc->crtc_offset, tmp);
}
1245
1246 static bool dce4_is_in_vblank(struct radeon_device *rdev, int crtc)
1247 {
1248         if (RREG32(EVERGREEN_CRTC_STATUS + crtc_offsets[crtc]) & EVERGREEN_CRTC_V_BLANK)
1249                 return true;
1250         else
1251                 return false;
1252 }
1253
1254 static bool dce4_is_counter_moving(struct radeon_device *rdev, int crtc)
1255 {
1256         u32 pos1, pos2;
1257
1258         pos1 = RREG32(EVERGREEN_CRTC_STATUS_POSITION + crtc_offsets[crtc]);
1259         pos2 = RREG32(EVERGREEN_CRTC_STATUS_POSITION + crtc_offsets[crtc]);
1260
1261         if (pos1 != pos2)
1262                 return true;
1263         else
1264                 return false;
1265 }
1266
1267 /**
1268  * dce4_wait_for_vblank - vblank wait asic callback.
1269  *
1270  * @rdev: radeon_device pointer
1271  * @crtc: crtc to wait for vblank on
1272  *
1273  * Wait for vblank on the requested crtc (evergreen+).
1274  */
void dce4_wait_for_vblank(struct radeon_device *rdev, int crtc)
{
	unsigned i = 0;

	/* ignore requests for CRTCs this ASIC does not have */
	if (crtc >= rdev->num_crtc)
		return;

	/* nothing to wait on if the CRTC is not enabled */
	if (!(RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[crtc]) & EVERGREEN_CRTC_MASTER_EN))
		return;

	/* depending on when we hit vblank, we may be close to active; if so,
	 * wait for another frame.
	 */
	while (dce4_is_in_vblank(rdev, crtc)) {
		/* every 100 spins, check the position counter is still
		 * advancing; bail out if it stalled to avoid a hang */
		if (i++ % 100 == 0) {
			if (!dce4_is_counter_moving(rdev, crtc))
				break;
		}
	}

	/* now busy-wait until the next vblank actually starts, with the
	 * same stall detection as above */
	while (!dce4_is_in_vblank(rdev, crtc)) {
		if (i++ % 100 == 0) {
			if (!dce4_is_counter_moving(rdev, crtc))
				break;
		}
	}
}
1302
1303 /**
1304  * evergreen_page_flip - pageflip callback.
1305  *
1306  * @rdev: radeon_device pointer
1307  * @crtc_id: crtc to cleanup pageflip on
1308  * @crtc_base: new address of the crtc (GPU MC address)
1309  *
1310  * Does the actual pageflip (evergreen+).
1311  * During vblank we take the crtc lock and wait for the update_pending
1312  * bit to go high, when it does, we release the lock, and allow the
1313  * double buffered update to take place.
1314  * Returns the current update pending status.
1315  */
u32 evergreen_page_flip(struct radeon_device *rdev, int crtc_id, u64 crtc_base)
{
	struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id];
	u32 tmp = RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset);
	int i;

	/* Lock the graphics update lock so the address writes below are
	 * latched atomically */
	tmp |= EVERGREEN_GRPH_UPDATE_LOCK;
	WREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset, tmp);

	/* update the scanout addresses; both primary and secondary surface
	 * registers are written with the same base */
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
	       upper_32_bits(crtc_base));
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
	       (u32)crtc_base);

	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
	       upper_32_bits(crtc_base));
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
	       (u32)crtc_base);

	/* Wait for update_pending to go high (bounded by usec_timeout). */
	for (i = 0; i < rdev->usec_timeout; i++) {
		if (RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset) & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING)
			break;
		udelay(1);
	}
	DRM_DEBUG("Update pending now high. Unlocking vupdate_lock.\n");

	/* Unlock the lock, so double-buffering can take place inside vblank */
	tmp &= ~EVERGREEN_GRPH_UPDATE_LOCK;
	WREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset, tmp);

	/* Return current update_pending status: */
	return RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset) & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING;
}
1352
1353 /* get temperature in millidegrees */
1354 int evergreen_get_temp(struct radeon_device *rdev)
1355 {
1356         u32 temp, toffset;
1357         int actual_temp = 0;
1358
1359         if (rdev->family == CHIP_JUNIPER) {
1360                 toffset = (RREG32(CG_THERMAL_CTRL) & TOFFSET_MASK) >>
1361                         TOFFSET_SHIFT;
1362                 temp = (RREG32(CG_TS0_STATUS) & TS0_ADC_DOUT_MASK) >>
1363                         TS0_ADC_DOUT_SHIFT;
1364
1365                 if (toffset & 0x100)
1366                         actual_temp = temp / 2 - (0x200 - toffset);
1367                 else
1368                         actual_temp = temp / 2 + toffset;
1369
1370                 actual_temp = actual_temp * 1000;
1371
1372         } else {
1373                 temp = (RREG32(CG_MULT_THERMAL_STATUS) & ASIC_T_MASK) >>
1374                         ASIC_T_SHIFT;
1375
1376                 if (temp & 0x400)
1377                         actual_temp = -256;
1378                 else if (temp & 0x200)
1379                         actual_temp = 255;
1380                 else if (temp & 0x100) {
1381                         actual_temp = temp & 0x1ff;
1382                         actual_temp |= ~0x1ff;
1383                 } else
1384                         actual_temp = temp & 0xff;
1385
1386                 actual_temp = (actual_temp * 1000) / 2;
1387         }
1388
1389         return actual_temp;
1390 }
1391
1392 int sumo_get_temp(struct radeon_device *rdev)
1393 {
1394         u32 temp = RREG32(CG_THERMAL_STATUS) & 0xff;
1395         int actual_temp = temp - 49;
1396
1397         return actual_temp * 1000;
1398 }
1399
1400 /**
1401  * sumo_pm_init_profile - Initialize power profiles callback.
1402  *
1403  * @rdev: radeon_device pointer
1404  *
1405  * Initialize the power states used in profile mode
1406  * (sumo, trinity, SI).
1407  * Used for profile mode only.
1408  */
1409 void sumo_pm_init_profile(struct radeon_device *rdev)
1410 {
1411         int idx;
1412
1413         /* default */
1414         rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
1415         rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
1416         rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
1417         rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 0;
1418
1419         /* low,mid sh/mh */
1420         if (rdev->flags & RADEON_IS_MOBILITY)
1421                 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
1422         else
1423                 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1424
1425         rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
1426         rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
1427         rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
1428         rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
1429
1430         rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
1431         rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
1432         rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
1433         rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
1434
1435         rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
1436         rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
1437         rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
1438         rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 0;
1439
1440         rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
1441         rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
1442         rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
1443         rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 0;
1444
1445         /* high sh/mh */
1446         idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1447         rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
1448         rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
1449         rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
1450         rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx =
1451                 rdev->pm.power_state[idx].num_clock_modes - 1;
1452
1453         rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
1454         rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
1455         rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
1456         rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx =
1457                 rdev->pm.power_state[idx].num_clock_modes - 1;
1458 }
1459
1460 /**
1461  * btc_pm_init_profile - Initialize power profiles callback.
1462  *
1463  * @rdev: radeon_device pointer
1464  *
1465  * Initialize the power states used in profile mode
1466  * (BTC, cayman).
1467  * Used for profile mode only.
1468  */
1469 void btc_pm_init_profile(struct radeon_device *rdev)
1470 {
1471         int idx;
1472
1473         /* default */
1474         rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
1475         rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
1476         rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
1477         rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 2;
1478         /* starting with BTC, there is one state that is used for both
1479          * MH and SH.  Difference is that we always use the high clock index for
1480          * mclk.
1481          */
1482         if (rdev->flags & RADEON_IS_MOBILITY)
1483                 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
1484         else
1485                 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1486         /* low sh */
1487         rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
1488         rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
1489         rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
1490         rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
1491         /* mid sh */
1492         rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
1493         rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
1494         rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
1495         rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 1;
1496         /* high sh */
1497         rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
1498         rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
1499         rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
1500         rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx = 2;
1501         /* low mh */
1502         rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
1503         rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
1504         rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
1505         rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
1506         /* mid mh */
1507         rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
1508         rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
1509         rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
1510         rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 1;
1511         /* high mh */
1512         rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
1513         rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
1514         rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
1515         rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx = 2;
1516 }
1517
1518 /**
1519  * evergreen_pm_misc - set additional pm hw parameters callback.
1520  *
1521  * @rdev: radeon_device pointer
1522  *
1523  * Set non-clock parameters associated with a power state
1524  * (voltage, etc.) (evergreen+).
1525  */
void evergreen_pm_misc(struct radeon_device *rdev)
{
	int req_ps_idx = rdev->pm.requested_power_state_index;
	int req_cm_idx = rdev->pm.requested_clock_mode_index;
	struct radeon_power_state *ps = &rdev->pm.power_state[req_ps_idx];
	struct radeon_voltage *voltage = &ps->clock_info[req_cm_idx].voltage;

	/* Only software-controlled voltage is programmed here; other
	 * voltage types are handled elsewhere (or by the hw itself).
	 */
	if (voltage->type == VOLTAGE_SW) {
		/* 0xff0x are flags rather than an actual voltage */
		if ((voltage->voltage & 0xff00) == 0xff00)
			return;
		/* only reprogram VDDC when it is valid and actually changed */
		if (voltage->voltage && (voltage->voltage != rdev->pm.current_vddc)) {
			radeon_atom_set_voltage(rdev, voltage->voltage, SET_VOLTAGE_TYPE_ASIC_VDDC);
			rdev->pm.current_vddc = voltage->voltage;
			DRM_DEBUG("Setting: vddc: %d\n", voltage->voltage);
		}

		/* starting with BTC, there is one state that is used for both
		 * MH and SH.  Difference is that we always use the high clock index for
		 * mclk and vddci.
		 */
		if ((rdev->pm.pm_method == PM_METHOD_PROFILE) &&
		    (rdev->family >= CHIP_BARTS) &&
		    rdev->pm.active_crtc_count &&
		    ((rdev->pm.profile_index == PM_PROFILE_MID_MH_IDX) ||
		     (rdev->pm.profile_index == PM_PROFILE_LOW_MH_IDX)))
			voltage = &rdev->pm.power_state[req_ps_idx].
				clock_info[rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx].voltage;

		/* 0xff0x are flags rather than an actual voltage */
		if ((voltage->vddci & 0xff00) == 0xff00)
			return;
		/* only reprogram VDDCI when it is valid and actually changed */
		if (voltage->vddci && (voltage->vddci != rdev->pm.current_vddci)) {
			radeon_atom_set_voltage(rdev, voltage->vddci, SET_VOLTAGE_TYPE_ASIC_VDDCI);
			rdev->pm.current_vddci = voltage->vddci;
			DRM_DEBUG("Setting: vddci: %d\n", voltage->vddci);
		}
	}
}
1565
1566 /**
1567  * evergreen_pm_prepare - pre-power state change callback.
1568  *
1569  * @rdev: radeon_device pointer
1570  *
1571  * Prepare for a power state change (evergreen+).
1572  */
1573 void evergreen_pm_prepare(struct radeon_device *rdev)
1574 {
1575         struct drm_device *ddev = rdev->ddev;
1576         struct drm_crtc *crtc;
1577         struct radeon_crtc *radeon_crtc;
1578         u32 tmp;
1579
1580         /* disable any active CRTCs */
1581         list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
1582                 radeon_crtc = to_radeon_crtc(crtc);
1583                 if (radeon_crtc->enabled) {
1584                         tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
1585                         tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
1586                         WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
1587                 }
1588         }
1589 }
1590
1591 /**
1592  * evergreen_pm_finish - post-power state change callback.
1593  *
1594  * @rdev: radeon_device pointer
1595  *
1596  * Clean up after a power state change (evergreen+).
1597  */
1598 void evergreen_pm_finish(struct radeon_device *rdev)
1599 {
1600         struct drm_device *ddev = rdev->ddev;
1601         struct drm_crtc *crtc;
1602         struct radeon_crtc *radeon_crtc;
1603         u32 tmp;
1604
1605         /* enable any active CRTCs */
1606         list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
1607                 radeon_crtc = to_radeon_crtc(crtc);
1608                 if (radeon_crtc->enabled) {
1609                         tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
1610                         tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
1611                         WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
1612                 }
1613         }
1614 }
1615
1616 /**
1617  * evergreen_hpd_sense - hpd sense callback.
1618  *
1619  * @rdev: radeon_device pointer
1620  * @hpd: hpd (hotplug detect) pin
1621  *
1622  * Checks if a digital monitor is connected (evergreen+).
1623  * Returns true if connected, false if not connected.
1624  */
1625 bool evergreen_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd)
1626 {
1627         bool connected = false;
1628
1629         switch (hpd) {
1630         case RADEON_HPD_1:
1631                 if (RREG32(DC_HPD1_INT_STATUS) & DC_HPDx_SENSE)
1632                         connected = true;
1633                 break;
1634         case RADEON_HPD_2:
1635                 if (RREG32(DC_HPD2_INT_STATUS) & DC_HPDx_SENSE)
1636                         connected = true;
1637                 break;
1638         case RADEON_HPD_3:
1639                 if (RREG32(DC_HPD3_INT_STATUS) & DC_HPDx_SENSE)
1640                         connected = true;
1641                 break;
1642         case RADEON_HPD_4:
1643                 if (RREG32(DC_HPD4_INT_STATUS) & DC_HPDx_SENSE)
1644                         connected = true;
1645                 break;
1646         case RADEON_HPD_5:
1647                 if (RREG32(DC_HPD5_INT_STATUS) & DC_HPDx_SENSE)
1648                         connected = true;
1649                 break;
1650         case RADEON_HPD_6:
1651                 if (RREG32(DC_HPD6_INT_STATUS) & DC_HPDx_SENSE)
1652                         connected = true;
1653                 break;
1654         default:
1655                 break;
1656         }
1657
1658         return connected;
1659 }
1660
1661 /**
1662  * evergreen_hpd_set_polarity - hpd set polarity callback.
1663  *
1664  * @rdev: radeon_device pointer
1665  * @hpd: hpd (hotplug detect) pin
1666  *
1667  * Set the polarity of the hpd pin (evergreen+).
1668  */
1669 void evergreen_hpd_set_polarity(struct radeon_device *rdev,
1670                                 enum radeon_hpd_id hpd)
1671 {
1672         u32 tmp;
1673         bool connected = evergreen_hpd_sense(rdev, hpd);
1674
1675         switch (hpd) {
1676         case RADEON_HPD_1:
1677                 tmp = RREG32(DC_HPD1_INT_CONTROL);
1678                 if (connected)
1679                         tmp &= ~DC_HPDx_INT_POLARITY;
1680                 else
1681                         tmp |= DC_HPDx_INT_POLARITY;
1682                 WREG32(DC_HPD1_INT_CONTROL, tmp);
1683                 break;
1684         case RADEON_HPD_2:
1685                 tmp = RREG32(DC_HPD2_INT_CONTROL);
1686                 if (connected)
1687                         tmp &= ~DC_HPDx_INT_POLARITY;
1688                 else
1689                         tmp |= DC_HPDx_INT_POLARITY;
1690                 WREG32(DC_HPD2_INT_CONTROL, tmp);
1691                 break;
1692         case RADEON_HPD_3:
1693                 tmp = RREG32(DC_HPD3_INT_CONTROL);
1694                 if (connected)
1695                         tmp &= ~DC_HPDx_INT_POLARITY;
1696                 else
1697                         tmp |= DC_HPDx_INT_POLARITY;
1698                 WREG32(DC_HPD3_INT_CONTROL, tmp);
1699                 break;
1700         case RADEON_HPD_4:
1701                 tmp = RREG32(DC_HPD4_INT_CONTROL);
1702                 if (connected)
1703                         tmp &= ~DC_HPDx_INT_POLARITY;
1704                 else
1705                         tmp |= DC_HPDx_INT_POLARITY;
1706                 WREG32(DC_HPD4_INT_CONTROL, tmp);
1707                 break;
1708         case RADEON_HPD_5:
1709                 tmp = RREG32(DC_HPD5_INT_CONTROL);
1710                 if (connected)
1711                         tmp &= ~DC_HPDx_INT_POLARITY;
1712                 else
1713                         tmp |= DC_HPDx_INT_POLARITY;
1714                 WREG32(DC_HPD5_INT_CONTROL, tmp);
1715                         break;
1716         case RADEON_HPD_6:
1717                 tmp = RREG32(DC_HPD6_INT_CONTROL);
1718                 if (connected)
1719                         tmp &= ~DC_HPDx_INT_POLARITY;
1720                 else
1721                         tmp |= DC_HPDx_INT_POLARITY;
1722                 WREG32(DC_HPD6_INT_CONTROL, tmp);
1723                 break;
1724         default:
1725                 break;
1726         }
1727 }
1728
1729 /**
1730  * evergreen_hpd_init - hpd setup callback.
1731  *
1732  * @rdev: radeon_device pointer
1733  *
1734  * Setup the hpd pins used by the card (evergreen+).
1735  * Enable the pin, set the polarity, and enable the hpd interrupts.
1736  */
1737 void evergreen_hpd_init(struct radeon_device *rdev)
1738 {
1739         struct drm_device *dev = rdev->ddev;
1740         struct drm_connector *connector;
1741         unsigned enabled = 0;
1742         u32 tmp = DC_HPDx_CONNECTION_TIMER(0x9c4) |
1743                 DC_HPDx_RX_INT_TIMER(0xfa) | DC_HPDx_EN;
1744
1745         list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
1746                 struct radeon_connector *radeon_connector = to_radeon_connector(connector);
1747
1748                 if (connector->connector_type == DRM_MODE_CONNECTOR_eDP ||
1749                     connector->connector_type == DRM_MODE_CONNECTOR_LVDS) {
1750                         /* don't try to enable hpd on eDP or LVDS avoid breaking the
1751                          * aux dp channel on imac and help (but not completely fix)
1752                          * https://bugzilla.redhat.com/show_bug.cgi?id=726143
1753                          * also avoid interrupt storms during dpms.
1754                          */
1755                         continue;
1756                 }
1757                 switch (radeon_connector->hpd.hpd) {
1758                 case RADEON_HPD_1:
1759                         WREG32(DC_HPD1_CONTROL, tmp);
1760                         break;
1761                 case RADEON_HPD_2:
1762                         WREG32(DC_HPD2_CONTROL, tmp);
1763                         break;
1764                 case RADEON_HPD_3:
1765                         WREG32(DC_HPD3_CONTROL, tmp);
1766                         break;
1767                 case RADEON_HPD_4:
1768                         WREG32(DC_HPD4_CONTROL, tmp);
1769                         break;
1770                 case RADEON_HPD_5:
1771                         WREG32(DC_HPD5_CONTROL, tmp);
1772                         break;
1773                 case RADEON_HPD_6:
1774                         WREG32(DC_HPD6_CONTROL, tmp);
1775                         break;
1776                 default:
1777                         break;
1778                 }
1779                 radeon_hpd_set_polarity(rdev, radeon_connector->hpd.hpd);
1780                 enabled |= 1 << radeon_connector->hpd.hpd;
1781         }
1782         radeon_irq_kms_enable_hpd(rdev, enabled);
1783 }
1784
1785 /**
1786  * evergreen_hpd_fini - hpd tear down callback.
1787  *
1788  * @rdev: radeon_device pointer
1789  *
1790  * Tear down the hpd pins used by the card (evergreen+).
1791  * Disable the hpd interrupts.
1792  */
1793 void evergreen_hpd_fini(struct radeon_device *rdev)
1794 {
1795         struct drm_device *dev = rdev->ddev;
1796         struct drm_connector *connector;
1797         unsigned disabled = 0;
1798
1799         list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
1800                 struct radeon_connector *radeon_connector = to_radeon_connector(connector);
1801                 switch (radeon_connector->hpd.hpd) {
1802                 case RADEON_HPD_1:
1803                         WREG32(DC_HPD1_CONTROL, 0);
1804                         break;
1805                 case RADEON_HPD_2:
1806                         WREG32(DC_HPD2_CONTROL, 0);
1807                         break;
1808                 case RADEON_HPD_3:
1809                         WREG32(DC_HPD3_CONTROL, 0);
1810                         break;
1811                 case RADEON_HPD_4:
1812                         WREG32(DC_HPD4_CONTROL, 0);
1813                         break;
1814                 case RADEON_HPD_5:
1815                         WREG32(DC_HPD5_CONTROL, 0);
1816                         break;
1817                 case RADEON_HPD_6:
1818                         WREG32(DC_HPD6_CONTROL, 0);
1819                         break;
1820                 default:
1821                         break;
1822                 }
1823                 disabled |= 1 << radeon_connector->hpd.hpd;
1824         }
1825         radeon_irq_kms_disable_hpd(rdev, disabled);
1826 }
1827
1828 /* watermark setup */
1829
static u32 evergreen_line_buffer_adjust(struct radeon_device *rdev,
					struct radeon_crtc *radeon_crtc,
					struct drm_display_mode *mode,
					struct drm_display_mode *other_mode)
{
	u32 tmp, buffer_alloc, i;
	u32 pipe_offset = radeon_crtc->crtc_id * 0x20;
	/*
	 * Line Buffer Setup
	 * There are 3 line buffers, each one shared by 2 display controllers.
	 * DC_LB_MEMORY_SPLIT controls how that line buffer is shared between
	 * the display controllers.  The partitioning is done via one of four
	 * preset allocations specified in bits 2:0:
	 * first display controller
	 *  0 - first half of lb (3840 * 2)
	 *  1 - first 3/4 of lb (5760 * 2)
	 *  2 - whole lb (7680 * 2), other crtc must be disabled
	 *  3 - first 1/4 of lb (1920 * 2)
	 * second display controller
	 *  4 - second half of lb (3840 * 2)
	 *  5 - second 3/4 of lb (5760 * 2)
	 *  6 - whole lb (7680 * 2), other crtc must be disabled
	 *  7 - last 1/4 of lb (1920 * 2)
	 */
	/* this can get tricky if we have two large displays on a paired group
	 * of crtcs.  Ideally for multiple large displays we'd assign them to
	 * non-linked crtcs for maximum line buffer allocation.
	 */
	if (radeon_crtc->base.enabled && mode) {
		if (other_mode) {
			tmp = 0; /* 1/2 */
			buffer_alloc = 1;
		} else {
			tmp = 2; /* whole */
			buffer_alloc = 2;
		}
	} else {
		/* crtc disabled: give it no line buffer at all */
		tmp = 0;
		buffer_alloc = 0;
	}

	/* second controller of the pair uses second half of the lb */
	if (radeon_crtc->crtc_id % 2)
		tmp += 4;
	WREG32(DC_LB_MEMORY_SPLIT + radeon_crtc->crtc_offset, tmp);

	/* DCE4.1/DCE5 also need the DMIF buffers programmed; poll until the
	 * hw acknowledges the new allocation (bounded by usec_timeout).
	 */
	if (ASIC_IS_DCE41(rdev) || ASIC_IS_DCE5(rdev)) {
		WREG32(PIPE0_DMIF_BUFFER_CONTROL + pipe_offset,
		       DMIF_BUFFERS_ALLOCATED(buffer_alloc));
		for (i = 0; i < rdev->usec_timeout; i++) {
			if (RREG32(PIPE0_DMIF_BUFFER_CONTROL + pipe_offset) &
			    DMIF_BUFFERS_ALLOCATED_COMPLETED)
				break;
			udelay(1);
		}
	}

	/* translate the split code back into the number of lb entries
	 * (DCE5 line buffers are larger than DCE4 ones)
	 */
	if (radeon_crtc->base.enabled && mode) {
		switch (tmp) {
		case 0:
		case 4:
		default:
			if (ASIC_IS_DCE5(rdev))
				return 4096 * 2;
			else
				return 3840 * 2;
		case 1:
		case 5:
			if (ASIC_IS_DCE5(rdev))
				return 6144 * 2;
			else
				return 5760 * 2;
		case 2:
		case 6:
			if (ASIC_IS_DCE5(rdev))
				return 8192 * 2;
			else
				return 7680 * 2;
		case 3:
		case 7:
			if (ASIC_IS_DCE5(rdev))
				return 2048 * 2;
			else
				return 1920 * 2;
		}
	}

	/* controller not enabled, so no lb used */
	return 0;
}
1920
1921 u32 evergreen_get_number_of_dram_channels(struct radeon_device *rdev)
1922 {
1923         u32 tmp = RREG32(MC_SHARED_CHMAP);
1924
1925         switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
1926         case 0:
1927         default:
1928                 return 1;
1929         case 1:
1930                 return 2;
1931         case 2:
1932                 return 4;
1933         case 3:
1934                 return 8;
1935         }
1936 }
1937
/* inputs for the display watermark calculations below */
struct evergreen_wm_params {
	u32 dram_channels; /* number of dram channels */
	u32 yclk;          /* bandwidth per dram data pin in kHz */
	u32 sclk;          /* engine clock in kHz */
	u32 disp_clk;      /* display clock in kHz */
	u32 src_width;     /* viewport width */
	u32 active_time;   /* active display time in ns */
	u32 blank_time;    /* blank time in ns */
	bool interlaced;   /* mode is interlaced */
	fixed20_12 vsc;    /* vertical scale ratio */
	u32 num_heads;     /* number of active crtcs */
	u32 bytes_per_pixel; /* bytes per pixel display + overlay */
	u32 lb_size;       /* line buffer allocated to pipe */
	u32 vtaps;         /* vertical scaler taps */
};
1953
1954 static u32 evergreen_dram_bandwidth(struct evergreen_wm_params *wm)
1955 {
1956         /* Calculate DRAM Bandwidth and the part allocated to display. */
1957         fixed20_12 dram_efficiency; /* 0.7 */
1958         fixed20_12 yclk, dram_channels, bandwidth;
1959         fixed20_12 a;
1960
1961         a.full = dfixed_const(1000);
1962         yclk.full = dfixed_const(wm->yclk);
1963         yclk.full = dfixed_div(yclk, a);
1964         dram_channels.full = dfixed_const(wm->dram_channels * 4);
1965         a.full = dfixed_const(10);
1966         dram_efficiency.full = dfixed_const(7);
1967         dram_efficiency.full = dfixed_div(dram_efficiency, a);
1968         bandwidth.full = dfixed_mul(dram_channels, yclk);
1969         bandwidth.full = dfixed_mul(bandwidth, dram_efficiency);
1970
1971         return dfixed_trunc(bandwidth);
1972 }
1973
1974 static u32 evergreen_dram_bandwidth_for_display(struct evergreen_wm_params *wm)
1975 {
1976         /* Calculate DRAM Bandwidth and the part allocated to display. */
1977         fixed20_12 disp_dram_allocation; /* 0.3 to 0.7 */
1978         fixed20_12 yclk, dram_channels, bandwidth;
1979         fixed20_12 a;
1980
1981         a.full = dfixed_const(1000);
1982         yclk.full = dfixed_const(wm->yclk);
1983         yclk.full = dfixed_div(yclk, a);
1984         dram_channels.full = dfixed_const(wm->dram_channels * 4);
1985         a.full = dfixed_const(10);
1986         disp_dram_allocation.full = dfixed_const(3); /* XXX worse case value 0.3 */
1987         disp_dram_allocation.full = dfixed_div(disp_dram_allocation, a);
1988         bandwidth.full = dfixed_mul(dram_channels, yclk);
1989         bandwidth.full = dfixed_mul(bandwidth, disp_dram_allocation);
1990
1991         return dfixed_trunc(bandwidth);
1992 }
1993
1994 static u32 evergreen_data_return_bandwidth(struct evergreen_wm_params *wm)
1995 {
1996         /* Calculate the display Data return Bandwidth */
1997         fixed20_12 return_efficiency; /* 0.8 */
1998         fixed20_12 sclk, bandwidth;
1999         fixed20_12 a;
2000
2001         a.full = dfixed_const(1000);
2002         sclk.full = dfixed_const(wm->sclk);
2003         sclk.full = dfixed_div(sclk, a);
2004         a.full = dfixed_const(10);
2005         return_efficiency.full = dfixed_const(8);
2006         return_efficiency.full = dfixed_div(return_efficiency, a);
2007         a.full = dfixed_const(32);
2008         bandwidth.full = dfixed_mul(a, sclk);
2009         bandwidth.full = dfixed_mul(bandwidth, return_efficiency);
2010
2011         return dfixed_trunc(bandwidth);
2012 }
2013
2014 static u32 evergreen_dmif_request_bandwidth(struct evergreen_wm_params *wm)
2015 {
2016         /* Calculate the DMIF Request Bandwidth */
2017         fixed20_12 disp_clk_request_efficiency; /* 0.8 */
2018         fixed20_12 disp_clk, bandwidth;
2019         fixed20_12 a;
2020
2021         a.full = dfixed_const(1000);
2022         disp_clk.full = dfixed_const(wm->disp_clk);
2023         disp_clk.full = dfixed_div(disp_clk, a);
2024         a.full = dfixed_const(10);
2025         disp_clk_request_efficiency.full = dfixed_const(8);
2026         disp_clk_request_efficiency.full = dfixed_div(disp_clk_request_efficiency, a);
2027         a.full = dfixed_const(32);
2028         bandwidth.full = dfixed_mul(a, disp_clk);
2029         bandwidth.full = dfixed_mul(bandwidth, disp_clk_request_efficiency);
2030
2031         return dfixed_trunc(bandwidth);
2032 }
2033
2034 static u32 evergreen_available_bandwidth(struct evergreen_wm_params *wm)
2035 {
2036         /* Calculate the Available bandwidth. Display can use this temporarily but not in average. */
2037         u32 dram_bandwidth = evergreen_dram_bandwidth(wm);
2038         u32 data_return_bandwidth = evergreen_data_return_bandwidth(wm);
2039         u32 dmif_req_bandwidth = evergreen_dmif_request_bandwidth(wm);
2040
2041         return min(dram_bandwidth, min(data_return_bandwidth, dmif_req_bandwidth));
2042 }
2043
2044 static u32 evergreen_average_bandwidth(struct evergreen_wm_params *wm)
2045 {
2046         /* Calculate the display mode Average Bandwidth
2047          * DisplayMode should contain the source and destination dimensions,
2048          * timing, etc.
2049          */
2050         fixed20_12 bpp;
2051         fixed20_12 line_time;
2052         fixed20_12 src_width;
2053         fixed20_12 bandwidth;
2054         fixed20_12 a;
2055
2056         a.full = dfixed_const(1000);
2057         line_time.full = dfixed_const(wm->active_time + wm->blank_time);
2058         line_time.full = dfixed_div(line_time, a);
2059         bpp.full = dfixed_const(wm->bytes_per_pixel);
2060         src_width.full = dfixed_const(wm->src_width);
2061         bandwidth.full = dfixed_mul(src_width, bpp);
2062         bandwidth.full = dfixed_mul(bandwidth, wm->vsc);
2063         bandwidth.full = dfixed_div(bandwidth, line_time);
2064
2065         return dfixed_trunc(bandwidth);
2066 }
2067
static u32 evergreen_latency_watermark(struct evergreen_wm_params *wm)
{
	/* First calculate the latency in ns */
	u32 mc_latency = 2000; /* 2000 ns. */
	/* NOTE(review): the divisions below happen before the num_heads == 0
	 * guard; they assume available_bandwidth and disp_clk are non-zero
	 * for any active head — TODO confirm against callers.
	 */
	u32 available_bandwidth = evergreen_available_bandwidth(wm);
	u32 worst_chunk_return_time = (512 * 8 * 1000) / available_bandwidth;
	u32 cursor_line_pair_return_time = (128 * 4 * 1000) / available_bandwidth;
	u32 dc_latency = 40000000 / wm->disp_clk; /* dc pipe latency */
	u32 other_heads_data_return_time = ((wm->num_heads + 1) * worst_chunk_return_time) +
		(wm->num_heads * cursor_line_pair_return_time);
	u32 latency = mc_latency + other_heads_data_return_time + dc_latency;
	u32 max_src_lines_per_dst_line, lb_fill_bw, line_fill_time;
	fixed20_12 a, b, c;

	/* no active heads: no watermark needed */
	if (wm->num_heads == 0)
		return 0;

	/* heavy downscaling, many scaler taps, or interlaced modes need
	 * up to 4 source lines per destination line; otherwise 2 suffice
	 */
	a.full = dfixed_const(2);
	b.full = dfixed_const(1);
	if ((wm->vsc.full > a.full) ||
	    ((wm->vsc.full > b.full) && (wm->vtaps >= 3)) ||
	    (wm->vtaps >= 5) ||
	    ((wm->vsc.full >= a.full) && wm->interlaced))
		max_src_lines_per_dst_line = 4;
	else
		max_src_lines_per_dst_line = 2;

	/* line buffer fill rate: the lesser of this head's share of the
	 * available bandwidth and disp_clk * bytes_per_pixel
	 */
	a.full = dfixed_const(available_bandwidth);
	b.full = dfixed_const(wm->num_heads);
	a.full = dfixed_div(a, b);

	b.full = dfixed_const(1000);
	c.full = dfixed_const(wm->disp_clk);
	b.full = dfixed_div(c, b);
	c.full = dfixed_const(wm->bytes_per_pixel);
	b.full = dfixed_mul(b, c);

	lb_fill_bw = min(dfixed_trunc(a), dfixed_trunc(b));

	/* time to fill the worst-case number of source lines at that rate */
	a.full = dfixed_const(max_src_lines_per_dst_line * wm->src_width * wm->bytes_per_pixel);
	b.full = dfixed_const(1000);
	c.full = dfixed_const(lb_fill_bw);
	b.full = dfixed_div(c, b);
	a.full = dfixed_div(a, b);
	line_fill_time = dfixed_trunc(a);

	/* if the line can't be filled within the active time, the watermark
	 * grows by the shortfall
	 */
	if (line_fill_time < wm->active_time)
		return latency;
	else
		return latency + (line_fill_time - wm->active_time);

}
2120
2121 static bool evergreen_average_bandwidth_vs_dram_bandwidth_for_display(struct evergreen_wm_params *wm)
2122 {
2123         if (evergreen_average_bandwidth(wm) <=
2124             (evergreen_dram_bandwidth_for_display(wm) / wm->num_heads))
2125                 return true;
2126         else
2127                 return false;
2128 };
2129
2130 static bool evergreen_average_bandwidth_vs_available_bandwidth(struct evergreen_wm_params *wm)
2131 {
2132         if (evergreen_average_bandwidth(wm) <=
2133             (evergreen_available_bandwidth(wm) / wm->num_heads))
2134                 return true;
2135         else
2136                 return false;
2137 };
2138
2139 static bool evergreen_check_latency_hiding(struct evergreen_wm_params *wm)
2140 {
2141         u32 lb_partitions = wm->lb_size / wm->src_width;
2142         u32 line_time = wm->active_time + wm->blank_time;
2143         u32 latency_tolerant_lines;
2144         u32 latency_hiding;
2145         fixed20_12 a;
2146
2147         a.full = dfixed_const(1);
2148         if (wm->vsc.full > a.full)
2149                 latency_tolerant_lines = 1;
2150         else {
2151                 if (lb_partitions <= (wm->vtaps + 1))
2152                         latency_tolerant_lines = 1;
2153                 else
2154                         latency_tolerant_lines = 2;
2155         }
2156
2157         latency_hiding = (latency_tolerant_lines * line_time + wm->blank_time);
2158
2159         if (evergreen_latency_watermark(wm) <= latency_hiding)
2160                 return true;
2161         else
2162                 return false;
2163 }
2164
2165 static void evergreen_program_watermarks(struct radeon_device *rdev,
2166                                          struct radeon_crtc *radeon_crtc,
2167                                          u32 lb_size, u32 num_heads)
2168 {
2169         struct drm_display_mode *mode = &radeon_crtc->base.mode;
2170         struct evergreen_wm_params wm_low, wm_high;
2171         u32 dram_channels;
2172         u32 pixel_period;
2173         u32 line_time = 0;
2174         u32 latency_watermark_a = 0, latency_watermark_b = 0;
2175         u32 priority_a_mark = 0, priority_b_mark = 0;
2176         u32 priority_a_cnt = PRIORITY_OFF;
2177         u32 priority_b_cnt = PRIORITY_OFF;
2178         u32 pipe_offset = radeon_crtc->crtc_id * 16;
2179         u32 tmp, arb_control3;
2180         fixed20_12 a, b, c;
2181
2182         if (radeon_crtc->base.enabled && num_heads && mode) {
2183                 pixel_period = 1000000 / (u32)mode->clock;
2184                 line_time = min((u32)mode->crtc_htotal * pixel_period, (u32)65535);
2185                 priority_a_cnt = 0;
2186                 priority_b_cnt = 0;
2187                 dram_channels = evergreen_get_number_of_dram_channels(rdev);
2188
2189                 /* watermark for high clocks */
2190                 if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) {
2191                         wm_high.yclk =
2192                                 radeon_dpm_get_mclk(rdev, false) * 10;
2193                         wm_high.sclk =
2194                                 radeon_dpm_get_sclk(rdev, false) * 10;
2195                 } else {
2196                         wm_high.yclk = rdev->pm.current_mclk * 10;
2197                         wm_high.sclk = rdev->pm.current_sclk * 10;
2198                 }
2199
2200                 wm_high.disp_clk = mode->clock;
2201                 wm_high.src_width = mode->crtc_hdisplay;
2202                 wm_high.active_time = mode->crtc_hdisplay * pixel_period;
2203                 wm_high.blank_time = line_time - wm_high.active_time;
2204                 wm_high.interlaced = false;
2205                 if (mode->flags & DRM_MODE_FLAG_INTERLACE)
2206                         wm_high.interlaced = true;
2207                 wm_high.vsc = radeon_crtc->vsc;
2208                 wm_high.vtaps = 1;
2209                 if (radeon_crtc->rmx_type != RMX_OFF)
2210                         wm_high.vtaps = 2;
2211                 wm_high.bytes_per_pixel = 4; /* XXX: get this from fb config */
2212                 wm_high.lb_size = lb_size;
2213                 wm_high.dram_channels = dram_channels;
2214                 wm_high.num_heads = num_heads;
2215
2216                 /* watermark for low clocks */
2217                 if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) {
2218                         wm_low.yclk =
2219                                 radeon_dpm_get_mclk(rdev, true) * 10;
2220                         wm_low.sclk =
2221                                 radeon_dpm_get_sclk(rdev, true) * 10;
2222                 } else {
2223                         wm_low.yclk = rdev->pm.current_mclk * 10;
2224                         wm_low.sclk = rdev->pm.current_sclk * 10;
2225                 }
2226
2227                 wm_low.disp_clk = mode->clock;
2228                 wm_low.src_width = mode->crtc_hdisplay;
2229                 wm_low.active_time = mode->crtc_hdisplay * pixel_period;
2230                 wm_low.blank_time = line_time - wm_low.active_time;
2231                 wm_low.interlaced = false;
2232                 if (mode->flags & DRM_MODE_FLAG_INTERLACE)
2233                         wm_low.interlaced = true;
2234                 wm_low.vsc = radeon_crtc->vsc;
2235                 wm_low.vtaps = 1;
2236                 if (radeon_crtc->rmx_type != RMX_OFF)
2237                         wm_low.vtaps = 2;
2238                 wm_low.bytes_per_pixel = 4; /* XXX: get this from fb config */
2239                 wm_low.lb_size = lb_size;
2240                 wm_low.dram_channels = dram_channels;
2241                 wm_low.num_heads = num_heads;
2242
2243                 /* set for high clocks */
2244                 latency_watermark_a = min(evergreen_latency_watermark(&wm_high), (u32)65535);
2245                 /* set for low clocks */
2246                 latency_watermark_b = min(evergreen_latency_watermark(&wm_low), (u32)65535);
2247
2248                 /* possibly force display priority to high */
2249                 /* should really do this at mode validation time... */
2250                 if (!evergreen_average_bandwidth_vs_dram_bandwidth_for_display(&wm_high) ||
2251                     !evergreen_average_bandwidth_vs_available_bandwidth(&wm_high) ||
2252                     !evergreen_check_latency_hiding(&wm_high) ||
2253                     (rdev->disp_priority == 2)) {
2254                         DRM_DEBUG_KMS("force priority a to high\n");
2255                         priority_a_cnt |= PRIORITY_ALWAYS_ON;
2256                 }
2257                 if (!evergreen_average_bandwidth_vs_dram_bandwidth_for_display(&wm_low) ||
2258                     !evergreen_average_bandwidth_vs_available_bandwidth(&wm_low) ||
2259                     !evergreen_check_latency_hiding(&wm_low) ||
2260                     (rdev->disp_priority == 2)) {
2261                         DRM_DEBUG_KMS("force priority b to high\n");
2262                         priority_b_cnt |= PRIORITY_ALWAYS_ON;
2263                 }
2264
2265                 a.full = dfixed_const(1000);
2266                 b.full = dfixed_const(mode->clock);
2267                 b.full = dfixed_div(b, a);
2268                 c.full = dfixed_const(latency_watermark_a);
2269                 c.full = dfixed_mul(c, b);
2270                 c.full = dfixed_mul(c, radeon_crtc->hsc);
2271                 c.full = dfixed_div(c, a);
2272                 a.full = dfixed_const(16);
2273                 c.full = dfixed_div(c, a);
2274                 priority_a_mark = dfixed_trunc(c);
2275                 priority_a_cnt |= priority_a_mark & PRIORITY_MARK_MASK;
2276
2277                 a.full = dfixed_const(1000);
2278                 b.full = dfixed_const(mode->clock);
2279                 b.full = dfixed_div(b, a);
2280                 c.full = dfixed_const(latency_watermark_b);
2281                 c.full = dfixed_mul(c, b);
2282                 c.full = dfixed_mul(c, radeon_crtc->hsc);
2283                 c.full = dfixed_div(c, a);
2284                 a.full = dfixed_const(16);
2285                 c.full = dfixed_div(c, a);
2286                 priority_b_mark = dfixed_trunc(c);
2287                 priority_b_cnt |= priority_b_mark & PRIORITY_MARK_MASK;
2288         }
2289
2290         /* select wm A */
2291         arb_control3 = RREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset);
2292         tmp = arb_control3;
2293         tmp &= ~LATENCY_WATERMARK_MASK(3);
2294         tmp |= LATENCY_WATERMARK_MASK(1);
2295         WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, tmp);
2296         WREG32(PIPE0_LATENCY_CONTROL + pipe_offset,
2297                (LATENCY_LOW_WATERMARK(latency_watermark_a) |
2298                 LATENCY_HIGH_WATERMARK(line_time)));
2299         /* select wm B */
2300         tmp = RREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset);
2301         tmp &= ~LATENCY_WATERMARK_MASK(3);
2302         tmp |= LATENCY_WATERMARK_MASK(2);
2303         WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, tmp);
2304         WREG32(PIPE0_LATENCY_CONTROL + pipe_offset,
2305                (LATENCY_LOW_WATERMARK(latency_watermark_b) |
2306                 LATENCY_HIGH_WATERMARK(line_time)));
2307         /* restore original selection */
2308         WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, arb_control3);
2309
2310         /* write the priority marks */
2311         WREG32(PRIORITY_A_CNT + radeon_crtc->crtc_offset, priority_a_cnt);
2312         WREG32(PRIORITY_B_CNT + radeon_crtc->crtc_offset, priority_b_cnt);
2313
2314         /* save values for DPM */
2315         radeon_crtc->line_time = line_time;
2316         radeon_crtc->wm_high = latency_watermark_a;
2317         radeon_crtc->wm_low = latency_watermark_b;
2318 }
2319
2320 /**
2321  * evergreen_bandwidth_update - update display watermarks callback.
2322  *
2323  * @rdev: radeon_device pointer
2324  *
2325  * Update the display watermarks based on the requested mode(s)
2326  * (evergreen+).
2327  */
2328 void evergreen_bandwidth_update(struct radeon_device *rdev)
2329 {
2330         struct drm_display_mode *mode0 = NULL;
2331         struct drm_display_mode *mode1 = NULL;
2332         u32 num_heads = 0, lb_size;
2333         int i;
2334
2335         radeon_update_display_priority(rdev);
2336
2337         for (i = 0; i < rdev->num_crtc; i++) {
2338                 if (rdev->mode_info.crtcs[i]->base.enabled)
2339                         num_heads++;
2340         }
2341         for (i = 0; i < rdev->num_crtc; i += 2) {
2342                 mode0 = &rdev->mode_info.crtcs[i]->base.mode;
2343                 mode1 = &rdev->mode_info.crtcs[i+1]->base.mode;
2344                 lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i], mode0, mode1);
2345                 evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i], lb_size, num_heads);
2346                 lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i+1], mode1, mode0);
2347                 evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i+1], lb_size, num_heads);
2348         }
2349 }
2350
2351 /**
2352  * evergreen_mc_wait_for_idle - wait for MC idle callback.
2353  *
2354  * @rdev: radeon_device pointer
2355  *
2356  * Wait for the MC (memory controller) to be idle.
2357  * (evergreen+).
2358  * Returns 0 if the MC is idle, -1 if not.
2359  */
2360 int evergreen_mc_wait_for_idle(struct radeon_device *rdev)
2361 {
2362         unsigned i;
2363         u32 tmp;
2364
2365         for (i = 0; i < rdev->usec_timeout; i++) {
2366                 /* read MC_STATUS */
2367                 tmp = RREG32(SRBM_STATUS) & 0x1F00;
2368                 if (!tmp)
2369                         return 0;
2370                 udelay(1);
2371         }
2372         return -1;
2373 }
2374
2375 /*
2376  * GART
2377  */
2378 void evergreen_pcie_gart_tlb_flush(struct radeon_device *rdev)
2379 {
2380         unsigned i;
2381         u32 tmp;
2382
2383         WREG32(HDP_MEM_COHERENCY_FLUSH_CNTL, 0x1);
2384
2385         WREG32(VM_CONTEXT0_REQUEST_RESPONSE, REQUEST_TYPE(1));
2386         for (i = 0; i < rdev->usec_timeout; i++) {
2387                 /* read MC_STATUS */
2388                 tmp = RREG32(VM_CONTEXT0_REQUEST_RESPONSE);
2389                 tmp = (tmp & RESPONSE_TYPE_MASK) >> RESPONSE_TYPE_SHIFT;
2390                 if (tmp == 2) {
2391                         printk(KERN_WARNING "[drm] r600 flush TLB failed\n");
2392                         return;
2393                 }
2394                 if (tmp) {
2395                         return;
2396                 }
2397                 udelay(1);
2398         }
2399 }
2400
/**
 * evergreen_pcie_gart_enable - set up and enable the PCIE GART
 *
 * @rdev: radeon_device pointer
 *
 * Pins the GART page table in VRAM, programs the VM L2 cache and L1 TLB
 * control registers, points VM context 0 at the GTT aperture and the
 * page table, and flushes the TLB.
 *
 * Returns 0 on success, negative error code on failure.
 */
static int evergreen_pcie_gart_enable(struct radeon_device *rdev)
{
	u32 tmp;
	int r;

	if (rdev->gart.robj == NULL) {
		dev_err(rdev->dev, "No VRAM object for PCIE GART.\n");
		return -EINVAL;
	}
	r = radeon_gart_table_vram_pin(rdev);
	if (r)
		return r;
	radeon_gart_restore(rdev);
	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
				ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control */
	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
		SYSTEM_ACCESS_MODE_NOT_IN_SYS |
		SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
		EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	if (rdev->flags & RADEON_IS_IGP) {
		/* IGPs use the FUS_ variants of the MD TLB registers */
		WREG32(FUS_MC_VM_MD_L1_TLB0_CNTL, tmp);
		WREG32(FUS_MC_VM_MD_L1_TLB1_CNTL, tmp);
		WREG32(FUS_MC_VM_MD_L1_TLB2_CNTL, tmp);
	} else {
		WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
		WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
		WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
		/* these asics have a fourth MD L1 TLB */
		if ((rdev->family == CHIP_JUNIPER) ||
		    (rdev->family == CHIP_CYPRESS) ||
		    (rdev->family == CHIP_HEMLOCK) ||
		    (rdev->family == CHIP_BARTS))
			WREG32(MC_VM_MD_L1_TLB3_CNTL, tmp);
	}
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	/* point VM context 0 at the GTT range and the page table */
	WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12);
	WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12);
	WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12);
	WREG32(VM_CONTEXT0_CNTL, ENABLE_CONTEXT | PAGE_TABLE_DEPTH(0) |
				RANGE_PROTECTION_FAULT_ENABLE_DEFAULT);
	/* faulting accesses are redirected to the dummy page */
	WREG32(VM_CONTEXT0_PROTECTION_FAULT_DEFAULT_ADDR,
			(u32)(rdev->dummy_page.addr >> 12));
	WREG32(VM_CONTEXT1_CNTL, 0);

	evergreen_pcie_gart_tlb_flush(rdev);
	DRM_INFO("PCIE GART of %uM enabled (table at 0x%016llX).\n",
		 (unsigned)(rdev->mc.gtt_size >> 20),
		 (unsigned long long)rdev->gart.table_addr);
	rdev->gart.ready = true;
	return 0;
}
2459
/**
 * evergreen_pcie_gart_disable - disable the PCIE GART
 *
 * @rdev: radeon_device pointer
 *
 * Disables both VM contexts, reprograms the L2 cache and L1 TLB control
 * registers with their enable bits cleared, and unpins the GART page
 * table.
 */
static void evergreen_pcie_gart_disable(struct radeon_device *rdev)
{
	u32 tmp;

	/* Disable all tables */
	WREG32(VM_CONTEXT0_CNTL, 0);
	WREG32(VM_CONTEXT1_CNTL, 0);

	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_FRAGMENT_PROCESSING |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control (ENABLE_L1_TLB deliberately left clear) */
	tmp = EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	radeon_gart_table_vram_unpin(rdev);
}
2484
/**
 * evergreen_pcie_gart_fini - tear down the PCIE GART
 *
 * @rdev: radeon_device pointer
 *
 * Disables the GART, frees the page table VRAM object and releases the
 * GART bookkeeping structures.
 */
static void evergreen_pcie_gart_fini(struct radeon_device *rdev)
{
	evergreen_pcie_gart_disable(rdev);
	radeon_gart_table_vram_free(rdev);
	radeon_gart_fini(rdev);
}
2491
2492
/**
 * evergreen_agp_enable - set up the MC for AGP (no page table)
 *
 * @rdev: radeon_device pointer
 *
 * Programs the same L2 cache and L1 TLB setup as the GART path, but
 * leaves both VM contexts disabled so no page table translation is
 * performed.
 */
static void evergreen_agp_enable(struct radeon_device *rdev)
{
	u32 tmp;

	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
				ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control */
	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
		SYSTEM_ACCESS_MODE_NOT_IN_SYS |
		SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
		EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	/* both VM contexts stay disabled */
	WREG32(VM_CONTEXT0_CNTL, 0);
	WREG32(VM_CONTEXT1_CNTL, 0);
}
2518
/**
 * evergreen_mc_stop - stop MC clients before MC reprogramming (evergreen+)
 *
 * @rdev: radeon_device pointer
 * @save: save area filled for evergreen_mc_resume()
 *
 * Saves the VGA controller state, blanks all active display controllers,
 * blacks out the MC and locks the double buffered display registers so
 * the memory controller can be reprogrammed safely.
 */
void evergreen_mc_stop(struct radeon_device *rdev, struct evergreen_mc_save *save)
{
	u32 crtc_enabled, tmp, frame_count, blackout;
	int i, j;

	if (!ASIC_IS_NODCE(rdev)) {
		save->vga_render_control = RREG32(VGA_RENDER_CONTROL);
		save->vga_hdp_control = RREG32(VGA_HDP_CONTROL);

		/* disable VGA render */
		WREG32(VGA_RENDER_CONTROL, 0);
	}
	/* blank the display controllers */
	for (i = 0; i < rdev->num_crtc; i++) {
		crtc_enabled = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]) & EVERGREEN_CRTC_MASTER_EN;
		if (crtc_enabled) {
			save->crtc_enabled[i] = true;
			if (ASIC_IS_DCE6(rdev)) {
				/* DCE6: blank by enabling blank data */
				tmp = RREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i]);
				if (!(tmp & EVERGREEN_CRTC_BLANK_DATA_EN)) {
					radeon_wait_for_vblank(rdev, i);
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
					tmp |= EVERGREEN_CRTC_BLANK_DATA_EN;
					WREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i], tmp);
				}
			} else {
				/* pre-DCE6: stop the crtc issuing display read requests */
				tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
				if (!(tmp & EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE)) {
					radeon_wait_for_vblank(rdev, i);
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
					tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
					WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
				}
			}
			/* wait for the next frame */
			frame_count = radeon_get_vblank_counter(rdev, i);
			for (j = 0; j < rdev->usec_timeout; j++) {
				if (radeon_get_vblank_counter(rdev, i) != frame_count)
					break;
				udelay(1);
			}

			/* XXX this is a hack to avoid strange behavior with EFI on certain systems */
			WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
			tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
			tmp &= ~EVERGREEN_CRTC_MASTER_EN;
			WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
			WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
			/* recorded as not-enabled so evergreen_mc_resume() skips
			 * the unlock/unblank paths for this crtc (part of the
			 * hack above) */
			save->crtc_enabled[i] = false;
			/* ***** */
		} else {
			save->crtc_enabled[i] = false;
		}
	}

	radeon_mc_wait_for_idle(rdev);

	blackout = RREG32(MC_SHARED_BLACKOUT_CNTL);
	if ((blackout & BLACKOUT_MODE_MASK) != 1) {
		/* Block CPU access */
		WREG32(BIF_FB_EN, 0);
		/* blackout the MC */
		blackout &= ~BLACKOUT_MODE_MASK;
		WREG32(MC_SHARED_BLACKOUT_CNTL, blackout | 1);
	}
	/* wait for the MC to settle */
	udelay(100);

	/* lock double buffered regs */
	for (i = 0; i < rdev->num_crtc; i++) {
		if (save->crtc_enabled[i]) {
			tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
			if (!(tmp & EVERGREEN_GRPH_UPDATE_LOCK)) {
				tmp |= EVERGREEN_GRPH_UPDATE_LOCK;
				WREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i], tmp);
			}
			tmp = RREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i]);
			if (!(tmp & 1)) {
				tmp |= 1;
				WREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i], tmp);
			}
		}
	}
}
2604
/**
 * evergreen_mc_resume - undo the effects of evergreen_mc_stop()
 *
 * @rdev: radeon_device pointer
 * @save: state saved by evergreen_mc_stop()
 *
 * Points all crtcs and the VGA controller at the start of VRAM, unlocks
 * the double buffered display registers, lifts the MC blackout, restores
 * CPU framebuffer access, re-enables any crtc recorded as enabled in
 * @save and finally restores the saved VGA state.
 */
void evergreen_mc_resume(struct radeon_device *rdev, struct evergreen_mc_save *save)
{
	u32 tmp, frame_count;
	int i, j;

	/* update crtc base addresses */
	for (i = 0; i < rdev->num_crtc; i++) {
		WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + crtc_offsets[i],
		       upper_32_bits(rdev->mc.vram_start));
		WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + crtc_offsets[i],
		       upper_32_bits(rdev->mc.vram_start));
		WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + crtc_offsets[i],
		       (u32)rdev->mc.vram_start);
		WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + crtc_offsets[i],
		       (u32)rdev->mc.vram_start);
	}

	if (!ASIC_IS_NODCE(rdev)) {
		WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS_HIGH, upper_32_bits(rdev->mc.vram_start));
		WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS, (u32)rdev->mc.vram_start);
	}

	/* unlock regs and wait for update */
	for (i = 0; i < rdev->num_crtc; i++) {
		if (save->crtc_enabled[i]) {
			/* clear the master update mode field (bits [1:0]) */
			tmp = RREG32(EVERGREEN_MASTER_UPDATE_MODE + crtc_offsets[i]);
			if ((tmp & 0x3) != 0) {
				tmp &= ~0x3;
				WREG32(EVERGREEN_MASTER_UPDATE_MODE + crtc_offsets[i], tmp);
			}
			tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
			if (tmp & EVERGREEN_GRPH_UPDATE_LOCK) {
				tmp &= ~EVERGREEN_GRPH_UPDATE_LOCK;
				WREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i], tmp);
			}
			tmp = RREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i]);
			if (tmp & 1) {
				tmp &= ~1;
				WREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i], tmp);
			}
			/* wait for the pending surface update to clear */
			for (j = 0; j < rdev->usec_timeout; j++) {
				tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
				if ((tmp & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING) == 0)
					break;
				udelay(1);
			}
		}
	}

	/* unblackout the MC */
	tmp = RREG32(MC_SHARED_BLACKOUT_CNTL);
	tmp &= ~BLACKOUT_MODE_MASK;
	WREG32(MC_SHARED_BLACKOUT_CNTL, tmp);
	/* allow CPU access */
	WREG32(BIF_FB_EN, FB_READ_EN | FB_WRITE_EN);

	for (i = 0; i < rdev->num_crtc; i++) {
		if (save->crtc_enabled[i]) {
			if (ASIC_IS_DCE6(rdev)) {
				tmp = RREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i]);
				tmp |= EVERGREEN_CRTC_BLANK_DATA_EN;
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
				WREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i], tmp);
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
			} else {
				/* pre-DCE6: re-enable display read requests */
				tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
				tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
				WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
			}
			/* wait for the next frame */
			frame_count = radeon_get_vblank_counter(rdev, i);
			for (j = 0; j < rdev->usec_timeout; j++) {
				if (radeon_get_vblank_counter(rdev, i) != frame_count)
					break;
				udelay(1);
			}
		}
	}
	if (!ASIC_IS_NODCE(rdev)) {
		/* Unlock vga access */
		WREG32(VGA_HDP_CONTROL, save->vga_hdp_control);
		mdelay(1);
		WREG32(VGA_RENDER_CONTROL, save->vga_render_control);
	}
}
2692
/**
 * evergreen_mc_program - program the memory controller address ranges
 *
 * @rdev: radeon_device pointer
 *
 * Stops MC clients via evergreen_mc_stop(), programs the system
 * aperture, the VRAM location and the AGP aperture registers from
 * rdev->mc, then resumes MC clients and disables the VGA renderer.
 */
void evergreen_mc_program(struct radeon_device *rdev)
{
	struct evergreen_mc_save save;
	u32 tmp;
	int i, j;

	/* Initialize HDP */
	for (i = 0, j = 0; i < 32; i++, j += 0x18) {
		WREG32((0x2c14 + j), 0x00000000);
		WREG32((0x2c18 + j), 0x00000000);
		WREG32((0x2c1c + j), 0x00000000);
		WREG32((0x2c20 + j), 0x00000000);
		WREG32((0x2c24 + j), 0x00000000);
	}
	WREG32(HDP_REG_COHERENCY_FLUSH_CNTL, 0);

	evergreen_mc_stop(rdev, &save);
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
	}
	/* Lockout access through VGA aperture*/
	WREG32(VGA_HDP_CONTROL, VGA_MEMORY_DISABLE);
	/* Update configuration: system aperture must cover both VRAM and GTT */
	if (rdev->flags & RADEON_IS_AGP) {
		if (rdev->mc.vram_start < rdev->mc.gtt_start) {
			/* VRAM before AGP */
			WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
				rdev->mc.vram_start >> 12);
			WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
				rdev->mc.gtt_end >> 12);
		} else {
			/* VRAM after AGP */
			WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
				rdev->mc.gtt_start >> 12);
			WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
				rdev->mc.vram_end >> 12);
		}
	} else {
		WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
			rdev->mc.vram_start >> 12);
		WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
			rdev->mc.vram_end >> 12);
	}
	WREG32(MC_VM_SYSTEM_APERTURE_DEFAULT_ADDR, rdev->vram_scratch.gpu_addr >> 12);
	/* llano/ontario only */
	if ((rdev->family == CHIP_PALM) ||
	    (rdev->family == CHIP_SUMO) ||
	    (rdev->family == CHIP_SUMO2)) {
		tmp = RREG32(MC_FUS_VM_FB_OFFSET) & 0x000FFFFF;
		tmp |= ((rdev->mc.vram_end >> 20) & 0xF) << 24;
		tmp |= ((rdev->mc.vram_start >> 20) & 0xF) << 20;
		WREG32(MC_FUS_VM_FB_OFFSET, tmp);
	}
	/* VRAM top in the high 16 bits, VRAM base in the low 16 bits */
	tmp = ((rdev->mc.vram_end >> 24) & 0xFFFF) << 16;
	tmp |= ((rdev->mc.vram_start >> 24) & 0xFFFF);
	WREG32(MC_VM_FB_LOCATION, tmp);
	WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8));
	WREG32(HDP_NONSURFACE_INFO, (2 << 7) | (1 << 30));
	WREG32(HDP_NONSURFACE_SIZE, 0x3FFFFFFF);
	if (rdev->flags & RADEON_IS_AGP) {
		WREG32(MC_VM_AGP_TOP, rdev->mc.gtt_end >> 16);
		WREG32(MC_VM_AGP_BOT, rdev->mc.gtt_start >> 16);
		WREG32(MC_VM_AGP_BASE, rdev->mc.agp_base >> 22);
	} else {
		WREG32(MC_VM_AGP_BASE, 0);
		WREG32(MC_VM_AGP_TOP, 0x0FFFFFFF);
		WREG32(MC_VM_AGP_BOT, 0x0FFFFFFF);
	}
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
	}
	evergreen_mc_resume(rdev, &save);
	/* we need to own VRAM, so turn off the VGA renderer here
	 * to stop it overwriting our objects */
	rv515_vga_render_disable(rdev);
}
2769
2770 /*
2771  * CP.
2772  */
/**
 * evergreen_ring_ib_execute - schedule an IB on the gfx ring
 *
 * @rdev: radeon_device pointer
 * @ib: indirect buffer to schedule
 *
 * Emits a DX10/11 mode switch, optionally records the next ring rptr
 * (to a scratch register or to writeback memory), then emits an
 * INDIRECT_BUFFER packet pointing at the IB.
 */
void evergreen_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib)
{
	struct radeon_ring *ring = &rdev->ring[ib->ring];
	u32 next_rptr;

	/* set to DX10/11 mode */
	radeon_ring_write(ring, PACKET3(PACKET3_MODE_CONTROL, 0));
	radeon_ring_write(ring, 1);

	if (ring->rptr_save_reg) {
		/* +3 dwords for this SET_CONFIG_REG packet,
		 * +4 for the INDIRECT_BUFFER packet below */
		next_rptr = ring->wptr + 3 + 4;
		radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
		radeon_ring_write(ring, ((ring->rptr_save_reg - 
					  PACKET3_SET_CONFIG_REG_START) >> 2));
		radeon_ring_write(ring, next_rptr);
	} else if (rdev->wb.enabled) {
		/* +5 dwords for this MEM_WRITE packet,
		 * +4 for the INDIRECT_BUFFER packet below */
		next_rptr = ring->wptr + 5 + 4;
		radeon_ring_write(ring, PACKET3(PACKET3_MEM_WRITE, 3));
		radeon_ring_write(ring, ring->next_rptr_gpu_addr & 0xfffffffc);
		radeon_ring_write(ring, (upper_32_bits(ring->next_rptr_gpu_addr) & 0xff) | (1 << 18));
		radeon_ring_write(ring, next_rptr);
		radeon_ring_write(ring, 0);
	}

	radeon_ring_write(ring, PACKET3(PACKET3_INDIRECT_BUFFER, 2));
	radeon_ring_write(ring,
#ifdef __BIG_ENDIAN
			  (2 << 0) |
#endif
			  (ib->gpu_addr & 0xFFFFFFFC));
	radeon_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFF);
	radeon_ring_write(ring, ib->length_dw);
}
2806
2807
/**
 * evergreen_cp_load_microcode - load the PFP and ME CP microcode
 *
 * @rdev: radeon_device pointer
 *
 * Stops the CP, writes the big-endian PFP and ME firmware images into
 * their respective ucode RAMs and resets the ucode addresses.
 *
 * Returns 0 on success, -EINVAL if the firmware has not been loaded.
 */
static int evergreen_cp_load_microcode(struct radeon_device *rdev)
{
	const __be32 *fw_data;
	int i;

	if (!rdev->me_fw || !rdev->pfp_fw)
		return -EINVAL;

	r700_cp_stop(rdev);
	WREG32(CP_RB_CNTL,
#ifdef __BIG_ENDIAN
	       BUF_SWAP_32BIT |
#endif
	       RB_NO_UPDATE | RB_BLKSZ(15) | RB_BUFSZ(3));

	/* PFP ucode */
	fw_data = (const __be32 *)rdev->pfp_fw->data;
	WREG32(CP_PFP_UCODE_ADDR, 0);
	for (i = 0; i < EVERGREEN_PFP_UCODE_SIZE; i++)
		WREG32(CP_PFP_UCODE_DATA, be32_to_cpup(fw_data++));
	WREG32(CP_PFP_UCODE_ADDR, 0);

	/* ME ucode */
	fw_data = (const __be32 *)rdev->me_fw->data;
	WREG32(CP_ME_RAM_WADDR, 0);
	for (i = 0; i < EVERGREEN_PM4_UCODE_SIZE; i++)
		WREG32(CP_ME_RAM_DATA, be32_to_cpup(fw_data++));

	/* reset the ucode addresses */
	WREG32(CP_PFP_UCODE_ADDR, 0);
	WREG32(CP_ME_RAM_WADDR, 0);
	WREG32(CP_ME_RAM_RADDR, 0);
	return 0;
}
2839
/**
 * evergreen_cp_start - initialize the CP and emit the initial state
 *
 * @rdev: radeon_device pointer
 *
 * Emits the ME_INITIALIZE sequence, enables the micro engine, then
 * emits the default clear-state context and a few initial register
 * writes on the gfx ring.
 *
 * Returns 0 on success, negative error code if a ring lock fails.
 */
static int evergreen_cp_start(struct radeon_device *rdev)
{
	struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
	int r, i;
	uint32_t cp_me;

	r = radeon_ring_lock(rdev, ring, 7);
	if (r) {
		DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
		return r;
	}
	radeon_ring_write(ring, PACKET3(PACKET3_ME_INITIALIZE, 5));
	radeon_ring_write(ring, 0x1);
	radeon_ring_write(ring, 0x0);
	radeon_ring_write(ring, rdev->config.evergreen.max_hw_contexts - 1);
	radeon_ring_write(ring, PACKET3_ME_INITIALIZE_DEVICE_ID(1));
	radeon_ring_write(ring, 0);
	radeon_ring_write(ring, 0);
	radeon_ring_unlock_commit(rdev, ring);

	/* enable all micro engine units */
	cp_me = 0xff;
	WREG32(CP_ME_CNTL, cp_me);

	/* 19 = fixed dwords emitted below besides the default state */
	r = radeon_ring_lock(rdev, ring, evergreen_default_size + 19);
	if (r) {
		DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
		return r;
	}

	/* setup clear context state */
	radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
	radeon_ring_write(ring, PACKET3_PREAMBLE_BEGIN_CLEAR_STATE);

	for (i = 0; i < evergreen_default_size; i++)
		radeon_ring_write(ring, evergreen_default_state[i]);

	radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
	radeon_ring_write(ring, PACKET3_PREAMBLE_END_CLEAR_STATE);

	/* set clear context state */
	radeon_ring_write(ring, PACKET3(PACKET3_CLEAR_STATE, 0));
	radeon_ring_write(ring, 0);

	/* SQ_VTX_BASE_VTX_LOC */
	radeon_ring_write(ring, 0xc0026f00);
	radeon_ring_write(ring, 0x00000000);
	radeon_ring_write(ring, 0x00000000);
	radeon_ring_write(ring, 0x00000000);

	/* Clear consts */
	radeon_ring_write(ring, 0xc0036f00);
	radeon_ring_write(ring, 0x00000bc4);
	radeon_ring_write(ring, 0xffffffff);
	radeon_ring_write(ring, 0xffffffff);
	radeon_ring_write(ring, 0xffffffff);

	/* SET_CONTEXT_REG at offset 0x316 */
	radeon_ring_write(ring, 0xc0026900);
	radeon_ring_write(ring, 0x00000316);
	radeon_ring_write(ring, 0x0000000e); /* VGT_VERTEX_REUSE_BLOCK_CNTL */
	radeon_ring_write(ring, 0x00000010); /* VGT_OUT_DEALLOC_CNTL */

	radeon_ring_unlock_commit(rdev, ring);

	return 0;
}
2905
/**
 * evergreen_cp_resume - reset and (re)start the gfx CP ring buffer
 * @rdev: radeon_device pointer
 *
 * Soft-resets the CP together with the blocks that must be reset with it,
 * programs the ring buffer size, read/write pointers and writeback
 * addresses, loads the initial ring state via evergreen_cp_start() and
 * finally runs a ring test.
 *
 * Returns 0 on success, negative error code if the ring test fails.
 */
static int evergreen_cp_resume(struct radeon_device *rdev)
{
        struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
        u32 tmp;
        u32 rb_bufsz;
        int r;

        /* Reset cp; if cp is reset, then PA, SH, VGT also need to be reset */
        WREG32(GRBM_SOFT_RESET, (SOFT_RESET_CP |
                                 SOFT_RESET_PA |
                                 SOFT_RESET_SH |
                                 SOFT_RESET_VGT |
                                 SOFT_RESET_SPI |
                                 SOFT_RESET_SX));
        /* read back to post the write, then let the reset settle */
        RREG32(GRBM_SOFT_RESET);
        mdelay(15);
        WREG32(GRBM_SOFT_RESET, 0);
        RREG32(GRBM_SOFT_RESET);

        /* Set ring buffer size: RB_BUFSZ is log2 of the size in units of
         * 8 bytes; the second field (bits 15:8) is presumably the block
         * size in the same units — TODO confirm against the register spec.
         */
        rb_bufsz = order_base_2(ring->ring_size / 8);
        tmp = (order_base_2(RADEON_GPU_PAGE_SIZE/8) << 8) | rb_bufsz;
#ifdef __BIG_ENDIAN
        /* let the CP byte-swap ring fetches on big endian hosts */
        tmp |= BUF_SWAP_32BIT;
#endif
        WREG32(CP_RB_CNTL, tmp);
        WREG32(CP_SEM_WAIT_TIMER, 0x0);
        WREG32(CP_SEM_INCOMPLETE_TIMER_CNTL, 0x0);

        /* Set the write pointer delay */
        WREG32(CP_RB_WPTR_DELAY, 0);

        /* Initialize the ring buffer's read and write pointers;
         * RB_RPTR_WR_ENA temporarily allows overriding the read pointer
         * so it can be zeroed alongside the write pointer.
         */
        WREG32(CP_RB_CNTL, tmp | RB_RPTR_WR_ENA);
        WREG32(CP_RB_RPTR_WR, 0);
        ring->wptr = 0;
        WREG32(CP_RB_WPTR, ring->wptr);

        /* set the wb address whether it's enabled or not */
        WREG32(CP_RB_RPTR_ADDR,
               ((rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFFFFFFFC));
        WREG32(CP_RB_RPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFF);
        WREG32(SCRATCH_ADDR, ((rdev->wb.gpu_addr + RADEON_WB_SCRATCH_OFFSET) >> 8) & 0xFFFFFFFF);

        if (rdev->wb.enabled)
                WREG32(SCRATCH_UMSK, 0xff);
        else {
                /* no writeback: stop the CP from updating the rptr copy */
                tmp |= RB_NO_UPDATE;
                WREG32(SCRATCH_UMSK, 0);
        }

        /* re-arm CP_RB_CNTL without RB_RPTR_WR_ENA (drops the override) */
        mdelay(1);
        WREG32(CP_RB_CNTL, tmp);

        WREG32(CP_RB_BASE, ring->gpu_addr >> 8);
        /* NOTE(review): bits 27/28 of CP_DEBUG are undocumented here —
         * kept as-is from the original bring-up sequence.
         */
        WREG32(CP_DEBUG, (1 << 27) | (1 << 28));

        evergreen_cp_start(rdev);
        ring->ready = true;
        r = radeon_ring_test(rdev, RADEON_RING_TYPE_GFX_INDEX, ring);
        if (r) {
                ring->ready = false;
                return r;
        }
        return 0;
}
2972
2973 /*
2974  * Core functions
2975  */
2976 static void evergreen_gpu_init(struct radeon_device *rdev)
2977 {
2978         u32 gb_addr_config;
2979         u32 mc_shared_chmap, mc_arb_ramcfg;
2980         u32 sx_debug_1;
2981         u32 smx_dc_ctl0;
2982         u32 sq_config;
2983         u32 sq_lds_resource_mgmt;
2984         u32 sq_gpr_resource_mgmt_1;
2985         u32 sq_gpr_resource_mgmt_2;
2986         u32 sq_gpr_resource_mgmt_3;
2987         u32 sq_thread_resource_mgmt;
2988         u32 sq_thread_resource_mgmt_2;
2989         u32 sq_stack_resource_mgmt_1;
2990         u32 sq_stack_resource_mgmt_2;
2991         u32 sq_stack_resource_mgmt_3;
2992         u32 vgt_cache_invalidation;
2993         u32 hdp_host_path_cntl, tmp;
2994         u32 disabled_rb_mask;
2995         int i, j, num_shader_engines, ps_thread_count;
2996
2997         switch (rdev->family) {
2998         case CHIP_CYPRESS:
2999         case CHIP_HEMLOCK:
3000                 rdev->config.evergreen.num_ses = 2;
3001                 rdev->config.evergreen.max_pipes = 4;
3002                 rdev->config.evergreen.max_tile_pipes = 8;
3003                 rdev->config.evergreen.max_simds = 10;
3004                 rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
3005                 rdev->config.evergreen.max_gprs = 256;
3006                 rdev->config.evergreen.max_threads = 248;
3007                 rdev->config.evergreen.max_gs_threads = 32;
3008                 rdev->config.evergreen.max_stack_entries = 512;
3009                 rdev->config.evergreen.sx_num_of_sets = 4;
3010                 rdev->config.evergreen.sx_max_export_size = 256;
3011                 rdev->config.evergreen.sx_max_export_pos_size = 64;
3012                 rdev->config.evergreen.sx_max_export_smx_size = 192;
3013                 rdev->config.evergreen.max_hw_contexts = 8;
3014                 rdev->config.evergreen.sq_num_cf_insts = 2;
3015
3016                 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3017                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3018                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3019                 gb_addr_config = CYPRESS_GB_ADDR_CONFIG_GOLDEN;
3020                 break;
3021         case CHIP_JUNIPER:
3022                 rdev->config.evergreen.num_ses = 1;
3023                 rdev->config.evergreen.max_pipes = 4;
3024                 rdev->config.evergreen.max_tile_pipes = 4;
3025                 rdev->config.evergreen.max_simds = 10;
3026                 rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
3027                 rdev->config.evergreen.max_gprs = 256;
3028                 rdev->config.evergreen.max_threads = 248;
3029                 rdev->config.evergreen.max_gs_threads = 32;
3030                 rdev->config.evergreen.max_stack_entries = 512;
3031                 rdev->config.evergreen.sx_num_of_sets = 4;
3032                 rdev->config.evergreen.sx_max_export_size = 256;
3033                 rdev->config.evergreen.sx_max_export_pos_size = 64;
3034                 rdev->config.evergreen.sx_max_export_smx_size = 192;
3035                 rdev->config.evergreen.max_hw_contexts = 8;
3036                 rdev->config.evergreen.sq_num_cf_insts = 2;
3037
3038                 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3039                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3040                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3041                 gb_addr_config = JUNIPER_GB_ADDR_CONFIG_GOLDEN;
3042                 break;
3043         case CHIP_REDWOOD:
3044                 rdev->config.evergreen.num_ses = 1;
3045                 rdev->config.evergreen.max_pipes = 4;
3046                 rdev->config.evergreen.max_tile_pipes = 4;
3047                 rdev->config.evergreen.max_simds = 5;
3048                 rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3049                 rdev->config.evergreen.max_gprs = 256;
3050                 rdev->config.evergreen.max_threads = 248;
3051                 rdev->config.evergreen.max_gs_threads = 32;
3052                 rdev->config.evergreen.max_stack_entries = 256;
3053                 rdev->config.evergreen.sx_num_of_sets = 4;
3054                 rdev->config.evergreen.sx_max_export_size = 256;
3055                 rdev->config.evergreen.sx_max_export_pos_size = 64;
3056                 rdev->config.evergreen.sx_max_export_smx_size = 192;
3057                 rdev->config.evergreen.max_hw_contexts = 8;
3058                 rdev->config.evergreen.sq_num_cf_insts = 2;
3059
3060                 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3061                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3062                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3063                 gb_addr_config = REDWOOD_GB_ADDR_CONFIG_GOLDEN;
3064                 break;
3065         case CHIP_CEDAR:
3066         default:
3067                 rdev->config.evergreen.num_ses = 1;
3068                 rdev->config.evergreen.max_pipes = 2;
3069                 rdev->config.evergreen.max_tile_pipes = 2;
3070                 rdev->config.evergreen.max_simds = 2;
3071                 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3072                 rdev->config.evergreen.max_gprs = 256;
3073                 rdev->config.evergreen.max_threads = 192;
3074                 rdev->config.evergreen.max_gs_threads = 16;
3075                 rdev->config.evergreen.max_stack_entries = 256;
3076                 rdev->config.evergreen.sx_num_of_sets = 4;
3077                 rdev->config.evergreen.sx_max_export_size = 128;
3078                 rdev->config.evergreen.sx_max_export_pos_size = 32;
3079                 rdev->config.evergreen.sx_max_export_smx_size = 96;
3080                 rdev->config.evergreen.max_hw_contexts = 4;
3081                 rdev->config.evergreen.sq_num_cf_insts = 1;
3082
3083                 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3084                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3085                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3086                 gb_addr_config = CEDAR_GB_ADDR_CONFIG_GOLDEN;
3087                 break;
3088         case CHIP_PALM:
3089                 rdev->config.evergreen.num_ses = 1;
3090                 rdev->config.evergreen.max_pipes = 2;
3091                 rdev->config.evergreen.max_tile_pipes = 2;
3092                 rdev->config.evergreen.max_simds = 2;
3093                 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3094                 rdev->config.evergreen.max_gprs = 256;
3095                 rdev->config.evergreen.max_threads = 192;
3096                 rdev->config.evergreen.max_gs_threads = 16;
3097                 rdev->config.evergreen.max_stack_entries = 256;
3098                 rdev->config.evergreen.sx_num_of_sets = 4;
3099                 rdev->config.evergreen.sx_max_export_size = 128;
3100                 rdev->config.evergreen.sx_max_export_pos_size = 32;
3101                 rdev->config.evergreen.sx_max_export_smx_size = 96;
3102                 rdev->config.evergreen.max_hw_contexts = 4;
3103                 rdev->config.evergreen.sq_num_cf_insts = 1;
3104
3105                 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3106                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3107                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3108                 gb_addr_config = CEDAR_GB_ADDR_CONFIG_GOLDEN;
3109                 break;
3110         case CHIP_SUMO:
3111                 rdev->config.evergreen.num_ses = 1;
3112                 rdev->config.evergreen.max_pipes = 4;
3113                 rdev->config.evergreen.max_tile_pipes = 4;
3114                 if (rdev->pdev->device == 0x9648)
3115                         rdev->config.evergreen.max_simds = 3;
3116                 else if ((rdev->pdev->device == 0x9647) ||
3117                          (rdev->pdev->device == 0x964a))
3118                         rdev->config.evergreen.max_simds = 4;
3119                 else
3120                         rdev->config.evergreen.max_simds = 5;
3121                 rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3122                 rdev->config.evergreen.max_gprs = 256;
3123                 rdev->config.evergreen.max_threads = 248;
3124                 rdev->config.evergreen.max_gs_threads = 32;
3125                 rdev->config.evergreen.max_stack_entries = 256;
3126                 rdev->config.evergreen.sx_num_of_sets = 4;
3127                 rdev->config.evergreen.sx_max_export_size = 256;
3128                 rdev->config.evergreen.sx_max_export_pos_size = 64;
3129                 rdev->config.evergreen.sx_max_export_smx_size = 192;
3130                 rdev->config.evergreen.max_hw_contexts = 8;
3131                 rdev->config.evergreen.sq_num_cf_insts = 2;
3132
3133                 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3134                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3135                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3136                 gb_addr_config = SUMO_GB_ADDR_CONFIG_GOLDEN;
3137                 break;
3138         case CHIP_SUMO2:
3139                 rdev->config.evergreen.num_ses = 1;
3140                 rdev->config.evergreen.max_pipes = 4;
3141                 rdev->config.evergreen.max_tile_pipes = 4;
3142                 rdev->config.evergreen.max_simds = 2;
3143                 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3144                 rdev->config.evergreen.max_gprs = 256;
3145                 rdev->config.evergreen.max_threads = 248;
3146                 rdev->config.evergreen.max_gs_threads = 32;
3147                 rdev->config.evergreen.max_stack_entries = 512;
3148                 rdev->config.evergreen.sx_num_of_sets = 4;
3149                 rdev->config.evergreen.sx_max_export_size = 256;
3150                 rdev->config.evergreen.sx_max_export_pos_size = 64;
3151                 rdev->config.evergreen.sx_max_export_smx_size = 192;
3152                 rdev->config.evergreen.max_hw_contexts = 4;
3153                 rdev->config.evergreen.sq_num_cf_insts = 2;
3154
3155                 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3156                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3157                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3158                 gb_addr_config = SUMO2_GB_ADDR_CONFIG_GOLDEN;
3159                 break;
3160         case CHIP_BARTS:
3161                 rdev->config.evergreen.num_ses = 2;
3162                 rdev->config.evergreen.max_pipes = 4;
3163                 rdev->config.evergreen.max_tile_pipes = 8;
3164                 rdev->config.evergreen.max_simds = 7;
3165                 rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
3166                 rdev->config.evergreen.max_gprs = 256;
3167                 rdev->config.evergreen.max_threads = 248;
3168                 rdev->config.evergreen.max_gs_threads = 32;
3169                 rdev->config.evergreen.max_stack_entries = 512;
3170                 rdev->config.evergreen.sx_num_of_sets = 4;
3171                 rdev->config.evergreen.sx_max_export_size = 256;
3172                 rdev->config.evergreen.sx_max_export_pos_size = 64;
3173                 rdev->config.evergreen.sx_max_export_smx_size = 192;
3174                 rdev->config.evergreen.max_hw_contexts = 8;
3175                 rdev->config.evergreen.sq_num_cf_insts = 2;
3176
3177                 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3178                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3179                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3180                 gb_addr_config = BARTS_GB_ADDR_CONFIG_GOLDEN;
3181                 break;
3182         case CHIP_TURKS:
3183                 rdev->config.evergreen.num_ses = 1;
3184                 rdev->config.evergreen.max_pipes = 4;
3185                 rdev->config.evergreen.max_tile_pipes = 4;
3186                 rdev->config.evergreen.max_simds = 6;
3187                 rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3188                 rdev->config.evergreen.max_gprs = 256;
3189                 rdev->config.evergreen.max_threads = 248;
3190                 rdev->config.evergreen.max_gs_threads = 32;
3191                 rdev->config.evergreen.max_stack_entries = 256;
3192                 rdev->config.evergreen.sx_num_of_sets = 4;
3193                 rdev->config.evergreen.sx_max_export_size = 256;
3194                 rdev->config.evergreen.sx_max_export_pos_size = 64;
3195                 rdev->config.evergreen.sx_max_export_smx_size = 192;
3196                 rdev->config.evergreen.max_hw_contexts = 8;
3197                 rdev->config.evergreen.sq_num_cf_insts = 2;
3198
3199                 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3200                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3201                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3202                 gb_addr_config = TURKS_GB_ADDR_CONFIG_GOLDEN;
3203                 break;
3204         case CHIP_CAICOS:
3205                 rdev->config.evergreen.num_ses = 1;
3206                 rdev->config.evergreen.max_pipes = 2;
3207                 rdev->config.evergreen.max_tile_pipes = 2;
3208                 rdev->config.evergreen.max_simds = 2;
3209                 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3210                 rdev->config.evergreen.max_gprs = 256;
3211                 rdev->config.evergreen.max_threads = 192;
3212                 rdev->config.evergreen.max_gs_threads = 16;
3213                 rdev->config.evergreen.max_stack_entries = 256;
3214                 rdev->config.evergreen.sx_num_of_sets = 4;
3215                 rdev->config.evergreen.sx_max_export_size = 128;
3216                 rdev->config.evergreen.sx_max_export_pos_size = 32;
3217                 rdev->config.evergreen.sx_max_export_smx_size = 96;
3218                 rdev->config.evergreen.max_hw_contexts = 4;
3219                 rdev->config.evergreen.sq_num_cf_insts = 1;
3220
3221                 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3222                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3223                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3224                 gb_addr_config = CAICOS_GB_ADDR_CONFIG_GOLDEN;
3225                 break;
3226         }
3227
3228         /* Initialize HDP */
3229         for (i = 0, j = 0; i < 32; i++, j += 0x18) {
3230                 WREG32((0x2c14 + j), 0x00000000);
3231                 WREG32((0x2c18 + j), 0x00000000);
3232                 WREG32((0x2c1c + j), 0x00000000);
3233                 WREG32((0x2c20 + j), 0x00000000);
3234                 WREG32((0x2c24 + j), 0x00000000);
3235         }
3236
3237         WREG32(GRBM_CNTL, GRBM_READ_TIMEOUT(0xff));
3238
3239         evergreen_fix_pci_max_read_req_size(rdev);
3240
3241         mc_shared_chmap = RREG32(MC_SHARED_CHMAP);
3242         if ((rdev->family == CHIP_PALM) ||
3243             (rdev->family == CHIP_SUMO) ||
3244             (rdev->family == CHIP_SUMO2))
3245                 mc_arb_ramcfg = RREG32(FUS_MC_ARB_RAMCFG);
3246         else
3247                 mc_arb_ramcfg = RREG32(MC_ARB_RAMCFG);
3248
3249         /* setup tiling info dword.  gb_addr_config is not adequate since it does
3250          * not have bank info, so create a custom tiling dword.
3251          * bits 3:0   num_pipes
3252          * bits 7:4   num_banks
3253          * bits 11:8  group_size
3254          * bits 15:12 row_size
3255          */
3256         rdev->config.evergreen.tile_config = 0;
3257         switch (rdev->config.evergreen.max_tile_pipes) {
3258         case 1:
3259         default:
3260                 rdev->config.evergreen.tile_config |= (0 << 0);
3261                 break;
3262         case 2:
3263                 rdev->config.evergreen.tile_config |= (1 << 0);
3264                 break;
3265         case 4:
3266                 rdev->config.evergreen.tile_config |= (2 << 0);
3267                 break;
3268         case 8:
3269                 rdev->config.evergreen.tile_config |= (3 << 0);
3270                 break;
3271         }
3272         /* num banks is 8 on all fusion asics. 0 = 4, 1 = 8, 2 = 16 */
3273         if (rdev->flags & RADEON_IS_IGP)
3274                 rdev->config.evergreen.tile_config |= 1 << 4;
3275         else {
3276                 switch ((mc_arb_ramcfg & NOOFBANK_MASK) >> NOOFBANK_SHIFT) {
3277                 case 0: /* four banks */
3278                         rdev->config.evergreen.tile_config |= 0 << 4;
3279                         break;
3280                 case 1: /* eight banks */
3281                         rdev->config.evergreen.tile_config |= 1 << 4;
3282                         break;
3283                 case 2: /* sixteen banks */
3284                 default:
3285                         rdev->config.evergreen.tile_config |= 2 << 4;
3286                         break;
3287                 }
3288         }
3289         rdev->config.evergreen.tile_config |= 0 << 8;
3290         rdev->config.evergreen.tile_config |=
3291                 ((gb_addr_config & 0x30000000) >> 28) << 12;
3292
3293         num_shader_engines = (gb_addr_config & NUM_SHADER_ENGINES(3) >> 12) + 1;
3294
3295         if ((rdev->family >= CHIP_CEDAR) && (rdev->family <= CHIP_HEMLOCK)) {
3296                 u32 efuse_straps_4;
3297                 u32 efuse_straps_3;
3298
3299                 efuse_straps_4 = RREG32_RCU(0x204);
3300                 efuse_straps_3 = RREG32_RCU(0x203);
3301                 tmp = (((efuse_straps_4 & 0xf) << 4) |
3302                       ((efuse_straps_3 & 0xf0000000) >> 28));
3303         } else {
3304                 tmp = 0;
3305                 for (i = (rdev->config.evergreen.num_ses - 1); i >= 0; i--) {
3306                         u32 rb_disable_bitmap;
3307
3308                         WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3309                         WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3310                         rb_disable_bitmap = (RREG32(CC_RB_BACKEND_DISABLE) & 0x00ff0000) >> 16;
3311                         tmp <<= 4;
3312                         tmp |= rb_disable_bitmap;
3313                 }
3314         }
3315         /* enabled rb are just the one not disabled :) */
3316         disabled_rb_mask = tmp;
3317         tmp = 0;
3318         for (i = 0; i < rdev->config.evergreen.max_backends; i++)
3319                 tmp |= (1 << i);
3320         /* if all the backends are disabled, fix it up here */
3321         if ((disabled_rb_mask & tmp) == tmp) {
3322                 for (i = 0; i < rdev->config.evergreen.max_backends; i++)
3323                         disabled_rb_mask &= ~(1 << i);
3324         }
3325
3326         WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_BROADCAST_WRITES);
3327         WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_BROADCAST_WRITES);
3328
3329         WREG32(GB_ADDR_CONFIG, gb_addr_config);
3330         WREG32(DMIF_ADDR_CONFIG, gb_addr_config);
3331         WREG32(HDP_ADDR_CONFIG, gb_addr_config);
3332         WREG32(DMA_TILING_CONFIG, gb_addr_config);
3333         WREG32(UVD_UDEC_ADDR_CONFIG, gb_addr_config);
3334         WREG32(UVD_UDEC_DB_ADDR_CONFIG, gb_addr_config);
3335         WREG32(UVD_UDEC_DBW_ADDR_CONFIG, gb_addr_config);
3336
3337         if ((rdev->config.evergreen.max_backends == 1) &&
3338             (rdev->flags & RADEON_IS_IGP)) {
3339                 if ((disabled_rb_mask & 3) == 1) {
3340                         /* RB0 disabled, RB1 enabled */
3341                         tmp = 0x11111111;
3342                 } else {
3343                         /* RB1 disabled, RB0 enabled */
3344                         tmp = 0x00000000;
3345                 }
3346         } else {
3347                 tmp = gb_addr_config & NUM_PIPES_MASK;
3348                 tmp = r6xx_remap_render_backend(rdev, tmp, rdev->config.evergreen.max_backends,
3349                                                 EVERGREEN_MAX_BACKENDS, disabled_rb_mask);
3350         }
3351         WREG32(GB_BACKEND_MAP, tmp);
3352
3353         WREG32(CGTS_SYS_TCC_DISABLE, 0);
3354         WREG32(CGTS_TCC_DISABLE, 0);
3355         WREG32(CGTS_USER_SYS_TCC_DISABLE, 0);
3356         WREG32(CGTS_USER_TCC_DISABLE, 0);
3357
3358         /* set HW defaults for 3D engine */
3359         WREG32(CP_QUEUE_THRESHOLDS, (ROQ_IB1_START(0x16) |
3360                                      ROQ_IB2_START(0x2b)));
3361
3362         WREG32(CP_MEQ_THRESHOLDS, STQ_SPLIT(0x30));
3363
3364         WREG32(TA_CNTL_AUX, (DISABLE_CUBE_ANISO |
3365                              SYNC_GRADIENT |
3366                              SYNC_WALKER |
3367                              SYNC_ALIGNER));
3368
3369         sx_debug_1 = RREG32(SX_DEBUG_1);
3370         sx_debug_1 |= ENABLE_NEW_SMX_ADDRESS;
3371         WREG32(SX_DEBUG_1, sx_debug_1);
3372
3373
3374         smx_dc_ctl0 = RREG32(SMX_DC_CTL0);
3375         smx_dc_ctl0 &= ~NUMBER_OF_SETS(0x1ff);
3376         smx_dc_ctl0 |= NUMBER_OF_SETS(rdev->config.evergreen.sx_num_of_sets);
3377         WREG32(SMX_DC_CTL0, smx_dc_ctl0);
3378
3379         if (rdev->family <= CHIP_SUMO2)
3380                 WREG32(SMX_SAR_CTL0, 0x00010000);
3381
3382         WREG32(SX_EXPORT_BUFFER_SIZES, (COLOR_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_size / 4) - 1) |
3383                                         POSITION_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_pos_size / 4) - 1) |
3384                                         SMX_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_smx_size / 4) - 1)));
3385
3386         WREG32(PA_SC_FIFO_SIZE, (SC_PRIM_FIFO_SIZE(rdev->config.evergreen.sc_prim_fifo_size) |
3387                                  SC_HIZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_hiz_tile_fifo_size) |
3388                                  SC_EARLYZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_earlyz_tile_fifo_size)));
3389
3390         WREG32(VGT_NUM_INSTANCES, 1);
3391         WREG32(SPI_CONFIG_CNTL, 0);
3392         WREG32(SPI_CONFIG_CNTL_1, VTX_DONE_DELAY(4));
3393         WREG32(CP_PERFMON_CNTL, 0);
3394
3395         WREG32(SQ_MS_FIFO_SIZES, (CACHE_FIFO_SIZE(16 * rdev->config.evergreen.sq_num_cf_insts) |
3396                                   FETCH_FIFO_HIWATER(0x4) |
3397                                   DONE_FIFO_HIWATER(0xe0) |
3398                                   ALU_UPDATE_FIFO_HIWATER(0x8)));
3399
3400         sq_config = RREG32(SQ_CONFIG);
3401         sq_config &= ~(PS_PRIO(3) |
3402                        VS_PRIO(3) |
3403                        GS_PRIO(3) |
3404                        ES_PRIO(3));
3405         sq_config |= (VC_ENABLE |
3406                       EXPORT_SRC_C |
3407                       PS_PRIO(0) |
3408                       VS_PRIO(1) |
3409                       GS_PRIO(2) |
3410                       ES_PRIO(3));
3411
3412         switch (rdev->family) {
3413         case CHIP_CEDAR:
3414         case CHIP_PALM:
3415         case CHIP_SUMO:
3416         case CHIP_SUMO2:
3417         case CHIP_CAICOS:
3418                 /* no vertex cache */
3419                 sq_config &= ~VC_ENABLE;
3420                 break;
3421         default:
3422                 break;
3423         }
3424
3425         sq_lds_resource_mgmt = RREG32(SQ_LDS_RESOURCE_MGMT);
3426
3427         sq_gpr_resource_mgmt_1 = NUM_PS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2))* 12 / 32);
3428         sq_gpr_resource_mgmt_1 |= NUM_VS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 6 / 32);
3429         sq_gpr_resource_mgmt_1 |= NUM_CLAUSE_TEMP_GPRS(4);
3430         sq_gpr_resource_mgmt_2 = NUM_GS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
3431         sq_gpr_resource_mgmt_2 |= NUM_ES_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
3432         sq_gpr_resource_mgmt_3 = NUM_HS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
3433         sq_gpr_resource_mgmt_3 |= NUM_LS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
3434
3435         switch (rdev->family) {
3436         case CHIP_CEDAR:
3437         case CHIP_PALM:
3438         case CHIP_SUMO:
3439         case CHIP_SUMO2:
3440                 ps_thread_count = 96;
3441                 break;
3442         default:
3443                 ps_thread_count = 128;
3444                 break;
3445         }
3446
3447         sq_thread_resource_mgmt = NUM_PS_THREADS(ps_thread_count);
3448         sq_thread_resource_mgmt |= NUM_VS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3449         sq_thread_resource_mgmt |= NUM_GS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3450         sq_thread_resource_mgmt |= NUM_ES_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3451         sq_thread_resource_mgmt_2 = NUM_HS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3452         sq_thread_resource_mgmt_2 |= NUM_LS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3453
3454         sq_stack_resource_mgmt_1 = NUM_PS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3455         sq_stack_resource_mgmt_1 |= NUM_VS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3456         sq_stack_resource_mgmt_2 = NUM_GS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3457         sq_stack_resource_mgmt_2 |= NUM_ES_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3458         sq_stack_resource_mgmt_3 = NUM_HS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3459         sq_stack_resource_mgmt_3 |= NUM_LS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3460
3461         WREG32(SQ_CONFIG, sq_config);
3462         WREG32(SQ_GPR_RESOURCE_MGMT_1, sq_gpr_resource_mgmt_1);
3463         WREG32(SQ_GPR_RESOURCE_MGMT_2, sq_gpr_resource_mgmt_2);
3464         WREG32(SQ_GPR_RESOURCE_MGMT_3, sq_gpr_resource_mgmt_3);
3465         WREG32(SQ_THREAD_RESOURCE_MGMT, sq_thread_resource_mgmt);
3466         WREG32(SQ_THREAD_RESOURCE_MGMT_2, sq_thread_resource_mgmt_2);
3467         WREG32(SQ_STACK_RESOURCE_MGMT_1, sq_stack_resource_mgmt_1);
3468         WREG32(SQ_STACK_RESOURCE_MGMT_2, sq_stack_resource_mgmt_2);
3469         WREG32(SQ_STACK_RESOURCE_MGMT_3, sq_stack_resource_mgmt_3);
3470         WREG32(SQ_DYN_GPR_CNTL_PS_FLUSH_REQ, 0);
3471         WREG32(SQ_LDS_RESOURCE_MGMT, sq_lds_resource_mgmt);
3472
3473         WREG32(PA_SC_FORCE_EOV_MAX_CNTS, (FORCE_EOV_MAX_CLK_CNT(4095) |
3474                                           FORCE_EOV_MAX_REZ_CNT(255)));
3475
3476         switch (rdev->family) {
3477         case CHIP_CEDAR:
3478         case CHIP_PALM:
3479         case CHIP_SUMO:
3480         case CHIP_SUMO2:
3481         case CHIP_CAICOS:
3482                 vgt_cache_invalidation = CACHE_INVALIDATION(TC_ONLY);
3483                 break;
3484         default:
3485                 vgt_cache_invalidation = CACHE_INVALIDATION(VC_AND_TC);
3486                 break;
3487         }
3488         vgt_cache_invalidation |= AUTO_INVLD_EN(ES_AND_GS_AUTO);
3489         WREG32(VGT_CACHE_INVALIDATION, vgt_cache_invalidation);
3490
3491         WREG32(VGT_GS_VERTEX_REUSE, 16);
3492         WREG32(PA_SU_LINE_STIPPLE_VALUE, 0);
3493         WREG32(PA_SC_LINE_STIPPLE_STATE, 0);
3494
3495         WREG32(VGT_VERTEX_REUSE_BLOCK_CNTL, 14);
3496         WREG32(VGT_OUT_DEALLOC_CNTL, 16);
3497
3498         WREG32(CB_PERF_CTR0_SEL_0, 0);
3499         WREG32(CB_PERF_CTR0_SEL_1, 0);
3500         WREG32(CB_PERF_CTR1_SEL_0, 0);
3501         WREG32(CB_PERF_CTR1_SEL_1, 0);
3502         WREG32(CB_PERF_CTR2_SEL_0, 0);
3503         WREG32(CB_PERF_CTR2_SEL_1, 0);
3504         WREG32(CB_PERF_CTR3_SEL_0, 0);
3505         WREG32(CB_PERF_CTR3_SEL_1, 0);
3506
3507         /* clear render buffer base addresses */
3508         WREG32(CB_COLOR0_BASE, 0);
3509         WREG32(CB_COLOR1_BASE, 0);
3510         WREG32(CB_COLOR2_BASE, 0);
3511         WREG32(CB_COLOR3_BASE, 0);
3512         WREG32(CB_COLOR4_BASE, 0);
3513         WREG32(CB_COLOR5_BASE, 0);
3514         WREG32(CB_COLOR6_BASE, 0);
3515         WREG32(CB_COLOR7_BASE, 0);
3516         WREG32(CB_COLOR8_BASE, 0);
3517         WREG32(CB_COLOR9_BASE, 0);
3518         WREG32(CB_COLOR10_BASE, 0);
3519         WREG32(CB_COLOR11_BASE, 0);
3520
3521         /* set the shader const cache sizes to 0 */
3522         for (i = SQ_ALU_CONST_BUFFER_SIZE_PS_0; i < 0x28200; i += 4)
3523                 WREG32(i, 0);
3524         for (i = SQ_ALU_CONST_BUFFER_SIZE_HS_0; i < 0x29000; i += 4)
3525                 WREG32(i, 0);
3526
3527         tmp = RREG32(HDP_MISC_CNTL);
3528         tmp |= HDP_FLUSH_INVALIDATE_CACHE;
3529         WREG32(HDP_MISC_CNTL, tmp);
3530
3531         hdp_host_path_cntl = RREG32(HDP_HOST_PATH_CNTL);
3532         WREG32(HDP_HOST_PATH_CNTL, hdp_host_path_cntl);
3533
3534         WREG32(PA_CL_ENHANCE, CLIP_VTX_REORDER_ENA | NUM_CLIP_SEQ(3));
3535
3536         udelay(50);
3537
3538 }
3539
/**
 * evergreen_mc_init - probe VRAM configuration and set up the MC address space
 *
 * @rdev: radeon_device pointer
 *
 * Determines memory channel width/count to compute the effective VRAM bus
 * width, reads the VRAM size, records the PCI aperture, and programs the
 * GPU's internal VRAM/GTT layout.  Returns 0 (cannot fail).
 */
int evergreen_mc_init(struct radeon_device *rdev)
{
	u32 tmp;
	int chansize, numchan;

	/* Get VRAM information */
	rdev->mc.vram_is_ddr = true;
	/* Fusion APUs (PALM/SUMO/SUMO2) expose the memory arbiter RAM config
	 * at a different register offset than discrete parts. */
	if ((rdev->family == CHIP_PALM) ||
	    (rdev->family == CHIP_SUMO) ||
	    (rdev->family == CHIP_SUMO2))
		tmp = RREG32(FUS_MC_ARB_RAMCFG);
	else
		tmp = RREG32(MC_ARB_RAMCFG);
	/* per-channel width in bits */
	if (tmp & CHANSIZE_OVERRIDE) {
		chansize = 16;
	} else if (tmp & CHANSIZE_MASK) {
		chansize = 64;
	} else {
		chansize = 32;
	}
	/* number of memory channels */
	tmp = RREG32(MC_SHARED_CHMAP);
	switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
	case 0:
	default:
		numchan = 1;
		break;
	case 1:
		numchan = 2;
		break;
	case 2:
		numchan = 4;
		break;
	case 3:
		numchan = 8;
		break;
	}
	rdev->mc.vram_width = numchan * chansize;
	/* Could aper size report 0 ? */
	rdev->mc.aper_base = pci_resource_start(rdev->pdev, 0);
	rdev->mc.aper_size = pci_resource_len(rdev->pdev, 0);
	/* Setup GPU memory space */
	if ((rdev->family == CHIP_PALM) ||
	    (rdev->family == CHIP_SUMO) ||
	    (rdev->family == CHIP_SUMO2)) {
		/* size in bytes on fusion */
		rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE);
		rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE);
	} else {
		/* size in MB on evergreen/cayman/tn */
		rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL;
		rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL;
	}
	/* CPU-visible VRAM is bounded by the PCI aperture */
	rdev->mc.visible_vram_size = rdev->mc.aper_size;
	r700_vram_gtt_location(rdev, &rdev->mc);
	radeon_update_bandwidth_info(rdev);

	return 0;
}
3598
/**
 * evergreen_print_gpu_status_regs - dump GPU busy/status registers to the log
 *
 * @rdev: radeon_device pointer
 *
 * Logs the GRBM/SRBM/CP/DMA status registers via dev_info; used around GPU
 * reset to aid debugging of hangs.  Read-only, no side effects on hardware
 * state beyond the register reads themselves.
 */
void evergreen_print_gpu_status_regs(struct radeon_device *rdev)
{
	dev_info(rdev->dev, "  GRBM_STATUS               = 0x%08X\n",
		RREG32(GRBM_STATUS));
	dev_info(rdev->dev, "  GRBM_STATUS_SE0           = 0x%08X\n",
		RREG32(GRBM_STATUS_SE0));
	dev_info(rdev->dev, "  GRBM_STATUS_SE1           = 0x%08X\n",
		RREG32(GRBM_STATUS_SE1));
	dev_info(rdev->dev, "  SRBM_STATUS               = 0x%08X\n",
		RREG32(SRBM_STATUS));
	dev_info(rdev->dev, "  SRBM_STATUS2              = 0x%08X\n",
		RREG32(SRBM_STATUS2));
	dev_info(rdev->dev, "  R_008674_CP_STALLED_STAT1 = 0x%08X\n",
		RREG32(CP_STALLED_STAT1));
	dev_info(rdev->dev, "  R_008678_CP_STALLED_STAT2 = 0x%08X\n",
		RREG32(CP_STALLED_STAT2));
	dev_info(rdev->dev, "  R_00867C_CP_BUSY_STAT     = 0x%08X\n",
		RREG32(CP_BUSY_STAT));
	dev_info(rdev->dev, "  R_008680_CP_STAT          = 0x%08X\n",
		RREG32(CP_STAT));
	dev_info(rdev->dev, "  R_00D034_DMA_STATUS_REG   = 0x%08X\n",
		RREG32(DMA_STATUS_REG));
	/* Cayman and newer have a second DMA engine at a +0x800 offset */
	if (rdev->family >= CHIP_CAYMAN) {
		dev_info(rdev->dev, "  R_00D834_DMA_STATUS_REG   = 0x%08X\n",
			 RREG32(DMA_STATUS_REG + 0x800));
	}
}
3626
3627 bool evergreen_is_display_hung(struct radeon_device *rdev)
3628 {
3629         u32 crtc_hung = 0;
3630         u32 crtc_status[6];
3631         u32 i, j, tmp;
3632
3633         for (i = 0; i < rdev->num_crtc; i++) {
3634                 if (RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]) & EVERGREEN_CRTC_MASTER_EN) {
3635                         crtc_status[i] = RREG32(EVERGREEN_CRTC_STATUS_HV_COUNT + crtc_offsets[i]);
3636                         crtc_hung |= (1 << i);
3637                 }
3638         }
3639
3640         for (j = 0; j < 10; j++) {
3641                 for (i = 0; i < rdev->num_crtc; i++) {
3642                         if (crtc_hung & (1 << i)) {
3643                                 tmp = RREG32(EVERGREEN_CRTC_STATUS_HV_COUNT + crtc_offsets[i]);
3644                                 if (tmp != crtc_status[i])
3645                                         crtc_hung &= ~(1 << i);
3646                         }
3647                 }
3648                 if (crtc_hung == 0)
3649                         return false;
3650                 udelay(100);
3651         }
3652
3653         return true;
3654 }
3655
/**
 * evergreen_gpu_check_soft_reset - determine which GPU engines are hung
 *
 * @rdev: radeon_device pointer
 *
 * Decodes the GRBM/SRBM/DMA/VM status registers into a RADEON_RESET_*
 * bitmask of engines that appear busy/hung.  An MC-busy indication is
 * deliberately dropped from the result since the memory controller is
 * usually just busy rather than hung.  Returns the reset mask (0 = idle).
 */
u32 evergreen_gpu_check_soft_reset(struct radeon_device *rdev)
{
	u32 reset_mask = 0;
	u32 tmp;

	/* GRBM_STATUS */
	tmp = RREG32(GRBM_STATUS);
	/* any graphics pipeline unit busy -> GFX reset */
	if (tmp & (PA_BUSY | SC_BUSY |
		   SH_BUSY | SX_BUSY |
		   TA_BUSY | VGT_BUSY |
		   DB_BUSY | CB_BUSY |
		   SPI_BUSY | VGT_BUSY_NO_DMA))
		reset_mask |= RADEON_RESET_GFX;

	/* command processor busy or requests pending -> CP reset */
	if (tmp & (CF_RQ_PENDING | PF_RQ_PENDING |
		   CP_BUSY | CP_COHERENCY_BUSY))
		reset_mask |= RADEON_RESET_CP;

	if (tmp & GRBM_EE_BUSY)
		reset_mask |= RADEON_RESET_GRBM | RADEON_RESET_GFX | RADEON_RESET_CP;

	/* DMA_STATUS_REG */
	tmp = RREG32(DMA_STATUS_REG);
	if (!(tmp & DMA_IDLE))
		reset_mask |= RADEON_RESET_DMA;

	/* SRBM_STATUS2 */
	tmp = RREG32(SRBM_STATUS2);
	if (tmp & DMA_BUSY)
		reset_mask |= RADEON_RESET_DMA;

	/* SRBM_STATUS */
	tmp = RREG32(SRBM_STATUS);
	if (tmp & (RLC_RQ_PENDING | RLC_BUSY))
		reset_mask |= RADEON_RESET_RLC;

	if (tmp & IH_BUSY)
		reset_mask |= RADEON_RESET_IH;

	if (tmp & SEM_BUSY)
		reset_mask |= RADEON_RESET_SEM;

	if (tmp & GRBM_RQ_PENDING)
		reset_mask |= RADEON_RESET_GRBM;

	if (tmp & VMC_BUSY)
		reset_mask |= RADEON_RESET_VMC;

	if (tmp & (MCB_BUSY | MCB_NON_DISPLAY_BUSY |
		   MCC_BUSY | MCD_BUSY))
		reset_mask |= RADEON_RESET_MC;

	if (evergreen_is_display_hung(rdev))
		reset_mask |= RADEON_RESET_DISPLAY;

	/* VM_L2_STATUS */
	tmp = RREG32(VM_L2_STATUS);
	if (tmp & L2_BUSY)
		reset_mask |= RADEON_RESET_VMC;

	/* Skip MC reset as it's mostly likely not hung, just busy */
	if (reset_mask & RADEON_RESET_MC) {
		DRM_DEBUG("MC busy: 0x%08X, clearing.\n", reset_mask);
		reset_mask &= ~RADEON_RESET_MC;
	}

	return reset_mask;
}
3724
/**
 * evergreen_gpu_soft_reset - soft-reset the requested GPU engines
 *
 * @rdev: radeon_device pointer
 * @reset_mask: RADEON_RESET_* bitmask of engines to reset
 *
 * Halts the CP (and DMA if requested), stops MC client access, pulses the
 * matching GRBM/SRBM soft-reset bits, then restores MC access.  The exact
 * sequence and the udelay()s between steps follow the hardware reset
 * procedure and must not be reordered.  No-op if @reset_mask is 0.
 */
static void evergreen_gpu_soft_reset(struct radeon_device *rdev, u32 reset_mask)
{
	struct evergreen_mc_save save;
	u32 grbm_soft_reset = 0, srbm_soft_reset = 0;
	u32 tmp;

	if (reset_mask == 0)
		return;

	dev_info(rdev->dev, "GPU softreset: 0x%08X\n", reset_mask);

	evergreen_print_gpu_status_regs(rdev);

	/* Disable CP parsing/prefetching */
	WREG32(CP_ME_CNTL, CP_ME_HALT | CP_PFP_HALT);

	if (reset_mask & RADEON_RESET_DMA) {
		/* Disable DMA */
		tmp = RREG32(DMA_RB_CNTL);
		tmp &= ~DMA_RB_ENABLE;
		WREG32(DMA_RB_CNTL, tmp);
	}

	udelay(50);

	/* block MC client access while resetting */
	evergreen_mc_stop(rdev, &save);
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
	}

	/* translate the RADEON_RESET_* mask into GRBM/SRBM soft-reset bits */
	if (reset_mask & (RADEON_RESET_GFX | RADEON_RESET_COMPUTE)) {
		grbm_soft_reset |= SOFT_RESET_DB |
			SOFT_RESET_CB |
			SOFT_RESET_PA |
			SOFT_RESET_SC |
			SOFT_RESET_SPI |
			SOFT_RESET_SX |
			SOFT_RESET_SH |
			SOFT_RESET_TC |
			SOFT_RESET_TA |
			SOFT_RESET_VC |
			SOFT_RESET_VGT;
	}

	if (reset_mask & RADEON_RESET_CP) {
		grbm_soft_reset |= SOFT_RESET_CP |
			SOFT_RESET_VGT;

		srbm_soft_reset |= SOFT_RESET_GRBM;
	}

	if (reset_mask & RADEON_RESET_DMA)
		srbm_soft_reset |= SOFT_RESET_DMA;

	if (reset_mask & RADEON_RESET_DISPLAY)
		srbm_soft_reset |= SOFT_RESET_DC;

	if (reset_mask & RADEON_RESET_RLC)
		srbm_soft_reset |= SOFT_RESET_RLC;

	if (reset_mask & RADEON_RESET_SEM)
		srbm_soft_reset |= SOFT_RESET_SEM;

	if (reset_mask & RADEON_RESET_IH)
		srbm_soft_reset |= SOFT_RESET_IH;

	if (reset_mask & RADEON_RESET_GRBM)
		srbm_soft_reset |= SOFT_RESET_GRBM;

	if (reset_mask & RADEON_RESET_VMC)
		srbm_soft_reset |= SOFT_RESET_VMC;

	/* the MC is not soft-reset on IGPs (system memory is behind it) */
	if (!(rdev->flags & RADEON_IS_IGP)) {
		if (reset_mask & RADEON_RESET_MC)
			srbm_soft_reset |= SOFT_RESET_MC;
	}

	/* assert, hold 50us, then deassert; the readbacks post the writes */
	if (grbm_soft_reset) {
		tmp = RREG32(GRBM_SOFT_RESET);
		tmp |= grbm_soft_reset;
		dev_info(rdev->dev, "GRBM_SOFT_RESET=0x%08X\n", tmp);
		WREG32(GRBM_SOFT_RESET, tmp);
		tmp = RREG32(GRBM_SOFT_RESET);

		udelay(50);

		tmp &= ~grbm_soft_reset;
		WREG32(GRBM_SOFT_RESET, tmp);
		tmp = RREG32(GRBM_SOFT_RESET);
	}

	if (srbm_soft_reset) {
		tmp = RREG32(SRBM_SOFT_RESET);
		tmp |= srbm_soft_reset;
		dev_info(rdev->dev, "SRBM_SOFT_RESET=0x%08X\n", tmp);
		WREG32(SRBM_SOFT_RESET, tmp);
		tmp = RREG32(SRBM_SOFT_RESET);

		udelay(50);

		tmp &= ~srbm_soft_reset;
		WREG32(SRBM_SOFT_RESET, tmp);
		tmp = RREG32(SRBM_SOFT_RESET);
	}

	/* Wait a little for things to settle down */
	udelay(50);

	evergreen_mc_resume(rdev, &save);
	udelay(50);

	evergreen_print_gpu_status_regs(rdev);
}
3838
/**
 * evergreen_gpu_pci_config_reset - reset the GPU via PCI config space
 *
 * @rdev: radeon_device pointer
 *
 * Heavier-weight fallback used when a soft reset fails: halts the CP, DMA
 * and RLC, switches clocks to bypass, disables bus mastering and MC client
 * access, then triggers a PCI config reset and busy-waits (up to
 * rdev->usec_timeout microseconds) for the ASIC to come back.  A readable
 * CONFIG_MEMSIZE (!= 0xffffffff) signals the chip is alive again.
 */
void evergreen_gpu_pci_config_reset(struct radeon_device *rdev)
{
	struct evergreen_mc_save save;
	u32 tmp, i;

	dev_info(rdev->dev, "GPU pci config reset\n");

	/* disable dpm? */

	/* Disable CP parsing/prefetching */
	WREG32(CP_ME_CNTL, CP_ME_HALT | CP_PFP_HALT);
	udelay(50);
	/* Disable DMA */
	tmp = RREG32(DMA_RB_CNTL);
	tmp &= ~DMA_RB_ENABLE;
	WREG32(DMA_RB_CNTL, tmp);
	/* XXX other engines? */

	/* halt the rlc */
	r600_rlc_stop(rdev);

	udelay(50);

	/* set mclk/sclk to bypass */
	rv770_set_clk_bypass_mode(rdev);
	/* disable BM */
	pci_clear_master(rdev->pdev);
	/* disable mem access */
	evergreen_mc_stop(rdev, &save);
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timed out !\n");
	}
	/* reset */
	radeon_pci_config_reset(rdev);
	/* wait for asic to come out of reset */
	for (i = 0; i < rdev->usec_timeout; i++) {
		if (RREG32(CONFIG_MEMSIZE) != 0xffffffff)
			break;
		udelay(1);
	}
}
3880
3881 int evergreen_asic_reset(struct radeon_device *rdev)
3882 {
3883         u32 reset_mask;
3884
3885         reset_mask = evergreen_gpu_check_soft_reset(rdev);
3886
3887         if (reset_mask)
3888                 r600_set_bios_scratch_engine_hung(rdev, true);
3889
3890         /* try soft reset */
3891         evergreen_gpu_soft_reset(rdev, reset_mask);
3892
3893         reset_mask = evergreen_gpu_check_soft_reset(rdev);
3894
3895         /* try pci config reset */
3896         if (reset_mask && radeon_hard_reset)
3897                 evergreen_gpu_pci_config_reset(rdev);
3898
3899         reset_mask = evergreen_gpu_check_soft_reset(rdev);
3900
3901         if (!reset_mask)
3902                 r600_set_bios_scratch_engine_hung(rdev, false);
3903
3904         return 0;
3905 }
3906
3907 /**
3908  * evergreen_gfx_is_lockup - Check if the GFX engine is locked up
3909  *
3910  * @rdev: radeon_device pointer
3911  * @ring: radeon_ring structure holding ring information
3912  *
3913  * Check if the GFX engine is locked up.
3914  * Returns true if the engine appears to be locked up, false if not.
3915  */
3916 bool evergreen_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
3917 {
3918         u32 reset_mask = evergreen_gpu_check_soft_reset(rdev);
3919
3920         if (!(reset_mask & (RADEON_RESET_GFX |
3921                             RADEON_RESET_COMPUTE |
3922                             RADEON_RESET_CP))) {
3923                 radeon_ring_lockup_update(rdev, ring);
3924                 return false;
3925         }
3926         return radeon_ring_test_lockup(rdev, ring);
3927 }
3928
3929 /*
3930  * RLC
3931  */
3932 #define RLC_SAVE_RESTORE_LIST_END_MARKER    0x00000000
3933 #define RLC_CLEAR_STATE_END_MARKER          0x00000001
3934
/**
 * sumo_rlc_fini - tear down the RLC buffer objects
 *
 * @rdev: radeon_device pointer
 *
 * Unpins and frees the save/restore, clear-state and CP table buffer
 * objects allocated by sumo_rlc_init(), NULLing each pointer afterwards.
 * Safe to call when some or all objects were never allocated.
 */
void sumo_rlc_fini(struct radeon_device *rdev)
{
	int r;

	/* save restore block */
	if (rdev->rlc.save_restore_obj) {
		r = radeon_bo_reserve(rdev->rlc.save_restore_obj, false);
		if (unlikely(r != 0))
			dev_warn(rdev->dev, "(%d) reserve RLC sr bo failed\n", r);
		radeon_bo_unpin(rdev->rlc.save_restore_obj);
		radeon_bo_unreserve(rdev->rlc.save_restore_obj);

		radeon_bo_unref(&rdev->rlc.save_restore_obj);
		rdev->rlc.save_restore_obj = NULL;
	}

	/* clear state block */
	if (rdev->rlc.clear_state_obj) {
		r = radeon_bo_reserve(rdev->rlc.clear_state_obj, false);
		if (unlikely(r != 0))
			dev_warn(rdev->dev, "(%d) reserve RLC c bo failed\n", r);
		radeon_bo_unpin(rdev->rlc.clear_state_obj);
		radeon_bo_unreserve(rdev->rlc.clear_state_obj);

		radeon_bo_unref(&rdev->rlc.clear_state_obj);
		rdev->rlc.clear_state_obj = NULL;
	}

	/* cp table block (comment previously said "clear state block") */
	if (rdev->rlc.cp_table_obj) {
		r = radeon_bo_reserve(rdev->rlc.cp_table_obj, false);
		if (unlikely(r != 0))
			dev_warn(rdev->dev, "(%d) reserve RLC cp table bo failed\n", r);
		radeon_bo_unpin(rdev->rlc.cp_table_obj);
		radeon_bo_unreserve(rdev->rlc.cp_table_obj);

		radeon_bo_unref(&rdev->rlc.cp_table_obj);
		rdev->rlc.cp_table_obj = NULL;
	}
}
3975
3976 #define CP_ME_TABLE_SIZE    96
3977
/**
 * sumo_rlc_init - allocate and populate the RLC buffer objects
 *
 * @rdev: radeon_device pointer
 *
 * Creates, pins and fills up to three VRAM buffer objects used by the RLC:
 * the register save/restore list (if rdev->rlc.reg_list is set), the clear
 * state buffer (if rdev->rlc.cs_data is set), and the CP power-gating
 * table (if rdev->rlc.cp_table_size is set).  The in-buffer layout differs
 * by ASIC generation (evergreen/NI vs SI vs CIK).  On any failure the
 * partially built state is torn down via sumo_rlc_fini() and the error is
 * returned; returns 0 on success.
 */
int sumo_rlc_init(struct radeon_device *rdev)
{
	const u32 *src_ptr;
	volatile u32 *dst_ptr;
	u32 dws, data, i, j, k, reg_num;
	u32 reg_list_num, reg_list_hdr_blk_index, reg_list_blk_index = 0;
	u64 reg_list_mc_addr;
	const struct cs_section_def *cs_data;
	int r;

	src_ptr = rdev->rlc.reg_list;
	dws = rdev->rlc.reg_list_size;
	/* CIK needs extra room in the save/restore buffer */
	if (rdev->family >= CHIP_BONAIRE) {
		dws += (5 * 16) + 48 + 48 + 64;
	}
	cs_data = rdev->rlc.cs_data;

	if (src_ptr) {
		/* save restore block */
		if (rdev->rlc.save_restore_obj == NULL) {
			r = radeon_bo_create(rdev, dws * 4, PAGE_SIZE, true,
					     RADEON_GEM_DOMAIN_VRAM, NULL, &rdev->rlc.save_restore_obj);
			if (r) {
				dev_warn(rdev->dev, "(%d) create RLC sr bo failed\n", r);
				return r;
			}
		}

		r = radeon_bo_reserve(rdev->rlc.save_restore_obj, false);
		if (unlikely(r != 0)) {
			sumo_rlc_fini(rdev);
			return r;
		}
		r = radeon_bo_pin(rdev->rlc.save_restore_obj, RADEON_GEM_DOMAIN_VRAM,
				  &rdev->rlc.save_restore_gpu_addr);
		if (r) {
			radeon_bo_unreserve(rdev->rlc.save_restore_obj);
			dev_warn(rdev->dev, "(%d) pin RLC sr bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}

		r = radeon_bo_kmap(rdev->rlc.save_restore_obj, (void **)&rdev->rlc.sr_ptr);
		if (r) {
			dev_warn(rdev->dev, "(%d) map RLC sr bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}
		/* write the sr buffer */
		dst_ptr = rdev->rlc.sr_ptr;
		if (rdev->family >= CHIP_TAHITI) {
			/* SI: plain copy of the register list */
			for (i = 0; i < rdev->rlc.reg_list_size; i++)
				dst_ptr[i] = cpu_to_le32(src_ptr[i]);
		} else {
			/* ON/LN/TN */
			/* format:
			 * dw0: (reg2 << 16) | reg1
			 * dw1: reg1 save space
			 * dw2: reg2 save space
			 */
			/* registers are packed two per header dword (as dword
			 * offsets, hence the >> 2), each followed by one save
			 * slot per register; i advances by 2 per iteration */
			for (i = 0; i < dws; i++) {
				data = src_ptr[i] >> 2;
				i++;
				if (i < dws)
					data |= (src_ptr[i] >> 2) << 16;
				j = (((i - 1) * 3) / 2);
				dst_ptr[j] = cpu_to_le32(data);
			}
			j = ((i * 3) / 2);
			dst_ptr[j] = cpu_to_le32(RLC_SAVE_RESTORE_LIST_END_MARKER);
		}
		radeon_bo_kunmap(rdev->rlc.save_restore_obj);
		radeon_bo_unreserve(rdev->rlc.save_restore_obj);
	}

	if (cs_data) {
		/* clear state block */
		/* compute the buffer size in dwords per generation */
		if (rdev->family >= CHIP_BONAIRE) {
			rdev->rlc.clear_state_size = dws = cik_get_csb_size(rdev);
		} else if (rdev->family >= CHIP_TAHITI) {
			rdev->rlc.clear_state_size = si_get_csb_size(rdev);
			dws = rdev->rlc.clear_state_size + (256 / 4);
		} else {
			/* evergreen/NI: headers (3 dwords per extent, plus
			 * address high dword and end marker) + register data */
			reg_list_num = 0;
			dws = 0;
			for (i = 0; cs_data[i].section != NULL; i++) {
				for (j = 0; cs_data[i].section[j].extent != NULL; j++) {
					reg_list_num++;
					dws += cs_data[i].section[j].reg_count;
				}
			}
			reg_list_blk_index = (3 * reg_list_num + 2);
			dws += reg_list_blk_index;
			rdev->rlc.clear_state_size = dws;
		}

		if (rdev->rlc.clear_state_obj == NULL) {
			r = radeon_bo_create(rdev, dws * 4, PAGE_SIZE, true,
					     RADEON_GEM_DOMAIN_VRAM, NULL, &rdev->rlc.clear_state_obj);
			if (r) {
				dev_warn(rdev->dev, "(%d) create RLC c bo failed\n", r);
				sumo_rlc_fini(rdev);
				return r;
			}
		}
		r = radeon_bo_reserve(rdev->rlc.clear_state_obj, false);
		if (unlikely(r != 0)) {
			sumo_rlc_fini(rdev);
			return r;
		}
		r = radeon_bo_pin(rdev->rlc.clear_state_obj, RADEON_GEM_DOMAIN_VRAM,
				  &rdev->rlc.clear_state_gpu_addr);
		if (r) {
			radeon_bo_unreserve(rdev->rlc.clear_state_obj);
			dev_warn(rdev->dev, "(%d) pin RLC c bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}

		r = radeon_bo_kmap(rdev->rlc.clear_state_obj, (void **)&rdev->rlc.cs_ptr);
		if (r) {
			dev_warn(rdev->dev, "(%d) map RLC c bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}
		/* set up the cs buffer */
		dst_ptr = rdev->rlc.cs_ptr;
		if (rdev->family >= CHIP_BONAIRE) {
			cik_get_csb_buffer(rdev, dst_ptr);
		} else if (rdev->family >= CHIP_TAHITI) {
			/* SI: 256-byte header (gpu addr + size) then the CSB */
			reg_list_mc_addr = rdev->rlc.clear_state_gpu_addr + 256;
			dst_ptr[0] = cpu_to_le32(upper_32_bits(reg_list_mc_addr));
			dst_ptr[1] = cpu_to_le32(lower_32_bits(reg_list_mc_addr));
			dst_ptr[2] = cpu_to_le32(rdev->rlc.clear_state_size);
			si_get_csb_buffer(rdev, &dst_ptr[(256/4)]);
		} else {
			/* evergreen/NI: per-extent headers at the front
			 * (gpu addr low, reg offset, flags|byte count), the
			 * register data packed after them */
			reg_list_hdr_blk_index = 0;
			reg_list_mc_addr = rdev->rlc.clear_state_gpu_addr + (reg_list_blk_index * 4);
			data = upper_32_bits(reg_list_mc_addr);
			dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
			reg_list_hdr_blk_index++;
			for (i = 0; cs_data[i].section != NULL; i++) {
				for (j = 0; cs_data[i].section[j].extent != NULL; j++) {
					reg_num = cs_data[i].section[j].reg_count;
					data = reg_list_mc_addr & 0xffffffff;
					dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
					reg_list_hdr_blk_index++;

					data = (cs_data[i].section[j].reg_index * 4) & 0xffffffff;
					dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
					reg_list_hdr_blk_index++;

					data = 0x08000000 | (reg_num * 4);
					dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
					reg_list_hdr_blk_index++;

					for (k = 0; k < reg_num; k++) {
						data = cs_data[i].section[j].extent[k];
						dst_ptr[reg_list_blk_index + k] = cpu_to_le32(data);
					}
					reg_list_mc_addr += reg_num * 4;
					reg_list_blk_index += reg_num;
				}
			}
			dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(RLC_CLEAR_STATE_END_MARKER);
		}
		radeon_bo_kunmap(rdev->rlc.clear_state_obj);
		radeon_bo_unreserve(rdev->rlc.clear_state_obj);
	}

	if (rdev->rlc.cp_table_size) {
		/* cp table block (CIK power-gating) */
		if (rdev->rlc.cp_table_obj == NULL) {
			r = radeon_bo_create(rdev, rdev->rlc.cp_table_size, PAGE_SIZE, true,
					     RADEON_GEM_DOMAIN_VRAM, NULL, &rdev->rlc.cp_table_obj);
			if (r) {
				dev_warn(rdev->dev, "(%d) create RLC cp table bo failed\n", r);
				sumo_rlc_fini(rdev);
				return r;
			}
		}

		r = radeon_bo_reserve(rdev->rlc.cp_table_obj, false);
		if (unlikely(r != 0)) {
			dev_warn(rdev->dev, "(%d) reserve RLC cp table bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}
		r = radeon_bo_pin(rdev->rlc.cp_table_obj, RADEON_GEM_DOMAIN_VRAM,
				  &rdev->rlc.cp_table_gpu_addr);
		if (r) {
			radeon_bo_unreserve(rdev->rlc.cp_table_obj);
			dev_warn(rdev->dev, "(%d) pin RLC cp_table bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}
		r = radeon_bo_kmap(rdev->rlc.cp_table_obj, (void **)&rdev->rlc.cp_table_ptr);
		if (r) {
			dev_warn(rdev->dev, "(%d) map RLC cp table bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}

		cik_init_cp_pg_table(rdev);

		radeon_bo_kunmap(rdev->rlc.cp_table_obj);
		radeon_bo_unreserve(rdev->rlc.cp_table_obj);

	}

	return 0;
}
4190
4191 static void evergreen_rlc_start(struct radeon_device *rdev)
4192 {
4193         u32 mask = RLC_ENABLE;
4194
4195         if (rdev->flags & RADEON_IS_IGP) {
4196                 mask |= GFX_POWER_GATING_ENABLE | GFX_POWER_GATING_SRC;
4197         }
4198
4199         WREG32(RLC_CNTL, mask);
4200 }
4201
4202 int evergreen_rlc_resume(struct radeon_device *rdev)
4203 {
4204         u32 i;
4205         const __be32 *fw_data;
4206
4207         if (!rdev->rlc_fw)
4208                 return -EINVAL;
4209
4210         r600_rlc_stop(rdev);
4211
4212         WREG32(RLC_HB_CNTL, 0);
4213
4214         if (rdev->flags & RADEON_IS_IGP) {
4215                 if (rdev->family == CHIP_ARUBA) {
4216                         u32 always_on_bitmap =
4217                                 3 | (3 << (16 * rdev->config.cayman.max_shader_engines));
4218                         /* find out the number of active simds */
4219                         u32 tmp = (RREG32(CC_GC_SHADER_PIPE_CONFIG) & 0xffff0000) >> 16;
4220                         tmp |= 0xffffffff << rdev->config.cayman.max_simds_per_se;
4221                         tmp = hweight32(~tmp);
4222                         if (tmp == rdev->config.cayman.max_simds_per_se) {
4223                                 WREG32(TN_RLC_LB_ALWAYS_ACTIVE_SIMD_MASK, always_on_bitmap);
4224                                 WREG32(TN_RLC_LB_PARAMS, 0x00601004);
4225                                 WREG32(TN_RLC_LB_INIT_SIMD_MASK, 0xffffffff);
4226                                 WREG32(TN_RLC_LB_CNTR_INIT, 0x00000000);
4227                                 WREG32(TN_RLC_LB_CNTR_MAX, 0x00002000);
4228                         }
4229                 } else {
4230                         WREG32(RLC_HB_WPTR_LSB_ADDR, 0);
4231                         WREG32(RLC_HB_WPTR_MSB_ADDR, 0);
4232                 }
4233                 WREG32(TN_RLC_SAVE_AND_RESTORE_BASE, rdev->rlc.save_restore_gpu_addr >> 8);
4234                 WREG32(TN_RLC_CLEAR_STATE_RESTORE_BASE, rdev->rlc.clear_state_gpu_addr >> 8);
4235         } else {
4236                 WREG32(RLC_HB_BASE, 0);
4237                 WREG32(RLC_HB_RPTR, 0);
4238                 WREG32(RLC_HB_WPTR, 0);
4239                 WREG32(RLC_HB_WPTR_LSB_ADDR, 0);
4240                 WREG32(RLC_HB_WPTR_MSB_ADDR, 0);
4241         }
4242         WREG32(RLC_MC_CNTL, 0);
4243         WREG32(RLC_UCODE_CNTL, 0);
4244
4245         fw_data = (const __be32 *)rdev->rlc_fw->data;
4246         if (rdev->family >= CHIP_ARUBA) {
4247                 for (i = 0; i < ARUBA_RLC_UCODE_SIZE; i++) {
4248                         WREG32(RLC_UCODE_ADDR, i);
4249                         WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
4250                 }
4251         } else if (rdev->family >= CHIP_CAYMAN) {
4252                 for (i = 0; i < CAYMAN_RLC_UCODE_SIZE; i++) {
4253                         WREG32(RLC_UCODE_ADDR, i);
4254                         WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
4255                 }
4256         } else {
4257                 for (i = 0; i < EVERGREEN_RLC_UCODE_SIZE; i++) {
4258                         WREG32(RLC_UCODE_ADDR, i);
4259                         WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
4260                 }
4261         }
4262         WREG32(RLC_UCODE_ADDR, 0);
4263
4264         evergreen_rlc_start(rdev);
4265
4266         return 0;
4267 }
4268
4269 /* Interrupts */
4270
4271 u32 evergreen_get_vblank_counter(struct radeon_device *rdev, int crtc)
4272 {
4273         if (crtc >= rdev->num_crtc)
4274                 return 0;
4275         else
4276                 return RREG32(CRTC_STATUS_FRAME_COUNT + crtc_offsets[crtc]);
4277 }
4278
/**
 * evergreen_disable_interrupt_state - mask every interrupt source
 *
 * @rdev: radeon_device pointer
 *
 * Disables CP/DMA trap interrupts, GRBM interrupts, per-CRTC vblank and
 * pageflip interrupts, DAC autodetect and HPD interrupts.  The HPD
 * polarity bit is preserved when clearing the HPD enables.  Called when
 * tearing down or reprogramming the interrupt state.
 */
void evergreen_disable_interrupt_state(struct radeon_device *rdev)
{
	u32 tmp;

	/* Cayman+ has three CP rings and a second DMA engine */
	if (rdev->family >= CHIP_CAYMAN) {
		cayman_cp_int_cntl_setup(rdev, 0,
					 CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
		cayman_cp_int_cntl_setup(rdev, 1, 0);
		cayman_cp_int_cntl_setup(rdev, 2, 0);
		tmp = RREG32(CAYMAN_DMA1_CNTL) & ~TRAP_ENABLE;
		WREG32(CAYMAN_DMA1_CNTL, tmp);
	} else
		WREG32(CP_INT_CNTL, CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
	tmp = RREG32(DMA_CNTL) & ~TRAP_ENABLE;
	WREG32(DMA_CNTL, tmp);
	WREG32(GRBM_INT_CNTL, 0);
	/* per-CRTC interrupt masks; only the populated CRTCs are touched */
	WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
	WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
	if (rdev->num_crtc >= 4) {
		WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
		WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
	}
	if (rdev->num_crtc >= 6) {
		WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
		WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
	}

	/* per-CRTC pageflip (GRPH) interrupts */
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
	if (rdev->num_crtc >= 4) {
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
	}
	if (rdev->num_crtc >= 6) {
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
	}

	/* only one DAC on DCE5 */
	if (!ASIC_IS_DCE5(rdev))
		WREG32(DACA_AUTODETECT_INT_CONTROL, 0);
	WREG32(DACB_AUTODETECT_INT_CONTROL, 0);

	/* clear HPD interrupt enables but keep the configured polarity */
	tmp = RREG32(DC_HPD1_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD1_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD2_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD2_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD3_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD3_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD4_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD4_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD5_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD5_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD6_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD6_INT_CONTROL, tmp);

}
4336
/**
 * evergreen_irq_set - program the hardware interrupt enables
 *
 * @rdev: radeon_device pointer
 *
 * Builds enable masks for every interrupt source the driver tracks in
 * rdev->irq (CP ring(s), DMA ring(s), per-crtc vblank, hotplug, HDMI
 * audio format change, thermal) and writes them to the hardware in one
 * pass.  If the IH ring is disabled, all sources are forced off instead.
 * Returns 0 on success, -EINVAL if no IRQ handler is installed.
 */
int evergreen_irq_set(struct radeon_device *rdev)
{
	u32 cp_int_cntl = CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE;
	u32 cp_int_cntl1 = 0, cp_int_cntl2 = 0;
	u32 crtc1 = 0, crtc2 = 0, crtc3 = 0, crtc4 = 0, crtc5 = 0, crtc6 = 0;
	u32 hpd1, hpd2, hpd3, hpd4, hpd5, hpd6;
	u32 grbm_int_cntl = 0;
	u32 grph1 = 0, grph2 = 0, grph3 = 0, grph4 = 0, grph5 = 0, grph6 = 0;
	u32 afmt1 = 0, afmt2 = 0, afmt3 = 0, afmt4 = 0, afmt5 = 0, afmt6 = 0;
	u32 dma_cntl, dma_cntl1 = 0;
	u32 thermal_int = 0;

	if (!rdev->irq.installed) {
		WARN(1, "Can't enable IRQ/MSI because no handler is installed\n");
		return -EINVAL;
	}
	/* don't enable anything if the ih is disabled */
	if (!rdev->ih.enabled) {
		r600_disable_interrupts(rdev);
		/* force the active interrupt state to all disabled */
		evergreen_disable_interrupt_state(rdev);
		return 0;
	}

	/* read current HPD control values with the enable bit cleared;
	 * polarity and other bits are preserved
	 */
	hpd1 = RREG32(DC_HPD1_INT_CONTROL) & ~DC_HPDx_INT_EN;
	hpd2 = RREG32(DC_HPD2_INT_CONTROL) & ~DC_HPDx_INT_EN;
	hpd3 = RREG32(DC_HPD3_INT_CONTROL) & ~DC_HPDx_INT_EN;
	hpd4 = RREG32(DC_HPD4_INT_CONTROL) & ~DC_HPDx_INT_EN;
	hpd5 = RREG32(DC_HPD5_INT_CONTROL) & ~DC_HPDx_INT_EN;
	hpd6 = RREG32(DC_HPD6_INT_CONTROL) & ~DC_HPDx_INT_EN;
	/* ARUBA (TN) uses a different thermal interrupt register */
	if (rdev->family == CHIP_ARUBA)
		thermal_int = RREG32(TN_CG_THERMAL_INT_CTRL) &
			~(THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW);
	else
		thermal_int = RREG32(CG_THERMAL_INT) &
			~(THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW);

	/* HDMI audio packet control, with the write-trigger mask cleared */
	afmt1 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
	afmt2 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
	afmt3 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
	afmt4 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
	afmt5 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
	afmt6 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;

	dma_cntl = RREG32(DMA_CNTL) & ~TRAP_ENABLE;

	if (rdev->family >= CHIP_CAYMAN) {
		/* enable CP interrupts on all rings */
		if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
			DRM_DEBUG("evergreen_irq_set: sw int gfx\n");
			cp_int_cntl |= TIME_STAMP_INT_ENABLE;
		}
		if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP1_INDEX])) {
			DRM_DEBUG("evergreen_irq_set: sw int cp1\n");
			cp_int_cntl1 |= TIME_STAMP_INT_ENABLE;
		}
		if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP2_INDEX])) {
			DRM_DEBUG("evergreen_irq_set: sw int cp2\n");
			cp_int_cntl2 |= TIME_STAMP_INT_ENABLE;
		}
	} else {
		if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
			DRM_DEBUG("evergreen_irq_set: sw int gfx\n");
			cp_int_cntl |= RB_INT_ENABLE;
			cp_int_cntl |= TIME_STAMP_INT_ENABLE;
		}
	}

	if (atomic_read(&rdev->irq.ring_int[R600_RING_TYPE_DMA_INDEX])) {
		DRM_DEBUG("r600_irq_set: sw int dma\n");
		dma_cntl |= TRAP_ENABLE;
	}

	if (rdev->family >= CHIP_CAYMAN) {
		/* second DMA engine is cayman+ only */
		dma_cntl1 = RREG32(CAYMAN_DMA1_CNTL) & ~TRAP_ENABLE;
		if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_DMA1_INDEX])) {
			DRM_DEBUG("r600_irq_set: sw int dma1\n");
			dma_cntl1 |= TRAP_ENABLE;
		}
	}

	if (rdev->irq.dpm_thermal) {
		DRM_DEBUG("dpm thermal\n");
		thermal_int |= THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW;
	}

	/* vblank enables: needed for either a vblank client or a pending
	 * page flip on that crtc
	 */
	if (rdev->irq.crtc_vblank_int[0] ||
	    atomic_read(&rdev->irq.pflip[0])) {
		DRM_DEBUG("evergreen_irq_set: vblank 0\n");
		crtc1 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[1] ||
	    atomic_read(&rdev->irq.pflip[1])) {
		DRM_DEBUG("evergreen_irq_set: vblank 1\n");
		crtc2 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[2] ||
	    atomic_read(&rdev->irq.pflip[2])) {
		DRM_DEBUG("evergreen_irq_set: vblank 2\n");
		crtc3 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[3] ||
	    atomic_read(&rdev->irq.pflip[3])) {
		DRM_DEBUG("evergreen_irq_set: vblank 3\n");
		crtc4 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[4] ||
	    atomic_read(&rdev->irq.pflip[4])) {
		DRM_DEBUG("evergreen_irq_set: vblank 4\n");
		crtc5 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[5] ||
	    atomic_read(&rdev->irq.pflip[5])) {
		DRM_DEBUG("evergreen_irq_set: vblank 5\n");
		crtc6 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.hpd[0]) {
		DRM_DEBUG("evergreen_irq_set: hpd 1\n");
		hpd1 |= DC_HPDx_INT_EN;
	}
	if (rdev->irq.hpd[1]) {
		DRM_DEBUG("evergreen_irq_set: hpd 2\n");
		hpd2 |= DC_HPDx_INT_EN;
	}
	if (rdev->irq.hpd[2]) {
		DRM_DEBUG("evergreen_irq_set: hpd 3\n");
		hpd3 |= DC_HPDx_INT_EN;
	}
	if (rdev->irq.hpd[3]) {
		DRM_DEBUG("evergreen_irq_set: hpd 4\n");
		hpd4 |= DC_HPDx_INT_EN;
	}
	if (rdev->irq.hpd[4]) {
		DRM_DEBUG("evergreen_irq_set: hpd 5\n");
		hpd5 |= DC_HPDx_INT_EN;
	}
	if (rdev->irq.hpd[5]) {
		DRM_DEBUG("evergreen_irq_set: hpd 6\n");
		hpd6 |= DC_HPDx_INT_EN;
	}
	if (rdev->irq.afmt[0]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 0\n");
		afmt1 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}
	if (rdev->irq.afmt[1]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 1\n");
		afmt2 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}
	if (rdev->irq.afmt[2]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 2\n");
		afmt3 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}
	if (rdev->irq.afmt[3]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 3\n");
		afmt4 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}
	if (rdev->irq.afmt[4]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 4\n");
		afmt5 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}
	if (rdev->irq.afmt[5]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 5\n");
		afmt6 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}

	/* now write everything back to the hardware */
	if (rdev->family >= CHIP_CAYMAN) {
		cayman_cp_int_cntl_setup(rdev, 0, cp_int_cntl);
		cayman_cp_int_cntl_setup(rdev, 1, cp_int_cntl1);
		cayman_cp_int_cntl_setup(rdev, 2, cp_int_cntl2);
	} else
		WREG32(CP_INT_CNTL, cp_int_cntl);

	WREG32(DMA_CNTL, dma_cntl);

	if (rdev->family >= CHIP_CAYMAN)
		WREG32(CAYMAN_DMA1_CNTL, dma_cntl1);

	WREG32(GRBM_INT_CNTL, grbm_int_cntl);

	WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, crtc1);
	WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, crtc2);
	if (rdev->num_crtc >= 4) {
		WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, crtc3);
		WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, crtc4);
	}
	if (rdev->num_crtc >= 6) {
		WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, crtc5);
		WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, crtc6);
	}

	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, grph1);
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, grph2);
	if (rdev->num_crtc >= 4) {
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, grph3);
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, grph4);
	}
	if (rdev->num_crtc >= 6) {
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, grph5);
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, grph6);
	}

	WREG32(DC_HPD1_INT_CONTROL, hpd1);
	WREG32(DC_HPD2_INT_CONTROL, hpd2);
	WREG32(DC_HPD3_INT_CONTROL, hpd3);
	WREG32(DC_HPD4_INT_CONTROL, hpd4);
	WREG32(DC_HPD5_INT_CONTROL, hpd5);
	WREG32(DC_HPD6_INT_CONTROL, hpd6);
	if (rdev->family == CHIP_ARUBA)
		WREG32(TN_CG_THERMAL_INT_CTRL, thermal_int);
	else
		WREG32(CG_THERMAL_INT, thermal_int);

	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, afmt1);
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, afmt2);
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, afmt3);
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, afmt4);
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, afmt5);
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, afmt6);

	return 0;
}
4558
4559 static void evergreen_irq_ack(struct radeon_device *rdev)
4560 {
4561         u32 tmp;
4562
4563         rdev->irq.stat_regs.evergreen.disp_int = RREG32(DISP_INTERRUPT_STATUS);
4564         rdev->irq.stat_regs.evergreen.disp_int_cont = RREG32(DISP_INTERRUPT_STATUS_CONTINUE);
4565         rdev->irq.stat_regs.evergreen.disp_int_cont2 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE2);
4566         rdev->irq.stat_regs.evergreen.disp_int_cont3 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE3);
4567         rdev->irq.stat_regs.evergreen.disp_int_cont4 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE4);
4568         rdev->irq.stat_regs.evergreen.disp_int_cont5 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE5);
4569         rdev->irq.stat_regs.evergreen.d1grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
4570         rdev->irq.stat_regs.evergreen.d2grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
4571         if (rdev->num_crtc >= 4) {
4572                 rdev->irq.stat_regs.evergreen.d3grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
4573                 rdev->irq.stat_regs.evergreen.d4grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
4574         }
4575         if (rdev->num_crtc >= 6) {
4576                 rdev->irq.stat_regs.evergreen.d5grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
4577                 rdev->irq.stat_regs.evergreen.d6grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);
4578         }
4579
4580         rdev->irq.stat_regs.evergreen.afmt_status1 = RREG32(AFMT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
4581         rdev->irq.stat_regs.evergreen.afmt_status2 = RREG32(AFMT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
4582         rdev->irq.stat_regs.evergreen.afmt_status3 = RREG32(AFMT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
4583         rdev->irq.stat_regs.evergreen.afmt_status4 = RREG32(AFMT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
4584         rdev->irq.stat_regs.evergreen.afmt_status5 = RREG32(AFMT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
4585         rdev->irq.stat_regs.evergreen.afmt_status6 = RREG32(AFMT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);
4586
4587         if (rdev->irq.stat_regs.evergreen.d1grph_int & GRPH_PFLIP_INT_OCCURRED)
4588                 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4589         if (rdev->irq.stat_regs.evergreen.d2grph_int & GRPH_PFLIP_INT_OCCURRED)
4590                 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4591         if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT)
4592                 WREG32(VBLANK_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VBLANK_ACK);
4593         if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT)
4594                 WREG32(VLINE_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VLINE_ACK);
4595         if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT)
4596                 WREG32(VBLANK_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VBLANK_ACK);
4597         if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT)
4598                 WREG32(VLINE_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VLINE_ACK);
4599
4600         if (rdev->num_crtc >= 4) {
4601                 if (rdev->irq.stat_regs.evergreen.d3grph_int & GRPH_PFLIP_INT_OCCURRED)
4602                         WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4603                 if (rdev->irq.stat_regs.evergreen.d4grph_int & GRPH_PFLIP_INT_OCCURRED)
4604                         WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4605                 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT)
4606                         WREG32(VBLANK_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VBLANK_ACK);
4607                 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT)
4608                         WREG32(VLINE_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VLINE_ACK);
4609                 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT)
4610                         WREG32(VBLANK_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VBLANK_ACK);
4611                 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT)
4612                         WREG32(VLINE_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VLINE_ACK);
4613         }
4614
4615         if (rdev->num_crtc >= 6) {
4616                 if (rdev->irq.stat_regs.evergreen.d5grph_int & GRPH_PFLIP_INT_OCCURRED)
4617                         WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4618                 if (rdev->irq.stat_regs.evergreen.d6grph_int & GRPH_PFLIP_INT_OCCURRED)
4619                         WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4620                 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT)
4621                         WREG32(VBLANK_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VBLANK_ACK);
4622                 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT)
4623                         WREG32(VLINE_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VLINE_ACK);
4624                 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT)
4625                         WREG32(VBLANK_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VBLANK_ACK);
4626                 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT)
4627                         WREG32(VLINE_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VLINE_ACK);
4628         }
4629
4630         if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT) {
4631                 tmp = RREG32(DC_HPD1_INT_CONTROL);
4632                 tmp |= DC_HPDx_INT_ACK;
4633                 WREG32(DC_HPD1_INT_CONTROL, tmp);
4634         }
4635         if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT) {
4636                 tmp = RREG32(DC_HPD2_INT_CONTROL);
4637                 tmp |= DC_HPDx_INT_ACK;
4638                 WREG32(DC_HPD2_INT_CONTROL, tmp);
4639         }
4640         if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT) {
4641                 tmp = RREG32(DC_HPD3_INT_CONTROL);
4642                 tmp |= DC_HPDx_INT_ACK;
4643                 WREG32(DC_HPD3_INT_CONTROL, tmp);
4644         }
4645         if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT) {
4646                 tmp = RREG32(DC_HPD4_INT_CONTROL);
4647                 tmp |= DC_HPDx_INT_ACK;
4648                 WREG32(DC_HPD4_INT_CONTROL, tmp);
4649         }
4650         if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT) {
4651                 tmp = RREG32(DC_HPD5_INT_CONTROL);
4652                 tmp |= DC_HPDx_INT_ACK;
4653                 WREG32(DC_HPD5_INT_CONTROL, tmp);
4654         }
4655         if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT) {
4656                 tmp = RREG32(DC_HPD5_INT_CONTROL);
4657                 tmp |= DC_HPDx_INT_ACK;
4658                 WREG32(DC_HPD6_INT_CONTROL, tmp);
4659         }
4660         if (rdev->irq.stat_regs.evergreen.afmt_status1 & AFMT_AZ_FORMAT_WTRIG) {
4661                 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET);
4662                 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4663                 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, tmp);
4664         }
4665         if (rdev->irq.stat_regs.evergreen.afmt_status2 & AFMT_AZ_FORMAT_WTRIG) {
4666                 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET);
4667                 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4668                 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, tmp);
4669         }
4670         if (rdev->irq.stat_regs.evergreen.afmt_status3 & AFMT_AZ_FORMAT_WTRIG) {
4671                 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET);
4672                 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4673                 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, tmp);
4674         }
4675         if (rdev->irq.stat_regs.evergreen.afmt_status4 & AFMT_AZ_FORMAT_WTRIG) {
4676                 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET);
4677                 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4678                 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, tmp);
4679         }
4680         if (rdev->irq.stat_regs.evergreen.afmt_status5 & AFMT_AZ_FORMAT_WTRIG) {
4681                 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET);
4682                 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4683                 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, tmp);
4684         }
4685         if (rdev->irq.stat_regs.evergreen.afmt_status6 & AFMT_AZ_FORMAT_WTRIG) {
4686                 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET);
4687                 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4688                 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, tmp);
4689         }
4690 }
4691
/**
 * evergreen_irq_disable - disable interrupt generation and clear state
 *
 * @rdev: radeon_device pointer
 *
 * Disables the IH, waits for in-flight interrupts to land, acks any
 * pending display interrupts, and forces all enable registers off.
 */
static void evergreen_irq_disable(struct radeon_device *rdev)
{
	r600_disable_interrupts(rdev);
	/* Wait and acknowledge irq */
	mdelay(1);
	evergreen_irq_ack(rdev);
	evergreen_disable_interrupt_state(rdev);
}
4700
/**
 * evergreen_irq_suspend - quiesce interrupts for suspend
 *
 * @rdev: radeon_device pointer
 *
 * Disables all interrupt sources and stops the RLC.
 */
void evergreen_irq_suspend(struct radeon_device *rdev)
{
	evergreen_irq_disable(rdev);
	r600_rlc_stop(rdev);
}
4706
4707 static u32 evergreen_get_ih_wptr(struct radeon_device *rdev)
4708 {
4709         u32 wptr, tmp;
4710
4711         if (rdev->wb.enabled)
4712                 wptr = le32_to_cpu(rdev->wb.wb[R600_WB_IH_WPTR_OFFSET/4]);
4713         else
4714                 wptr = RREG32(IH_RB_WPTR);
4715
4716         if (wptr & RB_OVERFLOW) {
4717                 /* When a ring buffer overflow happen start parsing interrupt
4718                  * from the last not overwritten vector (wptr + 16). Hopefully
4719                  * this should allow us to catchup.
4720                  */
4721                 dev_warn(rdev->dev, "IH ring buffer overflow (0x%08X, %d, %d)\n",
4722                         wptr, rdev->ih.rptr, (wptr + 16) + rdev->ih.ptr_mask);
4723                 rdev->ih.rptr = (wptr + 16) & rdev->ih.ptr_mask;
4724                 tmp = RREG32(IH_RB_CNTL);
4725                 tmp |= IH_WPTR_OVERFLOW_CLEAR;
4726                 WREG32(IH_RB_CNTL, tmp);
4727         }
4728         return (wptr & rdev->ih.ptr_mask);
4729 }
4730
4731 int evergreen_irq_process(struct radeon_device *rdev)
4732 {
4733         u32 wptr;
4734         u32 rptr;
4735         u32 src_id, src_data;
4736         u32 ring_index;
4737         bool queue_hotplug = false;
4738         bool queue_hdmi = false;
4739         bool queue_thermal = false;
4740         u32 status, addr;
4741
4742         if (!rdev->ih.enabled || rdev->shutdown)
4743                 return IRQ_NONE;
4744
4745         wptr = evergreen_get_ih_wptr(rdev);
4746
4747 restart_ih:
4748         /* is somebody else already processing irqs? */
4749         if (atomic_xchg(&rdev->ih.lock, 1))
4750                 return IRQ_NONE;
4751
4752         rptr = rdev->ih.rptr;
4753         DRM_DEBUG("r600_irq_process start: rptr %d, wptr %d\n", rptr, wptr);
4754
4755         /* Order reading of wptr vs. reading of IH ring data */
4756         rmb();
4757
4758         /* display interrupts */
4759         evergreen_irq_ack(rdev);
4760
4761         while (rptr != wptr) {
4762                 /* wptr/rptr are in bytes! */
4763                 ring_index = rptr / 4;
4764                 src_id =  le32_to_cpu(rdev->ih.ring[ring_index]) & 0xff;
4765                 src_data = le32_to_cpu(rdev->ih.ring[ring_index + 1]) & 0xfffffff;
4766
4767                 switch (src_id) {
4768                 case 1: /* D1 vblank/vline */
4769                         switch (src_data) {
4770                         case 0: /* D1 vblank */
4771                                 if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT) {
4772                                         if (rdev->irq.crtc_vblank_int[0]) {
4773                                                 drm_handle_vblank(rdev->ddev, 0);
4774                                                 rdev->pm.vblank_sync = true;
4775                                                 wake_up(&rdev->irq.vblank_queue);
4776                                         }
4777                                         if (atomic_read(&rdev->irq.pflip[0]))
4778                                                 radeon_crtc_handle_flip(rdev, 0);
4779                                         rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VBLANK_INTERRUPT;
4780                                         DRM_DEBUG("IH: D1 vblank\n");
4781                                 }
4782                                 break;
4783                         case 1: /* D1 vline */
4784                                 if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT) {
4785                                         rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VLINE_INTERRUPT;
4786                                         DRM_DEBUG("IH: D1 vline\n");
4787                                 }
4788                                 break;
4789                         default:
4790                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4791                                 break;
4792                         }
4793                         break;
4794                 case 2: /* D2 vblank/vline */
4795                         switch (src_data) {
4796                         case 0: /* D2 vblank */
4797                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT) {
4798                                         if (rdev->irq.crtc_vblank_int[1]) {
4799                                                 drm_handle_vblank(rdev->ddev, 1);
4800                                                 rdev->pm.vblank_sync = true;
4801                                                 wake_up(&rdev->irq.vblank_queue);
4802                                         }
4803                                         if (atomic_read(&rdev->irq.pflip[1]))
4804                                                 radeon_crtc_handle_flip(rdev, 1);
4805                                         rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VBLANK_INTERRUPT;
4806                                         DRM_DEBUG("IH: D2 vblank\n");
4807                                 }
4808                                 break;
4809                         case 1: /* D2 vline */
4810                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT) {
4811                                         rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VLINE_INTERRUPT;
4812                                         DRM_DEBUG("IH: D2 vline\n");
4813                                 }
4814                                 break;
4815                         default:
4816                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4817                                 break;
4818                         }
4819                         break;
4820                 case 3: /* D3 vblank/vline */
4821                         switch (src_data) {
4822                         case 0: /* D3 vblank */
4823                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT) {
4824                                         if (rdev->irq.crtc_vblank_int[2]) {
4825                                                 drm_handle_vblank(rdev->ddev, 2);
4826                                                 rdev->pm.vblank_sync = true;
4827                                                 wake_up(&rdev->irq.vblank_queue);
4828                                         }
4829                                         if (atomic_read(&rdev->irq.pflip[2]))
4830                                                 radeon_crtc_handle_flip(rdev, 2);
4831                                         rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VBLANK_INTERRUPT;
4832                                         DRM_DEBUG("IH: D3 vblank\n");
4833                                 }
4834                                 break;
4835                         case 1: /* D3 vline */
4836                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT) {
4837                                         rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VLINE_INTERRUPT;
4838                                         DRM_DEBUG("IH: D3 vline\n");
4839                                 }
4840                                 break;
4841                         default:
4842                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4843                                 break;
4844                         }
4845                         break;
4846                 case 4: /* D4 vblank/vline */
4847                         switch (src_data) {
4848                         case 0: /* D4 vblank */
4849                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT) {
4850                                         if (rdev->irq.crtc_vblank_int[3]) {
4851                                                 drm_handle_vblank(rdev->ddev, 3);
4852                                                 rdev->pm.vblank_sync = true;
4853                                                 wake_up(&rdev->irq.vblank_queue);
4854                                         }
4855                                         if (atomic_read(&rdev->irq.pflip[3]))
4856                                                 radeon_crtc_handle_flip(rdev, 3);
4857                                         rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VBLANK_INTERRUPT;
4858                                         DRM_DEBUG("IH: D4 vblank\n");
4859                                 }
4860                                 break;
4861                         case 1: /* D4 vline */
4862                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT) {
4863                                         rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VLINE_INTERRUPT;
4864                                         DRM_DEBUG("IH: D4 vline\n");
4865                                 }
4866                                 break;
4867                         default:
4868                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4869                                 break;
4870                         }
4871                         break;
4872                 case 5: /* D5 vblank/vline */
4873                         switch (src_data) {
4874                         case 0: /* D5 vblank */
4875                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT) {
4876                                         if (rdev->irq.crtc_vblank_int[4]) {
4877                                                 drm_handle_vblank(rdev->ddev, 4);
4878                                                 rdev->pm.vblank_sync = true;
4879                                                 wake_up(&rdev->irq.vblank_queue);
4880                                         }
4881                                         if (atomic_read(&rdev->irq.pflip[4]))
4882                                                 radeon_crtc_handle_flip(rdev, 4);
4883                                         rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VBLANK_INTERRUPT;
4884                                         DRM_DEBUG("IH: D5 vblank\n");
4885                                 }
4886                                 break;
4887                         case 1: /* D5 vline */
4888                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT) {
4889                                         rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VLINE_INTERRUPT;
4890                                         DRM_DEBUG("IH: D5 vline\n");
4891                                 }
4892                                 break;
4893                         default:
4894                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4895                                 break;
4896                         }
4897                         break;
4898                 case 6: /* D6 vblank/vline */
4899                         switch (src_data) {
4900                         case 0: /* D6 vblank */
4901                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT) {
4902                                         if (rdev->irq.crtc_vblank_int[5]) {
4903                                                 drm_handle_vblank(rdev->ddev, 5);
4904                                                 rdev->pm.vblank_sync = true;
4905                                                 wake_up(&rdev->irq.vblank_queue);
4906                                         }
4907                                         if (atomic_read(&rdev->irq.pflip[5]))
4908                                                 radeon_crtc_handle_flip(rdev, 5);
4909                                         rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VBLANK_INTERRUPT;
4910                                         DRM_DEBUG("IH: D6 vblank\n");
4911                                 }
4912                                 break;
4913                         case 1: /* D6 vline */
4914                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT) {
4915                                         rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VLINE_INTERRUPT;
4916                                         DRM_DEBUG("IH: D6 vline\n");
4917                                 }
4918                                 break;
4919                         default:
4920                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4921                                 break;
4922                         }
4923                         break;
4924                 case 42: /* HPD hotplug */
4925                         switch (src_data) {
4926                         case 0:
4927                                 if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT) {
4928                                         rdev->irq.stat_regs.evergreen.disp_int &= ~DC_HPD1_INTERRUPT;
4929                                         queue_hotplug = true;
4930                                         DRM_DEBUG("IH: HPD1\n");
4931                                 }
4932                                 break;
4933                         case 1:
4934                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT) {
4935                                         rdev->irq.stat_regs.evergreen.disp_int_cont &= ~DC_HPD2_INTERRUPT;
4936                                         queue_hotplug = true;
4937                                         DRM_DEBUG("IH: HPD2\n");
4938                                 }
4939                                 break;
4940                         case 2:
4941                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT) {
4942                                         rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~DC_HPD3_INTERRUPT;
4943                                         queue_hotplug = true;
4944                                         DRM_DEBUG("IH: HPD3\n");
4945                                 }
4946                                 break;
4947                         case 3:
4948                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT) {
4949                                         rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~DC_HPD4_INTERRUPT;
4950                                         queue_hotplug = true;
4951                                         DRM_DEBUG("IH: HPD4\n");
4952                                 }
4953                                 break;
4954                         case 4:
4955                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT) {
4956                                         rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~DC_HPD5_INTERRUPT;
4957                                         queue_hotplug = true;
4958                                         DRM_DEBUG("IH: HPD5\n");
4959                                 }
4960                                 break;
4961                         case 5:
4962                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT) {
4963                                         rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~DC_HPD6_INTERRUPT;
4964                                         queue_hotplug = true;
4965                                         DRM_DEBUG("IH: HPD6\n");
4966                                 }
4967                                 break;
4968                         default:
4969                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4970                                 break;
4971                         }
4972                         break;
4973                 case 44: /* hdmi */
4974                         switch (src_data) {
4975                         case 0:
4976                                 if (rdev->irq.stat_regs.evergreen.afmt_status1 & AFMT_AZ_FORMAT_WTRIG) {
4977                                         rdev->irq.stat_regs.evergreen.afmt_status1 &= ~AFMT_AZ_FORMAT_WTRIG;
4978                                         queue_hdmi = true;
4979                                         DRM_DEBUG("IH: HDMI0\n");
4980                                 }
4981                                 break;
4982                         case 1:
4983                                 if (rdev->irq.stat_regs.evergreen.afmt_status2 & AFMT_AZ_FORMAT_WTRIG) {
4984                                         rdev->irq.stat_regs.evergreen.afmt_status2 &= ~AFMT_AZ_FORMAT_WTRIG;
4985                                         queue_hdmi = true;
4986                                         DRM_DEBUG("IH: HDMI1\n");
4987                                 }
4988                                 break;
4989                         case 2:
4990                                 if (rdev->irq.stat_regs.evergreen.afmt_status3 & AFMT_AZ_FORMAT_WTRIG) {
4991                                         rdev->irq.stat_regs.evergreen.afmt_status3 &= ~AFMT_AZ_FORMAT_WTRIG;
4992                                         queue_hdmi = true;
4993                                         DRM_DEBUG("IH: HDMI2\n");
4994                                 }
4995                                 break;
4996                         case 3:
4997                                 if (rdev->irq.stat_regs.evergreen.afmt_status4 & AFMT_AZ_FORMAT_WTRIG) {
4998                                         rdev->irq.stat_regs.evergreen.afmt_status4 &= ~AFMT_AZ_FORMAT_WTRIG;
4999                                         queue_hdmi = true;
5000                                         DRM_DEBUG("IH: HDMI3\n");
5001                                 }
5002                                 break;
5003                         case 4:
5004                                 if (rdev->irq.stat_regs.evergreen.afmt_status5 & AFMT_AZ_FORMAT_WTRIG) {
5005                                         rdev->irq.stat_regs.evergreen.afmt_status5 &= ~AFMT_AZ_FORMAT_WTRIG;
5006                                         queue_hdmi = true;
5007                                         DRM_DEBUG("IH: HDMI4\n");
5008                                 }
5009                                 break;
5010                         case 5:
5011                                 if (rdev->irq.stat_regs.evergreen.afmt_status6 & AFMT_AZ_FORMAT_WTRIG) {
5012                                         rdev->irq.stat_regs.evergreen.afmt_status6 &= ~AFMT_AZ_FORMAT_WTRIG;
5013                                         queue_hdmi = true;
5014                                         DRM_DEBUG("IH: HDMI5\n");
5015                                 }
5016                                 break;
5017                         default:
5018                                 DRM_ERROR("Unhandled interrupt: %d %d\n", src_id, src_data);
5019                                 break;
5020                         }
5021                 case 124: /* UVD */
5022                         DRM_DEBUG("IH: UVD int: 0x%08x\n", src_data);
5023                         radeon_fence_process(rdev, R600_RING_TYPE_UVD_INDEX);
5024                         break;
5025                 case 146:
5026                 case 147:
5027                         addr = RREG32(VM_CONTEXT1_PROTECTION_FAULT_ADDR);
5028                         status = RREG32(VM_CONTEXT1_PROTECTION_FAULT_STATUS);
5029                         dev_err(rdev->dev, "GPU fault detected: %d 0x%08x\n", src_id, src_data);
5030                         dev_err(rdev->dev, "  VM_CONTEXT1_PROTECTION_FAULT_ADDR   0x%08X\n",
5031                                 addr);
5032                         dev_err(rdev->dev, "  VM_CONTEXT1_PROTECTION_FAULT_STATUS 0x%08X\n",
5033                                 status);
5034                         cayman_vm_decode_fault(rdev, status, addr);
5035                         /* reset addr and status */
5036                         WREG32_P(VM_CONTEXT1_CNTL2, 1, ~1);
5037                         break;
5038                 case 176: /* CP_INT in ring buffer */
5039                 case 177: /* CP_INT in IB1 */
5040                 case 178: /* CP_INT in IB2 */
5041                         DRM_DEBUG("IH: CP int: 0x%08x\n", src_data);
5042                         radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
5043                         break;
5044                 case 181: /* CP EOP event */
5045                         DRM_DEBUG("IH: CP EOP\n");
5046                         if (rdev->family >= CHIP_CAYMAN) {
5047                                 switch (src_data) {
5048                                 case 0:
5049                                         radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
5050                                         break;
5051                                 case 1:
5052                                         radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP1_INDEX);
5053                                         break;
5054                                 case 2:
5055                                         radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP2_INDEX);
5056                                         break;
5057                                 }
5058                         } else
5059                                 radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
5060                         break;
5061                 case 224: /* DMA trap event */
5062                         DRM_DEBUG("IH: DMA trap\n");
5063                         radeon_fence_process(rdev, R600_RING_TYPE_DMA_INDEX);
5064                         break;
5065                 case 230: /* thermal low to high */
5066                         DRM_DEBUG("IH: thermal low to high\n");
5067                         rdev->pm.dpm.thermal.high_to_low = false;
5068                         queue_thermal = true;
5069                         break;
5070                 case 231: /* thermal high to low */
5071                         DRM_DEBUG("IH: thermal high to low\n");
5072                         rdev->pm.dpm.thermal.high_to_low = true;
5073                         queue_thermal = true;
5074                         break;
5075                 case 233: /* GUI IDLE */
5076                         DRM_DEBUG("IH: GUI idle\n");
5077                         break;
5078                 case 244: /* DMA trap event */
5079                         if (rdev->family >= CHIP_CAYMAN) {
5080                                 DRM_DEBUG("IH: DMA1 trap\n");
5081                                 radeon_fence_process(rdev, CAYMAN_RING_TYPE_DMA1_INDEX);
5082                         }
5083                         break;
5084                 default:
5085                         DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5086                         break;
5087                 }
5088
5089                 /* wptr/rptr are in bytes! */
5090                 rptr += 16;
5091                 rptr &= rdev->ih.ptr_mask;
5092         }
5093         if (queue_hotplug)
5094                 schedule_work(&rdev->hotplug_work);
5095         if (queue_hdmi)
5096                 schedule_work(&rdev->audio_work);
5097         if (queue_thermal && rdev->pm.dpm_enabled)
5098                 schedule_work(&rdev->pm.dpm.thermal.work);
5099         rdev->ih.rptr = rptr;
5100         WREG32(IH_RB_RPTR, rdev->ih.rptr);
5101         atomic_set(&rdev->ih.lock, 0);
5102
5103         /* make sure wptr hasn't changed while processing */
5104         wptr = evergreen_get_ih_wptr(rdev);
5105         if (wptr != rptr)
5106                 goto restart_ih;
5107
5108         return IRQ_HANDLED;
5109 }
5110
/**
 * evergreen_startup - program the hw and bring up the requested rings
 * @rdev: radeon_device pointer
 *
 * Programs the MC, GART, RLC, writeback, interrupt controller and the
 * GFX/DMA/UVD rings in the order the hardware requires.  Shared by the
 * init (evergreen_init) and resume (evergreen_resume) paths.
 *
 * Returns 0 on success, negative error code on failure.
 */
static int evergreen_startup(struct radeon_device *rdev)
{
	struct radeon_ring *ring;
	int r;

	/* enable pcie gen2 link */
	evergreen_pcie_gen2_enable(rdev);
	/* enable aspm */
	evergreen_program_aspm(rdev);

	/* scratch needs to be initialized before MC */
	r = r600_vram_scratch_init(rdev);
	if (r)
		return r;

	evergreen_mc_program(rdev);

	/* DCE5 (NI) parts need MC ucode unless dpm already loaded it */
	if (ASIC_IS_DCE5(rdev) && !rdev->pm.dpm_enabled) {
		r = ni_mc_load_microcode(rdev);
		if (r) {
			DRM_ERROR("Failed to load MC firmware!\n");
			return r;
		}
	}

	/* system aperture: AGP when present, otherwise the PCIE GART */
	if (rdev->flags & RADEON_IS_AGP) {
		evergreen_agp_enable(rdev);
	} else {
		r = evergreen_pcie_gart_enable(rdev);
		if (r)
			return r;
	}
	evergreen_gpu_init(rdev);

	/* allocate rlc buffers */
	if (rdev->flags & RADEON_IS_IGP) {
		rdev->rlc.reg_list = sumo_rlc_save_restore_register_list;
		rdev->rlc.reg_list_size =
			(u32)ARRAY_SIZE(sumo_rlc_save_restore_register_list);
		rdev->rlc.cs_data = evergreen_cs_data;
		r = sumo_rlc_init(rdev);
		if (r) {
			DRM_ERROR("Failed to init rlc BOs!\n");
			return r;
		}
	}

	/* allocate wb buffer */
	r = radeon_wb_init(rdev);
	if (r)
		return r;

	r = radeon_fence_driver_start_ring(rdev, RADEON_RING_TYPE_GFX_INDEX);
	if (r) {
		dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r);
		return r;
	}

	r = radeon_fence_driver_start_ring(rdev, R600_RING_TYPE_DMA_INDEX);
	if (r) {
		dev_err(rdev->dev, "failed initializing DMA fences (%d).\n", r);
		return r;
	}

	/* UVD is optional: on any failure below just zero its ring size
	 * so the ring is skipped, rather than failing startup */
	r = uvd_v2_2_resume(rdev);
	if (!r) {
		r = radeon_fence_driver_start_ring(rdev,
						   R600_RING_TYPE_UVD_INDEX);
		if (r)
			dev_err(rdev->dev, "UVD fences init error (%d).\n", r);
	}

	if (r)
		rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size = 0;

	/* Enable IRQ */
	if (!rdev->irq.installed) {
		r = radeon_irq_kms_init(rdev);
		if (r)
			return r;
	}

	r = r600_irq_init(rdev);
	if (r) {
		DRM_ERROR("radeon: IH init failed (%d).\n", r);
		radeon_irq_kms_fini(rdev);
		return r;
	}
	evergreen_irq_set(rdev);

	ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
	r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP_RPTR_OFFSET,
			     RADEON_CP_PACKET2);
	if (r)
		return r;

	ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX];
	r = radeon_ring_init(rdev, ring, ring->ring_size, R600_WB_DMA_RPTR_OFFSET,
			     DMA_PACKET(DMA_PACKET_NOP, 0, 0));
	if (r)
		return r;

	r = evergreen_cp_load_microcode(rdev);
	if (r)
		return r;
	r = evergreen_cp_resume(rdev);
	if (r)
		return r;
	r = r600_dma_resume(rdev);
	if (r)
		return r;

	/* ring_size == 0 means UVD was disabled above or never set up */
	ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX];
	if (ring->ring_size) {
		r = radeon_ring_init(rdev, ring, ring->ring_size, 0,
				     RADEON_CP_PACKET2);
		if (!r)
			r = uvd_v1_0_init(rdev);

		if (r)
			DRM_ERROR("radeon: error initializing UVD (%d).\n", r);
	}

	r = radeon_ib_pool_init(rdev);
	if (r) {
		dev_err(rdev->dev, "IB initialization failed (%d).\n", r);
		return r;
	}

	r = r600_audio_init(rdev);
	if (r) {
		DRM_ERROR("radeon: audio init failed\n");
		return r;
	}

	return 0;
}
5248
/**
 * evergreen_resume - resume the asic to a functional state
 * @rdev: radeon_device pointer
 *
 * Resets and re-posts the asic, restores the golden registers, resumes
 * power management and re-runs the full startup sequence.
 *
 * Returns 0 on success, negative error code on failure.
 */
int evergreen_resume(struct radeon_device *rdev)
{
	int r;

	/* reset the asic, the gfx blocks are often in a bad state
	 * after the driver is unloaded or after a resume
	 */
	if (radeon_asic_reset(rdev))
		dev_warn(rdev->dev, "GPU reset failed !\n");
	/* Do not reset GPU before posting, on rv770 hw unlike on r500 hw,
	 * posting will perform necessary task to bring back GPU into good
	 * shape.
	 */
	/* post card */
	atom_asic_init(rdev->mode_info.atom_context);

	/* init golden registers */
	evergreen_init_golden_registers(rdev);

	if (rdev->pm.pm_method == PM_METHOD_DPM)
		radeon_pm_resume(rdev);

	/* accel_working must be set before startup so the rings come up */
	rdev->accel_working = true;
	r = evergreen_startup(rdev);
	if (r) {
		DRM_ERROR("evergreen startup failed on resume\n");
		rdev->accel_working = false;
		return r;
	}

	/* r is 0 here; startup succeeded */
	return r;

}
5282
/**
 * evergreen_suspend - suspend the asic
 * @rdev: radeon_device pointer
 *
 * Suspends power management, stops the audio, UVD, CP and DMA engines,
 * then disables interrupts, writeback and the PCIE GART.
 *
 * Always returns 0.
 */
int evergreen_suspend(struct radeon_device *rdev)
{
	radeon_pm_suspend(rdev);
	r600_audio_fini(rdev);
	uvd_v1_0_fini(rdev);
	radeon_uvd_suspend(rdev);
	r700_cp_stop(rdev);
	r600_dma_stop(rdev);
	evergreen_irq_suspend(rdev);
	radeon_wb_disable(rdev);
	evergreen_pcie_gart_disable(rdev);

	return 0;
}
5297
5298 /* Plan is to move initialization in that function and use
5299  * helper function so that radeon_device_init pretty much
5300  * do nothing more than calling asic specific function. This
5301  * should also allow to remove a bunch of callback function
5302  * like vram_info.
5303  */
/**
 * evergreen_init - asic specific software and hardware init
 * @rdev: radeon_device pointer
 *
 * One-time setup: BIOS fetch and post, asic reset, clock/MC/BO init,
 * firmware load, ring and IH software state, GART init, then a first
 * call to evergreen_startup().  A startup failure disables acceleration
 * but is not fatal to init itself.
 *
 * Returns 0 on success, negative error code on failure.
 */
int evergreen_init(struct radeon_device *rdev)
{
	int r;

	/* Read BIOS */
	if (!radeon_get_bios(rdev)) {
		if (ASIC_IS_AVIVO(rdev))
			return -EINVAL;
	}
	/* Must be an ATOMBIOS */
	if (!rdev->is_atom_bios) {
		dev_err(rdev->dev, "Expecting atombios for evergreen GPU\n");
		return -EINVAL;
	}
	r = radeon_atombios_init(rdev);
	if (r)
		return r;
	/* reset the asic, the gfx blocks are often in a bad state
	 * after the driver is unloaded or after a resume
	 */
	if (radeon_asic_reset(rdev))
		dev_warn(rdev->dev, "GPU reset failed !\n");
	/* Post card if necessary */
	if (!radeon_card_posted(rdev)) {
		if (!rdev->bios) {
			dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n");
			return -EINVAL;
		}
		DRM_INFO("GPU not posted. posting now...\n");
		atom_asic_init(rdev->mode_info.atom_context);
	}
	/* init golden registers */
	evergreen_init_golden_registers(rdev);
	/* Initialize scratch registers */
	r600_scratch_init(rdev);
	/* Initialize surface registers */
	radeon_surface_init(rdev);
	/* Initialize clocks */
	radeon_get_clock_info(rdev->ddev);
	/* Fence driver */
	r = radeon_fence_driver_init(rdev);
	if (r)
		return r;
	/* initialize AGP; failure is non-fatal, fall back to non-AGP */
	if (rdev->flags & RADEON_IS_AGP) {
		r = radeon_agp_init(rdev);
		if (r)
			radeon_agp_disable(rdev);
	}
	/* initialize memory controller */
	r = evergreen_mc_init(rdev);
	if (r)
		return r;
	/* Memory manager */
	r = radeon_bo_init(rdev);
	if (r)
		return r;

	/* fetch ucode if not already loaded; DCE5 parts also need MC fw */
	if (ASIC_IS_DCE5(rdev)) {
		if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw || !rdev->mc_fw) {
			r = ni_init_microcode(rdev);
			if (r) {
				DRM_ERROR("Failed to load firmware!\n");
				return r;
			}
		}
	} else {
		if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw) {
			r = r600_init_microcode(rdev);
			if (r) {
				DRM_ERROR("Failed to load firmware!\n");
				return r;
			}
		}
	}

	/* Initialize power management */
	radeon_pm_init(rdev);

	rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ring_obj = NULL;
	r600_ring_init(rdev, &rdev->ring[RADEON_RING_TYPE_GFX_INDEX], 1024 * 1024);

	rdev->ring[R600_RING_TYPE_DMA_INDEX].ring_obj = NULL;
	r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_DMA_INDEX], 64 * 1024);

	/* UVD is optional; only set up its ring if uvd init succeeded */
	r = radeon_uvd_init(rdev);
	if (!r) {
		rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_obj = NULL;
		r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_UVD_INDEX],
			       4096);
	}

	rdev->ih.ring_obj = NULL;
	r600_ih_ring_init(rdev, 64 * 1024);

	r = r600_pcie_gart_init(rdev);
	if (r)
		return r;

	rdev->accel_working = true;
	r = evergreen_startup(rdev);
	if (r) {
		/* startup failed: tear everything back down and continue
		 * without acceleration */
		dev_err(rdev->dev, "disabling GPU acceleration\n");
		r700_cp_fini(rdev);
		r600_dma_fini(rdev);
		r600_irq_fini(rdev);
		if (rdev->flags & RADEON_IS_IGP)
			sumo_rlc_fini(rdev);
		radeon_wb_fini(rdev);
		radeon_ib_pool_fini(rdev);
		radeon_irq_kms_fini(rdev);
		evergreen_pcie_gart_fini(rdev);
		rdev->accel_working = false;
	}

	/* Don't start up if the MC ucode is missing on BTC parts.
	 * The default clocks and voltages before the MC ucode
	 * is loaded are not suffient for advanced operations.
	 */
	if (ASIC_IS_DCE5(rdev)) {
		if (!rdev->mc_fw && !(rdev->flags & RADEON_IS_IGP)) {
			DRM_ERROR("radeon: MC ucode required for NI+.\n");
			return -EINVAL;
		}
	}

	return 0;
}
5432
/**
 * evergreen_fini - asic specific driver and hw teardown
 * @rdev: radeon_device pointer
 *
 * Tears down everything evergreen_init() set up — pm, audio, rings,
 * interrupts, UVD, GART, memory manager, fences, AGP, atombios — and
 * frees the BIOS copy.
 */
void evergreen_fini(struct radeon_device *rdev)
{
	radeon_pm_fini(rdev);
	r600_audio_fini(rdev);
	r700_cp_fini(rdev);
	r600_dma_fini(rdev);
	r600_irq_fini(rdev);
	if (rdev->flags & RADEON_IS_IGP)
		sumo_rlc_fini(rdev);
	radeon_wb_fini(rdev);
	radeon_ib_pool_fini(rdev);
	radeon_irq_kms_fini(rdev);
	uvd_v1_0_fini(rdev);
	radeon_uvd_fini(rdev);
	evergreen_pcie_gart_fini(rdev);
	r600_vram_scratch_fini(rdev);
	radeon_gem_fini(rdev);
	radeon_fence_driver_fini(rdev);
	radeon_agp_fini(rdev);
	radeon_bo_fini(rdev);
	radeon_atombios_fini(rdev);
	kfree(rdev->bios);
	rdev->bios = NULL;
}
5457
5458 void evergreen_pcie_gen2_enable(struct radeon_device *rdev)
5459 {
5460         u32 link_width_cntl, speed_cntl;
5461
5462         if (radeon_pcie_gen2 == 0)
5463                 return;
5464
5465         if (rdev->flags & RADEON_IS_IGP)
5466                 return;
5467
5468         if (!(rdev->flags & RADEON_IS_PCIE))
5469                 return;
5470
5471         /* x2 cards have a special sequence */
5472         if (ASIC_IS_X2(rdev))
5473                 return;
5474
5475         if ((rdev->pdev->bus->max_bus_speed != PCIE_SPEED_5_0GT) &&
5476                 (rdev->pdev->bus->max_bus_speed != PCIE_SPEED_8_0GT))
5477                 return;
5478
5479         speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5480         if (speed_cntl & LC_CURRENT_DATA_RATE) {
5481                 DRM_INFO("PCIE gen 2 link speeds already enabled\n");
5482                 return;
5483         }
5484
5485         DRM_INFO("enabling PCIE gen 2 link speeds, disable with radeon.pcie_gen2=0\n");
5486
5487         if ((speed_cntl & LC_OTHER_SIDE_EVER_SENT_GEN2) ||
5488             (speed_cntl & LC_OTHER_SIDE_SUPPORTS_GEN2)) {
5489
5490                 link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
5491                 link_width_cntl &= ~LC_UPCONFIGURE_DIS;
5492                 WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
5493
5494                 speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5495                 speed_cntl &= ~LC_TARGET_LINK_SPEED_OVERRIDE_EN;
5496                 WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
5497
5498                 speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5499                 speed_cntl |= LC_CLR_FAILED_SPD_CHANGE_CNT;
5500                 WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
5501
5502                 speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5503                 speed_cntl &= ~LC_CLR_FAILED_SPD_CHANGE_CNT;
5504                 WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
5505
5506                 speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5507                 speed_cntl |= LC_GEN2_EN_STRAP;
5508                 WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
5509
5510         } else {
5511                 link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
5512                 /* XXX: only disable it if gen1 bridge vendor == 0x111d or 0x1106 */
5513                 if (1)
5514                         link_width_cntl |= LC_UPCONFIGURE_DIS;
5515                 else
5516                         link_width_cntl &= ~LC_UPCONFIGURE_DIS;
5517                 WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
5518         }
5519 }
5520
5521 void evergreen_program_aspm(struct radeon_device *rdev)
5522 {
5523         u32 data, orig;
5524         u32 pcie_lc_cntl, pcie_lc_cntl_old;
5525         bool disable_l0s, disable_l1 = false, disable_plloff_in_l1 = false;
5526         /* fusion_platform = true
5527          * if the system is a fusion system
5528          * (APU or DGPU in a fusion system).
5529          * todo: check if the system is a fusion platform.
5530          */
5531         bool fusion_platform = false;
5532
5533         if (radeon_aspm == 0)
5534                 return;
5535
5536         if (!(rdev->flags & RADEON_IS_PCIE))
5537                 return;
5538
5539         switch (rdev->family) {
5540         case CHIP_CYPRESS:
5541         case CHIP_HEMLOCK:
5542         case CHIP_JUNIPER:
5543         case CHIP_REDWOOD:
5544         case CHIP_CEDAR:
5545         case CHIP_SUMO:
5546         case CHIP_SUMO2:
5547         case CHIP_PALM:
5548         case CHIP_ARUBA:
5549                 disable_l0s = true;
5550                 break;
5551         default:
5552                 disable_l0s = false;
5553                 break;
5554         }
5555
5556         if (rdev->flags & RADEON_IS_IGP)
5557                 fusion_platform = true; /* XXX also dGPUs in a fusion system */
5558
5559         data = orig = RREG32_PIF_PHY0(PB0_PIF_PAIRING);
5560         if (fusion_platform)
5561                 data &= ~MULTI_PIF;
5562         else
5563                 data |= MULTI_PIF;
5564         if (data != orig)
5565                 WREG32_PIF_PHY0(PB0_PIF_PAIRING, data);
5566
5567         data = orig = RREG32_PIF_PHY1(PB1_PIF_PAIRING);
5568         if (fusion_platform)
5569                 data &= ~MULTI_PIF;
5570         else
5571                 data |= MULTI_PIF;
5572         if (data != orig)
5573                 WREG32_PIF_PHY1(PB1_PIF_PAIRING, data);
5574
5575         pcie_lc_cntl = pcie_lc_cntl_old = RREG32_PCIE_PORT(PCIE_LC_CNTL);
5576         pcie_lc_cntl &= ~(LC_L0S_INACTIVITY_MASK | LC_L1_INACTIVITY_MASK);
5577         if (!disable_l0s) {
5578                 if (rdev->family >= CHIP_BARTS)
5579                         pcie_lc_cntl |= LC_L0S_INACTIVITY(7);
5580                 else
5581                         pcie_lc_cntl |= LC_L0S_INACTIVITY(3);
5582         }
5583
5584         if (!disable_l1) {
5585                 if (rdev->family >= CHIP_BARTS)
5586                         pcie_lc_cntl |= LC_L1_INACTIVITY(7);
5587                 else
5588                         pcie_lc_cntl |= LC_L1_INACTIVITY(8);
5589
5590                 if (!disable_plloff_in_l1) {
5591                         data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0);
5592                         data &= ~(PLL_POWER_STATE_IN_OFF_0_MASK | PLL_POWER_STATE_IN_TXS2_0_MASK);
5593                         data |= PLL_POWER_STATE_IN_OFF_0(7) | PLL_POWER_STATE_IN_TXS2_0(7);
5594                         if (data != orig)
5595                                 WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0, data);
5596
5597                         data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1);
5598                         data &= ~(PLL_POWER_STATE_IN_OFF_1_MASK | PLL_POWER_STATE_IN_TXS2_1_MASK);
5599                         data |= PLL_POWER_STATE_IN_OFF_1(7) | PLL_POWER_STATE_IN_TXS2_1(7);
5600                         if (data != orig)
5601                                 WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1, data);
5602
5603                         data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0);
5604                         data &= ~(PLL_POWER_STATE_IN_OFF_0_MASK | PLL_POWER_STATE_IN_TXS2_0_MASK);
5605                         data |= PLL_POWER_STATE_IN_OFF_0(7) | PLL_POWER_STATE_IN_TXS2_0(7);
5606                         if (data != orig)
5607                                 WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0, data);
5608
5609                         data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1);
5610                         data &= ~(PLL_POWER_STATE_IN_OFF_1_MASK | PLL_POWER_STATE_IN_TXS2_1_MASK);
5611                         data |= PLL_POWER_STATE_IN_OFF_1(7) | PLL_POWER_STATE_IN_TXS2_1(7);
5612                         if (data != orig)
5613                                 WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1, data);
5614
5615                         if (rdev->family >= CHIP_BARTS) {
5616                                 data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0);
5617                                 data &= ~PLL_RAMP_UP_TIME_0_MASK;
5618                                 data |= PLL_RAMP_UP_TIME_0(4);
5619                                 if (data != orig)
5620                                         WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0, data);
5621
5622                                 data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1);
5623                                 data &= ~PLL_RAMP_UP_TIME_1_MASK;
5624                                 data |= PLL_RAMP_UP_TIME_1(4);
5625                                 if (data != orig)
5626                                         WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1, data);
5627
5628                                 data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0);
5629                                 data &= ~PLL_RAMP_UP_TIME_0_MASK;
5630                                 data |= PLL_RAMP_UP_TIME_0(4);
5631                                 if (data != orig)
5632                                         WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0, data);
5633
5634                                 data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1);
5635                                 data &= ~PLL_RAMP_UP_TIME_1_MASK;
5636                                 data |= PLL_RAMP_UP_TIME_1(4);
5637                                 if (data != orig)
5638                                         WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1, data);
5639                         }
5640
5641                         data = orig = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
5642                         data &= ~LC_DYN_LANES_PWR_STATE_MASK;
5643                         data |= LC_DYN_LANES_PWR_STATE(3);
5644                         if (data != orig)
5645                                 WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, data);
5646
5647                         if (rdev->family >= CHIP_BARTS) {
5648                                 data = orig = RREG32_PIF_PHY0(PB0_PIF_CNTL);
5649                                 data &= ~LS2_EXIT_TIME_MASK;
5650                                 data |= LS2_EXIT_TIME(1);
5651                                 if (data != orig)
5652                                         WREG32_PIF_PHY0(PB0_PIF_CNTL, data);
5653
5654                                 data = orig = RREG32_PIF_PHY1(PB1_PIF_CNTL);
5655                                 data &= ~LS2_EXIT_TIME_MASK;
5656                                 data |= LS2_EXIT_TIME(1);
5657                                 if (data != orig)
5658                                         WREG32_PIF_PHY1(PB1_PIF_CNTL, data);
5659                         }
5660                 }
5661         }
5662
5663         /* evergreen parts only */
5664         if (rdev->family < CHIP_BARTS)
5665                 pcie_lc_cntl |= LC_PMI_TO_L1_DIS;
5666
5667         if (pcie_lc_cntl != pcie_lc_cntl_old)
5668                 WREG32_PCIE_PORT(PCIE_LC_CNTL, pcie_lc_cntl);
5669 }