/*
 * Copyright 2010 Advanced Micro Devices, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Alex Deucher
 */
#include <linux/delay.h>
#include <linux/errno.h>
#include <linux/firmware.h>
#include <linux/platform_device.h>
#include <linux/slab.h>

#include <drm/radeon_drm.h>

#include "radeon_asic.h"
#include "evergreend.h"
#include "evergreen_reg.h"
#include "evergreen_blit_shaders.h"
37 #define EVERGREEN_PFP_UCODE_SIZE 1120
38 #define EVERGREEN_PM4_UCODE_SIZE 1376
40 static const u32 crtc_offsets[6] =
42 EVERGREEN_CRTC0_REGISTER_OFFSET,
43 EVERGREEN_CRTC1_REGISTER_OFFSET,
44 EVERGREEN_CRTC2_REGISTER_OFFSET,
45 EVERGREEN_CRTC3_REGISTER_OFFSET,
46 EVERGREEN_CRTC4_REGISTER_OFFSET,
47 EVERGREEN_CRTC5_REGISTER_OFFSET
50 static void evergreen_gpu_init(struct radeon_device *rdev);
51 void evergreen_fini(struct radeon_device *rdev);
52 void evergreen_pcie_gen2_enable(struct radeon_device *rdev);
53 extern void cayman_cp_int_cntl_setup(struct radeon_device *rdev,
54 int ring, u32 cp_int_cntl);
56 static const u32 evergreen_golden_registers[] =
58 0x3f90, 0xffff0000, 0xff000000,
59 0x9148, 0xffff0000, 0xff000000,
60 0x3f94, 0xffff0000, 0xff000000,
61 0x914c, 0xffff0000, 0xff000000,
62 0x9b7c, 0xffffffff, 0x00000000,
63 0x8a14, 0xffffffff, 0x00000007,
64 0x8b10, 0xffffffff, 0x00000000,
65 0x960c, 0xffffffff, 0x54763210,
66 0x88c4, 0xffffffff, 0x000000c2,
67 0x88d4, 0xffffffff, 0x00000010,
68 0x8974, 0xffffffff, 0x00000000,
69 0xc78, 0x00000080, 0x00000080,
70 0x5eb4, 0xffffffff, 0x00000002,
71 0x5e78, 0xffffffff, 0x001000f0,
72 0x6104, 0x01000300, 0x00000000,
73 0x5bc0, 0x00300000, 0x00000000,
74 0x7030, 0xffffffff, 0x00000011,
75 0x7c30, 0xffffffff, 0x00000011,
76 0x10830, 0xffffffff, 0x00000011,
77 0x11430, 0xffffffff, 0x00000011,
78 0x12030, 0xffffffff, 0x00000011,
79 0x12c30, 0xffffffff, 0x00000011,
80 0xd02c, 0xffffffff, 0x08421000,
81 0x240c, 0xffffffff, 0x00000380,
82 0x8b24, 0xffffffff, 0x00ff0fff,
83 0x28a4c, 0x06000000, 0x06000000,
84 0x10c, 0x00000001, 0x00000001,
85 0x8d00, 0xffffffff, 0x100e4848,
86 0x8d04, 0xffffffff, 0x00164745,
87 0x8c00, 0xffffffff, 0xe4000003,
88 0x8c04, 0xffffffff, 0x40600060,
89 0x8c08, 0xffffffff, 0x001c001c,
90 0x8cf0, 0xffffffff, 0x08e00620,
91 0x8c20, 0xffffffff, 0x00800080,
92 0x8c24, 0xffffffff, 0x00800080,
93 0x8c18, 0xffffffff, 0x20202078,
94 0x8c1c, 0xffffffff, 0x00001010,
95 0x28350, 0xffffffff, 0x00000000,
96 0xa008, 0xffffffff, 0x00010000,
97 0x5c4, 0xffffffff, 0x00000001,
98 0x9508, 0xffffffff, 0x00000002,
99 0x913c, 0x0000000f, 0x0000000a
102 static const u32 evergreen_golden_registers2[] =
104 0x2f4c, 0xffffffff, 0x00000000,
105 0x54f4, 0xffffffff, 0x00000000,
106 0x54f0, 0xffffffff, 0x00000000,
107 0x5498, 0xffffffff, 0x00000000,
108 0x549c, 0xffffffff, 0x00000000,
109 0x5494, 0xffffffff, 0x00000000,
110 0x53cc, 0xffffffff, 0x00000000,
111 0x53c8, 0xffffffff, 0x00000000,
112 0x53c4, 0xffffffff, 0x00000000,
113 0x53c0, 0xffffffff, 0x00000000,
114 0x53bc, 0xffffffff, 0x00000000,
115 0x53b8, 0xffffffff, 0x00000000,
116 0x53b4, 0xffffffff, 0x00000000,
117 0x53b0, 0xffffffff, 0x00000000
120 static const u32 cypress_mgcg_init[] =
122 0x802c, 0xffffffff, 0xc0000000,
123 0x5448, 0xffffffff, 0x00000100,
124 0x55e4, 0xffffffff, 0x00000100,
125 0x160c, 0xffffffff, 0x00000100,
126 0x5644, 0xffffffff, 0x00000100,
127 0xc164, 0xffffffff, 0x00000100,
128 0x8a18, 0xffffffff, 0x00000100,
129 0x897c, 0xffffffff, 0x06000100,
130 0x8b28, 0xffffffff, 0x00000100,
131 0x9144, 0xffffffff, 0x00000100,
132 0x9a60, 0xffffffff, 0x00000100,
133 0x9868, 0xffffffff, 0x00000100,
134 0x8d58, 0xffffffff, 0x00000100,
135 0x9510, 0xffffffff, 0x00000100,
136 0x949c, 0xffffffff, 0x00000100,
137 0x9654, 0xffffffff, 0x00000100,
138 0x9030, 0xffffffff, 0x00000100,
139 0x9034, 0xffffffff, 0x00000100,
140 0x9038, 0xffffffff, 0x00000100,
141 0x903c, 0xffffffff, 0x00000100,
142 0x9040, 0xffffffff, 0x00000100,
143 0xa200, 0xffffffff, 0x00000100,
144 0xa204, 0xffffffff, 0x00000100,
145 0xa208, 0xffffffff, 0x00000100,
146 0xa20c, 0xffffffff, 0x00000100,
147 0x971c, 0xffffffff, 0x00000100,
148 0x977c, 0xffffffff, 0x00000100,
149 0x3f80, 0xffffffff, 0x00000100,
150 0xa210, 0xffffffff, 0x00000100,
151 0xa214, 0xffffffff, 0x00000100,
152 0x4d8, 0xffffffff, 0x00000100,
153 0x9784, 0xffffffff, 0x00000100,
154 0x9698, 0xffffffff, 0x00000100,
155 0x4d4, 0xffffffff, 0x00000200,
156 0x30cc, 0xffffffff, 0x00000100,
157 0xd0c0, 0xffffffff, 0xff000100,
158 0x802c, 0xffffffff, 0x40000000,
159 0x915c, 0xffffffff, 0x00010000,
160 0x9160, 0xffffffff, 0x00030002,
161 0x9178, 0xffffffff, 0x00070000,
162 0x917c, 0xffffffff, 0x00030002,
163 0x9180, 0xffffffff, 0x00050004,
164 0x918c, 0xffffffff, 0x00010006,
165 0x9190, 0xffffffff, 0x00090008,
166 0x9194, 0xffffffff, 0x00070000,
167 0x9198, 0xffffffff, 0x00030002,
168 0x919c, 0xffffffff, 0x00050004,
169 0x91a8, 0xffffffff, 0x00010006,
170 0x91ac, 0xffffffff, 0x00090008,
171 0x91b0, 0xffffffff, 0x00070000,
172 0x91b4, 0xffffffff, 0x00030002,
173 0x91b8, 0xffffffff, 0x00050004,
174 0x91c4, 0xffffffff, 0x00010006,
175 0x91c8, 0xffffffff, 0x00090008,
176 0x91cc, 0xffffffff, 0x00070000,
177 0x91d0, 0xffffffff, 0x00030002,
178 0x91d4, 0xffffffff, 0x00050004,
179 0x91e0, 0xffffffff, 0x00010006,
180 0x91e4, 0xffffffff, 0x00090008,
181 0x91e8, 0xffffffff, 0x00000000,
182 0x91ec, 0xffffffff, 0x00070000,
183 0x91f0, 0xffffffff, 0x00030002,
184 0x91f4, 0xffffffff, 0x00050004,
185 0x9200, 0xffffffff, 0x00010006,
186 0x9204, 0xffffffff, 0x00090008,
187 0x9208, 0xffffffff, 0x00070000,
188 0x920c, 0xffffffff, 0x00030002,
189 0x9210, 0xffffffff, 0x00050004,
190 0x921c, 0xffffffff, 0x00010006,
191 0x9220, 0xffffffff, 0x00090008,
192 0x9224, 0xffffffff, 0x00070000,
193 0x9228, 0xffffffff, 0x00030002,
194 0x922c, 0xffffffff, 0x00050004,
195 0x9238, 0xffffffff, 0x00010006,
196 0x923c, 0xffffffff, 0x00090008,
197 0x9240, 0xffffffff, 0x00070000,
198 0x9244, 0xffffffff, 0x00030002,
199 0x9248, 0xffffffff, 0x00050004,
200 0x9254, 0xffffffff, 0x00010006,
201 0x9258, 0xffffffff, 0x00090008,
202 0x925c, 0xffffffff, 0x00070000,
203 0x9260, 0xffffffff, 0x00030002,
204 0x9264, 0xffffffff, 0x00050004,
205 0x9270, 0xffffffff, 0x00010006,
206 0x9274, 0xffffffff, 0x00090008,
207 0x9278, 0xffffffff, 0x00070000,
208 0x927c, 0xffffffff, 0x00030002,
209 0x9280, 0xffffffff, 0x00050004,
210 0x928c, 0xffffffff, 0x00010006,
211 0x9290, 0xffffffff, 0x00090008,
212 0x9294, 0xffffffff, 0x00000000,
213 0x929c, 0xffffffff, 0x00000001,
214 0x802c, 0xffffffff, 0x40010000,
215 0x915c, 0xffffffff, 0x00010000,
216 0x9160, 0xffffffff, 0x00030002,
217 0x9178, 0xffffffff, 0x00070000,
218 0x917c, 0xffffffff, 0x00030002,
219 0x9180, 0xffffffff, 0x00050004,
220 0x918c, 0xffffffff, 0x00010006,
221 0x9190, 0xffffffff, 0x00090008,
222 0x9194, 0xffffffff, 0x00070000,
223 0x9198, 0xffffffff, 0x00030002,
224 0x919c, 0xffffffff, 0x00050004,
225 0x91a8, 0xffffffff, 0x00010006,
226 0x91ac, 0xffffffff, 0x00090008,
227 0x91b0, 0xffffffff, 0x00070000,
228 0x91b4, 0xffffffff, 0x00030002,
229 0x91b8, 0xffffffff, 0x00050004,
230 0x91c4, 0xffffffff, 0x00010006,
231 0x91c8, 0xffffffff, 0x00090008,
232 0x91cc, 0xffffffff, 0x00070000,
233 0x91d0, 0xffffffff, 0x00030002,
234 0x91d4, 0xffffffff, 0x00050004,
235 0x91e0, 0xffffffff, 0x00010006,
236 0x91e4, 0xffffffff, 0x00090008,
237 0x91e8, 0xffffffff, 0x00000000,
238 0x91ec, 0xffffffff, 0x00070000,
239 0x91f0, 0xffffffff, 0x00030002,
240 0x91f4, 0xffffffff, 0x00050004,
241 0x9200, 0xffffffff, 0x00010006,
242 0x9204, 0xffffffff, 0x00090008,
243 0x9208, 0xffffffff, 0x00070000,
244 0x920c, 0xffffffff, 0x00030002,
245 0x9210, 0xffffffff, 0x00050004,
246 0x921c, 0xffffffff, 0x00010006,
247 0x9220, 0xffffffff, 0x00090008,
248 0x9224, 0xffffffff, 0x00070000,
249 0x9228, 0xffffffff, 0x00030002,
250 0x922c, 0xffffffff, 0x00050004,
251 0x9238, 0xffffffff, 0x00010006,
252 0x923c, 0xffffffff, 0x00090008,
253 0x9240, 0xffffffff, 0x00070000,
254 0x9244, 0xffffffff, 0x00030002,
255 0x9248, 0xffffffff, 0x00050004,
256 0x9254, 0xffffffff, 0x00010006,
257 0x9258, 0xffffffff, 0x00090008,
258 0x925c, 0xffffffff, 0x00070000,
259 0x9260, 0xffffffff, 0x00030002,
260 0x9264, 0xffffffff, 0x00050004,
261 0x9270, 0xffffffff, 0x00010006,
262 0x9274, 0xffffffff, 0x00090008,
263 0x9278, 0xffffffff, 0x00070000,
264 0x927c, 0xffffffff, 0x00030002,
265 0x9280, 0xffffffff, 0x00050004,
266 0x928c, 0xffffffff, 0x00010006,
267 0x9290, 0xffffffff, 0x00090008,
268 0x9294, 0xffffffff, 0x00000000,
269 0x929c, 0xffffffff, 0x00000001,
270 0x802c, 0xffffffff, 0xc0000000
273 static const u32 redwood_mgcg_init[] =
275 0x802c, 0xffffffff, 0xc0000000,
276 0x5448, 0xffffffff, 0x00000100,
277 0x55e4, 0xffffffff, 0x00000100,
278 0x160c, 0xffffffff, 0x00000100,
279 0x5644, 0xffffffff, 0x00000100,
280 0xc164, 0xffffffff, 0x00000100,
281 0x8a18, 0xffffffff, 0x00000100,
282 0x897c, 0xffffffff, 0x06000100,
283 0x8b28, 0xffffffff, 0x00000100,
284 0x9144, 0xffffffff, 0x00000100,
285 0x9a60, 0xffffffff, 0x00000100,
286 0x9868, 0xffffffff, 0x00000100,
287 0x8d58, 0xffffffff, 0x00000100,
288 0x9510, 0xffffffff, 0x00000100,
289 0x949c, 0xffffffff, 0x00000100,
290 0x9654, 0xffffffff, 0x00000100,
291 0x9030, 0xffffffff, 0x00000100,
292 0x9034, 0xffffffff, 0x00000100,
293 0x9038, 0xffffffff, 0x00000100,
294 0x903c, 0xffffffff, 0x00000100,
295 0x9040, 0xffffffff, 0x00000100,
296 0xa200, 0xffffffff, 0x00000100,
297 0xa204, 0xffffffff, 0x00000100,
298 0xa208, 0xffffffff, 0x00000100,
299 0xa20c, 0xffffffff, 0x00000100,
300 0x971c, 0xffffffff, 0x00000100,
301 0x977c, 0xffffffff, 0x00000100,
302 0x3f80, 0xffffffff, 0x00000100,
303 0xa210, 0xffffffff, 0x00000100,
304 0xa214, 0xffffffff, 0x00000100,
305 0x4d8, 0xffffffff, 0x00000100,
306 0x9784, 0xffffffff, 0x00000100,
307 0x9698, 0xffffffff, 0x00000100,
308 0x4d4, 0xffffffff, 0x00000200,
309 0x30cc, 0xffffffff, 0x00000100,
310 0xd0c0, 0xffffffff, 0xff000100,
311 0x802c, 0xffffffff, 0x40000000,
312 0x915c, 0xffffffff, 0x00010000,
313 0x9160, 0xffffffff, 0x00030002,
314 0x9178, 0xffffffff, 0x00070000,
315 0x917c, 0xffffffff, 0x00030002,
316 0x9180, 0xffffffff, 0x00050004,
317 0x918c, 0xffffffff, 0x00010006,
318 0x9190, 0xffffffff, 0x00090008,
319 0x9194, 0xffffffff, 0x00070000,
320 0x9198, 0xffffffff, 0x00030002,
321 0x919c, 0xffffffff, 0x00050004,
322 0x91a8, 0xffffffff, 0x00010006,
323 0x91ac, 0xffffffff, 0x00090008,
324 0x91b0, 0xffffffff, 0x00070000,
325 0x91b4, 0xffffffff, 0x00030002,
326 0x91b8, 0xffffffff, 0x00050004,
327 0x91c4, 0xffffffff, 0x00010006,
328 0x91c8, 0xffffffff, 0x00090008,
329 0x91cc, 0xffffffff, 0x00070000,
330 0x91d0, 0xffffffff, 0x00030002,
331 0x91d4, 0xffffffff, 0x00050004,
332 0x91e0, 0xffffffff, 0x00010006,
333 0x91e4, 0xffffffff, 0x00090008,
334 0x91e8, 0xffffffff, 0x00000000,
335 0x91ec, 0xffffffff, 0x00070000,
336 0x91f0, 0xffffffff, 0x00030002,
337 0x91f4, 0xffffffff, 0x00050004,
338 0x9200, 0xffffffff, 0x00010006,
339 0x9204, 0xffffffff, 0x00090008,
340 0x9294, 0xffffffff, 0x00000000,
341 0x929c, 0xffffffff, 0x00000001,
342 0x802c, 0xffffffff, 0xc0000000
345 static const u32 cedar_golden_registers[] =
347 0x3f90, 0xffff0000, 0xff000000,
348 0x9148, 0xffff0000, 0xff000000,
349 0x3f94, 0xffff0000, 0xff000000,
350 0x914c, 0xffff0000, 0xff000000,
351 0x9b7c, 0xffffffff, 0x00000000,
352 0x8a14, 0xffffffff, 0x00000007,
353 0x8b10, 0xffffffff, 0x00000000,
354 0x960c, 0xffffffff, 0x54763210,
355 0x88c4, 0xffffffff, 0x000000c2,
356 0x88d4, 0xffffffff, 0x00000000,
357 0x8974, 0xffffffff, 0x00000000,
358 0xc78, 0x00000080, 0x00000080,
359 0x5eb4, 0xffffffff, 0x00000002,
360 0x5e78, 0xffffffff, 0x001000f0,
361 0x6104, 0x01000300, 0x00000000,
362 0x5bc0, 0x00300000, 0x00000000,
363 0x7030, 0xffffffff, 0x00000011,
364 0x7c30, 0xffffffff, 0x00000011,
365 0x10830, 0xffffffff, 0x00000011,
366 0x11430, 0xffffffff, 0x00000011,
367 0xd02c, 0xffffffff, 0x08421000,
368 0x240c, 0xffffffff, 0x00000380,
369 0x8b24, 0xffffffff, 0x00ff0fff,
370 0x28a4c, 0x06000000, 0x06000000,
371 0x10c, 0x00000001, 0x00000001,
372 0x8d00, 0xffffffff, 0x100e4848,
373 0x8d04, 0xffffffff, 0x00164745,
374 0x8c00, 0xffffffff, 0xe4000003,
375 0x8c04, 0xffffffff, 0x40600060,
376 0x8c08, 0xffffffff, 0x001c001c,
377 0x8cf0, 0xffffffff, 0x08e00410,
378 0x8c20, 0xffffffff, 0x00800080,
379 0x8c24, 0xffffffff, 0x00800080,
380 0x8c18, 0xffffffff, 0x20202078,
381 0x8c1c, 0xffffffff, 0x00001010,
382 0x28350, 0xffffffff, 0x00000000,
383 0xa008, 0xffffffff, 0x00010000,
384 0x5c4, 0xffffffff, 0x00000001,
385 0x9508, 0xffffffff, 0x00000002
388 static const u32 cedar_mgcg_init[] =
390 0x802c, 0xffffffff, 0xc0000000,
391 0x5448, 0xffffffff, 0x00000100,
392 0x55e4, 0xffffffff, 0x00000100,
393 0x160c, 0xffffffff, 0x00000100,
394 0x5644, 0xffffffff, 0x00000100,
395 0xc164, 0xffffffff, 0x00000100,
396 0x8a18, 0xffffffff, 0x00000100,
397 0x897c, 0xffffffff, 0x06000100,
398 0x8b28, 0xffffffff, 0x00000100,
399 0x9144, 0xffffffff, 0x00000100,
400 0x9a60, 0xffffffff, 0x00000100,
401 0x9868, 0xffffffff, 0x00000100,
402 0x8d58, 0xffffffff, 0x00000100,
403 0x9510, 0xffffffff, 0x00000100,
404 0x949c, 0xffffffff, 0x00000100,
405 0x9654, 0xffffffff, 0x00000100,
406 0x9030, 0xffffffff, 0x00000100,
407 0x9034, 0xffffffff, 0x00000100,
408 0x9038, 0xffffffff, 0x00000100,
409 0x903c, 0xffffffff, 0x00000100,
410 0x9040, 0xffffffff, 0x00000100,
411 0xa200, 0xffffffff, 0x00000100,
412 0xa204, 0xffffffff, 0x00000100,
413 0xa208, 0xffffffff, 0x00000100,
414 0xa20c, 0xffffffff, 0x00000100,
415 0x971c, 0xffffffff, 0x00000100,
416 0x977c, 0xffffffff, 0x00000100,
417 0x3f80, 0xffffffff, 0x00000100,
418 0xa210, 0xffffffff, 0x00000100,
419 0xa214, 0xffffffff, 0x00000100,
420 0x4d8, 0xffffffff, 0x00000100,
421 0x9784, 0xffffffff, 0x00000100,
422 0x9698, 0xffffffff, 0x00000100,
423 0x4d4, 0xffffffff, 0x00000200,
424 0x30cc, 0xffffffff, 0x00000100,
425 0xd0c0, 0xffffffff, 0xff000100,
426 0x802c, 0xffffffff, 0x40000000,
427 0x915c, 0xffffffff, 0x00010000,
428 0x9178, 0xffffffff, 0x00050000,
429 0x917c, 0xffffffff, 0x00030002,
430 0x918c, 0xffffffff, 0x00010004,
431 0x9190, 0xffffffff, 0x00070006,
432 0x9194, 0xffffffff, 0x00050000,
433 0x9198, 0xffffffff, 0x00030002,
434 0x91a8, 0xffffffff, 0x00010004,
435 0x91ac, 0xffffffff, 0x00070006,
436 0x91e8, 0xffffffff, 0x00000000,
437 0x9294, 0xffffffff, 0x00000000,
438 0x929c, 0xffffffff, 0x00000001,
439 0x802c, 0xffffffff, 0xc0000000
442 static const u32 juniper_mgcg_init[] =
444 0x802c, 0xffffffff, 0xc0000000,
445 0x5448, 0xffffffff, 0x00000100,
446 0x55e4, 0xffffffff, 0x00000100,
447 0x160c, 0xffffffff, 0x00000100,
448 0x5644, 0xffffffff, 0x00000100,
449 0xc164, 0xffffffff, 0x00000100,
450 0x8a18, 0xffffffff, 0x00000100,
451 0x897c, 0xffffffff, 0x06000100,
452 0x8b28, 0xffffffff, 0x00000100,
453 0x9144, 0xffffffff, 0x00000100,
454 0x9a60, 0xffffffff, 0x00000100,
455 0x9868, 0xffffffff, 0x00000100,
456 0x8d58, 0xffffffff, 0x00000100,
457 0x9510, 0xffffffff, 0x00000100,
458 0x949c, 0xffffffff, 0x00000100,
459 0x9654, 0xffffffff, 0x00000100,
460 0x9030, 0xffffffff, 0x00000100,
461 0x9034, 0xffffffff, 0x00000100,
462 0x9038, 0xffffffff, 0x00000100,
463 0x903c, 0xffffffff, 0x00000100,
464 0x9040, 0xffffffff, 0x00000100,
465 0xa200, 0xffffffff, 0x00000100,
466 0xa204, 0xffffffff, 0x00000100,
467 0xa208, 0xffffffff, 0x00000100,
468 0xa20c, 0xffffffff, 0x00000100,
469 0x971c, 0xffffffff, 0x00000100,
470 0xd0c0, 0xffffffff, 0xff000100,
471 0x802c, 0xffffffff, 0x40000000,
472 0x915c, 0xffffffff, 0x00010000,
473 0x9160, 0xffffffff, 0x00030002,
474 0x9178, 0xffffffff, 0x00070000,
475 0x917c, 0xffffffff, 0x00030002,
476 0x9180, 0xffffffff, 0x00050004,
477 0x918c, 0xffffffff, 0x00010006,
478 0x9190, 0xffffffff, 0x00090008,
479 0x9194, 0xffffffff, 0x00070000,
480 0x9198, 0xffffffff, 0x00030002,
481 0x919c, 0xffffffff, 0x00050004,
482 0x91a8, 0xffffffff, 0x00010006,
483 0x91ac, 0xffffffff, 0x00090008,
484 0x91b0, 0xffffffff, 0x00070000,
485 0x91b4, 0xffffffff, 0x00030002,
486 0x91b8, 0xffffffff, 0x00050004,
487 0x91c4, 0xffffffff, 0x00010006,
488 0x91c8, 0xffffffff, 0x00090008,
489 0x91cc, 0xffffffff, 0x00070000,
490 0x91d0, 0xffffffff, 0x00030002,
491 0x91d4, 0xffffffff, 0x00050004,
492 0x91e0, 0xffffffff, 0x00010006,
493 0x91e4, 0xffffffff, 0x00090008,
494 0x91e8, 0xffffffff, 0x00000000,
495 0x91ec, 0xffffffff, 0x00070000,
496 0x91f0, 0xffffffff, 0x00030002,
497 0x91f4, 0xffffffff, 0x00050004,
498 0x9200, 0xffffffff, 0x00010006,
499 0x9204, 0xffffffff, 0x00090008,
500 0x9208, 0xffffffff, 0x00070000,
501 0x920c, 0xffffffff, 0x00030002,
502 0x9210, 0xffffffff, 0x00050004,
503 0x921c, 0xffffffff, 0x00010006,
504 0x9220, 0xffffffff, 0x00090008,
505 0x9224, 0xffffffff, 0x00070000,
506 0x9228, 0xffffffff, 0x00030002,
507 0x922c, 0xffffffff, 0x00050004,
508 0x9238, 0xffffffff, 0x00010006,
509 0x923c, 0xffffffff, 0x00090008,
510 0x9240, 0xffffffff, 0x00070000,
511 0x9244, 0xffffffff, 0x00030002,
512 0x9248, 0xffffffff, 0x00050004,
513 0x9254, 0xffffffff, 0x00010006,
514 0x9258, 0xffffffff, 0x00090008,
515 0x925c, 0xffffffff, 0x00070000,
516 0x9260, 0xffffffff, 0x00030002,
517 0x9264, 0xffffffff, 0x00050004,
518 0x9270, 0xffffffff, 0x00010006,
519 0x9274, 0xffffffff, 0x00090008,
520 0x9278, 0xffffffff, 0x00070000,
521 0x927c, 0xffffffff, 0x00030002,
522 0x9280, 0xffffffff, 0x00050004,
523 0x928c, 0xffffffff, 0x00010006,
524 0x9290, 0xffffffff, 0x00090008,
525 0x9294, 0xffffffff, 0x00000000,
526 0x929c, 0xffffffff, 0x00000001,
527 0x802c, 0xffffffff, 0xc0000000,
528 0x977c, 0xffffffff, 0x00000100,
529 0x3f80, 0xffffffff, 0x00000100,
530 0xa210, 0xffffffff, 0x00000100,
531 0xa214, 0xffffffff, 0x00000100,
532 0x4d8, 0xffffffff, 0x00000100,
533 0x9784, 0xffffffff, 0x00000100,
534 0x9698, 0xffffffff, 0x00000100,
535 0x4d4, 0xffffffff, 0x00000200,
536 0x30cc, 0xffffffff, 0x00000100,
537 0x802c, 0xffffffff, 0xc0000000
540 static const u32 supersumo_golden_registers[] =
542 0x5eb4, 0xffffffff, 0x00000002,
543 0x5c4, 0xffffffff, 0x00000001,
544 0x7030, 0xffffffff, 0x00000011,
545 0x7c30, 0xffffffff, 0x00000011,
546 0x6104, 0x01000300, 0x00000000,
547 0x5bc0, 0x00300000, 0x00000000,
548 0x8c04, 0xffffffff, 0x40600060,
549 0x8c08, 0xffffffff, 0x001c001c,
550 0x8c20, 0xffffffff, 0x00800080,
551 0x8c24, 0xffffffff, 0x00800080,
552 0x8c18, 0xffffffff, 0x20202078,
553 0x8c1c, 0xffffffff, 0x00001010,
554 0x918c, 0xffffffff, 0x00010006,
555 0x91a8, 0xffffffff, 0x00010006,
556 0x91c4, 0xffffffff, 0x00010006,
557 0x91e0, 0xffffffff, 0x00010006,
558 0x9200, 0xffffffff, 0x00010006,
559 0x9150, 0xffffffff, 0x6e944040,
560 0x917c, 0xffffffff, 0x00030002,
561 0x9180, 0xffffffff, 0x00050004,
562 0x9198, 0xffffffff, 0x00030002,
563 0x919c, 0xffffffff, 0x00050004,
564 0x91b4, 0xffffffff, 0x00030002,
565 0x91b8, 0xffffffff, 0x00050004,
566 0x91d0, 0xffffffff, 0x00030002,
567 0x91d4, 0xffffffff, 0x00050004,
568 0x91f0, 0xffffffff, 0x00030002,
569 0x91f4, 0xffffffff, 0x00050004,
570 0x915c, 0xffffffff, 0x00010000,
571 0x9160, 0xffffffff, 0x00030002,
572 0x3f90, 0xffff0000, 0xff000000,
573 0x9178, 0xffffffff, 0x00070000,
574 0x9194, 0xffffffff, 0x00070000,
575 0x91b0, 0xffffffff, 0x00070000,
576 0x91cc, 0xffffffff, 0x00070000,
577 0x91ec, 0xffffffff, 0x00070000,
578 0x9148, 0xffff0000, 0xff000000,
579 0x9190, 0xffffffff, 0x00090008,
580 0x91ac, 0xffffffff, 0x00090008,
581 0x91c8, 0xffffffff, 0x00090008,
582 0x91e4, 0xffffffff, 0x00090008,
583 0x9204, 0xffffffff, 0x00090008,
584 0x3f94, 0xffff0000, 0xff000000,
585 0x914c, 0xffff0000, 0xff000000,
586 0x929c, 0xffffffff, 0x00000001,
587 0x8a18, 0xffffffff, 0x00000100,
588 0x8b28, 0xffffffff, 0x00000100,
589 0x9144, 0xffffffff, 0x00000100,
590 0x5644, 0xffffffff, 0x00000100,
591 0x9b7c, 0xffffffff, 0x00000000,
592 0x8030, 0xffffffff, 0x0000100a,
593 0x8a14, 0xffffffff, 0x00000007,
594 0x8b24, 0xffffffff, 0x00ff0fff,
595 0x8b10, 0xffffffff, 0x00000000,
596 0x28a4c, 0x06000000, 0x06000000,
597 0x4d8, 0xffffffff, 0x00000100,
598 0x913c, 0xffff000f, 0x0100000a,
599 0x960c, 0xffffffff, 0x54763210,
600 0x88c4, 0xffffffff, 0x000000c2,
601 0x88d4, 0xffffffff, 0x00000010,
602 0x8974, 0xffffffff, 0x00000000,
603 0xc78, 0x00000080, 0x00000080,
604 0x5e78, 0xffffffff, 0x001000f0,
605 0xd02c, 0xffffffff, 0x08421000,
606 0xa008, 0xffffffff, 0x00010000,
607 0x8d00, 0xffffffff, 0x100e4848,
608 0x8d04, 0xffffffff, 0x00164745,
609 0x8c00, 0xffffffff, 0xe4000003,
610 0x8cf0, 0x1fffffff, 0x08e00620,
611 0x28350, 0xffffffff, 0x00000000,
612 0x9508, 0xffffffff, 0x00000002
615 static const u32 sumo_golden_registers[] =
617 0x900c, 0x00ffffff, 0x0017071f,
618 0x8c18, 0xffffffff, 0x10101060,
619 0x8c1c, 0xffffffff, 0x00001010,
620 0x8c30, 0x0000000f, 0x00000005,
621 0x9688, 0x0000000f, 0x00000007
624 static const u32 wrestler_golden_registers[] =
626 0x5eb4, 0xffffffff, 0x00000002,
627 0x5c4, 0xffffffff, 0x00000001,
628 0x7030, 0xffffffff, 0x00000011,
629 0x7c30, 0xffffffff, 0x00000011,
630 0x6104, 0x01000300, 0x00000000,
631 0x5bc0, 0x00300000, 0x00000000,
632 0x918c, 0xffffffff, 0x00010006,
633 0x91a8, 0xffffffff, 0x00010006,
634 0x9150, 0xffffffff, 0x6e944040,
635 0x917c, 0xffffffff, 0x00030002,
636 0x9198, 0xffffffff, 0x00030002,
637 0x915c, 0xffffffff, 0x00010000,
638 0x3f90, 0xffff0000, 0xff000000,
639 0x9178, 0xffffffff, 0x00070000,
640 0x9194, 0xffffffff, 0x00070000,
641 0x9148, 0xffff0000, 0xff000000,
642 0x9190, 0xffffffff, 0x00090008,
643 0x91ac, 0xffffffff, 0x00090008,
644 0x3f94, 0xffff0000, 0xff000000,
645 0x914c, 0xffff0000, 0xff000000,
646 0x929c, 0xffffffff, 0x00000001,
647 0x8a18, 0xffffffff, 0x00000100,
648 0x8b28, 0xffffffff, 0x00000100,
649 0x9144, 0xffffffff, 0x00000100,
650 0x9b7c, 0xffffffff, 0x00000000,
651 0x8030, 0xffffffff, 0x0000100a,
652 0x8a14, 0xffffffff, 0x00000001,
653 0x8b24, 0xffffffff, 0x00ff0fff,
654 0x8b10, 0xffffffff, 0x00000000,
655 0x28a4c, 0x06000000, 0x06000000,
656 0x4d8, 0xffffffff, 0x00000100,
657 0x913c, 0xffff000f, 0x0100000a,
658 0x960c, 0xffffffff, 0x54763210,
659 0x88c4, 0xffffffff, 0x000000c2,
660 0x88d4, 0xffffffff, 0x00000010,
661 0x8974, 0xffffffff, 0x00000000,
662 0xc78, 0x00000080, 0x00000080,
663 0x5e78, 0xffffffff, 0x001000f0,
664 0xd02c, 0xffffffff, 0x08421000,
665 0xa008, 0xffffffff, 0x00010000,
666 0x8d00, 0xffffffff, 0x100e4848,
667 0x8d04, 0xffffffff, 0x00164745,
668 0x8c00, 0xffffffff, 0xe4000003,
669 0x8cf0, 0x1fffffff, 0x08e00410,
670 0x28350, 0xffffffff, 0x00000000,
671 0x9508, 0xffffffff, 0x00000002,
672 0x900c, 0xffffffff, 0x0017071f,
673 0x8c18, 0xffffffff, 0x10101060,
674 0x8c1c, 0xffffffff, 0x00001010
677 static const u32 barts_golden_registers[] =
679 0x5eb4, 0xffffffff, 0x00000002,
680 0x5e78, 0x8f311ff1, 0x001000f0,
681 0x3f90, 0xffff0000, 0xff000000,
682 0x9148, 0xffff0000, 0xff000000,
683 0x3f94, 0xffff0000, 0xff000000,
684 0x914c, 0xffff0000, 0xff000000,
685 0xc78, 0x00000080, 0x00000080,
686 0xbd4, 0x70073777, 0x00010001,
687 0xd02c, 0xbfffff1f, 0x08421000,
688 0xd0b8, 0x03773777, 0x02011003,
689 0x5bc0, 0x00200000, 0x50100000,
690 0x98f8, 0x33773777, 0x02011003,
691 0x98fc, 0xffffffff, 0x76543210,
692 0x7030, 0x31000311, 0x00000011,
693 0x2f48, 0x00000007, 0x02011003,
694 0x6b28, 0x00000010, 0x00000012,
695 0x7728, 0x00000010, 0x00000012,
696 0x10328, 0x00000010, 0x00000012,
697 0x10f28, 0x00000010, 0x00000012,
698 0x11b28, 0x00000010, 0x00000012,
699 0x12728, 0x00000010, 0x00000012,
700 0x240c, 0x000007ff, 0x00000380,
701 0x8a14, 0xf000001f, 0x00000007,
702 0x8b24, 0x3fff3fff, 0x00ff0fff,
703 0x8b10, 0x0000ff0f, 0x00000000,
704 0x28a4c, 0x07ffffff, 0x06000000,
705 0x10c, 0x00000001, 0x00010003,
706 0xa02c, 0xffffffff, 0x0000009b,
707 0x913c, 0x0000000f, 0x0100000a,
708 0x8d00, 0xffff7f7f, 0x100e4848,
709 0x8d04, 0x00ffffff, 0x00164745,
710 0x8c00, 0xfffc0003, 0xe4000003,
711 0x8c04, 0xf8ff00ff, 0x40600060,
712 0x8c08, 0x00ff00ff, 0x001c001c,
713 0x8cf0, 0x1fff1fff, 0x08e00620,
714 0x8c20, 0x0fff0fff, 0x00800080,
715 0x8c24, 0x0fff0fff, 0x00800080,
716 0x8c18, 0xffffffff, 0x20202078,
717 0x8c1c, 0x0000ffff, 0x00001010,
718 0x28350, 0x00000f01, 0x00000000,
719 0x9508, 0x3700001f, 0x00000002,
720 0x960c, 0xffffffff, 0x54763210,
721 0x88c4, 0x001f3ae3, 0x000000c2,
722 0x88d4, 0x0000001f, 0x00000010,
723 0x8974, 0xffffffff, 0x00000000
726 static const u32 turks_golden_registers[] =
728 0x5eb4, 0xffffffff, 0x00000002,
729 0x5e78, 0x8f311ff1, 0x001000f0,
730 0x8c8, 0x00003000, 0x00001070,
731 0x8cc, 0x000fffff, 0x00040035,
732 0x3f90, 0xffff0000, 0xfff00000,
733 0x9148, 0xffff0000, 0xfff00000,
734 0x3f94, 0xffff0000, 0xfff00000,
735 0x914c, 0xffff0000, 0xfff00000,
736 0xc78, 0x00000080, 0x00000080,
737 0xbd4, 0x00073007, 0x00010002,
738 0xd02c, 0xbfffff1f, 0x08421000,
739 0xd0b8, 0x03773777, 0x02010002,
740 0x5bc0, 0x00200000, 0x50100000,
741 0x98f8, 0x33773777, 0x00010002,
742 0x98fc, 0xffffffff, 0x33221100,
743 0x7030, 0x31000311, 0x00000011,
744 0x2f48, 0x33773777, 0x00010002,
745 0x6b28, 0x00000010, 0x00000012,
746 0x7728, 0x00000010, 0x00000012,
747 0x10328, 0x00000010, 0x00000012,
748 0x10f28, 0x00000010, 0x00000012,
749 0x11b28, 0x00000010, 0x00000012,
750 0x12728, 0x00000010, 0x00000012,
751 0x240c, 0x000007ff, 0x00000380,
752 0x8a14, 0xf000001f, 0x00000007,
753 0x8b24, 0x3fff3fff, 0x00ff0fff,
754 0x8b10, 0x0000ff0f, 0x00000000,
755 0x28a4c, 0x07ffffff, 0x06000000,
756 0x10c, 0x00000001, 0x00010003,
757 0xa02c, 0xffffffff, 0x0000009b,
758 0x913c, 0x0000000f, 0x0100000a,
759 0x8d00, 0xffff7f7f, 0x100e4848,
760 0x8d04, 0x00ffffff, 0x00164745,
761 0x8c00, 0xfffc0003, 0xe4000003,
762 0x8c04, 0xf8ff00ff, 0x40600060,
763 0x8c08, 0x00ff00ff, 0x001c001c,
764 0x8cf0, 0x1fff1fff, 0x08e00410,
765 0x8c20, 0x0fff0fff, 0x00800080,
766 0x8c24, 0x0fff0fff, 0x00800080,
767 0x8c18, 0xffffffff, 0x20202078,
768 0x8c1c, 0x0000ffff, 0x00001010,
769 0x28350, 0x00000f01, 0x00000000,
770 0x9508, 0x3700001f, 0x00000002,
771 0x960c, 0xffffffff, 0x54763210,
772 0x88c4, 0x001f3ae3, 0x000000c2,
773 0x88d4, 0x0000001f, 0x00000010,
774 0x8974, 0xffffffff, 0x00000000
777 static const u32 caicos_golden_registers[] =
779 0x5eb4, 0xffffffff, 0x00000002,
780 0x5e78, 0x8f311ff1, 0x001000f0,
781 0x8c8, 0x00003420, 0x00001450,
782 0x8cc, 0x000fffff, 0x00040035,
783 0x3f90, 0xffff0000, 0xfffc0000,
784 0x9148, 0xffff0000, 0xfffc0000,
785 0x3f94, 0xffff0000, 0xfffc0000,
786 0x914c, 0xffff0000, 0xfffc0000,
787 0xc78, 0x00000080, 0x00000080,
788 0xbd4, 0x00073007, 0x00010001,
789 0xd02c, 0xbfffff1f, 0x08421000,
790 0xd0b8, 0x03773777, 0x02010001,
791 0x5bc0, 0x00200000, 0x50100000,
792 0x98f8, 0x33773777, 0x02010001,
793 0x98fc, 0xffffffff, 0x33221100,
794 0x7030, 0x31000311, 0x00000011,
795 0x2f48, 0x33773777, 0x02010001,
796 0x6b28, 0x00000010, 0x00000012,
797 0x7728, 0x00000010, 0x00000012,
798 0x10328, 0x00000010, 0x00000012,
799 0x10f28, 0x00000010, 0x00000012,
800 0x11b28, 0x00000010, 0x00000012,
801 0x12728, 0x00000010, 0x00000012,
802 0x240c, 0x000007ff, 0x00000380,
803 0x8a14, 0xf000001f, 0x00000001,
804 0x8b24, 0x3fff3fff, 0x00ff0fff,
805 0x8b10, 0x0000ff0f, 0x00000000,
806 0x28a4c, 0x07ffffff, 0x06000000,
807 0x10c, 0x00000001, 0x00010003,
808 0xa02c, 0xffffffff, 0x0000009b,
809 0x913c, 0x0000000f, 0x0100000a,
810 0x8d00, 0xffff7f7f, 0x100e4848,
811 0x8d04, 0x00ffffff, 0x00164745,
812 0x8c00, 0xfffc0003, 0xe4000003,
813 0x8c04, 0xf8ff00ff, 0x40600060,
814 0x8c08, 0x00ff00ff, 0x001c001c,
815 0x8cf0, 0x1fff1fff, 0x08e00410,
816 0x8c20, 0x0fff0fff, 0x00800080,
817 0x8c24, 0x0fff0fff, 0x00800080,
818 0x8c18, 0xffffffff, 0x20202078,
819 0x8c1c, 0x0000ffff, 0x00001010,
820 0x28350, 0x00000f01, 0x00000000,
821 0x9508, 0x3700001f, 0x00000002,
822 0x960c, 0xffffffff, 0x54763210,
823 0x88c4, 0x001f3ae3, 0x000000c2,
824 0x88d4, 0x0000001f, 0x00000010,
825 0x8974, 0xffffffff, 0x00000000
828 static void evergreen_init_golden_registers(struct radeon_device *rdev)
830 switch (rdev->family) {
833 radeon_program_register_sequence(rdev,
834 evergreen_golden_registers,
835 (const u32)ARRAY_SIZE(evergreen_golden_registers));
836 radeon_program_register_sequence(rdev,
837 evergreen_golden_registers2,
838 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
839 radeon_program_register_sequence(rdev,
841 (const u32)ARRAY_SIZE(cypress_mgcg_init));
844 radeon_program_register_sequence(rdev,
845 evergreen_golden_registers,
846 (const u32)ARRAY_SIZE(evergreen_golden_registers));
847 radeon_program_register_sequence(rdev,
848 evergreen_golden_registers2,
849 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
850 radeon_program_register_sequence(rdev,
852 (const u32)ARRAY_SIZE(juniper_mgcg_init));
855 radeon_program_register_sequence(rdev,
856 evergreen_golden_registers,
857 (const u32)ARRAY_SIZE(evergreen_golden_registers));
858 radeon_program_register_sequence(rdev,
859 evergreen_golden_registers2,
860 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
861 radeon_program_register_sequence(rdev,
863 (const u32)ARRAY_SIZE(redwood_mgcg_init));
866 radeon_program_register_sequence(rdev,
867 cedar_golden_registers,
868 (const u32)ARRAY_SIZE(cedar_golden_registers));
869 radeon_program_register_sequence(rdev,
870 evergreen_golden_registers2,
871 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
872 radeon_program_register_sequence(rdev,
874 (const u32)ARRAY_SIZE(cedar_mgcg_init));
877 radeon_program_register_sequence(rdev,
878 wrestler_golden_registers,
879 (const u32)ARRAY_SIZE(wrestler_golden_registers));
882 radeon_program_register_sequence(rdev,
883 supersumo_golden_registers,
884 (const u32)ARRAY_SIZE(supersumo_golden_registers));
887 radeon_program_register_sequence(rdev,
888 supersumo_golden_registers,
889 (const u32)ARRAY_SIZE(supersumo_golden_registers));
890 radeon_program_register_sequence(rdev,
891 sumo_golden_registers,
892 (const u32)ARRAY_SIZE(sumo_golden_registers));
895 radeon_program_register_sequence(rdev,
896 barts_golden_registers,
897 (const u32)ARRAY_SIZE(barts_golden_registers));
900 radeon_program_register_sequence(rdev,
901 turks_golden_registers,
902 (const u32)ARRAY_SIZE(turks_golden_registers));
905 radeon_program_register_sequence(rdev,
906 caicos_golden_registers,
907 (const u32)ARRAY_SIZE(caicos_golden_registers));
/* evergreen_tiling_fields - decode packed RADEON_TILING_* flags into the
 * EVERGREEN_ADDR_SURF_* register field encodings for bank width, bank
 * height, macro tile aspect and tile split.
 * NOTE(review): the switch statements mapping raw values 1/2/4/8 to the
 * register encodings are only partially visible in this chunk.
 */
914 void evergreen_tiling_fields(unsigned tiling_flags, unsigned *bankw,
915 unsigned *bankh, unsigned *mtaspect,
916 unsigned *tile_split)
/* pull the raw field values out of the packed tiling_flags word */
918 *bankw = (tiling_flags >> RADEON_TILING_EG_BANKW_SHIFT) & RADEON_TILING_EG_BANKW_MASK;
919 *bankh = (tiling_flags >> RADEON_TILING_EG_BANKH_SHIFT) & RADEON_TILING_EG_BANKH_MASK;
920 *mtaspect = (tiling_flags >> RADEON_TILING_EG_MACRO_TILE_ASPECT_SHIFT) & RADEON_TILING_EG_MACRO_TILE_ASPECT_MASK;
921 *tile_split = (tiling_flags >> RADEON_TILING_EG_TILE_SPLIT_SHIFT) & RADEON_TILING_EG_TILE_SPLIT_MASK;
/* translate raw bank width (1/2/4/8) to the hw register encoding */
924 case 1: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_1; break;
925 case 2: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_2; break;
926 case 4: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_4; break;
927 case 8: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_8; break;
/* translate raw bank height (1/2/4/8) to the hw register encoding */
931 case 1: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_1; break;
932 case 2: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_2; break;
933 case 4: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_4; break;
934 case 8: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_8; break;
/* translate raw macro tile aspect (1/2/4/8) to the hw register encoding */
938 case 1: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_1; break;
939 case 2: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_2; break;
940 case 4: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_4; break;
941 case 8: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_8; break;
/* sumo_set_uvd_clock - program one UVD clock (vclk or dclk) through the
 * given CNTL/STATUS register pair and wait for the divider update to
 * complete.
 * Fix: "÷rs" was mojibake for "&dividers" (the "&divide" prefix of the
 * identifier was decoded as the HTML entity for the division sign).
 */
945 static int sumo_set_uvd_clock(struct radeon_device *rdev, u32 clock,
946 u32 cntl_reg, u32 status_reg)
949 struct atom_clock_dividers dividers;
/* query the atombios tables for divider settings for this clock */
951 r = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
952 clock, false, &dividers);
956 WREG32_P(cntl_reg, dividers.post_div, ~(DCLK_DIR_CNTL_EN|DCLK_DIVIDER_MASK));
/* poll (bounded, 100 iterations) for the status bit to latch */
958 for (i = 0; i < 100; i++) {
959 if (RREG32(status_reg) & DCLK_STATUS)
/* sumo_set_uvd_clocks - set the UVD vclk and dclk, then record both
 * (converted to MHz) in CG_SCRATCH1: vclk in the low 16 bits, dclk in
 * the high 16 bits.
 * NOTE(review): the /100 conversion implies vclk/dclk are in 10 kHz
 * units — confirm against callers.
 */
969 int sumo_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
972 u32 cg_scratch = RREG32(CG_SCRATCH1);
974 r = sumo_set_uvd_clock(rdev, vclk, CG_VCLK_CNTL, CG_VCLK_STATUS);
/* stash vclk in the low half of the scratch register */
977 cg_scratch &= 0xffff0000;
978 cg_scratch |= vclk / 100; /* MHz */
980 r = sumo_set_uvd_clock(rdev, dclk, CG_DCLK_CNTL, CG_DCLK_STATUS);
/* stash dclk in the high half of the scratch register */
983 cg_scratch &= 0x0000ffff;
984 cg_scratch |= (dclk / 100) << 16; /* MHz */
987 WREG32(CG_SCRATCH1, cg_scratch);
/* evergreen_set_uvd_clocks - program the UPLL to produce the requested
 * UVD vclk/dclk.  Sequence: bypass the PLL, (optionally sleep it if the
 * caller asked for 0 clocks), compute dividers, program them, let the
 * PLL settle, then switch VCLK/DCLK back to the PLL outputs.
 * NOTE(review): the order of these register writes is a hw programming
 * sequence — do not reorder.
 */
992 int evergreen_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
994 /* start off with something large */
995 unsigned fb_div = 0, vclk_div = 0, dclk_div = 0;
998 /* bypass vclk and dclk with bclk */
999 WREG32_P(CG_UPLL_FUNC_CNTL_2,
1000 VCLK_SRC_SEL(1) | DCLK_SRC_SEL(1),
1001 ~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));
1003 /* put PLL in bypass mode */
1004 WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_BYPASS_EN_MASK, ~UPLL_BYPASS_EN_MASK);
/* vclk == 0 or dclk == 0 means "power the PLL down", not an error */
1006 if (!vclk || !dclk) {
1007 /* keep the Bypass mode, put PLL to sleep */
1008 WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
/* compute feedback/post dividers within the given VCO and range limits */
1012 r = radeon_uvd_calc_upll_dividers(rdev, vclk, dclk, 125000, 250000,
1013 16384, 0x03FFFFFF, 0, 128, 5,
1014 &fb_div, &vclk_div, &dclk_div);
1018 /* set VCO_MODE to 1 */
1019 WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_VCO_MODE_MASK, ~UPLL_VCO_MODE_MASK);
1021 /* toggle UPLL_SLEEP to 1 then back to 0 */
1022 WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
1023 WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_SLEEP_MASK);
1025 /* deassert UPLL_RESET */
1026 WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);
/* handshake with the SMC before reprogramming the dividers */
1030 r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
1034 /* assert UPLL_RESET again */
1035 WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_RESET_MASK, ~UPLL_RESET_MASK);
1037 /* disable spread spectrum. */
1038 WREG32_P(CG_UPLL_SPREAD_SPECTRUM, 0, ~SSEN_MASK);
1040 /* set feedback divider */
1041 WREG32_P(CG_UPLL_FUNC_CNTL_3, UPLL_FB_DIV(fb_div), ~UPLL_FB_DIV_MASK);
1043 /* set ref divider to 0 */
1044 WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_REF_DIV_MASK);
/* ISPARE9 selects a PLL operating range depending on the fb divider */
1046 if (fb_div < 307200)
1047 WREG32_P(CG_UPLL_FUNC_CNTL_4, 0, ~UPLL_SPARE_ISPARE9);
1049 WREG32_P(CG_UPLL_FUNC_CNTL_4, UPLL_SPARE_ISPARE9, ~UPLL_SPARE_ISPARE9);
1051 /* set PDIV_A and PDIV_B */
1052 WREG32_P(CG_UPLL_FUNC_CNTL_2,
1053 UPLL_PDIV_A(vclk_div) | UPLL_PDIV_B(dclk_div),
1054 ~(UPLL_PDIV_A_MASK | UPLL_PDIV_B_MASK));
1056 /* give the PLL some time to settle */
1059 /* deassert PLL_RESET */
1060 WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);
1064 /* switch from bypass mode to normal mode */
1065 WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_BYPASS_EN_MASK);
1067 r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
1071 /* switch VCLK and DCLK selection */
1072 WREG32_P(CG_UPLL_FUNC_CNTL_2,
1073 VCLK_SRC_SEL(2) | DCLK_SRC_SEL(2),
1074 ~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));
/* evergreen_fix_pci_max_read_req_size - sanitize the PCIe Device
 * Control Max_Read_Request_Size field if the BIOS/OS left it at an
 * invalid or unsupported encoding.
 */
1081 void evergreen_fix_pci_max_read_req_size(struct radeon_device *rdev)
1086 err = pcie_capability_read_word(rdev->pdev, PCI_EXP_DEVCTL, &ctl);
/* READRQ is the 3-bit encoded field at DEVCTL bits 14:12 */
1090 v = (ctl & PCI_EXP_DEVCTL_READRQ) >> 12;
1092 /* if bios or OS sets MAX_READ_REQUEST_SIZE to an invalid value, fix it
1093 * to avoid hangs or performance issues
1095 if ((v == 0) || (v == 6) || (v == 7)) {
1096 ctl &= ~PCI_EXP_DEVCTL_READRQ;
1098 pcie_capability_write_word(rdev->pdev, PCI_EXP_DEVCTL, ctl);
/* dce4_is_in_vblank - return true if the given crtc is currently inside
 * its vertical blank period (reads the CRTC status register).
 */
1102 static bool dce4_is_in_vblank(struct radeon_device *rdev, int crtc)
1104 if (RREG32(EVERGREEN_CRTC_STATUS + crtc_offsets[crtc]) & EVERGREEN_CRTC_V_BLANK)
/* dce4_is_counter_moving - detect whether the crtc scanout position
 * counter is advancing by sampling the position register twice and
 * comparing the two reads.
 */
1110 static bool dce4_is_counter_moving(struct radeon_device *rdev, int crtc)
1114 pos1 = RREG32(EVERGREEN_CRTC_STATUS_POSITION + crtc_offsets[crtc]);
1115 pos2 = RREG32(EVERGREEN_CRTC_STATUS_POSITION + crtc_offsets[crtc]);
1124 * dce4_wait_for_vblank - vblank wait asic callback.
1126 * @rdev: radeon_device pointer
1127 * @crtc: crtc to wait for vblank on
1129 * Wait for vblank on the requested crtc (evergreen+).
1131 void dce4_wait_for_vblank(struct radeon_device *rdev, int crtc)
/* bail out for crtcs this asic doesn't have */
1135 if (crtc >= rdev->num_crtc)
/* nothing to wait for if the crtc isn't even enabled */
1138 if (!(RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[crtc]) & EVERGREEN_CRTC_MASTER_EN))
1141 /* depending on when we hit vblank, we may be close to active; if so,
1142 * wait for another frame.
1144 while (dce4_is_in_vblank(rdev, crtc)) {
/* every 100 iterations, give up if the position counter is stuck */
1145 if (i++ % 100 == 0) {
1146 if (!dce4_is_counter_moving(rdev, crtc))
/* now wait for the next vblank to actually start */
1151 while (!dce4_is_in_vblank(rdev, crtc)) {
1152 if (i++ % 100 == 0) {
1153 if (!dce4_is_counter_moving(rdev, crtc))
1160 * evergreen_pre_page_flip - pre-pageflip callback.
1162 * @rdev: radeon_device pointer
1163 * @crtc: crtc to prepare for pageflip on
1165 * Pre-pageflip callback (evergreen+).
1166 * Enables the pageflip irq (vblank irq).
1168 void evergreen_pre_page_flip(struct radeon_device *rdev, int crtc)
1170 /* enable the pflip int */
1171 radeon_irq_kms_pflip_irq_get(rdev, crtc);
1175 * evergreen_post_page_flip - post-pageflip callback.
1177 * @rdev: radeon_device pointer
1178 * @crtc: crtc to cleanup pageflip on
1180 * Post-pageflip callback (evergreen+).
1181 * Disables the pageflip irq (vblank irq).
1183 void evergreen_post_page_flip(struct radeon_device *rdev, int crtc)
1185 /* disable the pflip int */
1186 radeon_irq_kms_pflip_irq_put(rdev, crtc);
1190 * evergreen_page_flip - pageflip callback.
1192 * @rdev: radeon_device pointer
1193 * @crtc_id: crtc to cleanup pageflip on
1194 * @crtc_base: new address of the crtc (GPU MC address)
1196 * Does the actual pageflip (evergreen+).
1197 * During vblank we take the crtc lock and wait for the update_pending
1198 * bit to go high, when it does, we release the lock, and allow the
1199 * double buffered update to take place.
1200 * Returns the current update pending status.
1202 u32 evergreen_page_flip(struct radeon_device *rdev, int crtc_id, u64 crtc_base)
1204 struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id];
1205 u32 tmp = RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset);
1208 /* Lock the graphics update lock */
1209 tmp |= EVERGREEN_GRPH_UPDATE_LOCK;
1210 WREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset, tmp);
1212 /* update the scanout addresses */
1213 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
1214 upper_32_bits(crtc_base));
1215 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
/* program the primary surface the same way as the secondary */
1218 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
1219 upper_32_bits(crtc_base));
1220 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
1223 /* Wait for update_pending to go high. */
1224 for (i = 0; i < rdev->usec_timeout; i++) {
1225 if (RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset) & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING)
1229 DRM_DEBUG("Update pending now high. Unlocking vupdate_lock.\n");
1231 /* Unlock the lock, so double-buffering can take place inside vblank */
1232 tmp &= ~EVERGREEN_GRPH_UPDATE_LOCK;
1233 WREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset, tmp);
1235 /* Return current update_pending status: */
1236 return RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset) & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING;
1239 /* get temperature in millidegrees */
/* evergreen_get_temp - read the on-die thermal sensor.  Juniper uses a
 * separate sensor (TS0 + calibration offset); other evergreen parts use
 * the multiplexed ASIC_T field.  Result is in millidegrees Celsius.
 */
1240 int evergreen_get_temp(struct radeon_device *rdev)
1243 int actual_temp = 0;
1245 if (rdev->family == CHIP_JUNIPER) {
1246 toffset = (RREG32(CG_THERMAL_CTRL) & TOFFSET_MASK) >>
1248 temp = (RREG32(CG_TS0_STATUS) & TS0_ADC_DOUT_MASK) >>
/* toffset bit 8 is the sign of the calibration offset */
1251 if (toffset & 0x100)
1252 actual_temp = temp / 2 - (0x200 - toffset);
1254 actual_temp = temp / 2 + toffset;
1256 actual_temp = actual_temp * 1000;
1259 temp = (RREG32(CG_MULT_THERMAL_STATUS) & ASIC_T_MASK) >>
/* ASIC_T is a signed 9-bit quantity; sign-extend negative readings */
1264 else if (temp & 0x200)
1266 else if (temp & 0x100) {
1267 actual_temp = temp & 0x1ff;
1268 actual_temp |= ~0x1ff;
1270 actual_temp = temp & 0xff;
1272 actual_temp = (actual_temp * 1000) / 2;
/* sumo_get_temp - read the sumo thermal sensor; raw value is offset by
 * 49 degrees, result is returned in millidegrees Celsius.
 */
1278 int sumo_get_temp(struct radeon_device *rdev)
1280 u32 temp = RREG32(CG_THERMAL_STATUS) & 0xff;
1281 int actual_temp = temp - 49;
1283 return actual_temp * 1000;
1287 * sumo_pm_init_profile - Initialize power profiles callback.
1289 * @rdev: radeon_device pointer
1291 * Initialize the power states used in profile mode
1292 * (sumo, trinity, SI).
1293 * Used for profile mode only.
1295 void sumo_pm_init_profile(struct radeon_device *rdev)
/* default profile: use the default power state for dpms on and off */
1300 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
1301 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
1302 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
1303 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 0;
/* pick the battery state on mobile parts, performance otherwise */
1306 if (rdev->flags & RADEON_IS_MOBILITY)
1307 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
1309 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
/* low single-head profile */
1311 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
1312 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
1313 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
1314 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
/* low multi-head profile */
1316 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
1317 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
1318 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
1319 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
/* mid single-head profile */
1321 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
1322 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
1323 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
1324 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 0;
/* mid multi-head profile */
1326 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
1327 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
1328 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
1329 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 0;
/* high profiles always use the performance state's top clock mode */
1332 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1333 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
1334 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
1335 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
1336 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx =
1337 rdev->pm.power_state[idx].num_clock_modes - 1;
1339 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
1340 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
1341 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
1342 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx =
1343 rdev->pm.power_state[idx].num_clock_modes - 1;
1347 * btc_pm_init_profile - Initialize power profiles callback.
1349 * @rdev: radeon_device pointer
1351 * Initialize the power states used in profile mode
1353 * Used for profile mode only.
1355 void btc_pm_init_profile(struct radeon_device *rdev)
/* default profile: default power state, clock mode 2 when dpms on */
1360 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
1361 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
1362 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
1363 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 2;
1364 /* starting with BTC, there is one state that is used for both
1365 * MH and SH. Difference is that we always use the high clock index for
1368 if (rdev->flags & RADEON_IS_MOBILITY)
1369 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
1371 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
/* single-head profiles: clock mode 0/1/2 for low/mid/high */
1373 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
1374 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
1375 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
1376 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
1378 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
1379 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
1380 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
1381 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 1;
1383 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
1384 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
1385 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
1386 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx = 2;
/* multi-head profiles: same state, same clock mode progression */
1388 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
1389 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
1390 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
1391 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
1393 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
1394 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
1395 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
1396 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 1;
1398 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
1399 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
1400 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
1401 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx = 2;
1405 * evergreen_pm_misc - set additional pm hw parameters callback.
1407 * @rdev: radeon_device pointer
1409 * Set non-clock parameters associated with a power state
1410 * (voltage, etc.) (evergreen+).
1412 void evergreen_pm_misc(struct radeon_device *rdev)
1414 int req_ps_idx = rdev->pm.requested_power_state_index;
1415 int req_cm_idx = rdev->pm.requested_clock_mode_index;
1416 struct radeon_power_state *ps = &rdev->pm.power_state[req_ps_idx];
1417 struct radeon_voltage *voltage = &ps->clock_info[req_cm_idx].voltage;
1419 if (voltage->type == VOLTAGE_SW) {
1420 /* 0xff01 is a flag rather than an actual voltage */
1421 if (voltage->voltage == 0xff01)
/* only touch the regulator if vddc actually needs to change */
1423 if (voltage->voltage && (voltage->voltage != rdev->pm.current_vddc)) {
1424 radeon_atom_set_voltage(rdev, voltage->voltage, SET_VOLTAGE_TYPE_ASIC_VDDC);
1425 rdev->pm.current_vddc = voltage->voltage;
1426 DRM_DEBUG("Setting: vddc: %d\n", voltage->voltage);
1429 /* starting with BTC, there is one state that is used for both
1430 * MH and SH. Difference is that we always use the high clock index for
1433 if ((rdev->pm.pm_method == PM_METHOD_PROFILE) &&
1434 (rdev->family >= CHIP_BARTS) &&
1435 rdev->pm.active_crtc_count &&
1436 ((rdev->pm.profile_index == PM_PROFILE_MID_MH_IDX) ||
1437 (rdev->pm.profile_index == PM_PROFILE_LOW_MH_IDX)))
1438 voltage = &rdev->pm.power_state[req_ps_idx].
1439 clock_info[rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx].voltage;
1441 /* 0xff01 is a flag rather than an actual voltage */
1442 if (voltage->vddci == 0xff01)
/* likewise, only update vddci when it differs from the current value */
1444 if (voltage->vddci && (voltage->vddci != rdev->pm.current_vddci)) {
1445 radeon_atom_set_voltage(rdev, voltage->vddci, SET_VOLTAGE_TYPE_ASIC_VDDCI);
1446 rdev->pm.current_vddci = voltage->vddci;
1447 DRM_DEBUG("Setting: vddci: %d\n", voltage->vddci);
1453 * evergreen_pm_prepare - pre-power state change callback.
1455 * @rdev: radeon_device pointer
1457 * Prepare for a power state change (evergreen+).
1459 void evergreen_pm_prepare(struct radeon_device *rdev)
1461 struct drm_device *ddev = rdev->ddev;
1462 struct drm_crtc *crtc;
1463 struct radeon_crtc *radeon_crtc;
1466 /* disable any active CRTCs */
1467 list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
1468 radeon_crtc = to_radeon_crtc(crtc);
1469 if (radeon_crtc->enabled) {
/* block display read requests while clocks are reprogrammed */
1470 tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
1471 tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
1472 WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
1478 * evergreen_pm_finish - post-power state change callback.
1480 * @rdev: radeon_device pointer
1482 * Clean up after a power state change (evergreen+).
1484 void evergreen_pm_finish(struct radeon_device *rdev)
1486 struct drm_device *ddev = rdev->ddev;
1487 struct drm_crtc *crtc;
1488 struct radeon_crtc *radeon_crtc;
1491 /* enable any active CRTCs */
1492 list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
1493 radeon_crtc = to_radeon_crtc(crtc);
1494 if (radeon_crtc->enabled) {
/* re-enable display read requests (mirror of evergreen_pm_prepare) */
1495 tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
1496 tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
1497 WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
1503 * evergreen_hpd_sense - hpd sense callback.
1505 * @rdev: radeon_device pointer
1506 * @hpd: hpd (hotplug detect) pin
1508 * Checks if a digital monitor is connected (evergreen+).
1509 * Returns true if connected, false if not connected.
1511 bool evergreen_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd)
1513 bool connected = false;
/* one SENSE status register per hpd pin; check the pin we were given */
1517 if (RREG32(DC_HPD1_INT_STATUS) & DC_HPDx_SENSE)
1521 if (RREG32(DC_HPD2_INT_STATUS) & DC_HPDx_SENSE)
1525 if (RREG32(DC_HPD3_INT_STATUS) & DC_HPDx_SENSE)
1529 if (RREG32(DC_HPD4_INT_STATUS) & DC_HPDx_SENSE)
1533 if (RREG32(DC_HPD5_INT_STATUS) & DC_HPDx_SENSE)
1537 if (RREG32(DC_HPD6_INT_STATUS) & DC_HPDx_SENSE)
1548 * evergreen_hpd_set_polarity - hpd set polarity callback.
1550 * @rdev: radeon_device pointer
1551 * @hpd: hpd (hotplug detect) pin
1553 * Set the polarity of the hpd pin (evergreen+).
1555 void evergreen_hpd_set_polarity(struct radeon_device *rdev,
1556 enum radeon_hpd_id hpd)
1559 bool connected = evergreen_hpd_sense(rdev, hpd);
/* for each pin: interrupt on disconnect when connected, and vice versa */
1563 tmp = RREG32(DC_HPD1_INT_CONTROL);
1565 tmp &= ~DC_HPDx_INT_POLARITY;
1567 tmp |= DC_HPDx_INT_POLARITY;
1568 WREG32(DC_HPD1_INT_CONTROL, tmp);
1571 tmp = RREG32(DC_HPD2_INT_CONTROL);
1573 tmp &= ~DC_HPDx_INT_POLARITY;
1575 tmp |= DC_HPDx_INT_POLARITY;
1576 WREG32(DC_HPD2_INT_CONTROL, tmp);
1579 tmp = RREG32(DC_HPD3_INT_CONTROL);
1581 tmp &= ~DC_HPDx_INT_POLARITY;
1583 tmp |= DC_HPDx_INT_POLARITY;
1584 WREG32(DC_HPD3_INT_CONTROL, tmp);
1587 tmp = RREG32(DC_HPD4_INT_CONTROL);
1589 tmp &= ~DC_HPDx_INT_POLARITY;
1591 tmp |= DC_HPDx_INT_POLARITY;
1592 WREG32(DC_HPD4_INT_CONTROL, tmp);
1595 tmp = RREG32(DC_HPD5_INT_CONTROL);
1597 tmp &= ~DC_HPDx_INT_POLARITY;
1599 tmp |= DC_HPDx_INT_POLARITY;
1600 WREG32(DC_HPD5_INT_CONTROL, tmp);
1603 tmp = RREG32(DC_HPD6_INT_CONTROL);
1605 tmp &= ~DC_HPDx_INT_POLARITY;
1607 tmp |= DC_HPDx_INT_POLARITY;
1608 WREG32(DC_HPD6_INT_CONTROL, tmp);
1616 * evergreen_hpd_init - hpd setup callback.
1618 * @rdev: radeon_device pointer
1620 * Setup the hpd pins used by the card (evergreen+).
1621 * Enable the pin, set the polarity, and enable the hpd interrupts.
1623 void evergreen_hpd_init(struct radeon_device *rdev)
1625 struct drm_device *dev = rdev->ddev;
1626 struct drm_connector *connector;
1627 unsigned enabled = 0;
/* pin enable + connection/rx-int debounce timers, shared by all pins */
1628 u32 tmp = DC_HPDx_CONNECTION_TIMER(0x9c4) |
1629 DC_HPDx_RX_INT_TIMER(0xfa) | DC_HPDx_EN;
1631 list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
1632 struct radeon_connector *radeon_connector = to_radeon_connector(connector);
1634 if (connector->connector_type == DRM_MODE_CONNECTOR_eDP ||
1635 connector->connector_type == DRM_MODE_CONNECTOR_LVDS) {
1636 /* don't try to enable hpd on eDP or LVDS avoid breaking the
1637 * aux dp channel on imac and help (but not completely fix)
1638 * https://bugzilla.redhat.com/show_bug.cgi?id=726143
1639 * also avoid interrupt storms during dpms.
1643 switch (radeon_connector->hpd.hpd) {
1645 WREG32(DC_HPD1_CONTROL, tmp);
1648 WREG32(DC_HPD2_CONTROL, tmp);
1651 WREG32(DC_HPD3_CONTROL, tmp);
1654 WREG32(DC_HPD4_CONTROL, tmp);
1657 WREG32(DC_HPD5_CONTROL, tmp);
1660 WREG32(DC_HPD6_CONTROL, tmp);
/* record the pin in the enable mask passed to the irq layer below */
1665 radeon_hpd_set_polarity(rdev, radeon_connector->hpd.hpd);
1666 enabled |= 1 << radeon_connector->hpd.hpd;
1668 radeon_irq_kms_enable_hpd(rdev, enabled);
1672 * evergreen_hpd_fini - hpd tear down callback.
1674 * @rdev: radeon_device pointer
1676 * Tear down the hpd pins used by the card (evergreen+).
1677 * Disable the hpd interrupts.
1679 void evergreen_hpd_fini(struct radeon_device *rdev)
1681 struct drm_device *dev = rdev->ddev;
1682 struct drm_connector *connector;
1683 unsigned disabled = 0;
1685 list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
1686 struct radeon_connector *radeon_connector = to_radeon_connector(connector);
/* write 0 to each pin's control register to disable it */
1687 switch (radeon_connector->hpd.hpd) {
1689 WREG32(DC_HPD1_CONTROL, 0);
1692 WREG32(DC_HPD2_CONTROL, 0);
1695 WREG32(DC_HPD3_CONTROL, 0);
1698 WREG32(DC_HPD4_CONTROL, 0);
1701 WREG32(DC_HPD5_CONTROL, 0);
1704 WREG32(DC_HPD6_CONTROL, 0);
/* accumulate the mask of pins whose irqs we hand back to the irq layer */
1709 disabled |= 1 << radeon_connector->hpd.hpd;
1711 radeon_irq_kms_disable_hpd(rdev, disabled);
1714 /* watermark setup */
/* evergreen_line_buffer_adjust - pick a line buffer split for the crtc
 * and program DC_LB_MEMORY_SPLIT (plus DMIF buffer allocation on
 * DCE4.1/DCE5).  Returns the line buffer size allocated to this pipe.
 */
1716 static u32 evergreen_line_buffer_adjust(struct radeon_device *rdev,
1717 struct radeon_crtc *radeon_crtc,
1718 struct drm_display_mode *mode,
1719 struct drm_display_mode *other_mode)
1721 u32 tmp, buffer_alloc, i;
1722 u32 pipe_offset = radeon_crtc->crtc_id * 0x20;
1725 * There are 3 line buffers, each one shared by 2 display controllers.
1726 * DC_LB_MEMORY_SPLIT controls how that line buffer is shared between
1727 * the display controllers. The partitioning is done via one of four
1728 * preset allocations specified in bits 2:0:
1729 * first display controller
1730 * 0 - first half of lb (3840 * 2)
1731 * 1 - first 3/4 of lb (5760 * 2)
1732 * 2 - whole lb (7680 * 2), other crtc must be disabled
1733 * 3 - first 1/4 of lb (1920 * 2)
1734 * second display controller
1735 * 4 - second half of lb (3840 * 2)
1736 * 5 - second 3/4 of lb (5760 * 2)
1737 * 6 - whole lb (7680 * 2), other crtc must be disabled
1738 * 7 - last 1/4 of lb (1920 * 2)
1740 /* this can get tricky if we have two large displays on a paired group
1741 * of crtcs. Ideally for multiple large displays we'd assign them to
1742 * non-linked crtcs for maximum line buffer allocation.
1744 if (radeon_crtc->base.enabled && mode) {
1749 tmp = 2; /* whole */
1757 /* second controller of the pair uses second half of the lb */
1758 if (radeon_crtc->crtc_id % 2)
1760 WREG32(DC_LB_MEMORY_SPLIT + radeon_crtc->crtc_offset, tmp);
/* DCE4.1/DCE5 also need DMIF buffers allocated per pipe; wait for
 * the hw to acknowledge the allocation */
1762 if (ASIC_IS_DCE41(rdev) || ASIC_IS_DCE5(rdev)) {
1763 WREG32(PIPE0_DMIF_BUFFER_CONTROL + pipe_offset,
1764 DMIF_BUFFERS_ALLOCATED(buffer_alloc));
1765 for (i = 0; i < rdev->usec_timeout; i++) {
1766 if (RREG32(PIPE0_DMIF_BUFFER_CONTROL + pipe_offset) &
1767 DMIF_BUFFERS_ALLOCATED_COMPLETED)
/* translate the chosen split into the lb size returned to the caller */
1773 if (radeon_crtc->base.enabled && mode) {
1778 if (ASIC_IS_DCE5(rdev))
1784 if (ASIC_IS_DCE5(rdev))
1790 if (ASIC_IS_DCE5(rdev))
1796 if (ASIC_IS_DCE5(rdev))
1803 /* controller not enabled, so no lb used */
/* evergreen_get_number_of_dram_channels - decode the NOOFCHAN field of
 * MC_SHARED_CHMAP into a dram channel count.
 */
1807 u32 evergreen_get_number_of_dram_channels(struct radeon_device *rdev)
1809 u32 tmp = RREG32(MC_SHARED_CHMAP);
1811 switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
/* evergreen_wm_params - inputs for the display watermark calculations
 * below (bandwidths, mode timings and scaling state for one crtc).
 */
1824 struct evergreen_wm_params {
1825 u32 dram_channels; /* number of dram channels */
1826 u32 yclk; /* bandwidth per dram data pin in kHz */
1827 u32 sclk; /* engine clock in kHz */
1828 u32 disp_clk; /* display clock in kHz */
1829 u32 src_width; /* viewport width */
1830 u32 active_time; /* active display time in ns */
1831 u32 blank_time; /* blank time in ns */
1832 bool interlaced; /* mode is interlaced */
1833 fixed20_12 vsc; /* vertical scale ratio */
1834 u32 num_heads; /* number of active crtcs */
1835 u32 bytes_per_pixel; /* bytes per pixel display + overlay */
1836 u32 lb_size; /* line buffer allocated to pipe */
1837 u32 vtaps; /* vertical scaler taps */
/* evergreen_dram_bandwidth - raw dram bandwidth in MB/s:
 * yclk(MHz) * 4 bytes/pin * channels * 0.7 efficiency.
 */
1840 static u32 evergreen_dram_bandwidth(struct evergreen_wm_params *wm)
1842 /* Calculate DRAM Bandwidth and the part allocated to display. */
1843 fixed20_12 dram_efficiency; /* 0.7 */
1844 fixed20_12 yclk, dram_channels, bandwidth;
/* convert yclk from kHz to MHz in fixed point */
1847 a.full = dfixed_const(1000);
1848 yclk.full = dfixed_const(wm->yclk);
1849 yclk.full = dfixed_div(yclk, a);
1850 dram_channels.full = dfixed_const(wm->dram_channels * 4);
1851 a.full = dfixed_const(10);
1852 dram_efficiency.full = dfixed_const(7);
1853 dram_efficiency.full = dfixed_div(dram_efficiency, a);
1854 bandwidth.full = dfixed_mul(dram_channels, yclk);
1855 bandwidth.full = dfixed_mul(bandwidth, dram_efficiency);
1857 return dfixed_trunc(bandwidth);
/* evergreen_dram_bandwidth_for_display - dram bandwidth available to
 * the display, assuming the worst-case 0.3 display allocation.
 */
1860 static u32 evergreen_dram_bandwidth_for_display(struct evergreen_wm_params *wm)
1862 /* Calculate DRAM Bandwidth and the part allocated to display. */
1863 fixed20_12 disp_dram_allocation; /* 0.3 to 0.7 */
1864 fixed20_12 yclk, dram_channels, bandwidth;
/* convert yclk from kHz to MHz in fixed point */
1867 a.full = dfixed_const(1000);
1868 yclk.full = dfixed_const(wm->yclk);
1869 yclk.full = dfixed_div(yclk, a);
1870 dram_channels.full = dfixed_const(wm->dram_channels * 4);
1871 a.full = dfixed_const(10);
1872 disp_dram_allocation.full = dfixed_const(3); /* XXX worst case value 0.3 */
1873 disp_dram_allocation.full = dfixed_div(disp_dram_allocation, a);
1874 bandwidth.full = dfixed_mul(dram_channels, yclk);
1875 bandwidth.full = dfixed_mul(bandwidth, disp_dram_allocation);
1877 return dfixed_trunc(bandwidth);
/* evergreen_data_return_bandwidth - display data return bandwidth:
 * sclk(MHz) * 32 bytes * 0.8 efficiency.
 */
1880 static u32 evergreen_data_return_bandwidth(struct evergreen_wm_params *wm)
1882 /* Calculate the display Data return Bandwidth */
1883 fixed20_12 return_efficiency; /* 0.8 */
1884 fixed20_12 sclk, bandwidth;
/* convert sclk from kHz to MHz in fixed point */
1887 a.full = dfixed_const(1000);
1888 sclk.full = dfixed_const(wm->sclk);
1889 sclk.full = dfixed_div(sclk, a);
1890 a.full = dfixed_const(10);
1891 return_efficiency.full = dfixed_const(8);
1892 return_efficiency.full = dfixed_div(return_efficiency, a);
1893 a.full = dfixed_const(32);
1894 bandwidth.full = dfixed_mul(a, sclk);
1895 bandwidth.full = dfixed_mul(bandwidth, return_efficiency);
1897 return dfixed_trunc(bandwidth);
/* evergreen_dmif_request_bandwidth - DMIF request bandwidth:
 * disp_clk(MHz) * 32 bytes * 0.8 efficiency.
 */
1900 static u32 evergreen_dmif_request_bandwidth(struct evergreen_wm_params *wm)
1902 /* Calculate the DMIF Request Bandwidth */
1903 fixed20_12 disp_clk_request_efficiency; /* 0.8 */
1904 fixed20_12 disp_clk, bandwidth;
/* convert disp_clk from kHz to MHz in fixed point */
1907 a.full = dfixed_const(1000);
1908 disp_clk.full = dfixed_const(wm->disp_clk);
1909 disp_clk.full = dfixed_div(disp_clk, a);
1910 a.full = dfixed_const(10);
1911 disp_clk_request_efficiency.full = dfixed_const(8);
1912 disp_clk_request_efficiency.full = dfixed_div(disp_clk_request_efficiency, a);
1913 a.full = dfixed_const(32);
1914 bandwidth.full = dfixed_mul(a, disp_clk);
1915 bandwidth.full = dfixed_mul(bandwidth, disp_clk_request_efficiency);
1917 return dfixed_trunc(bandwidth);
/* evergreen_available_bandwidth - the tightest of the three bandwidth
 * limits (dram, data return, DMIF request).
 */
1920 static u32 evergreen_available_bandwidth(struct evergreen_wm_params *wm)
1922 /* Calculate the Available bandwidth. Display can use this temporarily but not in average. */
1923 u32 dram_bandwidth = evergreen_dram_bandwidth(wm);
1924 u32 data_return_bandwidth = evergreen_data_return_bandwidth(wm);
1925 u32 dmif_req_bandwidth = evergreen_dmif_request_bandwidth(wm);
1927 return min(dram_bandwidth, min(data_return_bandwidth, dmif_req_bandwidth));
/* evergreen_average_bandwidth - average bandwidth the mode consumes:
 * src_width * bytes_per_pixel * vsc / line_time.
 */
1930 static u32 evergreen_average_bandwidth(struct evergreen_wm_params *wm)
1932 /* Calculate the display mode Average Bandwidth
1933 * DisplayMode should contain the source and destination dimensions,
1937 fixed20_12 line_time;
1938 fixed20_12 src_width;
1939 fixed20_12 bandwidth;
/* line time in us (active + blank are in ns) */
1942 a.full = dfixed_const(1000);
1943 line_time.full = dfixed_const(wm->active_time + wm->blank_time);
1944 line_time.full = dfixed_div(line_time, a);
1945 bpp.full = dfixed_const(wm->bytes_per_pixel);
1946 src_width.full = dfixed_const(wm->src_width);
1947 bandwidth.full = dfixed_mul(src_width, bpp);
1948 bandwidth.full = dfixed_mul(bandwidth, wm->vsc);
1949 bandwidth.full = dfixed_div(bandwidth, line_time);
1951 return dfixed_trunc(bandwidth);
/* evergreen_latency_watermark - worst-case latency (in ns) the display
 * must be able to hide: memory latency plus data return time for the
 * other heads, plus line buffer fill shortfall if any.
 */
1954 static u32 evergreen_latency_watermark(struct evergreen_wm_params *wm)
1956 /* First calculate the latency in ns */
1957 u32 mc_latency = 2000; /* 2000 ns. */
1958 u32 available_bandwidth = evergreen_available_bandwidth(wm);
/* time to return a 512-byte * 8 chunk, resp. a 128*4 cursor line pair */
1959 u32 worst_chunk_return_time = (512 * 8 * 1000) / available_bandwidth;
1960 u32 cursor_line_pair_return_time = (128 * 4 * 1000) / available_bandwidth;
1961 u32 dc_latency = 40000000 / wm->disp_clk; /* dc pipe latency */
1962 u32 other_heads_data_return_time = ((wm->num_heads + 1) * worst_chunk_return_time) +
1963 (wm->num_heads * cursor_line_pair_return_time);
1964 u32 latency = mc_latency + other_heads_data_return_time + dc_latency;
1965 u32 max_src_lines_per_dst_line, lb_fill_bw, line_fill_time;
/* no active heads: nothing more to add to the base latency */
1968 if (wm->num_heads == 0)
/* downscaling (vsc > 1, or >= 1 with >=3 taps or interlace) needs up
 * to 4 source lines per destination line, otherwise 2 */
1971 a.full = dfixed_const(2);
1972 b.full = dfixed_const(1);
1973 if ((wm->vsc.full > a.full) ||
1974 ((wm->vsc.full > b.full) && (wm->vtaps >= 3)) ||
1976 ((wm->vsc.full >= a.full) && wm->interlaced))
1977 max_src_lines_per_dst_line = 4;
1979 max_src_lines_per_dst_line = 2;
/* line buffer fill bandwidth: min of the per-head share of available
 * bandwidth and disp_clk * bytes_per_pixel */
1981 a.full = dfixed_const(available_bandwidth);
1982 b.full = dfixed_const(wm->num_heads);
1983 a.full = dfixed_div(a, b);
1985 b.full = dfixed_const(1000);
1986 c.full = dfixed_const(wm->disp_clk);
1987 b.full = dfixed_div(c, b);
1988 c.full = dfixed_const(wm->bytes_per_pixel);
1989 b.full = dfixed_mul(b, c);
1991 lb_fill_bw = min(dfixed_trunc(a), dfixed_trunc(b));
/* time to fill the worst-case number of source lines at that rate */
1993 a.full = dfixed_const(max_src_lines_per_dst_line * wm->src_width * wm->bytes_per_pixel);
1994 b.full = dfixed_const(1000);
1995 c.full = dfixed_const(lb_fill_bw);
1996 b.full = dfixed_div(c, b);
1997 a.full = dfixed_div(a, b);
1998 line_fill_time = dfixed_trunc(a);
/* if the fill completes within active time it adds no extra latency */
2000 if (line_fill_time < wm->active_time)
2003 return latency + (line_fill_time - wm->active_time);
/* true if this head's average bandwidth fits within its per-head share
 * of the display-allocated dram bandwidth */
2007 static bool evergreen_average_bandwidth_vs_dram_bandwidth_for_display(struct evergreen_wm_params *wm)
2009 if (evergreen_average_bandwidth(wm) <=
2010 (evergreen_dram_bandwidth_for_display(wm) / wm->num_heads))
/* true if this head's average bandwidth fits within its per-head share
 * of the overall available bandwidth */
2016 static bool evergreen_average_bandwidth_vs_available_bandwidth(struct evergreen_wm_params *wm)
2018 if (evergreen_average_bandwidth(wm) <=
2019 (evergreen_available_bandwidth(wm) / wm->num_heads))
/* evergreen_check_latency_hiding - true if the line buffer holds enough
 * lines to hide the latency watermark computed above.
 */
2025 static bool evergreen_check_latency_hiding(struct evergreen_wm_params *wm)
2027 u32 lb_partitions = wm->lb_size / wm->src_width;
2028 u32 line_time = wm->active_time + wm->blank_time;
2029 u32 latency_tolerant_lines;
/* scaling (or too few lb partitions for the tap count) means only one
 * line of tolerance; otherwise two */
2033 a.full = dfixed_const(1);
2034 if (wm->vsc.full > a.full)
2035 latency_tolerant_lines = 1;
2037 if (lb_partitions <= (wm->vtaps + 1))
2038 latency_tolerant_lines = 1;
2040 latency_tolerant_lines = 2;
2043 latency_hiding = (latency_tolerant_lines * line_time + wm->blank_time);
2045 if (evergreen_latency_watermark(wm) <= latency_hiding)
/* Program the display watermarks and priority marks for one CRTC.
 * Fills an evergreen_wm_params struct from the current mode and power
 * state, computes latency watermarks for high (a) and low (b) clocks,
 * and writes them plus the derived priority marks to the hardware.
 * @rdev: radeon device
 * @radeon_crtc: CRTC to program
 * @lb_size: line buffer size allotted to this CRTC
 * @num_heads: number of active display heads
 */
2051 static void evergreen_program_watermarks(struct radeon_device *rdev,
2052 struct radeon_crtc *radeon_crtc,
2053 u32 lb_size, u32 num_heads)
2055 struct drm_display_mode *mode = &radeon_crtc->base.mode;
2056 struct evergreen_wm_params wm;
2059 u32 latency_watermark_a = 0, latency_watermark_b = 0;
2060 u32 priority_a_mark = 0, priority_b_mark = 0;
2061 u32 priority_a_cnt = PRIORITY_OFF;
2062 u32 priority_b_cnt = PRIORITY_OFF;
/* per-pipe arbitration register stride is 16 bytes */
2063 u32 pipe_offset = radeon_crtc->crtc_id * 16;
2064 u32 tmp, arb_control3;
/* only compute real watermarks when this CRTC is actually driving a mode */
2067 if (radeon_crtc->base.enabled && num_heads && mode) {
/* mode->clock is in kHz, so this yields the pixel period in ns */
2068 pixel_period = 1000000 / (u32)mode->clock;
2069 line_time = min((u32)mode->crtc_htotal * pixel_period, (u32)65535);
/* current_mclk/current_sclk are in 10 kHz units; convert to kHz */
2073 wm.yclk = rdev->pm.current_mclk * 10;
2074 wm.sclk = rdev->pm.current_sclk * 10;
2075 wm.disp_clk = mode->clock;
2076 wm.src_width = mode->crtc_hdisplay;
2077 wm.active_time = mode->crtc_hdisplay * pixel_period;
2078 wm.blank_time = line_time - wm.active_time;
2079 wm.interlaced = false;
2080 if (mode->flags & DRM_MODE_FLAG_INTERLACE)
2081 wm.interlaced = true;
2082 wm.vsc = radeon_crtc->vsc;
2084 if (radeon_crtc->rmx_type != RMX_OFF)
2086 wm.bytes_per_pixel = 4; /* XXX: get this from fb config */
2087 wm.lb_size = lb_size;
2088 wm.dram_channels = evergreen_get_number_of_dram_channels(rdev);
2089 wm.num_heads = num_heads;
2091 /* set for high clocks */
2092 latency_watermark_a = min(evergreen_latency_watermark(&wm), (u32)65535);
2093 /* set for low clocks */
2094 /* wm.yclk = low clk; wm.sclk = low clk */
2095 latency_watermark_b = min(evergreen_latency_watermark(&wm), (u32)65535);
2097 /* possibly force display priority to high */
2098 /* should really do this at mode validation time... */
2099 if (!evergreen_average_bandwidth_vs_dram_bandwidth_for_display(&wm) ||
2100 !evergreen_average_bandwidth_vs_available_bandwidth(&wm) ||
2101 !evergreen_check_latency_hiding(&wm) ||
2102 (rdev->disp_priority == 2)) {
2103 DRM_DEBUG_KMS("force priority to high\n");
2104 priority_a_cnt |= PRIORITY_ALWAYS_ON;
2105 priority_b_cnt |= PRIORITY_ALWAYS_ON;
/* priority mark A (high clocks), in fixed point:
 * mark = watermark_a * pixel_clock(MHz) * hscale / 16 */
2108 a.full = dfixed_const(1000);
2109 b.full = dfixed_const(mode->clock);
2110 b.full = dfixed_div(b, a);
2111 c.full = dfixed_const(latency_watermark_a);
2112 c.full = dfixed_mul(c, b);
2113 c.full = dfixed_mul(c, radeon_crtc->hsc);
2114 c.full = dfixed_div(c, a);
2115 a.full = dfixed_const(16);
2116 c.full = dfixed_div(c, a);
2117 priority_a_mark = dfixed_trunc(c);
2118 priority_a_cnt |= priority_a_mark & PRIORITY_MARK_MASK;
/* priority mark B (low clocks) computed the same way */
2120 a.full = dfixed_const(1000);
2121 b.full = dfixed_const(mode->clock);
2122 b.full = dfixed_div(b, a);
2123 c.full = dfixed_const(latency_watermark_b);
2124 c.full = dfixed_mul(c, b);
2125 c.full = dfixed_mul(c, radeon_crtc->hsc);
2126 c.full = dfixed_div(c, a);
2127 a.full = dfixed_const(16);
2128 c.full = dfixed_div(c, a);
2129 priority_b_mark = dfixed_trunc(c);
2130 priority_b_cnt |= priority_b_mark & PRIORITY_MARK_MASK;
/* save the original watermark selection so it can be restored below */
2134 arb_control3 = RREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset);
/* select watermark set 1 and write watermark a */
2136 tmp &= ~LATENCY_WATERMARK_MASK(3);
2137 tmp |= LATENCY_WATERMARK_MASK(1);
2138 WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, tmp);
2139 WREG32(PIPE0_LATENCY_CONTROL + pipe_offset,
2140 (LATENCY_LOW_WATERMARK(latency_watermark_a) |
2141 LATENCY_HIGH_WATERMARK(line_time)));
/* select watermark set 2 and write watermark b */
2143 tmp = RREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset);
2144 tmp &= ~LATENCY_WATERMARK_MASK(3);
2145 tmp |= LATENCY_WATERMARK_MASK(2);
2146 WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, tmp);
2147 WREG32(PIPE0_LATENCY_CONTROL + pipe_offset,
2148 (LATENCY_LOW_WATERMARK(latency_watermark_b) |
2149 LATENCY_HIGH_WATERMARK(line_time)));
2150 /* restore original selection */
2151 WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, arb_control3);
2153 /* write the priority marks */
2154 WREG32(PRIORITY_A_CNT + radeon_crtc->crtc_offset, priority_a_cnt);
2155 WREG32(PRIORITY_B_CNT + radeon_crtc->crtc_offset, priority_b_cnt);
2160 * evergreen_bandwidth_update - update display watermarks callback.
2162 * @rdev: radeon_device pointer
2164 * Update the display watermarks based on the requested mode(s)
2167 void evergreen_bandwidth_update(struct radeon_device *rdev)
2169 struct drm_display_mode *mode0 = NULL;
2170 struct drm_display_mode *mode1 = NULL;
2171 u32 num_heads = 0, lb_size;
2174 radeon_update_display_priority(rdev);
/* count the active display heads */
2176 for (i = 0; i < rdev->num_crtc; i++) {
2177 if (rdev->mode_info.crtcs[i]->base.enabled)
/* CRTCs are handled in pairs: each pair shares one line buffer that
 * evergreen_line_buffer_adjust() splits between the two modes */
2180 for (i = 0; i < rdev->num_crtc; i += 2) {
2181 mode0 = &rdev->mode_info.crtcs[i]->base.mode;
2182 mode1 = &rdev->mode_info.crtcs[i+1]->base.mode;
2183 lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i], mode0, mode1);
2184 evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i], lb_size, num_heads);
2185 lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i+1], mode1, mode0);
2186 evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i+1], lb_size, num_heads);
2191 * evergreen_mc_wait_for_idle - wait for MC idle callback.
2193 * @rdev: radeon_device pointer
2195 * Wait for the MC (memory controller) to be idle.
2197 * Returns 0 if the MC is idle, -1 if not.
2199 int evergreen_mc_wait_for_idle(struct radeon_device *rdev)
/* poll up to usec_timeout iterations for the MC busy bits to clear */
2204 for (i = 0; i < rdev->usec_timeout; i++) {
2205 /* read MC_STATUS */
/* 0x1F00 masks the SRBM_STATUS bits that indicate MC activity */
2206 tmp = RREG32(SRBM_STATUS) & 0x1F00;
/* Flush the VM context0 TLB (and HDP cache) for the PCIE GART.
 * Issues a flush request and polls the response register until the
 * hardware acknowledges, warning on timeout.
 */
2217 void evergreen_pcie_gart_tlb_flush(struct radeon_device *rdev)
/* flush the HDP cache so CPU writes reach memory before the TLB flush */
2222 WREG32(HDP_MEM_COHERENCY_FLUSH_CNTL, 0x1);
/* request a TLB flush for VM context 0 */
2224 WREG32(VM_CONTEXT0_REQUEST_RESPONSE, REQUEST_TYPE(1));
2225 for (i = 0; i < rdev->usec_timeout; i++) {
2226 /* read MC_STATUS */
2227 tmp = RREG32(VM_CONTEXT0_REQUEST_RESPONSE);
2228 tmp = (tmp & RESPONSE_TYPE_MASK) >> RESPONSE_TYPE_SHIFT;
2230 printk(KERN_WARNING "[drm] r600 flush TLB failed\n");
/* Enable the PCIE GART: pin the page table in VRAM, program the VM L2
 * cache and L1 TLBs, set up VM context 0 to cover the GTT aperture, and
 * flush the TLB.  Returns 0 on success or a negative error code.
 */
2240 static int evergreen_pcie_gart_enable(struct radeon_device *rdev)
2245 if (rdev->gart.robj == NULL) {
2246 dev_err(rdev->dev, "No VRAM object for PCIE GART.\n");
2249 r = radeon_gart_table_vram_pin(rdev);
2252 radeon_gart_restore(rdev);
2253 /* Setup L2 cache */
2254 WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
2255 ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
2256 EFFECTIVE_L2_QUEUE_SIZE(7));
2257 WREG32(VM_L2_CNTL2, 0);
2258 WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
2259 /* Setup TLB control */
2260 tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
2261 SYSTEM_ACCESS_MODE_NOT_IN_SYS |
2262 SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
2263 EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
/* IGPs (fusion) use the FUS_* TLB register bank, discrete parts the MC_VM_* one */
2264 if (rdev->flags & RADEON_IS_IGP) {
2265 WREG32(FUS_MC_VM_MD_L1_TLB0_CNTL, tmp);
2266 WREG32(FUS_MC_VM_MD_L1_TLB1_CNTL, tmp);
2267 WREG32(FUS_MC_VM_MD_L1_TLB2_CNTL, tmp);
2269 WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
2270 WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
2271 WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
/* only these families have a fourth MD L1 TLB */
2272 if ((rdev->family == CHIP_JUNIPER) ||
2273 (rdev->family == CHIP_CYPRESS) ||
2274 (rdev->family == CHIP_HEMLOCK) ||
2275 (rdev->family == CHIP_BARTS))
2276 WREG32(MC_VM_MD_L1_TLB3_CNTL, tmp);
2278 WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
2279 WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
2280 WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
2281 WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
/* VM context 0 spans the GTT range; addresses are in 4K-page units */
2282 WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12);
2283 WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12);
2284 WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12);
2285 WREG32(VM_CONTEXT0_CNTL, ENABLE_CONTEXT | PAGE_TABLE_DEPTH(0) |
2286 RANGE_PROTECTION_FAULT_ENABLE_DEFAULT);
/* out-of-range accesses are redirected to the dummy page */
2287 WREG32(VM_CONTEXT0_PROTECTION_FAULT_DEFAULT_ADDR,
2288 (u32)(rdev->dummy_page.addr >> 12));
2289 WREG32(VM_CONTEXT1_CNTL, 0);
2291 evergreen_pcie_gart_tlb_flush(rdev);
2292 DRM_INFO("PCIE GART of %uM enabled (table at 0x%016llX).\n",
2293 (unsigned)(rdev->mc.gtt_size >> 20),
2294 (unsigned long long)rdev->gart.table_addr);
2295 rdev->gart.ready = true;
/* Disable the PCIE GART: turn off both VM contexts, leave the L2/L1
 * caches in a benign passthrough configuration, and unpin the page table.
 */
2299 static void evergreen_pcie_gart_disable(struct radeon_device *rdev)
2303 /* Disable all tables */
2304 WREG32(VM_CONTEXT0_CNTL, 0);
2305 WREG32(VM_CONTEXT1_CNTL, 0);
2307 /* Setup L2 cache */
/* note: no ENABLE_L2_CACHE here — the L2 stays effectively off */
2308 WREG32(VM_L2_CNTL, ENABLE_L2_FRAGMENT_PROCESSING |
2309 EFFECTIVE_L2_QUEUE_SIZE(7));
2310 WREG32(VM_L2_CNTL2, 0);
2311 WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
2312 /* Setup TLB control */
/* likewise no ENABLE_L1_TLB — TLBs disabled, sizes kept sane */
2313 tmp = EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
2314 WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
2315 WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
2316 WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
2317 WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
2318 WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
2319 WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
2320 WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
2321 radeon_gart_table_vram_unpin(rdev);
/* Tear down the PCIE GART completely: disable it, free the page table
 * VRAM object, and release the generic GART state.
 */
2324 static void evergreen_pcie_gart_fini(struct radeon_device *rdev)
2326 evergreen_pcie_gart_disable(rdev);
2327 radeon_gart_table_vram_free(rdev);
2328 radeon_gart_fini(rdev);
/* Configure the VM hardware for AGP operation: L2 cache and L1 TLBs are
 * enabled (same settings as the GART path) but both VM contexts are left
 * disabled since AGP does its own address translation.
 */
2332 static void evergreen_agp_enable(struct radeon_device *rdev)
2336 /* Setup L2 cache */
2337 WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
2338 ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
2339 EFFECTIVE_L2_QUEUE_SIZE(7));
2340 WREG32(VM_L2_CNTL2, 0);
2341 WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
2342 /* Setup TLB control */
2343 tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
2344 SYSTEM_ACCESS_MODE_NOT_IN_SYS |
2345 SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
2346 EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
2347 WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
2348 WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
2349 WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
2350 WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
2351 WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
2352 WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
2353 WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
/* no page-table translation in AGP mode */
2354 WREG32(VM_CONTEXT0_CNTL, 0);
2355 WREG32(VM_CONTEXT1_CNTL, 0);
/* Quiesce the display hardware before reprogramming the memory controller.
 * Saves VGA state into @save, blanks/disables every active CRTC (recording
 * which ones were enabled so evergreen_mc_resume() can restore them),
 * blacks out the MC, and locks the double-buffered display registers.
 */
2358 void evergreen_mc_stop(struct radeon_device *rdev, struct evergreen_mc_save *save)
2360 u32 crtc_enabled, tmp, frame_count, blackout;
2363 if (!ASIC_IS_NODCE(rdev)) {
/* save VGA state so resume can put it back */
2364 save->vga_render_control = RREG32(VGA_RENDER_CONTROL);
2365 save->vga_hdp_control = RREG32(VGA_HDP_CONTROL);
2367 /* disable VGA render */
2368 WREG32(VGA_RENDER_CONTROL, 0);
2370 /* blank the display controllers */
2371 for (i = 0; i < rdev->num_crtc; i++) {
2372 crtc_enabled = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]) & EVERGREEN_CRTC_MASTER_EN;
2374 save->crtc_enabled[i] = true;
/* DCE6 blanks via CRTC_BLANK_CONTROL; older DCE disables read requests */
2375 if (ASIC_IS_DCE6(rdev)) {
2376 tmp = RREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i]);
2377 if (!(tmp & EVERGREEN_CRTC_BLANK_DATA_EN)) {
2378 radeon_wait_for_vblank(rdev, i);
/* take the update lock around the register change */
2379 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
2380 tmp |= EVERGREEN_CRTC_BLANK_DATA_EN;
2381 WREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i], tmp);
2382 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
2385 tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
2386 if (!(tmp & EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE)) {
2387 radeon_wait_for_vblank(rdev, i);
2388 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
2389 tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
2390 WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
2391 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
2394 /* wait for the next frame */
2395 frame_count = radeon_get_vblank_counter(rdev, i);
2396 for (j = 0; j < rdev->usec_timeout; j++) {
2397 if (radeon_get_vblank_counter(rdev, i) != frame_count)
2402 /* XXX this is a hack to avoid strange behavior with EFI on certain systems */
2403 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
2404 tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
2405 tmp &= ~EVERGREEN_CRTC_MASTER_EN;
2406 WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
2407 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
2408 save->crtc_enabled[i] = false;
2411 save->crtc_enabled[i] = false;
2415 radeon_mc_wait_for_idle(rdev);
/* black out the MC: stop CPU framebuffer access, then set blackout mode */
2417 blackout = RREG32(MC_SHARED_BLACKOUT_CNTL);
2418 if ((blackout & BLACKOUT_MODE_MASK) != 1) {
2419 /* Block CPU access */
2420 WREG32(BIF_FB_EN, 0);
2421 /* blackout the MC */
2422 blackout &= ~BLACKOUT_MODE_MASK;
2423 WREG32(MC_SHARED_BLACKOUT_CNTL, blackout | 1);
2425 /* wait for the MC to settle */
2428 /* lock double buffered regs */
2429 for (i = 0; i < rdev->num_crtc; i++) {
2430 if (save->crtc_enabled[i]) {
2431 tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
2432 if (!(tmp & EVERGREEN_GRPH_UPDATE_LOCK)) {
2433 tmp |= EVERGREEN_GRPH_UPDATE_LOCK;
2434 WREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i], tmp);
2436 tmp = RREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i]);
2439 WREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i], tmp);
/* Undo evergreen_mc_stop(): repoint scanout to the (possibly relocated)
 * VRAM base, unlock the double-buffered display registers, lift the MC
 * blackout, and unblank the CRTCs that @save recorded as enabled.
 */
2445 void evergreen_mc_resume(struct radeon_device *rdev, struct evergreen_mc_save *save)
2447 u32 tmp, frame_count;
2450 /* update crtc base addresses */
2451 for (i = 0; i < rdev->num_crtc; i++) {
2452 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + crtc_offsets[i],
2453 upper_32_bits(rdev->mc.vram_start));
2454 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + crtc_offsets[i],
2455 upper_32_bits(rdev->mc.vram_start));
2456 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + crtc_offsets[i],
2457 (u32)rdev->mc.vram_start);
2458 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + crtc_offsets[i],
2459 (u32)rdev->mc.vram_start);
2462 if (!ASIC_IS_NODCE(rdev)) {
2463 WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS_HIGH, upper_32_bits(rdev->mc.vram_start));
2464 WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS, (u32)rdev->mc.vram_start);
2467 /* unlock regs and wait for update */
2468 for (i = 0; i < rdev->num_crtc; i++) {
2469 if (save->crtc_enabled[i]) {
2470 tmp = RREG32(EVERGREEN_MASTER_UPDATE_MODE + crtc_offsets[i]);
2471 if ((tmp & 0x3) != 0) {
2473 WREG32(EVERGREEN_MASTER_UPDATE_MODE + crtc_offsets[i], tmp);
2475 tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
2476 if (tmp & EVERGREEN_GRPH_UPDATE_LOCK) {
2477 tmp &= ~EVERGREEN_GRPH_UPDATE_LOCK;
2478 WREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i], tmp);
2480 tmp = RREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i]);
2483 WREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i], tmp);
/* wait until the pending surface update has been latched */
2485 for (j = 0; j < rdev->usec_timeout; j++) {
2486 tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
2487 if ((tmp & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING) == 0)
2494 /* unblackout the MC */
2495 tmp = RREG32(MC_SHARED_BLACKOUT_CNTL);
2496 tmp &= ~BLACKOUT_MODE_MASK;
2497 WREG32(MC_SHARED_BLACKOUT_CNTL, tmp);
2498 /* allow CPU access */
2499 WREG32(BIF_FB_EN, FB_READ_EN | FB_WRITE_EN);
/* re-enable scanout on the CRTCs we blanked in evergreen_mc_stop() */
2501 for (i = 0; i < rdev->num_crtc; i++) {
2502 if (save->crtc_enabled[i]) {
2503 if (ASIC_IS_DCE6(rdev)) {
2504 tmp = RREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i]);
2505 tmp |= EVERGREEN_CRTC_BLANK_DATA_EN;
2506 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
2507 WREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i], tmp);
2508 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
2510 tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
2511 tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
2512 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
2513 WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
2514 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
2516 /* wait for the next frame */
2517 frame_count = radeon_get_vblank_counter(rdev, i);
2518 for (j = 0; j < rdev->usec_timeout; j++) {
2519 if (radeon_get_vblank_counter(rdev, i) != frame_count)
2525 if (!ASIC_IS_NODCE(rdev)) {
2526 /* Unlock vga access */
2527 WREG32(VGA_HDP_CONTROL, save->vga_hdp_control);
2529 WREG32(VGA_RENDER_CONTROL, save->vga_render_control);
/* Program the memory controller aperture layout (VRAM / AGP / system
 * aperture) from rdev->mc.  Display is stopped around the update via
 * evergreen_mc_stop()/evergreen_mc_resume() so scanout never reads
 * through a half-programmed MC.
 */
2533 void evergreen_mc_program(struct radeon_device *rdev)
2535 struct evergreen_mc_save save;
2539 /* Initialize HDP */
2540 for (i = 0, j = 0; i < 32; i++, j += 0x18) {
2541 WREG32((0x2c14 + j), 0x00000000);
2542 WREG32((0x2c18 + j), 0x00000000);
2543 WREG32((0x2c1c + j), 0x00000000);
2544 WREG32((0x2c20 + j), 0x00000000);
2545 WREG32((0x2c24 + j), 0x00000000);
2547 WREG32(HDP_REG_COHERENCY_FLUSH_CNTL, 0);
2549 evergreen_mc_stop(rdev, &save);
2550 if (evergreen_mc_wait_for_idle(rdev)) {
2551 dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
2553 /* Lockout access through VGA aperture*/
2554 WREG32(VGA_HDP_CONTROL, VGA_MEMORY_DISABLE);
2555 /* Update configuration */
/* system aperture must cover both VRAM and GTT; pick the ordering that
 * makes the range contiguous */
2556 if (rdev->flags & RADEON_IS_AGP) {
2557 if (rdev->mc.vram_start < rdev->mc.gtt_start) {
2558 /* VRAM before AGP */
2559 WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
2560 rdev->mc.vram_start >> 12);
2561 WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
2562 rdev->mc.gtt_end >> 12);
2564 /* VRAM after AGP */
2565 WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
2566 rdev->mc.gtt_start >> 12);
2567 WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
2568 rdev->mc.vram_end >> 12);
2571 WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
2572 rdev->mc.vram_start >> 12);
2573 WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
2574 rdev->mc.vram_end >> 12);
/* unmapped accesses land in the VRAM scratch page */
2576 WREG32(MC_VM_SYSTEM_APERTURE_DEFAULT_ADDR, rdev->vram_scratch.gpu_addr >> 12);
2577 /* llano/ontario only */
2578 if ((rdev->family == CHIP_PALM) ||
2579 (rdev->family == CHIP_SUMO) ||
2580 (rdev->family == CHIP_SUMO2)) {
2581 tmp = RREG32(MC_FUS_VM_FB_OFFSET) & 0x000FFFFF;
2582 tmp |= ((rdev->mc.vram_end >> 20) & 0xF) << 24;
2583 tmp |= ((rdev->mc.vram_start >> 20) & 0xF) << 20;
2584 WREG32(MC_FUS_VM_FB_OFFSET, tmp);
/* FB location: top 16 bits = end, bottom 16 bits = start, in 16M units */
2586 tmp = ((rdev->mc.vram_end >> 24) & 0xFFFF) << 16;
2587 tmp |= ((rdev->mc.vram_start >> 24) & 0xFFFF);
2588 WREG32(MC_VM_FB_LOCATION, tmp);
2589 WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8));
2590 WREG32(HDP_NONSURFACE_INFO, (2 << 7) | (1 << 30));
2591 WREG32(HDP_NONSURFACE_SIZE, 0x3FFFFFFF);
2592 if (rdev->flags & RADEON_IS_AGP) {
2593 WREG32(MC_VM_AGP_TOP, rdev->mc.gtt_end >> 16);
2594 WREG32(MC_VM_AGP_BOT, rdev->mc.gtt_start >> 16);
2595 WREG32(MC_VM_AGP_BASE, rdev->mc.agp_base >> 22);
/* no AGP: park the aperture (BOT > TOP disables it) */
2597 WREG32(MC_VM_AGP_BASE, 0);
2598 WREG32(MC_VM_AGP_TOP, 0x0FFFFFFF);
2599 WREG32(MC_VM_AGP_BOT, 0x0FFFFFFF);
2601 if (evergreen_mc_wait_for_idle(rdev)) {
2602 dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
2604 evergreen_mc_resume(rdev, &save);
2605 /* we need to own VRAM, so turn off the VGA renderer here
2606 * to stop it overwriting our objects */
2607 rv515_vga_render_disable(rdev);
/* Schedule an indirect buffer (IB) on the GFX ring: switch the CP to
 * DX10/11 mode, arrange for the read pointer to be reported (via the
 * rptr save register or writeback memory), then emit the
 * INDIRECT_BUFFER packet pointing at the IB.
 */
2613 void evergreen_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib)
2615 struct radeon_ring *ring = &rdev->ring[ib->ring];
2618 /* set to DX10/11 mode */
2619 radeon_ring_write(ring, PACKET3(PACKET3_MODE_CONTROL, 0));
2620 radeon_ring_write(ring, 1);
2622 if (ring->rptr_save_reg) {
/* rptr after this 3-dword packet plus the 4-dword IB packet below */
2623 next_rptr = ring->wptr + 3 + 4;
2624 radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
2625 radeon_ring_write(ring, ((ring->rptr_save_reg -
2626 PACKET3_SET_CONFIG_REG_START) >> 2));
2627 radeon_ring_write(ring, next_rptr);
2628 } else if (rdev->wb.enabled) {
/* same idea, but write next_rptr to writeback memory instead */
2629 next_rptr = ring->wptr + 5 + 4;
2630 radeon_ring_write(ring, PACKET3(PACKET3_MEM_WRITE, 3));
2631 radeon_ring_write(ring, ring->next_rptr_gpu_addr & 0xfffffffc);
2632 radeon_ring_write(ring, (upper_32_bits(ring->next_rptr_gpu_addr) & 0xff) | (1 << 18));
2633 radeon_ring_write(ring, next_rptr);
2634 radeon_ring_write(ring, 0);
/* point the CP at the IB: low address (dword-aligned), high bits, length */
2637 radeon_ring_write(ring, PACKET3(PACKET3_INDIRECT_BUFFER, 2));
2638 radeon_ring_write(ring,
2642 (ib->gpu_addr & 0xFFFFFFFC));
2643 radeon_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFF);
2644 radeon_ring_write(ring, ib->length_dw);
/* Upload the PFP and ME (PM4) command-processor microcode images into
 * their on-chip ucode RAMs.  The firmware blobs are big-endian dword
 * streams; returns an error if either blob has not been loaded.
 */
2648 static int evergreen_cp_load_microcode(struct radeon_device *rdev)
2650 const __be32 *fw_data;
2653 if (!rdev->me_fw || !rdev->pfp_fw)
2661 RB_NO_UPDATE | RB_BLKSZ(15) | RB_BUFSZ(3));
/* upload PFP ucode: reset write address, stream the dwords, reset again */
2663 fw_data = (const __be32 *)rdev->pfp_fw->data;
2664 WREG32(CP_PFP_UCODE_ADDR, 0);
2665 for (i = 0; i < EVERGREEN_PFP_UCODE_SIZE; i++)
2666 WREG32(CP_PFP_UCODE_DATA, be32_to_cpup(fw_data++));
2667 WREG32(CP_PFP_UCODE_ADDR, 0);
/* upload ME (PM4) ucode the same way */
2669 fw_data = (const __be32 *)rdev->me_fw->data;
2670 WREG32(CP_ME_RAM_WADDR, 0);
2671 for (i = 0; i < EVERGREEN_PM4_UCODE_SIZE; i++)
2672 WREG32(CP_ME_RAM_DATA, be32_to_cpup(fw_data++));
2674 WREG32(CP_PFP_UCODE_ADDR, 0);
2675 WREG32(CP_ME_RAM_WADDR, 0);
2676 WREG32(CP_ME_RAM_RADDR, 0);
/* Start the command processor: emit ME_INITIALIZE, un-halt the CP, then
 * submit the golden register state (clear-state preamble + default state
 * + a few fixed register writes) that every context starts from.
 * Returns 0 on success or the ring-lock error code.
 */
2680 static int evergreen_cp_start(struct radeon_device *rdev)
2682 struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
2686 r = radeon_ring_lock(rdev, ring, 7);
2688 DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
/* ME_INITIALIZE: 5-dword payload ending with the device id */
2691 radeon_ring_write(ring, PACKET3(PACKET3_ME_INITIALIZE, 5));
2692 radeon_ring_write(ring, 0x1);
2693 radeon_ring_write(ring, 0x0);
2694 radeon_ring_write(ring, rdev->config.evergreen.max_hw_contexts - 1);
2695 radeon_ring_write(ring, PACKET3_ME_INITIALIZE_DEVICE_ID(1));
2696 radeon_ring_write(ring, 0);
2697 radeon_ring_write(ring, 0);
2698 radeon_ring_unlock_commit(rdev, ring);
2701 WREG32(CP_ME_CNTL, cp_me);
/* golden state plus the fixed packets below needs default_size + 19 dwords */
2703 r = radeon_ring_lock(rdev, ring, evergreen_default_size + 19);
2705 DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
2709 /* setup clear context state */
2710 radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
2711 radeon_ring_write(ring, PACKET3_PREAMBLE_BEGIN_CLEAR_STATE);
2713 for (i = 0; i < evergreen_default_size; i++)
2714 radeon_ring_write(ring, evergreen_default_state[i]);
2716 radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
2717 radeon_ring_write(ring, PACKET3_PREAMBLE_END_CLEAR_STATE);
2719 /* set clear context state */
2720 radeon_ring_write(ring, PACKET3(PACKET3_CLEAR_STATE, 0));
2721 radeon_ring_write(ring, 0);
2723 /* SQ_VTX_BASE_VTX_LOC */
2724 radeon_ring_write(ring, 0xc0026f00);
2725 radeon_ring_write(ring, 0x00000000);
2726 radeon_ring_write(ring, 0x00000000);
2727 radeon_ring_write(ring, 0x00000000);
2730 radeon_ring_write(ring, 0xc0036f00);
2731 radeon_ring_write(ring, 0x00000bc4);
2732 radeon_ring_write(ring, 0xffffffff);
2733 radeon_ring_write(ring, 0xffffffff);
2734 radeon_ring_write(ring, 0xffffffff);
2736 radeon_ring_write(ring, 0xc0026900);
2737 radeon_ring_write(ring, 0x00000316);
2738 radeon_ring_write(ring, 0x0000000e); /* VGT_VERTEX_REUSE_BLOCK_CNTL */
2739 radeon_ring_write(ring, 0x00000010); /* */
2741 radeon_ring_unlock_commit(rdev, ring);
/* Bring the CP ring buffer back up: soft-reset the CP, program the ring
 * size, pointers, and writeback addresses, start the CP via
 * evergreen_cp_start(), and run a ring test to confirm it is alive.
 */
2746 static int evergreen_cp_resume(struct radeon_device *rdev)
2748 struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
2753 /* Reset cp; if cp is reset, then PA, SH, VGT also need to be reset */
2754 WREG32(GRBM_SOFT_RESET, (SOFT_RESET_CP |
/* read back to post the reset write before releasing it */
2760 RREG32(GRBM_SOFT_RESET);
2762 WREG32(GRBM_SOFT_RESET, 0);
2763 RREG32(GRBM_SOFT_RESET);
2765 /* Set ring buffer size */
/* ring size encoded as log2 of quadwords */
2766 rb_bufsz = drm_order(ring->ring_size / 8);
2767 tmp = (drm_order(RADEON_GPU_PAGE_SIZE/8) << 8) | rb_bufsz;
2769 tmp |= BUF_SWAP_32BIT;
2771 WREG32(CP_RB_CNTL, tmp);
2772 WREG32(CP_SEM_WAIT_TIMER, 0x0);
2773 WREG32(CP_SEM_INCOMPLETE_TIMER_CNTL, 0x0);
2775 /* Set the write pointer delay */
2776 WREG32(CP_RB_WPTR_DELAY, 0);
2778 /* Initialize the ring buffer's read and write pointers */
2779 WREG32(CP_RB_CNTL, tmp | RB_RPTR_WR_ENA);
2780 WREG32(CP_RB_RPTR_WR, 0);
2782 WREG32(CP_RB_WPTR, ring->wptr);
2784 /* set the wb address whether it's enabled or not */
2785 WREG32(CP_RB_RPTR_ADDR,
2786 ((rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFFFFFFFC));
2787 WREG32(CP_RB_RPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFF);
2788 WREG32(SCRATCH_ADDR, ((rdev->wb.gpu_addr + RADEON_WB_SCRATCH_OFFSET) >> 8) & 0xFFFFFFFF);
/* scratch writeback only when WB is available */
2790 if (rdev->wb.enabled)
2791 WREG32(SCRATCH_UMSK, 0xff);
2793 tmp |= RB_NO_UPDATE;
2794 WREG32(SCRATCH_UMSK, 0);
2798 WREG32(CP_RB_CNTL, tmp);
2800 WREG32(CP_RB_BASE, ring->gpu_addr >> 8);
2801 WREG32(CP_DEBUG, (1 << 27) | (1 << 28));
2803 ring->rptr = RREG32(CP_RB_RPTR);
2805 evergreen_cp_start(rdev);
/* verify the CP actually consumes packets */
2807 r = radeon_ring_test(rdev, RADEON_RING_TYPE_GFX_INDEX, ring);
2809 ring->ready = false;
2818 static void evergreen_gpu_init(struct radeon_device *rdev)
2821 u32 mc_shared_chmap, mc_arb_ramcfg;
2825 u32 sq_lds_resource_mgmt;
2826 u32 sq_gpr_resource_mgmt_1;
2827 u32 sq_gpr_resource_mgmt_2;
2828 u32 sq_gpr_resource_mgmt_3;
2829 u32 sq_thread_resource_mgmt;
2830 u32 sq_thread_resource_mgmt_2;
2831 u32 sq_stack_resource_mgmt_1;
2832 u32 sq_stack_resource_mgmt_2;
2833 u32 sq_stack_resource_mgmt_3;
2834 u32 vgt_cache_invalidation;
2835 u32 hdp_host_path_cntl, tmp;
2836 u32 disabled_rb_mask;
2837 int i, j, num_shader_engines, ps_thread_count;
2839 switch (rdev->family) {
2842 rdev->config.evergreen.num_ses = 2;
2843 rdev->config.evergreen.max_pipes = 4;
2844 rdev->config.evergreen.max_tile_pipes = 8;
2845 rdev->config.evergreen.max_simds = 10;
2846 rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
2847 rdev->config.evergreen.max_gprs = 256;
2848 rdev->config.evergreen.max_threads = 248;
2849 rdev->config.evergreen.max_gs_threads = 32;
2850 rdev->config.evergreen.max_stack_entries = 512;
2851 rdev->config.evergreen.sx_num_of_sets = 4;
2852 rdev->config.evergreen.sx_max_export_size = 256;
2853 rdev->config.evergreen.sx_max_export_pos_size = 64;
2854 rdev->config.evergreen.sx_max_export_smx_size = 192;
2855 rdev->config.evergreen.max_hw_contexts = 8;
2856 rdev->config.evergreen.sq_num_cf_insts = 2;
2858 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
2859 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
2860 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
2861 gb_addr_config = CYPRESS_GB_ADDR_CONFIG_GOLDEN;
2864 rdev->config.evergreen.num_ses = 1;
2865 rdev->config.evergreen.max_pipes = 4;
2866 rdev->config.evergreen.max_tile_pipes = 4;
2867 rdev->config.evergreen.max_simds = 10;
2868 rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
2869 rdev->config.evergreen.max_gprs = 256;
2870 rdev->config.evergreen.max_threads = 248;
2871 rdev->config.evergreen.max_gs_threads = 32;
2872 rdev->config.evergreen.max_stack_entries = 512;
2873 rdev->config.evergreen.sx_num_of_sets = 4;
2874 rdev->config.evergreen.sx_max_export_size = 256;
2875 rdev->config.evergreen.sx_max_export_pos_size = 64;
2876 rdev->config.evergreen.sx_max_export_smx_size = 192;
2877 rdev->config.evergreen.max_hw_contexts = 8;
2878 rdev->config.evergreen.sq_num_cf_insts = 2;
2880 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
2881 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
2882 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
2883 gb_addr_config = JUNIPER_GB_ADDR_CONFIG_GOLDEN;
2886 rdev->config.evergreen.num_ses = 1;
2887 rdev->config.evergreen.max_pipes = 4;
2888 rdev->config.evergreen.max_tile_pipes = 4;
2889 rdev->config.evergreen.max_simds = 5;
2890 rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
2891 rdev->config.evergreen.max_gprs = 256;
2892 rdev->config.evergreen.max_threads = 248;
2893 rdev->config.evergreen.max_gs_threads = 32;
2894 rdev->config.evergreen.max_stack_entries = 256;
2895 rdev->config.evergreen.sx_num_of_sets = 4;
2896 rdev->config.evergreen.sx_max_export_size = 256;
2897 rdev->config.evergreen.sx_max_export_pos_size = 64;
2898 rdev->config.evergreen.sx_max_export_smx_size = 192;
2899 rdev->config.evergreen.max_hw_contexts = 8;
2900 rdev->config.evergreen.sq_num_cf_insts = 2;
2902 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
2903 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
2904 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
2905 gb_addr_config = REDWOOD_GB_ADDR_CONFIG_GOLDEN;
2909 rdev->config.evergreen.num_ses = 1;
2910 rdev->config.evergreen.max_pipes = 2;
2911 rdev->config.evergreen.max_tile_pipes = 2;
2912 rdev->config.evergreen.max_simds = 2;
2913 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
2914 rdev->config.evergreen.max_gprs = 256;
2915 rdev->config.evergreen.max_threads = 192;
2916 rdev->config.evergreen.max_gs_threads = 16;
2917 rdev->config.evergreen.max_stack_entries = 256;
2918 rdev->config.evergreen.sx_num_of_sets = 4;
2919 rdev->config.evergreen.sx_max_export_size = 128;
2920 rdev->config.evergreen.sx_max_export_pos_size = 32;
2921 rdev->config.evergreen.sx_max_export_smx_size = 96;
2922 rdev->config.evergreen.max_hw_contexts = 4;
2923 rdev->config.evergreen.sq_num_cf_insts = 1;
2925 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
2926 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
2927 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
2928 gb_addr_config = CEDAR_GB_ADDR_CONFIG_GOLDEN;
2931 rdev->config.evergreen.num_ses = 1;
2932 rdev->config.evergreen.max_pipes = 2;
2933 rdev->config.evergreen.max_tile_pipes = 2;
2934 rdev->config.evergreen.max_simds = 2;
2935 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
2936 rdev->config.evergreen.max_gprs = 256;
2937 rdev->config.evergreen.max_threads = 192;
2938 rdev->config.evergreen.max_gs_threads = 16;
2939 rdev->config.evergreen.max_stack_entries = 256;
2940 rdev->config.evergreen.sx_num_of_sets = 4;
2941 rdev->config.evergreen.sx_max_export_size = 128;
2942 rdev->config.evergreen.sx_max_export_pos_size = 32;
2943 rdev->config.evergreen.sx_max_export_smx_size = 96;
2944 rdev->config.evergreen.max_hw_contexts = 4;
2945 rdev->config.evergreen.sq_num_cf_insts = 1;
2947 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
2948 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
2949 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
2950 gb_addr_config = CEDAR_GB_ADDR_CONFIG_GOLDEN;
2953 rdev->config.evergreen.num_ses = 1;
2954 rdev->config.evergreen.max_pipes = 4;
2955 rdev->config.evergreen.max_tile_pipes = 4;
2956 if (rdev->pdev->device == 0x9648)
2957 rdev->config.evergreen.max_simds = 3;
2958 else if ((rdev->pdev->device == 0x9647) ||
2959 (rdev->pdev->device == 0x964a))
2960 rdev->config.evergreen.max_simds = 4;
2962 rdev->config.evergreen.max_simds = 5;
2963 rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
2964 rdev->config.evergreen.max_gprs = 256;
2965 rdev->config.evergreen.max_threads = 248;
2966 rdev->config.evergreen.max_gs_threads = 32;
2967 rdev->config.evergreen.max_stack_entries = 256;
2968 rdev->config.evergreen.sx_num_of_sets = 4;
2969 rdev->config.evergreen.sx_max_export_size = 256;
2970 rdev->config.evergreen.sx_max_export_pos_size = 64;
2971 rdev->config.evergreen.sx_max_export_smx_size = 192;
2972 rdev->config.evergreen.max_hw_contexts = 8;
2973 rdev->config.evergreen.sq_num_cf_insts = 2;
2975 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
2976 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
2977 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
2978 gb_addr_config = SUMO_GB_ADDR_CONFIG_GOLDEN;
2981 rdev->config.evergreen.num_ses = 1;
2982 rdev->config.evergreen.max_pipes = 4;
2983 rdev->config.evergreen.max_tile_pipes = 4;
2984 rdev->config.evergreen.max_simds = 2;
2985 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
2986 rdev->config.evergreen.max_gprs = 256;
2987 rdev->config.evergreen.max_threads = 248;
2988 rdev->config.evergreen.max_gs_threads = 32;
2989 rdev->config.evergreen.max_stack_entries = 512;
2990 rdev->config.evergreen.sx_num_of_sets = 4;
2991 rdev->config.evergreen.sx_max_export_size = 256;
2992 rdev->config.evergreen.sx_max_export_pos_size = 64;
2993 rdev->config.evergreen.sx_max_export_smx_size = 192;
2994 rdev->config.evergreen.max_hw_contexts = 4;
2995 rdev->config.evergreen.sq_num_cf_insts = 2;
2997 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
2998 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
2999 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3000 gb_addr_config = SUMO2_GB_ADDR_CONFIG_GOLDEN;
3003 rdev->config.evergreen.num_ses = 2;
3004 rdev->config.evergreen.max_pipes = 4;
3005 rdev->config.evergreen.max_tile_pipes = 8;
3006 rdev->config.evergreen.max_simds = 7;
3007 rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
3008 rdev->config.evergreen.max_gprs = 256;
3009 rdev->config.evergreen.max_threads = 248;
3010 rdev->config.evergreen.max_gs_threads = 32;
3011 rdev->config.evergreen.max_stack_entries = 512;
3012 rdev->config.evergreen.sx_num_of_sets = 4;
3013 rdev->config.evergreen.sx_max_export_size = 256;
3014 rdev->config.evergreen.sx_max_export_pos_size = 64;
3015 rdev->config.evergreen.sx_max_export_smx_size = 192;
3016 rdev->config.evergreen.max_hw_contexts = 8;
3017 rdev->config.evergreen.sq_num_cf_insts = 2;
3019 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3020 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3021 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3022 gb_addr_config = BARTS_GB_ADDR_CONFIG_GOLDEN;
3025 rdev->config.evergreen.num_ses = 1;
3026 rdev->config.evergreen.max_pipes = 4;
3027 rdev->config.evergreen.max_tile_pipes = 4;
3028 rdev->config.evergreen.max_simds = 6;
3029 rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3030 rdev->config.evergreen.max_gprs = 256;
3031 rdev->config.evergreen.max_threads = 248;
3032 rdev->config.evergreen.max_gs_threads = 32;
3033 rdev->config.evergreen.max_stack_entries = 256;
3034 rdev->config.evergreen.sx_num_of_sets = 4;
3035 rdev->config.evergreen.sx_max_export_size = 256;
3036 rdev->config.evergreen.sx_max_export_pos_size = 64;
3037 rdev->config.evergreen.sx_max_export_smx_size = 192;
3038 rdev->config.evergreen.max_hw_contexts = 8;
3039 rdev->config.evergreen.sq_num_cf_insts = 2;
3041 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3042 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3043 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3044 gb_addr_config = TURKS_GB_ADDR_CONFIG_GOLDEN;
3047 rdev->config.evergreen.num_ses = 1;
3048 rdev->config.evergreen.max_pipes = 2;
3049 rdev->config.evergreen.max_tile_pipes = 2;
3050 rdev->config.evergreen.max_simds = 2;
3051 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3052 rdev->config.evergreen.max_gprs = 256;
3053 rdev->config.evergreen.max_threads = 192;
3054 rdev->config.evergreen.max_gs_threads = 16;
3055 rdev->config.evergreen.max_stack_entries = 256;
3056 rdev->config.evergreen.sx_num_of_sets = 4;
3057 rdev->config.evergreen.sx_max_export_size = 128;
3058 rdev->config.evergreen.sx_max_export_pos_size = 32;
3059 rdev->config.evergreen.sx_max_export_smx_size = 96;
3060 rdev->config.evergreen.max_hw_contexts = 4;
3061 rdev->config.evergreen.sq_num_cf_insts = 1;
3063 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3064 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3065 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3066 gb_addr_config = CAICOS_GB_ADDR_CONFIG_GOLDEN;
3070 /* Initialize HDP */
3071 for (i = 0, j = 0; i < 32; i++, j += 0x18) {
3072 WREG32((0x2c14 + j), 0x00000000);
3073 WREG32((0x2c18 + j), 0x00000000);
3074 WREG32((0x2c1c + j), 0x00000000);
3075 WREG32((0x2c20 + j), 0x00000000);
3076 WREG32((0x2c24 + j), 0x00000000);
3079 WREG32(GRBM_CNTL, GRBM_READ_TIMEOUT(0xff));
3081 evergreen_fix_pci_max_read_req_size(rdev);
3083 mc_shared_chmap = RREG32(MC_SHARED_CHMAP);
3084 if ((rdev->family == CHIP_PALM) ||
3085 (rdev->family == CHIP_SUMO) ||
3086 (rdev->family == CHIP_SUMO2))
3087 mc_arb_ramcfg = RREG32(FUS_MC_ARB_RAMCFG);
3089 mc_arb_ramcfg = RREG32(MC_ARB_RAMCFG);
3091 /* setup tiling info dword. gb_addr_config is not adequate since it does
3092 * not have bank info, so create a custom tiling dword.
3093 * bits 3:0 num_pipes
3094 * bits 7:4 num_banks
3095 * bits 11:8 group_size
3096 * bits 15:12 row_size
3098 rdev->config.evergreen.tile_config = 0;
3099 switch (rdev->config.evergreen.max_tile_pipes) {
3102 rdev->config.evergreen.tile_config |= (0 << 0);
3105 rdev->config.evergreen.tile_config |= (1 << 0);
3108 rdev->config.evergreen.tile_config |= (2 << 0);
3111 rdev->config.evergreen.tile_config |= (3 << 0);
3114 /* num banks is 8 on all fusion asics. 0 = 4, 1 = 8, 2 = 16 */
3115 if (rdev->flags & RADEON_IS_IGP)
3116 rdev->config.evergreen.tile_config |= 1 << 4;
3118 switch ((mc_arb_ramcfg & NOOFBANK_MASK) >> NOOFBANK_SHIFT) {
3119 case 0: /* four banks */
3120 rdev->config.evergreen.tile_config |= 0 << 4;
3122 case 1: /* eight banks */
3123 rdev->config.evergreen.tile_config |= 1 << 4;
3125 case 2: /* sixteen banks */
3127 rdev->config.evergreen.tile_config |= 2 << 4;
3131 rdev->config.evergreen.tile_config |= 0 << 8;
3132 rdev->config.evergreen.tile_config |=
3133 ((gb_addr_config & 0x30000000) >> 28) << 12;
3135 num_shader_engines = (gb_addr_config & NUM_SHADER_ENGINES(3) >> 12) + 1;
3137 if ((rdev->family >= CHIP_CEDAR) && (rdev->family <= CHIP_HEMLOCK)) {
3141 WREG32(RCU_IND_INDEX, 0x204);
3142 efuse_straps_4 = RREG32(RCU_IND_DATA);
3143 WREG32(RCU_IND_INDEX, 0x203);
3144 efuse_straps_3 = RREG32(RCU_IND_DATA);
3145 tmp = (((efuse_straps_4 & 0xf) << 4) |
3146 ((efuse_straps_3 & 0xf0000000) >> 28));
3149 for (i = (rdev->config.evergreen.num_ses - 1); i >= 0; i--) {
3150 u32 rb_disable_bitmap;
3152 WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3153 WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3154 rb_disable_bitmap = (RREG32(CC_RB_BACKEND_DISABLE) & 0x00ff0000) >> 16;
3156 tmp |= rb_disable_bitmap;
3159 /* enabled rb are just the one not disabled :) */
3160 disabled_rb_mask = tmp;
3162 for (i = 0; i < rdev->config.evergreen.max_backends; i++)
3164 /* if all the backends are disabled, fix it up here */
3165 if ((disabled_rb_mask & tmp) == tmp) {
3166 for (i = 0; i < rdev->config.evergreen.max_backends; i++)
3167 disabled_rb_mask &= ~(1 << i);
3170 WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_BROADCAST_WRITES);
3171 WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_BROADCAST_WRITES);
3173 WREG32(GB_ADDR_CONFIG, gb_addr_config);
3174 WREG32(DMIF_ADDR_CONFIG, gb_addr_config);
3175 WREG32(HDP_ADDR_CONFIG, gb_addr_config);
3176 WREG32(DMA_TILING_CONFIG, gb_addr_config);
3177 WREG32(UVD_UDEC_ADDR_CONFIG, gb_addr_config);
3178 WREG32(UVD_UDEC_DB_ADDR_CONFIG, gb_addr_config);
3179 WREG32(UVD_UDEC_DBW_ADDR_CONFIG, gb_addr_config);
3181 if ((rdev->config.evergreen.max_backends == 1) &&
3182 (rdev->flags & RADEON_IS_IGP)) {
3183 if ((disabled_rb_mask & 3) == 1) {
3184 /* RB0 disabled, RB1 enabled */
3187 /* RB1 disabled, RB0 enabled */
3191 tmp = gb_addr_config & NUM_PIPES_MASK;
3192 tmp = r6xx_remap_render_backend(rdev, tmp, rdev->config.evergreen.max_backends,
3193 EVERGREEN_MAX_BACKENDS, disabled_rb_mask);
3195 WREG32(GB_BACKEND_MAP, tmp);
3197 WREG32(CGTS_SYS_TCC_DISABLE, 0);
3198 WREG32(CGTS_TCC_DISABLE, 0);
3199 WREG32(CGTS_USER_SYS_TCC_DISABLE, 0);
3200 WREG32(CGTS_USER_TCC_DISABLE, 0);
3202 /* set HW defaults for 3D engine */
3203 WREG32(CP_QUEUE_THRESHOLDS, (ROQ_IB1_START(0x16) |
3204 ROQ_IB2_START(0x2b)));
3206 WREG32(CP_MEQ_THRESHOLDS, STQ_SPLIT(0x30));
3208 WREG32(TA_CNTL_AUX, (DISABLE_CUBE_ANISO |
3213 sx_debug_1 = RREG32(SX_DEBUG_1);
3214 sx_debug_1 |= ENABLE_NEW_SMX_ADDRESS;
3215 WREG32(SX_DEBUG_1, sx_debug_1);
3218 smx_dc_ctl0 = RREG32(SMX_DC_CTL0);
3219 smx_dc_ctl0 &= ~NUMBER_OF_SETS(0x1ff);
3220 smx_dc_ctl0 |= NUMBER_OF_SETS(rdev->config.evergreen.sx_num_of_sets);
3221 WREG32(SMX_DC_CTL0, smx_dc_ctl0);
3223 if (rdev->family <= CHIP_SUMO2)
3224 WREG32(SMX_SAR_CTL0, 0x00010000);
3226 WREG32(SX_EXPORT_BUFFER_SIZES, (COLOR_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_size / 4) - 1) |
3227 POSITION_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_pos_size / 4) - 1) |
3228 SMX_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_smx_size / 4) - 1)));
3230 WREG32(PA_SC_FIFO_SIZE, (SC_PRIM_FIFO_SIZE(rdev->config.evergreen.sc_prim_fifo_size) |
3231 SC_HIZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_hiz_tile_fifo_size) |
3232 SC_EARLYZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_earlyz_tile_fifo_size)));
3234 WREG32(VGT_NUM_INSTANCES, 1);
3235 WREG32(SPI_CONFIG_CNTL, 0);
3236 WREG32(SPI_CONFIG_CNTL_1, VTX_DONE_DELAY(4));
3237 WREG32(CP_PERFMON_CNTL, 0);
3239 WREG32(SQ_MS_FIFO_SIZES, (CACHE_FIFO_SIZE(16 * rdev->config.evergreen.sq_num_cf_insts) |
3240 FETCH_FIFO_HIWATER(0x4) |
3241 DONE_FIFO_HIWATER(0xe0) |
3242 ALU_UPDATE_FIFO_HIWATER(0x8)));
3244 sq_config = RREG32(SQ_CONFIG);
3245 sq_config &= ~(PS_PRIO(3) |
3249 sq_config |= (VC_ENABLE |
3256 switch (rdev->family) {
3262 /* no vertex cache */
3263 sq_config &= ~VC_ENABLE;
3269 sq_lds_resource_mgmt = RREG32(SQ_LDS_RESOURCE_MGMT);
3271 sq_gpr_resource_mgmt_1 = NUM_PS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2))* 12 / 32);
3272 sq_gpr_resource_mgmt_1 |= NUM_VS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 6 / 32);
3273 sq_gpr_resource_mgmt_1 |= NUM_CLAUSE_TEMP_GPRS(4);
3274 sq_gpr_resource_mgmt_2 = NUM_GS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
3275 sq_gpr_resource_mgmt_2 |= NUM_ES_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
3276 sq_gpr_resource_mgmt_3 = NUM_HS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
3277 sq_gpr_resource_mgmt_3 |= NUM_LS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
3279 switch (rdev->family) {
3284 ps_thread_count = 96;
3287 ps_thread_count = 128;
3291 sq_thread_resource_mgmt = NUM_PS_THREADS(ps_thread_count);
3292 sq_thread_resource_mgmt |= NUM_VS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3293 sq_thread_resource_mgmt |= NUM_GS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3294 sq_thread_resource_mgmt |= NUM_ES_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3295 sq_thread_resource_mgmt_2 = NUM_HS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3296 sq_thread_resource_mgmt_2 |= NUM_LS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3298 sq_stack_resource_mgmt_1 = NUM_PS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3299 sq_stack_resource_mgmt_1 |= NUM_VS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3300 sq_stack_resource_mgmt_2 = NUM_GS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3301 sq_stack_resource_mgmt_2 |= NUM_ES_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3302 sq_stack_resource_mgmt_3 = NUM_HS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3303 sq_stack_resource_mgmt_3 |= NUM_LS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3305 WREG32(SQ_CONFIG, sq_config);
3306 WREG32(SQ_GPR_RESOURCE_MGMT_1, sq_gpr_resource_mgmt_1);
3307 WREG32(SQ_GPR_RESOURCE_MGMT_2, sq_gpr_resource_mgmt_2);
3308 WREG32(SQ_GPR_RESOURCE_MGMT_3, sq_gpr_resource_mgmt_3);
3309 WREG32(SQ_THREAD_RESOURCE_MGMT, sq_thread_resource_mgmt);
3310 WREG32(SQ_THREAD_RESOURCE_MGMT_2, sq_thread_resource_mgmt_2);
3311 WREG32(SQ_STACK_RESOURCE_MGMT_1, sq_stack_resource_mgmt_1);
3312 WREG32(SQ_STACK_RESOURCE_MGMT_2, sq_stack_resource_mgmt_2);
3313 WREG32(SQ_STACK_RESOURCE_MGMT_3, sq_stack_resource_mgmt_3);
3314 WREG32(SQ_DYN_GPR_CNTL_PS_FLUSH_REQ, 0);
3315 WREG32(SQ_LDS_RESOURCE_MGMT, sq_lds_resource_mgmt);
3317 WREG32(PA_SC_FORCE_EOV_MAX_CNTS, (FORCE_EOV_MAX_CLK_CNT(4095) |
3318 FORCE_EOV_MAX_REZ_CNT(255)));
3320 switch (rdev->family) {
3326 vgt_cache_invalidation = CACHE_INVALIDATION(TC_ONLY);
3329 vgt_cache_invalidation = CACHE_INVALIDATION(VC_AND_TC);
3332 vgt_cache_invalidation |= AUTO_INVLD_EN(ES_AND_GS_AUTO);
3333 WREG32(VGT_CACHE_INVALIDATION, vgt_cache_invalidation);
3335 WREG32(VGT_GS_VERTEX_REUSE, 16);
3336 WREG32(PA_SU_LINE_STIPPLE_VALUE, 0);
3337 WREG32(PA_SC_LINE_STIPPLE_STATE, 0);
3339 WREG32(VGT_VERTEX_REUSE_BLOCK_CNTL, 14);
3340 WREG32(VGT_OUT_DEALLOC_CNTL, 16);
3342 WREG32(CB_PERF_CTR0_SEL_0, 0);
3343 WREG32(CB_PERF_CTR0_SEL_1, 0);
3344 WREG32(CB_PERF_CTR1_SEL_0, 0);
3345 WREG32(CB_PERF_CTR1_SEL_1, 0);
3346 WREG32(CB_PERF_CTR2_SEL_0, 0);
3347 WREG32(CB_PERF_CTR2_SEL_1, 0);
3348 WREG32(CB_PERF_CTR3_SEL_0, 0);
3349 WREG32(CB_PERF_CTR3_SEL_1, 0);
3351 /* clear render buffer base addresses */
3352 WREG32(CB_COLOR0_BASE, 0);
3353 WREG32(CB_COLOR1_BASE, 0);
3354 WREG32(CB_COLOR2_BASE, 0);
3355 WREG32(CB_COLOR3_BASE, 0);
3356 WREG32(CB_COLOR4_BASE, 0);
3357 WREG32(CB_COLOR5_BASE, 0);
3358 WREG32(CB_COLOR6_BASE, 0);
3359 WREG32(CB_COLOR7_BASE, 0);
3360 WREG32(CB_COLOR8_BASE, 0);
3361 WREG32(CB_COLOR9_BASE, 0);
3362 WREG32(CB_COLOR10_BASE, 0);
3363 WREG32(CB_COLOR11_BASE, 0);
3365 /* set the shader const cache sizes to 0 */
3366 for (i = SQ_ALU_CONST_BUFFER_SIZE_PS_0; i < 0x28200; i += 4)
3368 for (i = SQ_ALU_CONST_BUFFER_SIZE_HS_0; i < 0x29000; i += 4)
3371 tmp = RREG32(HDP_MISC_CNTL);
3372 tmp |= HDP_FLUSH_INVALIDATE_CACHE;
3373 WREG32(HDP_MISC_CNTL, tmp);
3375 hdp_host_path_cntl = RREG32(HDP_HOST_PATH_CNTL);
3376 WREG32(HDP_HOST_PATH_CNTL, hdp_host_path_cntl);
3378 WREG32(PA_CL_ENHANCE, CLIP_VTX_REORDER_ENA | NUM_CLIP_SEQ(3));
/*
 * evergreen_mc_init - probe the memory controller and set up the GPU memory map.
 *
 * Derives channel width and channel count from the (FUS_)MC_ARB_RAMCFG and
 * MC_SHARED_CHMAP registers, reads the PCI BAR0 aperture, reads VRAM size
 * from CONFIG_MEMSIZE (bytes on fusion IGPs, megabytes on discrete parts),
 * then places VRAM/GTT via r700_vram_gtt_location() and refreshes the
 * bandwidth bookkeeping.
 * NOTE(review): the function's closing lines (return value) fall outside
 * this excerpt.
 */
3384 int evergreen_mc_init(struct radeon_device *rdev)
3387 int chansize, numchan;
3389 /* Get VRAM informations */
3390 rdev->mc.vram_is_ddr = true;
/* Fusion IGPs (Palm/Sumo/Sumo2) expose the ARB RAMCFG register at the FUS_
 * offset rather than the discrete-GPU one. */
3391 if ((rdev->family == CHIP_PALM) ||
3392 (rdev->family == CHIP_SUMO) ||
3393 (rdev->family == CHIP_SUMO2))
3394 tmp = RREG32(FUS_MC_ARB_RAMCFG);
3396 tmp = RREG32(MC_ARB_RAMCFG);
/* Per-channel width comes from the CHANSIZE override/size fields. */
3397 if (tmp & CHANSIZE_OVERRIDE) {
3399 } else if (tmp & CHANSIZE_MASK) {
/* Number of populated memory channels. */
3404 tmp = RREG32(MC_SHARED_CHMAP);
3405 switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
/* Total bus width = channels * per-channel width. */
3420 rdev->mc.vram_width = numchan * chansize;
3421 /* Could aper size report 0 ? */
3422 rdev->mc.aper_base = pci_resource_start(rdev->pdev, 0);
3423 rdev->mc.aper_size = pci_resource_len(rdev->pdev, 0);
3424 /* Setup GPU memory space */
3425 if ((rdev->family == CHIP_PALM) ||
3426 (rdev->family == CHIP_SUMO) ||
3427 (rdev->family == CHIP_SUMO2)) {
3428 /* size in bytes on fusion */
3429 rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE);
3430 rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE);
3432 /* size in MB on evergreen/cayman/tn */
3433 rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL;
3434 rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL;
/* CPU-visible VRAM is limited by the PCI aperture. */
3436 rdev->mc.visible_vram_size = rdev->mc.aper_size;
3437 r700_vram_gtt_location(rdev, &rdev->mc);
3438 radeon_update_bandwidth_info(rdev);
/*
 * evergreen_print_gpu_status_regs - dump the key GRBM/SRBM/CP/DMA status
 * registers to the kernel log.  Used to snapshot engine state before and
 * after a soft reset when diagnosing a hung GPU.  Cayman and newer parts
 * have a second async DMA engine whose status register sits at +0x800.
 */
3443 void evergreen_print_gpu_status_regs(struct radeon_device *rdev)
3445 dev_info(rdev->dev, " GRBM_STATUS = 0x%08X\n",
3446 RREG32(GRBM_STATUS));
3447 dev_info(rdev->dev, " GRBM_STATUS_SE0 = 0x%08X\n",
3448 RREG32(GRBM_STATUS_SE0));
3449 dev_info(rdev->dev, " GRBM_STATUS_SE1 = 0x%08X\n",
3450 RREG32(GRBM_STATUS_SE1));
3451 dev_info(rdev->dev, " SRBM_STATUS = 0x%08X\n",
3452 RREG32(SRBM_STATUS));
3453 dev_info(rdev->dev, " SRBM_STATUS2 = 0x%08X\n",
3454 RREG32(SRBM_STATUS2));
3455 dev_info(rdev->dev, " R_008674_CP_STALLED_STAT1 = 0x%08X\n",
3456 RREG32(CP_STALLED_STAT1));
3457 dev_info(rdev->dev, " R_008678_CP_STALLED_STAT2 = 0x%08X\n",
3458 RREG32(CP_STALLED_STAT2));
3459 dev_info(rdev->dev, " R_00867C_CP_BUSY_STAT = 0x%08X\n",
3460 RREG32(CP_BUSY_STAT));
3461 dev_info(rdev->dev, " R_008680_CP_STAT = 0x%08X\n",
3463 dev_info(rdev->dev, " R_00D034_DMA_STATUS_REG = 0x%08X\n",
3464 RREG32(DMA_STATUS_REG));
/* Second DMA engine exists only on Cayman and later. */
3465 if (rdev->family >= CHIP_CAYMAN) {
3466 dev_info(rdev->dev, " R_00D834_DMA_STATUS_REG = 0x%08X\n",
3467 RREG32(DMA_STATUS_REG + 0x800));
/*
 * evergreen_is_display_hung - heuristic check for a hung display controller.
 *
 * Samples the H/V counter of every CRTC whose master enable bit is set,
 * then re-reads the counters up to ten times; a CRTC whose counter never
 * advances stays marked in crtc_hung.
 * NOTE(review): the inter-sample delay and the final return statement fall
 * outside this excerpt.
 */
3471 bool evergreen_is_display_hung(struct radeon_device *rdev)
/* Record the starting HV count of each enabled CRTC. */
3477 for (i = 0; i < rdev->num_crtc; i++) {
3478 if (RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]) & EVERGREEN_CRTC_MASTER_EN) {
3479 crtc_status[i] = RREG32(EVERGREEN_CRTC_STATUS_HV_COUNT + crtc_offsets[i]);
3480 crtc_hung |= (1 << i);
/* Re-sample; a changing counter means the CRTC is alive. */
3484 for (j = 0; j < 10; j++) {
3485 for (i = 0; i < rdev->num_crtc; i++) {
3486 if (crtc_hung & (1 << i)) {
3487 tmp = RREG32(EVERGREEN_CRTC_STATUS_HV_COUNT + crtc_offsets[i]);
3488 if (tmp != crtc_status[i])
3489 crtc_hung &= ~(1 << i);
/*
 * evergreen_gpu_check_soft_reset - build a RADEON_RESET_* mask of blocks
 * that currently look busy or hung.
 *
 * Inspects GRBM_STATUS (graphics pipe / CP), DMA_STATUS_REG, SRBM_STATUS2,
 * SRBM_STATUS (RLC/IH/SEM/GRBM/VMC/MC request-pending and busy bits) and
 * VM_L2_STATUS, plus the display-hang heuristic.  A busy MC is deliberately
 * dropped from the mask at the end: it is most likely just busy, not hung.
 */
3500 static u32 evergreen_gpu_check_soft_reset(struct radeon_device *rdev)
/* GRBM_STATUS: shader/raster pipeline and command processor state. */
3506 tmp = RREG32(GRBM_STATUS);
3507 if (tmp & (PA_BUSY | SC_BUSY |
3509 TA_BUSY | VGT_BUSY |
3511 SPI_BUSY | VGT_BUSY_NO_DMA))
3512 reset_mask |= RADEON_RESET_GFX;
3514 if (tmp & (CF_RQ_PENDING | PF_RQ_PENDING |
3515 CP_BUSY | CP_COHERENCY_BUSY))
3516 reset_mask |= RADEON_RESET_CP;
3518 if (tmp & GRBM_EE_BUSY)
3519 reset_mask |= RADEON_RESET_GRBM | RADEON_RESET_GFX | RADEON_RESET_CP;
3521 /* DMA_STATUS_REG */
3522 tmp = RREG32(DMA_STATUS_REG);
3523 if (!(tmp & DMA_IDLE))
3524 reset_mask |= RADEON_RESET_DMA;
3527 tmp = RREG32(SRBM_STATUS2);
3529 reset_mask |= RADEON_RESET_DMA;
/* SRBM_STATUS: system-register block clients (RLC, IH, SEM, VMC, MC). */
3532 tmp = RREG32(SRBM_STATUS);
3533 if (tmp & (RLC_RQ_PENDING | RLC_BUSY))
3534 reset_mask |= RADEON_RESET_RLC;
3537 reset_mask |= RADEON_RESET_IH;
3540 reset_mask |= RADEON_RESET_SEM;
3542 if (tmp & GRBM_RQ_PENDING)
3543 reset_mask |= RADEON_RESET_GRBM;
3546 reset_mask |= RADEON_RESET_VMC;
3548 if (tmp & (MCB_BUSY | MCB_NON_DISPLAY_BUSY |
3549 MCC_BUSY | MCD_BUSY))
3550 reset_mask |= RADEON_RESET_MC;
3552 if (evergreen_is_display_hung(rdev))
3553 reset_mask |= RADEON_RESET_DISPLAY;
/* VM L2 cache state also feeds the VMC reset decision. */
3556 tmp = RREG32(VM_L2_STATUS);
3558 reset_mask |= RADEON_RESET_VMC;
3560 /* Skip MC reset as it's mostly likely not hung, just busy */
3561 if (reset_mask & RADEON_RESET_MC) {
3562 DRM_DEBUG("MC busy: 0x%08X, clearing.\n", reset_mask);
3563 reset_mask &= ~RADEON_RESET_MC;
/*
 * evergreen_gpu_soft_reset - soft-reset the engines named in @reset_mask.
 *
 * Sequence: halt the CP (and DMA ring if requested), quiesce the memory
 * controller, translate @reset_mask into GRBM and SRBM soft-reset bitmasks,
 * pulse each soft-reset register (set, read back to post, clear, read back),
 * then restore the MC and dump status registers again for comparison.
 * MC reset itself is only attempted on discrete parts, never on IGPs.
 */
3569 static void evergreen_gpu_soft_reset(struct radeon_device *rdev, u32 reset_mask)
3571 struct evergreen_mc_save save;
3572 u32 grbm_soft_reset = 0, srbm_soft_reset = 0;
/* Nothing flagged as hung: no work to do. */
3575 if (reset_mask == 0)
3578 dev_info(rdev->dev, "GPU softreset: 0x%08X\n", reset_mask);
3580 evergreen_print_gpu_status_regs(rdev);
3582 /* Disable CP parsing/prefetching */
3583 WREG32(CP_ME_CNTL, CP_ME_HALT | CP_PFP_HALT);
/* Stop the async DMA ring before resetting the DMA engine. */
3585 if (reset_mask & RADEON_RESET_DMA) {
3587 tmp = RREG32(DMA_RB_CNTL);
3588 tmp &= ~DMA_RB_ENABLE;
3589 WREG32(DMA_RB_CNTL, tmp);
/* Quiesce the memory controller so the reset does not corrupt in-flight
 * memory traffic; state is restored via evergreen_mc_resume() below. */
3594 evergreen_mc_stop(rdev, &save);
3595 if (evergreen_mc_wait_for_idle(rdev)) {
3596 dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
/* Translate the reset mask into GRBM (graphics block) reset bits... */
3599 if (reset_mask & (RADEON_RESET_GFX | RADEON_RESET_COMPUTE)) {
3600 grbm_soft_reset |= SOFT_RESET_DB |
3613 if (reset_mask & RADEON_RESET_CP) {
3614 grbm_soft_reset |= SOFT_RESET_CP |
3617 srbm_soft_reset |= SOFT_RESET_GRBM;
/* ...and into SRBM (system block) reset bits. */
3620 if (reset_mask & RADEON_RESET_DMA)
3621 srbm_soft_reset |= SOFT_RESET_DMA;
3623 if (reset_mask & RADEON_RESET_DISPLAY)
3624 srbm_soft_reset |= SOFT_RESET_DC;
3626 if (reset_mask & RADEON_RESET_RLC)
3627 srbm_soft_reset |= SOFT_RESET_RLC;
3629 if (reset_mask & RADEON_RESET_SEM)
3630 srbm_soft_reset |= SOFT_RESET_SEM;
3632 if (reset_mask & RADEON_RESET_IH)
3633 srbm_soft_reset |= SOFT_RESET_IH;
3635 if (reset_mask & RADEON_RESET_GRBM)
3636 srbm_soft_reset |= SOFT_RESET_GRBM;
3638 if (reset_mask & RADEON_RESET_VMC)
3639 srbm_soft_reset |= SOFT_RESET_VMC;
/* MC soft reset is not supported on IGPs. */
3641 if (!(rdev->flags & RADEON_IS_IGP)) {
3642 if (reset_mask & RADEON_RESET_MC)
3643 srbm_soft_reset |= SOFT_RESET_MC;
/* Pulse GRBM_SOFT_RESET: set bits, read back to post the write, clear,
 * read back again. */
3646 if (grbm_soft_reset) {
3647 tmp = RREG32(GRBM_SOFT_RESET);
3648 tmp |= grbm_soft_reset;
3649 dev_info(rdev->dev, "GRBM_SOFT_RESET=0x%08X\n", tmp);
3650 WREG32(GRBM_SOFT_RESET, tmp);
3651 tmp = RREG32(GRBM_SOFT_RESET);
3655 tmp &= ~grbm_soft_reset;
3656 WREG32(GRBM_SOFT_RESET, tmp);
3657 tmp = RREG32(GRBM_SOFT_RESET);
/* Same pulse sequence for SRBM_SOFT_RESET. */
3660 if (srbm_soft_reset) {
3661 tmp = RREG32(SRBM_SOFT_RESET);
3662 tmp |= srbm_soft_reset;
3663 dev_info(rdev->dev, "SRBM_SOFT_RESET=0x%08X\n", tmp);
3664 WREG32(SRBM_SOFT_RESET, tmp);
3665 tmp = RREG32(SRBM_SOFT_RESET);
3669 tmp &= ~srbm_soft_reset;
3670 WREG32(SRBM_SOFT_RESET, tmp);
3671 tmp = RREG32(SRBM_SOFT_RESET);
3674 /* Wait a little for things to settle down */
3677 evergreen_mc_resume(rdev, &save);
3680 evergreen_print_gpu_status_regs(rdev);
/*
 * evergreen_asic_reset - top-level ASIC reset entry point.
 *
 * Computes the hung-block mask, flags the engine as hung in the BIOS
 * scratch register, performs the soft reset, then re-checks; if the GPU
 * came back clean the scratch flag is cleared.
 * NOTE(review): the conditional logic around the scratch writes and the
 * final return value fall outside this excerpt.
 */
3683 int evergreen_asic_reset(struct radeon_device *rdev)
3687 reset_mask = evergreen_gpu_check_soft_reset(rdev);
3690 r600_set_bios_scratch_engine_hung(rdev, true);
3692 evergreen_gpu_soft_reset(rdev, reset_mask);
/* Re-evaluate after the reset to see whether the hang cleared. */
3694 reset_mask = evergreen_gpu_check_soft_reset(rdev);
3697 r600_set_bios_scratch_engine_hung(rdev, false);
3703 * evergreen_gfx_is_lockup - Check if the GFX engine is locked up
3705 * @rdev: radeon_device pointer
3706 * @ring: radeon_ring structure holding ring information
3708 * Check if the GFX engine is locked up.
3709 * Returns true if the engine appears to be locked up, false if not.
3711 bool evergreen_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
3713 u32 reset_mask = evergreen_gpu_check_soft_reset(rdev);
/* No GFX/compute/CP trouble reported: refresh the lockup tracker so the
 * timeout window restarts, and report "not locked up". */
3715 if (!(reset_mask & (RADEON_RESET_GFX |
3716 RADEON_RESET_COMPUTE |
3717 RADEON_RESET_CP))) {
3718 radeon_ring_lockup_update(ring);
3721 /* force CP activities */
3722 radeon_ring_force_activity(rdev, ring);
/* Let the generic ring helper decide whether progress is being made. */
3723 return radeon_ring_test_lockup(rdev, ring);
3727 * evergreen_dma_is_lockup - Check if the DMA engine is locked up
3729 * @rdev: radeon_device pointer
3730 * @ring: radeon_ring structure holding ring information
3732 * Check if the async DMA engine is locked up.
3733 * Returns true if the engine appears to be locked up, false if not.
3735 bool evergreen_dma_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
3737 u32 reset_mask = evergreen_gpu_check_soft_reset(rdev);
/* DMA engine not flagged: refresh the lockup tracker and report healthy. */
3739 if (!(reset_mask & RADEON_RESET_DMA)) {
3740 radeon_ring_lockup_update(ring);
3743 /* force ring activities */
3744 radeon_ring_force_activity(rdev, ring);
/* Defer the final verdict to the generic ring lockup test. */
3745 return radeon_ring_test_lockup(rdev, ring);
/*
 * evergreen_get_vblank_counter - read the hardware frame counter for @crtc.
 * Out-of-range CRTC indices are rejected (the early-return value for that
 * case falls outside this excerpt).
 */
3750 u32 evergreen_get_vblank_counter(struct radeon_device *rdev, int crtc)
3752 if (crtc >= rdev->num_crtc)
3755 return RREG32(CRTC_STATUS_FRAME_COUNT + crtc_offsets[crtc]);
/*
 * evergreen_disable_interrupt_state - force every interrupt source off.
 *
 * Masks CP ring interrupts (per-ring on Cayman+, single CP_INT_CNTL
 * otherwise), DMA traps, GRBM, per-CRTC vblank/vline and pageflip sources,
 * the DAC autodetect interrupts, and all six HPD lines (preserving only
 * each HPD line's polarity bit).  Used to reach a known-quiet state before
 * (re)programming the interrupt controller.
 */
3758 void evergreen_disable_interrupt_state(struct radeon_device *rdev)
/* Cayman+ has three CP rings and a second DMA engine to silence. */
3762 if (rdev->family >= CHIP_CAYMAN) {
3763 cayman_cp_int_cntl_setup(rdev, 0,
3764 CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
3765 cayman_cp_int_cntl_setup(rdev, 1, 0);
3766 cayman_cp_int_cntl_setup(rdev, 2, 0);
3767 tmp = RREG32(CAYMAN_DMA1_CNTL) & ~TRAP_ENABLE;
3768 WREG32(CAYMAN_DMA1_CNTL, tmp);
3770 WREG32(CP_INT_CNTL, CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
3771 tmp = RREG32(DMA_CNTL) & ~TRAP_ENABLE;
3772 WREG32(DMA_CNTL, tmp);
3773 WREG32(GRBM_INT_CNTL, 0);
/* Per-CRTC interrupt masks; CRTCs 2-5 exist only on larger parts. */
3774 WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
3775 WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
3776 if (rdev->num_crtc >= 4) {
3777 WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
3778 WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
3780 if (rdev->num_crtc >= 6) {
3781 WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
3782 WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
/* Pageflip (GRPH) interrupt controls, same CRTC availability rules. */
3785 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
3786 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
3787 if (rdev->num_crtc >= 4) {
3788 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
3789 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
3791 if (rdev->num_crtc >= 6) {
3792 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
3793 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
3796 /* only one DAC on DCE5 */
3797 if (!ASIC_IS_DCE5(rdev))
3798 WREG32(DACA_AUTODETECT_INT_CONTROL, 0);
3799 WREG32(DACB_AUTODETECT_INT_CONTROL, 0);
/* HPD lines: clear everything except the polarity configuration bit. */
3801 tmp = RREG32(DC_HPD1_INT_CONTROL) & DC_HPDx_INT_POLARITY;
3802 WREG32(DC_HPD1_INT_CONTROL, tmp);
3803 tmp = RREG32(DC_HPD2_INT_CONTROL) & DC_HPDx_INT_POLARITY;
3804 WREG32(DC_HPD2_INT_CONTROL, tmp);
3805 tmp = RREG32(DC_HPD3_INT_CONTROL) & DC_HPDx_INT_POLARITY;
3806 WREG32(DC_HPD3_INT_CONTROL, tmp);
3807 tmp = RREG32(DC_HPD4_INT_CONTROL) & DC_HPDx_INT_POLARITY;
3808 WREG32(DC_HPD4_INT_CONTROL, tmp);
3809 tmp = RREG32(DC_HPD5_INT_CONTROL) & DC_HPDx_INT_POLARITY;
3810 WREG32(DC_HPD5_INT_CONTROL, tmp);
3811 tmp = RREG32(DC_HPD6_INT_CONTROL) & DC_HPDx_INT_POLARITY;
3812 WREG32(DC_HPD6_INT_CONTROL, tmp);
/*
 * evergreen_irq_set - program the interrupt controller from rdev->irq state.
 *
 * Gathers the requested interrupt sources (CP ring(s), async DMA trap(s),
 * per-CRTC vblank/pageflip, six HPD lines, six HDMI/AFMT audio blocks) into
 * local shadow values, then writes them all out in one pass.  Cayman+ parts
 * take per-ring CP controls and have a second DMA engine.  Bails out early
 * if no IRQ handler is installed or the IH ring is disabled.
 */
3816 int evergreen_irq_set(struct radeon_device *rdev)
3818 u32 cp_int_cntl = CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE;
3819 u32 cp_int_cntl1 = 0, cp_int_cntl2 = 0;
3820 u32 crtc1 = 0, crtc2 = 0, crtc3 = 0, crtc4 = 0, crtc5 = 0, crtc6 = 0;
3821 u32 hpd1, hpd2, hpd3, hpd4, hpd5, hpd6;
3822 u32 grbm_int_cntl = 0;
3823 u32 grph1 = 0, grph2 = 0, grph3 = 0, grph4 = 0, grph5 = 0, grph6 = 0;
3824 u32 afmt1 = 0, afmt2 = 0, afmt3 = 0, afmt4 = 0, afmt5 = 0, afmt6 = 0;
3825 u32 dma_cntl, dma_cntl1 = 0;
3827 if (!rdev->irq.installed) {
3828 WARN(1, "Can't enable IRQ/MSI because no handler is installed\n");
3831 /* don't enable anything if the ih is disabled */
3832 if (!rdev->ih.enabled) {
3833 r600_disable_interrupts(rdev);
3834 /* force the active interrupt state to all disabled */
3835 evergreen_disable_interrupt_state(rdev);
/* Start from current HW state with the enable bits masked off; they are
 * re-set below only for the sources userspace asked for. */
3839 hpd1 = RREG32(DC_HPD1_INT_CONTROL) & ~DC_HPDx_INT_EN;
3840 hpd2 = RREG32(DC_HPD2_INT_CONTROL) & ~DC_HPDx_INT_EN;
3841 hpd3 = RREG32(DC_HPD3_INT_CONTROL) & ~DC_HPDx_INT_EN;
3842 hpd4 = RREG32(DC_HPD4_INT_CONTROL) & ~DC_HPDx_INT_EN;
3843 hpd5 = RREG32(DC_HPD5_INT_CONTROL) & ~DC_HPDx_INT_EN;
3844 hpd6 = RREG32(DC_HPD6_INT_CONTROL) & ~DC_HPDx_INT_EN;
3846 afmt1 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
3847 afmt2 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
3848 afmt3 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
3849 afmt4 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
3850 afmt5 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
3851 afmt6 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
3853 dma_cntl = RREG32(DMA_CNTL) & ~TRAP_ENABLE;
/* CP fence/sw interrupts: per-ring on Cayman+, single register otherwise. */
3855 if (rdev->family >= CHIP_CAYMAN) {
3856 /* enable CP interrupts on all rings */
3857 if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
3858 DRM_DEBUG("evergreen_irq_set: sw int gfx\n");
3859 cp_int_cntl |= TIME_STAMP_INT_ENABLE;
3861 if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP1_INDEX])) {
3862 DRM_DEBUG("evergreen_irq_set: sw int cp1\n");
3863 cp_int_cntl1 |= TIME_STAMP_INT_ENABLE;
3865 if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP2_INDEX])) {
3866 DRM_DEBUG("evergreen_irq_set: sw int cp2\n");
3867 cp_int_cntl2 |= TIME_STAMP_INT_ENABLE;
3870 if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
3871 DRM_DEBUG("evergreen_irq_set: sw int gfx\n");
3872 cp_int_cntl |= RB_INT_ENABLE;
3873 cp_int_cntl |= TIME_STAMP_INT_ENABLE;
/* Async DMA trap interrupts (second engine on Cayman+ only). */
3877 if (atomic_read(&rdev->irq.ring_int[R600_RING_TYPE_DMA_INDEX])) {
3878 DRM_DEBUG("r600_irq_set: sw int dma\n");
3879 dma_cntl |= TRAP_ENABLE;
3882 if (rdev->family >= CHIP_CAYMAN) {
3883 dma_cntl1 = RREG32(CAYMAN_DMA1_CNTL) & ~TRAP_ENABLE;
3884 if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_DMA1_INDEX])) {
3885 DRM_DEBUG("r600_irq_set: sw int dma1\n");
3886 dma_cntl1 |= TRAP_ENABLE;
/* Vblank: enabled if either drm vblank or a pending pageflip needs it. */
3890 if (rdev->irq.crtc_vblank_int[0] ||
3891 atomic_read(&rdev->irq.pflip[0])) {
3892 DRM_DEBUG("evergreen_irq_set: vblank 0\n");
3893 crtc1 |= VBLANK_INT_MASK;
3895 if (rdev->irq.crtc_vblank_int[1] ||
3896 atomic_read(&rdev->irq.pflip[1])) {
3897 DRM_DEBUG("evergreen_irq_set: vblank 1\n");
3898 crtc2 |= VBLANK_INT_MASK;
3900 if (rdev->irq.crtc_vblank_int[2] ||
3901 atomic_read(&rdev->irq.pflip[2])) {
3902 DRM_DEBUG("evergreen_irq_set: vblank 2\n");
3903 crtc3 |= VBLANK_INT_MASK;
3905 if (rdev->irq.crtc_vblank_int[3] ||
3906 atomic_read(&rdev->irq.pflip[3])) {
3907 DRM_DEBUG("evergreen_irq_set: vblank 3\n");
3908 crtc4 |= VBLANK_INT_MASK;
3910 if (rdev->irq.crtc_vblank_int[4] ||
3911 atomic_read(&rdev->irq.pflip[4])) {
3912 DRM_DEBUG("evergreen_irq_set: vblank 4\n");
3913 crtc5 |= VBLANK_INT_MASK;
3915 if (rdev->irq.crtc_vblank_int[5] ||
3916 atomic_read(&rdev->irq.pflip[5])) {
3917 DRM_DEBUG("evergreen_irq_set: vblank 5\n");
3918 crtc6 |= VBLANK_INT_MASK;
/* Hot-plug detect lines requested by the connector code. */
3920 if (rdev->irq.hpd[0]) {
3921 DRM_DEBUG("evergreen_irq_set: hpd 1\n");
3922 hpd1 |= DC_HPDx_INT_EN;
3924 if (rdev->irq.hpd[1]) {
3925 DRM_DEBUG("evergreen_irq_set: hpd 2\n");
3926 hpd2 |= DC_HPDx_INT_EN;
3928 if (rdev->irq.hpd[2]) {
3929 DRM_DEBUG("evergreen_irq_set: hpd 3\n");
3930 hpd3 |= DC_HPDx_INT_EN;
3932 if (rdev->irq.hpd[3]) {
3933 DRM_DEBUG("evergreen_irq_set: hpd 4\n");
3934 hpd4 |= DC_HPDx_INT_EN;
3936 if (rdev->irq.hpd[4]) {
3937 DRM_DEBUG("evergreen_irq_set: hpd 5\n");
3938 hpd5 |= DC_HPDx_INT_EN;
3940 if (rdev->irq.hpd[5]) {
3941 DRM_DEBUG("evergreen_irq_set: hpd 6\n");
3942 hpd6 |= DC_HPDx_INT_EN;
/* HDMI/AFMT audio format-change triggers. */
3944 if (rdev->irq.afmt[0]) {
3945 DRM_DEBUG("evergreen_irq_set: hdmi 0\n");
3946 afmt1 |= AFMT_AZ_FORMAT_WTRIG_MASK;
3948 if (rdev->irq.afmt[1]) {
3949 DRM_DEBUG("evergreen_irq_set: hdmi 1\n");
3950 afmt2 |= AFMT_AZ_FORMAT_WTRIG_MASK;
3952 if (rdev->irq.afmt[2]) {
3953 DRM_DEBUG("evergreen_irq_set: hdmi 2\n");
3954 afmt3 |= AFMT_AZ_FORMAT_WTRIG_MASK;
3956 if (rdev->irq.afmt[3]) {
3957 DRM_DEBUG("evergreen_irq_set: hdmi 3\n");
3958 afmt4 |= AFMT_AZ_FORMAT_WTRIG_MASK;
3960 if (rdev->irq.afmt[4]) {
3961 DRM_DEBUG("evergreen_irq_set: hdmi 4\n");
3962 afmt5 |= AFMT_AZ_FORMAT_WTRIG_MASK;
3964 if (rdev->irq.afmt[5]) {
3965 DRM_DEBUG("evergreen_irq_set: hdmi 5\n");
3966 afmt6 |= AFMT_AZ_FORMAT_WTRIG_MASK;
/* Commit all shadow values to the hardware. */
3969 if (rdev->family >= CHIP_CAYMAN) {
3970 cayman_cp_int_cntl_setup(rdev, 0, cp_int_cntl);
3971 cayman_cp_int_cntl_setup(rdev, 1, cp_int_cntl1);
3972 cayman_cp_int_cntl_setup(rdev, 2, cp_int_cntl2);
3974 WREG32(CP_INT_CNTL, cp_int_cntl);
3976 WREG32(DMA_CNTL, dma_cntl);
3978 if (rdev->family >= CHIP_CAYMAN)
3979 WREG32(CAYMAN_DMA1_CNTL, dma_cntl1);
3981 WREG32(GRBM_INT_CNTL, grbm_int_cntl);
3983 WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, crtc1);
3984 WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, crtc2);
3985 if (rdev->num_crtc >= 4) {
3986 WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, crtc3);
3987 WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, crtc4);
3989 if (rdev->num_crtc >= 6) {
3990 WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, crtc5);
3991 WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, crtc6);
3994 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, grph1);
3995 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, grph2);
3996 if (rdev->num_crtc >= 4) {
3997 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, grph3);
3998 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, grph4);
4000 if (rdev->num_crtc >= 6) {
4001 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, grph5);
4002 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, grph6);
4005 WREG32(DC_HPD1_INT_CONTROL, hpd1);
4006 WREG32(DC_HPD2_INT_CONTROL, hpd2);
4007 WREG32(DC_HPD3_INT_CONTROL, hpd3);
4008 WREG32(DC_HPD4_INT_CONTROL, hpd4);
4009 WREG32(DC_HPD5_INT_CONTROL, hpd5);
4010 WREG32(DC_HPD6_INT_CONTROL, hpd6);
4012 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, afmt1);
4013 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, afmt2);
4014 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, afmt3);
4015 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, afmt4);
4016 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, afmt5);
4017 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, afmt6);
4022 static void evergreen_irq_ack(struct radeon_device *rdev)
4026 rdev->irq.stat_regs.evergreen.disp_int = RREG32(DISP_INTERRUPT_STATUS);
4027 rdev->irq.stat_regs.evergreen.disp_int_cont = RREG32(DISP_INTERRUPT_STATUS_CONTINUE);
4028 rdev->irq.stat_regs.evergreen.disp_int_cont2 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE2);
4029 rdev->irq.stat_regs.evergreen.disp_int_cont3 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE3);
4030 rdev->irq.stat_regs.evergreen.disp_int_cont4 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE4);
4031 rdev->irq.stat_regs.evergreen.disp_int_cont5 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE5);
4032 rdev->irq.stat_regs.evergreen.d1grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
4033 rdev->irq.stat_regs.evergreen.d2grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
4034 if (rdev->num_crtc >= 4) {
4035 rdev->irq.stat_regs.evergreen.d3grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
4036 rdev->irq.stat_regs.evergreen.d4grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
4038 if (rdev->num_crtc >= 6) {
4039 rdev->irq.stat_regs.evergreen.d5grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
4040 rdev->irq.stat_regs.evergreen.d6grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);
4043 rdev->irq.stat_regs.evergreen.afmt_status1 = RREG32(AFMT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
4044 rdev->irq.stat_regs.evergreen.afmt_status2 = RREG32(AFMT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
4045 rdev->irq.stat_regs.evergreen.afmt_status3 = RREG32(AFMT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
4046 rdev->irq.stat_regs.evergreen.afmt_status4 = RREG32(AFMT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
4047 rdev->irq.stat_regs.evergreen.afmt_status5 = RREG32(AFMT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
4048 rdev->irq.stat_regs.evergreen.afmt_status6 = RREG32(AFMT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);
4050 if (rdev->irq.stat_regs.evergreen.d1grph_int & GRPH_PFLIP_INT_OCCURRED)
4051 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4052 if (rdev->irq.stat_regs.evergreen.d2grph_int & GRPH_PFLIP_INT_OCCURRED)
4053 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4054 if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT)
4055 WREG32(VBLANK_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VBLANK_ACK);
4056 if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT)
4057 WREG32(VLINE_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VLINE_ACK);
4058 if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT)
4059 WREG32(VBLANK_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VBLANK_ACK);
4060 if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT)
4061 WREG32(VLINE_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VLINE_ACK);
4063 if (rdev->num_crtc >= 4) {
4064 if (rdev->irq.stat_regs.evergreen.d3grph_int & GRPH_PFLIP_INT_OCCURRED)
4065 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4066 if (rdev->irq.stat_regs.evergreen.d4grph_int & GRPH_PFLIP_INT_OCCURRED)
4067 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4068 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT)
4069 WREG32(VBLANK_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VBLANK_ACK);
4070 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT)
4071 WREG32(VLINE_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VLINE_ACK);
4072 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT)
4073 WREG32(VBLANK_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VBLANK_ACK);
4074 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT)
4075 WREG32(VLINE_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VLINE_ACK);
4078 if (rdev->num_crtc >= 6) {
4079 if (rdev->irq.stat_regs.evergreen.d5grph_int & GRPH_PFLIP_INT_OCCURRED)
4080 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4081 if (rdev->irq.stat_regs.evergreen.d6grph_int & GRPH_PFLIP_INT_OCCURRED)
4082 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4083 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT)
4084 WREG32(VBLANK_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VBLANK_ACK);
4085 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT)
4086 WREG32(VLINE_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VLINE_ACK);
4087 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT)
4088 WREG32(VBLANK_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VBLANK_ACK);
4089 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT)
4090 WREG32(VLINE_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VLINE_ACK);
4093 if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT) {
4094 tmp = RREG32(DC_HPD1_INT_CONTROL);
4095 tmp |= DC_HPDx_INT_ACK;
4096 WREG32(DC_HPD1_INT_CONTROL, tmp);
4098 if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT) {
4099 tmp = RREG32(DC_HPD2_INT_CONTROL);
4100 tmp |= DC_HPDx_INT_ACK;
4101 WREG32(DC_HPD2_INT_CONTROL, tmp);
4103 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT) {
4104 tmp = RREG32(DC_HPD3_INT_CONTROL);
4105 tmp |= DC_HPDx_INT_ACK;
4106 WREG32(DC_HPD3_INT_CONTROL, tmp);
4108 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT) {
4109 tmp = RREG32(DC_HPD4_INT_CONTROL);
4110 tmp |= DC_HPDx_INT_ACK;
4111 WREG32(DC_HPD4_INT_CONTROL, tmp);
4113 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT) {
4114 tmp = RREG32(DC_HPD5_INT_CONTROL);
4115 tmp |= DC_HPDx_INT_ACK;
4116 WREG32(DC_HPD5_INT_CONTROL, tmp);
4118 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT) {
4119 tmp = RREG32(DC_HPD5_INT_CONTROL);
4120 tmp |= DC_HPDx_INT_ACK;
4121 WREG32(DC_HPD6_INT_CONTROL, tmp);
4123 if (rdev->irq.stat_regs.evergreen.afmt_status1 & AFMT_AZ_FORMAT_WTRIG) {
4124 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET);
4125 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4126 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, tmp);
4128 if (rdev->irq.stat_regs.evergreen.afmt_status2 & AFMT_AZ_FORMAT_WTRIG) {
4129 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET);
4130 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4131 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, tmp);
4133 if (rdev->irq.stat_regs.evergreen.afmt_status3 & AFMT_AZ_FORMAT_WTRIG) {
4134 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET);
4135 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4136 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, tmp);
4138 if (rdev->irq.stat_regs.evergreen.afmt_status4 & AFMT_AZ_FORMAT_WTRIG) {
4139 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET);
4140 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4141 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, tmp);
4143 if (rdev->irq.stat_regs.evergreen.afmt_status5 & AFMT_AZ_FORMAT_WTRIG) {
4144 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET);
4145 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4146 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, tmp);
4148 if (rdev->irq.stat_regs.evergreen.afmt_status6 & AFMT_AZ_FORMAT_WTRIG) {
4149 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET);
4150 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4151 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, tmp);
/*
 * evergreen_irq_disable - fully quiesce interrupt delivery
 *
 * @rdev: radeon_device pointer
 *
 * Masks interrupts at the IH, acks anything still pending so no stale
 * status survives, then programs the per-source disable state.
 * NOTE(review): a delay between disabling and acking (present upstream)
 * appears to have been dropped from this copy — confirm against the
 * original file.
 */
static void evergreen_irq_disable(struct radeon_device *rdev)
/* master interrupt disable in the IH */
r600_disable_interrupts(rdev);
/* Wait and acknowledge irq */
evergreen_irq_ack(rdev);
/* leave every source (CRTC, HPD, AFMT, ...) masked */
evergreen_disable_interrupt_state(rdev);
/*
 * evergreen_irq_suspend - disable interrupts for suspend
 *
 * @rdev: radeon_device pointer
 *
 * Disables and acks all interrupt sources, then stops the RLC so no
 * interrupt traffic is generated while the device is suspended.
 */
void evergreen_irq_suspend(struct radeon_device *rdev)
evergreen_irq_disable(rdev);
/* stop the RLC (interrupt/ucode controller) after sources are masked */
r600_rlc_stop(rdev);
4170 static u32 evergreen_get_ih_wptr(struct radeon_device *rdev)
4174 if (rdev->wb.enabled)
4175 wptr = le32_to_cpu(rdev->wb.wb[R600_WB_IH_WPTR_OFFSET/4]);
4177 wptr = RREG32(IH_RB_WPTR);
4179 if (wptr & RB_OVERFLOW) {
4180 /* When a ring buffer overflow happen start parsing interrupt
4181 * from the last not overwritten vector (wptr + 16). Hopefully
4182 * this should allow us to catchup.
4184 dev_warn(rdev->dev, "IH ring buffer overflow (0x%08X, %d, %d)\n",
4185 wptr, rdev->ih.rptr, (wptr + 16) + rdev->ih.ptr_mask);
4186 rdev->ih.rptr = (wptr + 16) & rdev->ih.ptr_mask;
4187 tmp = RREG32(IH_RB_CNTL);
4188 tmp |= IH_WPTR_OVERFLOW_CLEAR;
4189 WREG32(IH_RB_CNTL, tmp);
4191 return (wptr & rdev->ih.ptr_mask);
/*
 * evergreen_irq_process - drain and dispatch the IH ring
 *
 * @rdev: radeon_device pointer
 *
 * Walks the interrupt handler (IH) ring from rptr to wptr and dispatches
 * each vector by source id: CRTC D1-D6 vblank/vline (src_id 1-6), page
 * flips, HPD hotplug (src_id 42), AFMT/HDMI audio triggers, VM
 * protection faults, CP/DMA fence completions, and GUI idle.  Hotplug
 * and audio follow-up work is deferred to workqueues at the end.
 *
 * NOTE(review): this copy appears to have lost structural lines (the
 * switch statement headers, braces, break/return statements and the
 * restart label); reconcile the exact control flow against the upstream
 * radeon evergreen.c before relying on it.
 */
int evergreen_irq_process(struct radeon_device *rdev)
u32 src_id, src_data;
bool queue_hotplug = false;
bool queue_hdmi = false;
if (!rdev->ih.enabled || rdev->shutdown)
wptr = evergreen_get_ih_wptr(rdev);
/* is somebody else already processing irqs? */
if (atomic_xchg(&rdev->ih.lock, 1))
rptr = rdev->ih.rptr;
DRM_DEBUG("r600_irq_process start: rptr %d, wptr %d\n", rptr, wptr);
/* Order reading of wptr vs. reading of IH ring data */
/* display interrupts */
evergreen_irq_ack(rdev);
while (rptr != wptr) {
/* wptr/rptr are in bytes! */
ring_index = rptr / 4;
/* each IH vector is 4 dwords: [0]=src_id, [1]=src_data, ... */
src_id = le32_to_cpu(rdev->ih.ring[ring_index]) & 0xff;
src_data = le32_to_cpu(rdev->ih.ring[ring_index + 1]) & 0xfffffff;
case 1: /* D1 vblank/vline */
case 0: /* D1 vblank */
if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT) {
if (rdev->irq.crtc_vblank_int[0]) {
drm_handle_vblank(rdev->ddev, 0);
rdev->pm.vblank_sync = true;
wake_up(&rdev->irq.vblank_queue);
if (atomic_read(&rdev->irq.pflip[0]))
radeon_crtc_handle_flip(rdev, 0);
/* clear the cached status so the event is handled once */
rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VBLANK_INTERRUPT;
DRM_DEBUG("IH: D1 vblank\n");
case 1: /* D1 vline */
if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT) {
rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VLINE_INTERRUPT;
DRM_DEBUG("IH: D1 vline\n");
DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
case 2: /* D2 vblank/vline */
case 0: /* D2 vblank */
if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT) {
if (rdev->irq.crtc_vblank_int[1]) {
drm_handle_vblank(rdev->ddev, 1);
rdev->pm.vblank_sync = true;
wake_up(&rdev->irq.vblank_queue);
if (atomic_read(&rdev->irq.pflip[1]))
radeon_crtc_handle_flip(rdev, 1);
rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VBLANK_INTERRUPT;
DRM_DEBUG("IH: D2 vblank\n");
case 1: /* D2 vline */
if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT) {
rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VLINE_INTERRUPT;
DRM_DEBUG("IH: D2 vline\n");
DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
case 3: /* D3 vblank/vline */
case 0: /* D3 vblank */
if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT) {
if (rdev->irq.crtc_vblank_int[2]) {
drm_handle_vblank(rdev->ddev, 2);
rdev->pm.vblank_sync = true;
wake_up(&rdev->irq.vblank_queue);
if (atomic_read(&rdev->irq.pflip[2]))
radeon_crtc_handle_flip(rdev, 2);
rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VBLANK_INTERRUPT;
DRM_DEBUG("IH: D3 vblank\n");
case 1: /* D3 vline */
if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT) {
rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VLINE_INTERRUPT;
DRM_DEBUG("IH: D3 vline\n");
DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
case 4: /* D4 vblank/vline */
case 0: /* D4 vblank */
if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT) {
if (rdev->irq.crtc_vblank_int[3]) {
drm_handle_vblank(rdev->ddev, 3);
rdev->pm.vblank_sync = true;
wake_up(&rdev->irq.vblank_queue);
if (atomic_read(&rdev->irq.pflip[3]))
radeon_crtc_handle_flip(rdev, 3);
rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VBLANK_INTERRUPT;
DRM_DEBUG("IH: D4 vblank\n");
case 1: /* D4 vline */
if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT) {
rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VLINE_INTERRUPT;
DRM_DEBUG("IH: D4 vline\n");
DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
case 5: /* D5 vblank/vline */
case 0: /* D5 vblank */
if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT) {
if (rdev->irq.crtc_vblank_int[4]) {
drm_handle_vblank(rdev->ddev, 4);
rdev->pm.vblank_sync = true;
wake_up(&rdev->irq.vblank_queue);
if (atomic_read(&rdev->irq.pflip[4]))
radeon_crtc_handle_flip(rdev, 4);
rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VBLANK_INTERRUPT;
DRM_DEBUG("IH: D5 vblank\n");
case 1: /* D5 vline */
if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT) {
rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VLINE_INTERRUPT;
DRM_DEBUG("IH: D5 vline\n");
DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
case 6: /* D6 vblank/vline */
case 0: /* D6 vblank */
if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT) {
if (rdev->irq.crtc_vblank_int[5]) {
drm_handle_vblank(rdev->ddev, 5);
rdev->pm.vblank_sync = true;
wake_up(&rdev->irq.vblank_queue);
if (atomic_read(&rdev->irq.pflip[5]))
radeon_crtc_handle_flip(rdev, 5);
rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VBLANK_INTERRUPT;
DRM_DEBUG("IH: D6 vblank\n");
case 1: /* D6 vline */
if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT) {
rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VLINE_INTERRUPT;
DRM_DEBUG("IH: D6 vline\n");
DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
case 42: /* HPD hotplug */
/* one sub-case per DC_HPDx pad; a set bit queues the hotplug work */
if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT) {
rdev->irq.stat_regs.evergreen.disp_int &= ~DC_HPD1_INTERRUPT;
queue_hotplug = true;
DRM_DEBUG("IH: HPD1\n");
if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT) {
rdev->irq.stat_regs.evergreen.disp_int_cont &= ~DC_HPD2_INTERRUPT;
queue_hotplug = true;
DRM_DEBUG("IH: HPD2\n");
if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT) {
rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~DC_HPD3_INTERRUPT;
queue_hotplug = true;
DRM_DEBUG("IH: HPD3\n");
if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT) {
rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~DC_HPD4_INTERRUPT;
queue_hotplug = true;
DRM_DEBUG("IH: HPD4\n");
if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT) {
rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~DC_HPD5_INTERRUPT;
queue_hotplug = true;
DRM_DEBUG("IH: HPD5\n");
if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT) {
rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~DC_HPD6_INTERRUPT;
queue_hotplug = true;
DRM_DEBUG("IH: HPD6\n");
DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
/* AFMT/HDMI audio-format write triggers: a set bit queues the
 * audio work (case label elided in this view — presumably src_id 44) */
if (rdev->irq.stat_regs.evergreen.afmt_status1 & AFMT_AZ_FORMAT_WTRIG) {
rdev->irq.stat_regs.evergreen.afmt_status1 &= ~AFMT_AZ_FORMAT_WTRIG;
DRM_DEBUG("IH: HDMI0\n");
if (rdev->irq.stat_regs.evergreen.afmt_status2 & AFMT_AZ_FORMAT_WTRIG) {
rdev->irq.stat_regs.evergreen.afmt_status2 &= ~AFMT_AZ_FORMAT_WTRIG;
DRM_DEBUG("IH: HDMI1\n");
if (rdev->irq.stat_regs.evergreen.afmt_status3 & AFMT_AZ_FORMAT_WTRIG) {
rdev->irq.stat_regs.evergreen.afmt_status3 &= ~AFMT_AZ_FORMAT_WTRIG;
DRM_DEBUG("IH: HDMI2\n");
if (rdev->irq.stat_regs.evergreen.afmt_status4 & AFMT_AZ_FORMAT_WTRIG) {
rdev->irq.stat_regs.evergreen.afmt_status4 &= ~AFMT_AZ_FORMAT_WTRIG;
DRM_DEBUG("IH: HDMI3\n");
if (rdev->irq.stat_regs.evergreen.afmt_status5 & AFMT_AZ_FORMAT_WTRIG) {
rdev->irq.stat_regs.evergreen.afmt_status5 &= ~AFMT_AZ_FORMAT_WTRIG;
DRM_DEBUG("IH: HDMI4\n");
if (rdev->irq.stat_regs.evergreen.afmt_status6 & AFMT_AZ_FORMAT_WTRIG) {
rdev->irq.stat_regs.evergreen.afmt_status6 &= ~AFMT_AZ_FORMAT_WTRIG;
DRM_DEBUG("IH: HDMI5\n");
DRM_ERROR("Unhandled interrupt: %d %d\n", src_id, src_data);
/* UVD fence completion (case label elided in this view) */
DRM_DEBUG("IH: UVD int: 0x%08x\n", src_data);
radeon_fence_process(rdev, R600_RING_TYPE_UVD_INDEX);
/* VM protection fault: dump and reset fault address/status
 * (case labels elided in this view) */
dev_err(rdev->dev, "GPU fault detected: %d 0x%08x\n", src_id, src_data);
dev_err(rdev->dev, " VM_CONTEXT1_PROTECTION_FAULT_ADDR 0x%08X\n",
RREG32(VM_CONTEXT1_PROTECTION_FAULT_ADDR));
dev_err(rdev->dev, " VM_CONTEXT1_PROTECTION_FAULT_STATUS 0x%08X\n",
RREG32(VM_CONTEXT1_PROTECTION_FAULT_STATUS));
/* reset addr and status */
WREG32_P(VM_CONTEXT1_CNTL2, 1, ~1);
case 176: /* CP_INT in ring buffer */
case 177: /* CP_INT in IB1 */
case 178: /* CP_INT in IB2 */
DRM_DEBUG("IH: CP int: 0x%08x\n", src_data);
radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
case 181: /* CP EOP event */
DRM_DEBUG("IH: CP EOP\n");
/* Cayman+ has three CP rings; src_data selects which one fenced */
if (rdev->family >= CHIP_CAYMAN) {
radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP1_INDEX);
radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP2_INDEX);
radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
case 224: /* DMA trap event */
DRM_DEBUG("IH: DMA trap\n");
radeon_fence_process(rdev, R600_RING_TYPE_DMA_INDEX);
case 233: /* GUI IDLE */
DRM_DEBUG("IH: GUI idle\n");
case 244: /* DMA trap event */
if (rdev->family >= CHIP_CAYMAN) {
DRM_DEBUG("IH: DMA1 trap\n");
radeon_fence_process(rdev, CAYMAN_RING_TYPE_DMA1_INDEX);
DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
/* wptr/rptr are in bytes! */
rptr &= rdev->ih.ptr_mask;
/* kick deferred work queued while draining the ring */
schedule_work(&rdev->hotplug_work);
schedule_work(&rdev->audio_work);
rdev->ih.rptr = rptr;
WREG32(IH_RB_RPTR, rdev->ih.rptr);
atomic_set(&rdev->ih.lock, 0);
/* make sure wptr hasn't changed while processing */
wptr = evergreen_get_ih_wptr(rdev);
4558 * evergreen_dma_fence_ring_emit - emit a fence on the DMA ring
4560 * @rdev: radeon_device pointer
4561 * @fence: radeon fence object
4563 * Add a DMA fence packet to the ring to write
4564 * the fence seq number and DMA trap packet to generate
4565 * an interrupt if needed (evergreen-SI).
4567 void evergreen_dma_fence_ring_emit(struct radeon_device *rdev,
4568 struct radeon_fence *fence)
4570 struct radeon_ring *ring = &rdev->ring[fence->ring];
4571 u64 addr = rdev->fence_drv[fence->ring].gpu_addr;
4572 /* write the fence */
4573 radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_FENCE, 0, 0));
4574 radeon_ring_write(ring, addr & 0xfffffffc);
4575 radeon_ring_write(ring, (upper_32_bits(addr) & 0xff));
4576 radeon_ring_write(ring, fence->seq);
4577 /* generate an interrupt */
4578 radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_TRAP, 0, 0));
4580 radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_SRBM_WRITE, 0, 0));
4581 radeon_ring_write(ring, (0xf << 16) | (HDP_MEM_COHERENCY_FLUSH_CNTL >> 2));
4582 radeon_ring_write(ring, 1);
4586 * evergreen_dma_ring_ib_execute - schedule an IB on the DMA engine
4588 * @rdev: radeon_device pointer
4589 * @ib: IB object to schedule
4591 * Schedule an IB in the DMA ring (evergreen).
4593 void evergreen_dma_ring_ib_execute(struct radeon_device *rdev,
4594 struct radeon_ib *ib)
4596 struct radeon_ring *ring = &rdev->ring[ib->ring];
4598 if (rdev->wb.enabled) {
4599 u32 next_rptr = ring->wptr + 4;
4600 while ((next_rptr & 7) != 5)
4603 radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_WRITE, 0, 1));
4604 radeon_ring_write(ring, ring->next_rptr_gpu_addr & 0xfffffffc);
4605 radeon_ring_write(ring, upper_32_bits(ring->next_rptr_gpu_addr) & 0xff);
4606 radeon_ring_write(ring, next_rptr);
4609 /* The indirect buffer packet must end on an 8 DW boundary in the DMA ring.
4610 * Pad as necessary with NOPs.
4612 while ((ring->wptr & 7) != 5)
4613 radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_NOP, 0, 0));
4614 radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_INDIRECT_BUFFER, 0, 0));
4615 radeon_ring_write(ring, (ib->gpu_addr & 0xFFFFFFE0));
4616 radeon_ring_write(ring, (ib->length_dw << 12) | (upper_32_bits(ib->gpu_addr) & 0xFF));
4621 * evergreen_copy_dma - copy pages using the DMA engine
4623 * @rdev: radeon_device pointer
4624 * @src_offset: src GPU address
4625 * @dst_offset: dst GPU address
4626 * @num_gpu_pages: number of GPU pages to xfer
4627 * @fence: radeon fence object
4629 * Copy GPU paging using the DMA engine (evergreen-cayman).
4630 * Used by the radeon ttm implementation to move pages if
4631 * registered as the asic copy callback.
4633 int evergreen_copy_dma(struct radeon_device *rdev,
4634 uint64_t src_offset, uint64_t dst_offset,
4635 unsigned num_gpu_pages,
4636 struct radeon_fence **fence)
4638 struct radeon_semaphore *sem = NULL;
4639 int ring_index = rdev->asic->copy.dma_ring_index;
4640 struct radeon_ring *ring = &rdev->ring[ring_index];
4641 u32 size_in_dw, cur_size_in_dw;
4645 r = radeon_semaphore_create(rdev, &sem);
4647 DRM_ERROR("radeon: moving bo (%d).\n", r);
4651 size_in_dw = (num_gpu_pages << RADEON_GPU_PAGE_SHIFT) / 4;
4652 num_loops = DIV_ROUND_UP(size_in_dw, 0xfffff);
4653 r = radeon_ring_lock(rdev, ring, num_loops * 5 + 11);
4655 DRM_ERROR("radeon: moving bo (%d).\n", r);
4656 radeon_semaphore_free(rdev, &sem, NULL);
4660 if (radeon_fence_need_sync(*fence, ring->idx)) {
4661 radeon_semaphore_sync_rings(rdev, sem, (*fence)->ring,
4663 radeon_fence_note_sync(*fence, ring->idx);
4665 radeon_semaphore_free(rdev, &sem, NULL);
4668 for (i = 0; i < num_loops; i++) {
4669 cur_size_in_dw = size_in_dw;
4670 if (cur_size_in_dw > 0xFFFFF)
4671 cur_size_in_dw = 0xFFFFF;
4672 size_in_dw -= cur_size_in_dw;
4673 radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_COPY, 0, cur_size_in_dw));
4674 radeon_ring_write(ring, dst_offset & 0xfffffffc);
4675 radeon_ring_write(ring, src_offset & 0xfffffffc);
4676 radeon_ring_write(ring, upper_32_bits(dst_offset) & 0xff);
4677 radeon_ring_write(ring, upper_32_bits(src_offset) & 0xff);
4678 src_offset += cur_size_in_dw * 4;
4679 dst_offset += cur_size_in_dw * 4;
4682 r = radeon_fence_emit(rdev, fence, ring->idx);
4684 radeon_ring_unlock_undo(rdev, ring);
4688 radeon_ring_unlock_commit(rdev, ring);
4689 radeon_semaphore_free(rdev, &sem, *fence);
/*
 * evergreen_startup - bring the ASIC up to a working state
 *
 * @rdev: radeon_device pointer
 *
 * Ordered bring-up: PCIe gen2, MC programming, microcode load
 * (NI path for DCE5 parts, r600 path otherwise), VRAM scratch, GART
 * (or AGP), GPU init, blitter, writeback, fence rings (CP/DMA/UVD),
 * IRQs, ring and engine resume, IB pool, audio.
 * NOTE(review): the 'if (r) return r;' guards after most calls appear
 * to have been dropped from this copy — reconcile against upstream.
 */
static int evergreen_startup(struct radeon_device *rdev)
struct radeon_ring *ring;
/* enable pcie gen2 link */
evergreen_pcie_gen2_enable(rdev);
evergreen_mc_program(rdev);
/* DCE5 (NI) parts also need MC microcode; older parts use the r600 path */
if (ASIC_IS_DCE5(rdev)) {
if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw || !rdev->mc_fw) {
r = ni_init_microcode(rdev);
DRM_ERROR("Failed to load firmware!\n");
r = ni_mc_load_microcode(rdev);
DRM_ERROR("Failed to load MC firmware!\n");
if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw) {
r = r600_init_microcode(rdev);
DRM_ERROR("Failed to load firmware!\n");
r = r600_vram_scratch_init(rdev);
/* AGP parts use the AGP aperture, PCIe parts use the GART */
if (rdev->flags & RADEON_IS_AGP) {
evergreen_agp_enable(rdev);
r = evergreen_pcie_gart_enable(rdev);
evergreen_gpu_init(rdev);
/* blitter init failure is non-fatal: fall back to memcpy copies */
r = evergreen_blit_init(rdev);
r600_blit_fini(rdev);
rdev->asic->copy.copy = NULL;
dev_warn(rdev->dev, "failed blitter (%d) falling back to memcpy\n", r);
/* allocate wb buffer */
r = radeon_wb_init(rdev);
r = radeon_fence_driver_start_ring(rdev, RADEON_RING_TYPE_GFX_INDEX);
dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r);
r = radeon_fence_driver_start_ring(rdev, R600_RING_TYPE_DMA_INDEX);
dev_err(rdev->dev, "failed initializing DMA fences (%d).\n", r);
/* UVD is optional: on failure its ring size is zeroed and skipped below */
r = rv770_uvd_resume(rdev);
r = radeon_fence_driver_start_ring(rdev,
R600_RING_TYPE_UVD_INDEX);
dev_err(rdev->dev, "UVD fences init error (%d).\n", r);
rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size = 0;
/* Enable IRQ */
if (!rdev->irq.installed) {
r = radeon_irq_kms_init(rdev);
r = r600_irq_init(rdev);
DRM_ERROR("radeon: IH init failed (%d).\n", r);
radeon_irq_kms_fini(rdev);
evergreen_irq_set(rdev);
ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP_RPTR_OFFSET,
R600_CP_RB_RPTR, R600_CP_RB_WPTR,
0, 0xfffff, RADEON_CP_PACKET2);
ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX];
r = radeon_ring_init(rdev, ring, ring->ring_size, R600_WB_DMA_RPTR_OFFSET,
DMA_RB_RPTR, DMA_RB_WPTR,
2, 0x3fffc, DMA_PACKET(DMA_PACKET_NOP, 0, 0));
r = evergreen_cp_load_microcode(rdev);
r = evergreen_cp_resume(rdev);
r = r600_dma_resume(rdev);
/* only start the UVD ring if resume above left it with a size */
ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX];
if (ring->ring_size) {
r = radeon_ring_init(rdev, ring, ring->ring_size,
R600_WB_UVD_RPTR_OFFSET,
UVD_RBC_RB_RPTR, UVD_RBC_RB_WPTR,
0, 0xfffff, RADEON_CP_PACKET2);
r = r600_uvd_init(rdev);
DRM_ERROR("radeon: error initializing UVD (%d).\n", r);
r = radeon_ib_pool_init(rdev);
dev_err(rdev->dev, "IB initialization failed (%d).\n", r);
r = r600_audio_init(rdev);
DRM_ERROR("radeon: audio init failed\n");
/*
 * evergreen_resume - resume the ASIC after suspend
 *
 * @rdev: radeon_device pointer
 *
 * Resets the GPU, re-posts it via the ATOM BIOS init tables, reloads
 * the golden register settings, then reruns the full startup sequence.
 * Returns the result of evergreen_startup().
 */
int evergreen_resume(struct radeon_device *rdev)
/* reset the asic, the gfx blocks are often in a bad state
 * after the driver is unloaded or after a resume
 */
if (radeon_asic_reset(rdev))
dev_warn(rdev->dev, "GPU reset failed !\n");
/* Do not reset GPU before posting, on rv770 hw unlike on r500 hw,
 * posting will perform necessary task to bring back GPU into good
 * shape.
 */
atom_asic_init(rdev->mode_info.atom_context);
/* init golden registers */
evergreen_init_golden_registers(rdev);
/* mark acceleration usable before startup; cleared again on failure */
rdev->accel_working = true;
r = evergreen_startup(rdev);
DRM_ERROR("evergreen startup failed on resume\n");
rdev->accel_working = false;
/*
 * evergreen_suspend - quiesce the ASIC for suspend
 *
 * @rdev: radeon_device pointer
 *
 * Stops audio and UVD, halts the DMA engine, disables interrupts and
 * writeback, then tears down the GART mapping.
 * NOTE(review): a CP stop call between the UVD and DMA stops (present
 * upstream) appears to have been dropped from this copy — confirm.
 */
int evergreen_suspend(struct radeon_device *rdev)
r600_audio_fini(rdev);
r600_uvd_stop(rdev);
radeon_uvd_suspend(rdev);
r600_dma_stop(rdev);
evergreen_irq_suspend(rdev);
radeon_wb_disable(rdev);
evergreen_pcie_gart_disable(rdev);
/* Plan is to move initialization in that function and use
 * helper function so that radeon_device_init pretty much
 * do nothing more than calling asic specific function. This
 * should also allow to remove a bunch of callback function
 */
/*
 * evergreen_init - one-time driver-load initialization
 *
 * @rdev: radeon_device pointer
 *
 * Reads and validates the (ATOM) BIOS, resets and posts the card,
 * initializes scratch/surface registers, clocks, fences, AGP/MC,
 * the BO manager, the CP/DMA/UVD rings, the IH ring and the GART,
 * then calls evergreen_startup().  On startup failure acceleration is
 * torn down and disabled rather than failing the load.
 * NOTE(review): error-return lines after several calls appear to have
 * been dropped from this copy — reconcile against upstream.
 */
int evergreen_init(struct radeon_device *rdev)
if (!radeon_get_bios(rdev)) {
if (ASIC_IS_AVIVO(rdev))
/* Must be an ATOMBIOS */
if (!rdev->is_atom_bios) {
dev_err(rdev->dev, "Expecting atombios for evergreen GPU\n");
r = radeon_atombios_init(rdev);
/* reset the asic, the gfx blocks are often in a bad state
 * after the driver is unloaded or after a resume
 */
if (radeon_asic_reset(rdev))
dev_warn(rdev->dev, "GPU reset failed !\n");
/* Post card if necessary */
if (!radeon_card_posted(rdev)) {
dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n");
DRM_INFO("GPU not posted. posting now...\n");
atom_asic_init(rdev->mode_info.atom_context);
/* init golden registers */
evergreen_init_golden_registers(rdev);
/* Initialize scratch registers */
r600_scratch_init(rdev);
/* Initialize surface registers */
radeon_surface_init(rdev);
/* Initialize clocks */
radeon_get_clock_info(rdev->ddev);
/* Fence driver */
r = radeon_fence_driver_init(rdev);
/* initialize AGP */
if (rdev->flags & RADEON_IS_AGP) {
r = radeon_agp_init(rdev);
radeon_agp_disable(rdev);
/* initialize memory controller */
r = evergreen_mc_init(rdev);
/* Memory manager */
r = radeon_bo_init(rdev);
/* allocate the CP, DMA and (optional) UVD rings */
rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ring_obj = NULL;
r600_ring_init(rdev, &rdev->ring[RADEON_RING_TYPE_GFX_INDEX], 1024 * 1024);
rdev->ring[R600_RING_TYPE_DMA_INDEX].ring_obj = NULL;
r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_DMA_INDEX], 64 * 1024);
r = radeon_uvd_init(rdev);
rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_obj = NULL;
r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_UVD_INDEX],
rdev->ih.ring_obj = NULL;
r600_ih_ring_init(rdev, 64 * 1024);
r = r600_pcie_gart_init(rdev);
rdev->accel_working = true;
r = evergreen_startup(rdev);
/* startup failed: tear acceleration down but keep modesetting alive */
dev_err(rdev->dev, "disabling GPU acceleration\n");
r600_dma_fini(rdev);
r600_irq_fini(rdev);
radeon_wb_fini(rdev);
radeon_ib_pool_fini(rdev);
radeon_irq_kms_fini(rdev);
evergreen_pcie_gart_fini(rdev);
rdev->accel_working = false;
/* Don't start up if the MC ucode is missing on BTC parts.
 * The default clocks and voltages before the MC ucode
 * is loaded are not suffient for advanced operations.
 */
if (ASIC_IS_DCE5(rdev)) {
if (!rdev->mc_fw && !(rdev->flags & RADEON_IS_IGP)) {
DRM_ERROR("radeon: MC ucode required for NI+.\n");
/*
 * evergreen_fini - tear down everything evergreen_init() set up
 *
 * @rdev: radeon_device pointer
 *
 * Releases engines and subsystems in reverse order of initialization:
 * audio, blitter, DMA, IRQ, writeback, IB pool, GART, UVD, scratch,
 * GEM, fences, AGP, BO manager and the ATOM BIOS state.
 * NOTE(review): a CP fini call between the blitter and DMA teardown
 * (present upstream) appears to have been dropped from this copy.
 */
void evergreen_fini(struct radeon_device *rdev)
r600_audio_fini(rdev);
r600_blit_fini(rdev);
r600_dma_fini(rdev);
r600_irq_fini(rdev);
radeon_wb_fini(rdev);
radeon_ib_pool_fini(rdev);
radeon_irq_kms_fini(rdev);
evergreen_pcie_gart_fini(rdev);
r600_uvd_stop(rdev);
radeon_uvd_fini(rdev);
r600_vram_scratch_fini(rdev);
radeon_gem_fini(rdev);
radeon_fence_driver_fini(rdev);
radeon_agp_fini(rdev);
radeon_bo_fini(rdev);
radeon_atombios_fini(rdev);
/*
 * evergreen_pcie_gen2_enable - switch the PCIe link to gen2 speed
 *
 * @rdev: radeon_device pointer
 *
 * Bails out when gen2 is disabled by the radeon.pcie_gen2 module
 * parameter, on IGP/non-PCIE/X2 parts, when the upstream bus cannot do
 * 5.0/8.0 GT/s, or when gen2 is already active.  Otherwise performs the
 * speed-change handshake via the PCIE_LC_* port registers.
 * NOTE(review): early 'return;' lines after the guard conditions appear
 * to have been dropped from this copy — reconcile against upstream.
 */
void evergreen_pcie_gen2_enable(struct radeon_device *rdev)
u32 link_width_cntl, speed_cntl;
/* honour the radeon.pcie_gen2=0 module parameter */
if (radeon_pcie_gen2 == 0)
if (rdev->flags & RADEON_IS_IGP)
if (!(rdev->flags & RADEON_IS_PCIE))
/* x2 cards have a special sequence */
if (ASIC_IS_X2(rdev))
/* the upstream bridge must support at least 5.0 GT/s */
if ((rdev->pdev->bus->max_bus_speed != PCIE_SPEED_5_0GT) &&
(rdev->pdev->bus->max_bus_speed != PCIE_SPEED_8_0GT))
speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
if (speed_cntl & LC_CURRENT_DATA_RATE) {
DRM_INFO("PCIE gen 2 link speeds already enabled\n");
DRM_INFO("enabling PCIE gen 2 link speeds, disable with radeon.pcie_gen2=0\n");
/* only attempt the switch if the other side has ever advertised gen2 */
if ((speed_cntl & LC_OTHER_SIDE_EVER_SENT_GEN2) ||
(speed_cntl & LC_OTHER_SIDE_SUPPORTS_GEN2)) {
link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
link_width_cntl &= ~LC_UPCONFIGURE_DIS;
WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
speed_cntl &= ~LC_TARGET_LINK_SPEED_OVERRIDE_EN;
WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
/* pulse the failed-speed-change counter clear bit */
speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
speed_cntl |= LC_CLR_FAILED_SPD_CHANGE_CNT;
WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
speed_cntl &= ~LC_CLR_FAILED_SPD_CHANGE_CNT;
WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
/* strap the link to gen2 */
speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
speed_cntl |= LC_GEN2_EN_STRAP;
WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
/* XXX: only disable it if gen1 bridge vendor == 0x111d or 0x1106 */
link_width_cntl |= LC_UPCONFIGURE_DIS;
link_width_cntl &= ~LC_UPCONFIGURE_DIS;
WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);