Line data Source code
1 : /*
2 : * Copyright 2010 Advanced Micro Devices, Inc.
3 : *
4 : * Permission is hereby granted, free of charge, to any person obtaining a
5 : * copy of this software and associated documentation files (the "Software"),
6 : * to deal in the Software without restriction, including without limitation
7 : * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 : * and/or sell copies of the Software, and to permit persons to whom the
9 : * Software is furnished to do so, subject to the following conditions:
10 : *
11 : * The above copyright notice and this permission notice shall be included in
12 : * all copies or substantial portions of the Software.
13 : *
14 : * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15 : * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16 : * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
17 : * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18 : * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19 : * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20 : * OTHER DEALINGS IN THE SOFTWARE.
21 : *
22 : * Authors: Alex Deucher
23 : */
24 : #include <dev/pci/drm/drmP.h>
25 : #include "radeon.h"
26 : #include "radeon_asic.h"
27 : #include "radeon_audio.h"
28 : #include <dev/pci/drm/radeon_drm.h>
29 : #include "evergreend.h"
30 : #include "atom.h"
31 : #include "avivod.h"
32 : #include "evergreen_reg.h"
33 : #include "evergreen_blit_shaders.h"
34 : #include "radeon_ucode.h"
35 :
36 : /*
37 : * Indirect registers accessor
38 : */
39 0 : u32 eg_cg_rreg(struct radeon_device *rdev, u32 reg)
40 : {
41 : unsigned long flags;
42 : u32 r;
43 :
44 0 : spin_lock_irqsave(&rdev->cg_idx_lock, flags);
45 0 : WREG32(EVERGREEN_CG_IND_ADDR, ((reg) & 0xffff));
46 0 : r = RREG32(EVERGREEN_CG_IND_DATA);
47 0 : spin_unlock_irqrestore(&rdev->cg_idx_lock, flags);
48 0 : return r;
49 : }
50 :
51 0 : void eg_cg_wreg(struct radeon_device *rdev, u32 reg, u32 v)
52 : {
53 : unsigned long flags;
54 :
55 0 : spin_lock_irqsave(&rdev->cg_idx_lock, flags);
56 0 : WREG32(EVERGREEN_CG_IND_ADDR, ((reg) & 0xffff));
57 0 : WREG32(EVERGREEN_CG_IND_DATA, (v));
58 0 : spin_unlock_irqrestore(&rdev->cg_idx_lock, flags);
59 0 : }
60 :
61 0 : u32 eg_pif_phy0_rreg(struct radeon_device *rdev, u32 reg)
62 : {
63 : unsigned long flags;
64 : u32 r;
65 :
66 0 : spin_lock_irqsave(&rdev->pif_idx_lock, flags);
67 0 : WREG32(EVERGREEN_PIF_PHY0_INDEX, ((reg) & 0xffff));
68 0 : r = RREG32(EVERGREEN_PIF_PHY0_DATA);
69 0 : spin_unlock_irqrestore(&rdev->pif_idx_lock, flags);
70 0 : return r;
71 : }
72 :
73 0 : void eg_pif_phy0_wreg(struct radeon_device *rdev, u32 reg, u32 v)
74 : {
75 : unsigned long flags;
76 :
77 0 : spin_lock_irqsave(&rdev->pif_idx_lock, flags);
78 0 : WREG32(EVERGREEN_PIF_PHY0_INDEX, ((reg) & 0xffff));
79 0 : WREG32(EVERGREEN_PIF_PHY0_DATA, (v));
80 0 : spin_unlock_irqrestore(&rdev->pif_idx_lock, flags);
81 0 : }
82 :
83 0 : u32 eg_pif_phy1_rreg(struct radeon_device *rdev, u32 reg)
84 : {
85 : unsigned long flags;
86 : u32 r;
87 :
88 0 : spin_lock_irqsave(&rdev->pif_idx_lock, flags);
89 0 : WREG32(EVERGREEN_PIF_PHY1_INDEX, ((reg) & 0xffff));
90 0 : r = RREG32(EVERGREEN_PIF_PHY1_DATA);
91 0 : spin_unlock_irqrestore(&rdev->pif_idx_lock, flags);
92 0 : return r;
93 : }
94 :
95 0 : void eg_pif_phy1_wreg(struct radeon_device *rdev, u32 reg, u32 v)
96 : {
97 : unsigned long flags;
98 :
99 0 : spin_lock_irqsave(&rdev->pif_idx_lock, flags);
100 0 : WREG32(EVERGREEN_PIF_PHY1_INDEX, ((reg) & 0xffff));
101 0 : WREG32(EVERGREEN_PIF_PHY1_DATA, (v));
102 0 : spin_unlock_irqrestore(&rdev->pif_idx_lock, flags);
103 0 : }
104 :
105 : static const u32 crtc_offsets[6] =
106 : {
107 : EVERGREEN_CRTC0_REGISTER_OFFSET,
108 : EVERGREEN_CRTC1_REGISTER_OFFSET,
109 : EVERGREEN_CRTC2_REGISTER_OFFSET,
110 : EVERGREEN_CRTC3_REGISTER_OFFSET,
111 : EVERGREEN_CRTC4_REGISTER_OFFSET,
112 : EVERGREEN_CRTC5_REGISTER_OFFSET
113 : };
114 :
115 : #include "clearstate_evergreen.h"
116 :
117 : static const u32 sumo_rlc_save_restore_register_list[] =
118 : {
119 : 0x98fc,
120 : 0x9830,
121 : 0x9834,
122 : 0x9838,
123 : 0x9870,
124 : 0x9874,
125 : 0x8a14,
126 : 0x8b24,
127 : 0x8bcc,
128 : 0x8b10,
129 : 0x8d00,
130 : 0x8d04,
131 : 0x8c00,
132 : 0x8c04,
133 : 0x8c08,
134 : 0x8c0c,
135 : 0x8d8c,
136 : 0x8c20,
137 : 0x8c24,
138 : 0x8c28,
139 : 0x8c18,
140 : 0x8c1c,
141 : 0x8cf0,
142 : 0x8e2c,
143 : 0x8e38,
144 : 0x8c30,
145 : 0x9508,
146 : 0x9688,
147 : 0x9608,
148 : 0x960c,
149 : 0x9610,
150 : 0x9614,
151 : 0x88c4,
152 : 0x88d4,
153 : 0xa008,
154 : 0x900c,
155 : 0x9100,
156 : 0x913c,
157 : 0x98f8,
158 : 0x98f4,
159 : 0x9b7c,
160 : 0x3f8c,
161 : 0x8950,
162 : 0x8954,
163 : 0x8a18,
164 : 0x8b28,
165 : 0x9144,
166 : 0x9148,
167 : 0x914c,
168 : 0x3f90,
169 : 0x3f94,
170 : 0x915c,
171 : 0x9160,
172 : 0x9178,
173 : 0x917c,
174 : 0x9180,
175 : 0x918c,
176 : 0x9190,
177 : 0x9194,
178 : 0x9198,
179 : 0x919c,
180 : 0x91a8,
181 : 0x91ac,
182 : 0x91b0,
183 : 0x91b4,
184 : 0x91b8,
185 : 0x91c4,
186 : 0x91c8,
187 : 0x91cc,
188 : 0x91d0,
189 : 0x91d4,
190 : 0x91e0,
191 : 0x91e4,
192 : 0x91ec,
193 : 0x91f0,
194 : 0x91f4,
195 : 0x9200,
196 : 0x9204,
197 : 0x929c,
198 : 0x9150,
199 : 0x802c,
200 : };
201 :
202 : static void evergreen_gpu_init(struct radeon_device *rdev);
203 : void evergreen_fini(struct radeon_device *rdev);
204 : void evergreen_pcie_gen2_enable(struct radeon_device *rdev);
205 : void evergreen_program_aspm(struct radeon_device *rdev);
206 : extern void cayman_cp_int_cntl_setup(struct radeon_device *rdev,
207 : int ring, u32 cp_int_cntl);
208 : extern void cayman_vm_decode_fault(struct radeon_device *rdev,
209 : u32 status, u32 addr);
210 : void cik_init_cp_pg_table(struct radeon_device *rdev);
211 :
212 : extern u32 si_get_csb_size(struct radeon_device *rdev);
213 : extern void si_get_csb_buffer(struct radeon_device *rdev, volatile u32 *buffer);
214 : extern u32 cik_get_csb_size(struct radeon_device *rdev);
215 : extern void cik_get_csb_buffer(struct radeon_device *rdev, volatile u32 *buffer);
216 : extern void rv770_set_clk_bypass_mode(struct radeon_device *rdev);
217 :
218 : static const u32 evergreen_golden_registers[] =
219 : {
220 : 0x3f90, 0xffff0000, 0xff000000,
221 : 0x9148, 0xffff0000, 0xff000000,
222 : 0x3f94, 0xffff0000, 0xff000000,
223 : 0x914c, 0xffff0000, 0xff000000,
224 : 0x9b7c, 0xffffffff, 0x00000000,
225 : 0x8a14, 0xffffffff, 0x00000007,
226 : 0x8b10, 0xffffffff, 0x00000000,
227 : 0x960c, 0xffffffff, 0x54763210,
228 : 0x88c4, 0xffffffff, 0x000000c2,
229 : 0x88d4, 0xffffffff, 0x00000010,
230 : 0x8974, 0xffffffff, 0x00000000,
231 : 0xc78, 0x00000080, 0x00000080,
232 : 0x5eb4, 0xffffffff, 0x00000002,
233 : 0x5e78, 0xffffffff, 0x001000f0,
234 : 0x6104, 0x01000300, 0x00000000,
235 : 0x5bc0, 0x00300000, 0x00000000,
236 : 0x7030, 0xffffffff, 0x00000011,
237 : 0x7c30, 0xffffffff, 0x00000011,
238 : 0x10830, 0xffffffff, 0x00000011,
239 : 0x11430, 0xffffffff, 0x00000011,
240 : 0x12030, 0xffffffff, 0x00000011,
241 : 0x12c30, 0xffffffff, 0x00000011,
242 : 0xd02c, 0xffffffff, 0x08421000,
243 : 0x240c, 0xffffffff, 0x00000380,
244 : 0x8b24, 0xffffffff, 0x00ff0fff,
245 : 0x28a4c, 0x06000000, 0x06000000,
246 : 0x10c, 0x00000001, 0x00000001,
247 : 0x8d00, 0xffffffff, 0x100e4848,
248 : 0x8d04, 0xffffffff, 0x00164745,
249 : 0x8c00, 0xffffffff, 0xe4000003,
250 : 0x8c04, 0xffffffff, 0x40600060,
251 : 0x8c08, 0xffffffff, 0x001c001c,
252 : 0x8cf0, 0xffffffff, 0x08e00620,
253 : 0x8c20, 0xffffffff, 0x00800080,
254 : 0x8c24, 0xffffffff, 0x00800080,
255 : 0x8c18, 0xffffffff, 0x20202078,
256 : 0x8c1c, 0xffffffff, 0x00001010,
257 : 0x28350, 0xffffffff, 0x00000000,
258 : 0xa008, 0xffffffff, 0x00010000,
259 : 0x5c4, 0xffffffff, 0x00000001,
260 : 0x9508, 0xffffffff, 0x00000002,
261 : 0x913c, 0x0000000f, 0x0000000a
262 : };
263 :
264 : static const u32 evergreen_golden_registers2[] =
265 : {
266 : 0x2f4c, 0xffffffff, 0x00000000,
267 : 0x54f4, 0xffffffff, 0x00000000,
268 : 0x54f0, 0xffffffff, 0x00000000,
269 : 0x5498, 0xffffffff, 0x00000000,
270 : 0x549c, 0xffffffff, 0x00000000,
271 : 0x5494, 0xffffffff, 0x00000000,
272 : 0x53cc, 0xffffffff, 0x00000000,
273 : 0x53c8, 0xffffffff, 0x00000000,
274 : 0x53c4, 0xffffffff, 0x00000000,
275 : 0x53c0, 0xffffffff, 0x00000000,
276 : 0x53bc, 0xffffffff, 0x00000000,
277 : 0x53b8, 0xffffffff, 0x00000000,
278 : 0x53b4, 0xffffffff, 0x00000000,
279 : 0x53b0, 0xffffffff, 0x00000000
280 : };
281 :
282 : static const u32 cypress_mgcg_init[] =
283 : {
284 : 0x802c, 0xffffffff, 0xc0000000,
285 : 0x5448, 0xffffffff, 0x00000100,
286 : 0x55e4, 0xffffffff, 0x00000100,
287 : 0x160c, 0xffffffff, 0x00000100,
288 : 0x5644, 0xffffffff, 0x00000100,
289 : 0xc164, 0xffffffff, 0x00000100,
290 : 0x8a18, 0xffffffff, 0x00000100,
291 : 0x897c, 0xffffffff, 0x06000100,
292 : 0x8b28, 0xffffffff, 0x00000100,
293 : 0x9144, 0xffffffff, 0x00000100,
294 : 0x9a60, 0xffffffff, 0x00000100,
295 : 0x9868, 0xffffffff, 0x00000100,
296 : 0x8d58, 0xffffffff, 0x00000100,
297 : 0x9510, 0xffffffff, 0x00000100,
298 : 0x949c, 0xffffffff, 0x00000100,
299 : 0x9654, 0xffffffff, 0x00000100,
300 : 0x9030, 0xffffffff, 0x00000100,
301 : 0x9034, 0xffffffff, 0x00000100,
302 : 0x9038, 0xffffffff, 0x00000100,
303 : 0x903c, 0xffffffff, 0x00000100,
304 : 0x9040, 0xffffffff, 0x00000100,
305 : 0xa200, 0xffffffff, 0x00000100,
306 : 0xa204, 0xffffffff, 0x00000100,
307 : 0xa208, 0xffffffff, 0x00000100,
308 : 0xa20c, 0xffffffff, 0x00000100,
309 : 0x971c, 0xffffffff, 0x00000100,
310 : 0x977c, 0xffffffff, 0x00000100,
311 : 0x3f80, 0xffffffff, 0x00000100,
312 : 0xa210, 0xffffffff, 0x00000100,
313 : 0xa214, 0xffffffff, 0x00000100,
314 : 0x4d8, 0xffffffff, 0x00000100,
315 : 0x9784, 0xffffffff, 0x00000100,
316 : 0x9698, 0xffffffff, 0x00000100,
317 : 0x4d4, 0xffffffff, 0x00000200,
318 : 0x30cc, 0xffffffff, 0x00000100,
319 : 0xd0c0, 0xffffffff, 0xff000100,
320 : 0x802c, 0xffffffff, 0x40000000,
321 : 0x915c, 0xffffffff, 0x00010000,
322 : 0x9160, 0xffffffff, 0x00030002,
323 : 0x9178, 0xffffffff, 0x00070000,
324 : 0x917c, 0xffffffff, 0x00030002,
325 : 0x9180, 0xffffffff, 0x00050004,
326 : 0x918c, 0xffffffff, 0x00010006,
327 : 0x9190, 0xffffffff, 0x00090008,
328 : 0x9194, 0xffffffff, 0x00070000,
329 : 0x9198, 0xffffffff, 0x00030002,
330 : 0x919c, 0xffffffff, 0x00050004,
331 : 0x91a8, 0xffffffff, 0x00010006,
332 : 0x91ac, 0xffffffff, 0x00090008,
333 : 0x91b0, 0xffffffff, 0x00070000,
334 : 0x91b4, 0xffffffff, 0x00030002,
335 : 0x91b8, 0xffffffff, 0x00050004,
336 : 0x91c4, 0xffffffff, 0x00010006,
337 : 0x91c8, 0xffffffff, 0x00090008,
338 : 0x91cc, 0xffffffff, 0x00070000,
339 : 0x91d0, 0xffffffff, 0x00030002,
340 : 0x91d4, 0xffffffff, 0x00050004,
341 : 0x91e0, 0xffffffff, 0x00010006,
342 : 0x91e4, 0xffffffff, 0x00090008,
343 : 0x91e8, 0xffffffff, 0x00000000,
344 : 0x91ec, 0xffffffff, 0x00070000,
345 : 0x91f0, 0xffffffff, 0x00030002,
346 : 0x91f4, 0xffffffff, 0x00050004,
347 : 0x9200, 0xffffffff, 0x00010006,
348 : 0x9204, 0xffffffff, 0x00090008,
349 : 0x9208, 0xffffffff, 0x00070000,
350 : 0x920c, 0xffffffff, 0x00030002,
351 : 0x9210, 0xffffffff, 0x00050004,
352 : 0x921c, 0xffffffff, 0x00010006,
353 : 0x9220, 0xffffffff, 0x00090008,
354 : 0x9224, 0xffffffff, 0x00070000,
355 : 0x9228, 0xffffffff, 0x00030002,
356 : 0x922c, 0xffffffff, 0x00050004,
357 : 0x9238, 0xffffffff, 0x00010006,
358 : 0x923c, 0xffffffff, 0x00090008,
359 : 0x9240, 0xffffffff, 0x00070000,
360 : 0x9244, 0xffffffff, 0x00030002,
361 : 0x9248, 0xffffffff, 0x00050004,
362 : 0x9254, 0xffffffff, 0x00010006,
363 : 0x9258, 0xffffffff, 0x00090008,
364 : 0x925c, 0xffffffff, 0x00070000,
365 : 0x9260, 0xffffffff, 0x00030002,
366 : 0x9264, 0xffffffff, 0x00050004,
367 : 0x9270, 0xffffffff, 0x00010006,
368 : 0x9274, 0xffffffff, 0x00090008,
369 : 0x9278, 0xffffffff, 0x00070000,
370 : 0x927c, 0xffffffff, 0x00030002,
371 : 0x9280, 0xffffffff, 0x00050004,
372 : 0x928c, 0xffffffff, 0x00010006,
373 : 0x9290, 0xffffffff, 0x00090008,
374 : 0x9294, 0xffffffff, 0x00000000,
375 : 0x929c, 0xffffffff, 0x00000001,
376 : 0x802c, 0xffffffff, 0x40010000,
377 : 0x915c, 0xffffffff, 0x00010000,
378 : 0x9160, 0xffffffff, 0x00030002,
379 : 0x9178, 0xffffffff, 0x00070000,
380 : 0x917c, 0xffffffff, 0x00030002,
381 : 0x9180, 0xffffffff, 0x00050004,
382 : 0x918c, 0xffffffff, 0x00010006,
383 : 0x9190, 0xffffffff, 0x00090008,
384 : 0x9194, 0xffffffff, 0x00070000,
385 : 0x9198, 0xffffffff, 0x00030002,
386 : 0x919c, 0xffffffff, 0x00050004,
387 : 0x91a8, 0xffffffff, 0x00010006,
388 : 0x91ac, 0xffffffff, 0x00090008,
389 : 0x91b0, 0xffffffff, 0x00070000,
390 : 0x91b4, 0xffffffff, 0x00030002,
391 : 0x91b8, 0xffffffff, 0x00050004,
392 : 0x91c4, 0xffffffff, 0x00010006,
393 : 0x91c8, 0xffffffff, 0x00090008,
394 : 0x91cc, 0xffffffff, 0x00070000,
395 : 0x91d0, 0xffffffff, 0x00030002,
396 : 0x91d4, 0xffffffff, 0x00050004,
397 : 0x91e0, 0xffffffff, 0x00010006,
398 : 0x91e4, 0xffffffff, 0x00090008,
399 : 0x91e8, 0xffffffff, 0x00000000,
400 : 0x91ec, 0xffffffff, 0x00070000,
401 : 0x91f0, 0xffffffff, 0x00030002,
402 : 0x91f4, 0xffffffff, 0x00050004,
403 : 0x9200, 0xffffffff, 0x00010006,
404 : 0x9204, 0xffffffff, 0x00090008,
405 : 0x9208, 0xffffffff, 0x00070000,
406 : 0x920c, 0xffffffff, 0x00030002,
407 : 0x9210, 0xffffffff, 0x00050004,
408 : 0x921c, 0xffffffff, 0x00010006,
409 : 0x9220, 0xffffffff, 0x00090008,
410 : 0x9224, 0xffffffff, 0x00070000,
411 : 0x9228, 0xffffffff, 0x00030002,
412 : 0x922c, 0xffffffff, 0x00050004,
413 : 0x9238, 0xffffffff, 0x00010006,
414 : 0x923c, 0xffffffff, 0x00090008,
415 : 0x9240, 0xffffffff, 0x00070000,
416 : 0x9244, 0xffffffff, 0x00030002,
417 : 0x9248, 0xffffffff, 0x00050004,
418 : 0x9254, 0xffffffff, 0x00010006,
419 : 0x9258, 0xffffffff, 0x00090008,
420 : 0x925c, 0xffffffff, 0x00070000,
421 : 0x9260, 0xffffffff, 0x00030002,
422 : 0x9264, 0xffffffff, 0x00050004,
423 : 0x9270, 0xffffffff, 0x00010006,
424 : 0x9274, 0xffffffff, 0x00090008,
425 : 0x9278, 0xffffffff, 0x00070000,
426 : 0x927c, 0xffffffff, 0x00030002,
427 : 0x9280, 0xffffffff, 0x00050004,
428 : 0x928c, 0xffffffff, 0x00010006,
429 : 0x9290, 0xffffffff, 0x00090008,
430 : 0x9294, 0xffffffff, 0x00000000,
431 : 0x929c, 0xffffffff, 0x00000001,
432 : 0x802c, 0xffffffff, 0xc0000000
433 : };
434 :
435 : static const u32 redwood_mgcg_init[] =
436 : {
437 : 0x802c, 0xffffffff, 0xc0000000,
438 : 0x5448, 0xffffffff, 0x00000100,
439 : 0x55e4, 0xffffffff, 0x00000100,
440 : 0x160c, 0xffffffff, 0x00000100,
441 : 0x5644, 0xffffffff, 0x00000100,
442 : 0xc164, 0xffffffff, 0x00000100,
443 : 0x8a18, 0xffffffff, 0x00000100,
444 : 0x897c, 0xffffffff, 0x06000100,
445 : 0x8b28, 0xffffffff, 0x00000100,
446 : 0x9144, 0xffffffff, 0x00000100,
447 : 0x9a60, 0xffffffff, 0x00000100,
448 : 0x9868, 0xffffffff, 0x00000100,
449 : 0x8d58, 0xffffffff, 0x00000100,
450 : 0x9510, 0xffffffff, 0x00000100,
451 : 0x949c, 0xffffffff, 0x00000100,
452 : 0x9654, 0xffffffff, 0x00000100,
453 : 0x9030, 0xffffffff, 0x00000100,
454 : 0x9034, 0xffffffff, 0x00000100,
455 : 0x9038, 0xffffffff, 0x00000100,
456 : 0x903c, 0xffffffff, 0x00000100,
457 : 0x9040, 0xffffffff, 0x00000100,
458 : 0xa200, 0xffffffff, 0x00000100,
459 : 0xa204, 0xffffffff, 0x00000100,
460 : 0xa208, 0xffffffff, 0x00000100,
461 : 0xa20c, 0xffffffff, 0x00000100,
462 : 0x971c, 0xffffffff, 0x00000100,
463 : 0x977c, 0xffffffff, 0x00000100,
464 : 0x3f80, 0xffffffff, 0x00000100,
465 : 0xa210, 0xffffffff, 0x00000100,
466 : 0xa214, 0xffffffff, 0x00000100,
467 : 0x4d8, 0xffffffff, 0x00000100,
468 : 0x9784, 0xffffffff, 0x00000100,
469 : 0x9698, 0xffffffff, 0x00000100,
470 : 0x4d4, 0xffffffff, 0x00000200,
471 : 0x30cc, 0xffffffff, 0x00000100,
472 : 0xd0c0, 0xffffffff, 0xff000100,
473 : 0x802c, 0xffffffff, 0x40000000,
474 : 0x915c, 0xffffffff, 0x00010000,
475 : 0x9160, 0xffffffff, 0x00030002,
476 : 0x9178, 0xffffffff, 0x00070000,
477 : 0x917c, 0xffffffff, 0x00030002,
478 : 0x9180, 0xffffffff, 0x00050004,
479 : 0x918c, 0xffffffff, 0x00010006,
480 : 0x9190, 0xffffffff, 0x00090008,
481 : 0x9194, 0xffffffff, 0x00070000,
482 : 0x9198, 0xffffffff, 0x00030002,
483 : 0x919c, 0xffffffff, 0x00050004,
484 : 0x91a8, 0xffffffff, 0x00010006,
485 : 0x91ac, 0xffffffff, 0x00090008,
486 : 0x91b0, 0xffffffff, 0x00070000,
487 : 0x91b4, 0xffffffff, 0x00030002,
488 : 0x91b8, 0xffffffff, 0x00050004,
489 : 0x91c4, 0xffffffff, 0x00010006,
490 : 0x91c8, 0xffffffff, 0x00090008,
491 : 0x91cc, 0xffffffff, 0x00070000,
492 : 0x91d0, 0xffffffff, 0x00030002,
493 : 0x91d4, 0xffffffff, 0x00050004,
494 : 0x91e0, 0xffffffff, 0x00010006,
495 : 0x91e4, 0xffffffff, 0x00090008,
496 : 0x91e8, 0xffffffff, 0x00000000,
497 : 0x91ec, 0xffffffff, 0x00070000,
498 : 0x91f0, 0xffffffff, 0x00030002,
499 : 0x91f4, 0xffffffff, 0x00050004,
500 : 0x9200, 0xffffffff, 0x00010006,
501 : 0x9204, 0xffffffff, 0x00090008,
502 : 0x9294, 0xffffffff, 0x00000000,
503 : 0x929c, 0xffffffff, 0x00000001,
504 : 0x802c, 0xffffffff, 0xc0000000
505 : };
506 :
507 : static const u32 cedar_golden_registers[] =
508 : {
509 : 0x3f90, 0xffff0000, 0xff000000,
510 : 0x9148, 0xffff0000, 0xff000000,
511 : 0x3f94, 0xffff0000, 0xff000000,
512 : 0x914c, 0xffff0000, 0xff000000,
513 : 0x9b7c, 0xffffffff, 0x00000000,
514 : 0x8a14, 0xffffffff, 0x00000007,
515 : 0x8b10, 0xffffffff, 0x00000000,
516 : 0x960c, 0xffffffff, 0x54763210,
517 : 0x88c4, 0xffffffff, 0x000000c2,
518 : 0x88d4, 0xffffffff, 0x00000000,
519 : 0x8974, 0xffffffff, 0x00000000,
520 : 0xc78, 0x00000080, 0x00000080,
521 : 0x5eb4, 0xffffffff, 0x00000002,
522 : 0x5e78, 0xffffffff, 0x001000f0,
523 : 0x6104, 0x01000300, 0x00000000,
524 : 0x5bc0, 0x00300000, 0x00000000,
525 : 0x7030, 0xffffffff, 0x00000011,
526 : 0x7c30, 0xffffffff, 0x00000011,
527 : 0x10830, 0xffffffff, 0x00000011,
528 : 0x11430, 0xffffffff, 0x00000011,
529 : 0xd02c, 0xffffffff, 0x08421000,
530 : 0x240c, 0xffffffff, 0x00000380,
531 : 0x8b24, 0xffffffff, 0x00ff0fff,
532 : 0x28a4c, 0x06000000, 0x06000000,
533 : 0x10c, 0x00000001, 0x00000001,
534 : 0x8d00, 0xffffffff, 0x100e4848,
535 : 0x8d04, 0xffffffff, 0x00164745,
536 : 0x8c00, 0xffffffff, 0xe4000003,
537 : 0x8c04, 0xffffffff, 0x40600060,
538 : 0x8c08, 0xffffffff, 0x001c001c,
539 : 0x8cf0, 0xffffffff, 0x08e00410,
540 : 0x8c20, 0xffffffff, 0x00800080,
541 : 0x8c24, 0xffffffff, 0x00800080,
542 : 0x8c18, 0xffffffff, 0x20202078,
543 : 0x8c1c, 0xffffffff, 0x00001010,
544 : 0x28350, 0xffffffff, 0x00000000,
545 : 0xa008, 0xffffffff, 0x00010000,
546 : 0x5c4, 0xffffffff, 0x00000001,
547 : 0x9508, 0xffffffff, 0x00000002
548 : };
549 :
550 : static const u32 cedar_mgcg_init[] =
551 : {
552 : 0x802c, 0xffffffff, 0xc0000000,
553 : 0x5448, 0xffffffff, 0x00000100,
554 : 0x55e4, 0xffffffff, 0x00000100,
555 : 0x160c, 0xffffffff, 0x00000100,
556 : 0x5644, 0xffffffff, 0x00000100,
557 : 0xc164, 0xffffffff, 0x00000100,
558 : 0x8a18, 0xffffffff, 0x00000100,
559 : 0x897c, 0xffffffff, 0x06000100,
560 : 0x8b28, 0xffffffff, 0x00000100,
561 : 0x9144, 0xffffffff, 0x00000100,
562 : 0x9a60, 0xffffffff, 0x00000100,
563 : 0x9868, 0xffffffff, 0x00000100,
564 : 0x8d58, 0xffffffff, 0x00000100,
565 : 0x9510, 0xffffffff, 0x00000100,
566 : 0x949c, 0xffffffff, 0x00000100,
567 : 0x9654, 0xffffffff, 0x00000100,
568 : 0x9030, 0xffffffff, 0x00000100,
569 : 0x9034, 0xffffffff, 0x00000100,
570 : 0x9038, 0xffffffff, 0x00000100,
571 : 0x903c, 0xffffffff, 0x00000100,
572 : 0x9040, 0xffffffff, 0x00000100,
573 : 0xa200, 0xffffffff, 0x00000100,
574 : 0xa204, 0xffffffff, 0x00000100,
575 : 0xa208, 0xffffffff, 0x00000100,
576 : 0xa20c, 0xffffffff, 0x00000100,
577 : 0x971c, 0xffffffff, 0x00000100,
578 : 0x977c, 0xffffffff, 0x00000100,
579 : 0x3f80, 0xffffffff, 0x00000100,
580 : 0xa210, 0xffffffff, 0x00000100,
581 : 0xa214, 0xffffffff, 0x00000100,
582 : 0x4d8, 0xffffffff, 0x00000100,
583 : 0x9784, 0xffffffff, 0x00000100,
584 : 0x9698, 0xffffffff, 0x00000100,
585 : 0x4d4, 0xffffffff, 0x00000200,
586 : 0x30cc, 0xffffffff, 0x00000100,
587 : 0xd0c0, 0xffffffff, 0xff000100,
588 : 0x802c, 0xffffffff, 0x40000000,
589 : 0x915c, 0xffffffff, 0x00010000,
590 : 0x9178, 0xffffffff, 0x00050000,
591 : 0x917c, 0xffffffff, 0x00030002,
592 : 0x918c, 0xffffffff, 0x00010004,
593 : 0x9190, 0xffffffff, 0x00070006,
594 : 0x9194, 0xffffffff, 0x00050000,
595 : 0x9198, 0xffffffff, 0x00030002,
596 : 0x91a8, 0xffffffff, 0x00010004,
597 : 0x91ac, 0xffffffff, 0x00070006,
598 : 0x91e8, 0xffffffff, 0x00000000,
599 : 0x9294, 0xffffffff, 0x00000000,
600 : 0x929c, 0xffffffff, 0x00000001,
601 : 0x802c, 0xffffffff, 0xc0000000
602 : };
603 :
604 : static const u32 juniper_mgcg_init[] =
605 : {
606 : 0x802c, 0xffffffff, 0xc0000000,
607 : 0x5448, 0xffffffff, 0x00000100,
608 : 0x55e4, 0xffffffff, 0x00000100,
609 : 0x160c, 0xffffffff, 0x00000100,
610 : 0x5644, 0xffffffff, 0x00000100,
611 : 0xc164, 0xffffffff, 0x00000100,
612 : 0x8a18, 0xffffffff, 0x00000100,
613 : 0x897c, 0xffffffff, 0x06000100,
614 : 0x8b28, 0xffffffff, 0x00000100,
615 : 0x9144, 0xffffffff, 0x00000100,
616 : 0x9a60, 0xffffffff, 0x00000100,
617 : 0x9868, 0xffffffff, 0x00000100,
618 : 0x8d58, 0xffffffff, 0x00000100,
619 : 0x9510, 0xffffffff, 0x00000100,
620 : 0x949c, 0xffffffff, 0x00000100,
621 : 0x9654, 0xffffffff, 0x00000100,
622 : 0x9030, 0xffffffff, 0x00000100,
623 : 0x9034, 0xffffffff, 0x00000100,
624 : 0x9038, 0xffffffff, 0x00000100,
625 : 0x903c, 0xffffffff, 0x00000100,
626 : 0x9040, 0xffffffff, 0x00000100,
627 : 0xa200, 0xffffffff, 0x00000100,
628 : 0xa204, 0xffffffff, 0x00000100,
629 : 0xa208, 0xffffffff, 0x00000100,
630 : 0xa20c, 0xffffffff, 0x00000100,
631 : 0x971c, 0xffffffff, 0x00000100,
632 : 0xd0c0, 0xffffffff, 0xff000100,
633 : 0x802c, 0xffffffff, 0x40000000,
634 : 0x915c, 0xffffffff, 0x00010000,
635 : 0x9160, 0xffffffff, 0x00030002,
636 : 0x9178, 0xffffffff, 0x00070000,
637 : 0x917c, 0xffffffff, 0x00030002,
638 : 0x9180, 0xffffffff, 0x00050004,
639 : 0x918c, 0xffffffff, 0x00010006,
640 : 0x9190, 0xffffffff, 0x00090008,
641 : 0x9194, 0xffffffff, 0x00070000,
642 : 0x9198, 0xffffffff, 0x00030002,
643 : 0x919c, 0xffffffff, 0x00050004,
644 : 0x91a8, 0xffffffff, 0x00010006,
645 : 0x91ac, 0xffffffff, 0x00090008,
646 : 0x91b0, 0xffffffff, 0x00070000,
647 : 0x91b4, 0xffffffff, 0x00030002,
648 : 0x91b8, 0xffffffff, 0x00050004,
649 : 0x91c4, 0xffffffff, 0x00010006,
650 : 0x91c8, 0xffffffff, 0x00090008,
651 : 0x91cc, 0xffffffff, 0x00070000,
652 : 0x91d0, 0xffffffff, 0x00030002,
653 : 0x91d4, 0xffffffff, 0x00050004,
654 : 0x91e0, 0xffffffff, 0x00010006,
655 : 0x91e4, 0xffffffff, 0x00090008,
656 : 0x91e8, 0xffffffff, 0x00000000,
657 : 0x91ec, 0xffffffff, 0x00070000,
658 : 0x91f0, 0xffffffff, 0x00030002,
659 : 0x91f4, 0xffffffff, 0x00050004,
660 : 0x9200, 0xffffffff, 0x00010006,
661 : 0x9204, 0xffffffff, 0x00090008,
662 : 0x9208, 0xffffffff, 0x00070000,
663 : 0x920c, 0xffffffff, 0x00030002,
664 : 0x9210, 0xffffffff, 0x00050004,
665 : 0x921c, 0xffffffff, 0x00010006,
666 : 0x9220, 0xffffffff, 0x00090008,
667 : 0x9224, 0xffffffff, 0x00070000,
668 : 0x9228, 0xffffffff, 0x00030002,
669 : 0x922c, 0xffffffff, 0x00050004,
670 : 0x9238, 0xffffffff, 0x00010006,
671 : 0x923c, 0xffffffff, 0x00090008,
672 : 0x9240, 0xffffffff, 0x00070000,
673 : 0x9244, 0xffffffff, 0x00030002,
674 : 0x9248, 0xffffffff, 0x00050004,
675 : 0x9254, 0xffffffff, 0x00010006,
676 : 0x9258, 0xffffffff, 0x00090008,
677 : 0x925c, 0xffffffff, 0x00070000,
678 : 0x9260, 0xffffffff, 0x00030002,
679 : 0x9264, 0xffffffff, 0x00050004,
680 : 0x9270, 0xffffffff, 0x00010006,
681 : 0x9274, 0xffffffff, 0x00090008,
682 : 0x9278, 0xffffffff, 0x00070000,
683 : 0x927c, 0xffffffff, 0x00030002,
684 : 0x9280, 0xffffffff, 0x00050004,
685 : 0x928c, 0xffffffff, 0x00010006,
686 : 0x9290, 0xffffffff, 0x00090008,
687 : 0x9294, 0xffffffff, 0x00000000,
688 : 0x929c, 0xffffffff, 0x00000001,
689 : 0x802c, 0xffffffff, 0xc0000000,
690 : 0x977c, 0xffffffff, 0x00000100,
691 : 0x3f80, 0xffffffff, 0x00000100,
692 : 0xa210, 0xffffffff, 0x00000100,
693 : 0xa214, 0xffffffff, 0x00000100,
694 : 0x4d8, 0xffffffff, 0x00000100,
695 : 0x9784, 0xffffffff, 0x00000100,
696 : 0x9698, 0xffffffff, 0x00000100,
697 : 0x4d4, 0xffffffff, 0x00000200,
698 : 0x30cc, 0xffffffff, 0x00000100,
699 : 0x802c, 0xffffffff, 0xc0000000
700 : };
701 :
702 : static const u32 supersumo_golden_registers[] =
703 : {
704 : 0x5eb4, 0xffffffff, 0x00000002,
705 : 0x5c4, 0xffffffff, 0x00000001,
706 : 0x7030, 0xffffffff, 0x00000011,
707 : 0x7c30, 0xffffffff, 0x00000011,
708 : 0x6104, 0x01000300, 0x00000000,
709 : 0x5bc0, 0x00300000, 0x00000000,
710 : 0x8c04, 0xffffffff, 0x40600060,
711 : 0x8c08, 0xffffffff, 0x001c001c,
712 : 0x8c20, 0xffffffff, 0x00800080,
713 : 0x8c24, 0xffffffff, 0x00800080,
714 : 0x8c18, 0xffffffff, 0x20202078,
715 : 0x8c1c, 0xffffffff, 0x00001010,
716 : 0x918c, 0xffffffff, 0x00010006,
717 : 0x91a8, 0xffffffff, 0x00010006,
718 : 0x91c4, 0xffffffff, 0x00010006,
719 : 0x91e0, 0xffffffff, 0x00010006,
720 : 0x9200, 0xffffffff, 0x00010006,
721 : 0x9150, 0xffffffff, 0x6e944040,
722 : 0x917c, 0xffffffff, 0x00030002,
723 : 0x9180, 0xffffffff, 0x00050004,
724 : 0x9198, 0xffffffff, 0x00030002,
725 : 0x919c, 0xffffffff, 0x00050004,
726 : 0x91b4, 0xffffffff, 0x00030002,
727 : 0x91b8, 0xffffffff, 0x00050004,
728 : 0x91d0, 0xffffffff, 0x00030002,
729 : 0x91d4, 0xffffffff, 0x00050004,
730 : 0x91f0, 0xffffffff, 0x00030002,
731 : 0x91f4, 0xffffffff, 0x00050004,
732 : 0x915c, 0xffffffff, 0x00010000,
733 : 0x9160, 0xffffffff, 0x00030002,
734 : 0x3f90, 0xffff0000, 0xff000000,
735 : 0x9178, 0xffffffff, 0x00070000,
736 : 0x9194, 0xffffffff, 0x00070000,
737 : 0x91b0, 0xffffffff, 0x00070000,
738 : 0x91cc, 0xffffffff, 0x00070000,
739 : 0x91ec, 0xffffffff, 0x00070000,
740 : 0x9148, 0xffff0000, 0xff000000,
741 : 0x9190, 0xffffffff, 0x00090008,
742 : 0x91ac, 0xffffffff, 0x00090008,
743 : 0x91c8, 0xffffffff, 0x00090008,
744 : 0x91e4, 0xffffffff, 0x00090008,
745 : 0x9204, 0xffffffff, 0x00090008,
746 : 0x3f94, 0xffff0000, 0xff000000,
747 : 0x914c, 0xffff0000, 0xff000000,
748 : 0x929c, 0xffffffff, 0x00000001,
749 : 0x8a18, 0xffffffff, 0x00000100,
750 : 0x8b28, 0xffffffff, 0x00000100,
751 : 0x9144, 0xffffffff, 0x00000100,
752 : 0x5644, 0xffffffff, 0x00000100,
753 : 0x9b7c, 0xffffffff, 0x00000000,
754 : 0x8030, 0xffffffff, 0x0000100a,
755 : 0x8a14, 0xffffffff, 0x00000007,
756 : 0x8b24, 0xffffffff, 0x00ff0fff,
757 : 0x8b10, 0xffffffff, 0x00000000,
758 : 0x28a4c, 0x06000000, 0x06000000,
759 : 0x4d8, 0xffffffff, 0x00000100,
760 : 0x913c, 0xffff000f, 0x0100000a,
761 : 0x960c, 0xffffffff, 0x54763210,
762 : 0x88c4, 0xffffffff, 0x000000c2,
763 : 0x88d4, 0xffffffff, 0x00000010,
764 : 0x8974, 0xffffffff, 0x00000000,
765 : 0xc78, 0x00000080, 0x00000080,
766 : 0x5e78, 0xffffffff, 0x001000f0,
767 : 0xd02c, 0xffffffff, 0x08421000,
768 : 0xa008, 0xffffffff, 0x00010000,
769 : 0x8d00, 0xffffffff, 0x100e4848,
770 : 0x8d04, 0xffffffff, 0x00164745,
771 : 0x8c00, 0xffffffff, 0xe4000003,
772 : 0x8cf0, 0x1fffffff, 0x08e00620,
773 : 0x28350, 0xffffffff, 0x00000000,
774 : 0x9508, 0xffffffff, 0x00000002
775 : };
776 :
777 : static const u32 sumo_golden_registers[] =
778 : {
779 : 0x900c, 0x00ffffff, 0x0017071f,
780 : 0x8c18, 0xffffffff, 0x10101060,
781 : 0x8c1c, 0xffffffff, 0x00001010,
782 : 0x8c30, 0x0000000f, 0x00000005,
783 : 0x9688, 0x0000000f, 0x00000007
784 : };
785 :
786 : static const u32 wrestler_golden_registers[] =
787 : {
788 : 0x5eb4, 0xffffffff, 0x00000002,
789 : 0x5c4, 0xffffffff, 0x00000001,
790 : 0x7030, 0xffffffff, 0x00000011,
791 : 0x7c30, 0xffffffff, 0x00000011,
792 : 0x6104, 0x01000300, 0x00000000,
793 : 0x5bc0, 0x00300000, 0x00000000,
794 : 0x918c, 0xffffffff, 0x00010006,
795 : 0x91a8, 0xffffffff, 0x00010006,
796 : 0x9150, 0xffffffff, 0x6e944040,
797 : 0x917c, 0xffffffff, 0x00030002,
798 : 0x9198, 0xffffffff, 0x00030002,
799 : 0x915c, 0xffffffff, 0x00010000,
800 : 0x3f90, 0xffff0000, 0xff000000,
801 : 0x9178, 0xffffffff, 0x00070000,
802 : 0x9194, 0xffffffff, 0x00070000,
803 : 0x9148, 0xffff0000, 0xff000000,
804 : 0x9190, 0xffffffff, 0x00090008,
805 : 0x91ac, 0xffffffff, 0x00090008,
806 : 0x3f94, 0xffff0000, 0xff000000,
807 : 0x914c, 0xffff0000, 0xff000000,
808 : 0x929c, 0xffffffff, 0x00000001,
809 : 0x8a18, 0xffffffff, 0x00000100,
810 : 0x8b28, 0xffffffff, 0x00000100,
811 : 0x9144, 0xffffffff, 0x00000100,
812 : 0x9b7c, 0xffffffff, 0x00000000,
813 : 0x8030, 0xffffffff, 0x0000100a,
814 : 0x8a14, 0xffffffff, 0x00000001,
815 : 0x8b24, 0xffffffff, 0x00ff0fff,
816 : 0x8b10, 0xffffffff, 0x00000000,
817 : 0x28a4c, 0x06000000, 0x06000000,
818 : 0x4d8, 0xffffffff, 0x00000100,
819 : 0x913c, 0xffff000f, 0x0100000a,
820 : 0x960c, 0xffffffff, 0x54763210,
821 : 0x88c4, 0xffffffff, 0x000000c2,
822 : 0x88d4, 0xffffffff, 0x00000010,
823 : 0x8974, 0xffffffff, 0x00000000,
824 : 0xc78, 0x00000080, 0x00000080,
825 : 0x5e78, 0xffffffff, 0x001000f0,
826 : 0xd02c, 0xffffffff, 0x08421000,
827 : 0xa008, 0xffffffff, 0x00010000,
828 : 0x8d00, 0xffffffff, 0x100e4848,
829 : 0x8d04, 0xffffffff, 0x00164745,
830 : 0x8c00, 0xffffffff, 0xe4000003,
831 : 0x8cf0, 0x1fffffff, 0x08e00410,
832 : 0x28350, 0xffffffff, 0x00000000,
833 : 0x9508, 0xffffffff, 0x00000002,
834 : 0x900c, 0xffffffff, 0x0017071f,
835 : 0x8c18, 0xffffffff, 0x10101060,
836 : 0x8c1c, 0xffffffff, 0x00001010
837 : };
838 :
839 : static const u32 barts_golden_registers[] =
840 : {
841 : 0x5eb4, 0xffffffff, 0x00000002,
842 : 0x5e78, 0x8f311ff1, 0x001000f0,
843 : 0x3f90, 0xffff0000, 0xff000000,
844 : 0x9148, 0xffff0000, 0xff000000,
845 : 0x3f94, 0xffff0000, 0xff000000,
846 : 0x914c, 0xffff0000, 0xff000000,
847 : 0xc78, 0x00000080, 0x00000080,
848 : 0xbd4, 0x70073777, 0x00010001,
849 : 0xd02c, 0xbfffff1f, 0x08421000,
850 : 0xd0b8, 0x03773777, 0x02011003,
851 : 0x5bc0, 0x00200000, 0x50100000,
852 : 0x98f8, 0x33773777, 0x02011003,
853 : 0x98fc, 0xffffffff, 0x76543210,
854 : 0x7030, 0x31000311, 0x00000011,
855 : 0x2f48, 0x00000007, 0x02011003,
856 : 0x6b28, 0x00000010, 0x00000012,
857 : 0x7728, 0x00000010, 0x00000012,
858 : 0x10328, 0x00000010, 0x00000012,
859 : 0x10f28, 0x00000010, 0x00000012,
860 : 0x11b28, 0x00000010, 0x00000012,
861 : 0x12728, 0x00000010, 0x00000012,
862 : 0x240c, 0x000007ff, 0x00000380,
863 : 0x8a14, 0xf000001f, 0x00000007,
864 : 0x8b24, 0x3fff3fff, 0x00ff0fff,
865 : 0x8b10, 0x0000ff0f, 0x00000000,
866 : 0x28a4c, 0x07ffffff, 0x06000000,
867 : 0x10c, 0x00000001, 0x00010003,
868 : 0xa02c, 0xffffffff, 0x0000009b,
869 : 0x913c, 0x0000000f, 0x0100000a,
870 : 0x8d00, 0xffff7f7f, 0x100e4848,
871 : 0x8d04, 0x00ffffff, 0x00164745,
872 : 0x8c00, 0xfffc0003, 0xe4000003,
873 : 0x8c04, 0xf8ff00ff, 0x40600060,
874 : 0x8c08, 0x00ff00ff, 0x001c001c,
875 : 0x8cf0, 0x1fff1fff, 0x08e00620,
876 : 0x8c20, 0x0fff0fff, 0x00800080,
877 : 0x8c24, 0x0fff0fff, 0x00800080,
878 : 0x8c18, 0xffffffff, 0x20202078,
879 : 0x8c1c, 0x0000ffff, 0x00001010,
880 : 0x28350, 0x00000f01, 0x00000000,
881 : 0x9508, 0x3700001f, 0x00000002,
882 : 0x960c, 0xffffffff, 0x54763210,
883 : 0x88c4, 0x001f3ae3, 0x000000c2,
884 : 0x88d4, 0x0000001f, 0x00000010,
885 : 0x8974, 0xffffffff, 0x00000000
886 : };
887 :
888 : static const u32 turks_golden_registers[] =
889 : {
890 : 0x5eb4, 0xffffffff, 0x00000002,
891 : 0x5e78, 0x8f311ff1, 0x001000f0,
892 : 0x8c8, 0x00003000, 0x00001070,
893 : 0x8cc, 0x000fffff, 0x00040035,
894 : 0x3f90, 0xffff0000, 0xfff00000,
895 : 0x9148, 0xffff0000, 0xfff00000,
896 : 0x3f94, 0xffff0000, 0xfff00000,
897 : 0x914c, 0xffff0000, 0xfff00000,
898 : 0xc78, 0x00000080, 0x00000080,
899 : 0xbd4, 0x00073007, 0x00010002,
900 : 0xd02c, 0xbfffff1f, 0x08421000,
901 : 0xd0b8, 0x03773777, 0x02010002,
902 : 0x5bc0, 0x00200000, 0x50100000,
903 : 0x98f8, 0x33773777, 0x00010002,
904 : 0x98fc, 0xffffffff, 0x33221100,
905 : 0x7030, 0x31000311, 0x00000011,
906 : 0x2f48, 0x33773777, 0x00010002,
907 : 0x6b28, 0x00000010, 0x00000012,
908 : 0x7728, 0x00000010, 0x00000012,
909 : 0x10328, 0x00000010, 0x00000012,
910 : 0x10f28, 0x00000010, 0x00000012,
911 : 0x11b28, 0x00000010, 0x00000012,
912 : 0x12728, 0x00000010, 0x00000012,
913 : 0x240c, 0x000007ff, 0x00000380,
914 : 0x8a14, 0xf000001f, 0x00000007,
915 : 0x8b24, 0x3fff3fff, 0x00ff0fff,
916 : 0x8b10, 0x0000ff0f, 0x00000000,
917 : 0x28a4c, 0x07ffffff, 0x06000000,
918 : 0x10c, 0x00000001, 0x00010003,
919 : 0xa02c, 0xffffffff, 0x0000009b,
920 : 0x913c, 0x0000000f, 0x0100000a,
921 : 0x8d00, 0xffff7f7f, 0x100e4848,
922 : 0x8d04, 0x00ffffff, 0x00164745,
923 : 0x8c00, 0xfffc0003, 0xe4000003,
924 : 0x8c04, 0xf8ff00ff, 0x40600060,
925 : 0x8c08, 0x00ff00ff, 0x001c001c,
926 : 0x8cf0, 0x1fff1fff, 0x08e00410,
927 : 0x8c20, 0x0fff0fff, 0x00800080,
928 : 0x8c24, 0x0fff0fff, 0x00800080,
929 : 0x8c18, 0xffffffff, 0x20202078,
930 : 0x8c1c, 0x0000ffff, 0x00001010,
931 : 0x28350, 0x00000f01, 0x00000000,
932 : 0x9508, 0x3700001f, 0x00000002,
933 : 0x960c, 0xffffffff, 0x54763210,
934 : 0x88c4, 0x001f3ae3, 0x000000c2,
935 : 0x88d4, 0x0000001f, 0x00000010,
936 : 0x8974, 0xffffffff, 0x00000000
937 : };
938 :
939 : static const u32 caicos_golden_registers[] =
940 : {
941 : 0x5eb4, 0xffffffff, 0x00000002,
942 : 0x5e78, 0x8f311ff1, 0x001000f0,
943 : 0x8c8, 0x00003420, 0x00001450,
944 : 0x8cc, 0x000fffff, 0x00040035,
945 : 0x3f90, 0xffff0000, 0xfffc0000,
946 : 0x9148, 0xffff0000, 0xfffc0000,
947 : 0x3f94, 0xffff0000, 0xfffc0000,
948 : 0x914c, 0xffff0000, 0xfffc0000,
949 : 0xc78, 0x00000080, 0x00000080,
950 : 0xbd4, 0x00073007, 0x00010001,
951 : 0xd02c, 0xbfffff1f, 0x08421000,
952 : 0xd0b8, 0x03773777, 0x02010001,
953 : 0x5bc0, 0x00200000, 0x50100000,
954 : 0x98f8, 0x33773777, 0x02010001,
955 : 0x98fc, 0xffffffff, 0x33221100,
956 : 0x7030, 0x31000311, 0x00000011,
957 : 0x2f48, 0x33773777, 0x02010001,
958 : 0x6b28, 0x00000010, 0x00000012,
959 : 0x7728, 0x00000010, 0x00000012,
960 : 0x10328, 0x00000010, 0x00000012,
961 : 0x10f28, 0x00000010, 0x00000012,
962 : 0x11b28, 0x00000010, 0x00000012,
963 : 0x12728, 0x00000010, 0x00000012,
964 : 0x240c, 0x000007ff, 0x00000380,
965 : 0x8a14, 0xf000001f, 0x00000001,
966 : 0x8b24, 0x3fff3fff, 0x00ff0fff,
967 : 0x8b10, 0x0000ff0f, 0x00000000,
968 : 0x28a4c, 0x07ffffff, 0x06000000,
969 : 0x10c, 0x00000001, 0x00010003,
970 : 0xa02c, 0xffffffff, 0x0000009b,
971 : 0x913c, 0x0000000f, 0x0100000a,
972 : 0x8d00, 0xffff7f7f, 0x100e4848,
973 : 0x8d04, 0x00ffffff, 0x00164745,
974 : 0x8c00, 0xfffc0003, 0xe4000003,
975 : 0x8c04, 0xf8ff00ff, 0x40600060,
976 : 0x8c08, 0x00ff00ff, 0x001c001c,
977 : 0x8cf0, 0x1fff1fff, 0x08e00410,
978 : 0x8c20, 0x0fff0fff, 0x00800080,
979 : 0x8c24, 0x0fff0fff, 0x00800080,
980 : 0x8c18, 0xffffffff, 0x20202078,
981 : 0x8c1c, 0x0000ffff, 0x00001010,
982 : 0x28350, 0x00000f01, 0x00000000,
983 : 0x9508, 0x3700001f, 0x00000002,
984 : 0x960c, 0xffffffff, 0x54763210,
985 : 0x88c4, 0x001f3ae3, 0x000000c2,
986 : 0x88d4, 0x0000001f, 0x00000010,
987 : 0x8974, 0xffffffff, 0x00000000
988 : };
989 :
990 0 : static void evergreen_init_golden_registers(struct radeon_device *rdev)
991 : {
992 0 : switch (rdev->family) {
993 : case CHIP_CYPRESS:
994 : case CHIP_HEMLOCK:
995 0 : radeon_program_register_sequence(rdev,
996 : evergreen_golden_registers,
997 : (const u32)ARRAY_SIZE(evergreen_golden_registers));
998 0 : radeon_program_register_sequence(rdev,
999 : evergreen_golden_registers2,
1000 : (const u32)ARRAY_SIZE(evergreen_golden_registers2));
1001 0 : radeon_program_register_sequence(rdev,
1002 : cypress_mgcg_init,
1003 : (const u32)ARRAY_SIZE(cypress_mgcg_init));
1004 0 : break;
1005 : case CHIP_JUNIPER:
1006 0 : radeon_program_register_sequence(rdev,
1007 : evergreen_golden_registers,
1008 : (const u32)ARRAY_SIZE(evergreen_golden_registers));
1009 0 : radeon_program_register_sequence(rdev,
1010 : evergreen_golden_registers2,
1011 : (const u32)ARRAY_SIZE(evergreen_golden_registers2));
1012 0 : radeon_program_register_sequence(rdev,
1013 : juniper_mgcg_init,
1014 : (const u32)ARRAY_SIZE(juniper_mgcg_init));
1015 0 : break;
1016 : case CHIP_REDWOOD:
1017 0 : radeon_program_register_sequence(rdev,
1018 : evergreen_golden_registers,
1019 : (const u32)ARRAY_SIZE(evergreen_golden_registers));
1020 0 : radeon_program_register_sequence(rdev,
1021 : evergreen_golden_registers2,
1022 : (const u32)ARRAY_SIZE(evergreen_golden_registers2));
1023 0 : radeon_program_register_sequence(rdev,
1024 : redwood_mgcg_init,
1025 : (const u32)ARRAY_SIZE(redwood_mgcg_init));
1026 0 : break;
1027 : case CHIP_CEDAR:
1028 0 : radeon_program_register_sequence(rdev,
1029 : cedar_golden_registers,
1030 : (const u32)ARRAY_SIZE(cedar_golden_registers));
1031 0 : radeon_program_register_sequence(rdev,
1032 : evergreen_golden_registers2,
1033 : (const u32)ARRAY_SIZE(evergreen_golden_registers2));
1034 0 : radeon_program_register_sequence(rdev,
1035 : cedar_mgcg_init,
1036 : (const u32)ARRAY_SIZE(cedar_mgcg_init));
1037 0 : break;
1038 : case CHIP_PALM:
1039 0 : radeon_program_register_sequence(rdev,
1040 : wrestler_golden_registers,
1041 : (const u32)ARRAY_SIZE(wrestler_golden_registers));
1042 0 : break;
1043 : case CHIP_SUMO:
1044 0 : radeon_program_register_sequence(rdev,
1045 : supersumo_golden_registers,
1046 : (const u32)ARRAY_SIZE(supersumo_golden_registers));
1047 0 : break;
1048 : case CHIP_SUMO2:
1049 0 : radeon_program_register_sequence(rdev,
1050 : supersumo_golden_registers,
1051 : (const u32)ARRAY_SIZE(supersumo_golden_registers));
1052 0 : radeon_program_register_sequence(rdev,
1053 : sumo_golden_registers,
1054 : (const u32)ARRAY_SIZE(sumo_golden_registers));
1055 0 : break;
1056 : case CHIP_BARTS:
1057 0 : radeon_program_register_sequence(rdev,
1058 : barts_golden_registers,
1059 : (const u32)ARRAY_SIZE(barts_golden_registers));
1060 0 : break;
1061 : case CHIP_TURKS:
1062 0 : radeon_program_register_sequence(rdev,
1063 : turks_golden_registers,
1064 : (const u32)ARRAY_SIZE(turks_golden_registers));
1065 0 : break;
1066 : case CHIP_CAICOS:
1067 0 : radeon_program_register_sequence(rdev,
1068 : caicos_golden_registers,
1069 : (const u32)ARRAY_SIZE(caicos_golden_registers));
1070 0 : break;
1071 : default:
1072 : break;
1073 : }
1074 0 : }
1075 :
1076 : /**
1077 : * evergreen_get_allowed_info_register - fetch the register for the info ioctl
1078 : *
1079 : * @rdev: radeon_device pointer
1080 : * @reg: register offset in bytes
1081 : * @val: register value
1082 : *
1083 : * Returns 0 for success or -EINVAL for an invalid register
1084 : *
1085 : */
1086 0 : int evergreen_get_allowed_info_register(struct radeon_device *rdev,
1087 : u32 reg, u32 *val)
1088 : {
1089 0 : switch (reg) {
1090 : case GRBM_STATUS:
1091 : case GRBM_STATUS_SE0:
1092 : case GRBM_STATUS_SE1:
1093 : case SRBM_STATUS:
1094 : case SRBM_STATUS2:
1095 : case DMA_STATUS_REG:
1096 : case UVD_STATUS:
1097 0 : *val = RREG32(reg);
1098 0 : return 0;
1099 : default:
1100 0 : return -EINVAL;
1101 : }
1102 0 : }
1103 :
1104 0 : void evergreen_tiling_fields(unsigned tiling_flags, unsigned *bankw,
1105 : unsigned *bankh, unsigned *mtaspect,
1106 : unsigned *tile_split)
1107 : {
1108 0 : *bankw = (tiling_flags >> RADEON_TILING_EG_BANKW_SHIFT) & RADEON_TILING_EG_BANKW_MASK;
1109 0 : *bankh = (tiling_flags >> RADEON_TILING_EG_BANKH_SHIFT) & RADEON_TILING_EG_BANKH_MASK;
1110 0 : *mtaspect = (tiling_flags >> RADEON_TILING_EG_MACRO_TILE_ASPECT_SHIFT) & RADEON_TILING_EG_MACRO_TILE_ASPECT_MASK;
1111 0 : *tile_split = (tiling_flags >> RADEON_TILING_EG_TILE_SPLIT_SHIFT) & RADEON_TILING_EG_TILE_SPLIT_MASK;
1112 0 : switch (*bankw) {
1113 : default:
1114 0 : case 1: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_1; break;
1115 0 : case 2: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_2; break;
1116 0 : case 4: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_4; break;
1117 0 : case 8: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_8; break;
1118 : }
1119 0 : switch (*bankh) {
1120 : default:
1121 0 : case 1: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_1; break;
1122 0 : case 2: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_2; break;
1123 0 : case 4: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_4; break;
1124 0 : case 8: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_8; break;
1125 : }
1126 0 : switch (*mtaspect) {
1127 : default:
1128 0 : case 1: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_1; break;
1129 0 : case 2: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_2; break;
1130 0 : case 4: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_4; break;
1131 0 : case 8: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_8; break;
1132 : }
1133 0 : }
1134 :
1135 0 : static int sumo_set_uvd_clock(struct radeon_device *rdev, u32 clock,
1136 : u32 cntl_reg, u32 status_reg)
1137 : {
1138 : int r, i;
1139 0 : struct atom_clock_dividers dividers;
1140 :
1141 0 : r = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
1142 : clock, false, ÷rs);
1143 0 : if (r)
1144 0 : return r;
1145 :
1146 0 : WREG32_P(cntl_reg, dividers.post_div, ~(DCLK_DIR_CNTL_EN|DCLK_DIVIDER_MASK));
1147 :
1148 0 : for (i = 0; i < 100; i++) {
1149 0 : if (RREG32(status_reg) & DCLK_STATUS)
1150 : break;
1151 0 : mdelay(10);
1152 : }
1153 0 : if (i == 100)
1154 0 : return -ETIMEDOUT;
1155 :
1156 0 : return 0;
1157 0 : }
1158 :
1159 0 : int sumo_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
1160 : {
1161 : int r = 0;
1162 0 : u32 cg_scratch = RREG32(CG_SCRATCH1);
1163 :
1164 0 : r = sumo_set_uvd_clock(rdev, vclk, CG_VCLK_CNTL, CG_VCLK_STATUS);
1165 0 : if (r)
1166 : goto done;
1167 0 : cg_scratch &= 0xffff0000;
1168 0 : cg_scratch |= vclk / 100; /* Mhz */
1169 :
1170 0 : r = sumo_set_uvd_clock(rdev, dclk, CG_DCLK_CNTL, CG_DCLK_STATUS);
1171 0 : if (r)
1172 : goto done;
1173 0 : cg_scratch &= 0x0000ffff;
1174 0 : cg_scratch |= (dclk / 100) << 16; /* Mhz */
1175 :
1176 : done:
1177 0 : WREG32(CG_SCRATCH1, cg_scratch);
1178 :
1179 0 : return r;
1180 : }
1181 :
1182 0 : int evergreen_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
1183 : {
1184 : /* start off with something large */
1185 0 : unsigned fb_div = 0, vclk_div = 0, dclk_div = 0;
1186 : int r;
1187 :
1188 : /* bypass vclk and dclk with bclk */
1189 0 : WREG32_P(CG_UPLL_FUNC_CNTL_2,
1190 : VCLK_SRC_SEL(1) | DCLK_SRC_SEL(1),
1191 : ~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));
1192 :
1193 : /* put PLL in bypass mode */
1194 0 : WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_BYPASS_EN_MASK, ~UPLL_BYPASS_EN_MASK);
1195 :
1196 0 : if (!vclk || !dclk) {
1197 : /* keep the Bypass mode, put PLL to sleep */
1198 0 : WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
1199 0 : return 0;
1200 : }
1201 :
1202 0 : r = radeon_uvd_calc_upll_dividers(rdev, vclk, dclk, 125000, 250000,
1203 : 16384, 0x03FFFFFF, 0, 128, 5,
1204 : &fb_div, &vclk_div, &dclk_div);
1205 0 : if (r)
1206 0 : return r;
1207 :
1208 : /* set VCO_MODE to 1 */
1209 0 : WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_VCO_MODE_MASK, ~UPLL_VCO_MODE_MASK);
1210 :
1211 : /* toggle UPLL_SLEEP to 1 then back to 0 */
1212 0 : WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
1213 0 : WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_SLEEP_MASK);
1214 :
1215 : /* deassert UPLL_RESET */
1216 0 : WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);
1217 :
1218 0 : mdelay(1);
1219 :
1220 0 : r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
1221 0 : if (r)
1222 0 : return r;
1223 :
1224 : /* assert UPLL_RESET again */
1225 0 : WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_RESET_MASK, ~UPLL_RESET_MASK);
1226 :
1227 : /* disable spread spectrum. */
1228 0 : WREG32_P(CG_UPLL_SPREAD_SPECTRUM, 0, ~SSEN_MASK);
1229 :
1230 : /* set feedback divider */
1231 0 : WREG32_P(CG_UPLL_FUNC_CNTL_3, UPLL_FB_DIV(fb_div), ~UPLL_FB_DIV_MASK);
1232 :
1233 : /* set ref divider to 0 */
1234 0 : WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_REF_DIV_MASK);
1235 :
1236 0 : if (fb_div < 307200)
1237 0 : WREG32_P(CG_UPLL_FUNC_CNTL_4, 0, ~UPLL_SPARE_ISPARE9);
1238 : else
1239 0 : WREG32_P(CG_UPLL_FUNC_CNTL_4, UPLL_SPARE_ISPARE9, ~UPLL_SPARE_ISPARE9);
1240 :
1241 : /* set PDIV_A and PDIV_B */
1242 0 : WREG32_P(CG_UPLL_FUNC_CNTL_2,
1243 : UPLL_PDIV_A(vclk_div) | UPLL_PDIV_B(dclk_div),
1244 : ~(UPLL_PDIV_A_MASK | UPLL_PDIV_B_MASK));
1245 :
1246 : /* give the PLL some time to settle */
1247 0 : mdelay(15);
1248 :
1249 : /* deassert PLL_RESET */
1250 0 : WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);
1251 :
1252 0 : mdelay(15);
1253 :
1254 : /* switch from bypass mode to normal mode */
1255 0 : WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_BYPASS_EN_MASK);
1256 :
1257 0 : r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
1258 0 : if (r)
1259 0 : return r;
1260 :
1261 : /* switch VCLK and DCLK selection */
1262 0 : WREG32_P(CG_UPLL_FUNC_CNTL_2,
1263 : VCLK_SRC_SEL(2) | DCLK_SRC_SEL(2),
1264 : ~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));
1265 :
1266 0 : mdelay(100);
1267 :
1268 0 : return 0;
1269 0 : }
1270 :
1271 : #ifdef __linux__
1272 : void evergreen_fix_pci_max_read_req_size(struct radeon_device *rdev)
1273 : {
1274 : int readrq;
1275 : u16 v;
1276 :
1277 : readrq = pcie_get_readrq(rdev->pdev);
1278 : v = ffs(readrq) - 8;
1279 : /* if bios or OS sets MAX_READ_REQUEST_SIZE to an invalid value, fix it
1280 : * to avoid hangs or perfomance issues
1281 : */
1282 : if ((v == 0) || (v == 6) || (v == 7))
1283 : pcie_set_readrq(rdev->pdev, 512);
1284 : }
1285 : #else
1286 0 : void evergreen_fix_pci_max_read_req_size(struct radeon_device *rdev)
1287 : {
1288 0 : pcireg_t ctl, v;
1289 0 : int off;
1290 :
1291 0 : if (pci_get_capability(rdev->pc, rdev->pa_tag, PCI_CAP_PCIEXPRESS,
1292 0 : &off, &ctl) == 0)
1293 0 : return;
1294 :
1295 0 : ctl = pci_conf_read(rdev->pc, rdev->pa_tag, off + PCI_PCIE_DCSR);
1296 :
1297 0 : v = (ctl & PCI_PCIE_DCSR_MPS) >> 12;
1298 :
1299 : /* if bios or OS sets MAX_READ_REQUEST_SIZE to an invalid value, fix it
1300 : * to avoid hangs or perfomance issues
1301 : */
1302 0 : if ((v == 0) || (v == 6) || (v == 7)) {
1303 0 : ctl &= ~PCI_PCIE_DCSR_MPS;
1304 0 : ctl |= (2 << 12);
1305 0 : pci_conf_write(rdev->pc, rdev->pa_tag, off + PCI_PCIE_DCSR, ctl);
1306 0 : }
1307 0 : }
1308 : #endif
1309 :
1310 0 : void dce4_program_fmt(struct drm_encoder *encoder)
1311 : {
1312 0 : struct drm_device *dev = encoder->dev;
1313 0 : struct radeon_device *rdev = dev->dev_private;
1314 0 : struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
1315 0 : struct radeon_crtc *radeon_crtc = to_radeon_crtc(encoder->crtc);
1316 0 : struct drm_connector *connector = radeon_get_connector_for_encoder(encoder);
1317 : int bpc = 0;
1318 : u32 tmp = 0;
1319 : enum radeon_connector_dither dither = RADEON_FMT_DITHER_DISABLE;
1320 :
1321 0 : if (connector) {
1322 0 : struct radeon_connector *radeon_connector = to_radeon_connector(connector);
1323 0 : bpc = radeon_get_monitor_bpc(connector);
1324 0 : dither = radeon_connector->dither;
1325 0 : }
1326 :
1327 : /* LVDS/eDP FMT is set up by atom */
1328 0 : if (radeon_encoder->devices & ATOM_DEVICE_LCD_SUPPORT)
1329 0 : return;
1330 :
1331 : /* not needed for analog */
1332 0 : if ((radeon_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1) ||
1333 0 : (radeon_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2))
1334 0 : return;
1335 :
1336 0 : if (bpc == 0)
1337 0 : return;
1338 :
1339 0 : switch (bpc) {
1340 : case 6:
1341 0 : if (dither == RADEON_FMT_DITHER_ENABLE)
1342 : /* XXX sort out optimal dither settings */
1343 0 : tmp |= (FMT_FRAME_RANDOM_ENABLE | FMT_HIGHPASS_RANDOM_ENABLE |
1344 : FMT_SPATIAL_DITHER_EN);
1345 : else
1346 : tmp |= FMT_TRUNCATE_EN;
1347 : break;
1348 : case 8:
1349 0 : if (dither == RADEON_FMT_DITHER_ENABLE)
1350 : /* XXX sort out optimal dither settings */
1351 0 : tmp |= (FMT_FRAME_RANDOM_ENABLE | FMT_HIGHPASS_RANDOM_ENABLE |
1352 : FMT_RGB_RANDOM_ENABLE |
1353 : FMT_SPATIAL_DITHER_EN | FMT_SPATIAL_DITHER_DEPTH);
1354 : else
1355 : tmp |= (FMT_TRUNCATE_EN | FMT_TRUNCATE_DEPTH);
1356 : break;
1357 : case 10:
1358 : default:
1359 : /* not needed */
1360 : break;
1361 : }
1362 :
1363 0 : WREG32(FMT_BIT_DEPTH_CONTROL + radeon_crtc->crtc_offset, tmp);
1364 0 : }
1365 :
1366 0 : static bool dce4_is_in_vblank(struct radeon_device *rdev, int crtc)
1367 : {
1368 0 : if (RREG32(EVERGREEN_CRTC_STATUS + crtc_offsets[crtc]) & EVERGREEN_CRTC_V_BLANK)
1369 0 : return true;
1370 : else
1371 0 : return false;
1372 0 : }
1373 :
1374 0 : static bool dce4_is_counter_moving(struct radeon_device *rdev, int crtc)
1375 : {
1376 : u32 pos1, pos2;
1377 :
1378 0 : pos1 = RREG32(EVERGREEN_CRTC_STATUS_POSITION + crtc_offsets[crtc]);
1379 0 : pos2 = RREG32(EVERGREEN_CRTC_STATUS_POSITION + crtc_offsets[crtc]);
1380 :
1381 0 : if (pos1 != pos2)
1382 0 : return true;
1383 : else
1384 0 : return false;
1385 0 : }
1386 :
1387 : /**
1388 : * dce4_wait_for_vblank - vblank wait asic callback.
1389 : *
1390 : * @rdev: radeon_device pointer
1391 : * @crtc: crtc to wait for vblank on
1392 : *
1393 : * Wait for vblank on the requested crtc (evergreen+).
1394 : */
1395 0 : void dce4_wait_for_vblank(struct radeon_device *rdev, int crtc)
1396 : {
1397 : unsigned i = 0;
1398 :
1399 0 : if (crtc >= rdev->num_crtc)
1400 0 : return;
1401 :
1402 0 : if (!(RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[crtc]) & EVERGREEN_CRTC_MASTER_EN))
1403 0 : return;
1404 :
1405 : /* depending on when we hit vblank, we may be close to active; if so,
1406 : * wait for another frame.
1407 : */
1408 0 : while (dce4_is_in_vblank(rdev, crtc)) {
1409 0 : if (i++ % 100 == 0) {
1410 0 : if (!dce4_is_counter_moving(rdev, crtc))
1411 : break;
1412 : }
1413 : }
1414 :
1415 0 : while (!dce4_is_in_vblank(rdev, crtc)) {
1416 0 : if (i++ % 100 == 0) {
1417 0 : if (!dce4_is_counter_moving(rdev, crtc))
1418 : break;
1419 : }
1420 : }
1421 0 : }
1422 :
1423 : /**
1424 : * evergreen_page_flip - pageflip callback.
1425 : *
1426 : * @rdev: radeon_device pointer
1427 : * @crtc_id: crtc to cleanup pageflip on
1428 : * @crtc_base: new address of the crtc (GPU MC address)
1429 : *
1430 : * Triggers the actual pageflip by updating the primary
1431 : * surface base address (evergreen+).
1432 : */
1433 0 : void evergreen_page_flip(struct radeon_device *rdev, int crtc_id, u64 crtc_base)
1434 : {
1435 0 : struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id];
1436 :
1437 : /* update the scanout addresses */
1438 0 : WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
1439 : upper_32_bits(crtc_base));
1440 0 : WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
1441 : (u32)crtc_base);
1442 : /* post the write */
1443 0 : RREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset);
1444 0 : }
1445 :
1446 : /**
1447 : * evergreen_page_flip_pending - check if page flip is still pending
1448 : *
1449 : * @rdev: radeon_device pointer
1450 : * @crtc_id: crtc to check
1451 : *
1452 : * Returns the current update pending status.
1453 : */
1454 0 : bool evergreen_page_flip_pending(struct radeon_device *rdev, int crtc_id)
1455 : {
1456 0 : struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id];
1457 :
1458 : /* Return current update_pending status: */
1459 0 : return !!(RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset) &
1460 : EVERGREEN_GRPH_SURFACE_UPDATE_PENDING);
1461 : }
1462 :
1463 : /* get temperature in millidegrees */
1464 0 : int evergreen_get_temp(struct radeon_device *rdev)
1465 : {
1466 : u32 temp, toffset;
1467 : int actual_temp = 0;
1468 :
1469 0 : if (rdev->family == CHIP_JUNIPER) {
1470 0 : toffset = (RREG32(CG_THERMAL_CTRL) & TOFFSET_MASK) >>
1471 : TOFFSET_SHIFT;
1472 0 : temp = (RREG32(CG_TS0_STATUS) & TS0_ADC_DOUT_MASK) >>
1473 : TS0_ADC_DOUT_SHIFT;
1474 :
1475 0 : if (toffset & 0x100)
1476 0 : actual_temp = temp / 2 - (0x200 - toffset);
1477 : else
1478 0 : actual_temp = temp / 2 + toffset;
1479 :
1480 0 : actual_temp = actual_temp * 1000;
1481 :
1482 0 : } else {
1483 0 : temp = (RREG32(CG_MULT_THERMAL_STATUS) & ASIC_T_MASK) >>
1484 : ASIC_T_SHIFT;
1485 :
1486 0 : if (temp & 0x400)
1487 0 : actual_temp = -256;
1488 0 : else if (temp & 0x200)
1489 0 : actual_temp = 255;
1490 0 : else if (temp & 0x100) {
1491 0 : actual_temp = temp & 0x1ff;
1492 0 : actual_temp |= ~0x1ff;
1493 0 : } else
1494 0 : actual_temp = temp & 0xff;
1495 :
1496 0 : actual_temp = (actual_temp * 1000) / 2;
1497 : }
1498 :
1499 0 : return actual_temp;
1500 : }
1501 :
1502 0 : int sumo_get_temp(struct radeon_device *rdev)
1503 : {
1504 0 : u32 temp = RREG32(CG_THERMAL_STATUS) & 0xff;
1505 0 : int actual_temp = temp - 49;
1506 :
1507 0 : return actual_temp * 1000;
1508 : }
1509 :
1510 : /**
1511 : * sumo_pm_init_profile - Initialize power profiles callback.
1512 : *
1513 : * @rdev: radeon_device pointer
1514 : *
1515 : * Initialize the power states used in profile mode
1516 : * (sumo, trinity, SI).
1517 : * Used for profile mode only.
1518 : */
1519 0 : void sumo_pm_init_profile(struct radeon_device *rdev)
1520 : {
1521 : int idx;
1522 :
1523 : /* default */
1524 0 : rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
1525 0 : rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
1526 0 : rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
1527 0 : rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 0;
1528 :
1529 : /* low,mid sh/mh */
1530 0 : if (rdev->flags & RADEON_IS_MOBILITY)
1531 0 : idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
1532 : else
1533 0 : idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1534 :
1535 0 : rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
1536 0 : rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
1537 0 : rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
1538 0 : rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
1539 :
1540 0 : rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
1541 0 : rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
1542 0 : rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
1543 0 : rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
1544 :
1545 0 : rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
1546 0 : rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
1547 0 : rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
1548 0 : rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 0;
1549 :
1550 0 : rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
1551 0 : rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
1552 0 : rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
1553 0 : rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 0;
1554 :
1555 : /* high sh/mh */
1556 0 : idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1557 0 : rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
1558 0 : rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
1559 0 : rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
1560 0 : rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx =
1561 0 : rdev->pm.power_state[idx].num_clock_modes - 1;
1562 :
1563 0 : rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
1564 0 : rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
1565 0 : rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
1566 0 : rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx =
1567 0 : rdev->pm.power_state[idx].num_clock_modes - 1;
1568 0 : }
1569 :
1570 : /**
1571 : * btc_pm_init_profile - Initialize power profiles callback.
1572 : *
1573 : * @rdev: radeon_device pointer
1574 : *
1575 : * Initialize the power states used in profile mode
1576 : * (BTC, cayman).
1577 : * Used for profile mode only.
1578 : */
1579 0 : void btc_pm_init_profile(struct radeon_device *rdev)
1580 : {
1581 : int idx;
1582 :
1583 : /* default */
1584 0 : rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
1585 0 : rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
1586 0 : rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
1587 0 : rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 2;
1588 : /* starting with BTC, there is one state that is used for both
1589 : * MH and SH. Difference is that we always use the high clock index for
1590 : * mclk.
1591 : */
1592 0 : if (rdev->flags & RADEON_IS_MOBILITY)
1593 0 : idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
1594 : else
1595 0 : idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1596 : /* low sh */
1597 0 : rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
1598 0 : rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
1599 0 : rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
1600 0 : rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
1601 : /* mid sh */
1602 0 : rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
1603 0 : rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
1604 0 : rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
1605 0 : rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 1;
1606 : /* high sh */
1607 0 : rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
1608 0 : rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
1609 0 : rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
1610 0 : rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx = 2;
1611 : /* low mh */
1612 0 : rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
1613 0 : rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
1614 0 : rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
1615 0 : rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
1616 : /* mid mh */
1617 0 : rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
1618 0 : rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
1619 0 : rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
1620 0 : rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 1;
1621 : /* high mh */
1622 0 : rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
1623 0 : rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
1624 0 : rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
1625 0 : rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx = 2;
1626 0 : }
1627 :
1628 : /**
1629 : * evergreen_pm_misc - set additional pm hw parameters callback.
1630 : *
1631 : * @rdev: radeon_device pointer
1632 : *
1633 : * Set non-clock parameters associated with a power state
1634 : * (voltage, etc.) (evergreen+).
1635 : */
1636 0 : void evergreen_pm_misc(struct radeon_device *rdev)
1637 : {
1638 0 : int req_ps_idx = rdev->pm.requested_power_state_index;
1639 0 : int req_cm_idx = rdev->pm.requested_clock_mode_index;
1640 0 : struct radeon_power_state *ps = &rdev->pm.power_state[req_ps_idx];
1641 0 : struct radeon_voltage *voltage = &ps->clock_info[req_cm_idx].voltage;
1642 :
1643 0 : if (voltage->type == VOLTAGE_SW) {
1644 : /* 0xff0x are flags rather then an actual voltage */
1645 0 : if ((voltage->voltage & 0xff00) == 0xff00)
1646 0 : return;
1647 0 : if (voltage->voltage && (voltage->voltage != rdev->pm.current_vddc)) {
1648 0 : radeon_atom_set_voltage(rdev, voltage->voltage, SET_VOLTAGE_TYPE_ASIC_VDDC);
1649 0 : rdev->pm.current_vddc = voltage->voltage;
1650 : DRM_DEBUG("Setting: vddc: %d\n", voltage->voltage);
1651 0 : }
1652 :
1653 : /* starting with BTC, there is one state that is used for both
1654 : * MH and SH. Difference is that we always use the high clock index for
1655 : * mclk and vddci.
1656 : */
1657 0 : if ((rdev->pm.pm_method == PM_METHOD_PROFILE) &&
1658 0 : (rdev->family >= CHIP_BARTS) &&
1659 0 : rdev->pm.active_crtc_count &&
1660 0 : ((rdev->pm.profile_index == PM_PROFILE_MID_MH_IDX) ||
1661 0 : (rdev->pm.profile_index == PM_PROFILE_LOW_MH_IDX)))
1662 0 : voltage = &rdev->pm.power_state[req_ps_idx].
1663 0 : clock_info[rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx].voltage;
1664 :
1665 : /* 0xff0x are flags rather then an actual voltage */
1666 0 : if ((voltage->vddci & 0xff00) == 0xff00)
1667 0 : return;
1668 0 : if (voltage->vddci && (voltage->vddci != rdev->pm.current_vddci)) {
1669 0 : radeon_atom_set_voltage(rdev, voltage->vddci, SET_VOLTAGE_TYPE_ASIC_VDDCI);
1670 0 : rdev->pm.current_vddci = voltage->vddci;
1671 : DRM_DEBUG("Setting: vddci: %d\n", voltage->vddci);
1672 0 : }
1673 : }
1674 0 : }
1675 :
1676 : /**
1677 : * evergreen_pm_prepare - pre-power state change callback.
1678 : *
1679 : * @rdev: radeon_device pointer
1680 : *
1681 : * Prepare for a power state change (evergreen+).
1682 : */
1683 0 : void evergreen_pm_prepare(struct radeon_device *rdev)
1684 : {
1685 0 : struct drm_device *ddev = rdev->ddev;
1686 : struct drm_crtc *crtc;
1687 : struct radeon_crtc *radeon_crtc;
1688 : u32 tmp;
1689 :
1690 : /* disable any active CRTCs */
1691 0 : list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
1692 0 : radeon_crtc = to_radeon_crtc(crtc);
1693 0 : if (radeon_crtc->enabled) {
1694 0 : tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
1695 0 : tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
1696 0 : WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
1697 0 : }
1698 : }
1699 0 : }
1700 :
1701 : /**
1702 : * evergreen_pm_finish - post-power state change callback.
1703 : *
1704 : * @rdev: radeon_device pointer
1705 : *
1706 : * Clean up after a power state change (evergreen+).
1707 : */
1708 0 : void evergreen_pm_finish(struct radeon_device *rdev)
1709 : {
1710 0 : struct drm_device *ddev = rdev->ddev;
1711 : struct drm_crtc *crtc;
1712 : struct radeon_crtc *radeon_crtc;
1713 : u32 tmp;
1714 :
1715 : /* enable any active CRTCs */
1716 0 : list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
1717 0 : radeon_crtc = to_radeon_crtc(crtc);
1718 0 : if (radeon_crtc->enabled) {
1719 0 : tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
1720 0 : tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
1721 0 : WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
1722 0 : }
1723 : }
1724 0 : }
1725 :
1726 : /**
1727 : * evergreen_hpd_sense - hpd sense callback.
1728 : *
1729 : * @rdev: radeon_device pointer
1730 : * @hpd: hpd (hotplug detect) pin
1731 : *
1732 : * Checks if a digital monitor is connected (evergreen+).
1733 : * Returns true if connected, false if not connected.
1734 : */
1735 0 : bool evergreen_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd)
1736 : {
1737 : bool connected = false;
1738 :
1739 0 : switch (hpd) {
1740 : case RADEON_HPD_1:
1741 0 : if (RREG32(DC_HPD1_INT_STATUS) & DC_HPDx_SENSE)
1742 0 : connected = true;
1743 : break;
1744 : case RADEON_HPD_2:
1745 0 : if (RREG32(DC_HPD2_INT_STATUS) & DC_HPDx_SENSE)
1746 0 : connected = true;
1747 : break;
1748 : case RADEON_HPD_3:
1749 0 : if (RREG32(DC_HPD3_INT_STATUS) & DC_HPDx_SENSE)
1750 0 : connected = true;
1751 : break;
1752 : case RADEON_HPD_4:
1753 0 : if (RREG32(DC_HPD4_INT_STATUS) & DC_HPDx_SENSE)
1754 0 : connected = true;
1755 : break;
1756 : case RADEON_HPD_5:
1757 0 : if (RREG32(DC_HPD5_INT_STATUS) & DC_HPDx_SENSE)
1758 0 : connected = true;
1759 : break;
1760 : case RADEON_HPD_6:
1761 0 : if (RREG32(DC_HPD6_INT_STATUS) & DC_HPDx_SENSE)
1762 0 : connected = true;
1763 : break;
1764 : default:
1765 : break;
1766 : }
1767 :
1768 0 : return connected;
1769 : }
1770 :
1771 : /**
1772 : * evergreen_hpd_set_polarity - hpd set polarity callback.
1773 : *
1774 : * @rdev: radeon_device pointer
1775 : * @hpd: hpd (hotplug detect) pin
1776 : *
1777 : * Set the polarity of the hpd pin (evergreen+).
1778 : */
1779 0 : void evergreen_hpd_set_polarity(struct radeon_device *rdev,
1780 : enum radeon_hpd_id hpd)
1781 : {
1782 : u32 tmp;
1783 0 : bool connected = evergreen_hpd_sense(rdev, hpd);
1784 :
1785 0 : switch (hpd) {
1786 : case RADEON_HPD_1:
1787 0 : tmp = RREG32(DC_HPD1_INT_CONTROL);
1788 0 : if (connected)
1789 0 : tmp &= ~DC_HPDx_INT_POLARITY;
1790 : else
1791 0 : tmp |= DC_HPDx_INT_POLARITY;
1792 0 : WREG32(DC_HPD1_INT_CONTROL, tmp);
1793 0 : break;
1794 : case RADEON_HPD_2:
1795 0 : tmp = RREG32(DC_HPD2_INT_CONTROL);
1796 0 : if (connected)
1797 0 : tmp &= ~DC_HPDx_INT_POLARITY;
1798 : else
1799 0 : tmp |= DC_HPDx_INT_POLARITY;
1800 0 : WREG32(DC_HPD2_INT_CONTROL, tmp);
1801 0 : break;
1802 : case RADEON_HPD_3:
1803 0 : tmp = RREG32(DC_HPD3_INT_CONTROL);
1804 0 : if (connected)
1805 0 : tmp &= ~DC_HPDx_INT_POLARITY;
1806 : else
1807 0 : tmp |= DC_HPDx_INT_POLARITY;
1808 0 : WREG32(DC_HPD3_INT_CONTROL, tmp);
1809 0 : break;
1810 : case RADEON_HPD_4:
1811 0 : tmp = RREG32(DC_HPD4_INT_CONTROL);
1812 0 : if (connected)
1813 0 : tmp &= ~DC_HPDx_INT_POLARITY;
1814 : else
1815 0 : tmp |= DC_HPDx_INT_POLARITY;
1816 0 : WREG32(DC_HPD4_INT_CONTROL, tmp);
1817 0 : break;
1818 : case RADEON_HPD_5:
1819 0 : tmp = RREG32(DC_HPD5_INT_CONTROL);
1820 0 : if (connected)
1821 0 : tmp &= ~DC_HPDx_INT_POLARITY;
1822 : else
1823 0 : tmp |= DC_HPDx_INT_POLARITY;
1824 0 : WREG32(DC_HPD5_INT_CONTROL, tmp);
1825 0 : break;
1826 : case RADEON_HPD_6:
1827 0 : tmp = RREG32(DC_HPD6_INT_CONTROL);
1828 0 : if (connected)
1829 0 : tmp &= ~DC_HPDx_INT_POLARITY;
1830 : else
1831 0 : tmp |= DC_HPDx_INT_POLARITY;
1832 0 : WREG32(DC_HPD6_INT_CONTROL, tmp);
1833 0 : break;
1834 : default:
1835 : break;
1836 : }
1837 0 : }
1838 :
1839 : /**
1840 : * evergreen_hpd_init - hpd setup callback.
1841 : *
1842 : * @rdev: radeon_device pointer
1843 : *
1844 : * Setup the hpd pins used by the card (evergreen+).
1845 : * Enable the pin, set the polarity, and enable the hpd interrupts.
1846 : */
1847 0 : void evergreen_hpd_init(struct radeon_device *rdev)
1848 : {
1849 0 : struct drm_device *dev = rdev->ddev;
1850 : struct drm_connector *connector;
1851 : unsigned enabled = 0;
1852 : u32 tmp = DC_HPDx_CONNECTION_TIMER(0x9c4) |
1853 : DC_HPDx_RX_INT_TIMER(0xfa) | DC_HPDx_EN;
1854 :
1855 0 : list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
1856 0 : struct radeon_connector *radeon_connector = to_radeon_connector(connector);
1857 :
1858 0 : if (connector->connector_type == DRM_MODE_CONNECTOR_eDP ||
1859 0 : connector->connector_type == DRM_MODE_CONNECTOR_LVDS) {
1860 : /* don't try to enable hpd on eDP or LVDS avoid breaking the
1861 : * aux dp channel on imac and help (but not completely fix)
1862 : * https://bugzilla.redhat.com/show_bug.cgi?id=726143
1863 : * also avoid interrupt storms during dpms.
1864 : */
1865 0 : continue;
1866 : }
1867 0 : switch (radeon_connector->hpd.hpd) {
1868 : case RADEON_HPD_1:
1869 0 : WREG32(DC_HPD1_CONTROL, tmp);
1870 0 : break;
1871 : case RADEON_HPD_2:
1872 0 : WREG32(DC_HPD2_CONTROL, tmp);
1873 0 : break;
1874 : case RADEON_HPD_3:
1875 0 : WREG32(DC_HPD3_CONTROL, tmp);
1876 0 : break;
1877 : case RADEON_HPD_4:
1878 0 : WREG32(DC_HPD4_CONTROL, tmp);
1879 0 : break;
1880 : case RADEON_HPD_5:
1881 0 : WREG32(DC_HPD5_CONTROL, tmp);
1882 0 : break;
1883 : case RADEON_HPD_6:
1884 0 : WREG32(DC_HPD6_CONTROL, tmp);
1885 0 : break;
1886 : default:
1887 : break;
1888 : }
1889 0 : radeon_hpd_set_polarity(rdev, radeon_connector->hpd.hpd);
1890 0 : enabled |= 1 << radeon_connector->hpd.hpd;
1891 0 : }
1892 0 : radeon_irq_kms_enable_hpd(rdev, enabled);
1893 0 : }
1894 :
1895 : /**
1896 : * evergreen_hpd_fini - hpd tear down callback.
1897 : *
1898 : * @rdev: radeon_device pointer
1899 : *
1900 : * Tear down the hpd pins used by the card (evergreen+).
1901 : * Disable the hpd interrupts.
1902 : */
1903 0 : void evergreen_hpd_fini(struct radeon_device *rdev)
1904 : {
1905 0 : struct drm_device *dev = rdev->ddev;
1906 : struct drm_connector *connector;
1907 : unsigned disabled = 0;
1908 :
1909 0 : list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
1910 0 : struct radeon_connector *radeon_connector = to_radeon_connector(connector);
1911 0 : switch (radeon_connector->hpd.hpd) {
1912 : case RADEON_HPD_1:
1913 0 : WREG32(DC_HPD1_CONTROL, 0);
1914 0 : break;
1915 : case RADEON_HPD_2:
1916 0 : WREG32(DC_HPD2_CONTROL, 0);
1917 0 : break;
1918 : case RADEON_HPD_3:
1919 0 : WREG32(DC_HPD3_CONTROL, 0);
1920 0 : break;
1921 : case RADEON_HPD_4:
1922 0 : WREG32(DC_HPD4_CONTROL, 0);
1923 0 : break;
1924 : case RADEON_HPD_5:
1925 0 : WREG32(DC_HPD5_CONTROL, 0);
1926 0 : break;
1927 : case RADEON_HPD_6:
1928 0 : WREG32(DC_HPD6_CONTROL, 0);
1929 0 : break;
1930 : default:
1931 : break;
1932 : }
1933 0 : disabled |= 1 << radeon_connector->hpd.hpd;
1934 : }
1935 0 : radeon_irq_kms_disable_hpd(rdev, disabled);
1936 0 : }
1937 :
1938 : /* watermark setup */
1939 :
1940 0 : static u32 evergreen_line_buffer_adjust(struct radeon_device *rdev,
1941 : struct radeon_crtc *radeon_crtc,
1942 : struct drm_display_mode *mode,
1943 : struct drm_display_mode *other_mode)
1944 : {
1945 : u32 tmp, buffer_alloc, i;
1946 0 : u32 pipe_offset = radeon_crtc->crtc_id * 0x20;
1947 : /*
1948 : * Line Buffer Setup
1949 : * There are 3 line buffers, each one shared by 2 display controllers.
1950 : * DC_LB_MEMORY_SPLIT controls how that line buffer is shared between
1951 : * the display controllers. The paritioning is done via one of four
1952 : * preset allocations specified in bits 2:0:
1953 : * first display controller
1954 : * 0 - first half of lb (3840 * 2)
1955 : * 1 - first 3/4 of lb (5760 * 2)
1956 : * 2 - whole lb (7680 * 2), other crtc must be disabled
1957 : * 3 - first 1/4 of lb (1920 * 2)
1958 : * second display controller
1959 : * 4 - second half of lb (3840 * 2)
1960 : * 5 - second 3/4 of lb (5760 * 2)
1961 : * 6 - whole lb (7680 * 2), other crtc must be disabled
1962 : * 7 - last 1/4 of lb (1920 * 2)
1963 : */
1964 : /* this can get tricky if we have two large displays on a paired group
1965 : * of crtcs. Ideally for multiple large displays we'd assign them to
1966 : * non-linked crtcs for maximum line buffer allocation.
1967 : */
1968 0 : if (radeon_crtc->base.enabled && mode) {
1969 0 : if (other_mode) {
1970 : tmp = 0; /* 1/2 */
1971 : buffer_alloc = 1;
1972 0 : } else {
1973 : tmp = 2; /* whole */
1974 : buffer_alloc = 2;
1975 : }
1976 : } else {
1977 : tmp = 0;
1978 : buffer_alloc = 0;
1979 : }
1980 :
1981 : /* second controller of the pair uses second half of the lb */
1982 0 : if (radeon_crtc->crtc_id % 2)
1983 0 : tmp += 4;
1984 0 : WREG32(DC_LB_MEMORY_SPLIT + radeon_crtc->crtc_offset, tmp);
1985 :
1986 0 : if (ASIC_IS_DCE41(rdev) || ASIC_IS_DCE5(rdev)) {
1987 0 : WREG32(PIPE0_DMIF_BUFFER_CONTROL + pipe_offset,
1988 : DMIF_BUFFERS_ALLOCATED(buffer_alloc));
1989 0 : for (i = 0; i < rdev->usec_timeout; i++) {
1990 0 : if (RREG32(PIPE0_DMIF_BUFFER_CONTROL + pipe_offset) &
1991 : DMIF_BUFFERS_ALLOCATED_COMPLETED)
1992 : break;
1993 0 : udelay(1);
1994 : }
1995 : }
1996 :
1997 0 : if (radeon_crtc->base.enabled && mode) {
1998 0 : switch (tmp) {
1999 : case 0:
2000 : case 4:
2001 : default:
2002 0 : if (ASIC_IS_DCE5(rdev))
2003 0 : return 4096 * 2;
2004 : else
2005 0 : return 3840 * 2;
2006 : case 1:
2007 : case 5:
2008 0 : if (ASIC_IS_DCE5(rdev))
2009 0 : return 6144 * 2;
2010 : else
2011 0 : return 5760 * 2;
2012 : case 2:
2013 : case 6:
2014 0 : if (ASIC_IS_DCE5(rdev))
2015 0 : return 8192 * 2;
2016 : else
2017 0 : return 7680 * 2;
2018 : case 3:
2019 : case 7:
2020 0 : if (ASIC_IS_DCE5(rdev))
2021 0 : return 2048 * 2;
2022 : else
2023 0 : return 1920 * 2;
2024 : }
2025 : }
2026 :
2027 : /* controller not enabled, so no lb used */
2028 0 : return 0;
2029 0 : }
2030 :
2031 0 : u32 evergreen_get_number_of_dram_channels(struct radeon_device *rdev)
2032 : {
2033 0 : u32 tmp = RREG32(MC_SHARED_CHMAP);
2034 :
2035 0 : switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
2036 : case 0:
2037 : default:
2038 0 : return 1;
2039 : case 1:
2040 0 : return 2;
2041 : case 2:
2042 0 : return 4;
2043 : case 3:
2044 0 : return 8;
2045 : }
2046 0 : }
2047 :
2048 : struct evergreen_wm_params {
2049 : u32 dram_channels; /* number of dram channels */
2050 : u32 yclk; /* bandwidth per dram data pin in kHz */
2051 : u32 sclk; /* engine clock in kHz */
2052 : u32 disp_clk; /* display clock in kHz */
2053 : u32 src_width; /* viewport width */
2054 : u32 active_time; /* active display time in ns */
2055 : u32 blank_time; /* blank time in ns */
2056 : bool interlaced; /* mode is interlaced */
2057 : fixed20_12 vsc; /* vertical scale ratio */
2058 : u32 num_heads; /* number of active crtcs */
2059 : u32 bytes_per_pixel; /* bytes per pixel display + overlay */
2060 : u32 lb_size; /* line buffer allocated to pipe */
2061 : u32 vtaps; /* vertical scaler taps */
2062 : };
2063 :
2064 0 : static u32 evergreen_dram_bandwidth(struct evergreen_wm_params *wm)
2065 : {
2066 : /* Calculate DRAM Bandwidth and the part allocated to display. */
2067 : fixed20_12 dram_efficiency; /* 0.7 */
2068 : fixed20_12 yclk, dram_channels, bandwidth;
2069 : fixed20_12 a;
2070 :
2071 : a.full = dfixed_const(1000);
2072 0 : yclk.full = dfixed_const(wm->yclk);
2073 0 : yclk.full = dfixed_div(yclk, a);
2074 0 : dram_channels.full = dfixed_const(wm->dram_channels * 4);
2075 : a.full = dfixed_const(10);
2076 : dram_efficiency.full = dfixed_const(7);
2077 0 : dram_efficiency.full = dfixed_div(dram_efficiency, a);
2078 0 : bandwidth.full = dfixed_mul(dram_channels, yclk);
2079 0 : bandwidth.full = dfixed_mul(bandwidth, dram_efficiency);
2080 :
2081 0 : return dfixed_trunc(bandwidth);
2082 : }
2083 :
2084 0 : static u32 evergreen_dram_bandwidth_for_display(struct evergreen_wm_params *wm)
2085 : {
2086 : /* Calculate DRAM Bandwidth and the part allocated to display. */
2087 : fixed20_12 disp_dram_allocation; /* 0.3 to 0.7 */
2088 : fixed20_12 yclk, dram_channels, bandwidth;
2089 : fixed20_12 a;
2090 :
2091 : a.full = dfixed_const(1000);
2092 0 : yclk.full = dfixed_const(wm->yclk);
2093 0 : yclk.full = dfixed_div(yclk, a);
2094 0 : dram_channels.full = dfixed_const(wm->dram_channels * 4);
2095 : a.full = dfixed_const(10);
2096 : disp_dram_allocation.full = dfixed_const(3); /* XXX worse case value 0.3 */
2097 0 : disp_dram_allocation.full = dfixed_div(disp_dram_allocation, a);
2098 0 : bandwidth.full = dfixed_mul(dram_channels, yclk);
2099 0 : bandwidth.full = dfixed_mul(bandwidth, disp_dram_allocation);
2100 :
2101 0 : return dfixed_trunc(bandwidth);
2102 : }
2103 :
2104 0 : static u32 evergreen_data_return_bandwidth(struct evergreen_wm_params *wm)
2105 : {
2106 : /* Calculate the display Data return Bandwidth */
2107 : fixed20_12 return_efficiency; /* 0.8 */
2108 : fixed20_12 sclk, bandwidth;
2109 : fixed20_12 a;
2110 :
2111 : a.full = dfixed_const(1000);
2112 0 : sclk.full = dfixed_const(wm->sclk);
2113 0 : sclk.full = dfixed_div(sclk, a);
2114 : a.full = dfixed_const(10);
2115 : return_efficiency.full = dfixed_const(8);
2116 0 : return_efficiency.full = dfixed_div(return_efficiency, a);
2117 : a.full = dfixed_const(32);
2118 0 : bandwidth.full = dfixed_mul(a, sclk);
2119 0 : bandwidth.full = dfixed_mul(bandwidth, return_efficiency);
2120 :
2121 0 : return dfixed_trunc(bandwidth);
2122 : }
2123 :
2124 0 : static u32 evergreen_dmif_request_bandwidth(struct evergreen_wm_params *wm)
2125 : {
2126 : /* Calculate the DMIF Request Bandwidth */
2127 : fixed20_12 disp_clk_request_efficiency; /* 0.8 */
2128 : fixed20_12 disp_clk, bandwidth;
2129 : fixed20_12 a;
2130 :
2131 : a.full = dfixed_const(1000);
2132 0 : disp_clk.full = dfixed_const(wm->disp_clk);
2133 0 : disp_clk.full = dfixed_div(disp_clk, a);
2134 : a.full = dfixed_const(10);
2135 : disp_clk_request_efficiency.full = dfixed_const(8);
2136 0 : disp_clk_request_efficiency.full = dfixed_div(disp_clk_request_efficiency, a);
2137 : a.full = dfixed_const(32);
2138 0 : bandwidth.full = dfixed_mul(a, disp_clk);
2139 0 : bandwidth.full = dfixed_mul(bandwidth, disp_clk_request_efficiency);
2140 :
2141 0 : return dfixed_trunc(bandwidth);
2142 : }
2143 :
2144 0 : static u32 evergreen_available_bandwidth(struct evergreen_wm_params *wm)
2145 : {
2146 : /* Calculate the Available bandwidth. Display can use this temporarily but not in average. */
2147 0 : u32 dram_bandwidth = evergreen_dram_bandwidth(wm);
2148 0 : u32 data_return_bandwidth = evergreen_data_return_bandwidth(wm);
2149 0 : u32 dmif_req_bandwidth = evergreen_dmif_request_bandwidth(wm);
2150 :
2151 0 : return min(dram_bandwidth, min(data_return_bandwidth, dmif_req_bandwidth));
2152 : }
2153 :
2154 0 : static u32 evergreen_average_bandwidth(struct evergreen_wm_params *wm)
2155 : {
2156 : /* Calculate the display mode Average Bandwidth
2157 : * DisplayMode should contain the source and destination dimensions,
2158 : * timing, etc.
2159 : */
2160 : fixed20_12 bpp;
2161 : fixed20_12 line_time;
2162 : fixed20_12 src_width;
2163 : fixed20_12 bandwidth;
2164 : fixed20_12 a;
2165 :
2166 : a.full = dfixed_const(1000);
2167 0 : line_time.full = dfixed_const(wm->active_time + wm->blank_time);
2168 0 : line_time.full = dfixed_div(line_time, a);
2169 0 : bpp.full = dfixed_const(wm->bytes_per_pixel);
2170 0 : src_width.full = dfixed_const(wm->src_width);
2171 0 : bandwidth.full = dfixed_mul(src_width, bpp);
2172 0 : bandwidth.full = dfixed_mul(bandwidth, wm->vsc);
2173 0 : bandwidth.full = dfixed_div(bandwidth, line_time);
2174 :
2175 0 : return dfixed_trunc(bandwidth);
2176 : }
2177 :
2178 0 : static u32 evergreen_latency_watermark(struct evergreen_wm_params *wm)
2179 : {
2180 : /* First calcualte the latency in ns */
2181 : u32 mc_latency = 2000; /* 2000 ns. */
2182 0 : u32 available_bandwidth = evergreen_available_bandwidth(wm);
2183 0 : u32 worst_chunk_return_time = (512 * 8 * 1000) / available_bandwidth;
2184 0 : u32 cursor_line_pair_return_time = (128 * 4 * 1000) / available_bandwidth;
2185 0 : u32 dc_latency = 40000000 / wm->disp_clk; /* dc pipe latency */
2186 0 : u32 other_heads_data_return_time = ((wm->num_heads + 1) * worst_chunk_return_time) +
2187 0 : (wm->num_heads * cursor_line_pair_return_time);
2188 0 : u32 latency = mc_latency + other_heads_data_return_time + dc_latency;
2189 : u32 max_src_lines_per_dst_line, lb_fill_bw, line_fill_time;
2190 : fixed20_12 a, b, c;
2191 :
2192 0 : if (wm->num_heads == 0)
2193 0 : return 0;
2194 :
2195 : a.full = dfixed_const(2);
2196 : b.full = dfixed_const(1);
2197 0 : if ((wm->vsc.full > a.full) ||
2198 0 : ((wm->vsc.full > b.full) && (wm->vtaps >= 3)) ||
2199 0 : (wm->vtaps >= 5) ||
2200 0 : ((wm->vsc.full >= a.full) && wm->interlaced))
2201 0 : max_src_lines_per_dst_line = 4;
2202 : else
2203 : max_src_lines_per_dst_line = 2;
2204 :
2205 0 : a.full = dfixed_const(available_bandwidth);
2206 0 : b.full = dfixed_const(wm->num_heads);
2207 0 : a.full = dfixed_div(a, b);
2208 :
2209 : b.full = dfixed_const(1000);
2210 0 : c.full = dfixed_const(wm->disp_clk);
2211 0 : b.full = dfixed_div(c, b);
2212 0 : c.full = dfixed_const(wm->bytes_per_pixel);
2213 0 : b.full = dfixed_mul(b, c);
2214 :
2215 0 : lb_fill_bw = min(dfixed_trunc(a), dfixed_trunc(b));
2216 :
2217 0 : a.full = dfixed_const(max_src_lines_per_dst_line * wm->src_width * wm->bytes_per_pixel);
2218 : b.full = dfixed_const(1000);
2219 0 : c.full = dfixed_const(lb_fill_bw);
2220 0 : b.full = dfixed_div(c, b);
2221 0 : a.full = dfixed_div(a, b);
2222 0 : line_fill_time = dfixed_trunc(a);
2223 :
2224 0 : if (line_fill_time < wm->active_time)
2225 0 : return latency;
2226 : else
2227 0 : return latency + (line_fill_time - wm->active_time);
2228 :
2229 0 : }
2230 :
2231 0 : static bool evergreen_average_bandwidth_vs_dram_bandwidth_for_display(struct evergreen_wm_params *wm)
2232 : {
2233 0 : if (evergreen_average_bandwidth(wm) <=
2234 0 : (evergreen_dram_bandwidth_for_display(wm) / wm->num_heads))
2235 0 : return true;
2236 : else
2237 0 : return false;
2238 0 : };
2239 :
2240 0 : static bool evergreen_average_bandwidth_vs_available_bandwidth(struct evergreen_wm_params *wm)
2241 : {
2242 0 : if (evergreen_average_bandwidth(wm) <=
2243 0 : (evergreen_available_bandwidth(wm) / wm->num_heads))
2244 0 : return true;
2245 : else
2246 0 : return false;
2247 0 : };
2248 :
2249 0 : static bool evergreen_check_latency_hiding(struct evergreen_wm_params *wm)
2250 : {
2251 0 : u32 lb_partitions = wm->lb_size / wm->src_width;
2252 0 : u32 line_time = wm->active_time + wm->blank_time;
2253 : u32 latency_tolerant_lines;
2254 : u32 latency_hiding;
2255 : fixed20_12 a;
2256 :
2257 : a.full = dfixed_const(1);
2258 0 : if (wm->vsc.full > a.full)
2259 0 : latency_tolerant_lines = 1;
2260 : else {
2261 0 : if (lb_partitions <= (wm->vtaps + 1))
2262 0 : latency_tolerant_lines = 1;
2263 : else
2264 : latency_tolerant_lines = 2;
2265 : }
2266 :
2267 0 : latency_hiding = (latency_tolerant_lines * line_time + wm->blank_time);
2268 :
2269 0 : if (evergreen_latency_watermark(wm) <= latency_hiding)
2270 0 : return true;
2271 : else
2272 0 : return false;
2273 0 : }
2274 :
2275 0 : static void evergreen_program_watermarks(struct radeon_device *rdev,
2276 : struct radeon_crtc *radeon_crtc,
2277 : u32 lb_size, u32 num_heads)
2278 : {
2279 0 : struct drm_display_mode *mode = &radeon_crtc->base.mode;
2280 0 : struct evergreen_wm_params wm_low, wm_high;
2281 : u32 dram_channels;
2282 : u32 pixel_period;
2283 : u32 line_time = 0;
2284 : u32 latency_watermark_a = 0, latency_watermark_b = 0;
2285 : u32 priority_a_mark = 0, priority_b_mark = 0;
2286 : u32 priority_a_cnt = PRIORITY_OFF;
2287 : u32 priority_b_cnt = PRIORITY_OFF;
2288 0 : u32 pipe_offset = radeon_crtc->crtc_id * 16;
2289 : u32 tmp, arb_control3;
2290 : fixed20_12 a, b, c;
2291 :
2292 0 : if (radeon_crtc->base.enabled && num_heads && mode) {
2293 0 : pixel_period = 1000000 / (u32)mode->clock;
2294 0 : line_time = min((u32)mode->crtc_htotal * pixel_period, (u32)65535);
2295 : priority_a_cnt = 0;
2296 : priority_b_cnt = 0;
2297 0 : dram_channels = evergreen_get_number_of_dram_channels(rdev);
2298 :
2299 : /* watermark for high clocks */
2300 0 : if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) {
2301 0 : wm_high.yclk =
2302 0 : radeon_dpm_get_mclk(rdev, false) * 10;
2303 0 : wm_high.sclk =
2304 0 : radeon_dpm_get_sclk(rdev, false) * 10;
2305 0 : } else {
2306 0 : wm_high.yclk = rdev->pm.current_mclk * 10;
2307 0 : wm_high.sclk = rdev->pm.current_sclk * 10;
2308 : }
2309 :
2310 0 : wm_high.disp_clk = mode->clock;
2311 0 : wm_high.src_width = mode->crtc_hdisplay;
2312 0 : wm_high.active_time = mode->crtc_hdisplay * pixel_period;
2313 0 : wm_high.blank_time = line_time - wm_high.active_time;
2314 0 : wm_high.interlaced = false;
2315 0 : if (mode->flags & DRM_MODE_FLAG_INTERLACE)
2316 0 : wm_high.interlaced = true;
2317 0 : wm_high.vsc = radeon_crtc->vsc;
2318 0 : wm_high.vtaps = 1;
2319 0 : if (radeon_crtc->rmx_type != RMX_OFF)
2320 0 : wm_high.vtaps = 2;
2321 0 : wm_high.bytes_per_pixel = 4; /* XXX: get this from fb config */
2322 0 : wm_high.lb_size = lb_size;
2323 0 : wm_high.dram_channels = dram_channels;
2324 0 : wm_high.num_heads = num_heads;
2325 :
2326 : /* watermark for low clocks */
2327 0 : if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) {
2328 0 : wm_low.yclk =
2329 0 : radeon_dpm_get_mclk(rdev, true) * 10;
2330 0 : wm_low.sclk =
2331 0 : radeon_dpm_get_sclk(rdev, true) * 10;
2332 0 : } else {
2333 0 : wm_low.yclk = rdev->pm.current_mclk * 10;
2334 0 : wm_low.sclk = rdev->pm.current_sclk * 10;
2335 : }
2336 :
2337 0 : wm_low.disp_clk = mode->clock;
2338 0 : wm_low.src_width = mode->crtc_hdisplay;
2339 0 : wm_low.active_time = mode->crtc_hdisplay * pixel_period;
2340 0 : wm_low.blank_time = line_time - wm_low.active_time;
2341 0 : wm_low.interlaced = false;
2342 0 : if (mode->flags & DRM_MODE_FLAG_INTERLACE)
2343 0 : wm_low.interlaced = true;
2344 0 : wm_low.vsc = radeon_crtc->vsc;
2345 0 : wm_low.vtaps = 1;
2346 0 : if (radeon_crtc->rmx_type != RMX_OFF)
2347 0 : wm_low.vtaps = 2;
2348 0 : wm_low.bytes_per_pixel = 4; /* XXX: get this from fb config */
2349 0 : wm_low.lb_size = lb_size;
2350 0 : wm_low.dram_channels = dram_channels;
2351 0 : wm_low.num_heads = num_heads;
2352 :
2353 : /* set for high clocks */
2354 0 : latency_watermark_a = min(evergreen_latency_watermark(&wm_high), (u32)65535);
2355 : /* set for low clocks */
2356 0 : latency_watermark_b = min(evergreen_latency_watermark(&wm_low), (u32)65535);
2357 :
2358 : /* possibly force display priority to high */
2359 : /* should really do this at mode validation time... */
2360 0 : if (!evergreen_average_bandwidth_vs_dram_bandwidth_for_display(&wm_high) ||
2361 0 : !evergreen_average_bandwidth_vs_available_bandwidth(&wm_high) ||
2362 0 : !evergreen_check_latency_hiding(&wm_high) ||
2363 0 : (rdev->disp_priority == 2)) {
2364 : DRM_DEBUG_KMS("force priority a to high\n");
2365 : priority_a_cnt |= PRIORITY_ALWAYS_ON;
2366 0 : }
2367 0 : if (!evergreen_average_bandwidth_vs_dram_bandwidth_for_display(&wm_low) ||
2368 0 : !evergreen_average_bandwidth_vs_available_bandwidth(&wm_low) ||
2369 0 : !evergreen_check_latency_hiding(&wm_low) ||
2370 0 : (rdev->disp_priority == 2)) {
2371 : DRM_DEBUG_KMS("force priority b to high\n");
2372 : priority_b_cnt |= PRIORITY_ALWAYS_ON;
2373 0 : }
2374 :
2375 : a.full = dfixed_const(1000);
2376 0 : b.full = dfixed_const(mode->clock);
2377 0 : b.full = dfixed_div(b, a);
2378 0 : c.full = dfixed_const(latency_watermark_a);
2379 0 : c.full = dfixed_mul(c, b);
2380 0 : c.full = dfixed_mul(c, radeon_crtc->hsc);
2381 0 : c.full = dfixed_div(c, a);
2382 : a.full = dfixed_const(16);
2383 0 : c.full = dfixed_div(c, a);
2384 0 : priority_a_mark = dfixed_trunc(c);
2385 0 : priority_a_cnt |= priority_a_mark & PRIORITY_MARK_MASK;
2386 :
2387 : a.full = dfixed_const(1000);
2388 0 : b.full = dfixed_const(mode->clock);
2389 0 : b.full = dfixed_div(b, a);
2390 0 : c.full = dfixed_const(latency_watermark_b);
2391 0 : c.full = dfixed_mul(c, b);
2392 0 : c.full = dfixed_mul(c, radeon_crtc->hsc);
2393 0 : c.full = dfixed_div(c, a);
2394 : a.full = dfixed_const(16);
2395 0 : c.full = dfixed_div(c, a);
2396 0 : priority_b_mark = dfixed_trunc(c);
2397 0 : priority_b_cnt |= priority_b_mark & PRIORITY_MARK_MASK;
2398 :
2399 : /* Save number of lines the linebuffer leads before the scanout */
2400 0 : radeon_crtc->lb_vblank_lead_lines = DIV_ROUND_UP(lb_size, mode->crtc_hdisplay);
2401 0 : }
2402 :
2403 : /* select wm A */
2404 0 : arb_control3 = RREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset);
2405 : tmp = arb_control3;
2406 0 : tmp &= ~LATENCY_WATERMARK_MASK(3);
2407 0 : tmp |= LATENCY_WATERMARK_MASK(1);
2408 0 : WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, tmp);
2409 0 : WREG32(PIPE0_LATENCY_CONTROL + pipe_offset,
2410 : (LATENCY_LOW_WATERMARK(latency_watermark_a) |
2411 : LATENCY_HIGH_WATERMARK(line_time)));
2412 : /* select wm B */
2413 0 : tmp = RREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset);
2414 0 : tmp &= ~LATENCY_WATERMARK_MASK(3);
2415 0 : tmp |= LATENCY_WATERMARK_MASK(2);
2416 0 : WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, tmp);
2417 0 : WREG32(PIPE0_LATENCY_CONTROL + pipe_offset,
2418 : (LATENCY_LOW_WATERMARK(latency_watermark_b) |
2419 : LATENCY_HIGH_WATERMARK(line_time)));
2420 : /* restore original selection */
2421 0 : WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, arb_control3);
2422 :
2423 : /* write the priority marks */
2424 0 : WREG32(PRIORITY_A_CNT + radeon_crtc->crtc_offset, priority_a_cnt);
2425 0 : WREG32(PRIORITY_B_CNT + radeon_crtc->crtc_offset, priority_b_cnt);
2426 :
2427 : /* save values for DPM */
2428 0 : radeon_crtc->line_time = line_time;
2429 0 : radeon_crtc->wm_high = latency_watermark_a;
2430 0 : radeon_crtc->wm_low = latency_watermark_b;
2431 0 : }
2432 :
2433 : /**
2434 : * evergreen_bandwidth_update - update display watermarks callback.
2435 : *
2436 : * @rdev: radeon_device pointer
2437 : *
2438 : * Update the display watermarks based on the requested mode(s)
2439 : * (evergreen+).
2440 : */
2441 0 : void evergreen_bandwidth_update(struct radeon_device *rdev)
2442 : {
2443 : struct drm_display_mode *mode0 = NULL;
2444 : struct drm_display_mode *mode1 = NULL;
2445 : u32 num_heads = 0, lb_size;
2446 : int i;
2447 :
2448 0 : if (!rdev->mode_info.mode_config_initialized)
2449 0 : return;
2450 :
2451 0 : radeon_update_display_priority(rdev);
2452 :
2453 0 : for (i = 0; i < rdev->num_crtc; i++) {
2454 0 : if (rdev->mode_info.crtcs[i]->base.enabled)
2455 0 : num_heads++;
2456 : }
2457 0 : for (i = 0; i < rdev->num_crtc; i += 2) {
2458 0 : mode0 = &rdev->mode_info.crtcs[i]->base.mode;
2459 0 : mode1 = &rdev->mode_info.crtcs[i+1]->base.mode;
2460 0 : lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i], mode0, mode1);
2461 0 : evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i], lb_size, num_heads);
2462 0 : lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i+1], mode1, mode0);
2463 0 : evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i+1], lb_size, num_heads);
2464 : }
2465 0 : }
2466 :
2467 : /**
2468 : * evergreen_mc_wait_for_idle - wait for MC idle callback.
2469 : *
2470 : * @rdev: radeon_device pointer
2471 : *
2472 : * Wait for the MC (memory controller) to be idle.
2473 : * (evergreen+).
2474 : * Returns 0 if the MC is idle, -1 if not.
2475 : */
2476 0 : int evergreen_mc_wait_for_idle(struct radeon_device *rdev)
2477 : {
2478 : unsigned i;
2479 : u32 tmp;
2480 :
2481 0 : for (i = 0; i < rdev->usec_timeout; i++) {
2482 : /* read MC_STATUS */
2483 0 : tmp = RREG32(SRBM_STATUS) & 0x1F00;
2484 0 : if (!tmp)
2485 0 : return 0;
2486 0 : udelay(1);
2487 : }
2488 0 : return -1;
2489 0 : }
2490 :
2491 : /*
2492 : * GART
2493 : */
2494 0 : void evergreen_pcie_gart_tlb_flush(struct radeon_device *rdev)
2495 : {
2496 : unsigned i;
2497 : u32 tmp;
2498 :
2499 0 : WREG32(HDP_MEM_COHERENCY_FLUSH_CNTL, 0x1);
2500 :
2501 0 : WREG32(VM_CONTEXT0_REQUEST_RESPONSE, REQUEST_TYPE(1));
2502 0 : for (i = 0; i < rdev->usec_timeout; i++) {
2503 : /* read MC_STATUS */
2504 0 : tmp = RREG32(VM_CONTEXT0_REQUEST_RESPONSE);
2505 0 : tmp = (tmp & RESPONSE_TYPE_MASK) >> RESPONSE_TYPE_SHIFT;
2506 0 : if (tmp == 2) {
2507 0 : printk(KERN_WARNING "[drm] r600 flush TLB failed\n");
2508 0 : return;
2509 : }
2510 0 : if (tmp) {
2511 0 : return;
2512 : }
2513 0 : udelay(1);
2514 : }
2515 0 : }
2516 :
2517 0 : static int evergreen_pcie_gart_enable(struct radeon_device *rdev)
2518 : {
2519 : u32 tmp;
2520 : int r;
2521 :
2522 0 : if (rdev->gart.robj == NULL) {
2523 0 : dev_err(rdev->dev, "No VRAM object for PCIE GART.\n");
2524 0 : return -EINVAL;
2525 : }
2526 0 : r = radeon_gart_table_vram_pin(rdev);
2527 0 : if (r)
2528 0 : return r;
2529 : /* Setup L2 cache */
2530 0 : WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
2531 : ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
2532 : EFFECTIVE_L2_QUEUE_SIZE(7));
2533 0 : WREG32(VM_L2_CNTL2, 0);
2534 0 : WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
2535 : /* Setup TLB control */
2536 : tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
2537 : SYSTEM_ACCESS_MODE_NOT_IN_SYS |
2538 : SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
2539 : EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
2540 0 : if (rdev->flags & RADEON_IS_IGP) {
2541 0 : WREG32(FUS_MC_VM_MD_L1_TLB0_CNTL, tmp);
2542 0 : WREG32(FUS_MC_VM_MD_L1_TLB1_CNTL, tmp);
2543 0 : WREG32(FUS_MC_VM_MD_L1_TLB2_CNTL, tmp);
2544 0 : } else {
2545 0 : WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
2546 0 : WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
2547 0 : WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
2548 0 : if ((rdev->family == CHIP_JUNIPER) ||
2549 0 : (rdev->family == CHIP_CYPRESS) ||
2550 0 : (rdev->family == CHIP_HEMLOCK) ||
2551 0 : (rdev->family == CHIP_BARTS))
2552 0 : WREG32(MC_VM_MD_L1_TLB3_CNTL, tmp);
2553 : }
2554 0 : WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
2555 0 : WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
2556 0 : WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
2557 0 : WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
2558 0 : WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12);
2559 0 : WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12);
2560 0 : WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12);
2561 0 : WREG32(VM_CONTEXT0_CNTL, ENABLE_CONTEXT | PAGE_TABLE_DEPTH(0) |
2562 : RANGE_PROTECTION_FAULT_ENABLE_DEFAULT);
2563 0 : WREG32(VM_CONTEXT0_PROTECTION_FAULT_DEFAULT_ADDR,
2564 : (u32)(rdev->dummy_page.addr >> 12));
2565 0 : WREG32(VM_CONTEXT1_CNTL, 0);
2566 :
2567 0 : evergreen_pcie_gart_tlb_flush(rdev);
2568 : DRM_INFO("PCIE GART of %uM enabled (table at 0x%016llX).\n",
2569 : (unsigned)(rdev->mc.gtt_size >> 20),
2570 : (unsigned long long)rdev->gart.table_addr);
2571 0 : rdev->gart.ready = true;
2572 0 : return 0;
2573 0 : }
2574 :
2575 0 : static void evergreen_pcie_gart_disable(struct radeon_device *rdev)
2576 : {
2577 : u32 tmp;
2578 :
2579 : /* Disable all tables */
2580 0 : WREG32(VM_CONTEXT0_CNTL, 0);
2581 0 : WREG32(VM_CONTEXT1_CNTL, 0);
2582 :
2583 : /* Setup L2 cache */
2584 0 : WREG32(VM_L2_CNTL, ENABLE_L2_FRAGMENT_PROCESSING |
2585 : EFFECTIVE_L2_QUEUE_SIZE(7));
2586 0 : WREG32(VM_L2_CNTL2, 0);
2587 0 : WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
2588 : /* Setup TLB control */
2589 : tmp = EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
2590 0 : WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
2591 0 : WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
2592 0 : WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
2593 0 : WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
2594 0 : WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
2595 0 : WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
2596 0 : WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
2597 0 : radeon_gart_table_vram_unpin(rdev);
2598 0 : }
2599 :
2600 0 : static void evergreen_pcie_gart_fini(struct radeon_device *rdev)
2601 : {
2602 0 : evergreen_pcie_gart_disable(rdev);
2603 0 : radeon_gart_table_vram_free(rdev);
2604 0 : radeon_gart_fini(rdev);
2605 0 : }
2606 :
2607 :
2608 0 : static void evergreen_agp_enable(struct radeon_device *rdev)
2609 : {
2610 : u32 tmp;
2611 :
2612 : /* Setup L2 cache */
2613 0 : WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
2614 : ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
2615 : EFFECTIVE_L2_QUEUE_SIZE(7));
2616 0 : WREG32(VM_L2_CNTL2, 0);
2617 0 : WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
2618 : /* Setup TLB control */
2619 : tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
2620 : SYSTEM_ACCESS_MODE_NOT_IN_SYS |
2621 : SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
2622 : EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
2623 0 : WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
2624 0 : WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
2625 0 : WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
2626 0 : WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
2627 0 : WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
2628 0 : WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
2629 0 : WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
2630 0 : WREG32(VM_CONTEXT0_CNTL, 0);
2631 0 : WREG32(VM_CONTEXT1_CNTL, 0);
2632 0 : }
2633 :
2634 : static const unsigned ni_dig_offsets[] =
2635 : {
2636 : NI_DIG0_REGISTER_OFFSET,
2637 : NI_DIG1_REGISTER_OFFSET,
2638 : NI_DIG2_REGISTER_OFFSET,
2639 : NI_DIG3_REGISTER_OFFSET,
2640 : NI_DIG4_REGISTER_OFFSET,
2641 : NI_DIG5_REGISTER_OFFSET
2642 : };
2643 :
2644 : static const unsigned ni_tx_offsets[] =
2645 : {
2646 : NI_DCIO_UNIPHY0_UNIPHY_TX_CONTROL1,
2647 : NI_DCIO_UNIPHY1_UNIPHY_TX_CONTROL1,
2648 : NI_DCIO_UNIPHY2_UNIPHY_TX_CONTROL1,
2649 : NI_DCIO_UNIPHY3_UNIPHY_TX_CONTROL1,
2650 : NI_DCIO_UNIPHY4_UNIPHY_TX_CONTROL1,
2651 : NI_DCIO_UNIPHY5_UNIPHY_TX_CONTROL1
2652 : };
2653 :
2654 : static const unsigned evergreen_dp_offsets[] =
2655 : {
2656 : EVERGREEN_DP0_REGISTER_OFFSET,
2657 : EVERGREEN_DP1_REGISTER_OFFSET,
2658 : EVERGREEN_DP2_REGISTER_OFFSET,
2659 : EVERGREEN_DP3_REGISTER_OFFSET,
2660 : EVERGREEN_DP4_REGISTER_OFFSET,
2661 : EVERGREEN_DP5_REGISTER_OFFSET
2662 : };
2663 :
2664 :
2665 : /*
2666 : * Assumption is that EVERGREEN_CRTC_MASTER_EN enable for requested crtc
2667 : * We go from crtc to connector and it is not relible since it
2668 : * should be an opposite direction .If crtc is enable then
2669 : * find the dig_fe which selects this crtc and insure that it enable.
2670 : * if such dig_fe is found then find dig_be which selects found dig_be and
2671 : * insure that it enable and in DP_SST mode.
2672 : * if UNIPHY_PLL_CONTROL1.enable then we should disconnect timing
2673 : * from dp symbols clocks .
2674 : */
2675 0 : static bool evergreen_is_dp_sst_stream_enabled(struct radeon_device *rdev,
2676 : unsigned crtc_id, unsigned *ret_dig_fe)
2677 : {
2678 : unsigned i;
2679 : unsigned dig_fe;
2680 : unsigned dig_be;
2681 : unsigned dig_en_be;
2682 : unsigned uniphy_pll;
2683 : unsigned digs_fe_selected;
2684 : unsigned dig_be_mode;
2685 : unsigned dig_fe_mask;
2686 : bool is_enabled = false;
2687 : bool found_crtc = false;
2688 :
2689 : /* loop through all running dig_fe to find selected crtc */
2690 0 : for (i = 0; i < ARRAY_SIZE(ni_dig_offsets); i++) {
2691 0 : dig_fe = RREG32(NI_DIG_FE_CNTL + ni_dig_offsets[i]);
2692 0 : if (dig_fe & NI_DIG_FE_CNTL_SYMCLK_FE_ON &&
2693 0 : crtc_id == NI_DIG_FE_CNTL_SOURCE_SELECT(dig_fe)) {
2694 : /* found running pipe */
2695 : found_crtc = true;
2696 0 : dig_fe_mask = 1 << i;
2697 : dig_fe = i;
2698 0 : break;
2699 : }
2700 : }
2701 :
2702 0 : if (found_crtc) {
2703 : /* loop through all running dig_be to find selected dig_fe */
2704 0 : for (i = 0; i < ARRAY_SIZE(ni_dig_offsets); i++) {
2705 0 : dig_be = RREG32(NI_DIG_BE_CNTL + ni_dig_offsets[i]);
2706 : /* if dig_fe_selected by dig_be? */
2707 0 : digs_fe_selected = NI_DIG_BE_CNTL_FE_SOURCE_SELECT(dig_be);
2708 0 : dig_be_mode = NI_DIG_FE_CNTL_MODE(dig_be);
2709 0 : if (dig_fe_mask & digs_fe_selected &&
2710 : /* if dig_be in sst mode? */
2711 0 : dig_be_mode == NI_DIG_BE_DPSST) {
2712 0 : dig_en_be = RREG32(NI_DIG_BE_EN_CNTL +
2713 : ni_dig_offsets[i]);
2714 0 : uniphy_pll = RREG32(NI_DCIO_UNIPHY0_PLL_CONTROL1 +
2715 : ni_tx_offsets[i]);
2716 : /* dig_be enable and tx is running */
2717 0 : if (dig_en_be & NI_DIG_BE_EN_CNTL_ENABLE &&
2718 0 : dig_en_be & NI_DIG_BE_EN_CNTL_SYMBCLK_ON &&
2719 0 : uniphy_pll & NI_DCIO_UNIPHY0_PLL_CONTROL1_ENABLE) {
2720 : is_enabled = true;
2721 0 : *ret_dig_fe = dig_fe;
2722 0 : break;
2723 : }
2724 : }
2725 : }
2726 : }
2727 :
2728 0 : return is_enabled;
2729 : }
2730 :
2731 : /*
2732 : * Blank dig when in dp sst mode
2733 : * Dig ignores crtc timing
2734 : */
2735 0 : static void evergreen_blank_dp_output(struct radeon_device *rdev,
2736 : unsigned dig_fe)
2737 : {
2738 : unsigned stream_ctrl;
2739 : unsigned fifo_ctrl;
2740 : unsigned counter = 0;
2741 :
2742 0 : if (dig_fe >= ARRAY_SIZE(evergreen_dp_offsets)) {
2743 0 : DRM_ERROR("invalid dig_fe %d\n", dig_fe);
2744 0 : return;
2745 : }
2746 :
2747 0 : stream_ctrl = RREG32(EVERGREEN_DP_VID_STREAM_CNTL +
2748 : evergreen_dp_offsets[dig_fe]);
2749 0 : if (!(stream_ctrl & EVERGREEN_DP_VID_STREAM_CNTL_ENABLE)) {
2750 0 : DRM_ERROR("dig %d , should be enable\n", dig_fe);
2751 0 : return;
2752 : }
2753 :
2754 0 : stream_ctrl &=~EVERGREEN_DP_VID_STREAM_CNTL_ENABLE;
2755 0 : WREG32(EVERGREEN_DP_VID_STREAM_CNTL +
2756 : evergreen_dp_offsets[dig_fe], stream_ctrl);
2757 :
2758 0 : stream_ctrl = RREG32(EVERGREEN_DP_VID_STREAM_CNTL +
2759 : evergreen_dp_offsets[dig_fe]);
2760 0 : while (counter < 32 && stream_ctrl & EVERGREEN_DP_VID_STREAM_STATUS) {
2761 0 : drm_msleep(1);
2762 0 : counter++;
2763 0 : stream_ctrl = RREG32(EVERGREEN_DP_VID_STREAM_CNTL +
2764 : evergreen_dp_offsets[dig_fe]);
2765 : }
2766 0 : if (counter >= 32 )
2767 0 : DRM_ERROR("counter exceeds %d\n", counter);
2768 :
2769 0 : fifo_ctrl = RREG32(EVERGREEN_DP_STEER_FIFO + evergreen_dp_offsets[dig_fe]);
2770 0 : fifo_ctrl |= EVERGREEN_DP_STEER_FIFO_RESET;
2771 0 : WREG32(EVERGREEN_DP_STEER_FIFO + evergreen_dp_offsets[dig_fe], fifo_ctrl);
2772 :
2773 0 : }
2774 :
2775 0 : void evergreen_mc_stop(struct radeon_device *rdev, struct evergreen_mc_save *save)
2776 : {
2777 : u32 crtc_enabled, tmp, frame_count, blackout;
2778 : int i, j;
2779 0 : unsigned dig_fe;
2780 :
2781 0 : if (!ASIC_IS_NODCE(rdev)) {
2782 0 : save->vga_render_control = RREG32(VGA_RENDER_CONTROL);
2783 0 : save->vga_hdp_control = RREG32(VGA_HDP_CONTROL);
2784 :
2785 : /* disable VGA render */
2786 0 : WREG32(VGA_RENDER_CONTROL, 0);
2787 0 : }
2788 : /* blank the display controllers */
2789 0 : for (i = 0; i < rdev->num_crtc; i++) {
2790 0 : crtc_enabled = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]) & EVERGREEN_CRTC_MASTER_EN;
2791 0 : if (crtc_enabled) {
2792 0 : save->crtc_enabled[i] = true;
2793 0 : if (ASIC_IS_DCE6(rdev)) {
2794 0 : tmp = RREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i]);
2795 0 : if (!(tmp & EVERGREEN_CRTC_BLANK_DATA_EN)) {
2796 0 : radeon_wait_for_vblank(rdev, i);
2797 0 : WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
2798 0 : tmp |= EVERGREEN_CRTC_BLANK_DATA_EN;
2799 0 : WREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i], tmp);
2800 0 : WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
2801 0 : }
2802 : } else {
2803 0 : tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
2804 0 : if (!(tmp & EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE)) {
2805 0 : radeon_wait_for_vblank(rdev, i);
2806 0 : WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
2807 0 : tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
2808 0 : WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
2809 0 : WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
2810 0 : }
2811 : }
2812 : /* wait for the next frame */
2813 0 : frame_count = radeon_get_vblank_counter(rdev, i);
2814 0 : for (j = 0; j < rdev->usec_timeout; j++) {
2815 0 : if (radeon_get_vblank_counter(rdev, i) != frame_count)
2816 : break;
2817 0 : udelay(1);
2818 : }
2819 : /*we should disable dig if it drives dp sst*/
2820 : /*but we are in radeon_device_init and the topology is unknown*/
2821 : /*and it is available after radeon_modeset_init*/
2822 : /*the following method radeon_atom_encoder_dpms_dig*/
2823 : /*does the job if we initialize it properly*/
2824 : /*for now we do it this manually*/
2825 : /**/
2826 0 : if (ASIC_IS_DCE5(rdev) &&
2827 0 : evergreen_is_dp_sst_stream_enabled(rdev, i ,&dig_fe))
2828 0 : evergreen_blank_dp_output(rdev, dig_fe);
2829 : /*we could remove 6 lines below*/
2830 : /* XXX this is a hack to avoid strange behavior with EFI on certain systems */
2831 0 : WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
2832 0 : tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
2833 0 : tmp &= ~EVERGREEN_CRTC_MASTER_EN;
2834 0 : WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
2835 0 : WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
2836 0 : save->crtc_enabled[i] = false;
2837 : /* ***** */
2838 0 : } else {
2839 0 : save->crtc_enabled[i] = false;
2840 : }
2841 : }
2842 :
2843 0 : radeon_mc_wait_for_idle(rdev);
2844 :
2845 0 : blackout = RREG32(MC_SHARED_BLACKOUT_CNTL);
2846 0 : if ((blackout & BLACKOUT_MODE_MASK) != 1) {
2847 : /* Block CPU access */
2848 0 : WREG32(BIF_FB_EN, 0);
2849 : /* blackout the MC */
2850 0 : blackout &= ~BLACKOUT_MODE_MASK;
2851 0 : WREG32(MC_SHARED_BLACKOUT_CNTL, blackout | 1);
2852 0 : }
2853 : /* wait for the MC to settle */
2854 0 : udelay(100);
2855 :
2856 : /* lock double buffered regs */
2857 0 : for (i = 0; i < rdev->num_crtc; i++) {
2858 0 : if (save->crtc_enabled[i]) {
2859 0 : tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
2860 0 : if (!(tmp & EVERGREEN_GRPH_UPDATE_LOCK)) {
2861 0 : tmp |= EVERGREEN_GRPH_UPDATE_LOCK;
2862 0 : WREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i], tmp);
2863 0 : }
2864 0 : tmp = RREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i]);
2865 0 : if (!(tmp & 1)) {
2866 0 : tmp |= 1;
2867 0 : WREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i], tmp);
2868 0 : }
2869 : }
2870 : }
2871 0 : }
2872 :
2873 0 : void evergreen_mc_resume(struct radeon_device *rdev, struct evergreen_mc_save *save)
2874 : {
2875 : u32 tmp, frame_count;
2876 : int i, j;
2877 :
2878 : /* update crtc base addresses */
2879 0 : for (i = 0; i < rdev->num_crtc; i++) {
2880 0 : WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + crtc_offsets[i],
2881 : upper_32_bits(rdev->mc.vram_start));
2882 0 : WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + crtc_offsets[i],
2883 : upper_32_bits(rdev->mc.vram_start));
2884 0 : WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + crtc_offsets[i],
2885 : (u32)rdev->mc.vram_start);
2886 0 : WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + crtc_offsets[i],
2887 : (u32)rdev->mc.vram_start);
2888 : }
2889 :
2890 0 : if (!ASIC_IS_NODCE(rdev)) {
2891 0 : WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS_HIGH, upper_32_bits(rdev->mc.vram_start));
2892 0 : WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS, (u32)rdev->mc.vram_start);
2893 0 : }
2894 :
2895 : /* unlock regs and wait for update */
2896 0 : for (i = 0; i < rdev->num_crtc; i++) {
2897 0 : if (save->crtc_enabled[i]) {
2898 0 : tmp = RREG32(EVERGREEN_MASTER_UPDATE_MODE + crtc_offsets[i]);
2899 0 : if ((tmp & 0x7) != 3) {
2900 0 : tmp &= ~0x7;
2901 0 : tmp |= 0x3;
2902 0 : WREG32(EVERGREEN_MASTER_UPDATE_MODE + crtc_offsets[i], tmp);
2903 0 : }
2904 0 : tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
2905 0 : if (tmp & EVERGREEN_GRPH_UPDATE_LOCK) {
2906 0 : tmp &= ~EVERGREEN_GRPH_UPDATE_LOCK;
2907 0 : WREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i], tmp);
2908 0 : }
2909 0 : tmp = RREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i]);
2910 0 : if (tmp & 1) {
2911 0 : tmp &= ~1;
2912 0 : WREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i], tmp);
2913 0 : }
2914 0 : for (j = 0; j < rdev->usec_timeout; j++) {
2915 0 : tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
2916 0 : if ((tmp & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING) == 0)
2917 : break;
2918 0 : udelay(1);
2919 : }
2920 : }
2921 : }
2922 :
2923 : /* unblackout the MC */
2924 0 : tmp = RREG32(MC_SHARED_BLACKOUT_CNTL);
2925 0 : tmp &= ~BLACKOUT_MODE_MASK;
2926 0 : WREG32(MC_SHARED_BLACKOUT_CNTL, tmp);
2927 : /* allow CPU access */
2928 0 : WREG32(BIF_FB_EN, FB_READ_EN | FB_WRITE_EN);
2929 :
2930 0 : for (i = 0; i < rdev->num_crtc; i++) {
2931 0 : if (save->crtc_enabled[i]) {
2932 0 : if (ASIC_IS_DCE6(rdev)) {
2933 0 : tmp = RREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i]);
2934 0 : tmp &= ~EVERGREEN_CRTC_BLANK_DATA_EN;
2935 0 : WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
2936 0 : WREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i], tmp);
2937 0 : WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
2938 0 : } else {
2939 0 : tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
2940 0 : tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
2941 0 : WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
2942 0 : WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
2943 0 : WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
2944 : }
2945 : /* wait for the next frame */
2946 0 : frame_count = radeon_get_vblank_counter(rdev, i);
2947 0 : for (j = 0; j < rdev->usec_timeout; j++) {
2948 0 : if (radeon_get_vblank_counter(rdev, i) != frame_count)
2949 : break;
2950 0 : udelay(1);
2951 : }
2952 : }
2953 : }
2954 0 : if (!ASIC_IS_NODCE(rdev)) {
2955 : /* Unlock vga access */
2956 0 : WREG32(VGA_HDP_CONTROL, save->vga_hdp_control);
2957 0 : mdelay(1);
2958 0 : WREG32(VGA_RENDER_CONTROL, save->vga_render_control);
2959 0 : }
2960 0 : }
2961 :
2962 0 : void evergreen_mc_program(struct radeon_device *rdev)
2963 : {
2964 0 : struct evergreen_mc_save save;
2965 : u32 tmp;
2966 : int i, j;
2967 :
2968 : /* Initialize HDP */
2969 0 : for (i = 0, j = 0; i < 32; i++, j += 0x18) {
2970 0 : WREG32((0x2c14 + j), 0x00000000);
2971 0 : WREG32((0x2c18 + j), 0x00000000);
2972 0 : WREG32((0x2c1c + j), 0x00000000);
2973 0 : WREG32((0x2c20 + j), 0x00000000);
2974 0 : WREG32((0x2c24 + j), 0x00000000);
2975 : }
2976 0 : WREG32(HDP_REG_COHERENCY_FLUSH_CNTL, 0);
2977 :
2978 0 : evergreen_mc_stop(rdev, &save);
2979 0 : if (evergreen_mc_wait_for_idle(rdev)) {
2980 0 : dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
2981 0 : }
2982 : /* Lockout access through VGA aperture*/
2983 0 : WREG32(VGA_HDP_CONTROL, VGA_MEMORY_DISABLE);
2984 : /* Update configuration */
2985 0 : if (rdev->flags & RADEON_IS_AGP) {
2986 0 : if (rdev->mc.vram_start < rdev->mc.gtt_start) {
2987 : /* VRAM before AGP */
2988 0 : WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
2989 : rdev->mc.vram_start >> 12);
2990 0 : WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
2991 : rdev->mc.gtt_end >> 12);
2992 0 : } else {
2993 : /* VRAM after AGP */
2994 0 : WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
2995 : rdev->mc.gtt_start >> 12);
2996 0 : WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
2997 : rdev->mc.vram_end >> 12);
2998 : }
2999 : } else {
3000 0 : WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
3001 : rdev->mc.vram_start >> 12);
3002 0 : WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
3003 : rdev->mc.vram_end >> 12);
3004 : }
3005 0 : WREG32(MC_VM_SYSTEM_APERTURE_DEFAULT_ADDR, rdev->vram_scratch.gpu_addr >> 12);
3006 : /* llano/ontario only */
3007 0 : if ((rdev->family == CHIP_PALM) ||
3008 0 : (rdev->family == CHIP_SUMO) ||
3009 0 : (rdev->family == CHIP_SUMO2)) {
3010 0 : tmp = RREG32(MC_FUS_VM_FB_OFFSET) & 0x000FFFFF;
3011 0 : tmp |= ((rdev->mc.vram_end >> 20) & 0xF) << 24;
3012 0 : tmp |= ((rdev->mc.vram_start >> 20) & 0xF) << 20;
3013 0 : WREG32(MC_FUS_VM_FB_OFFSET, tmp);
3014 0 : }
3015 0 : tmp = ((rdev->mc.vram_end >> 24) & 0xFFFF) << 16;
3016 0 : tmp |= ((rdev->mc.vram_start >> 24) & 0xFFFF);
3017 0 : WREG32(MC_VM_FB_LOCATION, tmp);
3018 0 : WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8));
3019 0 : WREG32(HDP_NONSURFACE_INFO, (2 << 7) | (1 << 30));
3020 0 : WREG32(HDP_NONSURFACE_SIZE, 0x3FFFFFFF);
3021 0 : if (rdev->flags & RADEON_IS_AGP) {
3022 0 : WREG32(MC_VM_AGP_TOP, rdev->mc.gtt_end >> 16);
3023 0 : WREG32(MC_VM_AGP_BOT, rdev->mc.gtt_start >> 16);
3024 0 : WREG32(MC_VM_AGP_BASE, rdev->mc.agp_base >> 22);
3025 0 : } else {
3026 0 : WREG32(MC_VM_AGP_BASE, 0);
3027 0 : WREG32(MC_VM_AGP_TOP, 0x0FFFFFFF);
3028 0 : WREG32(MC_VM_AGP_BOT, 0x0FFFFFFF);
3029 : }
3030 0 : if (evergreen_mc_wait_for_idle(rdev)) {
3031 0 : dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
3032 0 : }
3033 0 : evergreen_mc_resume(rdev, &save);
3034 : /* we need to own VRAM, so turn off the VGA renderer here
3035 : * to stop it overwriting our objects */
3036 0 : rv515_vga_render_disable(rdev);
3037 0 : }
3038 :
3039 : /*
3040 : * CP.
3041 : */
3042 0 : void evergreen_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib)
3043 : {
3044 0 : struct radeon_ring *ring = &rdev->ring[ib->ring];
3045 : u32 next_rptr;
3046 :
3047 : /* set to DX10/11 mode */
3048 0 : radeon_ring_write(ring, PACKET3(PACKET3_MODE_CONTROL, 0));
3049 0 : radeon_ring_write(ring, 1);
3050 :
3051 0 : if (ring->rptr_save_reg) {
3052 0 : next_rptr = ring->wptr + 3 + 4;
3053 0 : radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
3054 0 : radeon_ring_write(ring, ((ring->rptr_save_reg -
3055 0 : PACKET3_SET_CONFIG_REG_START) >> 2));
3056 0 : radeon_ring_write(ring, next_rptr);
3057 0 : } else if (rdev->wb.enabled) {
3058 0 : next_rptr = ring->wptr + 5 + 4;
3059 0 : radeon_ring_write(ring, PACKET3(PACKET3_MEM_WRITE, 3));
3060 0 : radeon_ring_write(ring, ring->next_rptr_gpu_addr & 0xfffffffc);
3061 0 : radeon_ring_write(ring, (upper_32_bits(ring->next_rptr_gpu_addr) & 0xff) | (1 << 18));
3062 0 : radeon_ring_write(ring, next_rptr);
3063 0 : radeon_ring_write(ring, 0);
3064 0 : }
3065 :
3066 0 : radeon_ring_write(ring, PACKET3(PACKET3_INDIRECT_BUFFER, 2));
3067 0 : radeon_ring_write(ring,
3068 : #ifdef __BIG_ENDIAN
3069 : (2 << 0) |
3070 : #endif
3071 0 : (ib->gpu_addr & 0xFFFFFFFC));
3072 0 : radeon_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFF);
3073 0 : radeon_ring_write(ring, ib->length_dw);
3074 0 : }
3075 :
3076 :
3077 0 : static int evergreen_cp_load_microcode(struct radeon_device *rdev)
3078 : {
3079 : const __be32 *fw_data;
3080 : int i;
3081 :
3082 0 : if (!rdev->me_fw || !rdev->pfp_fw)
3083 0 : return -EINVAL;
3084 :
3085 0 : r700_cp_stop(rdev);
3086 0 : WREG32(CP_RB_CNTL,
3087 : #ifdef __BIG_ENDIAN
3088 : BUF_SWAP_32BIT |
3089 : #endif
3090 : RB_NO_UPDATE | RB_BLKSZ(15) | RB_BUFSZ(3));
3091 :
3092 0 : fw_data = (const __be32 *)rdev->pfp_fw->data;
3093 0 : WREG32(CP_PFP_UCODE_ADDR, 0);
3094 0 : for (i = 0; i < EVERGREEN_PFP_UCODE_SIZE; i++)
3095 0 : WREG32(CP_PFP_UCODE_DATA, be32_to_cpup(fw_data++));
3096 0 : WREG32(CP_PFP_UCODE_ADDR, 0);
3097 :
3098 0 : fw_data = (const __be32 *)rdev->me_fw->data;
3099 0 : WREG32(CP_ME_RAM_WADDR, 0);
3100 0 : for (i = 0; i < EVERGREEN_PM4_UCODE_SIZE; i++)
3101 0 : WREG32(CP_ME_RAM_DATA, be32_to_cpup(fw_data++));
3102 :
3103 0 : WREG32(CP_PFP_UCODE_ADDR, 0);
3104 0 : WREG32(CP_ME_RAM_WADDR, 0);
3105 0 : WREG32(CP_ME_RAM_RADDR, 0);
3106 0 : return 0;
3107 0 : }
3108 :
3109 0 : static int evergreen_cp_start(struct radeon_device *rdev)
3110 : {
3111 0 : struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
3112 : int r, i;
3113 : uint32_t cp_me;
3114 :
3115 0 : r = radeon_ring_lock(rdev, ring, 7);
3116 0 : if (r) {
3117 0 : DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
3118 0 : return r;
3119 : }
3120 0 : radeon_ring_write(ring, PACKET3(PACKET3_ME_INITIALIZE, 5));
3121 0 : radeon_ring_write(ring, 0x1);
3122 0 : radeon_ring_write(ring, 0x0);
3123 0 : radeon_ring_write(ring, rdev->config.evergreen.max_hw_contexts - 1);
3124 0 : radeon_ring_write(ring, PACKET3_ME_INITIALIZE_DEVICE_ID(1));
3125 0 : radeon_ring_write(ring, 0);
3126 0 : radeon_ring_write(ring, 0);
3127 0 : radeon_ring_unlock_commit(rdev, ring, false);
3128 :
3129 : cp_me = 0xff;
3130 0 : WREG32(CP_ME_CNTL, cp_me);
3131 :
3132 0 : r = radeon_ring_lock(rdev, ring, evergreen_default_size + 19);
3133 0 : if (r) {
3134 0 : DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
3135 0 : return r;
3136 : }
3137 :
3138 : /* setup clear context state */
3139 0 : radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
3140 0 : radeon_ring_write(ring, PACKET3_PREAMBLE_BEGIN_CLEAR_STATE);
3141 :
3142 0 : for (i = 0; i < evergreen_default_size; i++)
3143 0 : radeon_ring_write(ring, evergreen_default_state[i]);
3144 :
3145 0 : radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
3146 0 : radeon_ring_write(ring, PACKET3_PREAMBLE_END_CLEAR_STATE);
3147 :
3148 : /* set clear context state */
3149 0 : radeon_ring_write(ring, PACKET3(PACKET3_CLEAR_STATE, 0));
3150 0 : radeon_ring_write(ring, 0);
3151 :
3152 : /* SQ_VTX_BASE_VTX_LOC */
3153 0 : radeon_ring_write(ring, 0xc0026f00);
3154 0 : radeon_ring_write(ring, 0x00000000);
3155 0 : radeon_ring_write(ring, 0x00000000);
3156 0 : radeon_ring_write(ring, 0x00000000);
3157 :
3158 : /* Clear consts */
3159 0 : radeon_ring_write(ring, 0xc0036f00);
3160 0 : radeon_ring_write(ring, 0x00000bc4);
3161 0 : radeon_ring_write(ring, 0xffffffff);
3162 0 : radeon_ring_write(ring, 0xffffffff);
3163 0 : radeon_ring_write(ring, 0xffffffff);
3164 :
3165 0 : radeon_ring_write(ring, 0xc0026900);
3166 0 : radeon_ring_write(ring, 0x00000316);
3167 0 : radeon_ring_write(ring, 0x0000000e); /* VGT_VERTEX_REUSE_BLOCK_CNTL */
3168 0 : radeon_ring_write(ring, 0x00000010); /* */
3169 :
3170 0 : radeon_ring_unlock_commit(rdev, ring, false);
3171 :
3172 0 : return 0;
3173 0 : }
3174 :
3175 0 : static int evergreen_cp_resume(struct radeon_device *rdev)
3176 : {
3177 0 : struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
3178 : u32 tmp;
3179 : u32 rb_bufsz;
3180 : int r;
3181 :
3182 : /* Reset cp; if cp is reset, then PA, SH, VGT also need to be reset */
3183 0 : WREG32(GRBM_SOFT_RESET, (SOFT_RESET_CP |
3184 : SOFT_RESET_PA |
3185 : SOFT_RESET_SH |
3186 : SOFT_RESET_VGT |
3187 : SOFT_RESET_SPI |
3188 : SOFT_RESET_SX));
3189 0 : RREG32(GRBM_SOFT_RESET);
3190 0 : mdelay(15);
3191 0 : WREG32(GRBM_SOFT_RESET, 0);
3192 0 : RREG32(GRBM_SOFT_RESET);
3193 :
3194 : /* Set ring buffer size */
3195 0 : rb_bufsz = order_base_2(ring->ring_size / 8);
3196 0 : tmp = (order_base_2(RADEON_GPU_PAGE_SIZE/8) << 8) | rb_bufsz;
3197 : #ifdef __BIG_ENDIAN
3198 : tmp |= BUF_SWAP_32BIT;
3199 : #endif
3200 0 : WREG32(CP_RB_CNTL, tmp);
3201 0 : WREG32(CP_SEM_WAIT_TIMER, 0x0);
3202 0 : WREG32(CP_SEM_INCOMPLETE_TIMER_CNTL, 0x0);
3203 :
3204 : /* Set the write pointer delay */
3205 0 : WREG32(CP_RB_WPTR_DELAY, 0);
3206 :
3207 : /* Initialize the ring buffer's read and write pointers */
3208 0 : WREG32(CP_RB_CNTL, tmp | RB_RPTR_WR_ENA);
3209 0 : WREG32(CP_RB_RPTR_WR, 0);
3210 0 : ring->wptr = 0;
3211 0 : WREG32(CP_RB_WPTR, ring->wptr);
3212 :
3213 : /* set the wb address whether it's enabled or not */
3214 0 : WREG32(CP_RB_RPTR_ADDR,
3215 : ((rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFFFFFFFC));
3216 0 : WREG32(CP_RB_RPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFF);
3217 0 : WREG32(SCRATCH_ADDR, ((rdev->wb.gpu_addr + RADEON_WB_SCRATCH_OFFSET) >> 8) & 0xFFFFFFFF);
3218 :
3219 0 : if (rdev->wb.enabled)
3220 0 : WREG32(SCRATCH_UMSK, 0xff);
3221 : else {
3222 0 : tmp |= RB_NO_UPDATE;
3223 0 : WREG32(SCRATCH_UMSK, 0);
3224 : }
3225 :
3226 0 : mdelay(1);
3227 0 : WREG32(CP_RB_CNTL, tmp);
3228 :
3229 0 : WREG32(CP_RB_BASE, ring->gpu_addr >> 8);
3230 0 : WREG32(CP_DEBUG, (1 << 27) | (1 << 28));
3231 :
3232 0 : evergreen_cp_start(rdev);
3233 0 : ring->ready = true;
3234 0 : r = radeon_ring_test(rdev, RADEON_RING_TYPE_GFX_INDEX, ring);
3235 0 : if (r) {
3236 0 : ring->ready = false;
3237 0 : return r;
3238 : }
3239 0 : return 0;
3240 0 : }
3241 :
3242 : /*
3243 : * Core functions
3244 : */
3245 0 : static void evergreen_gpu_init(struct radeon_device *rdev)
3246 : {
3247 : u32 gb_addr_config;
3248 : u32 mc_shared_chmap, mc_arb_ramcfg;
3249 : u32 sx_debug_1;
3250 : u32 smx_dc_ctl0;
3251 : u32 sq_config;
3252 : u32 sq_lds_resource_mgmt;
3253 : u32 sq_gpr_resource_mgmt_1;
3254 : u32 sq_gpr_resource_mgmt_2;
3255 : u32 sq_gpr_resource_mgmt_3;
3256 : u32 sq_thread_resource_mgmt;
3257 : u32 sq_thread_resource_mgmt_2;
3258 : u32 sq_stack_resource_mgmt_1;
3259 : u32 sq_stack_resource_mgmt_2;
3260 : u32 sq_stack_resource_mgmt_3;
3261 : u32 vgt_cache_invalidation;
3262 : u32 hdp_host_path_cntl, tmp;
3263 : u32 disabled_rb_mask;
3264 : int i, j, ps_thread_count;
3265 :
3266 0 : switch (rdev->family) {
3267 : case CHIP_CYPRESS:
3268 : case CHIP_HEMLOCK:
3269 0 : rdev->config.evergreen.num_ses = 2;
3270 0 : rdev->config.evergreen.max_pipes = 4;
3271 0 : rdev->config.evergreen.max_tile_pipes = 8;
3272 0 : rdev->config.evergreen.max_simds = 10;
3273 0 : rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
3274 0 : rdev->config.evergreen.max_gprs = 256;
3275 0 : rdev->config.evergreen.max_threads = 248;
3276 0 : rdev->config.evergreen.max_gs_threads = 32;
3277 0 : rdev->config.evergreen.max_stack_entries = 512;
3278 0 : rdev->config.evergreen.sx_num_of_sets = 4;
3279 0 : rdev->config.evergreen.sx_max_export_size = 256;
3280 0 : rdev->config.evergreen.sx_max_export_pos_size = 64;
3281 0 : rdev->config.evergreen.sx_max_export_smx_size = 192;
3282 0 : rdev->config.evergreen.max_hw_contexts = 8;
3283 0 : rdev->config.evergreen.sq_num_cf_insts = 2;
3284 :
3285 0 : rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3286 0 : rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3287 0 : rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3288 : gb_addr_config = CYPRESS_GB_ADDR_CONFIG_GOLDEN;
3289 0 : break;
3290 : case CHIP_JUNIPER:
3291 0 : rdev->config.evergreen.num_ses = 1;
3292 0 : rdev->config.evergreen.max_pipes = 4;
3293 0 : rdev->config.evergreen.max_tile_pipes = 4;
3294 0 : rdev->config.evergreen.max_simds = 10;
3295 0 : rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
3296 0 : rdev->config.evergreen.max_gprs = 256;
3297 0 : rdev->config.evergreen.max_threads = 248;
3298 0 : rdev->config.evergreen.max_gs_threads = 32;
3299 0 : rdev->config.evergreen.max_stack_entries = 512;
3300 0 : rdev->config.evergreen.sx_num_of_sets = 4;
3301 0 : rdev->config.evergreen.sx_max_export_size = 256;
3302 0 : rdev->config.evergreen.sx_max_export_pos_size = 64;
3303 0 : rdev->config.evergreen.sx_max_export_smx_size = 192;
3304 0 : rdev->config.evergreen.max_hw_contexts = 8;
3305 0 : rdev->config.evergreen.sq_num_cf_insts = 2;
3306 :
3307 0 : rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3308 0 : rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3309 0 : rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3310 : gb_addr_config = JUNIPER_GB_ADDR_CONFIG_GOLDEN;
3311 0 : break;
3312 : case CHIP_REDWOOD:
3313 0 : rdev->config.evergreen.num_ses = 1;
3314 0 : rdev->config.evergreen.max_pipes = 4;
3315 0 : rdev->config.evergreen.max_tile_pipes = 4;
3316 0 : rdev->config.evergreen.max_simds = 5;
3317 0 : rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3318 0 : rdev->config.evergreen.max_gprs = 256;
3319 0 : rdev->config.evergreen.max_threads = 248;
3320 0 : rdev->config.evergreen.max_gs_threads = 32;
3321 0 : rdev->config.evergreen.max_stack_entries = 256;
3322 0 : rdev->config.evergreen.sx_num_of_sets = 4;
3323 0 : rdev->config.evergreen.sx_max_export_size = 256;
3324 0 : rdev->config.evergreen.sx_max_export_pos_size = 64;
3325 0 : rdev->config.evergreen.sx_max_export_smx_size = 192;
3326 0 : rdev->config.evergreen.max_hw_contexts = 8;
3327 0 : rdev->config.evergreen.sq_num_cf_insts = 2;
3328 :
3329 0 : rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3330 0 : rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3331 0 : rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3332 : gb_addr_config = REDWOOD_GB_ADDR_CONFIG_GOLDEN;
3333 0 : break;
3334 : case CHIP_CEDAR:
3335 : default:
3336 0 : rdev->config.evergreen.num_ses = 1;
3337 0 : rdev->config.evergreen.max_pipes = 2;
3338 0 : rdev->config.evergreen.max_tile_pipes = 2;
3339 0 : rdev->config.evergreen.max_simds = 2;
3340 0 : rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3341 0 : rdev->config.evergreen.max_gprs = 256;
3342 0 : rdev->config.evergreen.max_threads = 192;
3343 0 : rdev->config.evergreen.max_gs_threads = 16;
3344 0 : rdev->config.evergreen.max_stack_entries = 256;
3345 0 : rdev->config.evergreen.sx_num_of_sets = 4;
3346 0 : rdev->config.evergreen.sx_max_export_size = 128;
3347 0 : rdev->config.evergreen.sx_max_export_pos_size = 32;
3348 0 : rdev->config.evergreen.sx_max_export_smx_size = 96;
3349 0 : rdev->config.evergreen.max_hw_contexts = 4;
3350 0 : rdev->config.evergreen.sq_num_cf_insts = 1;
3351 :
3352 0 : rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3353 0 : rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3354 0 : rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3355 : gb_addr_config = CEDAR_GB_ADDR_CONFIG_GOLDEN;
3356 0 : break;
3357 : case CHIP_PALM:
3358 0 : rdev->config.evergreen.num_ses = 1;
3359 0 : rdev->config.evergreen.max_pipes = 2;
3360 0 : rdev->config.evergreen.max_tile_pipes = 2;
3361 0 : rdev->config.evergreen.max_simds = 2;
3362 0 : rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3363 0 : rdev->config.evergreen.max_gprs = 256;
3364 0 : rdev->config.evergreen.max_threads = 192;
3365 0 : rdev->config.evergreen.max_gs_threads = 16;
3366 0 : rdev->config.evergreen.max_stack_entries = 256;
3367 0 : rdev->config.evergreen.sx_num_of_sets = 4;
3368 0 : rdev->config.evergreen.sx_max_export_size = 128;
3369 0 : rdev->config.evergreen.sx_max_export_pos_size = 32;
3370 0 : rdev->config.evergreen.sx_max_export_smx_size = 96;
3371 0 : rdev->config.evergreen.max_hw_contexts = 4;
3372 0 : rdev->config.evergreen.sq_num_cf_insts = 1;
3373 :
3374 0 : rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3375 0 : rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3376 0 : rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3377 : gb_addr_config = CEDAR_GB_ADDR_CONFIG_GOLDEN;
3378 0 : break;
3379 : case CHIP_SUMO:
3380 0 : rdev->config.evergreen.num_ses = 1;
3381 0 : rdev->config.evergreen.max_pipes = 4;
3382 0 : rdev->config.evergreen.max_tile_pipes = 4;
3383 0 : if (rdev->pdev->device == 0x9648)
3384 0 : rdev->config.evergreen.max_simds = 3;
3385 0 : else if ((rdev->pdev->device == 0x9647) ||
3386 0 : (rdev->pdev->device == 0x964a))
3387 0 : rdev->config.evergreen.max_simds = 4;
3388 : else
3389 0 : rdev->config.evergreen.max_simds = 5;
3390 0 : rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3391 0 : rdev->config.evergreen.max_gprs = 256;
3392 0 : rdev->config.evergreen.max_threads = 248;
3393 0 : rdev->config.evergreen.max_gs_threads = 32;
3394 0 : rdev->config.evergreen.max_stack_entries = 256;
3395 0 : rdev->config.evergreen.sx_num_of_sets = 4;
3396 0 : rdev->config.evergreen.sx_max_export_size = 256;
3397 0 : rdev->config.evergreen.sx_max_export_pos_size = 64;
3398 0 : rdev->config.evergreen.sx_max_export_smx_size = 192;
3399 0 : rdev->config.evergreen.max_hw_contexts = 8;
3400 0 : rdev->config.evergreen.sq_num_cf_insts = 2;
3401 :
3402 0 : rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3403 0 : rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3404 0 : rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3405 : gb_addr_config = SUMO_GB_ADDR_CONFIG_GOLDEN;
3406 0 : break;
3407 : case CHIP_SUMO2:
3408 0 : rdev->config.evergreen.num_ses = 1;
3409 0 : rdev->config.evergreen.max_pipes = 4;
3410 0 : rdev->config.evergreen.max_tile_pipes = 4;
3411 0 : rdev->config.evergreen.max_simds = 2;
3412 0 : rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3413 0 : rdev->config.evergreen.max_gprs = 256;
3414 0 : rdev->config.evergreen.max_threads = 248;
3415 0 : rdev->config.evergreen.max_gs_threads = 32;
3416 0 : rdev->config.evergreen.max_stack_entries = 512;
3417 0 : rdev->config.evergreen.sx_num_of_sets = 4;
3418 0 : rdev->config.evergreen.sx_max_export_size = 256;
3419 0 : rdev->config.evergreen.sx_max_export_pos_size = 64;
3420 0 : rdev->config.evergreen.sx_max_export_smx_size = 192;
3421 0 : rdev->config.evergreen.max_hw_contexts = 4;
3422 0 : rdev->config.evergreen.sq_num_cf_insts = 2;
3423 :
3424 0 : rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3425 0 : rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3426 0 : rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3427 : gb_addr_config = SUMO2_GB_ADDR_CONFIG_GOLDEN;
3428 0 : break;
3429 : case CHIP_BARTS:
3430 0 : rdev->config.evergreen.num_ses = 2;
3431 0 : rdev->config.evergreen.max_pipes = 4;
3432 0 : rdev->config.evergreen.max_tile_pipes = 8;
3433 0 : rdev->config.evergreen.max_simds = 7;
3434 0 : rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
3435 0 : rdev->config.evergreen.max_gprs = 256;
3436 0 : rdev->config.evergreen.max_threads = 248;
3437 0 : rdev->config.evergreen.max_gs_threads = 32;
3438 0 : rdev->config.evergreen.max_stack_entries = 512;
3439 0 : rdev->config.evergreen.sx_num_of_sets = 4;
3440 0 : rdev->config.evergreen.sx_max_export_size = 256;
3441 0 : rdev->config.evergreen.sx_max_export_pos_size = 64;
3442 0 : rdev->config.evergreen.sx_max_export_smx_size = 192;
3443 0 : rdev->config.evergreen.max_hw_contexts = 8;
3444 0 : rdev->config.evergreen.sq_num_cf_insts = 2;
3445 :
3446 0 : rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3447 0 : rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3448 0 : rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3449 : gb_addr_config = BARTS_GB_ADDR_CONFIG_GOLDEN;
3450 0 : break;
3451 : case CHIP_TURKS:
3452 0 : rdev->config.evergreen.num_ses = 1;
3453 0 : rdev->config.evergreen.max_pipes = 4;
3454 0 : rdev->config.evergreen.max_tile_pipes = 4;
3455 0 : rdev->config.evergreen.max_simds = 6;
3456 0 : rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3457 0 : rdev->config.evergreen.max_gprs = 256;
3458 0 : rdev->config.evergreen.max_threads = 248;
3459 0 : rdev->config.evergreen.max_gs_threads = 32;
3460 0 : rdev->config.evergreen.max_stack_entries = 256;
3461 0 : rdev->config.evergreen.sx_num_of_sets = 4;
3462 0 : rdev->config.evergreen.sx_max_export_size = 256;
3463 0 : rdev->config.evergreen.sx_max_export_pos_size = 64;
3464 0 : rdev->config.evergreen.sx_max_export_smx_size = 192;
3465 0 : rdev->config.evergreen.max_hw_contexts = 8;
3466 0 : rdev->config.evergreen.sq_num_cf_insts = 2;
3467 :
3468 0 : rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3469 0 : rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3470 0 : rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3471 : gb_addr_config = TURKS_GB_ADDR_CONFIG_GOLDEN;
3472 0 : break;
3473 : case CHIP_CAICOS:
3474 0 : rdev->config.evergreen.num_ses = 1;
3475 0 : rdev->config.evergreen.max_pipes = 2;
3476 0 : rdev->config.evergreen.max_tile_pipes = 2;
3477 0 : rdev->config.evergreen.max_simds = 2;
3478 0 : rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3479 0 : rdev->config.evergreen.max_gprs = 256;
3480 0 : rdev->config.evergreen.max_threads = 192;
3481 0 : rdev->config.evergreen.max_gs_threads = 16;
3482 0 : rdev->config.evergreen.max_stack_entries = 256;
3483 0 : rdev->config.evergreen.sx_num_of_sets = 4;
3484 0 : rdev->config.evergreen.sx_max_export_size = 128;
3485 0 : rdev->config.evergreen.sx_max_export_pos_size = 32;
3486 0 : rdev->config.evergreen.sx_max_export_smx_size = 96;
3487 0 : rdev->config.evergreen.max_hw_contexts = 4;
3488 0 : rdev->config.evergreen.sq_num_cf_insts = 1;
3489 :
3490 0 : rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3491 0 : rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3492 0 : rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3493 : gb_addr_config = CAICOS_GB_ADDR_CONFIG_GOLDEN;
3494 0 : break;
3495 : }
3496 :
3497 : /* Initialize HDP */
3498 0 : for (i = 0, j = 0; i < 32; i++, j += 0x18) {
3499 0 : WREG32((0x2c14 + j), 0x00000000);
3500 0 : WREG32((0x2c18 + j), 0x00000000);
3501 0 : WREG32((0x2c1c + j), 0x00000000);
3502 0 : WREG32((0x2c20 + j), 0x00000000);
3503 0 : WREG32((0x2c24 + j), 0x00000000);
3504 : }
3505 :
3506 0 : WREG32(GRBM_CNTL, GRBM_READ_TIMEOUT(0xff));
3507 0 : WREG32(SRBM_INT_CNTL, 0x1);
3508 0 : WREG32(SRBM_INT_ACK, 0x1);
3509 :
3510 0 : evergreen_fix_pci_max_read_req_size(rdev);
3511 :
3512 0 : mc_shared_chmap = RREG32(MC_SHARED_CHMAP);
3513 0 : if ((rdev->family == CHIP_PALM) ||
3514 0 : (rdev->family == CHIP_SUMO) ||
3515 0 : (rdev->family == CHIP_SUMO2))
3516 0 : mc_arb_ramcfg = RREG32(FUS_MC_ARB_RAMCFG);
3517 : else
3518 0 : mc_arb_ramcfg = RREG32(MC_ARB_RAMCFG);
3519 :
3520 : /* setup tiling info dword. gb_addr_config is not adequate since it does
3521 : * not have bank info, so create a custom tiling dword.
3522 : * bits 3:0 num_pipes
3523 : * bits 7:4 num_banks
3524 : * bits 11:8 group_size
3525 : * bits 15:12 row_size
3526 : */
3527 0 : rdev->config.evergreen.tile_config = 0;
3528 0 : switch (rdev->config.evergreen.max_tile_pipes) {
3529 : case 1:
3530 : default:
3531 0 : rdev->config.evergreen.tile_config |= (0 << 0);
3532 0 : break;
3533 : case 2:
3534 0 : rdev->config.evergreen.tile_config |= (1 << 0);
3535 0 : break;
3536 : case 4:
3537 0 : rdev->config.evergreen.tile_config |= (2 << 0);
3538 0 : break;
3539 : case 8:
3540 0 : rdev->config.evergreen.tile_config |= (3 << 0);
3541 0 : break;
3542 : }
3543 : /* num banks is 8 on all fusion asics. 0 = 4, 1 = 8, 2 = 16 */
3544 0 : if (rdev->flags & RADEON_IS_IGP)
3545 0 : rdev->config.evergreen.tile_config |= 1 << 4;
3546 : else {
3547 0 : switch ((mc_arb_ramcfg & NOOFBANK_MASK) >> NOOFBANK_SHIFT) {
3548 : case 0: /* four banks */
3549 0 : rdev->config.evergreen.tile_config |= 0 << 4;
3550 0 : break;
3551 : case 1: /* eight banks */
3552 0 : rdev->config.evergreen.tile_config |= 1 << 4;
3553 0 : break;
3554 : case 2: /* sixteen banks */
3555 : default:
3556 0 : rdev->config.evergreen.tile_config |= 2 << 4;
3557 0 : break;
3558 : }
3559 : }
3560 0 : rdev->config.evergreen.tile_config |= 0 << 8;
3561 : rdev->config.evergreen.tile_config |=
3562 : ((gb_addr_config & 0x30000000) >> 28) << 12;
3563 :
3564 0 : if ((rdev->family >= CHIP_CEDAR) && (rdev->family <= CHIP_HEMLOCK)) {
3565 : u32 efuse_straps_4;
3566 : u32 efuse_straps_3;
3567 :
3568 0 : efuse_straps_4 = RREG32_RCU(0x204);
3569 0 : efuse_straps_3 = RREG32_RCU(0x203);
3570 0 : tmp = (((efuse_straps_4 & 0xf) << 4) |
3571 0 : ((efuse_straps_3 & 0xf0000000) >> 28));
3572 0 : } else {
3573 : tmp = 0;
3574 0 : for (i = (rdev->config.evergreen.num_ses - 1); i >= 0; i--) {
3575 : u32 rb_disable_bitmap;
3576 :
3577 0 : WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3578 0 : WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3579 0 : rb_disable_bitmap = (RREG32(CC_RB_BACKEND_DISABLE) & 0x00ff0000) >> 16;
3580 0 : tmp <<= 4;
3581 0 : tmp |= rb_disable_bitmap;
3582 : }
3583 : }
3584 : /* enabled rb are just the one not disabled :) */
3585 : disabled_rb_mask = tmp;
3586 : tmp = 0;
3587 0 : for (i = 0; i < rdev->config.evergreen.max_backends; i++)
3588 0 : tmp |= (1 << i);
3589 : /* if all the backends are disabled, fix it up here */
3590 0 : if ((disabled_rb_mask & tmp) == tmp) {
3591 0 : for (i = 0; i < rdev->config.evergreen.max_backends; i++)
3592 0 : disabled_rb_mask &= ~(1 << i);
3593 : }
3594 :
3595 0 : for (i = 0; i < rdev->config.evergreen.num_ses; i++) {
3596 : u32 simd_disable_bitmap;
3597 :
3598 0 : WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3599 0 : WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3600 0 : simd_disable_bitmap = (RREG32(CC_GC_SHADER_PIPE_CONFIG) & 0xffff0000) >> 16;
3601 0 : simd_disable_bitmap |= 0xffffffff << rdev->config.evergreen.max_simds;
3602 0 : tmp <<= 16;
3603 0 : tmp |= simd_disable_bitmap;
3604 : }
3605 0 : rdev->config.evergreen.active_simds = hweight32(~tmp);
3606 :
3607 0 : WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_BROADCAST_WRITES);
3608 0 : WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_BROADCAST_WRITES);
3609 :
3610 0 : WREG32(GB_ADDR_CONFIG, gb_addr_config);
3611 0 : WREG32(DMIF_ADDR_CONFIG, gb_addr_config);
3612 0 : WREG32(HDP_ADDR_CONFIG, gb_addr_config);
3613 0 : WREG32(DMA_TILING_CONFIG, gb_addr_config);
3614 0 : WREG32(UVD_UDEC_ADDR_CONFIG, gb_addr_config);
3615 0 : WREG32(UVD_UDEC_DB_ADDR_CONFIG, gb_addr_config);
3616 0 : WREG32(UVD_UDEC_DBW_ADDR_CONFIG, gb_addr_config);
3617 :
3618 0 : if ((rdev->config.evergreen.max_backends == 1) &&
3619 0 : (rdev->flags & RADEON_IS_IGP)) {
3620 0 : if ((disabled_rb_mask & 3) == 1) {
3621 : /* RB0 disabled, RB1 enabled */
3622 : tmp = 0x11111111;
3623 0 : } else {
3624 : /* RB1 disabled, RB0 enabled */
3625 : tmp = 0x00000000;
3626 : }
3627 : } else {
3628 0 : tmp = gb_addr_config & NUM_PIPES_MASK;
3629 0 : tmp = r6xx_remap_render_backend(rdev, tmp, rdev->config.evergreen.max_backends,
3630 : EVERGREEN_MAX_BACKENDS, disabled_rb_mask);
3631 : }
3632 0 : WREG32(GB_BACKEND_MAP, tmp);
3633 :
3634 0 : WREG32(CGTS_SYS_TCC_DISABLE, 0);
3635 0 : WREG32(CGTS_TCC_DISABLE, 0);
3636 0 : WREG32(CGTS_USER_SYS_TCC_DISABLE, 0);
3637 0 : WREG32(CGTS_USER_TCC_DISABLE, 0);
3638 :
3639 : /* set HW defaults for 3D engine */
3640 0 : WREG32(CP_QUEUE_THRESHOLDS, (ROQ_IB1_START(0x16) |
3641 : ROQ_IB2_START(0x2b)));
3642 :
3643 0 : WREG32(CP_MEQ_THRESHOLDS, STQ_SPLIT(0x30));
3644 :
3645 0 : WREG32(TA_CNTL_AUX, (DISABLE_CUBE_ANISO |
3646 : SYNC_GRADIENT |
3647 : SYNC_WALKER |
3648 : SYNC_ALIGNER));
3649 :
3650 0 : sx_debug_1 = RREG32(SX_DEBUG_1);
3651 0 : sx_debug_1 |= ENABLE_NEW_SMX_ADDRESS;
3652 0 : WREG32(SX_DEBUG_1, sx_debug_1);
3653 :
3654 :
3655 0 : smx_dc_ctl0 = RREG32(SMX_DC_CTL0);
3656 0 : smx_dc_ctl0 &= ~NUMBER_OF_SETS(0x1ff);
3657 0 : smx_dc_ctl0 |= NUMBER_OF_SETS(rdev->config.evergreen.sx_num_of_sets);
3658 0 : WREG32(SMX_DC_CTL0, smx_dc_ctl0);
3659 :
3660 0 : if (rdev->family <= CHIP_SUMO2)
3661 0 : WREG32(SMX_SAR_CTL0, 0x00010000);
3662 :
3663 0 : WREG32(SX_EXPORT_BUFFER_SIZES, (COLOR_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_size / 4) - 1) |
3664 : POSITION_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_pos_size / 4) - 1) |
3665 : SMX_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_smx_size / 4) - 1)));
3666 :
3667 0 : WREG32(PA_SC_FIFO_SIZE, (SC_PRIM_FIFO_SIZE(rdev->config.evergreen.sc_prim_fifo_size) |
3668 : SC_HIZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_hiz_tile_fifo_size) |
3669 : SC_EARLYZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_earlyz_tile_fifo_size)));
3670 :
3671 0 : WREG32(VGT_NUM_INSTANCES, 1);
3672 0 : WREG32(SPI_CONFIG_CNTL, 0);
3673 0 : WREG32(SPI_CONFIG_CNTL_1, VTX_DONE_DELAY(4));
3674 0 : WREG32(CP_PERFMON_CNTL, 0);
3675 :
3676 0 : WREG32(SQ_MS_FIFO_SIZES, (CACHE_FIFO_SIZE(16 * rdev->config.evergreen.sq_num_cf_insts) |
3677 : FETCH_FIFO_HIWATER(0x4) |
3678 : DONE_FIFO_HIWATER(0xe0) |
3679 : ALU_UPDATE_FIFO_HIWATER(0x8)));
3680 :
3681 0 : sq_config = RREG32(SQ_CONFIG);
3682 0 : sq_config &= ~(PS_PRIO(3) |
3683 : VS_PRIO(3) |
3684 : GS_PRIO(3) |
3685 : ES_PRIO(3));
3686 0 : sq_config |= (VC_ENABLE |
3687 : EXPORT_SRC_C |
3688 : PS_PRIO(0) |
3689 : VS_PRIO(1) |
3690 : GS_PRIO(2) |
3691 : ES_PRIO(3));
3692 :
3693 0 : switch (rdev->family) {
3694 : case CHIP_CEDAR:
3695 : case CHIP_PALM:
3696 : case CHIP_SUMO:
3697 : case CHIP_SUMO2:
3698 : case CHIP_CAICOS:
3699 : /* no vertex cache */
3700 0 : sq_config &= ~VC_ENABLE;
3701 0 : break;
3702 : default:
3703 : break;
3704 : }
3705 :
3706 0 : sq_lds_resource_mgmt = RREG32(SQ_LDS_RESOURCE_MGMT);
3707 :
3708 0 : sq_gpr_resource_mgmt_1 = NUM_PS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2))* 12 / 32);
3709 0 : sq_gpr_resource_mgmt_1 |= NUM_VS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 6 / 32);
3710 0 : sq_gpr_resource_mgmt_1 |= NUM_CLAUSE_TEMP_GPRS(4);
3711 0 : sq_gpr_resource_mgmt_2 = NUM_GS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
3712 0 : sq_gpr_resource_mgmt_2 |= NUM_ES_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
3713 0 : sq_gpr_resource_mgmt_3 = NUM_HS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
3714 0 : sq_gpr_resource_mgmt_3 |= NUM_LS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
3715 :
3716 0 : switch (rdev->family) {
3717 : case CHIP_CEDAR:
3718 : case CHIP_PALM:
3719 : case CHIP_SUMO:
3720 : case CHIP_SUMO2:
3721 : ps_thread_count = 96;
3722 0 : break;
3723 : default:
3724 : ps_thread_count = 128;
3725 0 : break;
3726 : }
3727 :
3728 : sq_thread_resource_mgmt = NUM_PS_THREADS(ps_thread_count);
3729 0 : sq_thread_resource_mgmt |= NUM_VS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3730 0 : sq_thread_resource_mgmt |= NUM_GS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3731 0 : sq_thread_resource_mgmt |= NUM_ES_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3732 : sq_thread_resource_mgmt_2 = NUM_HS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3733 0 : sq_thread_resource_mgmt_2 |= NUM_LS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3734 :
3735 0 : sq_stack_resource_mgmt_1 = NUM_PS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3736 0 : sq_stack_resource_mgmt_1 |= NUM_VS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3737 : sq_stack_resource_mgmt_2 = NUM_GS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3738 : sq_stack_resource_mgmt_2 |= NUM_ES_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3739 : sq_stack_resource_mgmt_3 = NUM_HS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3740 : sq_stack_resource_mgmt_3 |= NUM_LS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3741 :
3742 0 : WREG32(SQ_CONFIG, sq_config);
3743 0 : WREG32(SQ_GPR_RESOURCE_MGMT_1, sq_gpr_resource_mgmt_1);
3744 0 : WREG32(SQ_GPR_RESOURCE_MGMT_2, sq_gpr_resource_mgmt_2);
3745 0 : WREG32(SQ_GPR_RESOURCE_MGMT_3, sq_gpr_resource_mgmt_3);
3746 0 : WREG32(SQ_THREAD_RESOURCE_MGMT, sq_thread_resource_mgmt);
3747 0 : WREG32(SQ_THREAD_RESOURCE_MGMT_2, sq_thread_resource_mgmt_2);
3748 0 : WREG32(SQ_STACK_RESOURCE_MGMT_1, sq_stack_resource_mgmt_1);
3749 0 : WREG32(SQ_STACK_RESOURCE_MGMT_2, sq_stack_resource_mgmt_2);
3750 0 : WREG32(SQ_STACK_RESOURCE_MGMT_3, sq_stack_resource_mgmt_3);
3751 0 : WREG32(SQ_DYN_GPR_CNTL_PS_FLUSH_REQ, 0);
3752 0 : WREG32(SQ_LDS_RESOURCE_MGMT, sq_lds_resource_mgmt);
3753 :
3754 0 : WREG32(PA_SC_FORCE_EOV_MAX_CNTS, (FORCE_EOV_MAX_CLK_CNT(4095) |
3755 : FORCE_EOV_MAX_REZ_CNT(255)));
3756 :
3757 0 : switch (rdev->family) {
3758 : case CHIP_CEDAR:
3759 : case CHIP_PALM:
3760 : case CHIP_SUMO:
3761 : case CHIP_SUMO2:
3762 : case CHIP_CAICOS:
3763 : vgt_cache_invalidation = CACHE_INVALIDATION(TC_ONLY);
3764 0 : break;
3765 : default:
3766 : vgt_cache_invalidation = CACHE_INVALIDATION(VC_AND_TC);
3767 0 : break;
3768 : }
3769 0 : vgt_cache_invalidation |= AUTO_INVLD_EN(ES_AND_GS_AUTO);
3770 0 : WREG32(VGT_CACHE_INVALIDATION, vgt_cache_invalidation);
3771 :
3772 0 : WREG32(VGT_GS_VERTEX_REUSE, 16);
3773 0 : WREG32(PA_SU_LINE_STIPPLE_VALUE, 0);
3774 0 : WREG32(PA_SC_LINE_STIPPLE_STATE, 0);
3775 :
3776 0 : WREG32(VGT_VERTEX_REUSE_BLOCK_CNTL, 14);
3777 0 : WREG32(VGT_OUT_DEALLOC_CNTL, 16);
3778 :
3779 0 : WREG32(CB_PERF_CTR0_SEL_0, 0);
3780 0 : WREG32(CB_PERF_CTR0_SEL_1, 0);
3781 0 : WREG32(CB_PERF_CTR1_SEL_0, 0);
3782 0 : WREG32(CB_PERF_CTR1_SEL_1, 0);
3783 0 : WREG32(CB_PERF_CTR2_SEL_0, 0);
3784 0 : WREG32(CB_PERF_CTR2_SEL_1, 0);
3785 0 : WREG32(CB_PERF_CTR3_SEL_0, 0);
3786 0 : WREG32(CB_PERF_CTR3_SEL_1, 0);
3787 :
3788 : /* clear render buffer base addresses */
3789 0 : WREG32(CB_COLOR0_BASE, 0);
3790 0 : WREG32(CB_COLOR1_BASE, 0);
3791 0 : WREG32(CB_COLOR2_BASE, 0);
3792 0 : WREG32(CB_COLOR3_BASE, 0);
3793 0 : WREG32(CB_COLOR4_BASE, 0);
3794 0 : WREG32(CB_COLOR5_BASE, 0);
3795 0 : WREG32(CB_COLOR6_BASE, 0);
3796 0 : WREG32(CB_COLOR7_BASE, 0);
3797 0 : WREG32(CB_COLOR8_BASE, 0);
3798 0 : WREG32(CB_COLOR9_BASE, 0);
3799 0 : WREG32(CB_COLOR10_BASE, 0);
3800 0 : WREG32(CB_COLOR11_BASE, 0);
3801 :
3802 : /* set the shader const cache sizes to 0 */
3803 0 : for (i = SQ_ALU_CONST_BUFFER_SIZE_PS_0; i < 0x28200; i += 4)
3804 0 : WREG32(i, 0);
3805 0 : for (i = SQ_ALU_CONST_BUFFER_SIZE_HS_0; i < 0x29000; i += 4)
3806 0 : WREG32(i, 0);
3807 :
3808 0 : tmp = RREG32(HDP_MISC_CNTL);
3809 0 : tmp |= HDP_FLUSH_INVALIDATE_CACHE;
3810 0 : WREG32(HDP_MISC_CNTL, tmp);
3811 :
3812 0 : hdp_host_path_cntl = RREG32(HDP_HOST_PATH_CNTL);
3813 0 : WREG32(HDP_HOST_PATH_CNTL, hdp_host_path_cntl);
3814 :
3815 0 : WREG32(PA_CL_ENHANCE, CLIP_VTX_REORDER_ENA | NUM_CLIP_SEQ(3));
3816 :
3817 0 : udelay(50);
3818 :
3819 0 : }
3820 :
3821 0 : int evergreen_mc_init(struct radeon_device *rdev)
3822 : {
3823 : u32 tmp;
3824 : int chansize, numchan;
3825 :
3826 : /* Get VRAM informations */
3827 0 : rdev->mc.vram_is_ddr = true;
3828 0 : if ((rdev->family == CHIP_PALM) ||
3829 0 : (rdev->family == CHIP_SUMO) ||
3830 0 : (rdev->family == CHIP_SUMO2))
3831 0 : tmp = RREG32(FUS_MC_ARB_RAMCFG);
3832 : else
3833 0 : tmp = RREG32(MC_ARB_RAMCFG);
3834 0 : if (tmp & CHANSIZE_OVERRIDE) {
3835 : chansize = 16;
3836 0 : } else if (tmp & CHANSIZE_MASK) {
3837 : chansize = 64;
3838 0 : } else {
3839 : chansize = 32;
3840 : }
3841 0 : tmp = RREG32(MC_SHARED_CHMAP);
3842 0 : switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
3843 : case 0:
3844 : default:
3845 : numchan = 1;
3846 0 : break;
3847 : case 1:
3848 : numchan = 2;
3849 0 : break;
3850 : case 2:
3851 : numchan = 4;
3852 0 : break;
3853 : case 3:
3854 : numchan = 8;
3855 0 : break;
3856 : }
3857 0 : rdev->mc.vram_width = numchan * chansize;
3858 : /* Could aper size report 0 ? */
3859 0 : rdev->mc.aper_base = rdev->fb_aper_offset;
3860 0 : rdev->mc.aper_size = rdev->fb_aper_size;
3861 : /* Setup GPU memory space */
3862 0 : if ((rdev->family == CHIP_PALM) ||
3863 0 : (rdev->family == CHIP_SUMO) ||
3864 0 : (rdev->family == CHIP_SUMO2)) {
3865 : /* size in bytes on fusion */
3866 0 : rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE);
3867 0 : rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE);
3868 0 : } else {
3869 : /* size in MB on evergreen/cayman/tn */
3870 0 : rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL;
3871 0 : rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL;
3872 : }
3873 0 : rdev->mc.visible_vram_size = rdev->mc.aper_size;
3874 0 : r700_vram_gtt_location(rdev, &rdev->mc);
3875 0 : radeon_update_bandwidth_info(rdev);
3876 :
3877 0 : return 0;
3878 : }
3879 :
3880 0 : void evergreen_print_gpu_status_regs(struct radeon_device *rdev)
3881 : {
3882 : dev_info(rdev->dev, " GRBM_STATUS = 0x%08X\n",
3883 : RREG32(GRBM_STATUS));
3884 : dev_info(rdev->dev, " GRBM_STATUS_SE0 = 0x%08X\n",
3885 : RREG32(GRBM_STATUS_SE0));
3886 : dev_info(rdev->dev, " GRBM_STATUS_SE1 = 0x%08X\n",
3887 : RREG32(GRBM_STATUS_SE1));
3888 : dev_info(rdev->dev, " SRBM_STATUS = 0x%08X\n",
3889 : RREG32(SRBM_STATUS));
3890 : dev_info(rdev->dev, " SRBM_STATUS2 = 0x%08X\n",
3891 : RREG32(SRBM_STATUS2));
3892 : dev_info(rdev->dev, " R_008674_CP_STALLED_STAT1 = 0x%08X\n",
3893 : RREG32(CP_STALLED_STAT1));
3894 : dev_info(rdev->dev, " R_008678_CP_STALLED_STAT2 = 0x%08X\n",
3895 : RREG32(CP_STALLED_STAT2));
3896 : dev_info(rdev->dev, " R_00867C_CP_BUSY_STAT = 0x%08X\n",
3897 : RREG32(CP_BUSY_STAT));
3898 : dev_info(rdev->dev, " R_008680_CP_STAT = 0x%08X\n",
3899 : RREG32(CP_STAT));
3900 : dev_info(rdev->dev, " R_00D034_DMA_STATUS_REG = 0x%08X\n",
3901 : RREG32(DMA_STATUS_REG));
3902 0 : if (rdev->family >= CHIP_CAYMAN) {
3903 : dev_info(rdev->dev, " R_00D834_DMA_STATUS_REG = 0x%08X\n",
3904 : RREG32(DMA_STATUS_REG + 0x800));
3905 : }
3906 0 : }
3907 :
3908 0 : bool evergreen_is_display_hung(struct radeon_device *rdev)
3909 : {
3910 : u32 crtc_hung = 0;
3911 0 : u32 crtc_status[6];
3912 : u32 i, j, tmp;
3913 :
3914 0 : for (i = 0; i < rdev->num_crtc; i++) {
3915 0 : if (RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]) & EVERGREEN_CRTC_MASTER_EN) {
3916 0 : crtc_status[i] = RREG32(EVERGREEN_CRTC_STATUS_HV_COUNT + crtc_offsets[i]);
3917 0 : crtc_hung |= (1 << i);
3918 0 : }
3919 : }
3920 :
3921 0 : for (j = 0; j < 10; j++) {
3922 0 : for (i = 0; i < rdev->num_crtc; i++) {
3923 0 : if (crtc_hung & (1 << i)) {
3924 0 : tmp = RREG32(EVERGREEN_CRTC_STATUS_HV_COUNT + crtc_offsets[i]);
3925 0 : if (tmp != crtc_status[i])
3926 0 : crtc_hung &= ~(1 << i);
3927 : }
3928 : }
3929 0 : if (crtc_hung == 0)
3930 0 : return false;
3931 0 : udelay(100);
3932 : }
3933 :
3934 0 : return true;
3935 0 : }
3936 :
3937 0 : u32 evergreen_gpu_check_soft_reset(struct radeon_device *rdev)
3938 : {
3939 : u32 reset_mask = 0;
3940 : u32 tmp;
3941 :
3942 : /* GRBM_STATUS */
3943 0 : tmp = RREG32(GRBM_STATUS);
3944 0 : if (tmp & (PA_BUSY | SC_BUSY |
3945 : SH_BUSY | SX_BUSY |
3946 : TA_BUSY | VGT_BUSY |
3947 : DB_BUSY | CB_BUSY |
3948 : SPI_BUSY | VGT_BUSY_NO_DMA))
3949 0 : reset_mask |= RADEON_RESET_GFX;
3950 :
3951 0 : if (tmp & (CF_RQ_PENDING | PF_RQ_PENDING |
3952 : CP_BUSY | CP_COHERENCY_BUSY))
3953 0 : reset_mask |= RADEON_RESET_CP;
3954 :
3955 0 : if (tmp & GRBM_EE_BUSY)
3956 0 : reset_mask |= RADEON_RESET_GRBM | RADEON_RESET_GFX | RADEON_RESET_CP;
3957 :
3958 : /* DMA_STATUS_REG */
3959 0 : tmp = RREG32(DMA_STATUS_REG);
3960 0 : if (!(tmp & DMA_IDLE))
3961 0 : reset_mask |= RADEON_RESET_DMA;
3962 :
3963 : /* SRBM_STATUS2 */
3964 0 : tmp = RREG32(SRBM_STATUS2);
3965 0 : if (tmp & DMA_BUSY)
3966 0 : reset_mask |= RADEON_RESET_DMA;
3967 :
3968 : /* SRBM_STATUS */
3969 0 : tmp = RREG32(SRBM_STATUS);
3970 0 : if (tmp & (RLC_RQ_PENDING | RLC_BUSY))
3971 0 : reset_mask |= RADEON_RESET_RLC;
3972 :
3973 0 : if (tmp & IH_BUSY)
3974 0 : reset_mask |= RADEON_RESET_IH;
3975 :
3976 0 : if (tmp & SEM_BUSY)
3977 0 : reset_mask |= RADEON_RESET_SEM;
3978 :
3979 0 : if (tmp & GRBM_RQ_PENDING)
3980 0 : reset_mask |= RADEON_RESET_GRBM;
3981 :
3982 0 : if (tmp & VMC_BUSY)
3983 0 : reset_mask |= RADEON_RESET_VMC;
3984 :
3985 0 : if (tmp & (MCB_BUSY | MCB_NON_DISPLAY_BUSY |
3986 : MCC_BUSY | MCD_BUSY))
3987 0 : reset_mask |= RADEON_RESET_MC;
3988 :
3989 0 : if (evergreen_is_display_hung(rdev))
3990 0 : reset_mask |= RADEON_RESET_DISPLAY;
3991 :
3992 : /* VM_L2_STATUS */
3993 0 : tmp = RREG32(VM_L2_STATUS);
3994 0 : if (tmp & L2_BUSY)
3995 0 : reset_mask |= RADEON_RESET_VMC;
3996 :
3997 : /* Skip MC reset as it's mostly likely not hung, just busy */
3998 0 : if (reset_mask & RADEON_RESET_MC) {
3999 : DRM_DEBUG("MC busy: 0x%08X, clearing.\n", reset_mask);
4000 0 : reset_mask &= ~RADEON_RESET_MC;
4001 0 : }
4002 :
4003 0 : return reset_mask;
4004 : }
4005 :
4006 0 : static void evergreen_gpu_soft_reset(struct radeon_device *rdev, u32 reset_mask)
4007 : {
4008 0 : struct evergreen_mc_save save;
4009 : u32 grbm_soft_reset = 0, srbm_soft_reset = 0;
4010 : u32 tmp;
4011 :
4012 0 : if (reset_mask == 0)
4013 0 : return;
4014 :
4015 : dev_info(rdev->dev, "GPU softreset: 0x%08X\n", reset_mask);
4016 :
4017 0 : evergreen_print_gpu_status_regs(rdev);
4018 :
4019 : /* Disable CP parsing/prefetching */
4020 0 : WREG32(CP_ME_CNTL, CP_ME_HALT | CP_PFP_HALT);
4021 :
4022 0 : if (reset_mask & RADEON_RESET_DMA) {
4023 : /* Disable DMA */
4024 0 : tmp = RREG32(DMA_RB_CNTL);
4025 0 : tmp &= ~DMA_RB_ENABLE;
4026 0 : WREG32(DMA_RB_CNTL, tmp);
4027 0 : }
4028 :
4029 0 : udelay(50);
4030 :
4031 0 : evergreen_mc_stop(rdev, &save);
4032 0 : if (evergreen_mc_wait_for_idle(rdev)) {
4033 0 : dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
4034 0 : }
4035 :
4036 0 : if (reset_mask & (RADEON_RESET_GFX | RADEON_RESET_COMPUTE)) {
4037 : grbm_soft_reset |= SOFT_RESET_DB |
4038 : SOFT_RESET_CB |
4039 : SOFT_RESET_PA |
4040 : SOFT_RESET_SC |
4041 : SOFT_RESET_SPI |
4042 : SOFT_RESET_SX |
4043 : SOFT_RESET_SH |
4044 : SOFT_RESET_TC |
4045 : SOFT_RESET_TA |
4046 : SOFT_RESET_VC |
4047 : SOFT_RESET_VGT;
4048 0 : }
4049 :
4050 0 : if (reset_mask & RADEON_RESET_CP) {
4051 0 : grbm_soft_reset |= SOFT_RESET_CP |
4052 : SOFT_RESET_VGT;
4053 :
4054 : srbm_soft_reset |= SOFT_RESET_GRBM;
4055 0 : }
4056 :
4057 0 : if (reset_mask & RADEON_RESET_DMA)
4058 0 : srbm_soft_reset |= SOFT_RESET_DMA;
4059 :
4060 0 : if (reset_mask & RADEON_RESET_DISPLAY)
4061 0 : srbm_soft_reset |= SOFT_RESET_DC;
4062 :
4063 0 : if (reset_mask & RADEON_RESET_RLC)
4064 0 : srbm_soft_reset |= SOFT_RESET_RLC;
4065 :
4066 0 : if (reset_mask & RADEON_RESET_SEM)
4067 0 : srbm_soft_reset |= SOFT_RESET_SEM;
4068 :
4069 0 : if (reset_mask & RADEON_RESET_IH)
4070 0 : srbm_soft_reset |= SOFT_RESET_IH;
4071 :
4072 0 : if (reset_mask & RADEON_RESET_GRBM)
4073 0 : srbm_soft_reset |= SOFT_RESET_GRBM;
4074 :
4075 0 : if (reset_mask & RADEON_RESET_VMC)
4076 0 : srbm_soft_reset |= SOFT_RESET_VMC;
4077 :
4078 0 : if (!(rdev->flags & RADEON_IS_IGP)) {
4079 0 : if (reset_mask & RADEON_RESET_MC)
4080 0 : srbm_soft_reset |= SOFT_RESET_MC;
4081 : }
4082 :
4083 0 : if (grbm_soft_reset) {
4084 0 : tmp = RREG32(GRBM_SOFT_RESET);
4085 0 : tmp |= grbm_soft_reset;
4086 : dev_info(rdev->dev, "GRBM_SOFT_RESET=0x%08X\n", tmp);
4087 0 : WREG32(GRBM_SOFT_RESET, tmp);
4088 0 : tmp = RREG32(GRBM_SOFT_RESET);
4089 :
4090 0 : udelay(50);
4091 :
4092 0 : tmp &= ~grbm_soft_reset;
4093 0 : WREG32(GRBM_SOFT_RESET, tmp);
4094 0 : tmp = RREG32(GRBM_SOFT_RESET);
4095 0 : }
4096 :
4097 0 : if (srbm_soft_reset) {
4098 0 : tmp = RREG32(SRBM_SOFT_RESET);
4099 0 : tmp |= srbm_soft_reset;
4100 : dev_info(rdev->dev, "SRBM_SOFT_RESET=0x%08X\n", tmp);
4101 0 : WREG32(SRBM_SOFT_RESET, tmp);
4102 0 : tmp = RREG32(SRBM_SOFT_RESET);
4103 :
4104 0 : udelay(50);
4105 :
4106 0 : tmp &= ~srbm_soft_reset;
4107 0 : WREG32(SRBM_SOFT_RESET, tmp);
4108 0 : tmp = RREG32(SRBM_SOFT_RESET);
4109 0 : }
4110 :
4111 : /* Wait a little for things to settle down */
4112 0 : udelay(50);
4113 :
4114 0 : evergreen_mc_resume(rdev, &save);
4115 0 : udelay(50);
4116 :
4117 0 : evergreen_print_gpu_status_regs(rdev);
4118 0 : }
4119 :
4120 0 : void evergreen_gpu_pci_config_reset(struct radeon_device *rdev)
4121 : {
4122 0 : struct evergreen_mc_save save;
4123 : u32 tmp, i;
4124 :
4125 : dev_info(rdev->dev, "GPU pci config reset\n");
4126 :
4127 : /* disable dpm? */
4128 :
4129 : /* Disable CP parsing/prefetching */
4130 0 : WREG32(CP_ME_CNTL, CP_ME_HALT | CP_PFP_HALT);
4131 0 : udelay(50);
4132 : /* Disable DMA */
4133 0 : tmp = RREG32(DMA_RB_CNTL);
4134 0 : tmp &= ~DMA_RB_ENABLE;
4135 0 : WREG32(DMA_RB_CNTL, tmp);
4136 : /* XXX other engines? */
4137 :
4138 : /* halt the rlc */
4139 0 : r600_rlc_stop(rdev);
4140 :
4141 0 : udelay(50);
4142 :
4143 : /* set mclk/sclk to bypass */
4144 0 : rv770_set_clk_bypass_mode(rdev);
4145 : /* disable BM */
4146 : pci_clear_master(rdev->pdev);
4147 : /* disable mem access */
4148 0 : evergreen_mc_stop(rdev, &save);
4149 0 : if (evergreen_mc_wait_for_idle(rdev)) {
4150 0 : dev_warn(rdev->dev, "Wait for MC idle timed out !\n");
4151 0 : }
4152 : /* reset */
4153 0 : radeon_pci_config_reset(rdev);
4154 : /* wait for asic to come out of reset */
4155 0 : for (i = 0; i < rdev->usec_timeout; i++) {
4156 0 : if (RREG32(CONFIG_MEMSIZE) != 0xffffffff)
4157 : break;
4158 0 : udelay(1);
4159 : }
4160 0 : }
4161 :
4162 0 : int evergreen_asic_reset(struct radeon_device *rdev)
4163 : {
4164 : u32 reset_mask;
4165 :
4166 0 : reset_mask = evergreen_gpu_check_soft_reset(rdev);
4167 :
4168 0 : if (reset_mask)
4169 0 : r600_set_bios_scratch_engine_hung(rdev, true);
4170 :
4171 : /* try soft reset */
4172 0 : evergreen_gpu_soft_reset(rdev, reset_mask);
4173 :
4174 0 : reset_mask = evergreen_gpu_check_soft_reset(rdev);
4175 :
4176 : /* try pci config reset */
4177 0 : if (reset_mask && radeon_hard_reset)
4178 0 : evergreen_gpu_pci_config_reset(rdev);
4179 :
4180 0 : reset_mask = evergreen_gpu_check_soft_reset(rdev);
4181 :
4182 0 : if (!reset_mask)
4183 0 : r600_set_bios_scratch_engine_hung(rdev, false);
4184 :
4185 0 : return 0;
4186 : }
4187 :
4188 : /**
4189 : * evergreen_gfx_is_lockup - Check if the GFX engine is locked up
4190 : *
4191 : * @rdev: radeon_device pointer
4192 : * @ring: radeon_ring structure holding ring information
4193 : *
4194 : * Check if the GFX engine is locked up.
4195 : * Returns true if the engine appears to be locked up, false if not.
4196 : */
4197 0 : bool evergreen_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
4198 : {
4199 0 : u32 reset_mask = evergreen_gpu_check_soft_reset(rdev);
4200 :
4201 0 : if (!(reset_mask & (RADEON_RESET_GFX |
4202 : RADEON_RESET_COMPUTE |
4203 : RADEON_RESET_CP))) {
4204 0 : radeon_ring_lockup_update(rdev, ring);
4205 0 : return false;
4206 : }
4207 0 : return radeon_ring_test_lockup(rdev, ring);
4208 0 : }
4209 :
4210 : /*
4211 : * RLC
4212 : */
4213 : #define RLC_SAVE_RESTORE_LIST_END_MARKER 0x00000000
4214 : #define RLC_CLEAR_STATE_END_MARKER 0x00000001
4215 :
4216 0 : void sumo_rlc_fini(struct radeon_device *rdev)
4217 : {
4218 : int r;
4219 :
4220 : /* save restore block */
4221 0 : if (rdev->rlc.save_restore_obj) {
4222 0 : r = radeon_bo_reserve(rdev->rlc.save_restore_obj, false);
4223 0 : if (unlikely(r != 0))
4224 0 : dev_warn(rdev->dev, "(%d) reserve RLC sr bo failed\n", r);
4225 0 : radeon_bo_unpin(rdev->rlc.save_restore_obj);
4226 0 : radeon_bo_unreserve(rdev->rlc.save_restore_obj);
4227 :
4228 0 : radeon_bo_unref(&rdev->rlc.save_restore_obj);
4229 0 : rdev->rlc.save_restore_obj = NULL;
4230 0 : }
4231 :
4232 : /* clear state block */
4233 0 : if (rdev->rlc.clear_state_obj) {
4234 0 : r = radeon_bo_reserve(rdev->rlc.clear_state_obj, false);
4235 0 : if (unlikely(r != 0))
4236 0 : dev_warn(rdev->dev, "(%d) reserve RLC c bo failed\n", r);
4237 0 : radeon_bo_unpin(rdev->rlc.clear_state_obj);
4238 0 : radeon_bo_unreserve(rdev->rlc.clear_state_obj);
4239 :
4240 0 : radeon_bo_unref(&rdev->rlc.clear_state_obj);
4241 0 : rdev->rlc.clear_state_obj = NULL;
4242 0 : }
4243 :
4244 : /* clear state block */
4245 0 : if (rdev->rlc.cp_table_obj) {
4246 0 : r = radeon_bo_reserve(rdev->rlc.cp_table_obj, false);
4247 0 : if (unlikely(r != 0))
4248 0 : dev_warn(rdev->dev, "(%d) reserve RLC cp table bo failed\n", r);
4249 0 : radeon_bo_unpin(rdev->rlc.cp_table_obj);
4250 0 : radeon_bo_unreserve(rdev->rlc.cp_table_obj);
4251 :
4252 0 : radeon_bo_unref(&rdev->rlc.cp_table_obj);
4253 0 : rdev->rlc.cp_table_obj = NULL;
4254 0 : }
4255 0 : }
4256 :
4257 : #define CP_ME_TABLE_SIZE 96
4258 :
4259 0 : int sumo_rlc_init(struct radeon_device *rdev)
4260 : {
4261 : const u32 *src_ptr;
4262 : volatile u32 *dst_ptr;
4263 : u32 dws, data, i, j, k, reg_num;
4264 : u32 reg_list_num, reg_list_hdr_blk_index, reg_list_blk_index = 0;
4265 : u64 reg_list_mc_addr;
4266 : const struct cs_section_def *cs_data;
4267 : int r;
4268 :
4269 0 : src_ptr = rdev->rlc.reg_list;
4270 0 : dws = rdev->rlc.reg_list_size;
4271 0 : if (rdev->family >= CHIP_BONAIRE) {
4272 0 : dws += (5 * 16) + 48 + 48 + 64;
4273 0 : }
4274 0 : cs_data = rdev->rlc.cs_data;
4275 :
4276 0 : if (src_ptr) {
4277 : /* save restore block */
4278 0 : if (rdev->rlc.save_restore_obj == NULL) {
4279 0 : r = radeon_bo_create(rdev, dws * 4, PAGE_SIZE, true,
4280 : RADEON_GEM_DOMAIN_VRAM, 0, NULL,
4281 : NULL, &rdev->rlc.save_restore_obj);
4282 0 : if (r) {
4283 0 : dev_warn(rdev->dev, "(%d) create RLC sr bo failed\n", r);
4284 0 : return r;
4285 : }
4286 : }
4287 :
4288 0 : r = radeon_bo_reserve(rdev->rlc.save_restore_obj, false);
4289 0 : if (unlikely(r != 0)) {
4290 0 : sumo_rlc_fini(rdev);
4291 0 : return r;
4292 : }
4293 0 : r = radeon_bo_pin(rdev->rlc.save_restore_obj, RADEON_GEM_DOMAIN_VRAM,
4294 0 : &rdev->rlc.save_restore_gpu_addr);
4295 0 : if (r) {
4296 0 : radeon_bo_unreserve(rdev->rlc.save_restore_obj);
4297 0 : dev_warn(rdev->dev, "(%d) pin RLC sr bo failed\n", r);
4298 0 : sumo_rlc_fini(rdev);
4299 0 : return r;
4300 : }
4301 :
4302 0 : r = radeon_bo_kmap(rdev->rlc.save_restore_obj, (void **)&rdev->rlc.sr_ptr);
4303 0 : if (r) {
4304 0 : dev_warn(rdev->dev, "(%d) map RLC sr bo failed\n", r);
4305 0 : sumo_rlc_fini(rdev);
4306 0 : return r;
4307 : }
4308 : /* write the sr buffer */
4309 0 : dst_ptr = rdev->rlc.sr_ptr;
4310 0 : if (rdev->family >= CHIP_TAHITI) {
4311 : /* SI */
4312 0 : for (i = 0; i < rdev->rlc.reg_list_size; i++)
4313 0 : dst_ptr[i] = cpu_to_le32(src_ptr[i]);
4314 : } else {
4315 : /* ON/LN/TN */
4316 : /* format:
4317 : * dw0: (reg2 << 16) | reg1
4318 : * dw1: reg1 save space
4319 : * dw2: reg2 save space
4320 : */
4321 0 : for (i = 0; i < dws; i++) {
4322 0 : data = src_ptr[i] >> 2;
4323 0 : i++;
4324 0 : if (i < dws)
4325 0 : data |= (src_ptr[i] >> 2) << 16;
4326 0 : j = (((i - 1) * 3) / 2);
4327 0 : dst_ptr[j] = cpu_to_le32(data);
4328 : }
4329 0 : j = ((i * 3) / 2);
4330 0 : dst_ptr[j] = cpu_to_le32(RLC_SAVE_RESTORE_LIST_END_MARKER);
4331 : }
4332 0 : radeon_bo_kunmap(rdev->rlc.save_restore_obj);
4333 0 : radeon_bo_unreserve(rdev->rlc.save_restore_obj);
4334 0 : }
4335 :
4336 0 : if (cs_data) {
4337 : /* clear state block */
4338 0 : if (rdev->family >= CHIP_BONAIRE) {
4339 0 : rdev->rlc.clear_state_size = dws = cik_get_csb_size(rdev);
4340 0 : } else if (rdev->family >= CHIP_TAHITI) {
4341 0 : rdev->rlc.clear_state_size = si_get_csb_size(rdev);
4342 0 : dws = rdev->rlc.clear_state_size + (256 / 4);
4343 0 : } else {
4344 : reg_list_num = 0;
4345 : dws = 0;
4346 0 : for (i = 0; cs_data[i].section != NULL; i++) {
4347 0 : for (j = 0; cs_data[i].section[j].extent != NULL; j++) {
4348 0 : reg_list_num++;
4349 0 : dws += cs_data[i].section[j].reg_count;
4350 : }
4351 : }
4352 0 : reg_list_blk_index = (3 * reg_list_num + 2);
4353 0 : dws += reg_list_blk_index;
4354 0 : rdev->rlc.clear_state_size = dws;
4355 : }
4356 :
4357 0 : if (rdev->rlc.clear_state_obj == NULL) {
4358 0 : r = radeon_bo_create(rdev, dws * 4, PAGE_SIZE, true,
4359 : RADEON_GEM_DOMAIN_VRAM, 0, NULL,
4360 : NULL, &rdev->rlc.clear_state_obj);
4361 0 : if (r) {
4362 0 : dev_warn(rdev->dev, "(%d) create RLC c bo failed\n", r);
4363 0 : sumo_rlc_fini(rdev);
4364 0 : return r;
4365 : }
4366 : }
4367 0 : r = radeon_bo_reserve(rdev->rlc.clear_state_obj, false);
4368 0 : if (unlikely(r != 0)) {
4369 0 : sumo_rlc_fini(rdev);
4370 0 : return r;
4371 : }
4372 0 : r = radeon_bo_pin(rdev->rlc.clear_state_obj, RADEON_GEM_DOMAIN_VRAM,
4373 0 : &rdev->rlc.clear_state_gpu_addr);
4374 0 : if (r) {
4375 0 : radeon_bo_unreserve(rdev->rlc.clear_state_obj);
4376 0 : dev_warn(rdev->dev, "(%d) pin RLC c bo failed\n", r);
4377 0 : sumo_rlc_fini(rdev);
4378 0 : return r;
4379 : }
4380 :
4381 0 : r = radeon_bo_kmap(rdev->rlc.clear_state_obj, (void **)&rdev->rlc.cs_ptr);
4382 0 : if (r) {
4383 0 : dev_warn(rdev->dev, "(%d) map RLC c bo failed\n", r);
4384 0 : sumo_rlc_fini(rdev);
4385 0 : return r;
4386 : }
4387 : /* set up the cs buffer */
4388 0 : dst_ptr = rdev->rlc.cs_ptr;
4389 0 : if (rdev->family >= CHIP_BONAIRE) {
4390 0 : cik_get_csb_buffer(rdev, dst_ptr);
4391 0 : } else if (rdev->family >= CHIP_TAHITI) {
4392 0 : reg_list_mc_addr = rdev->rlc.clear_state_gpu_addr + 256;
4393 0 : dst_ptr[0] = cpu_to_le32(upper_32_bits(reg_list_mc_addr));
4394 0 : dst_ptr[1] = cpu_to_le32(lower_32_bits(reg_list_mc_addr));
4395 0 : dst_ptr[2] = cpu_to_le32(rdev->rlc.clear_state_size);
4396 0 : si_get_csb_buffer(rdev, &dst_ptr[(256/4)]);
4397 0 : } else {
4398 : reg_list_hdr_blk_index = 0;
4399 0 : reg_list_mc_addr = rdev->rlc.clear_state_gpu_addr + (reg_list_blk_index * 4);
4400 0 : data = upper_32_bits(reg_list_mc_addr);
4401 0 : dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
4402 : reg_list_hdr_blk_index++;
4403 0 : for (i = 0; cs_data[i].section != NULL; i++) {
4404 0 : for (j = 0; cs_data[i].section[j].extent != NULL; j++) {
4405 0 : reg_num = cs_data[i].section[j].reg_count;
4406 0 : data = reg_list_mc_addr & 0xffffffff;
4407 0 : dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
4408 0 : reg_list_hdr_blk_index++;
4409 :
4410 0 : data = (cs_data[i].section[j].reg_index * 4) & 0xffffffff;
4411 0 : dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
4412 0 : reg_list_hdr_blk_index++;
4413 :
4414 0 : data = 0x08000000 | (reg_num * 4);
4415 0 : dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
4416 0 : reg_list_hdr_blk_index++;
4417 :
4418 0 : for (k = 0; k < reg_num; k++) {
4419 0 : data = cs_data[i].section[j].extent[k];
4420 0 : dst_ptr[reg_list_blk_index + k] = cpu_to_le32(data);
4421 : }
4422 0 : reg_list_mc_addr += reg_num * 4;
4423 0 : reg_list_blk_index += reg_num;
4424 : }
4425 : }
4426 0 : dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(RLC_CLEAR_STATE_END_MARKER);
4427 : }
4428 0 : radeon_bo_kunmap(rdev->rlc.clear_state_obj);
4429 0 : radeon_bo_unreserve(rdev->rlc.clear_state_obj);
4430 0 : }
4431 :
4432 0 : if (rdev->rlc.cp_table_size) {
4433 0 : if (rdev->rlc.cp_table_obj == NULL) {
4434 0 : r = radeon_bo_create(rdev, rdev->rlc.cp_table_size,
4435 : PAGE_SIZE, true,
4436 : RADEON_GEM_DOMAIN_VRAM, 0, NULL,
4437 : NULL, &rdev->rlc.cp_table_obj);
4438 0 : if (r) {
4439 0 : dev_warn(rdev->dev, "(%d) create RLC cp table bo failed\n", r);
4440 0 : sumo_rlc_fini(rdev);
4441 0 : return r;
4442 : }
4443 : }
4444 :
4445 0 : r = radeon_bo_reserve(rdev->rlc.cp_table_obj, false);
4446 0 : if (unlikely(r != 0)) {
4447 0 : dev_warn(rdev->dev, "(%d) reserve RLC cp table bo failed\n", r);
4448 0 : sumo_rlc_fini(rdev);
4449 0 : return r;
4450 : }
4451 0 : r = radeon_bo_pin(rdev->rlc.cp_table_obj, RADEON_GEM_DOMAIN_VRAM,
4452 0 : &rdev->rlc.cp_table_gpu_addr);
4453 0 : if (r) {
4454 0 : radeon_bo_unreserve(rdev->rlc.cp_table_obj);
4455 0 : dev_warn(rdev->dev, "(%d) pin RLC cp_table bo failed\n", r);
4456 0 : sumo_rlc_fini(rdev);
4457 0 : return r;
4458 : }
4459 0 : r = radeon_bo_kmap(rdev->rlc.cp_table_obj, (void **)&rdev->rlc.cp_table_ptr);
4460 0 : if (r) {
4461 0 : dev_warn(rdev->dev, "(%d) map RLC cp table bo failed\n", r);
4462 0 : sumo_rlc_fini(rdev);
4463 0 : return r;
4464 : }
4465 :
4466 0 : cik_init_cp_pg_table(rdev);
4467 :
4468 0 : radeon_bo_kunmap(rdev->rlc.cp_table_obj);
4469 0 : radeon_bo_unreserve(rdev->rlc.cp_table_obj);
4470 :
4471 0 : }
4472 :
4473 0 : return 0;
4474 0 : }
4475 :
4476 0 : static void evergreen_rlc_start(struct radeon_device *rdev)
4477 : {
4478 : u32 mask = RLC_ENABLE;
4479 :
4480 0 : if (rdev->flags & RADEON_IS_IGP) {
4481 : mask |= GFX_POWER_GATING_ENABLE | GFX_POWER_GATING_SRC;
4482 0 : }
4483 :
4484 0 : WREG32(RLC_CNTL, mask);
4485 0 : }
4486 :
4487 0 : int evergreen_rlc_resume(struct radeon_device *rdev)
4488 : {
4489 : u32 i;
4490 : const __be32 *fw_data;
4491 :
4492 0 : if (!rdev->rlc_fw)
4493 0 : return -EINVAL;
4494 :
4495 0 : r600_rlc_stop(rdev);
4496 :
4497 0 : WREG32(RLC_HB_CNTL, 0);
4498 :
4499 0 : if (rdev->flags & RADEON_IS_IGP) {
4500 0 : if (rdev->family == CHIP_ARUBA) {
4501 : u32 always_on_bitmap =
4502 0 : 3 | (3 << (16 * rdev->config.cayman.max_shader_engines));
4503 : /* find out the number of active simds */
4504 0 : u32 tmp = (RREG32(CC_GC_SHADER_PIPE_CONFIG) & 0xffff0000) >> 16;
4505 0 : tmp |= 0xffffffff << rdev->config.cayman.max_simds_per_se;
4506 0 : tmp = hweight32(~tmp);
4507 0 : if (tmp == rdev->config.cayman.max_simds_per_se) {
4508 0 : WREG32(TN_RLC_LB_ALWAYS_ACTIVE_SIMD_MASK, always_on_bitmap);
4509 0 : WREG32(TN_RLC_LB_PARAMS, 0x00601004);
4510 0 : WREG32(TN_RLC_LB_INIT_SIMD_MASK, 0xffffffff);
4511 0 : WREG32(TN_RLC_LB_CNTR_INIT, 0x00000000);
4512 0 : WREG32(TN_RLC_LB_CNTR_MAX, 0x00002000);
4513 0 : }
4514 0 : } else {
4515 0 : WREG32(RLC_HB_WPTR_LSB_ADDR, 0);
4516 0 : WREG32(RLC_HB_WPTR_MSB_ADDR, 0);
4517 : }
4518 0 : WREG32(TN_RLC_SAVE_AND_RESTORE_BASE, rdev->rlc.save_restore_gpu_addr >> 8);
4519 0 : WREG32(TN_RLC_CLEAR_STATE_RESTORE_BASE, rdev->rlc.clear_state_gpu_addr >> 8);
4520 0 : } else {
4521 0 : WREG32(RLC_HB_BASE, 0);
4522 0 : WREG32(RLC_HB_RPTR, 0);
4523 0 : WREG32(RLC_HB_WPTR, 0);
4524 0 : WREG32(RLC_HB_WPTR_LSB_ADDR, 0);
4525 0 : WREG32(RLC_HB_WPTR_MSB_ADDR, 0);
4526 : }
4527 0 : WREG32(RLC_MC_CNTL, 0);
4528 0 : WREG32(RLC_UCODE_CNTL, 0);
4529 :
4530 0 : fw_data = (const __be32 *)rdev->rlc_fw->data;
4531 0 : if (rdev->family >= CHIP_ARUBA) {
4532 0 : for (i = 0; i < ARUBA_RLC_UCODE_SIZE; i++) {
4533 0 : WREG32(RLC_UCODE_ADDR, i);
4534 0 : WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
4535 : }
4536 0 : } else if (rdev->family >= CHIP_CAYMAN) {
4537 0 : for (i = 0; i < CAYMAN_RLC_UCODE_SIZE; i++) {
4538 0 : WREG32(RLC_UCODE_ADDR, i);
4539 0 : WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
4540 : }
4541 : } else {
4542 0 : for (i = 0; i < EVERGREEN_RLC_UCODE_SIZE; i++) {
4543 0 : WREG32(RLC_UCODE_ADDR, i);
4544 0 : WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
4545 : }
4546 : }
4547 0 : WREG32(RLC_UCODE_ADDR, 0);
4548 :
4549 0 : evergreen_rlc_start(rdev);
4550 :
4551 0 : return 0;
4552 0 : }
4553 :
4554 : /* Interrupts */
4555 :
4556 0 : u32 evergreen_get_vblank_counter(struct radeon_device *rdev, int crtc)
4557 : {
4558 0 : if (crtc >= rdev->num_crtc)
4559 0 : return 0;
4560 : else
4561 0 : return RREG32(CRTC_STATUS_FRAME_COUNT + crtc_offsets[crtc]);
4562 0 : }
4563 :
4564 0 : void evergreen_disable_interrupt_state(struct radeon_device *rdev)
4565 : {
4566 : u32 tmp;
4567 :
4568 0 : if (rdev->family >= CHIP_CAYMAN) {
4569 0 : cayman_cp_int_cntl_setup(rdev, 0,
4570 : CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
4571 0 : cayman_cp_int_cntl_setup(rdev, 1, 0);
4572 0 : cayman_cp_int_cntl_setup(rdev, 2, 0);
4573 0 : tmp = RREG32(CAYMAN_DMA1_CNTL) & ~TRAP_ENABLE;
4574 0 : WREG32(CAYMAN_DMA1_CNTL, tmp);
4575 0 : } else
4576 0 : WREG32(CP_INT_CNTL, CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
4577 0 : tmp = RREG32(DMA_CNTL) & ~TRAP_ENABLE;
4578 0 : WREG32(DMA_CNTL, tmp);
4579 0 : WREG32(GRBM_INT_CNTL, 0);
4580 0 : WREG32(SRBM_INT_CNTL, 0);
4581 0 : WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
4582 0 : WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
4583 0 : if (rdev->num_crtc >= 4) {
4584 0 : WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
4585 0 : WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
4586 0 : }
4587 0 : if (rdev->num_crtc >= 6) {
4588 0 : WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
4589 0 : WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
4590 0 : }
4591 :
4592 0 : WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
4593 0 : WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
4594 0 : if (rdev->num_crtc >= 4) {
4595 0 : WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
4596 0 : WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
4597 0 : }
4598 0 : if (rdev->num_crtc >= 6) {
4599 0 : WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
4600 0 : WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
4601 0 : }
4602 :
4603 : /* only one DAC on DCE5 */
4604 0 : if (!ASIC_IS_DCE5(rdev))
4605 0 : WREG32(DACA_AUTODETECT_INT_CONTROL, 0);
4606 0 : WREG32(DACB_AUTODETECT_INT_CONTROL, 0);
4607 :
4608 0 : tmp = RREG32(DC_HPD1_INT_CONTROL) & DC_HPDx_INT_POLARITY;
4609 0 : WREG32(DC_HPD1_INT_CONTROL, tmp);
4610 0 : tmp = RREG32(DC_HPD2_INT_CONTROL) & DC_HPDx_INT_POLARITY;
4611 0 : WREG32(DC_HPD2_INT_CONTROL, tmp);
4612 0 : tmp = RREG32(DC_HPD3_INT_CONTROL) & DC_HPDx_INT_POLARITY;
4613 0 : WREG32(DC_HPD3_INT_CONTROL, tmp);
4614 0 : tmp = RREG32(DC_HPD4_INT_CONTROL) & DC_HPDx_INT_POLARITY;
4615 0 : WREG32(DC_HPD4_INT_CONTROL, tmp);
4616 0 : tmp = RREG32(DC_HPD5_INT_CONTROL) & DC_HPDx_INT_POLARITY;
4617 0 : WREG32(DC_HPD5_INT_CONTROL, tmp);
4618 0 : tmp = RREG32(DC_HPD6_INT_CONTROL) & DC_HPDx_INT_POLARITY;
4619 0 : WREG32(DC_HPD6_INT_CONTROL, tmp);
4620 :
4621 0 : }
4622 :
4623 0 : int evergreen_irq_set(struct radeon_device *rdev)
4624 : {
4625 : u32 cp_int_cntl = CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE;
4626 : u32 cp_int_cntl1 = 0, cp_int_cntl2 = 0;
4627 : u32 crtc1 = 0, crtc2 = 0, crtc3 = 0, crtc4 = 0, crtc5 = 0, crtc6 = 0;
4628 : u32 hpd1, hpd2, hpd3, hpd4, hpd5, hpd6;
4629 : u32 grbm_int_cntl = 0;
4630 : u32 afmt1 = 0, afmt2 = 0, afmt3 = 0, afmt4 = 0, afmt5 = 0, afmt6 = 0;
4631 : u32 dma_cntl, dma_cntl1 = 0;
4632 : u32 thermal_int = 0;
4633 :
4634 0 : if (!rdev->irq.installed) {
4635 0 : WARN(1, "Can't enable IRQ/MSI because no handler is installed\n");
4636 0 : return -EINVAL;
4637 : }
4638 : /* don't enable anything if the ih is disabled */
4639 0 : if (!rdev->ih.enabled) {
4640 0 : r600_disable_interrupts(rdev);
4641 : /* force the active interrupt state to all disabled */
4642 0 : evergreen_disable_interrupt_state(rdev);
4643 0 : return 0;
4644 : }
4645 :
4646 0 : hpd1 = RREG32(DC_HPD1_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
4647 0 : hpd2 = RREG32(DC_HPD2_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
4648 0 : hpd3 = RREG32(DC_HPD3_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
4649 0 : hpd4 = RREG32(DC_HPD4_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
4650 0 : hpd5 = RREG32(DC_HPD5_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
4651 0 : hpd6 = RREG32(DC_HPD6_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
4652 0 : if (rdev->family == CHIP_ARUBA)
4653 0 : thermal_int = RREG32(TN_CG_THERMAL_INT_CTRL) &
4654 : ~(THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW);
4655 : else
4656 0 : thermal_int = RREG32(CG_THERMAL_INT) &
4657 : ~(THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW);
4658 :
4659 0 : afmt1 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4660 0 : afmt2 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4661 0 : afmt3 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4662 0 : afmt4 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4663 0 : afmt5 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4664 0 : afmt6 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4665 :
4666 0 : dma_cntl = RREG32(DMA_CNTL) & ~TRAP_ENABLE;
4667 :
4668 0 : if (rdev->family >= CHIP_CAYMAN) {
4669 : /* enable CP interrupts on all rings */
4670 0 : if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
4671 : DRM_DEBUG("evergreen_irq_set: sw int gfx\n");
4672 : cp_int_cntl |= TIME_STAMP_INT_ENABLE;
4673 0 : }
4674 0 : if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP1_INDEX])) {
4675 : DRM_DEBUG("evergreen_irq_set: sw int cp1\n");
4676 : cp_int_cntl1 |= TIME_STAMP_INT_ENABLE;
4677 0 : }
4678 0 : if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP2_INDEX])) {
4679 : DRM_DEBUG("evergreen_irq_set: sw int cp2\n");
4680 : cp_int_cntl2 |= TIME_STAMP_INT_ENABLE;
4681 0 : }
4682 : } else {
4683 0 : if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
4684 : DRM_DEBUG("evergreen_irq_set: sw int gfx\n");
4685 : cp_int_cntl |= RB_INT_ENABLE;
4686 : cp_int_cntl |= TIME_STAMP_INT_ENABLE;
4687 0 : }
4688 : }
4689 :
4690 0 : if (atomic_read(&rdev->irq.ring_int[R600_RING_TYPE_DMA_INDEX])) {
4691 : DRM_DEBUG("r600_irq_set: sw int dma\n");
4692 0 : dma_cntl |= TRAP_ENABLE;
4693 0 : }
4694 :
4695 0 : if (rdev->family >= CHIP_CAYMAN) {
4696 0 : dma_cntl1 = RREG32(CAYMAN_DMA1_CNTL) & ~TRAP_ENABLE;
4697 0 : if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_DMA1_INDEX])) {
4698 : DRM_DEBUG("r600_irq_set: sw int dma1\n");
4699 0 : dma_cntl1 |= TRAP_ENABLE;
4700 0 : }
4701 : }
4702 :
4703 0 : if (rdev->irq.dpm_thermal) {
4704 : DRM_DEBUG("dpm thermal\n");
4705 0 : thermal_int |= THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW;
4706 0 : }
4707 :
4708 0 : if (rdev->irq.crtc_vblank_int[0] ||
4709 0 : atomic_read(&rdev->irq.pflip[0])) {
4710 : DRM_DEBUG("evergreen_irq_set: vblank 0\n");
4711 : crtc1 |= VBLANK_INT_MASK;
4712 0 : }
4713 0 : if (rdev->irq.crtc_vblank_int[1] ||
4714 0 : atomic_read(&rdev->irq.pflip[1])) {
4715 : DRM_DEBUG("evergreen_irq_set: vblank 1\n");
4716 : crtc2 |= VBLANK_INT_MASK;
4717 0 : }
4718 0 : if (rdev->irq.crtc_vblank_int[2] ||
4719 0 : atomic_read(&rdev->irq.pflip[2])) {
4720 : DRM_DEBUG("evergreen_irq_set: vblank 2\n");
4721 : crtc3 |= VBLANK_INT_MASK;
4722 0 : }
4723 0 : if (rdev->irq.crtc_vblank_int[3] ||
4724 0 : atomic_read(&rdev->irq.pflip[3])) {
4725 : DRM_DEBUG("evergreen_irq_set: vblank 3\n");
4726 : crtc4 |= VBLANK_INT_MASK;
4727 0 : }
4728 0 : if (rdev->irq.crtc_vblank_int[4] ||
4729 0 : atomic_read(&rdev->irq.pflip[4])) {
4730 : DRM_DEBUG("evergreen_irq_set: vblank 4\n");
4731 : crtc5 |= VBLANK_INT_MASK;
4732 0 : }
4733 0 : if (rdev->irq.crtc_vblank_int[5] ||
4734 0 : atomic_read(&rdev->irq.pflip[5])) {
4735 : DRM_DEBUG("evergreen_irq_set: vblank 5\n");
4736 : crtc6 |= VBLANK_INT_MASK;
4737 0 : }
4738 0 : if (rdev->irq.hpd[0]) {
4739 : DRM_DEBUG("evergreen_irq_set: hpd 1\n");
4740 0 : hpd1 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
4741 0 : }
4742 0 : if (rdev->irq.hpd[1]) {
4743 : DRM_DEBUG("evergreen_irq_set: hpd 2\n");
4744 0 : hpd2 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
4745 0 : }
4746 0 : if (rdev->irq.hpd[2]) {
4747 : DRM_DEBUG("evergreen_irq_set: hpd 3\n");
4748 0 : hpd3 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
4749 0 : }
4750 0 : if (rdev->irq.hpd[3]) {
4751 : DRM_DEBUG("evergreen_irq_set: hpd 4\n");
4752 0 : hpd4 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
4753 0 : }
4754 0 : if (rdev->irq.hpd[4]) {
4755 : DRM_DEBUG("evergreen_irq_set: hpd 5\n");
4756 0 : hpd5 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
4757 0 : }
4758 0 : if (rdev->irq.hpd[5]) {
4759 : DRM_DEBUG("evergreen_irq_set: hpd 6\n");
4760 0 : hpd6 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
4761 0 : }
4762 0 : if (rdev->irq.afmt[0]) {
4763 : DRM_DEBUG("evergreen_irq_set: hdmi 0\n");
4764 0 : afmt1 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4765 0 : }
4766 0 : if (rdev->irq.afmt[1]) {
4767 : DRM_DEBUG("evergreen_irq_set: hdmi 1\n");
4768 0 : afmt2 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4769 0 : }
4770 0 : if (rdev->irq.afmt[2]) {
4771 : DRM_DEBUG("evergreen_irq_set: hdmi 2\n");
4772 0 : afmt3 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4773 0 : }
4774 0 : if (rdev->irq.afmt[3]) {
4775 : DRM_DEBUG("evergreen_irq_set: hdmi 3\n");
4776 0 : afmt4 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4777 0 : }
4778 0 : if (rdev->irq.afmt[4]) {
4779 : DRM_DEBUG("evergreen_irq_set: hdmi 4\n");
4780 0 : afmt5 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4781 0 : }
4782 0 : if (rdev->irq.afmt[5]) {
4783 : DRM_DEBUG("evergreen_irq_set: hdmi 5\n");
4784 0 : afmt6 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4785 0 : }
4786 :
4787 0 : if (rdev->family >= CHIP_CAYMAN) {
4788 0 : cayman_cp_int_cntl_setup(rdev, 0, cp_int_cntl);
4789 0 : cayman_cp_int_cntl_setup(rdev, 1, cp_int_cntl1);
4790 0 : cayman_cp_int_cntl_setup(rdev, 2, cp_int_cntl2);
4791 0 : } else
4792 0 : WREG32(CP_INT_CNTL, cp_int_cntl);
4793 :
4794 0 : WREG32(DMA_CNTL, dma_cntl);
4795 :
4796 0 : if (rdev->family >= CHIP_CAYMAN)
4797 0 : WREG32(CAYMAN_DMA1_CNTL, dma_cntl1);
4798 :
4799 0 : WREG32(GRBM_INT_CNTL, grbm_int_cntl);
4800 :
4801 0 : WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, crtc1);
4802 0 : WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, crtc2);
4803 0 : if (rdev->num_crtc >= 4) {
4804 0 : WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, crtc3);
4805 0 : WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, crtc4);
4806 0 : }
4807 0 : if (rdev->num_crtc >= 6) {
4808 0 : WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, crtc5);
4809 0 : WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, crtc6);
4810 0 : }
4811 :
4812 0 : WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET,
4813 : GRPH_PFLIP_INT_MASK);
4814 0 : WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET,
4815 : GRPH_PFLIP_INT_MASK);
4816 0 : if (rdev->num_crtc >= 4) {
4817 0 : WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET,
4818 : GRPH_PFLIP_INT_MASK);
4819 0 : WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET,
4820 : GRPH_PFLIP_INT_MASK);
4821 0 : }
4822 0 : if (rdev->num_crtc >= 6) {
4823 0 : WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET,
4824 : GRPH_PFLIP_INT_MASK);
4825 0 : WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET,
4826 : GRPH_PFLIP_INT_MASK);
4827 0 : }
4828 :
4829 0 : WREG32(DC_HPD1_INT_CONTROL, hpd1);
4830 0 : WREG32(DC_HPD2_INT_CONTROL, hpd2);
4831 0 : WREG32(DC_HPD3_INT_CONTROL, hpd3);
4832 0 : WREG32(DC_HPD4_INT_CONTROL, hpd4);
4833 0 : WREG32(DC_HPD5_INT_CONTROL, hpd5);
4834 0 : WREG32(DC_HPD6_INT_CONTROL, hpd6);
4835 0 : if (rdev->family == CHIP_ARUBA)
4836 0 : WREG32(TN_CG_THERMAL_INT_CTRL, thermal_int);
4837 : else
4838 0 : WREG32(CG_THERMAL_INT, thermal_int);
4839 :
4840 0 : WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, afmt1);
4841 0 : WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, afmt2);
4842 0 : WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, afmt3);
4843 0 : WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, afmt4);
4844 0 : WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, afmt5);
4845 0 : WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, afmt6);
4846 :
4847 : /* posting read */
4848 0 : RREG32(SRBM_STATUS);
4849 :
4850 0 : return 0;
4851 0 : }
4852 :
4853 0 : static void evergreen_irq_ack(struct radeon_device *rdev)
4854 : {
4855 : u32 tmp;
4856 :
4857 0 : rdev->irq.stat_regs.evergreen.disp_int = RREG32(DISP_INTERRUPT_STATUS);
4858 0 : rdev->irq.stat_regs.evergreen.disp_int_cont = RREG32(DISP_INTERRUPT_STATUS_CONTINUE);
4859 0 : rdev->irq.stat_regs.evergreen.disp_int_cont2 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE2);
4860 0 : rdev->irq.stat_regs.evergreen.disp_int_cont3 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE3);
4861 0 : rdev->irq.stat_regs.evergreen.disp_int_cont4 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE4);
4862 0 : rdev->irq.stat_regs.evergreen.disp_int_cont5 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE5);
4863 0 : rdev->irq.stat_regs.evergreen.d1grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
4864 0 : rdev->irq.stat_regs.evergreen.d2grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
4865 0 : if (rdev->num_crtc >= 4) {
4866 0 : rdev->irq.stat_regs.evergreen.d3grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
4867 0 : rdev->irq.stat_regs.evergreen.d4grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
4868 0 : }
4869 0 : if (rdev->num_crtc >= 6) {
4870 0 : rdev->irq.stat_regs.evergreen.d5grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
4871 0 : rdev->irq.stat_regs.evergreen.d6grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);
4872 0 : }
4873 :
4874 0 : rdev->irq.stat_regs.evergreen.afmt_status1 = RREG32(AFMT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
4875 0 : rdev->irq.stat_regs.evergreen.afmt_status2 = RREG32(AFMT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
4876 0 : rdev->irq.stat_regs.evergreen.afmt_status3 = RREG32(AFMT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
4877 0 : rdev->irq.stat_regs.evergreen.afmt_status4 = RREG32(AFMT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
4878 0 : rdev->irq.stat_regs.evergreen.afmt_status5 = RREG32(AFMT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
4879 0 : rdev->irq.stat_regs.evergreen.afmt_status6 = RREG32(AFMT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);
4880 :
4881 0 : if (rdev->irq.stat_regs.evergreen.d1grph_int & GRPH_PFLIP_INT_OCCURRED)
4882 0 : WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4883 0 : if (rdev->irq.stat_regs.evergreen.d2grph_int & GRPH_PFLIP_INT_OCCURRED)
4884 0 : WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4885 0 : if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT)
4886 0 : WREG32(VBLANK_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VBLANK_ACK);
4887 0 : if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT)
4888 0 : WREG32(VLINE_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VLINE_ACK);
4889 0 : if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT)
4890 0 : WREG32(VBLANK_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VBLANK_ACK);
4891 0 : if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT)
4892 0 : WREG32(VLINE_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VLINE_ACK);
4893 :
4894 0 : if (rdev->num_crtc >= 4) {
4895 0 : if (rdev->irq.stat_regs.evergreen.d3grph_int & GRPH_PFLIP_INT_OCCURRED)
4896 0 : WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4897 0 : if (rdev->irq.stat_regs.evergreen.d4grph_int & GRPH_PFLIP_INT_OCCURRED)
4898 0 : WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4899 0 : if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT)
4900 0 : WREG32(VBLANK_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VBLANK_ACK);
4901 0 : if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT)
4902 0 : WREG32(VLINE_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VLINE_ACK);
4903 0 : if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT)
4904 0 : WREG32(VBLANK_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VBLANK_ACK);
4905 0 : if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT)
4906 0 : WREG32(VLINE_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VLINE_ACK);
4907 : }
4908 :
4909 0 : if (rdev->num_crtc >= 6) {
4910 0 : if (rdev->irq.stat_regs.evergreen.d5grph_int & GRPH_PFLIP_INT_OCCURRED)
4911 0 : WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4912 0 : if (rdev->irq.stat_regs.evergreen.d6grph_int & GRPH_PFLIP_INT_OCCURRED)
4913 0 : WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4914 0 : if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT)
4915 0 : WREG32(VBLANK_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VBLANK_ACK);
4916 0 : if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT)
4917 0 : WREG32(VLINE_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VLINE_ACK);
4918 0 : if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT)
4919 0 : WREG32(VBLANK_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VBLANK_ACK);
4920 0 : if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT)
4921 0 : WREG32(VLINE_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VLINE_ACK);
4922 : }
4923 :
4924 0 : if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT) {
4925 0 : tmp = RREG32(DC_HPD1_INT_CONTROL);
4926 0 : tmp |= DC_HPDx_INT_ACK;
4927 0 : WREG32(DC_HPD1_INT_CONTROL, tmp);
4928 0 : }
4929 0 : if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT) {
4930 0 : tmp = RREG32(DC_HPD2_INT_CONTROL);
4931 0 : tmp |= DC_HPDx_INT_ACK;
4932 0 : WREG32(DC_HPD2_INT_CONTROL, tmp);
4933 0 : }
4934 0 : if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT) {
4935 0 : tmp = RREG32(DC_HPD3_INT_CONTROL);
4936 0 : tmp |= DC_HPDx_INT_ACK;
4937 0 : WREG32(DC_HPD3_INT_CONTROL, tmp);
4938 0 : }
4939 0 : if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT) {
4940 0 : tmp = RREG32(DC_HPD4_INT_CONTROL);
4941 0 : tmp |= DC_HPDx_INT_ACK;
4942 0 : WREG32(DC_HPD4_INT_CONTROL, tmp);
4943 0 : }
4944 0 : if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT) {
4945 0 : tmp = RREG32(DC_HPD5_INT_CONTROL);
4946 0 : tmp |= DC_HPDx_INT_ACK;
4947 0 : WREG32(DC_HPD5_INT_CONTROL, tmp);
4948 0 : }
4949 0 : if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT) {
4950 0 : tmp = RREG32(DC_HPD6_INT_CONTROL);
4951 0 : tmp |= DC_HPDx_INT_ACK;
4952 0 : WREG32(DC_HPD6_INT_CONTROL, tmp);
4953 0 : }
4954 :
4955 0 : if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_RX_INTERRUPT) {
4956 0 : tmp = RREG32(DC_HPD1_INT_CONTROL);
4957 0 : tmp |= DC_HPDx_RX_INT_ACK;
4958 0 : WREG32(DC_HPD1_INT_CONTROL, tmp);
4959 0 : }
4960 0 : if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_RX_INTERRUPT) {
4961 0 : tmp = RREG32(DC_HPD2_INT_CONTROL);
4962 0 : tmp |= DC_HPDx_RX_INT_ACK;
4963 0 : WREG32(DC_HPD2_INT_CONTROL, tmp);
4964 0 : }
4965 0 : if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_RX_INTERRUPT) {
4966 0 : tmp = RREG32(DC_HPD3_INT_CONTROL);
4967 0 : tmp |= DC_HPDx_RX_INT_ACK;
4968 0 : WREG32(DC_HPD3_INT_CONTROL, tmp);
4969 0 : }
4970 0 : if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_RX_INTERRUPT) {
4971 0 : tmp = RREG32(DC_HPD4_INT_CONTROL);
4972 0 : tmp |= DC_HPDx_RX_INT_ACK;
4973 0 : WREG32(DC_HPD4_INT_CONTROL, tmp);
4974 0 : }
4975 0 : if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_RX_INTERRUPT) {
4976 0 : tmp = RREG32(DC_HPD5_INT_CONTROL);
4977 0 : tmp |= DC_HPDx_RX_INT_ACK;
4978 0 : WREG32(DC_HPD5_INT_CONTROL, tmp);
4979 0 : }
4980 0 : if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_RX_INTERRUPT) {
4981 0 : tmp = RREG32(DC_HPD6_INT_CONTROL);
4982 0 : tmp |= DC_HPDx_RX_INT_ACK;
4983 0 : WREG32(DC_HPD6_INT_CONTROL, tmp);
4984 0 : }
4985 :
4986 0 : if (rdev->irq.stat_regs.evergreen.afmt_status1 & AFMT_AZ_FORMAT_WTRIG) {
4987 0 : tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET);
4988 0 : tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4989 0 : WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, tmp);
4990 0 : }
4991 0 : if (rdev->irq.stat_regs.evergreen.afmt_status2 & AFMT_AZ_FORMAT_WTRIG) {
4992 0 : tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET);
4993 0 : tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4994 0 : WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, tmp);
4995 0 : }
4996 0 : if (rdev->irq.stat_regs.evergreen.afmt_status3 & AFMT_AZ_FORMAT_WTRIG) {
4997 0 : tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET);
4998 0 : tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4999 0 : WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, tmp);
5000 0 : }
5001 0 : if (rdev->irq.stat_regs.evergreen.afmt_status4 & AFMT_AZ_FORMAT_WTRIG) {
5002 0 : tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET);
5003 0 : tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
5004 0 : WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, tmp);
5005 0 : }
5006 0 : if (rdev->irq.stat_regs.evergreen.afmt_status5 & AFMT_AZ_FORMAT_WTRIG) {
5007 0 : tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET);
5008 0 : tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
5009 0 : WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, tmp);
5010 0 : }
5011 0 : if (rdev->irq.stat_regs.evergreen.afmt_status6 & AFMT_AZ_FORMAT_WTRIG) {
5012 0 : tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET);
5013 0 : tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
5014 0 : WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, tmp);
5015 0 : }
5016 0 : }
5017 :
5018 0 : static void evergreen_irq_disable(struct radeon_device *rdev)
5019 : {
5020 0 : r600_disable_interrupts(rdev);
5021 : /* Wait and acknowledge irq */
5022 0 : mdelay(1);
5023 0 : evergreen_irq_ack(rdev);
5024 0 : evergreen_disable_interrupt_state(rdev);
5025 0 : }
5026 :
5027 0 : void evergreen_irq_suspend(struct radeon_device *rdev)
5028 : {
5029 0 : evergreen_irq_disable(rdev);
5030 0 : r600_rlc_stop(rdev);
5031 0 : }
5032 :
5033 0 : static u32 evergreen_get_ih_wptr(struct radeon_device *rdev)
5034 : {
5035 : u32 wptr, tmp;
5036 :
5037 0 : if (rdev->wb.enabled)
5038 0 : wptr = le32_to_cpu(rdev->wb.wb[R600_WB_IH_WPTR_OFFSET/4]);
5039 : else
5040 0 : wptr = RREG32(IH_RB_WPTR);
5041 :
5042 0 : if (wptr & RB_OVERFLOW) {
5043 0 : wptr &= ~RB_OVERFLOW;
5044 : /* When a ring buffer overflow happen start parsing interrupt
5045 : * from the last not overwritten vector (wptr + 16). Hopefully
5046 : * this should allow us to catchup.
5047 : */
5048 0 : dev_warn(rdev->dev, "IH ring buffer overflow (0x%08X, 0x%08X, 0x%08X)\n",
5049 : wptr, rdev->ih.rptr, (wptr + 16) & rdev->ih.ptr_mask);
5050 0 : rdev->ih.rptr = (wptr + 16) & rdev->ih.ptr_mask;
5051 0 : tmp = RREG32(IH_RB_CNTL);
5052 0 : tmp |= IH_WPTR_OVERFLOW_CLEAR;
5053 0 : WREG32(IH_RB_CNTL, tmp);
5054 0 : }
5055 0 : return (wptr & rdev->ih.ptr_mask);
5056 : }
5057 :
5058 0 : int evergreen_irq_process(struct radeon_device *rdev)
5059 : {
5060 : u32 wptr;
5061 : u32 rptr;
5062 : u32 src_id, src_data;
5063 : u32 ring_index;
5064 : bool queue_hotplug = false;
5065 : bool queue_hdmi = false;
5066 : bool queue_dp = false;
5067 : bool queue_thermal = false;
5068 : u32 status, addr;
5069 :
5070 0 : if (!rdev->ih.enabled || rdev->shutdown)
5071 0 : return IRQ_NONE;
5072 :
5073 0 : wptr = evergreen_get_ih_wptr(rdev);
5074 :
5075 0 : if (wptr == rdev->ih.rptr)
5076 0 : return IRQ_NONE;
5077 : restart_ih:
5078 : /* is somebody else already processing irqs? */
5079 0 : if (atomic_xchg(&rdev->ih.lock, 1))
5080 0 : return IRQ_NONE;
5081 :
5082 0 : rptr = rdev->ih.rptr;
5083 : DRM_DEBUG("evergreen_irq_process start: rptr %d, wptr %d\n", rptr, wptr);
5084 :
5085 : /* Order reading of wptr vs. reading of IH ring data */
5086 0 : rmb();
5087 :
5088 : /* display interrupts */
5089 0 : evergreen_irq_ack(rdev);
5090 :
5091 0 : while (rptr != wptr) {
5092 : /* wptr/rptr are in bytes! */
5093 0 : ring_index = rptr / 4;
5094 0 : src_id = le32_to_cpu(rdev->ih.ring[ring_index]) & 0xff;
5095 0 : src_data = le32_to_cpu(rdev->ih.ring[ring_index + 1]) & 0xfffffff;
5096 :
5097 0 : switch (src_id) {
5098 : case 1: /* D1 vblank/vline */
5099 0 : switch (src_data) {
5100 : case 0: /* D1 vblank */
5101 0 : if (!(rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT))
5102 : DRM_DEBUG("IH: D1 vblank - IH event w/o asserted irq bit?\n");
5103 :
5104 0 : if (rdev->irq.crtc_vblank_int[0]) {
5105 0 : drm_handle_vblank(rdev->ddev, 0);
5106 0 : rdev->pm.vblank_sync = true;
5107 0 : wake_up(&rdev->irq.vblank_queue);
5108 0 : }
5109 0 : if (atomic_read(&rdev->irq.pflip[0]))
5110 0 : radeon_crtc_handle_vblank(rdev, 0);
5111 0 : rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VBLANK_INTERRUPT;
5112 : DRM_DEBUG("IH: D1 vblank\n");
5113 :
5114 0 : break;
5115 : case 1: /* D1 vline */
5116 0 : if (!(rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT))
5117 : DRM_DEBUG("IH: D1 vline - IH event w/o asserted irq bit?\n");
5118 :
5119 0 : rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VLINE_INTERRUPT;
5120 : DRM_DEBUG("IH: D1 vline\n");
5121 :
5122 0 : break;
5123 : default:
5124 : DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5125 : break;
5126 : }
5127 : break;
5128 : case 2: /* D2 vblank/vline */
5129 0 : switch (src_data) {
5130 : case 0: /* D2 vblank */
5131 0 : if (!(rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT))
5132 : DRM_DEBUG("IH: D2 vblank - IH event w/o asserted irq bit?\n");
5133 :
5134 0 : if (rdev->irq.crtc_vblank_int[1]) {
5135 0 : drm_handle_vblank(rdev->ddev, 1);
5136 0 : rdev->pm.vblank_sync = true;
5137 0 : wake_up(&rdev->irq.vblank_queue);
5138 0 : }
5139 0 : if (atomic_read(&rdev->irq.pflip[1]))
5140 0 : radeon_crtc_handle_vblank(rdev, 1);
5141 0 : rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VBLANK_INTERRUPT;
5142 : DRM_DEBUG("IH: D2 vblank\n");
5143 :
5144 0 : break;
5145 : case 1: /* D2 vline */
5146 0 : if (!(rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT))
5147 : DRM_DEBUG("IH: D2 vline - IH event w/o asserted irq bit?\n");
5148 :
5149 0 : rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VLINE_INTERRUPT;
5150 : DRM_DEBUG("IH: D2 vline\n");
5151 :
5152 0 : break;
5153 : default:
5154 : DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5155 : break;
5156 : }
5157 : break;
5158 : case 3: /* D3 vblank/vline */
5159 0 : switch (src_data) {
5160 : case 0: /* D3 vblank */
5161 0 : if (!(rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT))
5162 : DRM_DEBUG("IH: D3 vblank - IH event w/o asserted irq bit?\n");
5163 :
5164 0 : if (rdev->irq.crtc_vblank_int[2]) {
5165 0 : drm_handle_vblank(rdev->ddev, 2);
5166 0 : rdev->pm.vblank_sync = true;
5167 0 : wake_up(&rdev->irq.vblank_queue);
5168 0 : }
5169 0 : if (atomic_read(&rdev->irq.pflip[2]))
5170 0 : radeon_crtc_handle_vblank(rdev, 2);
5171 0 : rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VBLANK_INTERRUPT;
5172 : DRM_DEBUG("IH: D3 vblank\n");
5173 :
5174 0 : break;
5175 : case 1: /* D3 vline */
5176 0 : if (!(rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT))
5177 : DRM_DEBUG("IH: D3 vline - IH event w/o asserted irq bit?\n");
5178 :
5179 0 : rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VLINE_INTERRUPT;
5180 : DRM_DEBUG("IH: D3 vline\n");
5181 :
5182 0 : break;
5183 : default:
5184 : DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5185 : break;
5186 : }
5187 : break;
5188 : case 4: /* D4 vblank/vline */
5189 0 : switch (src_data) {
5190 : case 0: /* D4 vblank */
5191 0 : if (!(rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT))
5192 : DRM_DEBUG("IH: D4 vblank - IH event w/o asserted irq bit?\n");
5193 :
5194 0 : if (rdev->irq.crtc_vblank_int[3]) {
5195 0 : drm_handle_vblank(rdev->ddev, 3);
5196 0 : rdev->pm.vblank_sync = true;
5197 0 : wake_up(&rdev->irq.vblank_queue);
5198 0 : }
5199 0 : if (atomic_read(&rdev->irq.pflip[3]))
5200 0 : radeon_crtc_handle_vblank(rdev, 3);
5201 0 : rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VBLANK_INTERRUPT;
5202 : DRM_DEBUG("IH: D4 vblank\n");
5203 :
5204 0 : break;
5205 : case 1: /* D4 vline */
5206 0 : if (!(rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT))
5207 : DRM_DEBUG("IH: D4 vline - IH event w/o asserted irq bit?\n");
5208 :
5209 0 : rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VLINE_INTERRUPT;
5210 : DRM_DEBUG("IH: D4 vline\n");
5211 :
5212 0 : break;
5213 : default:
5214 : DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5215 : break;
5216 : }
5217 : break;
5218 : case 5: /* D5 vblank/vline */
5219 0 : switch (src_data) {
5220 : case 0: /* D5 vblank */
5221 0 : if (!(rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT))
5222 : DRM_DEBUG("IH: D5 vblank - IH event w/o asserted irq bit?\n");
5223 :
5224 0 : if (rdev->irq.crtc_vblank_int[4]) {
5225 0 : drm_handle_vblank(rdev->ddev, 4);
5226 0 : rdev->pm.vblank_sync = true;
5227 0 : wake_up(&rdev->irq.vblank_queue);
5228 0 : }
5229 0 : if (atomic_read(&rdev->irq.pflip[4]))
5230 0 : radeon_crtc_handle_vblank(rdev, 4);
5231 0 : rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VBLANK_INTERRUPT;
5232 : DRM_DEBUG("IH: D5 vblank\n");
5233 :
5234 0 : break;
5235 : case 1: /* D5 vline */
5236 0 : if (!(rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT))
5237 : DRM_DEBUG("IH: D5 vline - IH event w/o asserted irq bit?\n");
5238 :
5239 0 : rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VLINE_INTERRUPT;
5240 : DRM_DEBUG("IH: D5 vline\n");
5241 :
5242 0 : break;
5243 : default:
5244 : DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5245 : break;
5246 : }
5247 : break;
5248 : case 6: /* D6 vblank/vline */
5249 0 : switch (src_data) {
5250 : case 0: /* D6 vblank */
5251 0 : if (!(rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT))
5252 : DRM_DEBUG("IH: D6 vblank - IH event w/o asserted irq bit?\n");
5253 :
5254 0 : if (rdev->irq.crtc_vblank_int[5]) {
5255 0 : drm_handle_vblank(rdev->ddev, 5);
5256 0 : rdev->pm.vblank_sync = true;
5257 0 : wake_up(&rdev->irq.vblank_queue);
5258 0 : }
5259 0 : if (atomic_read(&rdev->irq.pflip[5]))
5260 0 : radeon_crtc_handle_vblank(rdev, 5);
5261 0 : rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VBLANK_INTERRUPT;
5262 : DRM_DEBUG("IH: D6 vblank\n");
5263 :
5264 0 : break;
5265 : case 1: /* D6 vline */
5266 0 : if (!(rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT))
5267 : DRM_DEBUG("IH: D6 vline - IH event w/o asserted irq bit?\n");
5268 :
5269 0 : rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VLINE_INTERRUPT;
5270 : DRM_DEBUG("IH: D6 vline\n");
5271 :
5272 0 : break;
5273 : default:
5274 : DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5275 : break;
5276 : }
5277 : break;
5278 : case 8: /* D1 page flip */
5279 : case 10: /* D2 page flip */
5280 : case 12: /* D3 page flip */
5281 : case 14: /* D4 page flip */
5282 : case 16: /* D5 page flip */
5283 : case 18: /* D6 page flip */
5284 : DRM_DEBUG("IH: D%d flip\n", ((src_id - 8) >> 1) + 1);
5285 0 : if (radeon_use_pflipirq > 0)
5286 0 : radeon_crtc_handle_flip(rdev, (src_id - 8) >> 1);
5287 : break;
5288 : case 42: /* HPD hotplug */
5289 0 : switch (src_data) {
5290 : case 0:
5291 0 : if (!(rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT))
5292 : DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5293 :
5294 0 : rdev->irq.stat_regs.evergreen.disp_int &= ~DC_HPD1_INTERRUPT;
5295 : queue_hotplug = true;
5296 : DRM_DEBUG("IH: HPD1\n");
5297 0 : break;
5298 : case 1:
5299 0 : if (!(rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT))
5300 : DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5301 :
5302 0 : rdev->irq.stat_regs.evergreen.disp_int_cont &= ~DC_HPD2_INTERRUPT;
5303 : queue_hotplug = true;
5304 : DRM_DEBUG("IH: HPD2\n");
5305 0 : break;
5306 : case 2:
5307 0 : if (!(rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT))
5308 : DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5309 :
5310 0 : rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~DC_HPD3_INTERRUPT;
5311 : queue_hotplug = true;
5312 : DRM_DEBUG("IH: HPD3\n");
5313 0 : break;
5314 : case 3:
5315 0 : if (!(rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT))
5316 : DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5317 :
5318 0 : rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~DC_HPD4_INTERRUPT;
5319 : queue_hotplug = true;
5320 : DRM_DEBUG("IH: HPD4\n");
5321 0 : break;
5322 : case 4:
5323 0 : if (!(rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT))
5324 : DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5325 :
5326 0 : rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~DC_HPD5_INTERRUPT;
5327 : queue_hotplug = true;
5328 : DRM_DEBUG("IH: HPD5\n");
5329 0 : break;
5330 : case 5:
5331 0 : if (!(rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT))
5332 : DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5333 :
5334 0 : rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~DC_HPD6_INTERRUPT;
5335 : queue_hotplug = true;
5336 : DRM_DEBUG("IH: HPD6\n");
5337 0 : break;
5338 : case 6:
5339 0 : if (!(rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_RX_INTERRUPT))
5340 : DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5341 :
5342 0 : rdev->irq.stat_regs.evergreen.disp_int &= ~DC_HPD1_RX_INTERRUPT;
5343 : queue_dp = true;
5344 : DRM_DEBUG("IH: HPD_RX 1\n");
5345 0 : break;
5346 : case 7:
5347 0 : if (!(rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_RX_INTERRUPT))
5348 : DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5349 :
5350 0 : rdev->irq.stat_regs.evergreen.disp_int_cont &= ~DC_HPD2_RX_INTERRUPT;
5351 : queue_dp = true;
5352 : DRM_DEBUG("IH: HPD_RX 2\n");
5353 0 : break;
5354 : case 8:
5355 0 : if (!(rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_RX_INTERRUPT))
5356 : DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5357 :
5358 0 : rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~DC_HPD3_RX_INTERRUPT;
5359 : queue_dp = true;
5360 : DRM_DEBUG("IH: HPD_RX 3\n");
5361 0 : break;
5362 : case 9:
5363 0 : if (!(rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_RX_INTERRUPT))
5364 : DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5365 :
5366 0 : rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~DC_HPD4_RX_INTERRUPT;
5367 : queue_dp = true;
5368 : DRM_DEBUG("IH: HPD_RX 4\n");
5369 0 : break;
5370 : case 10:
5371 0 : if (!(rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_RX_INTERRUPT))
5372 : DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5373 :
5374 0 : rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~DC_HPD5_RX_INTERRUPT;
5375 : queue_dp = true;
5376 : DRM_DEBUG("IH: HPD_RX 5\n");
5377 0 : break;
5378 : case 11:
5379 0 : if (!(rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_RX_INTERRUPT))
5380 : DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5381 :
5382 0 : rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~DC_HPD6_RX_INTERRUPT;
5383 : queue_dp = true;
5384 : DRM_DEBUG("IH: HPD_RX 6\n");
5385 0 : break;
5386 : default:
5387 : DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5388 : break;
5389 : }
5390 : break;
5391 : case 44: /* hdmi */
5392 0 : switch (src_data) {
5393 : case 0:
5394 0 : if (!(rdev->irq.stat_regs.evergreen.afmt_status1 & AFMT_AZ_FORMAT_WTRIG))
5395 : DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5396 :
5397 0 : rdev->irq.stat_regs.evergreen.afmt_status1 &= ~AFMT_AZ_FORMAT_WTRIG;
5398 : queue_hdmi = true;
5399 : DRM_DEBUG("IH: HDMI0\n");
5400 0 : break;
5401 : case 1:
5402 0 : if (!(rdev->irq.stat_regs.evergreen.afmt_status2 & AFMT_AZ_FORMAT_WTRIG))
5403 : DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5404 :
5405 0 : rdev->irq.stat_regs.evergreen.afmt_status2 &= ~AFMT_AZ_FORMAT_WTRIG;
5406 : queue_hdmi = true;
5407 : DRM_DEBUG("IH: HDMI1\n");
5408 0 : break;
5409 : case 2:
5410 0 : if (!(rdev->irq.stat_regs.evergreen.afmt_status3 & AFMT_AZ_FORMAT_WTRIG))
5411 : DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5412 :
5413 0 : rdev->irq.stat_regs.evergreen.afmt_status3 &= ~AFMT_AZ_FORMAT_WTRIG;
5414 : queue_hdmi = true;
5415 : DRM_DEBUG("IH: HDMI2\n");
5416 0 : break;
5417 : case 3:
5418 0 : if (!(rdev->irq.stat_regs.evergreen.afmt_status4 & AFMT_AZ_FORMAT_WTRIG))
5419 : DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5420 :
5421 0 : rdev->irq.stat_regs.evergreen.afmt_status4 &= ~AFMT_AZ_FORMAT_WTRIG;
5422 : queue_hdmi = true;
5423 : DRM_DEBUG("IH: HDMI3\n");
5424 0 : break;
5425 : case 4:
5426 0 : if (!(rdev->irq.stat_regs.evergreen.afmt_status5 & AFMT_AZ_FORMAT_WTRIG))
5427 : DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5428 :
5429 0 : rdev->irq.stat_regs.evergreen.afmt_status5 &= ~AFMT_AZ_FORMAT_WTRIG;
5430 : queue_hdmi = true;
5431 : DRM_DEBUG("IH: HDMI4\n");
5432 0 : break;
5433 : case 5:
5434 0 : if (!(rdev->irq.stat_regs.evergreen.afmt_status6 & AFMT_AZ_FORMAT_WTRIG))
5435 : DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5436 :
5437 0 : rdev->irq.stat_regs.evergreen.afmt_status6 &= ~AFMT_AZ_FORMAT_WTRIG;
5438 : queue_hdmi = true;
5439 : DRM_DEBUG("IH: HDMI5\n");
5440 0 : break;
5441 : default:
5442 0 : DRM_ERROR("Unhandled interrupt: %d %d\n", src_id, src_data);
5443 0 : break;
5444 : }
5445 : case 96:
5446 0 : DRM_ERROR("SRBM_READ_ERROR: 0x%x\n", RREG32(SRBM_READ_ERROR));
5447 0 : WREG32(SRBM_INT_ACK, 0x1);
5448 0 : break;
5449 : case 124: /* UVD */
5450 : DRM_DEBUG("IH: UVD int: 0x%08x\n", src_data);
5451 0 : radeon_fence_process(rdev, R600_RING_TYPE_UVD_INDEX);
5452 0 : break;
5453 : case 146:
5454 : case 147:
5455 0 : addr = RREG32(VM_CONTEXT1_PROTECTION_FAULT_ADDR);
5456 0 : status = RREG32(VM_CONTEXT1_PROTECTION_FAULT_STATUS);
5457 : /* reset addr and status */
5458 0 : WREG32_P(VM_CONTEXT1_CNTL2, 1, ~1);
5459 0 : if (addr == 0x0 && status == 0x0)
5460 : break;
5461 0 : dev_err(rdev->dev, "GPU fault detected: %d 0x%08x\n", src_id, src_data);
5462 0 : dev_err(rdev->dev, " VM_CONTEXT1_PROTECTION_FAULT_ADDR 0x%08X\n",
5463 : addr);
5464 0 : dev_err(rdev->dev, " VM_CONTEXT1_PROTECTION_FAULT_STATUS 0x%08X\n",
5465 : status);
5466 0 : cayman_vm_decode_fault(rdev, status, addr);
5467 0 : break;
5468 : case 176: /* CP_INT in ring buffer */
5469 : case 177: /* CP_INT in IB1 */
5470 : case 178: /* CP_INT in IB2 */
5471 : DRM_DEBUG("IH: CP int: 0x%08x\n", src_data);
5472 0 : radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
5473 0 : break;
5474 : case 181: /* CP EOP event */
5475 : DRM_DEBUG("IH: CP EOP\n");
5476 0 : if (rdev->family >= CHIP_CAYMAN) {
5477 0 : switch (src_data) {
5478 : case 0:
5479 0 : radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
5480 0 : break;
5481 : case 1:
5482 0 : radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP1_INDEX);
5483 0 : break;
5484 : case 2:
5485 0 : radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP2_INDEX);
5486 0 : break;
5487 : }
5488 : } else
5489 0 : radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
5490 : break;
5491 : case 224: /* DMA trap event */
5492 : DRM_DEBUG("IH: DMA trap\n");
5493 0 : radeon_fence_process(rdev, R600_RING_TYPE_DMA_INDEX);
5494 0 : break;
5495 : case 230: /* thermal low to high */
5496 : DRM_DEBUG("IH: thermal low to high\n");
5497 0 : rdev->pm.dpm.thermal.high_to_low = false;
5498 : queue_thermal = true;
5499 0 : break;
5500 : case 231: /* thermal high to low */
5501 : DRM_DEBUG("IH: thermal high to low\n");
5502 0 : rdev->pm.dpm.thermal.high_to_low = true;
5503 : queue_thermal = true;
5504 0 : break;
5505 : case 233: /* GUI IDLE */
5506 : DRM_DEBUG("IH: GUI idle\n");
5507 : break;
5508 : case 244: /* DMA trap event */
5509 0 : if (rdev->family >= CHIP_CAYMAN) {
5510 : DRM_DEBUG("IH: DMA1 trap\n");
5511 0 : radeon_fence_process(rdev, CAYMAN_RING_TYPE_DMA1_INDEX);
5512 0 : }
5513 : break;
5514 : default:
5515 : DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5516 : break;
5517 : }
5518 :
5519 : /* wptr/rptr are in bytes! */
5520 0 : rptr += 16;
5521 0 : rptr &= rdev->ih.ptr_mask;
5522 0 : WREG32(IH_RB_RPTR, rptr);
5523 : }
5524 0 : if (queue_dp)
5525 0 : schedule_work(&rdev->dp_work);
5526 0 : if (queue_hotplug)
5527 0 : schedule_delayed_work(&rdev->hotplug_work, 0);
5528 0 : if (queue_hdmi)
5529 0 : schedule_work(&rdev->audio_work);
5530 0 : if (queue_thermal && rdev->pm.dpm_enabled)
5531 0 : schedule_work(&rdev->pm.dpm.thermal.work);
5532 0 : rdev->ih.rptr = rptr;
5533 0 : atomic_set(&rdev->ih.lock, 0);
5534 :
5535 : /* make sure wptr hasn't changed while processing */
5536 0 : wptr = evergreen_get_ih_wptr(rdev);
5537 0 : if (wptr != rptr)
5538 0 : goto restart_ih;
5539 :
5540 0 : return IRQ_HANDLED;
5541 0 : }
5542 :
5543 0 : static int evergreen_startup(struct radeon_device *rdev)
5544 : {
5545 : struct radeon_ring *ring;
5546 : int r;
5547 :
5548 : /* enable pcie gen2 link */
5549 0 : evergreen_pcie_gen2_enable(rdev);
5550 : /* enable aspm */
5551 0 : evergreen_program_aspm(rdev);
5552 :
5553 : /* scratch needs to be initialized before MC */
5554 0 : r = r600_vram_scratch_init(rdev);
5555 0 : if (r)
5556 0 : return r;
5557 :
5558 0 : evergreen_mc_program(rdev);
5559 :
5560 0 : if (ASIC_IS_DCE5(rdev) && !rdev->pm.dpm_enabled) {
5561 0 : r = ni_mc_load_microcode(rdev);
5562 0 : if (r) {
5563 0 : DRM_ERROR("Failed to load MC firmware!\n");
5564 0 : return r;
5565 : }
5566 : }
5567 :
5568 0 : if (rdev->flags & RADEON_IS_AGP) {
5569 0 : evergreen_agp_enable(rdev);
5570 0 : } else {
5571 0 : r = evergreen_pcie_gart_enable(rdev);
5572 0 : if (r)
5573 0 : return r;
5574 : }
5575 0 : evergreen_gpu_init(rdev);
5576 :
5577 : /* allocate rlc buffers */
5578 0 : if (rdev->flags & RADEON_IS_IGP) {
5579 0 : rdev->rlc.reg_list = sumo_rlc_save_restore_register_list;
5580 0 : rdev->rlc.reg_list_size =
5581 : (u32)ARRAY_SIZE(sumo_rlc_save_restore_register_list);
5582 0 : rdev->rlc.cs_data = evergreen_cs_data;
5583 0 : r = sumo_rlc_init(rdev);
5584 0 : if (r) {
5585 0 : DRM_ERROR("Failed to init rlc BOs!\n");
5586 0 : return r;
5587 : }
5588 : }
5589 :
5590 : /* allocate wb buffer */
5591 0 : r = radeon_wb_init(rdev);
5592 0 : if (r)
5593 0 : return r;
5594 :
5595 0 : r = radeon_fence_driver_start_ring(rdev, RADEON_RING_TYPE_GFX_INDEX);
5596 0 : if (r) {
5597 0 : dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r);
5598 0 : return r;
5599 : }
5600 :
5601 0 : r = radeon_fence_driver_start_ring(rdev, R600_RING_TYPE_DMA_INDEX);
5602 0 : if (r) {
5603 0 : dev_err(rdev->dev, "failed initializing DMA fences (%d).\n", r);
5604 0 : return r;
5605 : }
5606 :
5607 0 : r = uvd_v2_2_resume(rdev);
5608 0 : if (!r) {
5609 0 : r = radeon_fence_driver_start_ring(rdev,
5610 : R600_RING_TYPE_UVD_INDEX);
5611 0 : if (r)
5612 0 : dev_err(rdev->dev, "UVD fences init error (%d).\n", r);
5613 : }
5614 :
5615 0 : if (r)
5616 0 : rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size = 0;
5617 :
5618 : /* Enable IRQ */
5619 0 : if (!rdev->irq.installed) {
5620 0 : r = radeon_irq_kms_init(rdev);
5621 0 : if (r)
5622 0 : return r;
5623 : }
5624 :
5625 0 : r = r600_irq_init(rdev);
5626 0 : if (r) {
5627 0 : DRM_ERROR("radeon: IH init failed (%d).\n", r);
5628 0 : radeon_irq_kms_fini(rdev);
5629 0 : return r;
5630 : }
5631 0 : evergreen_irq_set(rdev);
5632 :
5633 0 : ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
5634 0 : r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP_RPTR_OFFSET,
5635 : RADEON_CP_PACKET2);
5636 0 : if (r)
5637 0 : return r;
5638 :
5639 0 : ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX];
5640 0 : r = radeon_ring_init(rdev, ring, ring->ring_size, R600_WB_DMA_RPTR_OFFSET,
5641 : DMA_PACKET(DMA_PACKET_NOP, 0, 0));
5642 0 : if (r)
5643 0 : return r;
5644 :
5645 0 : r = evergreen_cp_load_microcode(rdev);
5646 0 : if (r)
5647 0 : return r;
5648 0 : r = evergreen_cp_resume(rdev);
5649 0 : if (r)
5650 0 : return r;
5651 0 : r = r600_dma_resume(rdev);
5652 0 : if (r)
5653 0 : return r;
5654 :
5655 0 : ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX];
5656 0 : if (ring->ring_size) {
5657 0 : r = radeon_ring_init(rdev, ring, ring->ring_size, 0,
5658 : RADEON_CP_PACKET2);
5659 0 : if (!r)
5660 0 : r = uvd_v1_0_init(rdev);
5661 :
5662 0 : if (r)
5663 0 : DRM_ERROR("radeon: error initializing UVD (%d).\n", r);
5664 : }
5665 :
5666 0 : r = radeon_ib_pool_init(rdev);
5667 0 : if (r) {
5668 0 : dev_err(rdev->dev, "IB initialization failed (%d).\n", r);
5669 0 : return r;
5670 : }
5671 :
5672 0 : r = radeon_audio_init(rdev);
5673 0 : if (r) {
5674 0 : DRM_ERROR("radeon: audio init failed\n");
5675 0 : return r;
5676 : }
5677 :
5678 0 : return 0;
5679 0 : }
5680 :
5681 0 : int evergreen_resume(struct radeon_device *rdev)
5682 : {
5683 : int r;
5684 :
5685 : /* reset the asic, the gfx blocks are often in a bad state
5686 : * after the driver is unloaded or after a resume
5687 : */
5688 0 : if (radeon_asic_reset(rdev))
5689 0 : dev_warn(rdev->dev, "GPU reset failed !\n");
5690 : /* Do not reset GPU before posting, on rv770 hw unlike on r500 hw,
5691 : * posting will perform necessary task to bring back GPU into good
5692 : * shape.
5693 : */
5694 : /* post card */
5695 0 : atom_asic_init(rdev->mode_info.atom_context);
5696 :
5697 : /* init golden registers */
5698 0 : evergreen_init_golden_registers(rdev);
5699 :
5700 0 : if (rdev->pm.pm_method == PM_METHOD_DPM)
5701 0 : radeon_pm_resume(rdev);
5702 :
5703 0 : rdev->accel_working = true;
5704 0 : r = evergreen_startup(rdev);
5705 0 : if (r) {
5706 0 : DRM_ERROR("evergreen startup failed on resume\n");
5707 0 : rdev->accel_working = false;
5708 0 : return r;
5709 : }
5710 :
5711 0 : return r;
5712 :
5713 0 : }
5714 :
5715 0 : int evergreen_suspend(struct radeon_device *rdev)
5716 : {
5717 0 : radeon_pm_suspend(rdev);
5718 0 : radeon_audio_fini(rdev);
5719 0 : uvd_v1_0_fini(rdev);
5720 0 : radeon_uvd_suspend(rdev);
5721 0 : r700_cp_stop(rdev);
5722 0 : r600_dma_stop(rdev);
5723 0 : evergreen_irq_suspend(rdev);
5724 0 : radeon_wb_disable(rdev);
5725 0 : evergreen_pcie_gart_disable(rdev);
5726 :
5727 0 : return 0;
5728 : }
5729 :
5730 : /* Plan is to move initialization in that function and use
5731 : * helper function so that radeon_device_init pretty much
5732 : * do nothing more than calling asic specific function. This
5733 : * should also allow to remove a bunch of callback function
5734 : * like vram_info.
5735 : */
5736 0 : int evergreen_init(struct radeon_device *rdev)
5737 : {
5738 : int r;
5739 :
5740 : /* Read BIOS */
5741 0 : if (!radeon_get_bios(rdev)) {
5742 0 : if (ASIC_IS_AVIVO(rdev))
5743 0 : return -EINVAL;
5744 : }
5745 : /* Must be an ATOMBIOS */
5746 0 : if (!rdev->is_atom_bios) {
5747 0 : dev_err(rdev->dev, "Expecting atombios for evergreen GPU\n");
5748 0 : return -EINVAL;
5749 : }
5750 0 : r = radeon_atombios_init(rdev);
5751 0 : if (r)
5752 0 : return r;
5753 : /* reset the asic, the gfx blocks are often in a bad state
5754 : * after the driver is unloaded or after a resume
5755 : */
5756 0 : if (radeon_asic_reset(rdev))
5757 0 : dev_warn(rdev->dev, "GPU reset failed !\n");
5758 : /* Post card if necessary */
5759 0 : if (!radeon_card_posted(rdev)) {
5760 0 : if (!rdev->bios) {
5761 0 : dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n");
5762 0 : return -EINVAL;
5763 : }
5764 : DRM_INFO("GPU not posted. posting now...\n");
5765 0 : atom_asic_init(rdev->mode_info.atom_context);
5766 0 : }
5767 : /* init golden registers */
5768 0 : evergreen_init_golden_registers(rdev);
5769 : /* Initialize scratch registers */
5770 0 : r600_scratch_init(rdev);
5771 : /* Initialize surface registers */
5772 0 : radeon_surface_init(rdev);
5773 : /* Initialize clocks */
5774 0 : radeon_get_clock_info(rdev->ddev);
5775 : /* Fence driver */
5776 0 : r = radeon_fence_driver_init(rdev);
5777 0 : if (r)
5778 0 : return r;
5779 : /* initialize AGP */
5780 0 : if (rdev->flags & RADEON_IS_AGP) {
5781 0 : r = radeon_agp_init(rdev);
5782 0 : if (r)
5783 0 : radeon_agp_disable(rdev);
5784 : }
5785 : /* initialize memory controller */
5786 0 : r = evergreen_mc_init(rdev);
5787 0 : if (r)
5788 0 : return r;
5789 : /* Memory manager */
5790 0 : r = radeon_bo_init(rdev);
5791 0 : if (r)
5792 0 : return r;
5793 :
5794 0 : if (ASIC_IS_DCE5(rdev)) {
5795 0 : if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw || !rdev->mc_fw) {
5796 0 : r = ni_init_microcode(rdev);
5797 0 : if (r) {
5798 0 : DRM_ERROR("Failed to load firmware!\n");
5799 0 : return r;
5800 : }
5801 : }
5802 : } else {
5803 0 : if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw) {
5804 0 : r = r600_init_microcode(rdev);
5805 0 : if (r) {
5806 0 : DRM_ERROR("Failed to load firmware!\n");
5807 0 : return r;
5808 : }
5809 : }
5810 : }
5811 :
5812 : /* Initialize power management */
5813 0 : radeon_pm_init(rdev);
5814 :
5815 0 : rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ring_obj = NULL;
5816 0 : r600_ring_init(rdev, &rdev->ring[RADEON_RING_TYPE_GFX_INDEX], 1024 * 1024);
5817 :
5818 0 : rdev->ring[R600_RING_TYPE_DMA_INDEX].ring_obj = NULL;
5819 0 : r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_DMA_INDEX], 64 * 1024);
5820 :
5821 0 : r = radeon_uvd_init(rdev);
5822 0 : if (!r) {
5823 0 : rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_obj = NULL;
5824 0 : r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_UVD_INDEX],
5825 : 4096);
5826 0 : }
5827 :
5828 0 : rdev->ih.ring_obj = NULL;
5829 0 : r600_ih_ring_init(rdev, 64 * 1024);
5830 :
5831 0 : r = r600_pcie_gart_init(rdev);
5832 0 : if (r)
5833 0 : return r;
5834 :
5835 0 : rdev->accel_working = true;
5836 0 : r = evergreen_startup(rdev);
5837 0 : if (r) {
5838 0 : dev_err(rdev->dev, "disabling GPU acceleration\n");
5839 0 : r700_cp_fini(rdev);
5840 0 : r600_dma_fini(rdev);
5841 0 : r600_irq_fini(rdev);
5842 0 : if (rdev->flags & RADEON_IS_IGP)
5843 0 : sumo_rlc_fini(rdev);
5844 0 : radeon_wb_fini(rdev);
5845 0 : radeon_ib_pool_fini(rdev);
5846 0 : radeon_irq_kms_fini(rdev);
5847 0 : evergreen_pcie_gart_fini(rdev);
5848 0 : rdev->accel_working = false;
5849 0 : }
5850 :
5851 : /* Don't start up if the MC ucode is missing on BTC parts.
5852 : * The default clocks and voltages before the MC ucode
5853 : * is loaded are not suffient for advanced operations.
5854 : */
5855 0 : if (ASIC_IS_DCE5(rdev)) {
5856 0 : if (!rdev->mc_fw && !(rdev->flags & RADEON_IS_IGP)) {
5857 0 : DRM_ERROR("radeon: MC ucode required for NI+.\n");
5858 0 : return -EINVAL;
5859 : }
5860 : }
5861 :
5862 0 : return 0;
5863 0 : }
5864 :
5865 0 : void evergreen_fini(struct radeon_device *rdev)
5866 : {
5867 0 : radeon_pm_fini(rdev);
5868 0 : radeon_audio_fini(rdev);
5869 0 : r700_cp_fini(rdev);
5870 0 : r600_dma_fini(rdev);
5871 0 : r600_irq_fini(rdev);
5872 0 : if (rdev->flags & RADEON_IS_IGP)
5873 0 : sumo_rlc_fini(rdev);
5874 0 : radeon_wb_fini(rdev);
5875 0 : radeon_ib_pool_fini(rdev);
5876 0 : radeon_irq_kms_fini(rdev);
5877 0 : uvd_v1_0_fini(rdev);
5878 0 : radeon_uvd_fini(rdev);
5879 0 : evergreen_pcie_gart_fini(rdev);
5880 0 : r600_vram_scratch_fini(rdev);
5881 0 : radeon_gem_fini(rdev);
5882 0 : radeon_fence_driver_fini(rdev);
5883 0 : radeon_agp_fini(rdev);
5884 0 : radeon_bo_fini(rdev);
5885 0 : radeon_atombios_fini(rdev);
5886 0 : kfree(rdev->bios);
5887 0 : rdev->bios = NULL;
5888 0 : }
5889 :
5890 0 : void evergreen_pcie_gen2_enable(struct radeon_device *rdev)
5891 : {
5892 : u32 link_width_cntl, speed_cntl;
5893 0 : u32 mask;
5894 :
5895 0 : if (radeon_pcie_gen2 == 0)
5896 0 : return;
5897 :
5898 0 : if (rdev->flags & RADEON_IS_IGP)
5899 0 : return;
5900 :
5901 0 : if (!(rdev->flags & RADEON_IS_PCIE))
5902 0 : return;
5903 :
5904 : /* x2 cards have a special sequence */
5905 0 : if (ASIC_IS_X2(rdev))
5906 0 : return;
5907 :
5908 0 : if (drm_pcie_get_speed_cap_mask(rdev->ddev, &mask))
5909 0 : return;
5910 :
5911 0 : if (!(mask & (DRM_PCIE_SPEED_50|DRM_PCIE_SPEED_80)))
5912 0 : return;
5913 :
5914 0 : speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5915 0 : if (speed_cntl & LC_CURRENT_DATA_RATE) {
5916 : DRM_INFO("PCIE gen 2 link speeds already enabled\n");
5917 0 : return;
5918 : }
5919 :
5920 : DRM_INFO("enabling PCIE gen 2 link speeds, disable with radeon.pcie_gen2=0\n");
5921 :
5922 0 : if ((speed_cntl & LC_OTHER_SIDE_EVER_SENT_GEN2) ||
5923 0 : (speed_cntl & LC_OTHER_SIDE_SUPPORTS_GEN2)) {
5924 :
5925 0 : link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
5926 0 : link_width_cntl &= ~LC_UPCONFIGURE_DIS;
5927 0 : WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
5928 :
5929 0 : speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5930 0 : speed_cntl &= ~LC_TARGET_LINK_SPEED_OVERRIDE_EN;
5931 0 : WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
5932 :
5933 0 : speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5934 0 : speed_cntl |= LC_CLR_FAILED_SPD_CHANGE_CNT;
5935 0 : WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
5936 :
5937 0 : speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5938 0 : speed_cntl &= ~LC_CLR_FAILED_SPD_CHANGE_CNT;
5939 0 : WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
5940 :
5941 0 : speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5942 0 : speed_cntl |= LC_GEN2_EN_STRAP;
5943 0 : WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
5944 :
5945 0 : } else {
5946 0 : link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
5947 : /* XXX: only disable it if gen1 bridge vendor == 0x111d or 0x1106 */
5948 : if (1)
5949 0 : link_width_cntl |= LC_UPCONFIGURE_DIS;
5950 : else
5951 : link_width_cntl &= ~LC_UPCONFIGURE_DIS;
5952 0 : WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
5953 : }
5954 0 : }
5955 :
5956 0 : void evergreen_program_aspm(struct radeon_device *rdev)
5957 : {
5958 : u32 data, orig;
5959 : u32 pcie_lc_cntl, pcie_lc_cntl_old;
5960 : bool disable_l0s, disable_l1 = false, disable_plloff_in_l1 = false;
5961 : /* fusion_platform = true
5962 : * if the system is a fusion system
5963 : * (APU or DGPU in a fusion system).
5964 : * todo: check if the system is a fusion platform.
5965 : */
5966 : bool fusion_platform = false;
5967 :
5968 0 : if (radeon_aspm == 0)
5969 0 : return;
5970 :
5971 0 : if (!(rdev->flags & RADEON_IS_PCIE))
5972 0 : return;
5973 :
5974 0 : switch (rdev->family) {
5975 : case CHIP_CYPRESS:
5976 : case CHIP_HEMLOCK:
5977 : case CHIP_JUNIPER:
5978 : case CHIP_REDWOOD:
5979 : case CHIP_CEDAR:
5980 : case CHIP_SUMO:
5981 : case CHIP_SUMO2:
5982 : case CHIP_PALM:
5983 : case CHIP_ARUBA:
5984 : disable_l0s = true;
5985 0 : break;
5986 : default:
5987 : disable_l0s = false;
5988 0 : break;
5989 : }
5990 :
5991 0 : if (rdev->flags & RADEON_IS_IGP)
5992 0 : fusion_platform = true; /* XXX also dGPUs in a fusion system */
5993 :
5994 0 : data = orig = RREG32_PIF_PHY0(PB0_PIF_PAIRING);
5995 0 : if (fusion_platform)
5996 0 : data &= ~MULTI_PIF;
5997 : else
5998 0 : data |= MULTI_PIF;
5999 0 : if (data != orig)
6000 0 : WREG32_PIF_PHY0(PB0_PIF_PAIRING, data);
6001 :
6002 0 : data = orig = RREG32_PIF_PHY1(PB1_PIF_PAIRING);
6003 0 : if (fusion_platform)
6004 0 : data &= ~MULTI_PIF;
6005 : else
6006 0 : data |= MULTI_PIF;
6007 0 : if (data != orig)
6008 0 : WREG32_PIF_PHY1(PB1_PIF_PAIRING, data);
6009 :
6010 0 : pcie_lc_cntl = pcie_lc_cntl_old = RREG32_PCIE_PORT(PCIE_LC_CNTL);
6011 0 : pcie_lc_cntl &= ~(LC_L0S_INACTIVITY_MASK | LC_L1_INACTIVITY_MASK);
6012 0 : if (!disable_l0s) {
6013 0 : if (rdev->family >= CHIP_BARTS)
6014 0 : pcie_lc_cntl |= LC_L0S_INACTIVITY(7);
6015 : else
6016 0 : pcie_lc_cntl |= LC_L0S_INACTIVITY(3);
6017 : }
6018 :
6019 0 : if (!disable_l1) {
6020 0 : if (rdev->family >= CHIP_BARTS)
6021 0 : pcie_lc_cntl |= LC_L1_INACTIVITY(7);
6022 : else
6023 0 : pcie_lc_cntl |= LC_L1_INACTIVITY(8);
6024 :
6025 0 : if (!disable_plloff_in_l1) {
6026 0 : data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0);
6027 0 : data &= ~(PLL_POWER_STATE_IN_OFF_0_MASK | PLL_POWER_STATE_IN_TXS2_0_MASK);
6028 0 : data |= PLL_POWER_STATE_IN_OFF_0(7) | PLL_POWER_STATE_IN_TXS2_0(7);
6029 0 : if (data != orig)
6030 0 : WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0, data);
6031 :
6032 0 : data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1);
6033 0 : data &= ~(PLL_POWER_STATE_IN_OFF_1_MASK | PLL_POWER_STATE_IN_TXS2_1_MASK);
6034 0 : data |= PLL_POWER_STATE_IN_OFF_1(7) | PLL_POWER_STATE_IN_TXS2_1(7);
6035 0 : if (data != orig)
6036 0 : WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1, data);
6037 :
6038 0 : data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0);
6039 0 : data &= ~(PLL_POWER_STATE_IN_OFF_0_MASK | PLL_POWER_STATE_IN_TXS2_0_MASK);
6040 0 : data |= PLL_POWER_STATE_IN_OFF_0(7) | PLL_POWER_STATE_IN_TXS2_0(7);
6041 0 : if (data != orig)
6042 0 : WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0, data);
6043 :
6044 0 : data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1);
6045 0 : data &= ~(PLL_POWER_STATE_IN_OFF_1_MASK | PLL_POWER_STATE_IN_TXS2_1_MASK);
6046 0 : data |= PLL_POWER_STATE_IN_OFF_1(7) | PLL_POWER_STATE_IN_TXS2_1(7);
6047 0 : if (data != orig)
6048 0 : WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1, data);
6049 :
6050 0 : if (rdev->family >= CHIP_BARTS) {
6051 0 : data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0);
6052 0 : data &= ~PLL_RAMP_UP_TIME_0_MASK;
6053 0 : data |= PLL_RAMP_UP_TIME_0(4);
6054 0 : if (data != orig)
6055 0 : WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0, data);
6056 :
6057 0 : data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1);
6058 0 : data &= ~PLL_RAMP_UP_TIME_1_MASK;
6059 0 : data |= PLL_RAMP_UP_TIME_1(4);
6060 0 : if (data != orig)
6061 0 : WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1, data);
6062 :
6063 0 : data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0);
6064 0 : data &= ~PLL_RAMP_UP_TIME_0_MASK;
6065 0 : data |= PLL_RAMP_UP_TIME_0(4);
6066 0 : if (data != orig)
6067 0 : WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0, data);
6068 :
6069 0 : data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1);
6070 0 : data &= ~PLL_RAMP_UP_TIME_1_MASK;
6071 0 : data |= PLL_RAMP_UP_TIME_1(4);
6072 0 : if (data != orig)
6073 0 : WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1, data);
6074 : }
6075 :
6076 0 : data = orig = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
6077 0 : data &= ~LC_DYN_LANES_PWR_STATE_MASK;
6078 0 : data |= LC_DYN_LANES_PWR_STATE(3);
6079 0 : if (data != orig)
6080 0 : WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, data);
6081 :
6082 0 : if (rdev->family >= CHIP_BARTS) {
6083 0 : data = orig = RREG32_PIF_PHY0(PB0_PIF_CNTL);
6084 0 : data &= ~LS2_EXIT_TIME_MASK;
6085 0 : data |= LS2_EXIT_TIME(1);
6086 0 : if (data != orig)
6087 0 : WREG32_PIF_PHY0(PB0_PIF_CNTL, data);
6088 :
6089 0 : data = orig = RREG32_PIF_PHY1(PB1_PIF_CNTL);
6090 0 : data &= ~LS2_EXIT_TIME_MASK;
6091 0 : data |= LS2_EXIT_TIME(1);
6092 0 : if (data != orig)
6093 0 : WREG32_PIF_PHY1(PB1_PIF_CNTL, data);
6094 : }
6095 : }
6096 : }
6097 :
6098 : /* evergreen parts only */
6099 0 : if (rdev->family < CHIP_BARTS)
6100 0 : pcie_lc_cntl |= LC_PMI_TO_L1_DIS;
6101 :
6102 0 : if (pcie_lc_cntl != pcie_lc_cntl_old)
6103 0 : WREG32_PCIE_PORT(PCIE_LC_CNTL, pcie_lc_cntl);
6104 0 : }
|