drm/radeon/kms/evergreen: implement irq support
drivers/gpu/drm/radeon/evergreen.c
/*
 * Copyright 2010 Advanced Micro Devices, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Alex Deucher
 */
#include <linux/firmware.h>
#include <linux/platform_device.h>
#include "drmP.h"
#include "radeon.h"
#include "radeon_asic.h"
#include "radeon_drm.h"
#include "evergreend.h"
#include "atom.h"
#include "avivod.h"
#include "evergreen_reg.h"

#define EVERGREEN_PFP_UCODE_SIZE 1120
#define EVERGREEN_PM4_UCODE_SIZE 1376

static void evergreen_gpu_init(struct radeon_device *rdev);
void evergreen_fini(struct radeon_device *rdev);

bool evergreen_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd)
{
        bool connected = false;
        /* XXX */
        return connected;
}

void evergreen_hpd_set_polarity(struct radeon_device *rdev,
                                enum radeon_hpd_id hpd)
{
        /* XXX */
}

void evergreen_hpd_init(struct radeon_device *rdev)
{
        /* XXX */
}

void evergreen_bandwidth_update(struct radeon_device *rdev)
{
        /* XXX */
}

void evergreen_hpd_fini(struct radeon_device *rdev)
{
        /* XXX */
}

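/* Poll for memory controller idle.  The 0x1F00 mask appears to cover the
 * MC busy bits of SRBM_STATUS (an inference from how the result is used
 * here); returns 0 once idle, -1 on timeout.
 */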
static int evergreen_mc_wait_for_idle(struct radeon_device *rdev)
{
        unsigned i;
        u32 tmp;

        for (i = 0; i < rdev->usec_timeout; i++) {
                /* read the MC busy bits from SRBM_STATUS */
                tmp = RREG32(SRBM_STATUS) & 0x1F00;
                if (!tmp)
                        return 0;
                udelay(1);
        }
        return -1;
}

/*
 * GART
 */
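/* Flush the VM TLB by issuing a request on VM context 0 and polling the
 * response field.  A response type of 2 appears to indicate a failed
 * flush (a reading of the register semantics, not documented here); any
 * other non-zero response is treated as success.
 */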
void evergreen_pcie_gart_tlb_flush(struct radeon_device *rdev)
{
        unsigned i;
        u32 tmp;

        WREG32(VM_CONTEXT0_REQUEST_RESPONSE, REQUEST_TYPE(1));
        for (i = 0; i < rdev->usec_timeout; i++) {
                /* read the VM context 0 response */
                tmp = RREG32(VM_CONTEXT0_REQUEST_RESPONSE);
                tmp = (tmp & RESPONSE_TYPE_MASK) >> RESPONSE_TYPE_SHIFT;
                if (tmp == 2) {
                        printk(KERN_WARNING "[drm] evergreen flush TLB failed\n");
                        return;
                }
                if (tmp)
                        return;
                udelay(1);
        }
}

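/* Bring up the PCIE GART: pin the page table in VRAM, configure the L2
 * cache and the per-block L1 TLBs, point VM context 0 at the GTT range,
 * and route faults to the dummy page before flushing the TLB.
 */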
int evergreen_pcie_gart_enable(struct radeon_device *rdev)
{
        u32 tmp;
        int r;

        if (rdev->gart.table.vram.robj == NULL) {
                dev_err(rdev->dev, "No VRAM object for PCIE GART.\n");
                return -EINVAL;
        }
        r = radeon_gart_table_vram_pin(rdev);
        if (r)
                return r;
        radeon_gart_restore(rdev);
        /* Setup L2 cache */
        WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
                                ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
                                EFFECTIVE_L2_QUEUE_SIZE(7));
        WREG32(VM_L2_CNTL2, 0);
        WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
        /* Setup TLB control */
        tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
                SYSTEM_ACCESS_MODE_NOT_IN_SYS |
                SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
                EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
        WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
        WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
        WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
        WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
        WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
        WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
        WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
        WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12);
        WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12);
        WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12);
        WREG32(VM_CONTEXT0_CNTL, ENABLE_CONTEXT | PAGE_TABLE_DEPTH(0) |
                                RANGE_PROTECTION_FAULT_ENABLE_DEFAULT);
        WREG32(VM_CONTEXT0_PROTECTION_FAULT_DEFAULT_ADDR,
                        (u32)(rdev->dummy_page.addr >> 12));
        WREG32(VM_CONTEXT1_CNTL, 0);

        evergreen_pcie_gart_tlb_flush(rdev);
        rdev->gart.ready = true;
        return 0;
}

void evergreen_pcie_gart_disable(struct radeon_device *rdev)
{
        u32 tmp;
        int r;

        /* Disable all tables */
        WREG32(VM_CONTEXT0_CNTL, 0);
        WREG32(VM_CONTEXT1_CNTL, 0);

        /* Setup L2 cache */
        WREG32(VM_L2_CNTL, ENABLE_L2_FRAGMENT_PROCESSING |
                                EFFECTIVE_L2_QUEUE_SIZE(7));
        WREG32(VM_L2_CNTL2, 0);
        WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
        /* Setup TLB control */
        tmp = EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
        WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
        WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
        WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
        WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
        WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
        WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
        WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
        if (rdev->gart.table.vram.robj) {
                r = radeon_bo_reserve(rdev->gart.table.vram.robj, false);
                if (likely(r == 0)) {
                        radeon_bo_kunmap(rdev->gart.table.vram.robj);
                        radeon_bo_unpin(rdev->gart.table.vram.robj);
                        radeon_bo_unreserve(rdev->gart.table.vram.robj);
                }
        }
}

void evergreen_pcie_gart_fini(struct radeon_device *rdev)
{
        evergreen_pcie_gart_disable(rdev);
        radeon_gart_table_vram_free(rdev);
        radeon_gart_fini(rdev);
}

void evergreen_agp_enable(struct radeon_device *rdev)
{
        u32 tmp;

        /* Setup L2 cache */
        WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
                                ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
                                EFFECTIVE_L2_QUEUE_SIZE(7));
        WREG32(VM_L2_CNTL2, 0);
        WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
        /* Setup TLB control */
        tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
                SYSTEM_ACCESS_MODE_NOT_IN_SYS |
                SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
                EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
        WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
        WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
        WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
        WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
        WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
        WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
        WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
        WREG32(VM_CONTEXT0_CNTL, 0);
        WREG32(VM_CONTEXT1_CNTL, 0);
}

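/* Save VGA and CRTC state, then blank all six display controllers so the
 * memory controller can be reprogrammed without live scanout fetching
 * from addresses that are about to move.
 */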
static void evergreen_mc_stop(struct radeon_device *rdev, struct evergreen_mc_save *save)
{
        save->vga_control[0] = RREG32(D1VGA_CONTROL);
        save->vga_control[1] = RREG32(D2VGA_CONTROL);
        save->vga_control[2] = RREG32(EVERGREEN_D3VGA_CONTROL);
        save->vga_control[3] = RREG32(EVERGREEN_D4VGA_CONTROL);
        save->vga_control[4] = RREG32(EVERGREEN_D5VGA_CONTROL);
        save->vga_control[5] = RREG32(EVERGREEN_D6VGA_CONTROL);
        save->vga_render_control = RREG32(VGA_RENDER_CONTROL);
        save->vga_hdp_control = RREG32(VGA_HDP_CONTROL);
        save->crtc_control[0] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET);
        save->crtc_control[1] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET);
        save->crtc_control[2] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET);
        save->crtc_control[3] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET);
        save->crtc_control[4] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET);
        save->crtc_control[5] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET);

        /* Stop all video */
        WREG32(VGA_RENDER_CONTROL, 0);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC0_REGISTER_OFFSET, 1);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC1_REGISTER_OFFSET, 1);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC2_REGISTER_OFFSET, 1);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC3_REGISTER_OFFSET, 1);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC4_REGISTER_OFFSET, 1);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC5_REGISTER_OFFSET, 1);
        WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
        WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
        WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
        WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
        WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
        WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);

        WREG32(D1VGA_CONTROL, 0);
        WREG32(D2VGA_CONTROL, 0);
        WREG32(EVERGREEN_D3VGA_CONTROL, 0);
        WREG32(EVERGREEN_D4VGA_CONTROL, 0);
        WREG32(EVERGREEN_D5VGA_CONTROL, 0);
        WREG32(EVERGREEN_D6VGA_CONTROL, 0);
}

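/* Point every primary/secondary surface and the VGA aperture at the
 * (possibly relocated) start of VRAM, then restore the VGA and CRTC
 * state saved by evergreen_mc_stop().
 */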
static void evergreen_mc_resume(struct radeon_device *rdev, struct evergreen_mc_save *save)
{
        WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC0_REGISTER_OFFSET,
               upper_32_bits(rdev->mc.vram_start));
        WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC0_REGISTER_OFFSET,
               upper_32_bits(rdev->mc.vram_start));
        WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC0_REGISTER_OFFSET,
               (u32)rdev->mc.vram_start);
        WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC0_REGISTER_OFFSET,
               (u32)rdev->mc.vram_start);

        WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC1_REGISTER_OFFSET,
               upper_32_bits(rdev->mc.vram_start));
        WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC1_REGISTER_OFFSET,
               upper_32_bits(rdev->mc.vram_start));
        WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC1_REGISTER_OFFSET,
               (u32)rdev->mc.vram_start);
        WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC1_REGISTER_OFFSET,
               (u32)rdev->mc.vram_start);

        WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC2_REGISTER_OFFSET,
               upper_32_bits(rdev->mc.vram_start));
        WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC2_REGISTER_OFFSET,
               upper_32_bits(rdev->mc.vram_start));
        WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC2_REGISTER_OFFSET,
               (u32)rdev->mc.vram_start);
        WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC2_REGISTER_OFFSET,
               (u32)rdev->mc.vram_start);

        WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC3_REGISTER_OFFSET,
               upper_32_bits(rdev->mc.vram_start));
        WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC3_REGISTER_OFFSET,
               upper_32_bits(rdev->mc.vram_start));
        WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC3_REGISTER_OFFSET,
               (u32)rdev->mc.vram_start);
        WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC3_REGISTER_OFFSET,
               (u32)rdev->mc.vram_start);

        WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC4_REGISTER_OFFSET,
               upper_32_bits(rdev->mc.vram_start));
        WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC4_REGISTER_OFFSET,
               upper_32_bits(rdev->mc.vram_start));
        WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC4_REGISTER_OFFSET,
               (u32)rdev->mc.vram_start);
        WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC4_REGISTER_OFFSET,
               (u32)rdev->mc.vram_start);

        WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC5_REGISTER_OFFSET,
               upper_32_bits(rdev->mc.vram_start));
        WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC5_REGISTER_OFFSET,
               upper_32_bits(rdev->mc.vram_start));
        WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC5_REGISTER_OFFSET,
               (u32)rdev->mc.vram_start);
        WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC5_REGISTER_OFFSET,
               (u32)rdev->mc.vram_start);

        WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS_HIGH, upper_32_bits(rdev->mc.vram_start));
        WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS, (u32)rdev->mc.vram_start);
        /* Unlock host access */
        WREG32(VGA_HDP_CONTROL, save->vga_hdp_control);
        mdelay(1);
        /* Restore video state */
        WREG32(D1VGA_CONTROL, save->vga_control[0]);
        WREG32(D2VGA_CONTROL, save->vga_control[1]);
        WREG32(EVERGREEN_D3VGA_CONTROL, save->vga_control[2]);
        WREG32(EVERGREEN_D4VGA_CONTROL, save->vga_control[3]);
        WREG32(EVERGREEN_D5VGA_CONTROL, save->vga_control[4]);
        WREG32(EVERGREEN_D6VGA_CONTROL, save->vga_control[5]);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC0_REGISTER_OFFSET, 1);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC1_REGISTER_OFFSET, 1);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC2_REGISTER_OFFSET, 1);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC3_REGISTER_OFFSET, 1);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC4_REGISTER_OFFSET, 1);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC5_REGISTER_OFFSET, 1);
        WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, save->crtc_control[0]);
        WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, save->crtc_control[1]);
        WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, save->crtc_control[2]);
        WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, save->crtc_control[3]);
        WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, save->crtc_control[4]);
        WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, save->crtc_control[5]);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
        WREG32(VGA_RENDER_CONTROL, save->vga_render_control);
}

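/* Program the MC apertures (system aperture, FB location, AGP range)
 * with the displays stopped, waiting for MC idle around the update so
 * in-flight requests do not straddle the relocation.
 */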
static void evergreen_mc_program(struct radeon_device *rdev)
{
        struct evergreen_mc_save save;
        u32 tmp;
        int i, j;

        /* Initialize HDP */
        for (i = 0, j = 0; i < 32; i++, j += 0x18) {
                WREG32((0x2c14 + j), 0x00000000);
                WREG32((0x2c18 + j), 0x00000000);
                WREG32((0x2c1c + j), 0x00000000);
                WREG32((0x2c20 + j), 0x00000000);
                WREG32((0x2c24 + j), 0x00000000);
        }
        WREG32(HDP_REG_COHERENCY_FLUSH_CNTL, 0);

        evergreen_mc_stop(rdev, &save);
        if (evergreen_mc_wait_for_idle(rdev)) {
                dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
        }
        /* Lockout access through VGA aperture */
        WREG32(VGA_HDP_CONTROL, VGA_MEMORY_DISABLE);
        /* Update configuration */
        if (rdev->flags & RADEON_IS_AGP) {
                if (rdev->mc.vram_start < rdev->mc.gtt_start) {
                        /* VRAM before AGP */
                        WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
                                rdev->mc.vram_start >> 12);
                        WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
                                rdev->mc.gtt_end >> 12);
                } else {
                        /* VRAM after AGP */
                        WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
                                rdev->mc.gtt_start >> 12);
                        WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
                                rdev->mc.vram_end >> 12);
                }
        } else {
                WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
                        rdev->mc.vram_start >> 12);
                WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
                        rdev->mc.vram_end >> 12);
        }
        WREG32(MC_VM_SYSTEM_APERTURE_DEFAULT_ADDR, 0);
        tmp = ((rdev->mc.vram_end >> 24) & 0xFFFF) << 16;
        tmp |= ((rdev->mc.vram_start >> 24) & 0xFFFF);
        WREG32(MC_VM_FB_LOCATION, tmp);
        WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8));
        WREG32(HDP_NONSURFACE_INFO, (2 << 7));
        WREG32(HDP_NONSURFACE_SIZE, (rdev->mc.mc_vram_size - 1) | 0x3FF);
        if (rdev->flags & RADEON_IS_AGP) {
                WREG32(MC_VM_AGP_TOP, rdev->mc.gtt_end >> 16);
                WREG32(MC_VM_AGP_BOT, rdev->mc.gtt_start >> 16);
                WREG32(MC_VM_AGP_BASE, rdev->mc.agp_base >> 22);
        } else {
                WREG32(MC_VM_AGP_BASE, 0);
                WREG32(MC_VM_AGP_TOP, 0x0FFFFFFF);
                WREG32(MC_VM_AGP_BOT, 0x0FFFFFFF);
        }
        if (evergreen_mc_wait_for_idle(rdev)) {
                dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
        }
        evergreen_mc_resume(rdev, &save);
        /* we need to own VRAM, so turn off the VGA renderer here
         * to stop it overwriting our objects */
        rv515_vga_render_disable(rdev);
}

/*
 * CP.
 */

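/* Upload the PFP and ME (PM4) microcode through the indexed
 * UCODE_ADDR/UCODE_DATA and RAM_WADDR/RAM_DATA register pairs.  Word
 * counts come from the EVERGREEN_*_UCODE_SIZE defines above; firmware
 * images are big-endian, hence the be32_to_cpup() conversion.
 */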
static int evergreen_cp_load_microcode(struct radeon_device *rdev)
{
        const __be32 *fw_data;
        int i;

        if (!rdev->me_fw || !rdev->pfp_fw)
                return -EINVAL;

        r700_cp_stop(rdev);
        WREG32(CP_RB_CNTL, RB_NO_UPDATE | (15 << 8) | (3 << 0));

        fw_data = (const __be32 *)rdev->pfp_fw->data;
        WREG32(CP_PFP_UCODE_ADDR, 0);
        for (i = 0; i < EVERGREEN_PFP_UCODE_SIZE; i++)
                WREG32(CP_PFP_UCODE_DATA, be32_to_cpup(fw_data++));
        WREG32(CP_PFP_UCODE_ADDR, 0);

        fw_data = (const __be32 *)rdev->me_fw->data;
        WREG32(CP_ME_RAM_WADDR, 0);
        for (i = 0; i < EVERGREEN_PM4_UCODE_SIZE; i++)
                WREG32(CP_ME_RAM_DATA, be32_to_cpup(fw_data++));

        WREG32(CP_PFP_UCODE_ADDR, 0);
        WREG32(CP_ME_RAM_WADDR, 0);
        WREG32(CP_ME_RAM_RADDR, 0);
        return 0;
}

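/* Restart the CP: soft-reset the gfx front end, size the ring buffer
 * (rb_bufsz is log2 of the ring size in 8-byte units), initialize the
 * read and write pointers, and run a ring test before declaring the CP
 * ready.
 */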
int evergreen_cp_resume(struct radeon_device *rdev)
{
        u32 tmp;
        u32 rb_bufsz;
        int r;

        /* Reset cp; if cp is reset, then PA, SH, VGT also need to be reset */
        WREG32(GRBM_SOFT_RESET, (SOFT_RESET_CP |
                                 SOFT_RESET_PA |
                                 SOFT_RESET_SH |
                                 SOFT_RESET_VGT |
                                 SOFT_RESET_SX));
        RREG32(GRBM_SOFT_RESET);
        mdelay(15);
        WREG32(GRBM_SOFT_RESET, 0);
        RREG32(GRBM_SOFT_RESET);

        /* Set ring buffer size */
        rb_bufsz = drm_order(rdev->cp.ring_size / 8);
        tmp = RB_NO_UPDATE | (drm_order(RADEON_GPU_PAGE_SIZE/8) << 8) | rb_bufsz;
#ifdef __BIG_ENDIAN
        tmp |= BUF_SWAP_32BIT;
#endif
        WREG32(CP_RB_CNTL, tmp);
        WREG32(CP_SEM_WAIT_TIMER, 0x4);

        /* Set the write pointer delay */
        WREG32(CP_RB_WPTR_DELAY, 0);

        /* Initialize the ring buffer's read and write pointers */
        WREG32(CP_RB_CNTL, tmp | RB_RPTR_WR_ENA);
        WREG32(CP_RB_RPTR_WR, 0);
        WREG32(CP_RB_WPTR, 0);
        WREG32(CP_RB_RPTR_ADDR, rdev->cp.gpu_addr & 0xFFFFFFFF);
        WREG32(CP_RB_RPTR_ADDR_HI, upper_32_bits(rdev->cp.gpu_addr));
        mdelay(1);
        WREG32(CP_RB_CNTL, tmp);

        WREG32(CP_RB_BASE, rdev->cp.gpu_addr >> 8);
        WREG32(CP_DEBUG, (1 << 27) | (1 << 28));

        rdev->cp.rptr = RREG32(CP_RB_RPTR);
        rdev->cp.wptr = RREG32(CP_RB_WPTR);

        r600_cp_start(rdev);
        rdev->cp.ready = true;
        r = radeon_ring_test(rdev);
        if (r) {
                rdev->cp.ready = false;
                return r;
        }
        return 0;
}

/*
 * Core functions
 */
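/* Build the pipe-to-backend map: each enabled render backend gets a
 * 4-bit slot indexed by (possibly swizzled) tile pipe.  Swizzling
 * interleaves pipes across backends; it is recomputed below and only
 * kept off when two consecutive backends are enabled on the families
 * that start with force_no_swizzle set.
 */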
static u32 evergreen_get_tile_pipe_to_backend_map(struct radeon_device *rdev,
                                                  u32 num_tile_pipes,
                                                  u32 num_backends,
                                                  u32 backend_disable_mask)
{
        u32 backend_map = 0;
        u32 enabled_backends_mask = 0;
        u32 enabled_backends_count = 0;
        u32 cur_pipe;
        u32 swizzle_pipe[EVERGREEN_MAX_PIPES];
        u32 cur_backend = 0;
        u32 i;
        bool force_no_swizzle;

        if (num_tile_pipes > EVERGREEN_MAX_PIPES)
                num_tile_pipes = EVERGREEN_MAX_PIPES;
        if (num_tile_pipes < 1)
                num_tile_pipes = 1;
        if (num_backends > EVERGREEN_MAX_BACKENDS)
                num_backends = EVERGREEN_MAX_BACKENDS;
        if (num_backends < 1)
                num_backends = 1;

        for (i = 0; i < EVERGREEN_MAX_BACKENDS; ++i) {
                if (((backend_disable_mask >> i) & 1) == 0) {
                        enabled_backends_mask |= (1 << i);
                        ++enabled_backends_count;
                }
                if (enabled_backends_count == num_backends)
                        break;
        }

        if (enabled_backends_count == 0) {
                enabled_backends_mask = 1;
                enabled_backends_count = 1;
        }

        if (enabled_backends_count != num_backends)
                num_backends = enabled_backends_count;

        memset(swizzle_pipe, 0, sizeof(swizzle_pipe));
        switch (rdev->family) {
        case CHIP_CEDAR:
        case CHIP_REDWOOD:
                force_no_swizzle = false;
                break;
        case CHIP_CYPRESS:
        case CHIP_HEMLOCK:
        case CHIP_JUNIPER:
        default:
                force_no_swizzle = true;
                break;
        }
        if (force_no_swizzle) {
                bool last_backend_enabled = false;

                force_no_swizzle = false;
                for (i = 0; i < EVERGREEN_MAX_BACKENDS; ++i) {
                        if (((enabled_backends_mask >> i) & 1) == 1) {
                                if (last_backend_enabled)
                                        force_no_swizzle = true;
                                last_backend_enabled = true;
                        } else
                                last_backend_enabled = false;
                }
        }

        switch (num_tile_pipes) {
        case 1:
        case 3:
        case 5:
        case 7:
                DRM_ERROR("odd number of pipes!\n");
                break;
        case 2:
                swizzle_pipe[0] = 0;
                swizzle_pipe[1] = 1;
                break;
        case 4:
                if (force_no_swizzle) {
                        swizzle_pipe[0] = 0;
                        swizzle_pipe[1] = 1;
                        swizzle_pipe[2] = 2;
                        swizzle_pipe[3] = 3;
                } else {
                        swizzle_pipe[0] = 0;
                        swizzle_pipe[1] = 2;
                        swizzle_pipe[2] = 1;
                        swizzle_pipe[3] = 3;
                }
                break;
        case 6:
                if (force_no_swizzle) {
                        swizzle_pipe[0] = 0;
                        swizzle_pipe[1] = 1;
                        swizzle_pipe[2] = 2;
                        swizzle_pipe[3] = 3;
                        swizzle_pipe[4] = 4;
                        swizzle_pipe[5] = 5;
                } else {
                        swizzle_pipe[0] = 0;
                        swizzle_pipe[1] = 2;
                        swizzle_pipe[2] = 4;
                        swizzle_pipe[3] = 1;
                        swizzle_pipe[4] = 3;
                        swizzle_pipe[5] = 5;
                }
                break;
        case 8:
                if (force_no_swizzle) {
                        swizzle_pipe[0] = 0;
                        swizzle_pipe[1] = 1;
                        swizzle_pipe[2] = 2;
                        swizzle_pipe[3] = 3;
                        swizzle_pipe[4] = 4;
                        swizzle_pipe[5] = 5;
                        swizzle_pipe[6] = 6;
                        swizzle_pipe[7] = 7;
                } else {
                        swizzle_pipe[0] = 0;
                        swizzle_pipe[1] = 2;
                        swizzle_pipe[2] = 4;
                        swizzle_pipe[3] = 6;
                        swizzle_pipe[4] = 1;
                        swizzle_pipe[5] = 3;
                        swizzle_pipe[6] = 5;
                        swizzle_pipe[7] = 7;
                }
                break;
        }

        for (cur_pipe = 0; cur_pipe < num_tile_pipes; ++cur_pipe) {
                while (((1 << cur_backend) & enabled_backends_mask) == 0)
                        cur_backend = (cur_backend + 1) % EVERGREEN_MAX_BACKENDS;

                backend_map |= (((cur_backend & 0xf) << (swizzle_pipe[cur_pipe] * 4)));

                cur_backend = (cur_backend + 1) % EVERGREEN_MAX_BACKENDS;
        }

        return backend_map;
}

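/* Per-family static configuration (pipes, SIMDs, backends, GPR/thread
 * limits, FIFO sizes), followed by programming of the gfx block defaults
 * derived from it.
 */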
static void evergreen_gpu_init(struct radeon_device *rdev)
{
        u32 cc_rb_backend_disable = 0;
        u32 cc_gc_shader_pipe_config;
        u32 gb_addr_config = 0;
        u32 mc_shared_chmap, mc_arb_ramcfg;
        u32 gb_backend_map;
        u32 grbm_gfx_index;
        u32 sx_debug_1;
        u32 smx_dc_ctl0;
        u32 sq_config;
        u32 sq_lds_resource_mgmt;
        u32 sq_gpr_resource_mgmt_1;
        u32 sq_gpr_resource_mgmt_2;
        u32 sq_gpr_resource_mgmt_3;
        u32 sq_thread_resource_mgmt;
        u32 sq_thread_resource_mgmt_2;
        u32 sq_stack_resource_mgmt_1;
        u32 sq_stack_resource_mgmt_2;
        u32 sq_stack_resource_mgmt_3;
        u32 vgt_cache_invalidation;
        u32 hdp_host_path_cntl;
        int i, j, num_shader_engines, ps_thread_count;

        switch (rdev->family) {
        case CHIP_CYPRESS:
        case CHIP_HEMLOCK:
                rdev->config.evergreen.num_ses = 2;
                rdev->config.evergreen.max_pipes = 4;
                rdev->config.evergreen.max_tile_pipes = 8;
                rdev->config.evergreen.max_simds = 10;
                rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
                rdev->config.evergreen.max_gprs = 256;
                rdev->config.evergreen.max_threads = 248;
                rdev->config.evergreen.max_gs_threads = 32;
                rdev->config.evergreen.max_stack_entries = 512;
                rdev->config.evergreen.sx_num_of_sets = 4;
                rdev->config.evergreen.sx_max_export_size = 256;
                rdev->config.evergreen.sx_max_export_pos_size = 64;
                rdev->config.evergreen.sx_max_export_smx_size = 192;
                rdev->config.evergreen.max_hw_contexts = 8;
                rdev->config.evergreen.sq_num_cf_insts = 2;

                rdev->config.evergreen.sc_prim_fifo_size = 0x100;
                rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
                rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
                break;
        case CHIP_JUNIPER:
                rdev->config.evergreen.num_ses = 1;
                rdev->config.evergreen.max_pipes = 4;
                rdev->config.evergreen.max_tile_pipes = 4;
                rdev->config.evergreen.max_simds = 10;
                rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
                rdev->config.evergreen.max_gprs = 256;
                rdev->config.evergreen.max_threads = 248;
                rdev->config.evergreen.max_gs_threads = 32;
                rdev->config.evergreen.max_stack_entries = 512;
                rdev->config.evergreen.sx_num_of_sets = 4;
                rdev->config.evergreen.sx_max_export_size = 256;
                rdev->config.evergreen.sx_max_export_pos_size = 64;
                rdev->config.evergreen.sx_max_export_smx_size = 192;
                rdev->config.evergreen.max_hw_contexts = 8;
                rdev->config.evergreen.sq_num_cf_insts = 2;

                rdev->config.evergreen.sc_prim_fifo_size = 0x100;
                rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
                rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
                break;
        case CHIP_REDWOOD:
                rdev->config.evergreen.num_ses = 1;
                rdev->config.evergreen.max_pipes = 4;
                rdev->config.evergreen.max_tile_pipes = 4;
                rdev->config.evergreen.max_simds = 5;
                rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
                rdev->config.evergreen.max_gprs = 256;
                rdev->config.evergreen.max_threads = 248;
                rdev->config.evergreen.max_gs_threads = 32;
                rdev->config.evergreen.max_stack_entries = 256;
                rdev->config.evergreen.sx_num_of_sets = 4;
                rdev->config.evergreen.sx_max_export_size = 256;
                rdev->config.evergreen.sx_max_export_pos_size = 64;
                rdev->config.evergreen.sx_max_export_smx_size = 192;
                rdev->config.evergreen.max_hw_contexts = 8;
                rdev->config.evergreen.sq_num_cf_insts = 2;

                rdev->config.evergreen.sc_prim_fifo_size = 0x100;
                rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
                rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
                break;
        case CHIP_CEDAR:
        default:
                rdev->config.evergreen.num_ses = 1;
                rdev->config.evergreen.max_pipes = 2;
                rdev->config.evergreen.max_tile_pipes = 2;
                rdev->config.evergreen.max_simds = 2;
                rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
                rdev->config.evergreen.max_gprs = 256;
                rdev->config.evergreen.max_threads = 192;
                rdev->config.evergreen.max_gs_threads = 16;
                rdev->config.evergreen.max_stack_entries = 256;
                rdev->config.evergreen.sx_num_of_sets = 4;
                rdev->config.evergreen.sx_max_export_size = 128;
                rdev->config.evergreen.sx_max_export_pos_size = 32;
                rdev->config.evergreen.sx_max_export_smx_size = 96;
                rdev->config.evergreen.max_hw_contexts = 4;
                rdev->config.evergreen.sq_num_cf_insts = 1;

                rdev->config.evergreen.sc_prim_fifo_size = 0x40;
                rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
                rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
                break;
        }

        /* Initialize HDP */
        for (i = 0, j = 0; i < 32; i++, j += 0x18) {
                WREG32((0x2c14 + j), 0x00000000);
                WREG32((0x2c18 + j), 0x00000000);
                WREG32((0x2c1c + j), 0x00000000);
                WREG32((0x2c20 + j), 0x00000000);
                WREG32((0x2c24 + j), 0x00000000);
        }

        WREG32(GRBM_CNTL, GRBM_READ_TIMEOUT(0xff));

        cc_gc_shader_pipe_config = RREG32(CC_GC_SHADER_PIPE_CONFIG) & ~2;

        cc_gc_shader_pipe_config |=
                INACTIVE_QD_PIPES((EVERGREEN_MAX_PIPES_MASK << rdev->config.evergreen.max_pipes)
                                  & EVERGREEN_MAX_PIPES_MASK);
        cc_gc_shader_pipe_config |=
                INACTIVE_SIMDS((EVERGREEN_MAX_SIMDS_MASK << rdev->config.evergreen.max_simds)
                               & EVERGREEN_MAX_SIMDS_MASK);

        cc_rb_backend_disable =
                BACKEND_DISABLE((EVERGREEN_MAX_BACKENDS_MASK << rdev->config.evergreen.max_backends)
                                & EVERGREEN_MAX_BACKENDS_MASK);

        mc_shared_chmap = RREG32(MC_SHARED_CHMAP);
        mc_arb_ramcfg = RREG32(MC_ARB_RAMCFG);

        switch (rdev->config.evergreen.max_tile_pipes) {
        case 1:
        default:
                gb_addr_config |= NUM_PIPES(0);
                break;
        case 2:
                gb_addr_config |= NUM_PIPES(1);
                break;
        case 4:
                gb_addr_config |= NUM_PIPES(2);
                break;
        case 8:
                gb_addr_config |= NUM_PIPES(3);
                break;
        }

        gb_addr_config |= PIPE_INTERLEAVE_SIZE((mc_arb_ramcfg & BURSTLENGTH_MASK) >> BURSTLENGTH_SHIFT);
        gb_addr_config |= BANK_INTERLEAVE_SIZE(0);
        gb_addr_config |= NUM_SHADER_ENGINES(rdev->config.evergreen.num_ses - 1);
        gb_addr_config |= SHADER_ENGINE_TILE_SIZE(1);
        gb_addr_config |= NUM_GPUS(0); /* Hemlock? */
        gb_addr_config |= MULTI_GPU_TILE_SIZE(2);

        if (((mc_arb_ramcfg & NOOFCOLS_MASK) >> NOOFCOLS_SHIFT) > 2)
                gb_addr_config |= ROW_SIZE(2);
        else
                gb_addr_config |= ROW_SIZE((mc_arb_ramcfg & NOOFCOLS_MASK) >> NOOFCOLS_SHIFT);

        if (rdev->ddev->pdev->device == 0x689e) {
                u32 efuse_straps_4;
                u32 efuse_straps_3;
                u8 efuse_box_bit_131_124;

                WREG32(RCU_IND_INDEX, 0x204);
                efuse_straps_4 = RREG32(RCU_IND_DATA);
                WREG32(RCU_IND_INDEX, 0x203);
                efuse_straps_3 = RREG32(RCU_IND_DATA);
                efuse_box_bit_131_124 = (u8)(((efuse_straps_4 & 0xf) << 4) | ((efuse_straps_3 & 0xf0000000) >> 28));

                switch (efuse_box_bit_131_124) {
                case 0x00:
                        gb_backend_map = 0x76543210;
                        break;
                case 0x55:
                        gb_backend_map = 0x77553311;
                        break;
                case 0x56:
                        gb_backend_map = 0x77553300;
                        break;
                case 0x59:
                        gb_backend_map = 0x77552211;
                        break;
                case 0x66:
                        gb_backend_map = 0x77443300;
                        break;
                case 0x99:
                        gb_backend_map = 0x66552211;
                        break;
                case 0x5a:
                        gb_backend_map = 0x77552200;
                        break;
                case 0xaa:
                        gb_backend_map = 0x66442200;
                        break;
                case 0x95:
                        gb_backend_map = 0x66553311;
                        break;
                default:
                        DRM_ERROR("bad backend map, using default\n");
                        gb_backend_map =
                                evergreen_get_tile_pipe_to_backend_map(rdev,
                                                                       rdev->config.evergreen.max_tile_pipes,
                                                                       rdev->config.evergreen.max_backends,
                                                                       ((EVERGREEN_MAX_BACKENDS_MASK <<
                                                                         rdev->config.evergreen.max_backends) &
                                                                        EVERGREEN_MAX_BACKENDS_MASK));
                        break;
                }
        } else if (rdev->ddev->pdev->device == 0x68b9) {
                u32 efuse_straps_3;
                u8 efuse_box_bit_127_124;

                WREG32(RCU_IND_INDEX, 0x203);
                efuse_straps_3 = RREG32(RCU_IND_DATA);
                efuse_box_bit_127_124 = (u8)((efuse_straps_3 & 0xF0000000) >> 28);

                switch (efuse_box_bit_127_124) {
                case 0x0:
                        gb_backend_map = 0x00003210;
                        break;
                case 0x5:
                case 0x6:
                case 0x9:
                case 0xa:
                        gb_backend_map = 0x00003311;
                        break;
                default:
                        DRM_ERROR("bad backend map, using default\n");
                        gb_backend_map =
                                evergreen_get_tile_pipe_to_backend_map(rdev,
                                                                       rdev->config.evergreen.max_tile_pipes,
                                                                       rdev->config.evergreen.max_backends,
                                                                       ((EVERGREEN_MAX_BACKENDS_MASK <<
                                                                         rdev->config.evergreen.max_backends) &
                                                                        EVERGREEN_MAX_BACKENDS_MASK));
                        break;
                }
        } else
                gb_backend_map =
                        evergreen_get_tile_pipe_to_backend_map(rdev,
                                                               rdev->config.evergreen.max_tile_pipes,
                                                               rdev->config.evergreen.max_backends,
                                                               ((EVERGREEN_MAX_BACKENDS_MASK <<
                                                                 rdev->config.evergreen.max_backends) &
                                                                EVERGREEN_MAX_BACKENDS_MASK));

        WREG32(GB_BACKEND_MAP, gb_backend_map);
        WREG32(GB_ADDR_CONFIG, gb_addr_config);
        WREG32(DMIF_ADDR_CONFIG, gb_addr_config);
        WREG32(HDP_ADDR_CONFIG, gb_addr_config);

        num_shader_engines = ((RREG32(GB_ADDR_CONFIG) & NUM_SHADER_ENGINES(3)) >> 12) + 1;
        grbm_gfx_index = INSTANCE_BROADCAST_WRITES;

        for (i = 0; i < rdev->config.evergreen.num_ses; i++) {
                u32 rb = cc_rb_backend_disable | (0xf0 << 16);
                u32 sp = cc_gc_shader_pipe_config;
                u32 gfx = grbm_gfx_index | SE_INDEX(i);

                if (i == num_shader_engines) {
                        rb |= BACKEND_DISABLE(EVERGREEN_MAX_BACKENDS_MASK);
                        sp |= INACTIVE_SIMDS(EVERGREEN_MAX_SIMDS_MASK);
                }

                WREG32(GRBM_GFX_INDEX, gfx);
                WREG32(RLC_GFX_INDEX, gfx);

                WREG32(CC_RB_BACKEND_DISABLE, rb);
                WREG32(CC_SYS_RB_BACKEND_DISABLE, rb);
                WREG32(GC_USER_RB_BACKEND_DISABLE, rb);
                WREG32(CC_GC_SHADER_PIPE_CONFIG, sp);
        }

        grbm_gfx_index |= SE_BROADCAST_WRITES;
        WREG32(GRBM_GFX_INDEX, grbm_gfx_index);
        WREG32(RLC_GFX_INDEX, grbm_gfx_index);

        WREG32(CGTS_SYS_TCC_DISABLE, 0);
        WREG32(CGTS_TCC_DISABLE, 0);
        WREG32(CGTS_USER_SYS_TCC_DISABLE, 0);
        WREG32(CGTS_USER_TCC_DISABLE, 0);

        /* set HW defaults for 3D engine */
        WREG32(CP_QUEUE_THRESHOLDS, (ROQ_IB1_START(0x16) |
                                     ROQ_IB2_START(0x2b)));

        WREG32(CP_MEQ_THRESHOLDS, STQ_SPLIT(0x30));

        WREG32(TA_CNTL_AUX, (DISABLE_CUBE_ANISO |
                             SYNC_GRADIENT |
                             SYNC_WALKER |
                             SYNC_ALIGNER));

        sx_debug_1 = RREG32(SX_DEBUG_1);
        sx_debug_1 |= ENABLE_NEW_SMX_ADDRESS;
        WREG32(SX_DEBUG_1, sx_debug_1);

        smx_dc_ctl0 = RREG32(SMX_DC_CTL0);
        smx_dc_ctl0 &= ~NUMBER_OF_SETS(0x1ff);
        smx_dc_ctl0 |= NUMBER_OF_SETS(rdev->config.evergreen.sx_num_of_sets);
        WREG32(SMX_DC_CTL0, smx_dc_ctl0);

        WREG32(SX_EXPORT_BUFFER_SIZES, (COLOR_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_size / 4) - 1) |
                                        POSITION_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_pos_size / 4) - 1) |
                                        SMX_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_smx_size / 4) - 1)));

        WREG32(PA_SC_FIFO_SIZE, (SC_PRIM_FIFO_SIZE(rdev->config.evergreen.sc_prim_fifo_size) |
                                 SC_HIZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_hiz_tile_fifo_size) |
                                 SC_EARLYZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_earlyz_tile_fifo_size)));

        WREG32(VGT_NUM_INSTANCES, 1);
        WREG32(SPI_CONFIG_CNTL, 0);
        WREG32(SPI_CONFIG_CNTL_1, VTX_DONE_DELAY(4));
        WREG32(CP_PERFMON_CNTL, 0);

        WREG32(SQ_MS_FIFO_SIZES, (CACHE_FIFO_SIZE(16 * rdev->config.evergreen.sq_num_cf_insts) |
                                  FETCH_FIFO_HIWATER(0x4) |
                                  DONE_FIFO_HIWATER(0xe0) |
                                  ALU_UPDATE_FIFO_HIWATER(0x8)));

        sq_config = RREG32(SQ_CONFIG);
        sq_config &= ~(PS_PRIO(3) |
                       VS_PRIO(3) |
                       GS_PRIO(3) |
                       ES_PRIO(3));
        sq_config |= (VC_ENABLE |
                      EXPORT_SRC_C |
                      PS_PRIO(0) |
                      VS_PRIO(1) |
                      GS_PRIO(2) |
                      ES_PRIO(3));

        if (rdev->family == CHIP_CEDAR)
                /* no vertex cache */
                sq_config &= ~VC_ENABLE;

        sq_lds_resource_mgmt = RREG32(SQ_LDS_RESOURCE_MGMT);

        sq_gpr_resource_mgmt_1 = NUM_PS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 12 / 32);
        sq_gpr_resource_mgmt_1 |= NUM_VS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 6 / 32);
        sq_gpr_resource_mgmt_1 |= NUM_CLAUSE_TEMP_GPRS(4);
        sq_gpr_resource_mgmt_2 = NUM_GS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
        sq_gpr_resource_mgmt_2 |= NUM_ES_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
        sq_gpr_resource_mgmt_3 = NUM_HS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
        sq_gpr_resource_mgmt_3 |= NUM_LS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);

        if (rdev->family == CHIP_CEDAR)
                ps_thread_count = 96;
        else
                ps_thread_count = 128;

        sq_thread_resource_mgmt = NUM_PS_THREADS(ps_thread_count);
        sq_thread_resource_mgmt |= NUM_VS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
        sq_thread_resource_mgmt |= NUM_GS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
        sq_thread_resource_mgmt |= NUM_ES_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
        sq_thread_resource_mgmt_2 = NUM_HS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
        sq_thread_resource_mgmt_2 |= NUM_LS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);

        sq_stack_resource_mgmt_1 = NUM_PS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
        sq_stack_resource_mgmt_1 |= NUM_VS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
        sq_stack_resource_mgmt_2 = NUM_GS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
        sq_stack_resource_mgmt_2 |= NUM_ES_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
        sq_stack_resource_mgmt_3 = NUM_HS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
        sq_stack_resource_mgmt_3 |= NUM_LS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);

        WREG32(SQ_CONFIG, sq_config);
        WREG32(SQ_GPR_RESOURCE_MGMT_1, sq_gpr_resource_mgmt_1);
        WREG32(SQ_GPR_RESOURCE_MGMT_2, sq_gpr_resource_mgmt_2);
        WREG32(SQ_GPR_RESOURCE_MGMT_3, sq_gpr_resource_mgmt_3);
        WREG32(SQ_THREAD_RESOURCE_MGMT, sq_thread_resource_mgmt);
        WREG32(SQ_THREAD_RESOURCE_MGMT_2, sq_thread_resource_mgmt_2);
        WREG32(SQ_STACK_RESOURCE_MGMT_1, sq_stack_resource_mgmt_1);
        WREG32(SQ_STACK_RESOURCE_MGMT_2, sq_stack_resource_mgmt_2);
        WREG32(SQ_STACK_RESOURCE_MGMT_3, sq_stack_resource_mgmt_3);
        WREG32(SQ_DYN_GPR_CNTL_PS_FLUSH_REQ, 0);
        WREG32(SQ_LDS_RESOURCE_MGMT, sq_lds_resource_mgmt);

        WREG32(PA_SC_FORCE_EOV_MAX_CNTS, (FORCE_EOV_MAX_CLK_CNT(4095) |
                                          FORCE_EOV_MAX_REZ_CNT(255)));

        if (rdev->family == CHIP_CEDAR)
                vgt_cache_invalidation = CACHE_INVALIDATION(TC_ONLY);
        else
                vgt_cache_invalidation = CACHE_INVALIDATION(VC_AND_TC);
        vgt_cache_invalidation |= AUTO_INVLD_EN(ES_AND_GS_AUTO);
        WREG32(VGT_CACHE_INVALIDATION, vgt_cache_invalidation);

        WREG32(VGT_GS_VERTEX_REUSE, 16);
        WREG32(PA_SC_LINE_STIPPLE_STATE, 0);

        WREG32(CB_PERF_CTR0_SEL_0, 0);
        WREG32(CB_PERF_CTR0_SEL_1, 0);
        WREG32(CB_PERF_CTR1_SEL_0, 0);
        WREG32(CB_PERF_CTR1_SEL_1, 0);
        WREG32(CB_PERF_CTR2_SEL_0, 0);
        WREG32(CB_PERF_CTR2_SEL_1, 0);
        WREG32(CB_PERF_CTR3_SEL_0, 0);
        WREG32(CB_PERF_CTR3_SEL_1, 0);

        hdp_host_path_cntl = RREG32(HDP_HOST_PATH_CNTL);
        WREG32(HDP_HOST_PATH_CNTL, hdp_host_path_cntl);

        WREG32(PA_CL_ENHANCE, CLIP_VTX_REORDER_ENA | NUM_CLIP_SEQ(3));

        udelay(50);
}

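/* VRAM width is channel count times channel size; CONFIG_MEMSIZE
 * reports the VRAM size in MB on evergreen.  VRAM beyond the PCI
 * aperture is clamped until unmappable VRAM is supported.
 */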
int evergreen_mc_init(struct radeon_device *rdev)
{
        u32 tmp;
        int chansize, numchan;

        /* Get VRAM information */
        rdev->mc.vram_is_ddr = true;
        tmp = RREG32(MC_ARB_RAMCFG);
        if (tmp & CHANSIZE_OVERRIDE) {
                chansize = 16;
        } else if (tmp & CHANSIZE_MASK) {
                chansize = 64;
        } else {
                chansize = 32;
        }
        tmp = RREG32(MC_SHARED_CHMAP);
        switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
        case 0:
        default:
                numchan = 1;
                break;
        case 1:
                numchan = 2;
                break;
        case 2:
                numchan = 4;
                break;
        case 3:
                numchan = 8;
                break;
        }
        rdev->mc.vram_width = numchan * chansize;
        /* Could aperture size report 0? */
        rdev->mc.aper_base = drm_get_resource_start(rdev->ddev, 0);
        rdev->mc.aper_size = drm_get_resource_len(rdev->ddev, 0);
        /* Setup GPU memory space */
        /* size in MB on evergreen */
        rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE) * 1024 * 1024;
        rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE) * 1024 * 1024;
        rdev->mc.visible_vram_size = rdev->mc.aper_size;
        /* FIXME remove this once we support unmappable VRAM */
        if (rdev->mc.mc_vram_size > rdev->mc.aper_size) {
                rdev->mc.mc_vram_size = rdev->mc.aper_size;
                rdev->mc.real_vram_size = rdev->mc.aper_size;
        }
        r600_vram_gtt_location(rdev, &rdev->mc);
        radeon_update_bandwidth_info(rdev);

        return 0;
}

bool evergreen_gpu_is_lockup(struct radeon_device *rdev)
{
        /* FIXME: implement for evergreen */
        return false;
}

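/* Soft reset: halt the CP, pulse GRBM_SOFT_RESET for the gfx blocks and
 * SRBM_SOFT_RESET for the system blocks, then re-run the AtomBIOS
 * asic_init table since the GPU often comes back in an incoherent
 * state.  Status registers are dumped before and after for debugging.
 */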
static int evergreen_gpu_soft_reset(struct radeon_device *rdev)
{
        struct evergreen_mc_save save;
        u32 srbm_reset = 0;
        u32 grbm_reset = 0;

        dev_info(rdev->dev, "GPU softreset\n");
        dev_info(rdev->dev, "  GRBM_STATUS=0x%08X\n",
                RREG32(GRBM_STATUS));
        dev_info(rdev->dev, "  GRBM_STATUS_SE0=0x%08X\n",
                RREG32(GRBM_STATUS_SE0));
        dev_info(rdev->dev, "  GRBM_STATUS_SE1=0x%08X\n",
                RREG32(GRBM_STATUS_SE1));
        dev_info(rdev->dev, "  SRBM_STATUS=0x%08X\n",
                RREG32(SRBM_STATUS));
        evergreen_mc_stop(rdev, &save);
        if (evergreen_mc_wait_for_idle(rdev)) {
                dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
        }
        /* Disable CP parsing/prefetching */
        WREG32(CP_ME_CNTL, CP_ME_HALT | CP_PFP_HALT);

        /* reset all the gfx blocks */
        grbm_reset = (SOFT_RESET_CP |
                      SOFT_RESET_CB |
                      SOFT_RESET_DB |
                      SOFT_RESET_PA |
                      SOFT_RESET_SC |
                      SOFT_RESET_SPI |
                      SOFT_RESET_SH |
                      SOFT_RESET_SX |
                      SOFT_RESET_TC |
                      SOFT_RESET_TA |
                      SOFT_RESET_VC |
                      SOFT_RESET_VGT);

        dev_info(rdev->dev, "  GRBM_SOFT_RESET=0x%08X\n", grbm_reset);
        WREG32(GRBM_SOFT_RESET, grbm_reset);
        (void)RREG32(GRBM_SOFT_RESET);
        udelay(50);
        WREG32(GRBM_SOFT_RESET, 0);
        (void)RREG32(GRBM_SOFT_RESET);

        /* reset all the system blocks */
        srbm_reset = SRBM_SOFT_RESET_ALL_MASK;

        dev_info(rdev->dev, "  SRBM_SOFT_RESET=0x%08X\n", srbm_reset);
        WREG32(SRBM_SOFT_RESET, srbm_reset);
        (void)RREG32(SRBM_SOFT_RESET);
        udelay(50);
        WREG32(SRBM_SOFT_RESET, 0);
        (void)RREG32(SRBM_SOFT_RESET);
        /* Wait a little for things to settle down */
        udelay(50);
        dev_info(rdev->dev, "  GRBM_STATUS=0x%08X\n",
                RREG32(GRBM_STATUS));
        dev_info(rdev->dev, "  GRBM_STATUS_SE0=0x%08X\n",
                RREG32(GRBM_STATUS_SE0));
        dev_info(rdev->dev, "  GRBM_STATUS_SE1=0x%08X\n",
                RREG32(GRBM_STATUS_SE1));
        dev_info(rdev->dev, "  SRBM_STATUS=0x%08X\n",
                RREG32(SRBM_STATUS));
        /* After reset we need to reinit the asic as the GPU often ends up
         * in an incoherent state.
         */
        atom_asic_init(rdev->mode_info.atom_context);
        evergreen_mc_resume(rdev, &save);
        return 0;
}

int evergreen_asic_reset(struct radeon_device *rdev)
{
        return evergreen_gpu_soft_reset(rdev);
}

/* Interrupts */

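/* The per-CRTC frame counter register doubles as the vblank counter
 * reported to the DRM core.
 */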
1210 u32 evergreen_get_vblank_counter(struct radeon_device *rdev, int crtc)
1211 {
1212         switch (crtc) {
1213         case 0:
1214                 return RREG32(CRTC_STATUS_FRAME_COUNT + EVERGREEN_CRTC0_REGISTER_OFFSET);
1215         case 1:
1216                 return RREG32(CRTC_STATUS_FRAME_COUNT + EVERGREEN_CRTC1_REGISTER_OFFSET);
1217         case 2:
1218                 return RREG32(CRTC_STATUS_FRAME_COUNT + EVERGREEN_CRTC2_REGISTER_OFFSET);
1219         case 3:
1220                 return RREG32(CRTC_STATUS_FRAME_COUNT + EVERGREEN_CRTC3_REGISTER_OFFSET);
1221         case 4:
1222                 return RREG32(CRTC_STATUS_FRAME_COUNT + EVERGREEN_CRTC4_REGISTER_OFFSET);
1223         case 5:
1224                 return RREG32(CRTC_STATUS_FRAME_COUNT + EVERGREEN_CRTC5_REGISTER_OFFSET);
1225         default:
1226                 return 0;
1227         }
1228 }
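
/* The six per-crtc register blocks sit at fixed offsets from the same base
 * registers, so the switch above could equally be table driven.  A sketch
 * (the crtc_offsets table is hypothetical, not part of this file):
 */
#if 0
static const u32 crtc_offsets[6] = {
	EVERGREEN_CRTC0_REGISTER_OFFSET,
	EVERGREEN_CRTC1_REGISTER_OFFSET,
	EVERGREEN_CRTC2_REGISTER_OFFSET,
	EVERGREEN_CRTC3_REGISTER_OFFSET,
	EVERGREEN_CRTC4_REGISTER_OFFSET,
	EVERGREEN_CRTC5_REGISTER_OFFSET,
};

u32 evergreen_get_vblank_counter(struct radeon_device *rdev, int crtc)
{
	if (crtc < 0 || crtc > 5)
		return 0;
	return RREG32(CRTC_STATUS_FRAME_COUNT + crtc_offsets[crtc]);
}
#endif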
1229
1230 void evergreen_disable_interrupt_state(struct radeon_device *rdev)
1231 {
1232         u32 tmp;
1233
1234         WREG32(CP_INT_CNTL, 0);
1235         WREG32(GRBM_INT_CNTL, 0);
1236         WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
1237         WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
1238         WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
1239         WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
1240         WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
1241         WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
1242
1243         WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
1244         WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
1245         WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
1246         WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
1247         WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
1248         WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
1249
1250         WREG32(DACA_AUTODETECT_INT_CONTROL, 0);
1251         WREG32(DACB_AUTODETECT_INT_CONTROL, 0);
1252
1253         tmp = RREG32(DC_HPD1_INT_CONTROL) & DC_HPDx_INT_POLARITY;
1254         WREG32(DC_HPD1_INT_CONTROL, tmp);
1255         tmp = RREG32(DC_HPD2_INT_CONTROL) & DC_HPDx_INT_POLARITY;
1256         WREG32(DC_HPD2_INT_CONTROL, tmp);
1257         tmp = RREG32(DC_HPD3_INT_CONTROL) & DC_HPDx_INT_POLARITY;
1258         WREG32(DC_HPD3_INT_CONTROL, tmp);
1259         tmp = RREG32(DC_HPD4_INT_CONTROL) & DC_HPDx_INT_POLARITY;
1260         WREG32(DC_HPD4_INT_CONTROL, tmp);
1261         tmp = RREG32(DC_HPD5_INT_CONTROL) & DC_HPDx_INT_POLARITY;
1262         WREG32(DC_HPD5_INT_CONTROL, tmp);
1263         tmp = RREG32(DC_HPD6_INT_CONTROL) & DC_HPDx_INT_POLARITY;
1264         WREG32(DC_HPD6_INT_CONTROL, tmp);
1266 }
1267
1268 int evergreen_irq_set(struct radeon_device *rdev)
1269 {
1270         u32 cp_int_cntl = CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE;
1271         u32 crtc1 = 0, crtc2 = 0, crtc3 = 0, crtc4 = 0, crtc5 = 0, crtc6 = 0;
1272         u32 hpd1, hpd2, hpd3, hpd4, hpd5, hpd6;
1273
1274         if (!rdev->irq.installed) {
1275                 WARN(1, "Can't enable IRQ/MSI because no handler is installed.\n");
1276                 return -EINVAL;
1277         }
1278         /* don't enable anything if the ih is disabled */
1279         if (!rdev->ih.enabled) {
1280                 r600_disable_interrupts(rdev);
1281                 /* force the active interrupt state to all disabled */
1282                 evergreen_disable_interrupt_state(rdev);
1283                 return 0;
1284         }
1285
1286         hpd1 = RREG32(DC_HPD1_INT_CONTROL) & ~DC_HPDx_INT_EN;
1287         hpd2 = RREG32(DC_HPD2_INT_CONTROL) & ~DC_HPDx_INT_EN;
1288         hpd3 = RREG32(DC_HPD3_INT_CONTROL) & ~DC_HPDx_INT_EN;
1289         hpd4 = RREG32(DC_HPD4_INT_CONTROL) & ~DC_HPDx_INT_EN;
1290         hpd5 = RREG32(DC_HPD5_INT_CONTROL) & ~DC_HPDx_INT_EN;
1291         hpd6 = RREG32(DC_HPD6_INT_CONTROL) & ~DC_HPDx_INT_EN;
1292
1293         if (rdev->irq.sw_int) {
1294                 DRM_DEBUG("evergreen_irq_set: sw int\n");
1295                 cp_int_cntl |= RB_INT_ENABLE;
1296         }
1297         if (rdev->irq.crtc_vblank_int[0]) {
1298                 DRM_DEBUG("evergreen_irq_set: vblank 0\n");
1299                 crtc1 |= VBLANK_INT_MASK;
1300         }
1301         if (rdev->irq.crtc_vblank_int[1]) {
1302                 DRM_DEBUG("evergreen_irq_set: vblank 1\n");
1303                 crtc2 |= VBLANK_INT_MASK;
1304         }
1305         if (rdev->irq.crtc_vblank_int[2]) {
1306                 DRM_DEBUG("evergreen_irq_set: vblank 2\n");
1307                 crtc3 |= VBLANK_INT_MASK;
1308         }
1309         if (rdev->irq.crtc_vblank_int[3]) {
1310                 DRM_DEBUG("evergreen_irq_set: vblank 3\n");
1311                 crtc4 |= VBLANK_INT_MASK;
1312         }
1313         if (rdev->irq.crtc_vblank_int[4]) {
1314                 DRM_DEBUG("evergreen_irq_set: vblank 4\n");
1315                 crtc5 |= VBLANK_INT_MASK;
1316         }
1317         if (rdev->irq.crtc_vblank_int[5]) {
1318                 DRM_DEBUG("evergreen_irq_set: vblank 5\n");
1319                 crtc6 |= VBLANK_INT_MASK;
1320         }
1321         if (rdev->irq.hpd[0]) {
1322                 DRM_DEBUG("evergreen_irq_set: hpd 1\n");
1323                 hpd1 |= DC_HPDx_INT_EN;
1324         }
1325         if (rdev->irq.hpd[1]) {
1326                 DRM_DEBUG("evergreen_irq_set: hpd 2\n");
1327                 hpd2 |= DC_HPDx_INT_EN;
1328         }
1329         if (rdev->irq.hpd[2]) {
1330                 DRM_DEBUG("evergreen_irq_set: hpd 3\n");
1331                 hpd3 |= DC_HPDx_INT_EN;
1332         }
1333         if (rdev->irq.hpd[3]) {
1334                 DRM_DEBUG("evergreen_irq_set: hpd 4\n");
1335                 hpd4 |= DC_HPDx_INT_EN;
1336         }
1337         if (rdev->irq.hpd[4]) {
1338                 DRM_DEBUG("evergreen_irq_set: hpd 5\n");
1339                 hpd5 |= DC_HPDx_INT_EN;
1340         }
1341         if (rdev->irq.hpd[5]) {
1342                 DRM_DEBUG("evergreen_irq_set: hpd 6\n");
1343                 hpd6 |= DC_HPDx_INT_EN;
1344         }
1345
1346         WREG32(CP_INT_CNTL, cp_int_cntl);
1347
1348         WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, crtc1);
1349         WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, crtc2);
1350         WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, crtc3);
1351         WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, crtc4);
1352         WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, crtc5);
1353         WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, crtc6);
1354
1355         WREG32(DC_HPD1_INT_CONTROL, hpd1);
1356         WREG32(DC_HPD2_INT_CONTROL, hpd2);
1357         WREG32(DC_HPD3_INT_CONTROL, hpd3);
1358         WREG32(DC_HPD4_INT_CONTROL, hpd4);
1359         WREG32(DC_HPD5_INT_CONTROL, hpd5);
1360         WREG32(DC_HPD6_INT_CONTROL, hpd6);
1361
1362         return 0;
1363 }
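
/* Typical usage (sketch): the radeon irq layer sets the rdev->irq flags
 * and then calls this function to latch them into the hardware, e.g. to
 * enable the vblank interrupt on the first crtc:
 */
#if 0
	rdev->irq.crtc_vblank_int[0] = true;
	evergreen_irq_set(rdev);
#endif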
1364
1365 static inline void evergreen_irq_ack(struct radeon_device *rdev,
1366                                      u32 *disp_int,
1367                                      u32 *disp_int_cont,
1368                                      u32 *disp_int_cont2,
1369                                      u32 *disp_int_cont3,
1370                                      u32 *disp_int_cont4,
1371                                      u32 *disp_int_cont5)
1372 {
1373         u32 tmp;
1374
1375         *disp_int = RREG32(DISP_INTERRUPT_STATUS);
1376         *disp_int_cont = RREG32(DISP_INTERRUPT_STATUS_CONTINUE);
1377         *disp_int_cont2 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE2);
1378         *disp_int_cont3 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE3);
1379         *disp_int_cont4 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE4);
1380         *disp_int_cont5 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE5);
1381
1382         if (*disp_int & LB_D1_VBLANK_INTERRUPT)
1383                 WREG32(VBLANK_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VBLANK_ACK);
1384         if (*disp_int & LB_D1_VLINE_INTERRUPT)
1385                 WREG32(VLINE_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VLINE_ACK);
1386
1387         if (*disp_int_cont & LB_D2_VBLANK_INTERRUPT)
1388                 WREG32(VBLANK_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VBLANK_ACK);
1389         if (*disp_int_cont & LB_D2_VLINE_INTERRUPT)
1390                 WREG32(VLINE_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VLINE_ACK);
1391
1392         if (*disp_int_cont2 & LB_D3_VBLANK_INTERRUPT)
1393                 WREG32(VBLANK_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VBLANK_ACK);
1394         if (*disp_int_cont2 & LB_D3_VLINE_INTERRUPT)
1395                 WREG32(VLINE_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VLINE_ACK);
1396
1397         if (*disp_int_cont3 & LB_D4_VBLANK_INTERRUPT)
1398                 WREG32(VBLANK_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VBLANK_ACK);
1399         if (*disp_int_cont3 & LB_D4_VLINE_INTERRUPT)
1400                 WREG32(VLINE_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VLINE_ACK);
1401
1402         if (*disp_int_cont4 & LB_D5_VBLANK_INTERRUPT)
1403                 WREG32(VBLANK_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VBLANK_ACK);
1404         if (*disp_int_cont4 & LB_D5_VLINE_INTERRUPT)
1405                 WREG32(VLINE_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VLINE_ACK);
1406
1407         if (*disp_int_cont5 & LB_D6_VBLANK_INTERRUPT)
1408                 WREG32(VBLANK_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VBLANK_ACK);
1409         if (*disp_int_cont5 & LB_D6_VLINE_INTERRUPT)
1410                 WREG32(VLINE_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VLINE_ACK);
1411
1412         if (*disp_int & DC_HPD1_INTERRUPT) {
1413                 tmp = RREG32(DC_HPD1_INT_CONTROL);
1414                 tmp |= DC_HPDx_INT_ACK;
1415                 WREG32(DC_HPD1_INT_CONTROL, tmp);
1416         }
1417         if (*disp_int_cont & DC_HPD2_INTERRUPT) {
1418                 tmp = RREG32(DC_HPD2_INT_CONTROL);
1419                 tmp |= DC_HPDx_INT_ACK;
1420                 WREG32(DC_HPD2_INT_CONTROL, tmp);
1421         }
1422         if (*disp_int_cont2 & DC_HPD3_INTERRUPT) {
1423                 tmp = RREG32(DC_HPD3_INT_CONTROL);
1424                 tmp |= DC_HPDx_INT_ACK;
1425                 WREG32(DC_HPD3_INT_CONTROL, tmp);
1426         }
1427         if (*disp_int_cont3 & DC_HPD4_INTERRUPT) {
1428                 tmp = RREG32(DC_HPD4_INT_CONTROL);
1429                 tmp |= DC_HPDx_INT_ACK;
1430                 WREG32(DC_HPD4_INT_CONTROL, tmp);
1431         }
1432         if (*disp_int_cont4 & DC_HPD5_INTERRUPT) {
1433                 tmp = RREG32(DC_HPD5_INT_CONTROL);
1434                 tmp |= DC_HPDx_INT_ACK;
1435                 WREG32(DC_HPD5_INT_CONTROL, tmp);
1436         }
1437         if (*disp_int_cont5 & DC_HPD6_INTERRUPT) {
1438                 tmp = RREG32(DC_HPD6_INT_CONTROL);
1439                 tmp |= DC_HPDx_INT_ACK;
1440                 WREG32(DC_HPD6_INT_CONTROL, tmp);
1441         }
1442 }
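
/* Each DC_HPDx acknowledge above is the same read-modify-write of the
 * pad's interrupt control register; with a small lookup table the six
 * cases would collapse into one helper.  Sketch (the hpd_int_ctl table
 * and helper are hypothetical):
 */
#if 0
static const u32 hpd_int_ctl[6] = {
	DC_HPD1_INT_CONTROL, DC_HPD2_INT_CONTROL, DC_HPD3_INT_CONTROL,
	DC_HPD4_INT_CONTROL, DC_HPD5_INT_CONTROL, DC_HPD6_INT_CONTROL,
};

static void evergreen_hpd_int_ack(struct radeon_device *rdev, int hpd)
{
	u32 tmp = RREG32(hpd_int_ctl[hpd]);
	WREG32(hpd_int_ctl[hpd], tmp | DC_HPDx_INT_ACK);
}
#endif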
1443
1444 void evergreen_irq_disable(struct radeon_device *rdev)
1445 {
1446         u32 disp_int, disp_int_cont, disp_int_cont2;
1447         u32 disp_int_cont3, disp_int_cont4, disp_int_cont5;
1448
1449         r600_disable_interrupts(rdev);
1450         /* Wait and acknowledge irq */
1451         mdelay(1);
1452         evergreen_irq_ack(rdev, &disp_int, &disp_int_cont, &disp_int_cont2,
1453                           &disp_int_cont3, &disp_int_cont4, &disp_int_cont5);
1454         evergreen_disable_interrupt_state(rdev);
1455 }
1456
1457 static void evergreen_irq_suspend(struct radeon_device *rdev)
1458 {
1459         evergreen_irq_disable(rdev);
1460         r600_rlc_stop(rdev);
1461 }
1462
1463 static inline u32 evergreen_get_ih_wptr(struct radeon_device *rdev)
1464 {
1465         u32 wptr, tmp;
1466
1467         /* XXX use writeback */
1468         wptr = RREG32(IH_RB_WPTR);
1469
1470         if (wptr & RB_OVERFLOW) {
1471                 /* When a ring buffer overflow happens, start parsing
1472                  * interrupts from the last vector that was not overwritten
1473                  * (wptr + 16). Hopefully this allows us to catch up.
1474                  */
1475                 dev_warn(rdev->dev, "IH ring buffer overflow (0x%08X, %d, %d)\n",
1476                         wptr, rdev->ih.rptr, (wptr + 16) & rdev->ih.ptr_mask);
1477                 rdev->ih.rptr = (wptr + 16) & rdev->ih.ptr_mask;
1478                 tmp = RREG32(IH_RB_CNTL);
1479                 tmp |= IH_WPTR_OVERFLOW_CLEAR;
1480                 WREG32(IH_RB_CNTL, tmp);
1481         }
1482         return (wptr & rdev->ih.ptr_mask);
1483 }
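
/* Worked example of the overflow path above (assuming the 64 KB IH ring
 * set up in evergreen_init, so ptr_mask == 0xffff): if wptr reads back as
 * 0x2340 with RB_OVERFLOW set, the oldest intact 16-byte vector starts
 * just past the write pointer, so rptr is set to
 * (0x2340 + 16) & 0xffff = 0x2350, the overflow flag is cleared via
 * IH_WPTR_OVERFLOW_CLEAR, and the masked wptr is returned.
 */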
1484
1485 int evergreen_irq_process(struct radeon_device *rdev)
1486 {
1487         u32 wptr = evergreen_get_ih_wptr(rdev);
1488         u32 rptr = rdev->ih.rptr;
1489         u32 src_id, src_data;
1490         u32 ring_index;
1491         u32 disp_int, disp_int_cont, disp_int_cont2;
1492         u32 disp_int_cont3, disp_int_cont4, disp_int_cont5;
1493         unsigned long flags;
1494         bool queue_hotplug = false;
1495
1496         DRM_DEBUG("evergreen_irq_process start: rptr %d, wptr %d\n", rptr, wptr);
1497         if (!rdev->ih.enabled)
1498                 return IRQ_NONE;
1499
1500         spin_lock_irqsave(&rdev->ih.lock, flags);
1501
1502         if (rptr == wptr) {
1503                 spin_unlock_irqrestore(&rdev->ih.lock, flags);
1504                 return IRQ_NONE;
1505         }
1506         if (rdev->shutdown) {
1507                 spin_unlock_irqrestore(&rdev->ih.lock, flags);
1508                 return IRQ_NONE;
1509         }
1510
1511 restart_ih:
1512         /* display interrupts */
1513         evergreen_irq_ack(rdev, &disp_int, &disp_int_cont, &disp_int_cont2,
1514                           &disp_int_cont3, &disp_int_cont4, &disp_int_cont5);
1515
1516         rdev->ih.wptr = wptr;
1517         while (rptr != wptr) {
1518                 /* wptr/rptr are in bytes! */
1519                 ring_index = rptr / 4;
1520                 src_id =  rdev->ih.ring[ring_index] & 0xff;
1521                 src_data = rdev->ih.ring[ring_index + 1] & 0xfffffff;
1522
1523                 switch (src_id) {
1524                 case 1: /* D1 vblank/vline */
1525                         switch (src_data) {
1526                         case 0: /* D1 vblank */
1527                                 if (disp_int & LB_D1_VBLANK_INTERRUPT) {
1528                                         drm_handle_vblank(rdev->ddev, 0);
1529                                         wake_up(&rdev->irq.vblank_queue);
1530                                         disp_int &= ~LB_D1_VBLANK_INTERRUPT;
1531                                         DRM_DEBUG("IH: D1 vblank\n");
1532                                 }
1533                                 break;
1534                         case 1: /* D1 vline */
1535                                 if (disp_int & LB_D1_VLINE_INTERRUPT) {
1536                                         disp_int &= ~LB_D1_VLINE_INTERRUPT;
1537                                         DRM_DEBUG("IH: D1 vline\n");
1538                                 }
1539                                 break;
1540                         default:
1541                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
1542                                 break;
1543                         }
1544                         break;
1545                 case 2: /* D2 vblank/vline */
1546                         switch (src_data) {
1547                         case 0: /* D2 vblank */
1548                                 if (disp_int_cont & LB_D2_VBLANK_INTERRUPT) {
1549                                         drm_handle_vblank(rdev->ddev, 1);
1550                                         wake_up(&rdev->irq.vblank_queue);
1551                                         disp_int_cont &= ~LB_D2_VBLANK_INTERRUPT;
1552                                         DRM_DEBUG("IH: D2 vblank\n");
1553                                 }
1554                                 break;
1555                         case 1: /* D2 vline */
1556                                 if (disp_int_cont & LB_D2_VLINE_INTERRUPT) {
1557                                         disp_int_cont &= ~LB_D2_VLINE_INTERRUPT;
1558                                         DRM_DEBUG("IH: D2 vline\n");
1559                                 }
1560                                 break;
1561                         default:
1562                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
1563                                 break;
1564                         }
1565                         break;
1566                 case 3: /* D3 vblank/vline */
1567                         switch (src_data) {
1568                         case 0: /* D3 vblank */
1569                                 if (disp_int_cont2 & LB_D3_VBLANK_INTERRUPT) {
1570                                         drm_handle_vblank(rdev->ddev, 2);
1571                                         wake_up(&rdev->irq.vblank_queue);
1572                                         disp_int_cont2 &= ~LB_D3_VBLANK_INTERRUPT;
1573                                         DRM_DEBUG("IH: D3 vblank\n");
1574                                 }
1575                                 break;
1576                         case 1: /* D3 vline */
1577                                 if (disp_int_cont2 & LB_D3_VLINE_INTERRUPT) {
1578                                         disp_int_cont2 &= ~LB_D3_VLINE_INTERRUPT;
1579                                         DRM_DEBUG("IH: D3 vline\n");
1580                                 }
1581                                 break;
1582                         default:
1583                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
1584                                 break;
1585                         }
1586                         break;
1587                 case 4: /* D4 vblank/vline */
1588                         switch (src_data) {
1589                         case 0: /* D4 vblank */
1590                                 if (disp_int_cont3 & LB_D4_VBLANK_INTERRUPT) {
1591                                         drm_handle_vblank(rdev->ddev, 3);
1592                                         wake_up(&rdev->irq.vblank_queue);
1593                                         disp_int_cont3 &= ~LB_D4_VBLANK_INTERRUPT;
1594                                         DRM_DEBUG("IH: D4 vblank\n");
1595                                 }
1596                                 break;
1597                         case 1: /* D4 vline */
1598                                 if (disp_int_cont3 & LB_D4_VLINE_INTERRUPT) {
1599                                         disp_int_cont3 &= ~LB_D4_VLINE_INTERRUPT;
1600                                         DRM_DEBUG("IH: D4 vline\n");
1601                                 }
1602                                 break;
1603                         default:
1604                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
1605                                 break;
1606                         }
1607                         break;
1608                 case 5: /* D5 vblank/vline */
1609                         switch (src_data) {
1610                         case 0: /* D5 vblank */
1611                                 if (disp_int_cont4 & LB_D5_VBLANK_INTERRUPT) {
1612                                         drm_handle_vblank(rdev->ddev, 4);
1613                                         wake_up(&rdev->irq.vblank_queue);
1614                                         disp_int_cont4 &= ~LB_D5_VBLANK_INTERRUPT;
1615                                         DRM_DEBUG("IH: D5 vblank\n");
1616                                 }
1617                                 break;
1618                         case 1: /* D5 vline */
1619                                 if (disp_int_cont4 & LB_D5_VLINE_INTERRUPT) {
1620                                         disp_int_cont4 &= ~LB_D5_VLINE_INTERRUPT;
1621                                         DRM_DEBUG("IH: D5 vline\n");
1622                                 }
1623                                 break;
1624                         default:
1625                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
1626                                 break;
1627                         }
1628                         break;
1629                 case 6: /* D6 vblank/vline */
1630                         switch (src_data) {
1631                         case 0: /* D6 vblank */
1632                                 if (disp_int_cont5 & LB_D6_VBLANK_INTERRUPT) {
1633                                         drm_handle_vblank(rdev->ddev, 5);
1634                                         wake_up(&rdev->irq.vblank_queue);
1635                                         disp_int_cont5 &= ~LB_D6_VBLANK_INTERRUPT;
1636                                         DRM_DEBUG("IH: D6 vblank\n");
1637                                 }
1638                                 break;
1639                         case 1: /* D6 vline */
1640                                 if (disp_int_cont5 & LB_D6_VLINE_INTERRUPT) {
1641                                         disp_int_cont5 &= ~LB_D6_VLINE_INTERRUPT;
1642                                         DRM_DEBUG("IH: D6 vline\n");
1643                                 }
1644                                 break;
1645                         default:
1646                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
1647                                 break;
1648                         }
1649                         break;
1650                 case 42: /* HPD hotplug */
1651                         switch (src_data) {
1652                         case 0:
1653                                 if (disp_int & DC_HPD1_INTERRUPT) {
1654                                         disp_int &= ~DC_HPD1_INTERRUPT;
1655                                         queue_hotplug = true;
1656                                         DRM_DEBUG("IH: HPD1\n");
1657                                 }
1658                                 break;
1659                         case 1:
1660                                 if (disp_int_cont & DC_HPD2_INTERRUPT) {
1661                                         disp_int_cont &= ~DC_HPD2_INTERRUPT;
1662                                         queue_hotplug = true;
1663                                         DRM_DEBUG("IH: HPD2\n");
1664                                 }
1665                                 break;
1666                         case 2:
1667                                 if (disp_int_cont2 & DC_HPD3_INTERRUPT) {
1668                                         disp_int_cont2 &= ~DC_HPD3_INTERRUPT;
1669                                         queue_hotplug = true;
1670                                         DRM_DEBUG("IH: HPD3\n");
1671                                 }
1672                                 break;
1673                         case 3:
1674                                 if (disp_int_cont3 & DC_HPD4_INTERRUPT) {
1675                                         disp_int_cont3 &= ~DC_HPD4_INTERRUPT;
1676                                         queue_hotplug = true;
1677                                         DRM_DEBUG("IH: HPD4\n");
1678                                 }
1679                                 break;
1680                         case 4:
1681                                 if (disp_int_cont4 & DC_HPD5_INTERRUPT) {
1682                                         disp_int_cont4 &= ~DC_HPD5_INTERRUPT;
1683                                         queue_hotplug = true;
1684                                         DRM_DEBUG("IH: HPD5\n");
1685                                 }
1686                                 break;
1687                         case 5:
1688                                 if (disp_int_cont5 & DC_HPD6_INTERRUPT) {
1689                                         disp_int_cont5 &= ~DC_HPD6_INTERRUPT;
1690                                         queue_hotplug = true;
1691                                         DRM_DEBUG("IH: HPD6\n");
1692                                 }
1693                                 break;
1694                         default:
1695                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
1696                                 break;
1697                         }
1698                         break;
1699                 case 176: /* CP_INT in ring buffer */
1700                 case 177: /* CP_INT in IB1 */
1701                 case 178: /* CP_INT in IB2 */
1702                         DRM_DEBUG("IH: CP int: 0x%08x\n", src_data);
1703                         radeon_fence_process(rdev);
1704                         break;
1705                 case 181: /* CP EOP event */
1706                         DRM_DEBUG("IH: CP EOP\n");
1707                         break;
1708                 default:
1709                         DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
1710                         break;
1711                 }
1712
1713                 /* wptr/rptr are in bytes! */
1714                 rptr += 16;
1715                 rptr &= rdev->ih.ptr_mask;
1716         }
1717         /* make sure wptr hasn't changed while processing */
1718         wptr = evergreen_get_ih_wptr(rdev);
1719         if (wptr != rdev->ih.wptr)
1720                 goto restart_ih;
1721         if (queue_hotplug)
1722                 queue_work(rdev->wq, &rdev->hotplug_work);
1723         rdev->ih.rptr = rptr;
1724         WREG32(IH_RB_RPTR, rdev->ih.rptr);
1725         spin_unlock_irqrestore(&rdev->ih.lock, flags);
1726         return IRQ_HANDLED;
1727 }
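
/* Each IH ring entry consumed above is a 16-byte (four dword) vector:
 * dword 0 carries the source id in bits 7:0 and dword 1 the source data
 * in bits 27:0, which is why the loop masks with 0xff and 0xfffffff and
 * then advances rptr by 16.  A hypothetical decoded view of one vector:
 */
#if 0
struct evergreen_ih_vector {
	u32 src_id;		/* ring[ring_index]     & 0xff      */
	u32 src_data;		/* ring[ring_index + 1] & 0xfffffff */
	u32 reserved[2];	/* dwords 2-3, not used by this handler */
};
#endif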
1728
1729 static int evergreen_startup(struct radeon_device *rdev)
1730 {
1731         int r;
1732
1733         if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw) {
1734                 r = r600_init_microcode(rdev);
1735                 if (r) {
1736                         DRM_ERROR("Failed to load firmware!\n");
1737                         return r;
1738                 }
1739         }
1740
1741         evergreen_mc_program(rdev);
1742         if (rdev->flags & RADEON_IS_AGP) {
1743                 evergreen_agp_enable(rdev);
1744         } else {
1745                 r = evergreen_pcie_gart_enable(rdev);
1746                 if (r)
1747                         return r;
1748         }
1749         evergreen_gpu_init(rdev);
1750 #if 0
1751         if (!rdev->r600_blit.shader_obj) {
1752                 r = r600_blit_init(rdev);
1753                 if (r) {
1754                         DRM_ERROR("radeon: failed blitter (%d).\n", r);
1755                         return r;
1756                 }
1757         }
1758
1759         r = radeon_bo_reserve(rdev->r600_blit.shader_obj, false);
1760         if (unlikely(r != 0))
1761                 return r;
1762         r = radeon_bo_pin(rdev->r600_blit.shader_obj, RADEON_GEM_DOMAIN_VRAM,
1763                         &rdev->r600_blit.shader_gpu_addr);
1764         radeon_bo_unreserve(rdev->r600_blit.shader_obj);
1765         if (r) {
1766                 DRM_ERROR("failed to pin blit object %d\n", r);
1767                 return r;
1768         }
1769 #endif
1770
1771         /* Enable IRQ */
1772         r = r600_irq_init(rdev);
1773         if (r) {
1774                 DRM_ERROR("radeon: IH init failed (%d).\n", r);
1775                 radeon_irq_kms_fini(rdev);
1776                 return r;
1777         }
1778         evergreen_irq_set(rdev);
1779
1780         r = radeon_ring_init(rdev, rdev->cp.ring_size);
1781         if (r)
1782                 return r;
1783         r = evergreen_cp_load_microcode(rdev);
1784         if (r)
1785                 return r;
1786         r = evergreen_cp_resume(rdev);
1787         if (r)
1788                 return r;
1789         /* the write back buffer is not vital, so don't worry about failure */
1790         r600_wb_enable(rdev);
1791
1792         return 0;
1793 }
1794
1795 int evergreen_resume(struct radeon_device *rdev)
1796 {
1797         int r;
1798
1799         /* Do not reset the GPU before posting; on evergreen hw, unlike on
1800          * r500 hw, posting performs the tasks necessary to bring the GPU
1801          * back into good shape.
1802          */
1803         /* post card */
1804         atom_asic_init(rdev->mode_info.atom_context);
1805         /* Initialize clocks */
1806         r = radeon_clocks_init(rdev);
1807         if (r) {
1808                 return r;
1809         }
1810
1811         r = evergreen_startup(rdev);
1812         if (r) {
1813                 DRM_ERROR("evergreen startup failed on resume\n");
1814                 return r;
1815         }
1816
1817         r = r600_ib_test(rdev);
1818         if (r) {
1819                 DRM_ERROR("radeon: failed testing IB (%d).\n", r);
1820                 return r;
1821         }
1822
1823         return r;
1825 }
1826
1827 int evergreen_suspend(struct radeon_device *rdev)
1828 {
1829 #if 0
1830         int r;
1831 #endif
1832         /* FIXME: we should wait for ring to be empty */
1833         r700_cp_stop(rdev);
1834         rdev->cp.ready = false;
1835         evergreen_irq_suspend(rdev);
1836         r600_wb_disable(rdev);
1837         evergreen_pcie_gart_disable(rdev);
1838 #if 0
1839         /* unpin shaders bo */
1840         r = radeon_bo_reserve(rdev->r600_blit.shader_obj, false);
1841         if (likely(r == 0)) {
1842                 radeon_bo_unpin(rdev->r600_blit.shader_obj);
1843                 radeon_bo_unreserve(rdev->r600_blit.shader_obj);
1844         }
1845 #endif
1846         return 0;
1847 }
1848
1849 static bool evergreen_card_posted(struct radeon_device *rdev)
1850 {
1851         u32 reg;
1852
1853         /* first check CRTCs */
1854         reg = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET) |
1855                 RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET) |
1856                 RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET) |
1857                 RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET) |
1858                 RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET) |
1859                 RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET);
1860         if (reg & EVERGREEN_CRTC_MASTER_EN)
1861                 return true;
1862
1863         /* then check MEM_SIZE, in case the crtcs are off */
1864         if (RREG32(CONFIG_MEMSIZE))
1865                 return true;
1866
1867         return false;
1868 }
1869
1870 /* The plan is to move initialization into this function and use
1871  * helper functions so that radeon_device_init does little more
1872  * than call asic-specific functions. This should also allow us to
1873  * remove a bunch of callback functions such as vram_info.
1874  */
1876 int evergreen_init(struct radeon_device *rdev)
1877 {
1878         int r;
1879
1880         r = radeon_dummy_page_init(rdev);
1881         if (r)
1882                 return r;
1883         /* This doesn't do much */
1884         r = radeon_gem_init(rdev);
1885         if (r)
1886                 return r;
1887         /* Read BIOS */
1888         if (!radeon_get_bios(rdev)) {
1889                 if (ASIC_IS_AVIVO(rdev))
1890                         return -EINVAL;
1891         }
1892         /* Must be an ATOMBIOS */
1893         if (!rdev->is_atom_bios) {
1894                 dev_err(rdev->dev, "Expecting atombios for evergreen GPU\n");
1895                 return -EINVAL;
1896         }
1897         r = radeon_atombios_init(rdev);
1898         if (r)
1899                 return r;
1900         /* Post card if necessary */
1901         if (!evergreen_card_posted(rdev)) {
1902                 if (!rdev->bios) {
1903                         dev_err(rdev->dev, "Card not posted and no BIOS - aborting\n");
1904                         return -EINVAL;
1905                 }
1906                 DRM_INFO("GPU not posted. posting now...\n");
1907                 atom_asic_init(rdev->mode_info.atom_context);
1908         }
1909         /* Initialize scratch registers */
1910         r600_scratch_init(rdev);
1911         /* Initialize surface registers */
1912         radeon_surface_init(rdev);
1913         /* Initialize clocks */
1914         radeon_get_clock_info(rdev->ddev);
1915         r = radeon_clocks_init(rdev);
1916         if (r)
1917                 return r;
1918         /* Initialize power management */
1919         radeon_pm_init(rdev);
1920         /* Fence driver */
1921         r = radeon_fence_driver_init(rdev);
1922         if (r)
1923                 return r;
1924         /* initialize AGP */
1925         if (rdev->flags & RADEON_IS_AGP) {
1926                 r = radeon_agp_init(rdev);
1927                 if (r)
1928                         radeon_agp_disable(rdev);
1929         }
1930         /* initialize memory controller */
1931         r = evergreen_mc_init(rdev);
1932         if (r)
1933                 return r;
1934         /* Memory manager */
1935         r = radeon_bo_init(rdev);
1936         if (r)
1937                 return r;
1938
1939         r = radeon_irq_kms_init(rdev);
1940         if (r)
1941                 return r;
1942
1943         rdev->cp.ring_obj = NULL;
1944         r600_ring_init(rdev, 1024 * 1024);
1945
1946         rdev->ih.ring_obj = NULL;
1947         r600_ih_ring_init(rdev, 64 * 1024);
1948
1949         r = r600_pcie_gart_init(rdev);
1950         if (r)
1951                 return r;
1952
1953         rdev->accel_working = false;
1954         r = evergreen_startup(rdev);
1955         if (r) {
1956                 dev_err(rdev->dev, "disabling GPU acceleration\n");
1957                 r700_cp_fini(rdev);
1958                 r600_wb_fini(rdev);
1959                 r600_irq_fini(rdev);
1960                 radeon_irq_kms_fini(rdev);
1961                 evergreen_pcie_gart_fini(rdev);
1962                 rdev->accel_working = false;
1963         }
1964         if (rdev->accel_working) {
1965                 r = radeon_ib_pool_init(rdev);
1966                 if (r) {
1967                         DRM_ERROR("radeon: failed initializing IB pool (%d).\n", r);
1968                         rdev->accel_working = false;
1969                 }
1970                 r = r600_ib_test(rdev);
1971                 if (r) {
1972                         DRM_ERROR("radeon: failed testing IB (%d).\n", r);
1973                         rdev->accel_working = false;
1974                 }
1975         }
1976         return 0;
1977 }
1978
1979 void evergreen_fini(struct radeon_device *rdev)
1980 {
1981         radeon_pm_fini(rdev);
1982         /*r600_blit_fini(rdev);*/
1983         r700_cp_fini(rdev);
1984         r600_wb_fini(rdev);
1985         r600_irq_fini(rdev);
1986         radeon_irq_kms_fini(rdev);
1987         evergreen_pcie_gart_fini(rdev);
1988         radeon_gem_fini(rdev);
1989         radeon_fence_driver_fini(rdev);
1990         radeon_clocks_fini(rdev);
1991         radeon_agp_fini(rdev);
1992         radeon_bo_fini(rdev);
1993         radeon_atombios_fini(rdev);
1994         kfree(rdev->bios);
1995         rdev->bios = NULL;
1996         radeon_dummy_page_fini(rdev);
1997 }