/*
 * Copyright 2008 Advanced Micro Devices, Inc.
 * Copyright 2008 Red Hat Inc.
 * Copyright 2009 Jerome Glisse.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Dave Airlie
 *          Alex Deucher
 *          Jerome Glisse
 */
#ifndef __RADEON_ASIC_H__
#define __RADEON_ASIC_H__

/*
 * common functions
 */
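/*
 * Rough note: the radeon_legacy_* helpers are expected to program the
 * engine/memory PLLs directly through registers on pre-ATOM boards, while
 * the radeon_atom_* variants go through the ATOM BIOS command tables; each
 * per-ASIC table below picks whichever set matches its BIOS type.
 */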
uint32_t radeon_legacy_get_engine_clock(struct radeon_device *rdev);
void radeon_legacy_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock);
void radeon_legacy_set_clock_gating(struct radeon_device *rdev, int enable);

uint32_t radeon_atom_get_engine_clock(struct radeon_device *rdev);
void radeon_atom_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock);
uint32_t radeon_atom_get_memory_clock(struct radeon_device *rdev);
void radeon_atom_set_memory_clock(struct radeon_device *rdev, uint32_t mem_clock);
void radeon_atom_set_clock_gating(struct radeon_device *rdev, int enable);

/*
 * r100,rv100,rs100,rv200,rs200,r200,rv250,rs300,rv280
 */
extern int r100_init(struct radeon_device *rdev);
extern void r100_fini(struct radeon_device *rdev);
extern int r100_suspend(struct radeon_device *rdev);
extern int r100_resume(struct radeon_device *rdev);
uint32_t r100_mm_rreg(struct radeon_device *rdev, uint32_t reg);
void r100_mm_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void r100_vga_set_state(struct radeon_device *rdev, bool state);
int r100_gpu_reset(struct radeon_device *rdev);
u32 r100_get_vblank_counter(struct radeon_device *rdev, int crtc);
void r100_pci_gart_tlb_flush(struct radeon_device *rdev);
int r100_pci_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
void r100_cp_commit(struct radeon_device *rdev);
void r100_ring_start(struct radeon_device *rdev);
int r100_irq_set(struct radeon_device *rdev);
int r100_irq_process(struct radeon_device *rdev);
void r100_fence_ring_emit(struct radeon_device *rdev,
			  struct radeon_fence *fence);
int r100_cs_parse(struct radeon_cs_parser *p);
void r100_pll_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
uint32_t r100_pll_rreg(struct radeon_device *rdev, uint32_t reg);
int r100_copy_blit(struct radeon_device *rdev,
		   uint64_t src_offset,
		   uint64_t dst_offset,
		   unsigned num_pages,
		   struct radeon_fence *fence);
int r100_set_surface_reg(struct radeon_device *rdev, int reg,
			 uint32_t tiling_flags, uint32_t pitch,
			 uint32_t offset, uint32_t obj_size);
int r100_clear_surface_reg(struct radeon_device *rdev, int reg);
void r100_bandwidth_update(struct radeon_device *rdev);
void r100_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
int r100_ring_test(struct radeon_device *rdev);
void r100_hdp_flush(struct radeon_device *rdev);

static struct radeon_asic r100_asic = {
	.init = &r100_init,
	.fini = &r100_fini,
	.suspend = &r100_suspend,
	.resume = &r100_resume,
	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &r100_gpu_reset,
	.gart_tlb_flush = &r100_pci_gart_tlb_flush,
	.gart_set_page = &r100_pci_gart_set_page,
	.cp_commit = &r100_cp_commit,
	.ring_start = &r100_ring_start,
	.ring_test = &r100_ring_test,
	.ring_ib_execute = &r100_ring_ib_execute,
	.irq_set = &r100_irq_set,
	.irq_process = &r100_irq_process,
	.get_vblank_counter = &r100_get_vblank_counter,
	.fence_ring_emit = &r100_fence_ring_emit,
	.cs_parse = &r100_cs_parse,
	.copy_blit = &r100_copy_blit,
	.copy_dma = NULL,
	.copy = &r100_copy_blit,
	.get_engine_clock = &radeon_legacy_get_engine_clock,
	.set_engine_clock = &radeon_legacy_set_engine_clock,
	.get_memory_clock = NULL,
	.set_memory_clock = NULL,
	.set_pcie_lanes = NULL,
	.set_clock_gating = &radeon_legacy_set_clock_gating,
	.set_surface_reg = r100_set_surface_reg,
	.clear_surface_reg = r100_clear_surface_reg,
	.bandwidth_update = &r100_bandwidth_update,
	.hdp_flush = &r100_hdp_flush,
};
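/*
 * Usage sketch (illustrative only): the core is expected to bind one of
 * these tables to the device and dispatch through it, roughly:
 *
 *	rdev->asic = &r100_asic;	/* chosen by ASIC family at probe time *​/
 *	rdev->asic->init(rdev);
 *	rdev->asic->bandwidth_update(rdev);
 *
 * The actual selection logic and the wrapper macros live outside this
 * header; the lines above only show how the function-pointer tables are
 * meant to be used.
 */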


/*
 * r300,r350,rv350,rv380
 */
extern int r300_init(struct radeon_device *rdev);
extern void r300_fini(struct radeon_device *rdev);
extern int r300_suspend(struct radeon_device *rdev);
extern int r300_resume(struct radeon_device *rdev);
extern int r300_gpu_reset(struct radeon_device *rdev);
extern void r300_ring_start(struct radeon_device *rdev);
extern void r300_fence_ring_emit(struct radeon_device *rdev,
				struct radeon_fence *fence);
extern int r300_cs_parse(struct radeon_cs_parser *p);
extern void rv370_pcie_gart_tlb_flush(struct radeon_device *rdev);
extern int rv370_pcie_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
extern uint32_t rv370_pcie_rreg(struct radeon_device *rdev, uint32_t reg);
extern void rv370_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
extern void rv370_set_pcie_lanes(struct radeon_device *rdev, int lanes);
extern int r300_copy_dma(struct radeon_device *rdev,
			uint64_t src_offset,
			uint64_t dst_offset,
			unsigned num_pages,
			struct radeon_fence *fence);
static struct radeon_asic r300_asic = {
	.init = &r300_init,
	.fini = &r300_fini,
	.suspend = &r300_suspend,
	.resume = &r300_resume,
	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &r300_gpu_reset,
	.gart_tlb_flush = &r100_pci_gart_tlb_flush,
	.gart_set_page = &r100_pci_gart_set_page,
	.cp_commit = &r100_cp_commit,
	.ring_start = &r300_ring_start,
	.ring_test = &r100_ring_test,
	.ring_ib_execute = &r100_ring_ib_execute,
	.irq_set = &r100_irq_set,
	.irq_process = &r100_irq_process,
	.get_vblank_counter = &r100_get_vblank_counter,
	.fence_ring_emit = &r300_fence_ring_emit,
	.cs_parse = &r300_cs_parse,
	.copy_blit = &r100_copy_blit,
	.copy_dma = &r300_copy_dma,
	.copy = &r100_copy_blit,
	.get_engine_clock = &radeon_legacy_get_engine_clock,
	.set_engine_clock = &radeon_legacy_set_engine_clock,
	.get_memory_clock = NULL,
	.set_memory_clock = NULL,
	.set_pcie_lanes = &rv370_set_pcie_lanes,
	.set_clock_gating = &radeon_legacy_set_clock_gating,
	.set_surface_reg = r100_set_surface_reg,
	.clear_surface_reg = r100_clear_surface_reg,
	.bandwidth_update = &r100_bandwidth_update,
	.hdp_flush = &r100_hdp_flush,
};

/*
 * r420,r423,rv410
 */
extern int r420_init(struct radeon_device *rdev);
extern void r420_fini(struct radeon_device *rdev);
extern int r420_suspend(struct radeon_device *rdev);
extern int r420_resume(struct radeon_device *rdev);
static struct radeon_asic r420_asic = {
	.init = &r420_init,
	.fini = &r420_fini,
	.suspend = &r420_suspend,
	.resume = &r420_resume,
	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &r300_gpu_reset,
	.gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
	.gart_set_page = &rv370_pcie_gart_set_page,
	.cp_commit = &r100_cp_commit,
	.ring_start = &r300_ring_start,
	.ring_test = &r100_ring_test,
	.ring_ib_execute = &r100_ring_ib_execute,
	.irq_set = &r100_irq_set,
	.irq_process = &r100_irq_process,
	.get_vblank_counter = &r100_get_vblank_counter,
	.fence_ring_emit = &r300_fence_ring_emit,
	.cs_parse = &r300_cs_parse,
	.copy_blit = &r100_copy_blit,
	.copy_dma = &r300_copy_dma,
	.copy = &r100_copy_blit,
	.get_engine_clock = &radeon_atom_get_engine_clock,
	.set_engine_clock = &radeon_atom_set_engine_clock,
	.get_memory_clock = &radeon_atom_get_memory_clock,
	.set_memory_clock = &radeon_atom_set_memory_clock,
	.set_pcie_lanes = &rv370_set_pcie_lanes,
	.set_clock_gating = &radeon_atom_set_clock_gating,
	.set_surface_reg = r100_set_surface_reg,
	.clear_surface_reg = r100_clear_surface_reg,
	.bandwidth_update = &r100_bandwidth_update,
	.hdp_flush = &r100_hdp_flush,
};


/*
 * rs400,rs480
 */
extern int rs400_init(struct radeon_device *rdev);
extern void rs400_fini(struct radeon_device *rdev);
extern int rs400_suspend(struct radeon_device *rdev);
extern int rs400_resume(struct radeon_device *rdev);
void rs400_gart_tlb_flush(struct radeon_device *rdev);
int rs400_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
uint32_t rs400_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rs400_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
static struct radeon_asic rs400_asic = {
	.init = &rs400_init,
	.fini = &rs400_fini,
	.suspend = &rs400_suspend,
	.resume = &rs400_resume,
	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &r300_gpu_reset,
	.gart_tlb_flush = &rs400_gart_tlb_flush,
	.gart_set_page = &rs400_gart_set_page,
	.cp_commit = &r100_cp_commit,
	.ring_start = &r300_ring_start,
	.ring_test = &r100_ring_test,
	.ring_ib_execute = &r100_ring_ib_execute,
	.irq_set = &r100_irq_set,
	.irq_process = &r100_irq_process,
	.get_vblank_counter = &r100_get_vblank_counter,
	.fence_ring_emit = &r300_fence_ring_emit,
	.cs_parse = &r300_cs_parse,
	.copy_blit = &r100_copy_blit,
	.copy_dma = &r300_copy_dma,
	.copy = &r100_copy_blit,
	.get_engine_clock = &radeon_legacy_get_engine_clock,
	.set_engine_clock = &radeon_legacy_set_engine_clock,
	.get_memory_clock = NULL,
	.set_memory_clock = NULL,
	.set_pcie_lanes = NULL,
	.set_clock_gating = &radeon_legacy_set_clock_gating,
	.set_surface_reg = r100_set_surface_reg,
	.clear_surface_reg = r100_clear_surface_reg,
	.bandwidth_update = &r100_bandwidth_update,
	.hdp_flush = &r100_hdp_flush,
};


/*
 * rs600.
 */
extern int rs600_init(struct radeon_device *rdev);
extern void rs600_fini(struct radeon_device *rdev);
extern int rs600_suspend(struct radeon_device *rdev);
extern int rs600_resume(struct radeon_device *rdev);
int rs600_irq_set(struct radeon_device *rdev);
int rs600_irq_process(struct radeon_device *rdev);
u32 rs600_get_vblank_counter(struct radeon_device *rdev, int crtc);
void rs600_gart_tlb_flush(struct radeon_device *rdev);
int rs600_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
uint32_t rs600_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rs600_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rs600_bandwidth_update(struct radeon_device *rdev);
static struct radeon_asic rs600_asic = {
	.init = &rs600_init,
	.fini = &rs600_fini,
	.suspend = &rs600_suspend,
	.resume = &rs600_resume,
	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &r300_gpu_reset,
	.gart_tlb_flush = &rs600_gart_tlb_flush,
	.gart_set_page = &rs600_gart_set_page,
	.cp_commit = &r100_cp_commit,
	.ring_start = &r300_ring_start,
	.ring_test = &r100_ring_test,
	.ring_ib_execute = &r100_ring_ib_execute,
	.irq_set = &rs600_irq_set,
	.irq_process = &rs600_irq_process,
	.get_vblank_counter = &rs600_get_vblank_counter,
	.fence_ring_emit = &r300_fence_ring_emit,
	.cs_parse = &r300_cs_parse,
	.copy_blit = &r100_copy_blit,
	.copy_dma = &r300_copy_dma,
	.copy = &r100_copy_blit,
	.get_engine_clock = &radeon_atom_get_engine_clock,
	.set_engine_clock = &radeon_atom_set_engine_clock,
	.get_memory_clock = &radeon_atom_get_memory_clock,
	.set_memory_clock = &radeon_atom_set_memory_clock,
	.set_pcie_lanes = NULL,
	.set_clock_gating = &radeon_atom_set_clock_gating,
	.bandwidth_update = &rs600_bandwidth_update,
	.hdp_flush = &r100_hdp_flush,
};


/*
 * rs690,rs740
 */
int rs690_init(struct radeon_device *rdev);
void rs690_fini(struct radeon_device *rdev);
int rs690_resume(struct radeon_device *rdev);
int rs690_suspend(struct radeon_device *rdev);
uint32_t rs690_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rs690_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rs690_bandwidth_update(struct radeon_device *rdev);
static struct radeon_asic rs690_asic = {
	.init = &rs690_init,
	.fini = &rs690_fini,
	.suspend = &rs690_suspend,
	.resume = &rs690_resume,
	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &r300_gpu_reset,
	.gart_tlb_flush = &rs400_gart_tlb_flush,
	.gart_set_page = &rs400_gart_set_page,
	.cp_commit = &r100_cp_commit,
	.ring_start = &r300_ring_start,
	.ring_test = &r100_ring_test,
	.ring_ib_execute = &r100_ring_ib_execute,
	.irq_set = &rs600_irq_set,
	.irq_process = &rs600_irq_process,
	.get_vblank_counter = &rs600_get_vblank_counter,
	.fence_ring_emit = &r300_fence_ring_emit,
	.cs_parse = &r300_cs_parse,
	.copy_blit = &r100_copy_blit,
	.copy_dma = &r300_copy_dma,
	.copy = &r300_copy_dma,
	.get_engine_clock = &radeon_atom_get_engine_clock,
	.set_engine_clock = &radeon_atom_set_engine_clock,
	.get_memory_clock = &radeon_atom_get_memory_clock,
	.set_memory_clock = &radeon_atom_set_memory_clock,
	.set_pcie_lanes = NULL,
	.set_clock_gating = &radeon_atom_set_clock_gating,
	.set_surface_reg = r100_set_surface_reg,
	.clear_surface_reg = r100_clear_surface_reg,
	.bandwidth_update = &rs690_bandwidth_update,
	.hdp_flush = &r100_hdp_flush,
};


/*
 * rv515
 */
int rv515_init(struct radeon_device *rdev);
void rv515_fini(struct radeon_device *rdev);
int rv515_gpu_reset(struct radeon_device *rdev);
uint32_t rv515_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rv515_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rv515_ring_start(struct radeon_device *rdev);
uint32_t rv515_pcie_rreg(struct radeon_device *rdev, uint32_t reg);
void rv515_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rv515_bandwidth_update(struct radeon_device *rdev);
int rv515_resume(struct radeon_device *rdev);
int rv515_suspend(struct radeon_device *rdev);
static struct radeon_asic rv515_asic = {
	.init = &rv515_init,
	.fini = &rv515_fini,
	.suspend = &rv515_suspend,
	.resume = &rv515_resume,
	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &rv515_gpu_reset,
	.gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
	.gart_set_page = &rv370_pcie_gart_set_page,
	.cp_commit = &r100_cp_commit,
	.ring_start = &rv515_ring_start,
	.ring_test = &r100_ring_test,
	.ring_ib_execute = &r100_ring_ib_execute,
	.irq_set = &rs600_irq_set,
	.irq_process = &rs600_irq_process,
	.get_vblank_counter = &rs600_get_vblank_counter,
	.fence_ring_emit = &r300_fence_ring_emit,
	.cs_parse = &r300_cs_parse,
	.copy_blit = &r100_copy_blit,
	.copy_dma = &r300_copy_dma,
	.copy = &r100_copy_blit,
	.get_engine_clock = &radeon_atom_get_engine_clock,
	.set_engine_clock = &radeon_atom_set_engine_clock,
	.get_memory_clock = &radeon_atom_get_memory_clock,
	.set_memory_clock = &radeon_atom_set_memory_clock,
	.set_pcie_lanes = &rv370_set_pcie_lanes,
	.set_clock_gating = &radeon_atom_set_clock_gating,
	.set_surface_reg = r100_set_surface_reg,
	.clear_surface_reg = r100_clear_surface_reg,
	.bandwidth_update = &rv515_bandwidth_update,
	.hdp_flush = &r100_hdp_flush,
};


/*
 * r520,rv530,rv560,rv570,r580
 */
int r520_init(struct radeon_device *rdev);
int r520_resume(struct radeon_device *rdev);
static struct radeon_asic r520_asic = {
	.init = &r520_init,
	.fini = &rv515_fini,
	.suspend = &rv515_suspend,
	.resume = &r520_resume,
	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &rv515_gpu_reset,
	.gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
	.gart_set_page = &rv370_pcie_gart_set_page,
	.cp_commit = &r100_cp_commit,
	.ring_start = &rv515_ring_start,
	.ring_test = &r100_ring_test,
	.ring_ib_execute = &r100_ring_ib_execute,
	.irq_set = &rs600_irq_set,
	.irq_process = &rs600_irq_process,
	.get_vblank_counter = &rs600_get_vblank_counter,
	.fence_ring_emit = &r300_fence_ring_emit,
	.cs_parse = &r300_cs_parse,
	.copy_blit = &r100_copy_blit,
	.copy_dma = &r300_copy_dma,
	.copy = &r100_copy_blit,
	.get_engine_clock = &radeon_atom_get_engine_clock,
	.set_engine_clock = &radeon_atom_set_engine_clock,
	.get_memory_clock = &radeon_atom_get_memory_clock,
	.set_memory_clock = &radeon_atom_set_memory_clock,
	.set_pcie_lanes = &rv370_set_pcie_lanes,
	.set_clock_gating = &radeon_atom_set_clock_gating,
	.set_surface_reg = r100_set_surface_reg,
	.clear_surface_reg = r100_clear_surface_reg,
	.bandwidth_update = &rv515_bandwidth_update,
	.hdp_flush = &r100_hdp_flush,
};

/*
 * r600,rv610,rv630,rv620,rv635,rv670,rs780,rs880
 */
int r600_init(struct radeon_device *rdev);
void r600_fini(struct radeon_device *rdev);
int r600_suspend(struct radeon_device *rdev);
int r600_resume(struct radeon_device *rdev);
void r600_vga_set_state(struct radeon_device *rdev, bool state);
int r600_wb_init(struct radeon_device *rdev);
void r600_wb_fini(struct radeon_device *rdev);
void r600_cp_commit(struct radeon_device *rdev);
void r600_pcie_gart_tlb_flush(struct radeon_device *rdev);
uint32_t r600_pciep_rreg(struct radeon_device *rdev, uint32_t reg);
void r600_pciep_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
int r600_cs_parse(struct radeon_cs_parser *p);
void r600_fence_ring_emit(struct radeon_device *rdev,
			  struct radeon_fence *fence);
int r600_copy_dma(struct radeon_device *rdev,
		  uint64_t src_offset,
		  uint64_t dst_offset,
		  unsigned num_pages,
		  struct radeon_fence *fence);
int r600_irq_process(struct radeon_device *rdev);
int r600_irq_set(struct radeon_device *rdev);
int r600_gpu_reset(struct radeon_device *rdev);
int r600_set_surface_reg(struct radeon_device *rdev, int reg,
			 uint32_t tiling_flags, uint32_t pitch,
			 uint32_t offset, uint32_t obj_size);
int r600_clear_surface_reg(struct radeon_device *rdev, int reg);
void r600_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
int r600_ring_test(struct radeon_device *rdev);
int r600_copy_blit(struct radeon_device *rdev,
		   uint64_t src_offset, uint64_t dst_offset,
		   unsigned num_pages, struct radeon_fence *fence);
void r600_hdp_flush(struct radeon_device *rdev);

static struct radeon_asic r600_asic = {
	.init = &r600_init,
	.fini = &r600_fini,
	.suspend = &r600_suspend,
	.resume = &r600_resume,
	.cp_commit = &r600_cp_commit,
	.vga_set_state = &r600_vga_set_state,
	.gpu_reset = &r600_gpu_reset,
	.gart_tlb_flush = &r600_pcie_gart_tlb_flush,
	.gart_set_page = &rs600_gart_set_page,
	.ring_test = &r600_ring_test,
	.ring_ib_execute = &r600_ring_ib_execute,
	.irq_set = &r600_irq_set,
	.irq_process = &r600_irq_process,
	.get_vblank_counter = &rs600_get_vblank_counter,
	.fence_ring_emit = &r600_fence_ring_emit,
	.cs_parse = &r600_cs_parse,
	.copy_blit = &r600_copy_blit,
	.copy_dma = &r600_copy_blit,
	.copy = &r600_copy_blit,
	.get_engine_clock = &radeon_atom_get_engine_clock,
	.set_engine_clock = &radeon_atom_set_engine_clock,
	.get_memory_clock = &radeon_atom_get_memory_clock,
	.set_memory_clock = &radeon_atom_set_memory_clock,
	.set_pcie_lanes = NULL,
	.set_clock_gating = &radeon_atom_set_clock_gating,
	.set_surface_reg = r600_set_surface_reg,
	.clear_surface_reg = r600_clear_surface_reg,
	.bandwidth_update = &rv515_bandwidth_update,
	.hdp_flush = &r600_hdp_flush,
};

/*
 * rv770,rv730,rv710,rv740
 */
int rv770_init(struct radeon_device *rdev);
void rv770_fini(struct radeon_device *rdev);
int rv770_suspend(struct radeon_device *rdev);
int rv770_resume(struct radeon_device *rdev);
int rv770_gpu_reset(struct radeon_device *rdev);

static struct radeon_asic rv770_asic = {
	.init = &rv770_init,
	.fini = &rv770_fini,
	.suspend = &rv770_suspend,
	.resume = &rv770_resume,
	.cp_commit = &r600_cp_commit,
	.gpu_reset = &rv770_gpu_reset,
	.vga_set_state = &r600_vga_set_state,
	.gart_tlb_flush = &r600_pcie_gart_tlb_flush,
	.gart_set_page = &rs600_gart_set_page,
	.ring_test = &r600_ring_test,
	.ring_ib_execute = &r600_ring_ib_execute,
	.irq_set = &r600_irq_set,
	.irq_process = &r600_irq_process,
	.get_vblank_counter = &rs600_get_vblank_counter,
	.fence_ring_emit = &r600_fence_ring_emit,
	.cs_parse = &r600_cs_parse,
	.copy_blit = &r600_copy_blit,
	.copy_dma = &r600_copy_blit,
	.copy = &r600_copy_blit,
	.get_engine_clock = &radeon_atom_get_engine_clock,
	.set_engine_clock = &radeon_atom_set_engine_clock,
	.get_memory_clock = &radeon_atom_get_memory_clock,
	.set_memory_clock = &radeon_atom_set_memory_clock,
	.set_pcie_lanes = NULL,
	.set_clock_gating = &radeon_atom_set_clock_gating,
	.set_surface_reg = r600_set_surface_reg,
	.clear_surface_reg = r600_clear_surface_reg,
	.bandwidth_update = &rv515_bandwidth_update,
	.hdp_flush = &r600_hdp_flush,
};

#endif