/*
 * dma.h - Blackfin DMA defines/structures/etc...
 *
 * Copyright 2004-2008 Analog Devices Inc.
 * Licensed under the GPL-2 or later.
 */

#ifndef _BLACKFIN_DMA_H_
#define _BLACKFIN_DMA_H_

#include <linux/interrupt.h>
#include <mach/dma.h>
#include <asm/atomic.h>
#include <asm/blackfin.h>
#include <asm/page.h>

/* DMA may target any kernel linear address */
#define MAX_DMA_ADDRESS PAGE_OFFSET

/* DMA_CONFIG Masks */
#define DMAEN			0x0001	/* DMA Channel Enable */
#define WNR				0x0002	/* Channel Direction (W/R*) */
#define WDSIZE_8		0x0000	/* Transfer Word Size = 8 */
#define WDSIZE_16		0x0004	/* Transfer Word Size = 16 */
#define WDSIZE_32		0x0008	/* Transfer Word Size = 32 */
#define DMA2D			0x0010	/* DMA Mode (2D/1D*) */
#define RESTART			0x0020	/* DMA Buffer Clear */
#define DI_SEL			0x0040	/* Data Interrupt Timing Select */
#define DI_EN			0x0080	/* Data Interrupt Enable */
#define NDSIZE_0		0x0000	/* Next Descriptor Size = 0 (Stop/Autobuffer) */
#define NDSIZE_1		0x0100	/* Next Descriptor Size = 1 */
#define NDSIZE_2		0x0200	/* Next Descriptor Size = 2 */
#define NDSIZE_3		0x0300	/* Next Descriptor Size = 3 */
#define NDSIZE_4		0x0400	/* Next Descriptor Size = 4 */
#define NDSIZE_5		0x0500	/* Next Descriptor Size = 5 */
#define NDSIZE_6		0x0600	/* Next Descriptor Size = 6 */
#define NDSIZE_7		0x0700	/* Next Descriptor Size = 7 */
#define NDSIZE_8		0x0800	/* Next Descriptor Size = 8 */
#define NDSIZE_9		0x0900	/* Next Descriptor Size = 9 */
#define NDSIZE			0x0f00	/* Next Descriptor Size (field mask) */
#define DMAFLOW			0x7000	/* Flow Control (field mask) */
#define DMAFLOW_STOP	0x0000	/* Stop Mode */
#define DMAFLOW_AUTO	0x1000	/* Autobuffer Mode */
#define DMAFLOW_ARRAY	0x4000	/* Descriptor Array Mode */
#define DMAFLOW_SMALL	0x6000	/* Small Model Descriptor List Mode */
#define DMAFLOW_LARGE	0x7000	/* Large Model Descriptor List Mode */

/* DMA_IRQ_STATUS Masks */
#define DMA_DONE		0x0001	/* DMA Completion Interrupt Status */
#define DMA_ERR			0x0002	/* DMA Error Interrupt Status */
#define DFETCH			0x0004	/* DMA Descriptor Fetch Indicator */
#define DMA_RUN			0x0008	/* DMA Channel Running Indicator */
/*-------------------------
 * config reg bits value
 * (field values consumed by set_bfin_dma_config() below)
 *-------------------------*/
#define DATA_SIZE_8			0
#define DATA_SIZE_16		1
#define DATA_SIZE_32		2

#define DMA_FLOW_STOP		0
#define DMA_FLOW_AUTO		1
#define DMA_FLOW_ARRAY		4
#define DMA_FLOW_SMALL		6
#define DMA_FLOW_LARGE		7

#define DIMENSION_LINEAR	0
#define DIMENSION_2D		1

#define DIR_READ			0
#define DIR_WRITE			1

#define INTR_DISABLE		0
#define INTR_ON_BUF			2
#define INTR_ON_ROW			3

#define DMA_NOSYNC_KEEP_DMA_BUF	0
#define DMA_SYNC_RESTART		1

/* One DMA descriptor as the hardware fetches it; packed so the
 * in-memory layout matches the controller's expectations exactly. */
struct dmasg {
	void *next_desc_addr;	/* next descriptor in the chain */
	unsigned long start_addr;	/* buffer start address */
	unsigned short cfg;	/* DMA_CONFIG value for this transfer */
	unsigned short x_count;
	short x_modify;
	unsigned short y_count;
	short y_modify;
} __attribute__((packed));

/* Memory-mapped register layout of one DMA channel.  The dummyN/reserved
 * members pad 16-bit registers out to the hardware's 32-bit register
 * spacing -- do not reorder or resize. */
struct dma_register {
	void *next_desc_ptr;	/* DMA Next Descriptor Pointer register */
	unsigned long start_addr;	/* DMA Start address  register */

	unsigned short cfg;	/* DMA Configuration register */
	unsigned short dummy1;	/* padding to 32-bit register stride */

	unsigned long reserved;

	unsigned short x_count;	/* DMA x_count register */
	unsigned short dummy2;

	short x_modify;	/* DMA x_modify register */
	unsigned short dummy3;

	unsigned short y_count;	/* DMA y_count register */
	unsigned short dummy4;

	short y_modify;	/* DMA y_modify register */
	unsigned short dummy5;

	void *curr_desc_ptr;	/* DMA Current Descriptor Pointer
					   register */
	unsigned long curr_addr_ptr;	/* DMA Current Address Pointer
						   register */
	unsigned short irq_status;	/* DMA irq status register */
	unsigned short dummy6;

	unsigned short peripheral_map;	/* DMA peripheral map register */
	unsigned short dummy7;

	unsigned short curr_x_count;	/* DMA Current x-count register */
	unsigned short dummy8;

	unsigned long reserved2;

	unsigned short curr_y_count;	/* DMA Current y-count register */
	unsigned short dummy9;

	unsigned long reserved3;

};

struct dma_channel {
133
	const char *device_id;
134
	atomic_t chan_status;
135
	volatile struct dma_register *regs;
B
Bryan Wu 已提交
136
	struct dmasg *sg;		/* large mode descriptor */
137
	unsigned int irq;
B
Bryan Wu 已提交
138
	void *data;
139 140 141
#ifdef CONFIG_PM
	unsigned short saved_peripheral_map;
#endif
B
Bryan Wu 已提交
142 143
};

#ifdef CONFIG_PM
/* Suspend/resume hooks for the DMA controller (power management). */
int blackfin_dma_suspend(void);
void blackfin_dma_resume(void);
#endif

/*******************************************************************************
*	DMA API's
*******************************************************************************/
extern struct dma_channel dma_ch[MAX_DMA_CHANNELS];
extern struct dma_register *dma_io_base_addr[MAX_DMA_CHANNELS];
extern int channel2irq(unsigned int channel);

static inline void set_dma_start_addr(unsigned int channel, unsigned long addr)
{
	dma_ch[channel].regs->start_addr = addr;
}
160
static inline void set_dma_next_desc_addr(unsigned int channel, void *addr)
161 162 163
{
	dma_ch[channel].regs->next_desc_ptr = addr;
}
164
static inline void set_dma_curr_desc_addr(unsigned int channel, void *addr)
165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 191 192 193 194 195 196 197 198 199 200 201 202 203 204 205 206 207 208 209 210 211 212
{
	dma_ch[channel].regs->curr_desc_ptr = addr;
}
static inline void set_dma_x_count(unsigned int channel, unsigned short x_count)
{
	dma_ch[channel].regs->x_count = x_count;
}
static inline void set_dma_y_count(unsigned int channel, unsigned short y_count)
{
	dma_ch[channel].regs->y_count = y_count;
}
static inline void set_dma_x_modify(unsigned int channel, short x_modify)
{
	dma_ch[channel].regs->x_modify = x_modify;
}
static inline void set_dma_y_modify(unsigned int channel, short y_modify)
{
	dma_ch[channel].regs->y_modify = y_modify;
}
static inline void set_dma_config(unsigned int channel, unsigned short config)
{
	dma_ch[channel].regs->cfg = config;
}
static inline void set_dma_curr_addr(unsigned int channel, unsigned long addr)
{
	dma_ch[channel].regs->curr_addr_ptr = addr;
}

static inline unsigned short
set_bfin_dma_config(char direction, char flow_mode,
		    char intr_mode, char dma_mode, char width, char syncmode)
{
	return (direction << 1) | (width << 2) | (dma_mode << 4) |
		(intr_mode << 6) | (flow_mode << 12) | (syncmode << 5);
}

static inline unsigned short get_dma_curr_irqstat(unsigned int channel)
{
	return dma_ch[channel].regs->irq_status;
}
static inline unsigned short get_dma_curr_xcount(unsigned int channel)
{
	return dma_ch[channel].regs->curr_x_count;
}
static inline unsigned short get_dma_curr_ycount(unsigned int channel)
{
	return dma_ch[channel].regs->curr_y_count;
}
213
static inline void *get_dma_next_desc_ptr(unsigned int channel)
214 215 216
{
	return dma_ch[channel].regs->next_desc_ptr;
}
217
static inline void *get_dma_curr_desc_ptr(unsigned int channel)
218 219 220
{
	return dma_ch[channel].regs->curr_desc_ptr;
}
221 222 223 224
static inline unsigned short get_dma_config(unsigned int channel)
{
	return dma_ch[channel].regs->cfg;
}
225 226 227 228 229 230 231
static inline unsigned long get_dma_curr_addr(unsigned int channel)
{
	return dma_ch[channel].regs->curr_addr_ptr;
}

static inline void set_dma_sg(unsigned int channel, struct dmasg *sg, int ndsize)
{
232 233 234 235 236 237 238
	/* Make sure the internal data buffers in the core are drained
	 * so that the DMA descriptors are completely written when the
	 * DMA engine goes to fetch them below.
	 */
	SSYNC();

	dma_ch[channel].regs->next_desc_ptr = sg;
239 240 241
	dma_ch[channel].regs->cfg =
		(dma_ch[channel].regs->cfg & ~(0xf << 8)) |
		((ndsize & 0xf) << 8);
242 243 244 245
}

static inline int dma_channel_active(unsigned int channel)
{
246
	return atomic_read(&dma_ch[channel].chan_status);
247 248 249 250 251 252 253 254 255 256 257 258 259
}

static inline void disable_dma(unsigned int channel)
{
	dma_ch[channel].regs->cfg &= ~DMAEN;
	SSYNC();
}
static inline void enable_dma(unsigned int channel)
{
	dma_ch[channel].regs->curr_x_count = 0;
	dma_ch[channel].regs->curr_y_count = 0;
	dma_ch[channel].regs->cfg |= DMAEN;
}
B
Bryan Wu 已提交
260
void free_dma(unsigned int channel);
261
int request_dma(unsigned int channel, const char *device_id);
262 263 264 265 266 267 268 269 270 271 272 273 274 275 276
int set_dma_callback(unsigned int channel, irq_handler_t callback, void *data);

/* Mask the interrupt line associated with @channel. */
static inline void dma_disable_irq(unsigned int channel)
{
	unsigned int irq = dma_ch[channel].irq;

	disable_irq(irq);
}
/* Unmask the interrupt line associated with @channel. */
static inline void dma_enable_irq(unsigned int channel)
{
	unsigned int irq = dma_ch[channel].irq;

	enable_irq(irq);
}
static inline void clear_dma_irqstat(unsigned int channel)
{
	dma_ch[channel].regs->irq_status = DMA_DONE | DMA_ERR;
}

/* DMA-driven memcpy helpers; the early_* variants are usable before
 * the DMA core is fully initialized. */
void *dma_memcpy(void *dest, const void *src, size_t count);
void *safe_dma_memcpy(void *dest, const void *src, size_t count);

void blackfin_dma_early_init(void);
void early_dma_memcpy(void *dest, const void *src, size_t count);
void early_dma_memcpy_done(void);

#endif