/*
 * dma.h - Blackfin DMA defines/structures/etc...
 *
 * Copyright 2004-2008 Analog Devices Inc.
 * Licensed under the GPL-2 or later.
 */

#ifndef _BLACKFIN_DMA_H_
#define _BLACKFIN_DMA_H_

#include <linux/interrupt.h>
12
#include <mach/dma.h>
13
#include <asm/atomic.h>
B
Bryan Wu 已提交
14
#include <asm/blackfin.h>
15
#include <asm/page.h>
16
#include <asm-generic/dma.h>
B
Bryan Wu 已提交
17

/* DMA_CONFIG Masks */
#define DMAEN			0x0001	/* DMA Channel Enable */
#define WNR				0x0002	/* Channel Direction (W/R*) */
#define WDSIZE_8		0x0000	/* Transfer Word Size = 8 */
#define WDSIZE_16		0x0004	/* Transfer Word Size = 16 */
#define WDSIZE_32		0x0008	/* Transfer Word Size = 32 */
#define DMA2D			0x0010	/* DMA Mode (2D/1D*) */
#define RESTART			0x0020	/* DMA Buffer Clear */
#define DI_SEL			0x0040	/* Data Interrupt Timing Select */
#define DI_EN			0x0080	/* Data Interrupt Enable */
/* NDSIZE_n values occupy bits 8-11; NDSIZE is the field mask.
 * n is the number of descriptor elements the engine fetches. */
#define NDSIZE_0		0x0000	/* Next Descriptor Size = 0 (Stop/Autobuffer) */
#define NDSIZE_1		0x0100	/* Next Descriptor Size = 1 */
#define NDSIZE_2		0x0200	/* Next Descriptor Size = 2 */
#define NDSIZE_3		0x0300	/* Next Descriptor Size = 3 */
#define NDSIZE_4		0x0400	/* Next Descriptor Size = 4 */
#define NDSIZE_5		0x0500	/* Next Descriptor Size = 5 */
#define NDSIZE_6		0x0600	/* Next Descriptor Size = 6 */
#define NDSIZE_7		0x0700	/* Next Descriptor Size = 7 */
#define NDSIZE_8		0x0800	/* Next Descriptor Size = 8 */
#define NDSIZE_9		0x0900	/* Next Descriptor Size = 9 */
#define NDSIZE			0x0f00	/* Next Descriptor Size (field mask) */
/* DMAFLOW_* values occupy bits 12-14; DMAFLOW is the field mask. */
#define DMAFLOW			0x7000	/* Flow Control (field mask) */
#define DMAFLOW_STOP	0x0000	/* Stop Mode */
#define DMAFLOW_AUTO	0x1000	/* Autobuffer Mode */
#define DMAFLOW_ARRAY	0x4000	/* Descriptor Array Mode */
#define DMAFLOW_SMALL	0x6000	/* Small Model Descriptor List Mode */
#define DMAFLOW_LARGE	0x7000	/* Large Model Descriptor List Mode */

/* DMA_IRQ_STATUS Masks */
#define DMA_DONE		0x0001	/* DMA Completion Interrupt Status */
#define DMA_ERR			0x0002	/* DMA Error Interrupt Status */
#define DFETCH			0x0004	/* DMA Descriptor Fetch Indicator */
#define DMA_RUN			0x0008	/* DMA Channel Running Indicator */
/*-------------------------
 * config reg bits value
 *-------------------------*/
/* Unshifted field values for set_bfin_dma_config() below, which moves
 * each one into its DMA_CONFIG bit position. */

/* width: shifted to bits 2-3 (WDSIZE) */
#define DATA_SIZE_8			0
#define DATA_SIZE_16		1
#define DATA_SIZE_32		2

/* flow_mode: shifted to bits 12-14 (DMAFLOW) */
#define DMA_FLOW_STOP		0
#define DMA_FLOW_AUTO		1
#define DMA_FLOW_ARRAY		4
#define DMA_FLOW_SMALL		6
#define DMA_FLOW_LARGE		7

/* dma_mode: shifted to bit 4 (DMA2D) */
#define DIMENSION_LINEAR	0
#define DIMENSION_2D		1

/* direction: shifted to bit 1 (WNR) */
#define DIR_READ			0
#define DIR_WRITE			1

/* intr_mode: shifted to bits 6-7 (DI_EN/DI_SEL) */
#define INTR_DISABLE		0
#define INTR_ON_BUF			2
#define INTR_ON_ROW			3

/* syncmode: shifted to bit 5 (RESTART) */
#define DMA_NOSYNC_KEEP_DMA_BUF	0
#define DMA_SYNC_RESTART		1

/*
 * One DMA descriptor element, as fetched by the controller in the
 * descriptor flow modes.  The field order must match what the hardware
 * expects, hence the packed attribute -- do not reorder members.
 */
struct dmasg {
	void *next_desc_addr;		/* next descriptor in the chain */
	unsigned long start_addr;	/* transfer start address */
	unsigned short cfg;		/* DMA_CONFIG value for this descriptor */
	unsigned short x_count;		/* inner-loop element count */
	short x_modify;			/* inner-loop increment (signed) */
	unsigned short y_count;		/* outer-loop count (2D mode) */
	short y_modify;			/* outer-loop increment (signed, 2D mode) */
} __attribute__((packed));

/*
 * Layout of one DMA channel's memory-mapped register block.
 *
 * The dummyN and reserved members are padding so each register falls at
 * its hardware offset (NOTE(review): presumably matches the MMR spacing
 * of the part -- confirm against the hardware reference manual).
 * Do not reorder or repack this struct.
 */
struct dma_register {
	void *next_desc_ptr;	/* DMA Next Descriptor Pointer register */
	unsigned long start_addr;	/* DMA Start address  register */

	unsigned short cfg;	/* DMA Configuration register */
	unsigned short dummy1;	/* padding */

	unsigned long reserved;

	unsigned short x_count;	/* DMA x_count register */
	unsigned short dummy2;	/* padding */

	short x_modify;	/* DMA x_modify register */
	unsigned short dummy3;	/* padding */

	unsigned short y_count;	/* DMA y_count register */
	unsigned short dummy4;	/* padding */

	short y_modify;	/* DMA y_modify register */
	unsigned short dummy5;	/* padding */

	void *curr_desc_ptr;	/* DMA Current Descriptor Pointer
					   register */
	unsigned long curr_addr_ptr;	/* DMA Current Address Pointer
						   register */
	unsigned short irq_status;	/* DMA irq status register */
	unsigned short dummy6;	/* padding */

	unsigned short peripheral_map;	/* DMA peripheral map register */
	unsigned short dummy7;	/* padding */

	unsigned short curr_x_count;	/* DMA Current x-count register */
	unsigned short dummy8;	/* padding */

	unsigned long reserved2;

	unsigned short curr_y_count;	/* DMA Current y-count register */
	unsigned short dummy9;	/* padding */

	unsigned long reserved3;

};

/* Software bookkeeping for one DMA channel (see dma_ch[] below). */
struct dma_channel {
	const char *device_id;		/* identifier string for the owner */
	atomic_t chan_status;		/* nonzero while channel is in use */
	volatile struct dma_register *regs;	/* channel's register block */
	struct dmasg *sg;		/* large mode descriptor */
	unsigned int irq;		/* system IRQ number for this channel */
	void *data;			/* cookie handed to the IRQ callback
					 * (see set_dma_callback()) */
#ifdef CONFIG_PM
	unsigned short saved_peripheral_map;	/* saved across suspend/resume */
#endif
};

/* Power-management hooks, called around system suspend/resume. */
#ifdef CONFIG_PM
int blackfin_dma_suspend(void);
void blackfin_dma_resume(void);
#endif

/*******************************************************************************
*	DMA API's
*******************************************************************************/
/* Per-channel state, indexed by channel number (0..MAX_DMA_CHANNELS-1). */
extern struct dma_channel dma_ch[MAX_DMA_CHANNELS];
/* Base address of each channel's memory-mapped register block. */
extern struct dma_register *dma_io_base_addr[MAX_DMA_CHANNELS];
/* Map a DMA channel number to its system IRQ number. */
extern int channel2irq(unsigned int channel);

static inline void set_dma_start_addr(unsigned int channel, unsigned long addr)
{
	dma_ch[channel].regs->start_addr = addr;
}
159
static inline void set_dma_next_desc_addr(unsigned int channel, void *addr)
160 161 162
{
	dma_ch[channel].regs->next_desc_ptr = addr;
}
163
static inline void set_dma_curr_desc_addr(unsigned int channel, void *addr)
164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 191 192 193 194 195 196 197 198 199 200 201 202 203 204 205 206 207 208 209 210 211
{
	dma_ch[channel].regs->curr_desc_ptr = addr;
}
static inline void set_dma_x_count(unsigned int channel, unsigned short x_count)
{
	dma_ch[channel].regs->x_count = x_count;
}
static inline void set_dma_y_count(unsigned int channel, unsigned short y_count)
{
	dma_ch[channel].regs->y_count = y_count;
}
static inline void set_dma_x_modify(unsigned int channel, short x_modify)
{
	dma_ch[channel].regs->x_modify = x_modify;
}
static inline void set_dma_y_modify(unsigned int channel, short y_modify)
{
	dma_ch[channel].regs->y_modify = y_modify;
}
static inline void set_dma_config(unsigned int channel, unsigned short config)
{
	dma_ch[channel].regs->cfg = config;
}
static inline void set_dma_curr_addr(unsigned int channel, unsigned long addr)
{
	dma_ch[channel].regs->curr_addr_ptr = addr;
}

/*
 * Compose a DMA_CONFIG register value from its individual fields.
 * Bit positions: direction -> bit 1 (WNR), width -> bits 2-3 (WDSIZE),
 * dma_mode -> bit 4 (DMA2D), syncmode -> bit 5 (RESTART),
 * intr_mode -> bits 6-7 (DI_SEL/DI_EN), flow_mode -> bits 12-14 (DMAFLOW).
 * Callers pass the DATA_SIZE_* / DMA_FLOW_* / DIR_* / INTR_* values
 * defined above; the fields are not range-checked here.
 */
static inline unsigned short
set_bfin_dma_config(char direction, char flow_mode,
		    char intr_mode, char dma_mode, char width, char syncmode)
{
	unsigned short cfg = 0;

	cfg |= direction << 1;
	cfg |= width << 2;
	cfg |= dma_mode << 4;
	cfg |= syncmode << 5;
	cfg |= intr_mode << 6;
	cfg |= flow_mode << 12;

	return cfg;
}

static inline unsigned short get_dma_curr_irqstat(unsigned int channel)
{
	return dma_ch[channel].regs->irq_status;
}
static inline unsigned short get_dma_curr_xcount(unsigned int channel)
{
	return dma_ch[channel].regs->curr_x_count;
}
static inline unsigned short get_dma_curr_ycount(unsigned int channel)
{
	return dma_ch[channel].regs->curr_y_count;
}
212
static inline void *get_dma_next_desc_ptr(unsigned int channel)
213 214 215
{
	return dma_ch[channel].regs->next_desc_ptr;
}
216
static inline void *get_dma_curr_desc_ptr(unsigned int channel)
217 218 219
{
	return dma_ch[channel].regs->curr_desc_ptr;
}
220 221 222 223
static inline unsigned short get_dma_config(unsigned int channel)
{
	return dma_ch[channel].regs->cfg;
}
224 225 226 227 228 229 230
static inline unsigned long get_dma_curr_addr(unsigned int channel)
{
	return dma_ch[channel].regs->curr_addr_ptr;
}

/*
 * Hand a descriptor chain to the DMA engine: point NEXT_DESC_PTR at @sg
 * and program the NDSIZE field of DMA_CONFIG with @ndsize (number of
 * descriptor elements to fetch).  The SSYNC must come first -- keep the
 * statement order as-is.
 */
static inline void set_dma_sg(unsigned int channel, struct dmasg *sg, int ndsize)
{
	/* Make sure the internal data buffers in the core are drained
	 * so that the DMA descriptors are completely written when the
	 * DMA engine goes to fetch them below.
	 */
	SSYNC();

	dma_ch[channel].regs->next_desc_ptr = sg;
	/* NDSIZE occupies bits 8-11 of DMA_CONFIG; preserve all other bits. */
	dma_ch[channel].regs->cfg =
		(dma_ch[channel].regs->cfg & ~(0xf << 8)) |
		((ndsize & 0xf) << 8);
}

/* Return @channel's chan_status counter; nonzero means the channel is
 * in use (exact states are managed by the DMA core, not visible here). */
static inline int dma_channel_active(unsigned int channel)
{
	return atomic_read(&dma_ch[channel].chan_status);
}

/* Stop @channel by clearing DMAEN; the SSYNC drains the write so the
 * controller has seen it before we return. */
static inline void disable_dma(unsigned int channel)
{
	dma_ch[channel].regs->cfg &= ~DMAEN;
	SSYNC();
}
/* Start @channel: reset the current x/y counters first, then set DMAEN
 * (keep this order -- the counters must be clean before enabling). */
static inline void enable_dma(unsigned int channel)
{
	dma_ch[channel].regs->curr_x_count = 0;
	dma_ch[channel].regs->curr_y_count = 0;
	dma_ch[channel].regs->cfg |= DMAEN;
}
/* Register @callback (invoked with @data) to service @channel's DMA IRQ;
 * returns 0 on success, negative on error (implemented in the DMA core). */
int set_dma_callback(unsigned int channel, irq_handler_t callback, void *data);

/* Mask @channel's DMA interrupt, waiting for in-flight handlers. */
static inline void dma_disable_irq(unsigned int channel)
{
	unsigned int irq = dma_ch[channel].irq;

	disable_irq(irq);
}
265 266 267 268
static inline void dma_disable_irq_nosync(unsigned int channel)
{
	disable_irq_nosync(dma_ch[channel].irq);
}
269 270 271 272 273 274 275 276 277
static inline void dma_enable_irq(unsigned int channel)
{
	enable_irq(dma_ch[channel].irq);
}
/* Acknowledge @channel's completion and error interrupts by writing
 * DMA_DONE | DMA_ERR to IRQ_STATUS (NOTE(review): presumably
 * write-1-to-clear bits -- confirm against the hardware manual). */
static inline void clear_dma_irqstat(unsigned int channel)
{
	dma_ch[channel].regs->irq_status = DMA_DONE | DMA_ERR;
}

/* DMA-driven memcpy helpers implemented by the DMA core (semantics not
 * visible from this header). */
void *dma_memcpy(void *dest, const void *src, size_t count);
void *safe_dma_memcpy(void *dest, const void *src, size_t count);
/* Early-boot DMA support, usable before the normal DMA API is up. */
void blackfin_dma_early_init(void);
void early_dma_memcpy(void *dest, const void *src, size_t count);
void early_dma_memcpy_done(void);

#endif