// SPDX-License-Identifier: GPL-2.0
/* Wrapper for DMA channel allocator that updates DMA client muxing.
 * Copyright 2004-2007, Axis Communications AB
 */
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/errno.h>

#include <asm/dma.h>
#include <arch/svinto.h>
#include <arch/system.h>
14 /* Macro to access ETRAX 100 registers */
15 #define SETS(var, reg, field, val) var = (var & ~IO_MASK_(reg##_, field##_)) | \
16 IO_STATE_(reg##_, field##_, _##val)
19 static char used_dma_channels[MAX_DMA_CHANNELS];
20 static const char * used_dma_channels_users[MAX_DMA_CHANNELS];
22 int cris_request_dma(unsigned int dmanr, const char * device_id,
23 unsigned options, enum dma_owner owner)
26 unsigned long int gens;
29 if (dmanr >= MAX_DMA_CHANNELS) {
30 printk(KERN_CRIT "cris_request_dma: invalid DMA channel %u\n", dmanr);
34 local_irq_save(flags);
35 if (used_dma_channels[dmanr]) {
36 local_irq_restore(flags);
37 if (options & DMA_VERBOSE_ON_ERROR) {
38 printk(KERN_CRIT "Failed to request DMA %i for %s, already allocated by %s\n", dmanr, device_id, used_dma_channels_users[dmanr]);
40 if (options & DMA_PANIC_ON_ERROR) {
41 panic("request_dma error!");
46 gens = genconfig_shadow;
51 if ((dmanr != NETWORK_TX_DMA_NBR) &&
52 (dmanr != NETWORK_RX_DMA_NBR)) {
53 printk(KERN_CRIT "Invalid DMA channel for eth\n");
58 if (dmanr == SER0_TX_DMA_NBR) {
59 SETS(gens, R_GEN_CONFIG, dma6, serial0);
60 } else if (dmanr == SER0_RX_DMA_NBR) {
61 SETS(gens, R_GEN_CONFIG, dma7, serial0);
63 printk(KERN_CRIT "Invalid DMA channel for ser0\n");
68 if (dmanr == SER1_TX_DMA_NBR) {
69 SETS(gens, R_GEN_CONFIG, dma8, serial1);
70 } else if (dmanr == SER1_RX_DMA_NBR) {
71 SETS(gens, R_GEN_CONFIG, dma9, serial1);
73 printk(KERN_CRIT "Invalid DMA channel for ser1\n");
78 if (dmanr == SER2_TX_DMA_NBR) {
79 SETS(gens, R_GEN_CONFIG, dma2, serial2);
80 } else if (dmanr == SER2_RX_DMA_NBR) {
81 SETS(gens, R_GEN_CONFIG, dma3, serial2);
83 printk(KERN_CRIT "Invalid DMA channel for ser2\n");
88 if (dmanr == SER3_TX_DMA_NBR) {
89 SETS(gens, R_GEN_CONFIG, dma4, serial3);
90 } else if (dmanr == SER3_RX_DMA_NBR) {
91 SETS(gens, R_GEN_CONFIG, dma5, serial3);
93 printk(KERN_CRIT "Invalid DMA channel for ser3\n");
98 if (dmanr == ATA_TX_DMA_NBR) {
99 SETS(gens, R_GEN_CONFIG, dma2, ata);
100 } else if (dmanr == ATA_RX_DMA_NBR) {
101 SETS(gens, R_GEN_CONFIG, dma3, ata);
103 printk(KERN_CRIT "Invalid DMA channel for ata\n");
108 if (dmanr == EXTDMA0_TX_DMA_NBR) {
109 SETS(gens, R_GEN_CONFIG, dma4, extdma0);
110 } else if (dmanr == EXTDMA0_RX_DMA_NBR) {
111 SETS(gens, R_GEN_CONFIG, dma5, extdma0);
113 printk(KERN_CRIT "Invalid DMA channel for ext0\n");
118 if (dmanr == EXTDMA1_TX_DMA_NBR) {
119 SETS(gens, R_GEN_CONFIG, dma6, extdma1);
120 } else if (dmanr == EXTDMA1_RX_DMA_NBR) {
121 SETS(gens, R_GEN_CONFIG, dma7, extdma1);
123 printk(KERN_CRIT "Invalid DMA channel for ext1\n");
128 if (dmanr == MEM2MEM_RX_DMA_NBR) {
129 SETS(gens, R_GEN_CONFIG, dma7, intdma6);
131 printk(KERN_CRIT "Invalid DMA channel for int6\n");
136 if (dmanr == MEM2MEM_TX_DMA_NBR) {
137 SETS(gens, R_GEN_CONFIG, dma6, intdma7);
139 printk(KERN_CRIT "Invalid DMA channel for int7\n");
144 if (dmanr == USB_TX_DMA_NBR) {
145 SETS(gens, R_GEN_CONFIG, dma8, usb);
146 } else if (dmanr == USB_RX_DMA_NBR) {
147 SETS(gens, R_GEN_CONFIG, dma9, usb);
149 printk(KERN_CRIT "Invalid DMA channel for usb\n");
154 if (dmanr == SCSI0_TX_DMA_NBR) {
155 SETS(gens, R_GEN_CONFIG, dma2, scsi0);
156 } else if (dmanr == SCSI0_RX_DMA_NBR) {
157 SETS(gens, R_GEN_CONFIG, dma3, scsi0);
159 printk(KERN_CRIT "Invalid DMA channel for scsi0\n");
164 if (dmanr == SCSI1_TX_DMA_NBR) {
165 SETS(gens, R_GEN_CONFIG, dma4, scsi1);
166 } else if (dmanr == SCSI1_RX_DMA_NBR) {
167 SETS(gens, R_GEN_CONFIG, dma5, scsi1);
169 printk(KERN_CRIT "Invalid DMA channel for scsi1\n");
174 if (dmanr == PAR0_TX_DMA_NBR) {
175 SETS(gens, R_GEN_CONFIG, dma2, par0);
176 } else if (dmanr == PAR0_RX_DMA_NBR) {
177 SETS(gens, R_GEN_CONFIG, dma3, par0);
179 printk(KERN_CRIT "Invalid DMA channel for par0\n");
184 if (dmanr == PAR1_TX_DMA_NBR) {
185 SETS(gens, R_GEN_CONFIG, dma4, par1);
186 } else if (dmanr == PAR1_RX_DMA_NBR) {
187 SETS(gens, R_GEN_CONFIG, dma5, par1);
189 printk(KERN_CRIT "Invalid DMA channel for par1\n");
194 printk(KERN_CRIT "Invalid DMA owner.\n");
198 used_dma_channels[dmanr] = 1;
199 used_dma_channels_users[dmanr] = device_id;
203 genconfig_shadow = gens;
204 *R_GEN_CONFIG = genconfig_shadow;
205 /* Wait 12 cycles before doing any DMA command */
206 for(i = 6; i > 0; i--)
211 local_irq_restore(flags);
215 void cris_free_dma(unsigned int dmanr, const char * device_id)
218 if (dmanr >= MAX_DMA_CHANNELS) {
219 printk(KERN_CRIT "cris_free_dma: invalid DMA channel %u\n", dmanr);
223 local_irq_save(flags);
224 if (!used_dma_channels[dmanr]) {
225 printk(KERN_CRIT "cris_free_dma: DMA channel %u not allocated\n", dmanr);
226 } else if (device_id != used_dma_channels_users[dmanr]) {
227 printk(KERN_CRIT "cris_free_dma: DMA channel %u not allocated by device\n", dmanr);
232 *R_DMA_CH0_CMD = IO_STATE(R_DMA_CH0_CMD, cmd, reset);
233 while (IO_EXTRACT(R_DMA_CH0_CMD, cmd, *R_DMA_CH0_CMD) ==
234 IO_STATE_VALUE(R_DMA_CH0_CMD, cmd, reset));
237 *R_DMA_CH1_CMD = IO_STATE(R_DMA_CH1_CMD, cmd, reset);
238 while (IO_EXTRACT(R_DMA_CH1_CMD, cmd, *R_DMA_CH1_CMD) ==
239 IO_STATE_VALUE(R_DMA_CH1_CMD, cmd, reset));
242 *R_DMA_CH2_CMD = IO_STATE(R_DMA_CH2_CMD, cmd, reset);
243 while (IO_EXTRACT(R_DMA_CH2_CMD, cmd, *R_DMA_CH2_CMD) ==
244 IO_STATE_VALUE(R_DMA_CH2_CMD, cmd, reset));
247 *R_DMA_CH3_CMD = IO_STATE(R_DMA_CH3_CMD, cmd, reset);
248 while (IO_EXTRACT(R_DMA_CH3_CMD, cmd, *R_DMA_CH3_CMD) ==
249 IO_STATE_VALUE(R_DMA_CH3_CMD, cmd, reset));
252 *R_DMA_CH4_CMD = IO_STATE(R_DMA_CH4_CMD, cmd, reset);
253 while (IO_EXTRACT(R_DMA_CH4_CMD, cmd, *R_DMA_CH4_CMD) ==
254 IO_STATE_VALUE(R_DMA_CH4_CMD, cmd, reset));
257 *R_DMA_CH5_CMD = IO_STATE(R_DMA_CH5_CMD, cmd, reset);
258 while (IO_EXTRACT(R_DMA_CH5_CMD, cmd, *R_DMA_CH5_CMD) ==
259 IO_STATE_VALUE(R_DMA_CH5_CMD, cmd, reset));
262 *R_DMA_CH6_CMD = IO_STATE(R_DMA_CH6_CMD, cmd, reset);
263 while (IO_EXTRACT(R_DMA_CH6_CMD, cmd, *R_DMA_CH6_CMD) ==
264 IO_STATE_VALUE(R_DMA_CH6_CMD, cmd, reset));
267 *R_DMA_CH7_CMD = IO_STATE(R_DMA_CH7_CMD, cmd, reset);
268 while (IO_EXTRACT(R_DMA_CH7_CMD, cmd, *R_DMA_CH7_CMD) ==
269 IO_STATE_VALUE(R_DMA_CH7_CMD, cmd, reset));
272 *R_DMA_CH8_CMD = IO_STATE(R_DMA_CH8_CMD, cmd, reset);
273 while (IO_EXTRACT(R_DMA_CH8_CMD, cmd, *R_DMA_CH8_CMD) ==
274 IO_STATE_VALUE(R_DMA_CH8_CMD, cmd, reset));
277 *R_DMA_CH9_CMD = IO_STATE(R_DMA_CH9_CMD, cmd, reset);
278 while (IO_EXTRACT(R_DMA_CH9_CMD, cmd, *R_DMA_CH9_CMD) ==
279 IO_STATE_VALUE(R_DMA_CH9_CMD, cmd, reset));
282 used_dma_channels[dmanr] = 0;
284 local_irq_restore(flags);
/* Exported for use by on-chip peripheral drivers (serial, eth, ...). */
EXPORT_SYMBOL(cris_request_dma);
EXPORT_SYMBOL(cris_free_dma);