// SPDX-License-Identifier: GPL-2.0+
/*
 * Enhanced Direct Memory Access (EDMA3) Controller
 *
 * (C) Copyright 2014
 *     Texas Instruments Incorporated, <www.ti.com>
 *
 * Author: Ivan Khoronzhuk <ivan.khoronzhuk@ti.com>
 */

#include <asm/cache.h>
#include <asm/io.h>
#include <common.h>
#include <dm.h>
#include <dma-uclass.h>
#include <asm/omap_common.h>
#include <asm/ti-common/ti-edma3.h>

#define EDMA3_SL_BASE(slot)			(0x4000 + ((slot) << 5))
#define EDMA3_SL_MAX_NUM			512
#define EDMA3_SLOPT_FIFO_WIDTH_MASK		(0x7 << 8)

#define EDMA3_QCHMAP(ch)			(0x0200 + ((ch) << 2))
#define EDMA3_CHMAP_PARSET_MASK			0x1ff
#define EDMA3_CHMAP_PARSET_SHIFT		0x5
#define EDMA3_CHMAP_TRIGWORD_SHIFT		0x2

#define EDMA3_QEMCR				0x314
#define EDMA3_IPR				0x1068
#define EDMA3_IPRH				0x106c
#define EDMA3_ICR				0x1070
#define EDMA3_ICRH				0x1074
#define EDMA3_QEECR				0x1088
#define EDMA3_QEESR				0x108c
#define EDMA3_QSECR				0x1094

#define EDMA_FILL_BUFFER_SIZE			512

struct ti_edma3_priv {
	u32 base;
};

static u8 edma_fill_buffer[EDMA_FILL_BUFFER_SIZE] __aligned(ARCH_DMA_MINALIGN);

/**
 * qedma3_start - start qdma on a channel
 * @base: base address of edma
 * @cfg: pointer to struct edma3_channel_config specifying the parameter
 * RAM slot to associate with, the qdma channel number (chnum, 0-7), the
 * transfer completion code and the trigger slot word, which must
 * correspond to the word number in struct edma3_slot_layout whose write
 * generates the trigger event.
 */
void qedma3_start(u32 base, struct edma3_channel_config *cfg)
{
	u32 qchmap;

	/* Clear the pending int bit */
	if (cfg->complete_code < 32)
		__raw_writel(1 << cfg->complete_code, base + EDMA3_ICR);
	else
		__raw_writel(1 << (cfg->complete_code - 32), base + EDMA3_ICRH);

	/* Map parameter set and trigger word to the quick channel */
	qchmap = ((EDMA3_CHMAP_PARSET_MASK & cfg->slot)
		  << EDMA3_CHMAP_PARSET_SHIFT) |
		  (cfg->trigger_slot_word << EDMA3_CHMAP_TRIGWORD_SHIFT);

	__raw_writel(qchmap, base + EDMA3_QCHMAP(cfg->chnum));

	/* Clear missed event if set */
	__raw_writel(1 << cfg->chnum, base + EDMA3_QSECR);
	__raw_writel(1 << cfg->chnum, base + EDMA3_QEMCR);

	/* Enable qdma channel event */
	__raw_writel(1 << cfg->chnum, base + EDMA3_QEESR);
}
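
/*
 * Example (illustrative sketch; the base address, slot and channel numbers
 * are arbitrary caller choices, not mandated by this driver).  Assuming the
 * parameter RAM slot has already been programmed, e.g. with
 * edma3_slot_configure(), a transfer is typically driven as:
 *
 *	struct edma3_channel_config ch = {
 *		.slot			= 1,
 *		.chnum			= 0,
 *		.complete_code		= 0,
 *		.trigger_slot_word	= EDMA3_TWORD(dst),
 *	};
 *
 *	qedma3_start(edma3_base, &ch);
 *	edma3_set_dest_addr(edma3_base, ch.slot, dst);
 *	while (edma3_check_for_transfer(edma3_base, &ch))
 *		;
 *	qedma3_stop(edma3_base, &ch);
 *
 * Writing the trigger slot word (here the dst address) is what fires the
 * QDMA event; this is the same sequence __edma3_transfer() uses below.
 */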

/**
 * edma3_set_dest - set initial DMA destination address in parameter RAM slot
 * @base: base address of edma
 * @slot: parameter RAM slot being configured
 * @dst: physical address of destination (memory, controller FIFO, etc)
 * @mode: INCR, except in very rare cases
 * @width: ignored unless @mode is FIFO, in which case it specifies the
 *	width to use when addressing the fifo (e.g. W8BIT, W32BIT)
 *
 * Note that the destination address is modified during the DMA transfer
 * according to edma3_set_dest_index().
 */
void edma3_set_dest(u32 base, int slot, u32 dst, enum edma3_address_mode mode,
		    enum edma3_fifo_width width)
{
	u32 opt;
	struct edma3_slot_layout *rg;

	rg = (struct edma3_slot_layout *)(base + EDMA3_SL_BASE(slot));

	opt = __raw_readl(&rg->opt);
	if (mode == FIFO)
		opt = (opt & ~EDMA3_SLOPT_FIFO_WIDTH_MASK) |
		       (EDMA3_SLOPT_DST_ADDR_CONST_MODE |
			EDMA3_SLOPT_FIFO_WIDTH_SET(width));
	else
		opt &= ~EDMA3_SLOPT_DST_ADDR_CONST_MODE;

	__raw_writel(opt, &rg->opt);
	__raw_writel(dst, &rg->dst);
}

/**
 * edma3_set_dest_index - configure DMA destination address indexing
 * @base: base address of edma
 * @slot: parameter RAM slot being configured
 * @bidx: byte offset between destination arrays in a frame
 * @cidx: byte offset between destination frames in a block
 *
 * Offsets are specified to support either contiguous or discontiguous
 * memory transfers, or repeated access to a hardware register, as needed.
 * When accessing hardware registers, both offsets are normally zero.
 */
void edma3_set_dest_index(u32 base, unsigned slot, int bidx, int cidx)
{
	u32 src_dst_bidx;
	u32 src_dst_cidx;
	struct edma3_slot_layout *rg;

	rg = (struct edma3_slot_layout *)(base + EDMA3_SL_BASE(slot));

	src_dst_bidx = __raw_readl(&rg->src_dst_bidx);
	src_dst_cidx = __raw_readl(&rg->src_dst_cidx);

	__raw_writel((src_dst_bidx & 0x0000ffff) | (bidx << 16),
		     &rg->src_dst_bidx);
	__raw_writel((src_dst_cidx & 0x0000ffff) | (cidx << 16),
		     &rg->src_dst_cidx);
}
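
/*
 * Indexing example (illustrative sketch; the geometry is an arbitrary
 * assumption): to gather acnt-byte arrays spaced 'stride' bytes apart in
 * memory into a contiguous destination buffer, a caller could program
 *
 *	edma3_set_src_index(base, slot, stride, 0);
 *	edma3_set_dest_index(base, slot, acnt, 0);
 *
 * so the source pointer jumps by 'stride' between arrays while the
 * destination advances linearly.  For a register or FIFO endpoint both
 * offsets would normally stay 0 so the same address is reused.
 */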

/**
 * edma3_set_dest_addr - set destination address for slot only
 */
void edma3_set_dest_addr(u32 base, int slot, u32 dst)
{
	struct edma3_slot_layout *rg;

	rg = (struct edma3_slot_layout *)(base + EDMA3_SL_BASE(slot));
	__raw_writel(dst, &rg->dst);
}

/**
 * edma3_set_src - set initial DMA source address in parameter RAM slot
 * @base: base address of edma
 * @slot: parameter RAM slot being configured
 * @src: physical address of source (memory, controller FIFO, etc)
 * @mode: INCR, except in very rare cases
 * @width: ignored unless @mode is FIFO, in which case it specifies the
 *	width to use when addressing the fifo (e.g. W8BIT, W32BIT)
 *
 * Note that the source address is modified during the DMA transfer
 * according to edma3_set_src_index().
 */
void edma3_set_src(u32 base, int slot, u32 src, enum edma3_address_mode mode,
		   enum edma3_fifo_width width)
{
	u32 opt;
	struct edma3_slot_layout *rg;

	rg = (struct edma3_slot_layout *)(base + EDMA3_SL_BASE(slot));

	opt = __raw_readl(&rg->opt);
	if (mode == FIFO)
		opt = (opt & ~EDMA3_SLOPT_FIFO_WIDTH_MASK) |
		       (EDMA3_SLOPT_DST_ADDR_CONST_MODE |
			EDMA3_SLOPT_FIFO_WIDTH_SET(width));
	else
		opt &= ~EDMA3_SLOPT_DST_ADDR_CONST_MODE;

	__raw_writel(opt, &rg->opt);
	__raw_writel(src, &rg->src);
}

/**
 * edma3_set_src_index - configure DMA source address indexing
 * @base: base address of edma
 * @slot: parameter RAM slot being configured
 * @bidx: byte offset between source arrays in a frame
 * @cidx: byte offset between source frames in a block
 *
 * Offsets are specified to support either contiguous or discontiguous
 * memory transfers, or repeated access to a hardware register, as needed.
 * When accessing hardware registers, both offsets are normally zero.
 */
void edma3_set_src_index(u32 base, unsigned slot, int bidx, int cidx)
{
	u32 src_dst_bidx;
	u32 src_dst_cidx;
	struct edma3_slot_layout *rg;

	rg = (struct edma3_slot_layout *)(base + EDMA3_SL_BASE(slot));

	src_dst_bidx = __raw_readl(&rg->src_dst_bidx);
	src_dst_cidx = __raw_readl(&rg->src_dst_cidx);

	__raw_writel((src_dst_bidx & 0xffff0000) | bidx,
		     &rg->src_dst_bidx);
	__raw_writel((src_dst_cidx & 0xffff0000) | cidx,
		     &rg->src_dst_cidx);
}

/**
 * edma3_set_src_addr - set source address for slot only
 */
void edma3_set_src_addr(u32 base, int slot, u32 src)
{
	struct edma3_slot_layout *rg;

	rg = (struct edma3_slot_layout *)(base + EDMA3_SL_BASE(slot));
	__raw_writel(src, &rg->src);
}

/**
 * edma3_set_transfer_params - configure DMA transfer parameters
 * @base: base address of edma
 * @slot: parameter RAM slot being configured
 * @acnt: how many bytes per array (at least one)
 * @bcnt: how many arrays per frame (at least one)
 * @ccnt: how many frames per block (at least one)
 * @bcnt_rld: used only for A-Synchronized transfers; this specifies
 *	the value to reload into bcnt when it decrements to zero
 * @sync_mode: ASYNC or ABSYNC
 *
 * See the EDMA3 documentation to understand how to configure and link
 * transfers using the fields in PaRAM slots.  If you are not doing it
 * all at once with edma3_write_slot(), you will use this routine
 * plus two calls each for source and destination, setting the initial
 * address and saying how to index that address.
 *
 * An example of an A-Synchronized transfer is a serial link using a
 * single word shift register.  In that case, @acnt would be equal to
 * that word size; the serial controller issues a DMA synchronization
 * event to transfer each word, and memory access by the DMA transfer
 * controller will be word-at-a-time.
 *
 * An example of an AB-Synchronized transfer is a device using a FIFO.
 * In that case, @acnt equals the FIFO width and @bcnt equals its depth.
 * The controller with the FIFO issues DMA synchronization events when
 * the FIFO threshold is reached, and the DMA transfer controller will
 * transfer one frame to (or from) the FIFO.  It will probably use
 * efficient burst modes to access memory.
 */
void edma3_set_transfer_params(u32 base, int slot, int acnt,
			       int bcnt, int ccnt, u16 bcnt_rld,
			       enum edma3_sync_dimension sync_mode)
{
	u32 opt;
	u32 link_bcntrld;
	struct edma3_slot_layout *rg;

	rg = (struct edma3_slot_layout *)(base + EDMA3_SL_BASE(slot));

	link_bcntrld = __raw_readl(&rg->link_bcntrld);

	__raw_writel((bcnt_rld << 16) | (0x0000ffff & link_bcntrld),
		     &rg->link_bcntrld);

	opt = __raw_readl(&rg->opt);
	if (sync_mode == ASYNC)
		__raw_writel(opt & ~EDMA3_SLOPT_AB_SYNC, &rg->opt);
	else
		__raw_writel(opt | EDMA3_SLOPT_AB_SYNC, &rg->opt);

	/* Set the acount, bcount, ccount registers */
	__raw_writel((bcnt << 16) | (acnt & 0xffff), &rg->a_b_cnt);
	__raw_writel(0xffff & ccnt, &rg->ccnt);
}
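
/*
 * Example (illustrative sketch; the sizes are arbitrary assumptions): to
 * move 32 KiB as an AB-synchronized transfer in two 16 KiB arrays, one
 * could program 0x4000 bytes per array, 2 arrays per frame and one frame:
 *
 *	edma3_set_transfer_params(base, slot, 0x4000, 2, 1, 0, ABSYNC);
 *	edma3_set_src_index(base, slot, 0x4000, 0);
 *	edma3_set_dest_index(base, slot, 0x4000, 0);
 *
 * A single trigger event then moves the whole acnt * bcnt frame, which is
 * the same AB-synchronized setup __edma3_transfer() programs below via
 * edma3_slot_configure().
 */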

/**
 * edma3_write_slot - write parameter RAM data for slot
 * @base: base address of edma
 * @slot: number of parameter RAM slot being modified
 * @param: data to be written into parameter RAM slot
 *
 * Use this to assign all parameters of a transfer at once.  This
 * allows more efficient setup of transfers than issuing multiple
 * calls to set up those parameters in small pieces, and provides
 * complete control over all transfer options.
 */
void edma3_write_slot(u32 base, int slot, struct edma3_slot_layout *param)
{
	int i;
	u32 *p = (u32 *)param;
	u32 *addr = (u32 *)(base + EDMA3_SL_BASE(slot));

	for (i = 0; i < sizeof(struct edma3_slot_layout)/4; i++)
		__raw_writel(*p++, addr++);
}
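
/*
 * Example (illustrative sketch; the slot numbers are arbitrary caller
 * choices): a fully-populated struct edma3_slot_layout, e.g. one captured
 * with edma3_read_slot() below, can be cloned into another slot in one call:
 *
 *	struct edma3_slot_layout tmpl;
 *
 *	edma3_read_slot(base, src_slot, &tmpl);
 *	edma3_write_slot(base, dst_slot, &tmpl);
 */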

/**
 * edma3_read_slot - read parameter RAM data from slot
 * @base: base address of edma
 * @slot: number of parameter RAM slot being copied
 * @param: where to store copy of parameter RAM data
 *
 * Use this to read data from a parameter RAM slot, perhaps to
 * save them as a template for later reuse.
 */
void edma3_read_slot(u32 base, int slot, struct edma3_slot_layout *param)
{
	int i;
	u32 *p = (u32 *)param;
	u32 *addr = (u32 *)(base + EDMA3_SL_BASE(slot));

	for (i = 0; i < sizeof(struct edma3_slot_layout)/4; i++)
		*p++ = __raw_readl(addr++);
}
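
/**
 * edma3_slot_configure - program a parameter RAM slot from a config struct
 * @base: base address of edma
 * @slot: parameter RAM slot being configured
 * @cfg: pointer to struct edma3_slot_config holding the option word, source
 *	and destination addresses, counts, indexes, link and reload values
 */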
void edma3_slot_configure(u32 base, int slot, struct edma3_slot_config *cfg)
{
	struct edma3_slot_layout *rg;

	rg = (struct edma3_slot_layout *)(base + EDMA3_SL_BASE(slot));

	__raw_writel(cfg->opt, &rg->opt);
	__raw_writel(cfg->src, &rg->src);
	__raw_writel((cfg->bcnt << 16) | (cfg->acnt & 0xffff), &rg->a_b_cnt);
	__raw_writel(cfg->dst, &rg->dst);
	__raw_writel((cfg->dst_bidx << 16) |
		     (cfg->src_bidx & 0xffff), &rg->src_dst_bidx);
	__raw_writel((cfg->bcntrld << 16) |
		     (cfg->link & 0xffff), &rg->link_bcntrld);
	__raw_writel((cfg->dst_cidx << 16) |
		     (cfg->src_cidx & 0xffff), &rg->src_dst_cidx);
	__raw_writel(0xffff & cfg->ccnt, &rg->ccnt);
}

/**
 * edma3_check_for_transfer - check if a transfer completed by testing the
 * interrupt pending bit; clear the pending bit if the transfer is complete.
 * @base: base address of edma
 * @cfg: pointer to struct edma3_channel_config which was passed
 * to qedma3_start when you started the qdma channel
 *
 * Return 0 if complete, 1 if not.
 */
int edma3_check_for_transfer(u32 base, struct edma3_channel_config *cfg)
{
	u32 inum;
	u32 ipr_base;
	u32 icr_base;

	if (cfg->complete_code < 32) {
		ipr_base = base + EDMA3_IPR;
		icr_base = base + EDMA3_ICR;
		inum = 1 << cfg->complete_code;
	} else {
		ipr_base = base + EDMA3_IPRH;
		icr_base = base + EDMA3_ICRH;
		inum = 1 << (cfg->complete_code - 32);
	}

	/* check complete interrupt */
	if (!(__raw_readl(ipr_base) & inum))
		return 1;

	/* clean up the pending int bit */
	__raw_writel(inum, icr_base);

	return 0;
}

/**
 * qedma3_stop - stops dma on the channel passed
 * @base: base address of edma
 * @cfg: pointer to struct edma3_channel_config which was passed
 * to qedma3_start when you started the qdma channel
 */
void qedma3_stop(u32 base, struct edma3_channel_config *cfg)
{
	/* Disable qdma channel event */
	__raw_writel(1 << cfg->chnum, base + EDMA3_QEECR);

	/* clean up the interrupt indication */
	if (cfg->complete_code < 32)
		__raw_writel(1 << cfg->complete_code, base + EDMA3_ICR);
	else
		__raw_writel(1 << (cfg->complete_code - 32), base + EDMA3_ICRH);

	/* Clear missed event if set */
	__raw_writel(1 << cfg->chnum, base + EDMA3_QSECR);
	__raw_writel(1 << cfg->chnum, base + EDMA3_QEMCR);

	/* Clear the channel map */
	__raw_writel(0, base + EDMA3_QCHMAP(cfg->chnum));
}

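/**
 * __edma3_transfer - memory-to-memory copy using one QDMA channel
 * @edma3_base_addr: base address of edma
 * @edma_slot_num: parameter RAM slot to use for the transfer
 * @dst: destination buffer
 * @src: source buffer
 * @len: total number of bytes to copy
 * @s_len: size of the source buffer; when @len is larger, the source is
 *	re-read for every array (this is how __edma3_fill() replicates its
 *	pattern buffer)
 *
 * The copy is programmed as an AB-synchronized transfer of bcnt arrays of
 * acnt bytes; any remainder is moved by a second, smaller transfer.
 */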
void __edma3_transfer(unsigned long edma3_base_addr, unsigned int edma_slot_num,
		      void *dst, void *src, size_t len, size_t s_len)
{
	struct edma3_slot_config        slot;
	struct edma3_channel_config     edma_channel;
	int                             b_cnt_value = 1;
	int                             rem_bytes  = 0;
	int                             a_cnt_value = len;
	unsigned int                    addr = (unsigned int) (dst);
	unsigned int                    max_acnt  = 0x7FFFU;

	if (len > s_len) {
		b_cnt_value = (len / s_len);
		rem_bytes = (len % s_len);
		a_cnt_value = s_len;
	} else if (len > max_acnt) {
		b_cnt_value = (len / max_acnt);
		rem_bytes  = (len % max_acnt);
		a_cnt_value = max_acnt;
	}

	slot.opt        = 0;
	slot.src        = ((unsigned int) src);
	slot.acnt       = a_cnt_value;
	slot.bcnt       = b_cnt_value;
	slot.ccnt       = 1;
	if (len == s_len)
		slot.src_bidx = a_cnt_value;
	else
		slot.src_bidx = 0;
	slot.dst_bidx   = a_cnt_value;
	slot.src_cidx   = 0;
	slot.dst_cidx   = 0;
	slot.link       = EDMA3_PARSET_NULL_LINK;
	slot.bcntrld    = 0;
	slot.opt        = EDMA3_SLOPT_TRANS_COMP_INT_ENB |
			  EDMA3_SLOPT_COMP_CODE(0) |
			  EDMA3_SLOPT_STATIC | EDMA3_SLOPT_AB_SYNC;

	edma3_slot_configure(edma3_base_addr, edma_slot_num, &slot);
	edma_channel.slot = edma_slot_num;
	edma_channel.chnum = 0;
	edma_channel.complete_code = 0;
	/* set event trigger to dst update */
	edma_channel.trigger_slot_word = EDMA3_TWORD(dst);

	qedma3_start(edma3_base_addr, &edma_channel);
	edma3_set_dest_addr(edma3_base_addr, edma_channel.slot, addr);

	while (edma3_check_for_transfer(edma3_base_addr, &edma_channel))
		;
	qedma3_stop(edma3_base_addr, &edma_channel);

	if (rem_bytes != 0) {
		slot.opt        = 0;
		if (len == s_len)
			slot.src =
				(b_cnt_value * max_acnt) + ((unsigned int) src);
		else
			slot.src = (unsigned int) src;
		slot.acnt       = rem_bytes;
		slot.bcnt       = 1;
		slot.ccnt       = 1;
		slot.src_bidx   = rem_bytes;
		slot.dst_bidx   = rem_bytes;
		slot.src_cidx   = 0;
		slot.dst_cidx   = 0;
		slot.link       = EDMA3_PARSET_NULL_LINK;
		slot.bcntrld    = 0;
		slot.opt        = EDMA3_SLOPT_TRANS_COMP_INT_ENB |
				  EDMA3_SLOPT_COMP_CODE(0) |
				  EDMA3_SLOPT_STATIC | EDMA3_SLOPT_AB_SYNC;
		edma3_slot_configure(edma3_base_addr, edma_slot_num, &slot);
		edma_channel.slot = edma_slot_num;
		edma_channel.chnum = 0;
		edma_channel.complete_code = 0;
		/* set event trigger to dst update */
		edma_channel.trigger_slot_word = EDMA3_TWORD(dst);

		qedma3_start(edma3_base_addr, &edma_channel);
		edma3_set_dest_addr(edma3_base_addr, edma_channel.slot, addr +
				    (max_acnt * b_cnt_value));
		while (edma3_check_for_transfer(edma3_base_addr, &edma_channel))
			;
		qedma3_stop(edma3_base_addr, &edma_channel);
	}
}

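/**
 * __edma3_fill - fill a buffer with a byte value using EDMA3
 * @edma3_base_addr: base address of edma
 * @edma_slot_num: parameter RAM slot to use for the transfer
 * @dst: destination buffer
 * @val: byte value to fill with
 * @len: number of bytes to fill
 *
 * A small aligned pattern buffer (edma_fill_buffer) is initialized with
 * @val by the CPU and then copied repeatedly into @dst by
 * __edma3_transfer().
 */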
void __edma3_fill(unsigned long edma3_base_addr, unsigned int edma_slot_num,
		  void *dst, u8 val, size_t len)
{
	int xfer_len;
	int max_xfer = EDMA_FILL_BUFFER_SIZE * 65535;

	memset((void *)edma_fill_buffer, val, sizeof(edma_fill_buffer));

	while (len) {
		xfer_len = len;
		if (xfer_len > max_xfer)
			xfer_len = max_xfer;

		__edma3_transfer(edma3_base_addr, edma_slot_num, dst,
				 edma_fill_buffer, xfer_len,
				 EDMA_FILL_BUFFER_SIZE);
		len -= xfer_len;
		dst += xfer_len;
	}
}

#ifndef CONFIG_DMA

void edma3_transfer(unsigned long edma3_base_addr, unsigned int edma_slot_num,
		    void *dst, void *src, size_t len)
{
	__edma3_transfer(edma3_base_addr, edma_slot_num, dst, src, len, len);
}

void edma3_fill(unsigned long edma3_base_addr, unsigned int edma_slot_num,
		void *dst, u8 val, size_t len)
{
	__edma3_fill(edma3_base_addr, edma_slot_num, dst, val, len);
}
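
/*
 * Example (illustrative sketch; EDMA3_BASE and the slot number below are
 * board-specific assumptions, not defined in this file): with CONFIG_DMA
 * disabled, board or SoC code calls these wrappers directly:
 *
 *	edma3_transfer(EDMA3_BASE, 1, dst_buf, src_buf, length);
 *	edma3_fill(EDMA3_BASE, 1, dst_buf, 0, length);
 */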

#else

static int ti_edma3_transfer(struct udevice *dev, int direction, void *dst,
			     void *src, size_t len)
{
	struct ti_edma3_priv *priv = dev_get_priv(dev);

	/* enable edma3 clocks */
	enable_edma3_clocks();

	switch (direction) {
	case DMA_MEM_TO_MEM:
		__edma3_transfer(priv->base, 1, dst, src, len, len);
		break;
	default:
		pr_err("Transfer type not implemented in DMA driver\n");
		break;
	}

	/* disable edma3 clocks */
	disable_edma3_clocks();

	return 0;
}
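
/*
 * Example (illustrative sketch; dma_memcpy() is the generic DMA uclass
 * helper assumed to be declared in <dma.h> when CONFIG_DMA is enabled):
 * client code does not call this driver directly but goes through the
 * uclass, e.g.:
 *
 *	if (dma_memcpy(dst, src, len) < 0)
 *		printf("DMA copy failed\n");
 *
 * The uclass picks a device advertising DMA_SUPPORTS_MEM_TO_MEM (see
 * ti_edma3_probe() below) and ends up in ti_edma3_transfer().
 */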

static int ti_edma3_of_to_plat(struct udevice *dev)
{
	struct ti_edma3_priv *priv = dev_get_priv(dev);

	priv->base = dev_read_addr(dev);

	return 0;
}

static int ti_edma3_probe(struct udevice *dev)
{
	struct dma_dev_priv *uc_priv = dev_get_uclass_priv(dev);

	uc_priv->supported = DMA_SUPPORTS_MEM_TO_MEM;

	return 0;
}

static const struct dma_ops ti_edma3_ops = {
	.transfer	= ti_edma3_transfer,
};

static const struct udevice_id ti_edma3_ids[] = {
	{ .compatible = "ti,edma3" },
	{ }
};

U_BOOT_DRIVER(ti_edma3) = {
	.name	= "ti_edma3",
	.id	= UCLASS_DMA,
	.of_match = ti_edma3_ids,
	.ops	= &ti_edma3_ops,
	.of_to_plat = ti_edma3_of_to_plat,
	.probe	= ti_edma3_probe,
	.priv_auto	= sizeof(struct ti_edma3_priv),
};
#endif /* CONFIG_DMA */