xref: /linux/drivers/mtd/nand/raw/stm32_fmc2_nand.c (revision 9a6b55ac)
1 // SPDX-License-Identifier: GPL-2.0
2 /*
3  * Copyright (C) STMicroelectronics 2018
4  * Author: Christophe Kerello <christophe.kerello@st.com>
5  */
6 
7 #include <linux/clk.h>
8 #include <linux/dmaengine.h>
9 #include <linux/dma-mapping.h>
10 #include <linux/errno.h>
11 #include <linux/interrupt.h>
12 #include <linux/iopoll.h>
13 #include <linux/module.h>
14 #include <linux/mtd/rawnand.h>
15 #include <linux/pinctrl/consumer.h>
16 #include <linux/platform_device.h>
17 #include <linux/reset.h>
18 
19 /* Bad block marker length */
20 #define FMC2_BBM_LEN			2
21 
22 /* ECC step size */
23 #define FMC2_ECC_STEP_SIZE		512
24 
25 /* BCHDSRx registers length */
26 #define FMC2_BCHDSRS_LEN		20
27 
28 /* HECCR length */
29 #define FMC2_HECCR_LEN			4
30 
/* Max requests done for an 8k NAND page size */
32 #define FMC2_MAX_SG			16
33 
34 /* Max chip enable */
35 #define FMC2_MAX_CE			2
36 
37 /* Max ECC buffer length */
38 #define FMC2_MAX_ECC_BUF_LEN		(FMC2_BCHDSRS_LEN * FMC2_MAX_SG)
39 
#define FMC2_TIMEOUT_MS			1000
#define FMC2_TIMEOUT_US			(1000 * FMC2_TIMEOUT_MS)
41 
/* Timings (FMC2_TIO and FMC2_TSYNC are expressed in picoseconds) */
43 #define FMC2_THIZ			1
44 #define FMC2_TIO			8000
45 #define FMC2_TSYNC			3000
46 #define FMC2_PCR_TIMING_MASK		0xf
47 #define FMC2_PMEM_PATT_TIMING_MASK	0xff
48 
49 /* FMC2 Controller Registers */
50 #define FMC2_BCR1			0x0
51 #define FMC2_PCR			0x80
52 #define FMC2_SR				0x84
53 #define FMC2_PMEM			0x88
54 #define FMC2_PATT			0x8c
55 #define FMC2_HECCR			0x94
56 #define FMC2_CSQCR			0x200
57 #define FMC2_CSQCFGR1			0x204
58 #define FMC2_CSQCFGR2			0x208
59 #define FMC2_CSQCFGR3			0x20c
60 #define FMC2_CSQAR1			0x210
61 #define FMC2_CSQAR2			0x214
62 #define FMC2_CSQIER			0x220
63 #define FMC2_CSQISR			0x224
64 #define FMC2_CSQICR			0x228
65 #define FMC2_CSQEMSR			0x230
66 #define FMC2_BCHIER			0x250
67 #define FMC2_BCHISR			0x254
68 #define FMC2_BCHICR			0x258
69 #define FMC2_BCHPBR1			0x260
70 #define FMC2_BCHPBR2			0x264
71 #define FMC2_BCHPBR3			0x268
72 #define FMC2_BCHPBR4			0x26c
73 #define FMC2_BCHDSR0			0x27c
74 #define FMC2_BCHDSR1			0x280
75 #define FMC2_BCHDSR2			0x284
76 #define FMC2_BCHDSR3			0x288
77 #define FMC2_BCHDSR4			0x28c
78 
79 /* Register: FMC2_BCR1 */
80 #define FMC2_BCR1_FMC2EN		BIT(31)
81 
82 /* Register: FMC2_PCR */
83 #define FMC2_PCR_PWAITEN		BIT(1)
84 #define FMC2_PCR_PBKEN			BIT(2)
85 #define FMC2_PCR_PWID_MASK		GENMASK(5, 4)
86 #define FMC2_PCR_PWID(x)		(((x) & 0x3) << 4)
87 #define FMC2_PCR_PWID_BUSWIDTH_8	0
88 #define FMC2_PCR_PWID_BUSWIDTH_16	1
89 #define FMC2_PCR_ECCEN			BIT(6)
90 #define FMC2_PCR_ECCALG			BIT(8)
91 #define FMC2_PCR_TCLR_MASK		GENMASK(12, 9)
92 #define FMC2_PCR_TCLR(x)		(((x) & 0xf) << 9)
93 #define FMC2_PCR_TCLR_DEFAULT		0xf
94 #define FMC2_PCR_TAR_MASK		GENMASK(16, 13)
95 #define FMC2_PCR_TAR(x)			(((x) & 0xf) << 13)
96 #define FMC2_PCR_TAR_DEFAULT		0xf
97 #define FMC2_PCR_ECCSS_MASK		GENMASK(19, 17)
98 #define FMC2_PCR_ECCSS(x)		(((x) & 0x7) << 17)
99 #define FMC2_PCR_ECCSS_512		1
100 #define FMC2_PCR_ECCSS_2048		3
101 #define FMC2_PCR_BCHECC			BIT(24)
102 #define FMC2_PCR_WEN			BIT(25)
103 
104 /* Register: FMC2_SR */
105 #define FMC2_SR_NWRF			BIT(6)
106 
107 /* Register: FMC2_PMEM */
108 #define FMC2_PMEM_MEMSET(x)		(((x) & 0xff) << 0)
109 #define FMC2_PMEM_MEMWAIT(x)		(((x) & 0xff) << 8)
110 #define FMC2_PMEM_MEMHOLD(x)		(((x) & 0xff) << 16)
111 #define FMC2_PMEM_MEMHIZ(x)		(((x) & 0xff) << 24)
112 #define FMC2_PMEM_DEFAULT		0x0a0a0a0a
113 
114 /* Register: FMC2_PATT */
115 #define FMC2_PATT_ATTSET(x)		(((x) & 0xff) << 0)
116 #define FMC2_PATT_ATTWAIT(x)		(((x) & 0xff) << 8)
117 #define FMC2_PATT_ATTHOLD(x)		(((x) & 0xff) << 16)
118 #define FMC2_PATT_ATTHIZ(x)		(((x) & 0xff) << 24)
119 #define FMC2_PATT_DEFAULT		0x0a0a0a0a
120 
121 /* Register: FMC2_CSQCR */
122 #define FMC2_CSQCR_CSQSTART		BIT(0)
123 
124 /* Register: FMC2_CSQCFGR1 */
125 #define FMC2_CSQCFGR1_CMD2EN		BIT(1)
126 #define FMC2_CSQCFGR1_DMADEN		BIT(2)
127 #define FMC2_CSQCFGR1_ACYNBR(x)		(((x) & 0x7) << 4)
128 #define FMC2_CSQCFGR1_CMD1(x)		(((x) & 0xff) << 8)
129 #define FMC2_CSQCFGR1_CMD2(x)		(((x) & 0xff) << 16)
130 #define FMC2_CSQCFGR1_CMD1T		BIT(24)
131 #define FMC2_CSQCFGR1_CMD2T		BIT(25)
132 
133 /* Register: FMC2_CSQCFGR2 */
134 #define FMC2_CSQCFGR2_SQSDTEN		BIT(0)
135 #define FMC2_CSQCFGR2_RCMD2EN		BIT(1)
136 #define FMC2_CSQCFGR2_DMASEN		BIT(2)
137 #define FMC2_CSQCFGR2_RCMD1(x)		(((x) & 0xff) << 8)
138 #define FMC2_CSQCFGR2_RCMD2(x)		(((x) & 0xff) << 16)
139 #define FMC2_CSQCFGR2_RCMD1T		BIT(24)
140 #define FMC2_CSQCFGR2_RCMD2T		BIT(25)
141 
142 /* Register: FMC2_CSQCFGR3 */
143 #define FMC2_CSQCFGR3_SNBR(x)		(((x) & 0x1f) << 8)
144 #define FMC2_CSQCFGR3_AC1T		BIT(16)
145 #define FMC2_CSQCFGR3_AC2T		BIT(17)
146 #define FMC2_CSQCFGR3_AC3T		BIT(18)
147 #define FMC2_CSQCFGR3_AC4T		BIT(19)
148 #define FMC2_CSQCFGR3_AC5T		BIT(20)
149 #define FMC2_CSQCFGR3_SDT		BIT(21)
150 #define FMC2_CSQCFGR3_RAC1T		BIT(22)
151 #define FMC2_CSQCFGR3_RAC2T		BIT(23)
152 
153 /* Register: FMC2_CSQCAR1 */
154 #define FMC2_CSQCAR1_ADDC1(x)		(((x) & 0xff) << 0)
155 #define FMC2_CSQCAR1_ADDC2(x)		(((x) & 0xff) << 8)
156 #define FMC2_CSQCAR1_ADDC3(x)		(((x) & 0xff) << 16)
157 #define FMC2_CSQCAR1_ADDC4(x)		(((x) & 0xff) << 24)
158 
159 /* Register: FMC2_CSQCAR2 */
160 #define FMC2_CSQCAR2_ADDC5(x)		(((x) & 0xff) << 0)
161 #define FMC2_CSQCAR2_NANDCEN(x)		(((x) & 0x3) << 10)
162 #define FMC2_CSQCAR2_SAO(x)		(((x) & 0xffff) << 16)
163 
164 /* Register: FMC2_CSQIER */
165 #define FMC2_CSQIER_TCIE		BIT(0)
166 
167 /* Register: FMC2_CSQICR */
168 #define FMC2_CSQICR_CLEAR_IRQ		GENMASK(4, 0)
169 
170 /* Register: FMC2_CSQEMSR */
171 #define FMC2_CSQEMSR_SEM		GENMASK(15, 0)
172 
173 /* Register: FMC2_BCHIER */
174 #define FMC2_BCHIER_DERIE		BIT(1)
175 #define FMC2_BCHIER_EPBRIE		BIT(4)
176 
177 /* Register: FMC2_BCHICR */
178 #define FMC2_BCHICR_CLEAR_IRQ		GENMASK(4, 0)
179 
180 /* Register: FMC2_BCHDSR0 */
181 #define FMC2_BCHDSR0_DUE		BIT(0)
182 #define FMC2_BCHDSR0_DEF		BIT(1)
183 #define FMC2_BCHDSR0_DEN_MASK		GENMASK(7, 4)
184 #define FMC2_BCHDSR0_DEN_SHIFT		4
185 
186 /* Register: FMC2_BCHDSR1 */
187 #define FMC2_BCHDSR1_EBP1_MASK		GENMASK(12, 0)
188 #define FMC2_BCHDSR1_EBP2_MASK		GENMASK(28, 16)
189 #define FMC2_BCHDSR1_EBP2_SHIFT		16
190 
191 /* Register: FMC2_BCHDSR2 */
192 #define FMC2_BCHDSR2_EBP3_MASK		GENMASK(12, 0)
193 #define FMC2_BCHDSR2_EBP4_MASK		GENMASK(28, 16)
194 #define FMC2_BCHDSR2_EBP4_SHIFT		16
195 
196 /* Register: FMC2_BCHDSR3 */
197 #define FMC2_BCHDSR3_EBP5_MASK		GENMASK(12, 0)
198 #define FMC2_BCHDSR3_EBP6_MASK		GENMASK(28, 16)
199 #define FMC2_BCHDSR3_EBP6_SHIFT		16
200 
201 /* Register: FMC2_BCHDSR4 */
202 #define FMC2_BCHDSR4_EBP7_MASK		GENMASK(12, 0)
203 #define FMC2_BCHDSR4_EBP8_MASK		GENMASK(28, 16)
204 #define FMC2_BCHDSR4_EBP8_SHIFT		16
205 
206 enum stm32_fmc2_ecc {
207 	FMC2_ECC_HAM = 1,
208 	FMC2_ECC_BCH4 = 4,
209 	FMC2_ECC_BCH8 = 8
210 };
211 
212 enum stm32_fmc2_irq_state {
213 	FMC2_IRQ_UNKNOWN = 0,
214 	FMC2_IRQ_BCH,
215 	FMC2_IRQ_SEQ
216 };
217 
218 struct stm32_fmc2_timings {
219 	u8 tclr;
220 	u8 tar;
221 	u8 thiz;
222 	u8 twait;
223 	u8 thold_mem;
224 	u8 tset_mem;
225 	u8 thold_att;
226 	u8 tset_att;
227 };
228 
229 struct stm32_fmc2_nand {
230 	struct nand_chip chip;
231 	struct stm32_fmc2_timings timings;
232 	int ncs;
233 	int cs_used[FMC2_MAX_CE];
234 };
235 
236 static inline struct stm32_fmc2_nand *to_fmc2_nand(struct nand_chip *chip)
237 {
238 	return container_of(chip, struct stm32_fmc2_nand, chip);
239 }
240 
241 struct stm32_fmc2_nfc {
242 	struct nand_controller base;
243 	struct stm32_fmc2_nand nand;
244 	struct device *dev;
245 	void __iomem *io_base;
246 	void __iomem *data_base[FMC2_MAX_CE];
247 	void __iomem *cmd_base[FMC2_MAX_CE];
248 	void __iomem *addr_base[FMC2_MAX_CE];
249 	phys_addr_t io_phys_addr;
250 	phys_addr_t data_phys_addr[FMC2_MAX_CE];
251 	struct clk *clk;
252 	u8 irq_state;
253 
254 	struct dma_chan *dma_tx_ch;
255 	struct dma_chan *dma_rx_ch;
256 	struct dma_chan *dma_ecc_ch;
257 	struct sg_table dma_data_sg;
258 	struct sg_table dma_ecc_sg;
259 	u8 *ecc_buf;
260 	int dma_ecc_len;
261 
262 	struct completion complete;
263 	struct completion dma_data_complete;
264 	struct completion dma_ecc_complete;
265 
266 	u8 cs_assigned;
267 	int cs_sel;
268 };
269 
270 static inline struct stm32_fmc2_nfc *to_stm32_nfc(struct nand_controller *base)
271 {
272 	return container_of(base, struct stm32_fmc2_nfc, base);
273 }
274 
275 /* Timings configuration */
276 static void stm32_fmc2_timings_init(struct nand_chip *chip)
277 {
278 	struct stm32_fmc2_nfc *fmc2 = to_stm32_nfc(chip->controller);
279 	struct stm32_fmc2_nand *nand = to_fmc2_nand(chip);
280 	struct stm32_fmc2_timings *timings = &nand->timings;
281 	u32 pcr = readl_relaxed(fmc2->io_base + FMC2_PCR);
282 	u32 pmem, patt;
283 
284 	/* Set tclr/tar timings */
285 	pcr &= ~FMC2_PCR_TCLR_MASK;
286 	pcr |= FMC2_PCR_TCLR(timings->tclr);
287 	pcr &= ~FMC2_PCR_TAR_MASK;
288 	pcr |= FMC2_PCR_TAR(timings->tar);
289 
290 	/* Set tset/twait/thold/thiz timings in common bank */
291 	pmem = FMC2_PMEM_MEMSET(timings->tset_mem);
292 	pmem |= FMC2_PMEM_MEMWAIT(timings->twait);
293 	pmem |= FMC2_PMEM_MEMHOLD(timings->thold_mem);
294 	pmem |= FMC2_PMEM_MEMHIZ(timings->thiz);
295 
	/* Set tset/twait/thold/thiz timings in attribute bank */
297 	patt = FMC2_PATT_ATTSET(timings->tset_att);
298 	patt |= FMC2_PATT_ATTWAIT(timings->twait);
299 	patt |= FMC2_PATT_ATTHOLD(timings->thold_att);
300 	patt |= FMC2_PATT_ATTHIZ(timings->thiz);
301 
302 	writel_relaxed(pcr, fmc2->io_base + FMC2_PCR);
303 	writel_relaxed(pmem, fmc2->io_base + FMC2_PMEM);
304 	writel_relaxed(patt, fmc2->io_base + FMC2_PATT);
305 }
306 
307 /* Controller configuration */
308 static void stm32_fmc2_setup(struct nand_chip *chip)
309 {
310 	struct stm32_fmc2_nfc *fmc2 = to_stm32_nfc(chip->controller);
311 	u32 pcr = readl_relaxed(fmc2->io_base + FMC2_PCR);
312 
313 	/* Configure ECC algorithm (default configuration is Hamming) */
314 	pcr &= ~FMC2_PCR_ECCALG;
315 	pcr &= ~FMC2_PCR_BCHECC;
316 	if (chip->ecc.strength == FMC2_ECC_BCH8) {
317 		pcr |= FMC2_PCR_ECCALG;
318 		pcr |= FMC2_PCR_BCHECC;
319 	} else if (chip->ecc.strength == FMC2_ECC_BCH4) {
320 		pcr |= FMC2_PCR_ECCALG;
321 	}
322 
323 	/* Set buswidth */
324 	pcr &= ~FMC2_PCR_PWID_MASK;
325 	if (chip->options & NAND_BUSWIDTH_16)
326 		pcr |= FMC2_PCR_PWID(FMC2_PCR_PWID_BUSWIDTH_16);
327 
328 	/* Set ECC sector size */
329 	pcr &= ~FMC2_PCR_ECCSS_MASK;
330 	pcr |= FMC2_PCR_ECCSS(FMC2_PCR_ECCSS_512);
331 
332 	writel_relaxed(pcr, fmc2->io_base + FMC2_PCR);
333 }
334 
335 /* Select target */
336 static int stm32_fmc2_select_chip(struct nand_chip *chip, int chipnr)
337 {
338 	struct stm32_fmc2_nfc *fmc2 = to_stm32_nfc(chip->controller);
339 	struct stm32_fmc2_nand *nand = to_fmc2_nand(chip);
340 	struct dma_slave_config dma_cfg;
341 	int ret;
342 
343 	if (nand->cs_used[chipnr] == fmc2->cs_sel)
344 		return 0;
345 
346 	fmc2->cs_sel = nand->cs_used[chipnr];
347 
348 	/* FMC2 setup routine */
349 	stm32_fmc2_setup(chip);
350 
351 	/* Apply timings */
352 	stm32_fmc2_timings_init(chip);
353 
354 	if (fmc2->dma_tx_ch && fmc2->dma_rx_ch) {
355 		memset(&dma_cfg, 0, sizeof(dma_cfg));
356 		dma_cfg.src_addr = fmc2->data_phys_addr[fmc2->cs_sel];
357 		dma_cfg.dst_addr = fmc2->data_phys_addr[fmc2->cs_sel];
358 		dma_cfg.src_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
359 		dma_cfg.dst_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
360 		dma_cfg.src_maxburst = 32;
361 		dma_cfg.dst_maxburst = 32;
362 
363 		ret = dmaengine_slave_config(fmc2->dma_tx_ch, &dma_cfg);
364 		if (ret) {
365 			dev_err(fmc2->dev, "tx DMA engine slave config failed\n");
366 			return ret;
367 		}
368 
369 		ret = dmaengine_slave_config(fmc2->dma_rx_ch, &dma_cfg);
370 		if (ret) {
371 			dev_err(fmc2->dev, "rx DMA engine slave config failed\n");
372 			return ret;
373 		}
374 	}
375 
376 	if (fmc2->dma_ecc_ch) {
377 		/*
378 		 * Hamming: we read HECCR register
379 		 * BCH4/BCH8: we read BCHDSRSx registers
380 		 */
381 		memset(&dma_cfg, 0, sizeof(dma_cfg));
382 		dma_cfg.src_addr = fmc2->io_phys_addr;
383 		dma_cfg.src_addr += chip->ecc.strength == FMC2_ECC_HAM ?
384 				    FMC2_HECCR : FMC2_BCHDSR0;
385 		dma_cfg.src_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
386 
387 		ret = dmaengine_slave_config(fmc2->dma_ecc_ch, &dma_cfg);
388 		if (ret) {
389 			dev_err(fmc2->dev, "ECC DMA engine slave config failed\n");
390 			return ret;
391 		}
392 
393 		/* Calculate ECC length needed for one sector */
394 		fmc2->dma_ecc_len = chip->ecc.strength == FMC2_ECC_HAM ?
395 				    FMC2_HECCR_LEN : FMC2_BCHDSRS_LEN;
396 	}
397 
398 	return 0;
399 }
400 
401 /* Set bus width to 16-bit or 8-bit */
402 static void stm32_fmc2_set_buswidth_16(struct stm32_fmc2_nfc *fmc2, bool set)
403 {
404 	u32 pcr = readl_relaxed(fmc2->io_base + FMC2_PCR);
405 
406 	pcr &= ~FMC2_PCR_PWID_MASK;
407 	if (set)
408 		pcr |= FMC2_PCR_PWID(FMC2_PCR_PWID_BUSWIDTH_16);
409 	writel_relaxed(pcr, fmc2->io_base + FMC2_PCR);
410 }
411 
412 /* Enable/disable ECC */
413 static void stm32_fmc2_set_ecc(struct stm32_fmc2_nfc *fmc2, bool enable)
414 {
415 	u32 pcr = readl(fmc2->io_base + FMC2_PCR);
416 
417 	pcr &= ~FMC2_PCR_ECCEN;
418 	if (enable)
419 		pcr |= FMC2_PCR_ECCEN;
420 	writel(pcr, fmc2->io_base + FMC2_PCR);
421 }
422 
/* Enable IRQ sources when the sequencer is used */
424 static inline void stm32_fmc2_enable_seq_irq(struct stm32_fmc2_nfc *fmc2)
425 {
426 	u32 csqier = readl_relaxed(fmc2->io_base + FMC2_CSQIER);
427 
428 	csqier |= FMC2_CSQIER_TCIE;
429 
430 	fmc2->irq_state = FMC2_IRQ_SEQ;
431 
432 	writel_relaxed(csqier, fmc2->io_base + FMC2_CSQIER);
433 }
434 
/* Disable IRQ sources when the sequencer is used */
436 static inline void stm32_fmc2_disable_seq_irq(struct stm32_fmc2_nfc *fmc2)
437 {
438 	u32 csqier = readl_relaxed(fmc2->io_base + FMC2_CSQIER);
439 
440 	csqier &= ~FMC2_CSQIER_TCIE;
441 
442 	writel_relaxed(csqier, fmc2->io_base + FMC2_CSQIER);
443 
444 	fmc2->irq_state = FMC2_IRQ_UNKNOWN;
445 }
446 
/* Clear IRQ sources when the sequencer is used */
448 static inline void stm32_fmc2_clear_seq_irq(struct stm32_fmc2_nfc *fmc2)
449 {
450 	writel_relaxed(FMC2_CSQICR_CLEAR_IRQ, fmc2->io_base + FMC2_CSQICR);
451 }
452 
/* Enable IRQ sources when BCH is used */
454 static inline void stm32_fmc2_enable_bch_irq(struct stm32_fmc2_nfc *fmc2,
455 					     int mode)
456 {
457 	u32 bchier = readl_relaxed(fmc2->io_base + FMC2_BCHIER);
458 
459 	if (mode == NAND_ECC_WRITE)
460 		bchier |= FMC2_BCHIER_EPBRIE;
461 	else
462 		bchier |= FMC2_BCHIER_DERIE;
463 
464 	fmc2->irq_state = FMC2_IRQ_BCH;
465 
466 	writel_relaxed(bchier, fmc2->io_base + FMC2_BCHIER);
467 }
468 
/* Disable IRQ sources when BCH is used */
470 static inline void stm32_fmc2_disable_bch_irq(struct stm32_fmc2_nfc *fmc2)
471 {
472 	u32 bchier = readl_relaxed(fmc2->io_base + FMC2_BCHIER);
473 
474 	bchier &= ~FMC2_BCHIER_DERIE;
475 	bchier &= ~FMC2_BCHIER_EPBRIE;
476 
477 	writel_relaxed(bchier, fmc2->io_base + FMC2_BCHIER);
478 
479 	fmc2->irq_state = FMC2_IRQ_UNKNOWN;
480 }
481 
/* Clear IRQ sources when BCH is used */
483 static inline void stm32_fmc2_clear_bch_irq(struct stm32_fmc2_nfc *fmc2)
484 {
485 	writel_relaxed(FMC2_BCHICR_CLEAR_IRQ, fmc2->io_base + FMC2_BCHICR);
486 }
487 
488 /*
 * Enable the ECC logic and reset the syndrome/parity bits previously
 * computed. The syndrome/parity bits are cleared by setting ECCEN to 0.
491  */
492 static void stm32_fmc2_hwctl(struct nand_chip *chip, int mode)
493 {
494 	struct stm32_fmc2_nfc *fmc2 = to_stm32_nfc(chip->controller);
495 
496 	stm32_fmc2_set_ecc(fmc2, false);
497 
498 	if (chip->ecc.strength != FMC2_ECC_HAM) {
499 		u32 pcr = readl_relaxed(fmc2->io_base + FMC2_PCR);
500 
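		/*
		 * WEN tells the BCH engine the transfer direction: set when
		 * encoding for a program, cleared when decoding a read.
		 */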
501 		if (mode == NAND_ECC_WRITE)
502 			pcr |= FMC2_PCR_WEN;
503 		else
504 			pcr &= ~FMC2_PCR_WEN;
505 		writel_relaxed(pcr, fmc2->io_base + FMC2_PCR);
506 
507 		reinit_completion(&fmc2->complete);
508 		stm32_fmc2_clear_bch_irq(fmc2);
509 		stm32_fmc2_enable_bch_irq(fmc2, mode);
510 	}
511 
512 	stm32_fmc2_set_ecc(fmc2, true);
513 }
514 
515 /*
516  * ECC Hamming calculation
 * ECC is 3 bytes for 512 bytes of data (supports correction of at most
 * one bit error)
519  */
520 static inline void stm32_fmc2_ham_set_ecc(const u32 ecc_sta, u8 *ecc)
521 {
522 	ecc[0] = ecc_sta;
523 	ecc[1] = ecc_sta >> 8;
524 	ecc[2] = ecc_sta >> 16;
525 }
526 
527 static int stm32_fmc2_ham_calculate(struct nand_chip *chip, const u8 *data,
528 				    u8 *ecc)
529 {
530 	struct stm32_fmc2_nfc *fmc2 = to_stm32_nfc(chip->controller);
531 	u32 sr, heccr;
532 	int ret;
533 
534 	ret = readl_relaxed_poll_timeout(fmc2->io_base + FMC2_SR,
535 					 sr, sr & FMC2_SR_NWRF, 10,
					 FMC2_TIMEOUT_US);
537 	if (ret) {
538 		dev_err(fmc2->dev, "ham timeout\n");
539 		return ret;
540 	}
541 
542 	heccr = readl_relaxed(fmc2->io_base + FMC2_HECCR);
543 
544 	stm32_fmc2_ham_set_ecc(heccr, ecc);
545 
546 	/* Disable ECC */
547 	stm32_fmc2_set_ecc(fmc2, false);
548 
549 	return 0;
550 }
551 
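/*
 * The syndrome is the XOR of the ECC read from the spare area and the ECC
 * recomputed on the data. It is made of 12 bit pairs: for a single bit
 * error each pair is either 01 or 10, the lower 3 pairs giving the bit
 * position within the byte and the upper 9 pairs the byte offset in the
 * 512-byte sector. Any other pair value means an uncorrectable error.
 */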
552 static int stm32_fmc2_ham_correct(struct nand_chip *chip, u8 *dat,
553 				  u8 *read_ecc, u8 *calc_ecc)
554 {
555 	u8 bit_position = 0, b0, b1, b2;
556 	u32 byte_addr = 0, b;
557 	u32 i, shifting = 1;
558 
	/* Indicate which bit and byte are faulty (if any) */
560 	b0 = read_ecc[0] ^ calc_ecc[0];
561 	b1 = read_ecc[1] ^ calc_ecc[1];
562 	b2 = read_ecc[2] ^ calc_ecc[2];
563 	b = b0 | (b1 << 8) | (b2 << 16);
564 
565 	/* No errors */
566 	if (likely(!b))
567 		return 0;
568 
569 	/* Calculate bit position */
570 	for (i = 0; i < 3; i++) {
571 		switch (b % 4) {
572 		case 2:
573 			bit_position += shifting;
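			/* fall through */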
574 		case 1:
575 			break;
576 		default:
577 			return -EBADMSG;
578 		}
579 		shifting <<= 1;
580 		b >>= 2;
581 	}
582 
583 	/* Calculate byte position */
584 	shifting = 1;
585 	for (i = 0; i < 9; i++) {
586 		switch (b % 4) {
587 		case 2:
588 			byte_addr += shifting;
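			/* fall through */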
589 		case 1:
590 			break;
591 		default:
592 			return -EBADMSG;
593 		}
594 		shifting <<= 1;
595 		b >>= 2;
596 	}
597 
598 	/* Flip the bit */
599 	dat[byte_addr] ^= (1 << bit_position);
600 
601 	return 1;
602 }
603 
604 /*
605  * ECC BCH calculation and correction
 * ECC is 7/13 bytes for 512 bytes of data (supports correction of at most
 * 4-bit/8-bit errors)
608  */
609 static int stm32_fmc2_bch_calculate(struct nand_chip *chip, const u8 *data,
610 				    u8 *ecc)
611 {
612 	struct stm32_fmc2_nfc *fmc2 = to_stm32_nfc(chip->controller);
613 	u32 bchpbr;
614 
615 	/* Wait until the BCH code is ready */
616 	if (!wait_for_completion_timeout(&fmc2->complete,
617 					 msecs_to_jiffies(FMC2_TIMEOUT_MS))) {
618 		dev_err(fmc2->dev, "bch timeout\n");
619 		stm32_fmc2_disable_bch_irq(fmc2);
620 		return -ETIMEDOUT;
621 	}
622 
623 	/* Read parity bits */
624 	bchpbr = readl_relaxed(fmc2->io_base + FMC2_BCHPBR1);
625 	ecc[0] = bchpbr;
626 	ecc[1] = bchpbr >> 8;
627 	ecc[2] = bchpbr >> 16;
628 	ecc[3] = bchpbr >> 24;
629 
630 	bchpbr = readl_relaxed(fmc2->io_base + FMC2_BCHPBR2);
631 	ecc[4] = bchpbr;
632 	ecc[5] = bchpbr >> 8;
633 	ecc[6] = bchpbr >> 16;
634 
635 	if (chip->ecc.strength == FMC2_ECC_BCH8) {
636 		ecc[7] = bchpbr >> 24;
637 
638 		bchpbr = readl_relaxed(fmc2->io_base + FMC2_BCHPBR3);
639 		ecc[8] = bchpbr;
640 		ecc[9] = bchpbr >> 8;
641 		ecc[10] = bchpbr >> 16;
642 		ecc[11] = bchpbr >> 24;
643 
644 		bchpbr = readl_relaxed(fmc2->io_base + FMC2_BCHPBR4);
645 		ecc[12] = bchpbr;
646 	}
647 
648 	/* Disable ECC */
649 	stm32_fmc2_set_ecc(fmc2, false);
650 
651 	return 0;
652 }
653 
654 /* BCH algorithm correction */
655 static int stm32_fmc2_bch_decode(int eccsize, u8 *dat, u32 *ecc_sta)
656 {
657 	u32 bchdsr0 = ecc_sta[0];
658 	u32 bchdsr1 = ecc_sta[1];
659 	u32 bchdsr2 = ecc_sta[2];
660 	u32 bchdsr3 = ecc_sta[3];
661 	u32 bchdsr4 = ecc_sta[4];
662 	u16 pos[8];
663 	int i, den;
664 	unsigned int nb_errs = 0;
665 
666 	/* No errors found */
667 	if (likely(!(bchdsr0 & FMC2_BCHDSR0_DEF)))
668 		return 0;
669 
670 	/* Too many errors detected */
671 	if (unlikely(bchdsr0 & FMC2_BCHDSR0_DUE))
672 		return -EBADMSG;
673 
674 	pos[0] = bchdsr1 & FMC2_BCHDSR1_EBP1_MASK;
675 	pos[1] = (bchdsr1 & FMC2_BCHDSR1_EBP2_MASK) >> FMC2_BCHDSR1_EBP2_SHIFT;
676 	pos[2] = bchdsr2 & FMC2_BCHDSR2_EBP3_MASK;
677 	pos[3] = (bchdsr2 & FMC2_BCHDSR2_EBP4_MASK) >> FMC2_BCHDSR2_EBP4_SHIFT;
678 	pos[4] = bchdsr3 & FMC2_BCHDSR3_EBP5_MASK;
679 	pos[5] = (bchdsr3 & FMC2_BCHDSR3_EBP6_MASK) >> FMC2_BCHDSR3_EBP6_SHIFT;
680 	pos[6] = bchdsr4 & FMC2_BCHDSR4_EBP7_MASK;
681 	pos[7] = (bchdsr4 & FMC2_BCHDSR4_EBP8_MASK) >> FMC2_BCHDSR4_EBP8_SHIFT;
682 
683 	den = (bchdsr0 & FMC2_BCHDSR0_DEN_MASK) >> FMC2_BCHDSR0_DEN_SHIFT;
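	/* Only flip (and count) the errors that fall inside the data area */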
684 	for (i = 0; i < den; i++) {
685 		if (pos[i] < eccsize * 8) {
686 			change_bit(pos[i], (unsigned long *)dat);
687 			nb_errs++;
688 		}
689 	}
690 
691 	return nb_errs;
692 }
693 
694 static int stm32_fmc2_bch_correct(struct nand_chip *chip, u8 *dat,
695 				  u8 *read_ecc, u8 *calc_ecc)
696 {
697 	struct stm32_fmc2_nfc *fmc2 = to_stm32_nfc(chip->controller);
698 	u32 ecc_sta[5];
699 
	/* Wait until the decoding status registers are ready */
701 	if (!wait_for_completion_timeout(&fmc2->complete,
702 					 msecs_to_jiffies(FMC2_TIMEOUT_MS))) {
703 		dev_err(fmc2->dev, "bch timeout\n");
704 		stm32_fmc2_disable_bch_irq(fmc2);
705 		return -ETIMEDOUT;
706 	}
707 
708 	ecc_sta[0] = readl_relaxed(fmc2->io_base + FMC2_BCHDSR0);
709 	ecc_sta[1] = readl_relaxed(fmc2->io_base + FMC2_BCHDSR1);
710 	ecc_sta[2] = readl_relaxed(fmc2->io_base + FMC2_BCHDSR2);
711 	ecc_sta[3] = readl_relaxed(fmc2->io_base + FMC2_BCHDSR3);
712 	ecc_sta[4] = readl_relaxed(fmc2->io_base + FMC2_BCHDSR4);
713 
714 	/* Disable ECC */
715 	stm32_fmc2_set_ecc(fmc2, false);
716 
717 	return stm32_fmc2_bch_decode(chip->ecc.size, dat, ecc_sta);
718 }
719 
720 static int stm32_fmc2_read_page(struct nand_chip *chip, u8 *buf,
721 				int oob_required, int page)
722 {
723 	struct mtd_info *mtd = nand_to_mtd(chip);
724 	int ret, i, s, stat, eccsize = chip->ecc.size;
725 	int eccbytes = chip->ecc.bytes;
726 	int eccsteps = chip->ecc.steps;
727 	int eccstrength = chip->ecc.strength;
728 	u8 *p = buf;
729 	u8 *ecc_calc = chip->ecc.calc_buf;
730 	u8 *ecc_code = chip->ecc.code_buf;
731 	unsigned int max_bitflips = 0;
732 
733 	ret = nand_read_page_op(chip, page, 0, NULL, 0);
734 	if (ret)
735 		return ret;
736 
737 	for (i = mtd->writesize + FMC2_BBM_LEN, s = 0; s < eccsteps;
738 	     s++, i += eccbytes, p += eccsize) {
739 		chip->ecc.hwctl(chip, NAND_ECC_READ);
740 
741 		/* Read the nand page sector (512 bytes) */
742 		ret = nand_change_read_column_op(chip, s * eccsize, p,
743 						 eccsize, false);
744 		if (ret)
745 			return ret;
746 
747 		/* Read the corresponding ECC bytes */
748 		ret = nand_change_read_column_op(chip, i, ecc_code,
749 						 eccbytes, false);
750 		if (ret)
751 			return ret;
752 
753 		/* Correct the data */
754 		stat = chip->ecc.correct(chip, p, ecc_code, ecc_calc);
755 		if (stat == -EBADMSG)
756 			/* Check for empty pages with bitflips */
757 			stat = nand_check_erased_ecc_chunk(p, eccsize,
758 							   ecc_code, eccbytes,
759 							   NULL, 0,
760 							   eccstrength);
761 
762 		if (stat < 0) {
763 			mtd->ecc_stats.failed++;
764 		} else {
765 			mtd->ecc_stats.corrected += stat;
766 			max_bitflips = max_t(unsigned int, max_bitflips, stat);
767 		}
768 	}
769 
770 	/* Read oob */
771 	if (oob_required) {
772 		ret = nand_change_read_column_op(chip, mtd->writesize,
773 						 chip->oob_poi, mtd->oobsize,
774 						 false);
775 		if (ret)
776 			return ret;
777 	}
778 
779 	return max_bitflips;
780 }
781 
782 /* Sequencer read/write configuration */
783 static void stm32_fmc2_rw_page_init(struct nand_chip *chip, int page,
784 				    int raw, bool write_data)
785 {
786 	struct stm32_fmc2_nfc *fmc2 = to_stm32_nfc(chip->controller);
787 	struct mtd_info *mtd = nand_to_mtd(chip);
788 	u32 csqcfgr1, csqcfgr2, csqcfgr3;
789 	u32 csqar1, csqar2;
790 	u32 ecc_offset = mtd->writesize + FMC2_BBM_LEN;
791 	u32 pcr = readl_relaxed(fmc2->io_base + FMC2_PCR);
792 
793 	if (write_data)
794 		pcr |= FMC2_PCR_WEN;
795 	else
796 		pcr &= ~FMC2_PCR_WEN;
797 	writel_relaxed(pcr, fmc2->io_base + FMC2_PCR);
798 
799 	/*
800 	 * - Set Program Page/Page Read command
801 	 * - Enable DMA request data
802 	 * - Set timings
803 	 */
804 	csqcfgr1 = FMC2_CSQCFGR1_DMADEN | FMC2_CSQCFGR1_CMD1T;
805 	if (write_data)
806 		csqcfgr1 |= FMC2_CSQCFGR1_CMD1(NAND_CMD_SEQIN);
807 	else
808 		csqcfgr1 |= FMC2_CSQCFGR1_CMD1(NAND_CMD_READ0) |
809 			    FMC2_CSQCFGR1_CMD2EN |
810 			    FMC2_CSQCFGR1_CMD2(NAND_CMD_READSTART) |
811 			    FMC2_CSQCFGR1_CMD2T;
812 
813 	/*
814 	 * - Set Random Data Input/Random Data Read command
815 	 * - Enable the sequencer to access the Spare data area
	 * - Enable DMA request status decoding for read
817 	 * - Set timings
818 	 */
819 	if (write_data)
820 		csqcfgr2 = FMC2_CSQCFGR2_RCMD1(NAND_CMD_RNDIN);
821 	else
822 		csqcfgr2 = FMC2_CSQCFGR2_RCMD1(NAND_CMD_RNDOUT) |
823 			   FMC2_CSQCFGR2_RCMD2EN |
824 			   FMC2_CSQCFGR2_RCMD2(NAND_CMD_RNDOUTSTART) |
825 			   FMC2_CSQCFGR2_RCMD1T |
826 			   FMC2_CSQCFGR2_RCMD2T;
827 	if (!raw) {
828 		csqcfgr2 |= write_data ? 0 : FMC2_CSQCFGR2_DMASEN;
829 		csqcfgr2 |= FMC2_CSQCFGR2_SQSDTEN;
830 	}
831 
832 	/*
833 	 * - Set the number of sectors to be written
834 	 * - Set timings
835 	 */
836 	csqcfgr3 = FMC2_CSQCFGR3_SNBR(chip->ecc.steps - 1);
837 	if (write_data) {
838 		csqcfgr3 |= FMC2_CSQCFGR3_RAC2T;
839 		if (chip->options & NAND_ROW_ADDR_3)
840 			csqcfgr3 |= FMC2_CSQCFGR3_AC5T;
841 		else
842 			csqcfgr3 |= FMC2_CSQCFGR3_AC4T;
843 	}
844 
845 	/*
	 * Set the first four address cycles
847 	 * Byte 1 and byte 2 => column, we start at 0x0
848 	 * Byte 3 and byte 4 => page
849 	 */
850 	csqar1 = FMC2_CSQCAR1_ADDC3(page);
851 	csqar1 |= FMC2_CSQCAR1_ADDC4(page >> 8);
852 
853 	/*
854 	 * - Set chip enable number
	 * - Set ECC byte offset in the spare area (in 16-bit words on a 16-bit bus)
856 	 * - Calculate the number of address cycles to be issued
857 	 * - Set byte 5 of address cycle if needed
858 	 */
859 	csqar2 = FMC2_CSQCAR2_NANDCEN(fmc2->cs_sel);
860 	if (chip->options & NAND_BUSWIDTH_16)
861 		csqar2 |= FMC2_CSQCAR2_SAO(ecc_offset >> 1);
862 	else
863 		csqar2 |= FMC2_CSQCAR2_SAO(ecc_offset);
864 	if (chip->options & NAND_ROW_ADDR_3) {
865 		csqcfgr1 |= FMC2_CSQCFGR1_ACYNBR(5);
866 		csqar2 |= FMC2_CSQCAR2_ADDC5(page >> 16);
867 	} else {
868 		csqcfgr1 |= FMC2_CSQCFGR1_ACYNBR(4);
869 	}
870 
871 	writel_relaxed(csqcfgr1, fmc2->io_base + FMC2_CSQCFGR1);
872 	writel_relaxed(csqcfgr2, fmc2->io_base + FMC2_CSQCFGR2);
873 	writel_relaxed(csqcfgr3, fmc2->io_base + FMC2_CSQCFGR3);
874 	writel_relaxed(csqar1, fmc2->io_base + FMC2_CSQAR1);
875 	writel_relaxed(csqar2, fmc2->io_base + FMC2_CSQAR2);
876 }
877 
878 static void stm32_fmc2_dma_callback(void *arg)
879 {
880 	complete((struct completion *)arg);
881 }
882 
883 /* Read/write data from/to a page */
884 static int stm32_fmc2_xfer(struct nand_chip *chip, const u8 *buf,
885 			   int raw, bool write_data)
886 {
887 	struct stm32_fmc2_nfc *fmc2 = to_stm32_nfc(chip->controller);
888 	struct dma_async_tx_descriptor *desc_data, *desc_ecc;
889 	struct scatterlist *sg;
890 	struct dma_chan *dma_ch = fmc2->dma_rx_ch;
891 	enum dma_data_direction dma_data_dir = DMA_FROM_DEVICE;
892 	enum dma_transfer_direction dma_transfer_dir = DMA_DEV_TO_MEM;
893 	u32 csqcr = readl_relaxed(fmc2->io_base + FMC2_CSQCR);
894 	int eccsteps = chip->ecc.steps;
895 	int eccsize = chip->ecc.size;
896 	const u8 *p = buf;
897 	int s, ret;
898 
899 	/* Configure DMA data */
900 	if (write_data) {
901 		dma_data_dir = DMA_TO_DEVICE;
902 		dma_transfer_dir = DMA_MEM_TO_DEV;
903 		dma_ch = fmc2->dma_tx_ch;
904 	}
905 
906 	for_each_sg(fmc2->dma_data_sg.sgl, sg, eccsteps, s) {
907 		sg_set_buf(sg, p, eccsize);
908 		p += eccsize;
909 	}
910 
911 	ret = dma_map_sg(fmc2->dev, fmc2->dma_data_sg.sgl,
912 			 eccsteps, dma_data_dir);
	if (!ret)
		return -EIO;
915 
916 	desc_data = dmaengine_prep_slave_sg(dma_ch, fmc2->dma_data_sg.sgl,
917 					    eccsteps, dma_transfer_dir,
918 					    DMA_PREP_INTERRUPT);
919 	if (!desc_data) {
920 		ret = -ENOMEM;
921 		goto err_unmap_data;
922 	}
923 
924 	reinit_completion(&fmc2->dma_data_complete);
925 	reinit_completion(&fmc2->complete);
926 	desc_data->callback = stm32_fmc2_dma_callback;
927 	desc_data->callback_param = &fmc2->dma_data_complete;
928 	ret = dma_submit_error(dmaengine_submit(desc_data));
929 	if (ret)
930 		goto err_unmap_data;
931 
932 	dma_async_issue_pending(dma_ch);
933 
934 	if (!write_data && !raw) {
935 		/* Configure DMA ECC status */
936 		p = fmc2->ecc_buf;
937 		for_each_sg(fmc2->dma_ecc_sg.sgl, sg, eccsteps, s) {
938 			sg_set_buf(sg, p, fmc2->dma_ecc_len);
939 			p += fmc2->dma_ecc_len;
940 		}
941 
942 		ret = dma_map_sg(fmc2->dev, fmc2->dma_ecc_sg.sgl,
943 				 eccsteps, dma_data_dir);
		if (!ret) {
			ret = -EIO;
			goto err_unmap_data;
		}
946 
947 		desc_ecc = dmaengine_prep_slave_sg(fmc2->dma_ecc_ch,
948 						   fmc2->dma_ecc_sg.sgl,
949 						   eccsteps, dma_transfer_dir,
950 						   DMA_PREP_INTERRUPT);
951 		if (!desc_ecc) {
952 			ret = -ENOMEM;
953 			goto err_unmap_ecc;
954 		}
955 
956 		reinit_completion(&fmc2->dma_ecc_complete);
957 		desc_ecc->callback = stm32_fmc2_dma_callback;
958 		desc_ecc->callback_param = &fmc2->dma_ecc_complete;
959 		ret = dma_submit_error(dmaengine_submit(desc_ecc));
960 		if (ret)
961 			goto err_unmap_ecc;
962 
963 		dma_async_issue_pending(fmc2->dma_ecc_ch);
964 	}
965 
966 	stm32_fmc2_clear_seq_irq(fmc2);
967 	stm32_fmc2_enable_seq_irq(fmc2);
968 
969 	/* Start the transfer */
970 	csqcr |= FMC2_CSQCR_CSQSTART;
971 	writel_relaxed(csqcr, fmc2->io_base + FMC2_CSQCR);
972 
973 	/* Wait end of sequencer transfer */
974 	if (!wait_for_completion_timeout(&fmc2->complete,
975 					 msecs_to_jiffies(FMC2_TIMEOUT_MS))) {
976 		dev_err(fmc2->dev, "seq timeout\n");
977 		stm32_fmc2_disable_seq_irq(fmc2);
978 		dmaengine_terminate_all(dma_ch);
979 		if (!write_data && !raw)
980 			dmaengine_terminate_all(fmc2->dma_ecc_ch);
981 		ret = -ETIMEDOUT;
982 		goto err_unmap_ecc;
983 	}
984 
985 	/* Wait DMA data transfer completion */
986 	if (!wait_for_completion_timeout(&fmc2->dma_data_complete,
987 					 msecs_to_jiffies(FMC2_TIMEOUT_MS))) {
988 		dev_err(fmc2->dev, "data DMA timeout\n");
989 		dmaengine_terminate_all(dma_ch);
990 		ret = -ETIMEDOUT;
991 	}
992 
993 	/* Wait DMA ECC transfer completion */
994 	if (!write_data && !raw) {
995 		if (!wait_for_completion_timeout(&fmc2->dma_ecc_complete,
996 					msecs_to_jiffies(FMC2_TIMEOUT_MS))) {
997 			dev_err(fmc2->dev, "ECC DMA timeout\n");
998 			dmaengine_terminate_all(fmc2->dma_ecc_ch);
999 			ret = -ETIMEDOUT;
1000 		}
1001 	}
1002 
1003 err_unmap_ecc:
1004 	if (!write_data && !raw)
1005 		dma_unmap_sg(fmc2->dev, fmc2->dma_ecc_sg.sgl,
1006 			     eccsteps, dma_data_dir);
1007 
1008 err_unmap_data:
1009 	dma_unmap_sg(fmc2->dev, fmc2->dma_data_sg.sgl, eccsteps, dma_data_dir);
1010 
1011 	return ret;
1012 }
1013 
1014 static int stm32_fmc2_sequencer_write(struct nand_chip *chip,
1015 				      const u8 *buf, int oob_required,
1016 				      int page, int raw)
1017 {
1018 	struct mtd_info *mtd = nand_to_mtd(chip);
1019 	int ret;
1020 
1021 	/* Configure the sequencer */
1022 	stm32_fmc2_rw_page_init(chip, page, raw, true);
1023 
1024 	/* Write the page */
1025 	ret = stm32_fmc2_xfer(chip, buf, raw, true);
1026 	if (ret)
1027 		return ret;
1028 
1029 	/* Write oob */
1030 	if (oob_required) {
1031 		ret = nand_change_write_column_op(chip, mtd->writesize,
1032 						  chip->oob_poi, mtd->oobsize,
1033 						  false);
1034 		if (ret)
1035 			return ret;
1036 	}
1037 
1038 	return nand_prog_page_end_op(chip);
1039 }
1040 
1041 static int stm32_fmc2_sequencer_write_page(struct nand_chip *chip,
1042 					   const u8 *buf,
1043 					   int oob_required,
1044 					   int page)
1045 {
1046 	int ret;
1047 
1048 	/* Select the target */
1049 	ret = stm32_fmc2_select_chip(chip, chip->cur_cs);
1050 	if (ret)
1051 		return ret;
1052 
1053 	return stm32_fmc2_sequencer_write(chip, buf, oob_required, page, false);
1054 }
1055 
1056 static int stm32_fmc2_sequencer_write_page_raw(struct nand_chip *chip,
1057 					       const u8 *buf,
1058 					       int oob_required,
1059 					       int page)
1060 {
1061 	int ret;
1062 
1063 	/* Select the target */
1064 	ret = stm32_fmc2_select_chip(chip, chip->cur_cs);
1065 	if (ret)
1066 		return ret;
1067 
1068 	return stm32_fmc2_sequencer_write(chip, buf, oob_required, page, true);
1069 }
1070 
1071 /* Get a status indicating which sectors have errors */
1072 static inline u16 stm32_fmc2_get_mapping_status(struct stm32_fmc2_nfc *fmc2)
1073 {
1074 	u32 csqemsr = readl_relaxed(fmc2->io_base + FMC2_CSQEMSR);
1075 
1076 	return csqemsr & FMC2_CSQEMSR_SEM;
1077 }
1078 
1079 static int stm32_fmc2_sequencer_correct(struct nand_chip *chip, u8 *dat,
1080 					u8 *read_ecc, u8 *calc_ecc)
1081 {
1082 	struct mtd_info *mtd = nand_to_mtd(chip);
1083 	struct stm32_fmc2_nfc *fmc2 = to_stm32_nfc(chip->controller);
1084 	int eccbytes = chip->ecc.bytes;
1085 	int eccsteps = chip->ecc.steps;
1086 	int eccstrength = chip->ecc.strength;
1087 	int i, s, eccsize = chip->ecc.size;
1088 	u32 *ecc_sta = (u32 *)fmc2->ecc_buf;
1089 	u16 sta_map = stm32_fmc2_get_mapping_status(fmc2);
1090 	unsigned int max_bitflips = 0;
1091 
1092 	for (i = 0, s = 0; s < eccsteps; s++, i += eccbytes, dat += eccsize) {
1093 		int stat = 0;
1094 
1095 		if (eccstrength == FMC2_ECC_HAM) {
1096 			/* Ecc_sta = FMC2_HECCR */
1097 			if (sta_map & BIT(s)) {
1098 				stm32_fmc2_ham_set_ecc(*ecc_sta, &calc_ecc[i]);
1099 				stat = stm32_fmc2_ham_correct(chip, dat,
1100 							      &read_ecc[i],
1101 							      &calc_ecc[i]);
1102 			}
1103 			ecc_sta++;
1104 		} else {
1105 			/*
1106 			 * Ecc_sta[0] = FMC2_BCHDSR0
1107 			 * Ecc_sta[1] = FMC2_BCHDSR1
1108 			 * Ecc_sta[2] = FMC2_BCHDSR2
1109 			 * Ecc_sta[3] = FMC2_BCHDSR3
1110 			 * Ecc_sta[4] = FMC2_BCHDSR4
1111 			 */
1112 			if (sta_map & BIT(s))
1113 				stat = stm32_fmc2_bch_decode(eccsize, dat,
1114 							     ecc_sta);
1115 			ecc_sta += 5;
1116 		}
1117 
1118 		if (stat == -EBADMSG)
1119 			/* Check for empty pages with bitflips */
1120 			stat = nand_check_erased_ecc_chunk(dat, eccsize,
1121 							   &read_ecc[i],
1122 							   eccbytes,
1123 							   NULL, 0,
1124 							   eccstrength);
1125 
1126 		if (stat < 0) {
1127 			mtd->ecc_stats.failed++;
1128 		} else {
1129 			mtd->ecc_stats.corrected += stat;
1130 			max_bitflips = max_t(unsigned int, max_bitflips, stat);
1131 		}
1132 	}
1133 
1134 	return max_bitflips;
1135 }
1136 
1137 static int stm32_fmc2_sequencer_read_page(struct nand_chip *chip, u8 *buf,
1138 					  int oob_required, int page)
1139 {
1140 	struct mtd_info *mtd = nand_to_mtd(chip);
1141 	struct stm32_fmc2_nfc *fmc2 = to_stm32_nfc(chip->controller);
1142 	u8 *ecc_calc = chip->ecc.calc_buf;
1143 	u8 *ecc_code = chip->ecc.code_buf;
1144 	u16 sta_map;
1145 	int ret;
1146 
1147 	/* Select the target */
1148 	ret = stm32_fmc2_select_chip(chip, chip->cur_cs);
1149 	if (ret)
1150 		return ret;
1151 
1152 	/* Configure the sequencer */
1153 	stm32_fmc2_rw_page_init(chip, page, 0, false);
1154 
1155 	/* Read the page */
1156 	ret = stm32_fmc2_xfer(chip, buf, 0, false);
1157 	if (ret)
1158 		return ret;
1159 
1160 	sta_map = stm32_fmc2_get_mapping_status(fmc2);
1161 
	/* Check if any errors occurred */
1163 	if (likely(!sta_map)) {
1164 		if (oob_required)
1165 			return nand_change_read_column_op(chip, mtd->writesize,
1166 							  chip->oob_poi,
1167 							  mtd->oobsize, false);
1168 
1169 		return 0;
1170 	}
1171 
1172 	/* Read oob */
1173 	ret = nand_change_read_column_op(chip, mtd->writesize,
1174 					 chip->oob_poi, mtd->oobsize, false);
1175 	if (ret)
1176 		return ret;
1177 
1178 	ret = mtd_ooblayout_get_eccbytes(mtd, ecc_code, chip->oob_poi, 0,
1179 					 chip->ecc.total);
1180 	if (ret)
1181 		return ret;
1182 
1183 	/* Correct data */
1184 	return chip->ecc.correct(chip, buf, ecc_code, ecc_calc);
1185 }
1186 
1187 static int stm32_fmc2_sequencer_read_page_raw(struct nand_chip *chip, u8 *buf,
1188 					      int oob_required, int page)
1189 {
1190 	struct mtd_info *mtd = nand_to_mtd(chip);
1191 	int ret;
1192 
1193 	/* Select the target */
1194 	ret = stm32_fmc2_select_chip(chip, chip->cur_cs);
1195 	if (ret)
1196 		return ret;
1197 
1198 	/* Configure the sequencer */
1199 	stm32_fmc2_rw_page_init(chip, page, 1, false);
1200 
1201 	/* Read the page */
1202 	ret = stm32_fmc2_xfer(chip, buf, 1, false);
1203 	if (ret)
1204 		return ret;
1205 
1206 	/* Read oob */
1207 	if (oob_required)
1208 		return nand_change_read_column_op(chip, mtd->writesize,
1209 						  chip->oob_poi, mtd->oobsize,
1210 						  false);
1211 
1212 	return 0;
1213 }
1214 
1215 static irqreturn_t stm32_fmc2_irq(int irq, void *dev_id)
1216 {
1217 	struct stm32_fmc2_nfc *fmc2 = (struct stm32_fmc2_nfc *)dev_id;
1218 
1219 	if (fmc2->irq_state == FMC2_IRQ_SEQ)
1220 		/* Sequencer is used */
1221 		stm32_fmc2_disable_seq_irq(fmc2);
1222 	else if (fmc2->irq_state == FMC2_IRQ_BCH)
1223 		/* BCH is used */
1224 		stm32_fmc2_disable_bch_irq(fmc2);
1225 
1226 	complete(&fmc2->complete);
1227 
1228 	return IRQ_HANDLED;
1229 }
1230 
1231 static void stm32_fmc2_read_data(struct nand_chip *chip, void *buf,
1232 				 unsigned int len, bool force_8bit)
1233 {
1234 	struct stm32_fmc2_nfc *fmc2 = to_stm32_nfc(chip->controller);
1235 	void __iomem *io_addr_r = fmc2->data_base[fmc2->cs_sel];
1236 
1237 	if (force_8bit && chip->options & NAND_BUSWIDTH_16)
1238 		/* Reconfigure bus width to 8-bit */
1239 		stm32_fmc2_set_buswidth_16(fmc2, false);
1240 
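	/* Head: reach 32-bit alignment with byte/half-word accesses */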
1241 	if (!IS_ALIGNED((uintptr_t)buf, sizeof(u32))) {
1242 		if (!IS_ALIGNED((uintptr_t)buf, sizeof(u16)) && len) {
1243 			*(u8 *)buf = readb_relaxed(io_addr_r);
1244 			buf += sizeof(u8);
1245 			len -= sizeof(u8);
1246 		}
1247 
1248 		if (!IS_ALIGNED((uintptr_t)buf, sizeof(u32)) &&
1249 		    len >= sizeof(u16)) {
1250 			*(u16 *)buf = readw_relaxed(io_addr_r);
1251 			buf += sizeof(u16);
1252 			len -= sizeof(u16);
1253 		}
1254 	}
1255 
1256 	/* Buf is aligned */
1257 	while (len >= sizeof(u32)) {
1258 		*(u32 *)buf = readl_relaxed(io_addr_r);
1259 		buf += sizeof(u32);
1260 		len -= sizeof(u32);
1261 	}
1262 
1263 	/* Read remaining bytes */
1264 	if (len >= sizeof(u16)) {
1265 		*(u16 *)buf = readw_relaxed(io_addr_r);
1266 		buf += sizeof(u16);
1267 		len -= sizeof(u16);
1268 	}
1269 
1270 	if (len)
1271 		*(u8 *)buf = readb_relaxed(io_addr_r);
1272 
1273 	if (force_8bit && chip->options & NAND_BUSWIDTH_16)
1274 		/* Reconfigure bus width to 16-bit */
1275 		stm32_fmc2_set_buswidth_16(fmc2, true);
1276 }
1277 
1278 static void stm32_fmc2_write_data(struct nand_chip *chip, const void *buf,
1279 				  unsigned int len, bool force_8bit)
1280 {
1281 	struct stm32_fmc2_nfc *fmc2 = to_stm32_nfc(chip->controller);
1282 	void __iomem *io_addr_w = fmc2->data_base[fmc2->cs_sel];
1283 
1284 	if (force_8bit && chip->options & NAND_BUSWIDTH_16)
1285 		/* Reconfigure bus width to 8-bit */
1286 		stm32_fmc2_set_buswidth_16(fmc2, false);
1287 
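	/* Head: reach 32-bit alignment with byte/half-word accesses */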
1288 	if (!IS_ALIGNED((uintptr_t)buf, sizeof(u32))) {
1289 		if (!IS_ALIGNED((uintptr_t)buf, sizeof(u16)) && len) {
1290 			writeb_relaxed(*(u8 *)buf, io_addr_w);
1291 			buf += sizeof(u8);
1292 			len -= sizeof(u8);
1293 		}
1294 
1295 		if (!IS_ALIGNED((uintptr_t)buf, sizeof(u32)) &&
1296 		    len >= sizeof(u16)) {
1297 			writew_relaxed(*(u16 *)buf, io_addr_w);
1298 			buf += sizeof(u16);
1299 			len -= sizeof(u16);
1300 		}
1301 	}
1302 
1303 	/* Buf is aligned */
1304 	while (len >= sizeof(u32)) {
1305 		writel_relaxed(*(u32 *)buf, io_addr_w);
1306 		buf += sizeof(u32);
1307 		len -= sizeof(u32);
1308 	}
1309 
1310 	/* Write remaining bytes */
1311 	if (len >= sizeof(u16)) {
1312 		writew_relaxed(*(u16 *)buf, io_addr_w);
1313 		buf += sizeof(u16);
1314 		len -= sizeof(u16);
1315 	}
1316 
1317 	if (len)
1318 		writeb_relaxed(*(u8 *)buf, io_addr_w);
1319 
1320 	if (force_8bit && chip->options & NAND_BUSWIDTH_16)
1321 		/* Reconfigure bus width to 16-bit */
1322 		stm32_fmc2_set_buswidth_16(fmc2, true);
1323 }
1324 
1325 static int stm32_fmc2_exec_op(struct nand_chip *chip,
1326 			      const struct nand_operation *op,
1327 			      bool check_only)
1328 {
1329 	struct stm32_fmc2_nfc *fmc2 = to_stm32_nfc(chip->controller);
1330 	const struct nand_op_instr *instr = NULL;
1331 	unsigned int op_id, i;
1332 	int ret;
1333 
1334 	ret = stm32_fmc2_select_chip(chip, op->cs);
1335 	if (ret)
1336 		return ret;
1337 
1338 	if (check_only)
1339 		return ret;
1340 
1341 	for (op_id = 0; op_id < op->ninstrs; op_id++) {
1342 		instr = &op->instrs[op_id];
1343 
1344 		switch (instr->type) {
1345 		case NAND_OP_CMD_INSTR:
1346 			writeb_relaxed(instr->ctx.cmd.opcode,
1347 				       fmc2->cmd_base[fmc2->cs_sel]);
1348 			break;
1349 
1350 		case NAND_OP_ADDR_INSTR:
1351 			for (i = 0; i < instr->ctx.addr.naddrs; i++)
1352 				writeb_relaxed(instr->ctx.addr.addrs[i],
1353 					       fmc2->addr_base[fmc2->cs_sel]);
1354 			break;
1355 
1356 		case NAND_OP_DATA_IN_INSTR:
1357 			stm32_fmc2_read_data(chip, instr->ctx.data.buf.in,
1358 					     instr->ctx.data.len,
1359 					     instr->ctx.data.force_8bit);
1360 			break;
1361 
1362 		case NAND_OP_DATA_OUT_INSTR:
1363 			stm32_fmc2_write_data(chip, instr->ctx.data.buf.out,
1364 					      instr->ctx.data.len,
1365 					      instr->ctx.data.force_8bit);
1366 			break;
1367 
1368 		case NAND_OP_WAITRDY_INSTR:
1369 			ret = nand_soft_waitrdy(chip,
1370 						instr->ctx.waitrdy.timeout_ms);
1371 			break;
1372 		}
1373 	}
1374 
1375 	return ret;
1376 }
1377 
1378 /* Controller initialization */
1379 static void stm32_fmc2_init(struct stm32_fmc2_nfc *fmc2)
1380 {
1381 	u32 pcr = readl_relaxed(fmc2->io_base + FMC2_PCR);
1382 	u32 bcr1 = readl_relaxed(fmc2->io_base + FMC2_BCR1);
1383 
1384 	/* Set CS used to undefined */
1385 	fmc2->cs_sel = -1;
1386 
1387 	/* Enable wait feature and nand flash memory bank */
1388 	pcr |= FMC2_PCR_PWAITEN;
1389 	pcr |= FMC2_PCR_PBKEN;
1390 
	/* Set buswidth to 8-bit mode for identification */
1392 	pcr &= ~FMC2_PCR_PWID_MASK;
1393 
1394 	/* ECC logic is disabled */
1395 	pcr &= ~FMC2_PCR_ECCEN;
1396 
1397 	/* Default mode */
1398 	pcr &= ~FMC2_PCR_ECCALG;
1399 	pcr &= ~FMC2_PCR_BCHECC;
1400 	pcr &= ~FMC2_PCR_WEN;
1401 
1402 	/* Set default ECC sector size */
1403 	pcr &= ~FMC2_PCR_ECCSS_MASK;
1404 	pcr |= FMC2_PCR_ECCSS(FMC2_PCR_ECCSS_2048);
1405 
1406 	/* Set default tclr/tar timings */
1407 	pcr &= ~FMC2_PCR_TCLR_MASK;
1408 	pcr |= FMC2_PCR_TCLR(FMC2_PCR_TCLR_DEFAULT);
1409 	pcr &= ~FMC2_PCR_TAR_MASK;
1410 	pcr |= FMC2_PCR_TAR(FMC2_PCR_TAR_DEFAULT);
1411 
1412 	/* Enable FMC2 controller */
1413 	bcr1 |= FMC2_BCR1_FMC2EN;
1414 
1415 	writel_relaxed(bcr1, fmc2->io_base + FMC2_BCR1);
1416 	writel_relaxed(pcr, fmc2->io_base + FMC2_PCR);
1417 	writel_relaxed(FMC2_PMEM_DEFAULT, fmc2->io_base + FMC2_PMEM);
1418 	writel_relaxed(FMC2_PATT_DEFAULT, fmc2->io_base + FMC2_PATT);
1419 }
1420 
1421 /* Controller timings */
1422 static void stm32_fmc2_calc_timings(struct nand_chip *chip,
1423 				    const struct nand_sdr_timings *sdrt)
1424 {
1425 	struct stm32_fmc2_nfc *fmc2 = to_stm32_nfc(chip->controller);
1426 	struct stm32_fmc2_nand *nand = to_fmc2_nand(chip);
1427 	struct stm32_fmc2_timings *tims = &nand->timings;
1428 	unsigned long hclk = clk_get_rate(fmc2->clk);
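	/* hclkp: HCLK period in picoseconds (the nand_sdr_timings unit) */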
1429 	unsigned long hclkp = NSEC_PER_SEC / (hclk / 1000);
1430 	unsigned long timing, tar, tclr, thiz, twait;
1431 	unsigned long tset_mem, tset_att, thold_mem, thold_att;
1432 
1433 	tar = max_t(unsigned long, hclkp, sdrt->tAR_min);
1434 	timing = DIV_ROUND_UP(tar, hclkp) - 1;
1435 	tims->tar = min_t(unsigned long, timing, FMC2_PCR_TIMING_MASK);
1436 
1437 	tclr = max_t(unsigned long, hclkp, sdrt->tCLR_min);
1438 	timing = DIV_ROUND_UP(tclr, hclkp) - 1;
1439 	tims->tclr = min_t(unsigned long, timing, FMC2_PCR_TIMING_MASK);
1440 
1441 	tims->thiz = FMC2_THIZ;
1442 	thiz = (tims->thiz + 1) * hclkp;
1443 
1444 	/*
1445 	 * tWAIT > tRP
1446 	 * tWAIT > tWP
1447 	 * tWAIT > tREA + tIO
1448 	 */
1449 	twait = max_t(unsigned long, hclkp, sdrt->tRP_min);
1450 	twait = max_t(unsigned long, twait, sdrt->tWP_min);
1451 	twait = max_t(unsigned long, twait, sdrt->tREA_max + FMC2_TIO);
1452 	timing = DIV_ROUND_UP(twait, hclkp);
1453 	tims->twait = clamp_val(timing, 1, FMC2_PMEM_PATT_TIMING_MASK);
1454 
1455 	/*
1456 	 * tSETUP_MEM > tCS - tWAIT
1457 	 * tSETUP_MEM > tALS - tWAIT
1458 	 * tSETUP_MEM > tDS - (tWAIT - tHIZ)
1459 	 */
1460 	tset_mem = hclkp;
1461 	if (sdrt->tCS_min > twait && (tset_mem < sdrt->tCS_min - twait))
1462 		tset_mem = sdrt->tCS_min - twait;
1463 	if (sdrt->tALS_min > twait && (tset_mem < sdrt->tALS_min - twait))
1464 		tset_mem = sdrt->tALS_min - twait;
1465 	if (twait > thiz && (sdrt->tDS_min > twait - thiz) &&
1466 	    (tset_mem < sdrt->tDS_min - (twait - thiz)))
1467 		tset_mem = sdrt->tDS_min - (twait - thiz);
1468 	timing = DIV_ROUND_UP(tset_mem, hclkp);
1469 	tims->tset_mem = clamp_val(timing, 1, FMC2_PMEM_PATT_TIMING_MASK);
1470 
1471 	/*
1472 	 * tHOLD_MEM > tCH
1473 	 * tHOLD_MEM > tREH - tSETUP_MEM
1474 	 * tHOLD_MEM > max(tRC, tWC) - (tSETUP_MEM + tWAIT)
1475 	 */
1476 	thold_mem = max_t(unsigned long, hclkp, sdrt->tCH_min);
1477 	if (sdrt->tREH_min > tset_mem &&
1478 	    (thold_mem < sdrt->tREH_min - tset_mem))
1479 		thold_mem = sdrt->tREH_min - tset_mem;
1480 	if ((sdrt->tRC_min > tset_mem + twait) &&
1481 	    (thold_mem < sdrt->tRC_min - (tset_mem + twait)))
1482 		thold_mem = sdrt->tRC_min - (tset_mem + twait);
1483 	if ((sdrt->tWC_min > tset_mem + twait) &&
1484 	    (thold_mem < sdrt->tWC_min - (tset_mem + twait)))
1485 		thold_mem = sdrt->tWC_min - (tset_mem + twait);
1486 	timing = DIV_ROUND_UP(thold_mem, hclkp);
1487 	tims->thold_mem = clamp_val(timing, 1, FMC2_PMEM_PATT_TIMING_MASK);
1488 
1489 	/*
1490 	 * tSETUP_ATT > tCS - tWAIT
1491 	 * tSETUP_ATT > tCLS - tWAIT
1492 	 * tSETUP_ATT > tALS - tWAIT
1493 	 * tSETUP_ATT > tRHW - tHOLD_MEM
1494 	 * tSETUP_ATT > tDS - (tWAIT - tHIZ)
1495 	 */
1496 	tset_att = hclkp;
1497 	if (sdrt->tCS_min > twait && (tset_att < sdrt->tCS_min - twait))
1498 		tset_att = sdrt->tCS_min - twait;
1499 	if (sdrt->tCLS_min > twait && (tset_att < sdrt->tCLS_min - twait))
1500 		tset_att = sdrt->tCLS_min - twait;
1501 	if (sdrt->tALS_min > twait && (tset_att < sdrt->tALS_min - twait))
1502 		tset_att = sdrt->tALS_min - twait;
1503 	if (sdrt->tRHW_min > thold_mem &&
1504 	    (tset_att < sdrt->tRHW_min - thold_mem))
1505 		tset_att = sdrt->tRHW_min - thold_mem;
1506 	if (twait > thiz && (sdrt->tDS_min > twait - thiz) &&
1507 	    (tset_att < sdrt->tDS_min - (twait - thiz)))
1508 		tset_att = sdrt->tDS_min - (twait - thiz);
1509 	timing = DIV_ROUND_UP(tset_att, hclkp);
1510 	tims->tset_att = clamp_val(timing, 1, FMC2_PMEM_PATT_TIMING_MASK);
1511 
1512 	/*
1513 	 * tHOLD_ATT > tALH
1514 	 * tHOLD_ATT > tCH
1515 	 * tHOLD_ATT > tCLH
1516 	 * tHOLD_ATT > tCOH
1517 	 * tHOLD_ATT > tDH
1518 	 * tHOLD_ATT > tWB + tIO + tSYNC - tSETUP_MEM
1519 	 * tHOLD_ATT > tADL - tSETUP_MEM
1520 	 * tHOLD_ATT > tWH - tSETUP_MEM
1521 	 * tHOLD_ATT > tWHR - tSETUP_MEM
1522 	 * tHOLD_ATT > tRC - (tSETUP_ATT + tWAIT)
1523 	 * tHOLD_ATT > tWC - (tSETUP_ATT + tWAIT)
1524 	 */
1525 	thold_att = max_t(unsigned long, hclkp, sdrt->tALH_min);
1526 	thold_att = max_t(unsigned long, thold_att, sdrt->tCH_min);
1527 	thold_att = max_t(unsigned long, thold_att, sdrt->tCLH_min);
1528 	thold_att = max_t(unsigned long, thold_att, sdrt->tCOH_min);
1529 	thold_att = max_t(unsigned long, thold_att, sdrt->tDH_min);
1530 	if ((sdrt->tWB_max + FMC2_TIO + FMC2_TSYNC > tset_mem) &&
1531 	    (thold_att < sdrt->tWB_max + FMC2_TIO + FMC2_TSYNC - tset_mem))
1532 		thold_att = sdrt->tWB_max + FMC2_TIO + FMC2_TSYNC - tset_mem;
1533 	if (sdrt->tADL_min > tset_mem &&
1534 	    (thold_att < sdrt->tADL_min - tset_mem))
1535 		thold_att = sdrt->tADL_min - tset_mem;
1536 	if (sdrt->tWH_min > tset_mem &&
1537 	    (thold_att < sdrt->tWH_min - tset_mem))
1538 		thold_att = sdrt->tWH_min - tset_mem;
1539 	if (sdrt->tWHR_min > tset_mem &&
1540 	    (thold_att < sdrt->tWHR_min - tset_mem))
1541 		thold_att = sdrt->tWHR_min - tset_mem;
1542 	if ((sdrt->tRC_min > tset_att + twait) &&
1543 	    (thold_att < sdrt->tRC_min - (tset_att + twait)))
1544 		thold_att = sdrt->tRC_min - (tset_att + twait);
1545 	if ((sdrt->tWC_min > tset_att + twait) &&
1546 	    (thold_att < sdrt->tWC_min - (tset_att + twait)))
1547 		thold_att = sdrt->tWC_min - (tset_att + twait);
1548 	timing = DIV_ROUND_UP(thold_att, hclkp);
1549 	tims->thold_att = clamp_val(timing, 1, FMC2_PMEM_PATT_TIMING_MASK);
1550 }
1551 
1552 static int stm32_fmc2_setup_interface(struct nand_chip *chip, int chipnr,
1553 				      const struct nand_data_interface *conf)
1554 {
1555 	const struct nand_sdr_timings *sdrt;
1556 
1557 	sdrt = nand_get_sdr_timings(conf);
1558 	if (IS_ERR(sdrt))
1559 		return PTR_ERR(sdrt);
1560 
1561 	if (chipnr == NAND_DATA_IFACE_CHECK_ONLY)
1562 		return 0;
1563 
1564 	stm32_fmc2_calc_timings(chip, sdrt);
1565 
1566 	/* Apply timings */
1567 	stm32_fmc2_timings_init(chip);
1568 
1569 	return 0;
1570 }
1571 
1572 /* DMA configuration */
1573 static int stm32_fmc2_dma_setup(struct stm32_fmc2_nfc *fmc2)
1574 {
1575 	int ret;
1576 
1577 	fmc2->dma_tx_ch = dma_request_slave_channel(fmc2->dev, "tx");
1578 	fmc2->dma_rx_ch = dma_request_slave_channel(fmc2->dev, "rx");
1579 	fmc2->dma_ecc_ch = dma_request_slave_channel(fmc2->dev, "ecc");
1580 
1581 	if (!fmc2->dma_tx_ch || !fmc2->dma_rx_ch || !fmc2->dma_ecc_ch) {
1582 		dev_warn(fmc2->dev, "DMAs not defined in the device tree, polling mode is used\n");
1583 		return 0;
1584 	}
1585 
1586 	ret = sg_alloc_table(&fmc2->dma_ecc_sg, FMC2_MAX_SG, GFP_KERNEL);
1587 	if (ret)
1588 		return ret;
1589 
1590 	/* Allocate a buffer to store ECC status registers */
1591 	fmc2->ecc_buf = devm_kzalloc(fmc2->dev, FMC2_MAX_ECC_BUF_LEN,
1592 				     GFP_KERNEL);
1593 	if (!fmc2->ecc_buf)
1594 		return -ENOMEM;
1595 
1596 	ret = sg_alloc_table(&fmc2->dma_data_sg, FMC2_MAX_SG, GFP_KERNEL);
1597 	if (ret)
1598 		return ret;
1599 
1600 	init_completion(&fmc2->dma_data_complete);
1601 	init_completion(&fmc2->dma_ecc_complete);
1602 
1603 	return 0;
1604 }
1605 
1606 /* NAND callbacks setup */
1607 static void stm32_fmc2_nand_callbacks_setup(struct nand_chip *chip)
1608 {
1609 	struct stm32_fmc2_nfc *fmc2 = to_stm32_nfc(chip->controller);
1610 
1611 	/*
1612 	 * Specific callbacks to read/write a page depending on
1613 	 * the mode (polling/sequencer) and the algo used (Hamming, BCH).
1614 	 */
1615 	if (fmc2->dma_tx_ch && fmc2->dma_rx_ch && fmc2->dma_ecc_ch) {
1616 		/* DMA => use sequencer mode callbacks */
1617 		chip->ecc.correct = stm32_fmc2_sequencer_correct;
1618 		chip->ecc.write_page = stm32_fmc2_sequencer_write_page;
1619 		chip->ecc.read_page = stm32_fmc2_sequencer_read_page;
1620 		chip->ecc.write_page_raw = stm32_fmc2_sequencer_write_page_raw;
1621 		chip->ecc.read_page_raw = stm32_fmc2_sequencer_read_page_raw;
1622 	} else {
1623 		/* No DMA => use polling mode callbacks */
1624 		chip->ecc.hwctl = stm32_fmc2_hwctl;
1625 		if (chip->ecc.strength == FMC2_ECC_HAM) {
1626 			/* Hamming is used */
1627 			chip->ecc.calculate = stm32_fmc2_ham_calculate;
1628 			chip->ecc.correct = stm32_fmc2_ham_correct;
1629 			chip->ecc.options |= NAND_ECC_GENERIC_ERASED_CHECK;
1630 		} else {
1631 			/* BCH is used */
1632 			chip->ecc.calculate = stm32_fmc2_bch_calculate;
1633 			chip->ecc.correct = stm32_fmc2_bch_correct;
1634 			chip->ecc.read_page = stm32_fmc2_read_page;
1635 		}
1636 	}
1637 
	/* ECC bytes per 512-byte step, padded to an even count on a 16-bit bus */
1639 	if (chip->ecc.strength == FMC2_ECC_HAM)
1640 		chip->ecc.bytes = chip->options & NAND_BUSWIDTH_16 ? 4 : 3;
1641 	else if (chip->ecc.strength == FMC2_ECC_BCH8)
1642 		chip->ecc.bytes = chip->options & NAND_BUSWIDTH_16 ? 14 : 13;
1643 	else
1644 		chip->ecc.bytes = chip->options & NAND_BUSWIDTH_16 ? 8 : 7;
1645 }
1646 
/*
 * FMC2 OOB layout: the 2-byte bad block marker comes first, immediately
 * followed by the ECC bytes, the remainder of the spare area being free.
 */
1648 static int stm32_fmc2_nand_ooblayout_ecc(struct mtd_info *mtd, int section,
1649 					 struct mtd_oob_region *oobregion)
1650 {
1651 	struct nand_chip *chip = mtd_to_nand(mtd);
1652 	struct nand_ecc_ctrl *ecc = &chip->ecc;
1653 
1654 	if (section)
1655 		return -ERANGE;
1656 
1657 	oobregion->length = ecc->total;
1658 	oobregion->offset = FMC2_BBM_LEN;
1659 
1660 	return 0;
1661 }
1662 
1663 static int stm32_fmc2_nand_ooblayout_free(struct mtd_info *mtd, int section,
1664 					  struct mtd_oob_region *oobregion)
1665 {
1666 	struct nand_chip *chip = mtd_to_nand(mtd);
1667 	struct nand_ecc_ctrl *ecc = &chip->ecc;
1668 
1669 	if (section)
1670 		return -ERANGE;
1671 
1672 	oobregion->length = mtd->oobsize - ecc->total - FMC2_BBM_LEN;
1673 	oobregion->offset = ecc->total + FMC2_BBM_LEN;
1674 
1675 	return 0;
1676 }
1677 
1678 static const struct mtd_ooblayout_ops stm32_fmc2_nand_ooblayout_ops = {
1679 	.ecc = stm32_fmc2_nand_ooblayout_ecc,
1680 	.free = stm32_fmc2_nand_ooblayout_free,
1681 };
1682 
1683 /* FMC2 caps */
1684 static int stm32_fmc2_calc_ecc_bytes(int step_size, int strength)
1685 {
1686 	/* Hamming */
1687 	if (strength == FMC2_ECC_HAM)
1688 		return 4;
1689 
1690 	/* BCH8 */
1691 	if (strength == FMC2_ECC_BCH8)
1692 		return 14;
1693 
1694 	/* BCH4 */
1695 	return 8;
1696 }
1697 
1698 NAND_ECC_CAPS_SINGLE(stm32_fmc2_ecc_caps, stm32_fmc2_calc_ecc_bytes,
1699 		     FMC2_ECC_STEP_SIZE,
1700 		     FMC2_ECC_HAM, FMC2_ECC_BCH4, FMC2_ECC_BCH8);
1701 
1702 /* FMC2 controller ops */
1703 static int stm32_fmc2_attach_chip(struct nand_chip *chip)
1704 {
1705 	struct stm32_fmc2_nfc *fmc2 = to_stm32_nfc(chip->controller);
1706 	struct mtd_info *mtd = nand_to_mtd(chip);
1707 	int ret;
1708 
1709 	/*
1710 	 * Only NAND_ECC_HW mode is actually supported
1711 	 * Hamming => ecc.strength = 1
1712 	 * BCH4 => ecc.strength = 4
1713 	 * BCH8 => ecc.strength = 8
1714 	 * ECC sector size = 512
1715 	 */
1716 	if (chip->ecc.mode != NAND_ECC_HW) {
1717 		dev_err(fmc2->dev, "nand_ecc_mode is not well defined in the DT\n");
1718 		return -EINVAL;
1719 	}
1720 
1721 	ret = nand_ecc_choose_conf(chip, &stm32_fmc2_ecc_caps,
1722 				   mtd->oobsize - FMC2_BBM_LEN);
1723 	if (ret) {
1724 		dev_err(fmc2->dev, "no valid ECC settings set\n");
1725 		return ret;
1726 	}
1727 
1728 	if (mtd->writesize / chip->ecc.size > FMC2_MAX_SG) {
1729 		dev_err(fmc2->dev, "nand page size is not supported\n");
1730 		return -EINVAL;
1731 	}
1732 
1733 	if (chip->bbt_options & NAND_BBT_USE_FLASH)
1734 		chip->bbt_options |= NAND_BBT_NO_OOB;
1735 
1736 	/* NAND callbacks setup */
1737 	stm32_fmc2_nand_callbacks_setup(chip);
1738 
1739 	/* Define ECC layout */
1740 	mtd_set_ooblayout(mtd, &stm32_fmc2_nand_ooblayout_ops);
1741 
1742 	/* Configure bus width to 16-bit */
1743 	if (chip->options & NAND_BUSWIDTH_16)
1744 		stm32_fmc2_set_buswidth_16(fmc2, true);
1745 
1746 	return 0;
1747 }
1748 
1749 static const struct nand_controller_ops stm32_fmc2_nand_controller_ops = {
1750 	.attach_chip = stm32_fmc2_attach_chip,
1751 	.exec_op = stm32_fmc2_exec_op,
1752 	.setup_data_interface = stm32_fmc2_setup_interface,
1753 };
1754 
1755 /* FMC2 probe */
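/*
 * Illustrative (non-authoritative) child node as expected by
 * stm32_fmc2_parse_child(): each NAND chip is a child of the controller
 * node and lists the chip select(s) it is wired to in "reg", e.g.
 *
 *	nand@0 {
 *		reg = <0>;
 *		nand-on-flash-bbt;
 *	};
 */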
1756 static int stm32_fmc2_parse_child(struct stm32_fmc2_nfc *fmc2,
1757 				  struct device_node *dn)
1758 {
1759 	struct stm32_fmc2_nand *nand = &fmc2->nand;
1760 	u32 cs;
1761 	int ret, i;
1762 
1763 	if (!of_get_property(dn, "reg", &nand->ncs))
1764 		return -EINVAL;
1765 
1766 	nand->ncs /= sizeof(u32);
1767 	if (!nand->ncs) {
1768 		dev_err(fmc2->dev, "invalid reg property size\n");
1769 		return -EINVAL;
1770 	}
1771 
1772 	for (i = 0; i < nand->ncs; i++) {
1773 		ret = of_property_read_u32_index(dn, "reg", i, &cs);
1774 		if (ret) {
1775 			dev_err(fmc2->dev, "could not retrieve reg property: %d\n",
1776 				ret);
1777 			return ret;
1778 		}
1779 
		if (cs >= FMC2_MAX_CE) {
1781 			dev_err(fmc2->dev, "invalid reg value: %d\n", cs);
1782 			return -EINVAL;
1783 		}
1784 
1785 		if (fmc2->cs_assigned & BIT(cs)) {
1786 			dev_err(fmc2->dev, "cs already assigned: %d\n", cs);
1787 			return -EINVAL;
1788 		}
1789 
1790 		fmc2->cs_assigned |= BIT(cs);
1791 		nand->cs_used[i] = cs;
1792 	}
1793 
1794 	nand_set_flash_node(&nand->chip, dn);
1795 
1796 	return 0;
1797 }
1798 
1799 static int stm32_fmc2_parse_dt(struct stm32_fmc2_nfc *fmc2)
1800 {
1801 	struct device_node *dn = fmc2->dev->of_node;
1802 	struct device_node *child;
1803 	int nchips = of_get_child_count(dn);
1804 	int ret = 0;
1805 
1806 	if (!nchips) {
1807 		dev_err(fmc2->dev, "NAND chip not defined\n");
1808 		return -EINVAL;
1809 	}
1810 
1811 	if (nchips > 1) {
1812 		dev_err(fmc2->dev, "too many NAND chips defined\n");
1813 		return -EINVAL;
1814 	}
1815 
1816 	for_each_child_of_node(dn, child) {
1817 		ret = stm32_fmc2_parse_child(fmc2, child);
1818 		if (ret < 0) {
1819 			of_node_put(child);
1820 			return ret;
1821 		}
1822 	}
1823 
1824 	return ret;
1825 }
1826 
1827 static int stm32_fmc2_probe(struct platform_device *pdev)
1828 {
1829 	struct device *dev = &pdev->dev;
1830 	struct reset_control *rstc;
1831 	struct stm32_fmc2_nfc *fmc2;
1832 	struct stm32_fmc2_nand *nand;
1833 	struct resource *res;
1834 	struct mtd_info *mtd;
1835 	struct nand_chip *chip;
1836 	int chip_cs, mem_region, ret, irq;
1837 
1838 	fmc2 = devm_kzalloc(dev, sizeof(*fmc2), GFP_KERNEL);
1839 	if (!fmc2)
1840 		return -ENOMEM;
1841 
1842 	fmc2->dev = dev;
1843 	nand_controller_init(&fmc2->base);
1844 	fmc2->base.ops = &stm32_fmc2_nand_controller_ops;
1845 
1846 	ret = stm32_fmc2_parse_dt(fmc2);
1847 	if (ret)
1848 		return ret;
1849 
1850 	res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
1851 	fmc2->io_base = devm_ioremap_resource(dev, res);
1852 	if (IS_ERR(fmc2->io_base))
1853 		return PTR_ERR(fmc2->io_base);
1854 
1855 	fmc2->io_phys_addr = res->start;
1856 
1857 	for (chip_cs = 0, mem_region = 1; chip_cs < FMC2_MAX_CE;
1858 	     chip_cs++, mem_region += 3) {
1859 		if (!(fmc2->cs_assigned & BIT(chip_cs)))
1860 			continue;
1861 
1862 		res = platform_get_resource(pdev, IORESOURCE_MEM, mem_region);
1863 		fmc2->data_base[chip_cs] = devm_ioremap_resource(dev, res);
1864 		if (IS_ERR(fmc2->data_base[chip_cs]))
1865 			return PTR_ERR(fmc2->data_base[chip_cs]);
1866 
1867 		fmc2->data_phys_addr[chip_cs] = res->start;
1868 
1869 		res = platform_get_resource(pdev, IORESOURCE_MEM,
1870 					    mem_region + 1);
1871 		fmc2->cmd_base[chip_cs] = devm_ioremap_resource(dev, res);
1872 		if (IS_ERR(fmc2->cmd_base[chip_cs]))
1873 			return PTR_ERR(fmc2->cmd_base[chip_cs]);
1874 
1875 		res = platform_get_resource(pdev, IORESOURCE_MEM,
1876 					    mem_region + 2);
1877 		fmc2->addr_base[chip_cs] = devm_ioremap_resource(dev, res);
1878 		if (IS_ERR(fmc2->addr_base[chip_cs]))
1879 			return PTR_ERR(fmc2->addr_base[chip_cs]);
1880 	}
1881 
1882 	irq = platform_get_irq(pdev, 0);
1883 	if (irq < 0)
1884 		return irq;
1885 
1886 	ret = devm_request_irq(dev, irq, stm32_fmc2_irq, 0,
1887 			       dev_name(dev), fmc2);
1888 	if (ret) {
1889 		dev_err(dev, "failed to request irq\n");
1890 		return ret;
1891 	}
1892 
1893 	init_completion(&fmc2->complete);
1894 
1895 	fmc2->clk = devm_clk_get(dev, NULL);
1896 	if (IS_ERR(fmc2->clk))
1897 		return PTR_ERR(fmc2->clk);
1898 
1899 	ret = clk_prepare_enable(fmc2->clk);
1900 	if (ret) {
1901 		dev_err(dev, "can not enable the clock\n");
1902 		return ret;
1903 	}
1904 
1905 	rstc = devm_reset_control_get(dev, NULL);
1906 	if (!IS_ERR(rstc)) {
1907 		reset_control_assert(rstc);
1908 		reset_control_deassert(rstc);
1909 	}
1910 
1911 	/* DMA setup */
1912 	ret = stm32_fmc2_dma_setup(fmc2);
	if (ret)
		goto err_scan;
1915 
1916 	/* FMC2 init routine */
1917 	stm32_fmc2_init(fmc2);
1918 
1919 	nand = &fmc2->nand;
1920 	chip = &nand->chip;
1921 	mtd = nand_to_mtd(chip);
1922 	mtd->dev.parent = dev;
1923 
1924 	chip->controller = &fmc2->base;
1925 	chip->options |= NAND_BUSWIDTH_AUTO | NAND_NO_SUBPAGE_WRITE |
1926 			 NAND_USE_BOUNCE_BUFFER;
1927 
1928 	/* Default ECC settings */
1929 	chip->ecc.mode = NAND_ECC_HW;
1930 	chip->ecc.size = FMC2_ECC_STEP_SIZE;
1931 	chip->ecc.strength = FMC2_ECC_BCH8;
1932 
1933 	/* Scan to find existence of the device */
1934 	ret = nand_scan(chip, nand->ncs);
1935 	if (ret)
1936 		goto err_scan;
1937 
1938 	ret = mtd_device_register(mtd, NULL, 0);
1939 	if (ret)
1940 		goto err_device_register;
1941 
1942 	platform_set_drvdata(pdev, fmc2);
1943 
1944 	return 0;
1945 
1946 err_device_register:
1947 	nand_cleanup(chip);
1948 
1949 err_scan:
1950 	if (fmc2->dma_ecc_ch)
1951 		dma_release_channel(fmc2->dma_ecc_ch);
1952 	if (fmc2->dma_tx_ch)
1953 		dma_release_channel(fmc2->dma_tx_ch);
1954 	if (fmc2->dma_rx_ch)
1955 		dma_release_channel(fmc2->dma_rx_ch);
1956 
1957 	sg_free_table(&fmc2->dma_data_sg);
1958 	sg_free_table(&fmc2->dma_ecc_sg);
1959 
1960 	clk_disable_unprepare(fmc2->clk);
1961 
1962 	return ret;
1963 }
1964 
1965 static int stm32_fmc2_remove(struct platform_device *pdev)
1966 {
1967 	struct stm32_fmc2_nfc *fmc2 = platform_get_drvdata(pdev);
1968 	struct stm32_fmc2_nand *nand = &fmc2->nand;
1969 
1970 	nand_release(&nand->chip);
1971 
1972 	if (fmc2->dma_ecc_ch)
1973 		dma_release_channel(fmc2->dma_ecc_ch);
1974 	if (fmc2->dma_tx_ch)
1975 		dma_release_channel(fmc2->dma_tx_ch);
1976 	if (fmc2->dma_rx_ch)
1977 		dma_release_channel(fmc2->dma_rx_ch);
1978 
1979 	sg_free_table(&fmc2->dma_data_sg);
1980 	sg_free_table(&fmc2->dma_ecc_sg);
1981 
1982 	clk_disable_unprepare(fmc2->clk);
1983 
1984 	return 0;
1985 }
1986 
1987 static int __maybe_unused stm32_fmc2_suspend(struct device *dev)
1988 {
1989 	struct stm32_fmc2_nfc *fmc2 = dev_get_drvdata(dev);
1990 
1991 	clk_disable_unprepare(fmc2->clk);
1992 
1993 	pinctrl_pm_select_sleep_state(dev);
1994 
1995 	return 0;
1996 }
1997 
1998 static int __maybe_unused stm32_fmc2_resume(struct device *dev)
1999 {
2000 	struct stm32_fmc2_nfc *fmc2 = dev_get_drvdata(dev);
2001 	struct stm32_fmc2_nand *nand = &fmc2->nand;
2002 	int chip_cs, ret;
2003 
2004 	pinctrl_pm_select_default_state(dev);
2005 
2006 	ret = clk_prepare_enable(fmc2->clk);
2007 	if (ret) {
2008 		dev_err(dev, "can not enable the clock\n");
2009 		return ret;
2010 	}
2011 
2012 	stm32_fmc2_init(fmc2);
2013 
2014 	for (chip_cs = 0; chip_cs < FMC2_MAX_CE; chip_cs++) {
2015 		if (!(fmc2->cs_assigned & BIT(chip_cs)))
2016 			continue;
2017 
2018 		nand_reset(&nand->chip, chip_cs);
2019 	}
2020 
2021 	return 0;
2022 }
2023 
2024 static SIMPLE_DEV_PM_OPS(stm32_fmc2_pm_ops, stm32_fmc2_suspend,
2025 			 stm32_fmc2_resume);
2026 
2027 static const struct of_device_id stm32_fmc2_match[] = {
2028 	{.compatible = "st,stm32mp15-fmc2"},
2029 	{}
2030 };
2031 MODULE_DEVICE_TABLE(of, stm32_fmc2_match);
2032 
2033 static struct platform_driver stm32_fmc2_driver = {
2034 	.probe	= stm32_fmc2_probe,
2035 	.remove	= stm32_fmc2_remove,
2036 	.driver	= {
2037 		.name = "stm32_fmc2_nand",
2038 		.of_match_table = stm32_fmc2_match,
2039 		.pm = &stm32_fmc2_pm_ops,
2040 	},
2041 };
2042 module_platform_driver(stm32_fmc2_driver);
2043 
2044 MODULE_ALIAS("platform:stm32_fmc2_nand");
2045 MODULE_AUTHOR("Christophe Kerello <christophe.kerello@st.com>");
MODULE_DESCRIPTION("STMicroelectronics STM32 FMC2 NAND driver");
2047 MODULE_LICENSE("GPL v2");
2048