// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (C) STMicroelectronics 2018
 * Author: Christophe Kerello <christophe.kerello@st.com>
 */

#include <linux/clk.h>
#include <linux/dmaengine.h>
#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/interrupt.h>
#include <linux/iopoll.h>
#include <linux/module.h>
#include <linux/mtd/rawnand.h>
#include <linux/pinctrl/consumer.h>
#include <linux/platform_device.h>
#include <linux/reset.h>

/* Bad block marker length */
#define FMC2_BBM_LEN 2

/* ECC step size */
#define FMC2_ECC_STEP_SIZE 512

/* BCHDSRx registers length */
#define FMC2_BCHDSRS_LEN 20

/* HECCR length */
#define FMC2_HECCR_LEN 4

/* Max requests done for an 8k NAND page size */
#define FMC2_MAX_SG 16

/* Max chip enable */
#define FMC2_MAX_CE 2

/* Max ECC buffer length */
#define FMC2_MAX_ECC_BUF_LEN (FMC2_BCHDSRS_LEN * FMC2_MAX_SG)

/* Timings */
#define FMC2_THIZ 1
#define FMC2_TIO 8000
#define FMC2_TSYNC 3000
#define FMC2_PCR_TIMING_MASK 0xf
#define FMC2_PMEM_PATT_TIMING_MASK 0xff

/* FMC2 Controller Registers */
#define FMC2_BCR1 0x0
#define FMC2_PCR 0x80
#define FMC2_SR 0x84
#define FMC2_PMEM 0x88
#define FMC2_PATT 0x8c
#define FMC2_HECCR 0x94
#define FMC2_CSQCR 0x200
#define FMC2_CSQCFGR1 0x204
#define FMC2_CSQCFGR2 0x208
#define FMC2_CSQCFGR3 0x20c
#define FMC2_CSQAR1 0x210
#define FMC2_CSQAR2 0x214
#define FMC2_CSQIER 0x220
#define FMC2_CSQISR 0x224
#define FMC2_CSQICR 0x228
#define FMC2_CSQEMSR 0x230
#define FMC2_BCHIER 0x250
#define FMC2_BCHISR 0x254
#define FMC2_BCHICR 0x258
#define FMC2_BCHPBR1 0x260
#define FMC2_BCHPBR2 0x264
#define FMC2_BCHPBR3 0x268
#define FMC2_BCHPBR4 0x26c
#define FMC2_BCHDSR0 0x27c
#define FMC2_BCHDSR1 0x280
#define FMC2_BCHDSR2 0x284
#define FMC2_BCHDSR3 0x288
#define FMC2_BCHDSR4 0x28c

/* Register: FMC2_BCR1 */
#define FMC2_BCR1_FMC2EN BIT(31)

/* Register: FMC2_PCR */
#define FMC2_PCR_PWAITEN BIT(1)
#define FMC2_PCR_PBKEN BIT(2)
#define FMC2_PCR_PWID_MASK GENMASK(5, 4)
#define FMC2_PCR_PWID(x) (((x) & 0x3) << 4)
#define FMC2_PCR_PWID_BUSWIDTH_8 0
#define FMC2_PCR_PWID_BUSWIDTH_16 1
#define FMC2_PCR_ECCEN BIT(6)
#define FMC2_PCR_ECCALG BIT(8)
#define FMC2_PCR_TCLR_MASK GENMASK(12, 9)
#define FMC2_PCR_TCLR(x) (((x) & 0xf) << 9)
#define FMC2_PCR_TCLR_DEFAULT 0xf
#define FMC2_PCR_TAR_MASK GENMASK(16, 13)
#define FMC2_PCR_TAR(x) (((x) & 0xf) << 13)
#define FMC2_PCR_TAR_DEFAULT 0xf
#define FMC2_PCR_ECCSS_MASK GENMASK(19, 17)
#define FMC2_PCR_ECCSS(x) (((x) & 0x7) << 17)
#define FMC2_PCR_ECCSS_512 1
#define FMC2_PCR_ECCSS_2048 3
#define FMC2_PCR_BCHECC BIT(24)
#define FMC2_PCR_WEN BIT(25)

/* Register: FMC2_SR */
#define FMC2_SR_NWRF BIT(6)

/* Register: FMC2_PMEM */
#define FMC2_PMEM_MEMSET(x) (((x) & 0xff) << 0)
#define FMC2_PMEM_MEMWAIT(x) (((x) & 0xff) << 8)
#define FMC2_PMEM_MEMHOLD(x) (((x) & 0xff) << 16)
#define FMC2_PMEM_MEMHIZ(x) (((x) & 0xff) << 24)
#define FMC2_PMEM_DEFAULT 0x0a0a0a0a

/* Register: FMC2_PATT */
#define FMC2_PATT_ATTSET(x) (((x) & 0xff) << 0)
#define FMC2_PATT_ATTWAIT(x) (((x) & 0xff) << 8)
#define FMC2_PATT_ATTHOLD(x) (((x) & 0xff) << 16)
#define FMC2_PATT_ATTHIZ(x) (((x) & 0xff) << 24)
#define FMC2_PATT_DEFAULT 0x0a0a0a0a

/* Register: FMC2_CSQCR */
#define FMC2_CSQCR_CSQSTART BIT(0)

/* Register: FMC2_CSQCFGR1 */
#define FMC2_CSQCFGR1_CMD2EN BIT(1)
#define FMC2_CSQCFGR1_DMADEN BIT(2)
#define FMC2_CSQCFGR1_ACYNBR(x) (((x) & 0x7) << 4)
#define FMC2_CSQCFGR1_CMD1(x) (((x) & 0xff) << 8)
#define FMC2_CSQCFGR1_CMD2(x) (((x) & 0xff) << 16)
#define FMC2_CSQCFGR1_CMD1T BIT(24)
#define FMC2_CSQCFGR1_CMD2T BIT(25)

/* Register: FMC2_CSQCFGR2 */
#define FMC2_CSQCFGR2_SQSDTEN BIT(0)
#define FMC2_CSQCFGR2_RCMD2EN BIT(1)
#define FMC2_CSQCFGR2_DMASEN BIT(2)
#define FMC2_CSQCFGR2_RCMD1(x) (((x) & 0xff) << 8)
#define FMC2_CSQCFGR2_RCMD2(x) (((x) & 0xff) << 16)
#define FMC2_CSQCFGR2_RCMD1T BIT(24)
#define FMC2_CSQCFGR2_RCMD2T BIT(25)

/* Register: FMC2_CSQCFGR3 */
#define FMC2_CSQCFGR3_SNBR(x) (((x) & 0x1f) << 8)
#define FMC2_CSQCFGR3_AC1T BIT(16)
#define FMC2_CSQCFGR3_AC2T BIT(17)
#define FMC2_CSQCFGR3_AC3T BIT(18)
#define FMC2_CSQCFGR3_AC4T BIT(19)
#define FMC2_CSQCFGR3_AC5T BIT(20)
#define FMC2_CSQCFGR3_SDT BIT(21)
#define FMC2_CSQCFGR3_RAC1T BIT(22)
#define FMC2_CSQCFGR3_RAC2T BIT(23)

/* Register: FMC2_CSQCAR1 */
#define FMC2_CSQCAR1_ADDC1(x) (((x) & 0xff) << 0)
#define FMC2_CSQCAR1_ADDC2(x) (((x) & 0xff) << 8)
#define FMC2_CSQCAR1_ADDC3(x) (((x) & 0xff) << 16)
#define FMC2_CSQCAR1_ADDC4(x) (((x) & 0xff) << 24)

/* Register: FMC2_CSQCAR2 */
#define FMC2_CSQCAR2_ADDC5(x) (((x) & 0xff) << 0)
#define FMC2_CSQCAR2_NANDCEN(x) (((x) & 0x3) << 10)
#define FMC2_CSQCAR2_SAO(x) (((x) & 0xffff) << 16)

/* Register: FMC2_CSQIER */
#define FMC2_CSQIER_TCIE BIT(0)

/* Register: FMC2_CSQICR */
#define FMC2_CSQICR_CLEAR_IRQ GENMASK(4, 0)

/* Register: FMC2_CSQEMSR */
#define FMC2_CSQEMSR_SEM GENMASK(15, 0)

/* Register: FMC2_BCHIER */
#define FMC2_BCHIER_DERIE BIT(1)
#define FMC2_BCHIER_EPBRIE BIT(4)

/* Register: FMC2_BCHICR */
#define FMC2_BCHICR_CLEAR_IRQ GENMASK(4, 0)

/* Register: FMC2_BCHDSR0 */
#define FMC2_BCHDSR0_DUE BIT(0)
#define FMC2_BCHDSR0_DEF BIT(1)
#define FMC2_BCHDSR0_DEN_MASK GENMASK(7, 4)
#define FMC2_BCHDSR0_DEN_SHIFT 4

/* Register: FMC2_BCHDSR1 */
#define FMC2_BCHDSR1_EBP1_MASK GENMASK(12, 0)
#define FMC2_BCHDSR1_EBP2_MASK GENMASK(28, 16)
#define FMC2_BCHDSR1_EBP2_SHIFT 16

/* Register: FMC2_BCHDSR2 */
#define FMC2_BCHDSR2_EBP3_MASK GENMASK(12, 0)
#define FMC2_BCHDSR2_EBP4_MASK GENMASK(28, 16)
#define FMC2_BCHDSR2_EBP4_SHIFT 16

/* Register: FMC2_BCHDSR3 */
#define FMC2_BCHDSR3_EBP5_MASK GENMASK(12, 0)
#define FMC2_BCHDSR3_EBP6_MASK GENMASK(28, 16)
#define FMC2_BCHDSR3_EBP6_SHIFT 16

/* Register: FMC2_BCHDSR4 */
#define FMC2_BCHDSR4_EBP7_MASK GENMASK(12, 0)
#define FMC2_BCHDSR4_EBP8_MASK GENMASK(28, 16)
#define FMC2_BCHDSR4_EBP8_SHIFT 16

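/* Supported ECC strengths: the enum value is the number of correctable bits per 512-byte sector */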
enum stm32_fmc2_ecc {
	FMC2_ECC_HAM = 1,
	FMC2_ECC_BCH4 = 4,
	FMC2_ECC_BCH8 = 8
};

struct stm32_fmc2_timings {
	u8 tclr;
	u8 tar;
	u8 thiz;
	u8 twait;
	u8 thold_mem;
	u8 tset_mem;
	u8 thold_att;
	u8 tset_att;
};

struct stm32_fmc2_nand {
	struct nand_chip chip;
	struct stm32_fmc2_timings timings;
	int ncs;
	int cs_used[FMC2_MAX_CE];
};

static inline struct stm32_fmc2_nand *to_fmc2_nand(struct nand_chip *chip)
{
	return container_of(chip, struct stm32_fmc2_nand, chip);
}

struct stm32_fmc2_nfc {
	struct nand_controller base;
	struct stm32_fmc2_nand nand;
	struct device *dev;
	void __iomem *io_base;
	void __iomem *data_base[FMC2_MAX_CE];
	void __iomem *cmd_base[FMC2_MAX_CE];
	void __iomem *addr_base[FMC2_MAX_CE];
	phys_addr_t io_phys_addr;
	phys_addr_t data_phys_addr[FMC2_MAX_CE];
	struct clk *clk;

	struct dma_chan *dma_tx_ch;
	struct dma_chan *dma_rx_ch;
	struct dma_chan *dma_ecc_ch;
	struct sg_table dma_data_sg;
	struct sg_table dma_ecc_sg;
	u8 *ecc_buf;
	int dma_ecc_len;

	struct completion complete;
	struct completion dma_data_complete;
	struct completion dma_ecc_complete;

	u8 cs_assigned;
	int cs_sel;
};

static inline struct stm32_fmc2_nfc *to_stm32_nfc(struct nand_controller *base)
{
	return container_of(base, struct stm32_fmc2_nfc, base);
}

/* Timings configuration */
static void stm32_fmc2_timings_init(struct nand_chip *chip)
{
	struct stm32_fmc2_nfc *fmc2 = to_stm32_nfc(chip->controller);
	struct stm32_fmc2_nand *nand = to_fmc2_nand(chip);
	struct stm32_fmc2_timings *timings = &nand->timings;
	u32 pcr = readl_relaxed(fmc2->io_base + FMC2_PCR);
	u32 pmem, patt;

	/* Set tclr/tar timings */
	pcr &= ~FMC2_PCR_TCLR_MASK;
	pcr |= FMC2_PCR_TCLR(timings->tclr);
	pcr &= ~FMC2_PCR_TAR_MASK;
	pcr |= FMC2_PCR_TAR(timings->tar);

	/* Set tset/twait/thold/thiz timings in common bank */
	pmem = FMC2_PMEM_MEMSET(timings->tset_mem);
	pmem |= FMC2_PMEM_MEMWAIT(timings->twait);
	pmem |= FMC2_PMEM_MEMHOLD(timings->thold_mem);
	pmem |= FMC2_PMEM_MEMHIZ(timings->thiz);

	/* Set tset/twait/thold/thiz timings in attribute bank */
	patt = FMC2_PATT_ATTSET(timings->tset_att);
	patt |= FMC2_PATT_ATTWAIT(timings->twait);
	patt |= FMC2_PATT_ATTHOLD(timings->thold_att);
	patt |= FMC2_PATT_ATTHIZ(timings->thiz);

	writel_relaxed(pcr, fmc2->io_base + FMC2_PCR);
	writel_relaxed(pmem, fmc2->io_base + FMC2_PMEM);
	writel_relaxed(patt, fmc2->io_base + FMC2_PATT);
}

/* Controller configuration */
static void stm32_fmc2_setup(struct nand_chip *chip)
{
	struct stm32_fmc2_nfc *fmc2 = to_stm32_nfc(chip->controller);
	u32 pcr = readl_relaxed(fmc2->io_base + FMC2_PCR);

	/* Configure ECC algorithm (default configuration is Hamming) */
	pcr &= ~FMC2_PCR_ECCALG;
	pcr &= ~FMC2_PCR_BCHECC;
	if (chip->ecc.strength == FMC2_ECC_BCH8) {
		pcr |= FMC2_PCR_ECCALG;
		pcr |= FMC2_PCR_BCHECC;
	} else if (chip->ecc.strength == FMC2_ECC_BCH4) {
		pcr |= FMC2_PCR_ECCALG;
	}

	/* Set buswidth */
	pcr &= ~FMC2_PCR_PWID_MASK;
	if (chip->options & NAND_BUSWIDTH_16)
		pcr |= FMC2_PCR_PWID(FMC2_PCR_PWID_BUSWIDTH_16);

	/* Set ECC sector size */
	pcr &= ~FMC2_PCR_ECCSS_MASK;
	pcr |= FMC2_PCR_ECCSS(FMC2_PCR_ECCSS_512);

	writel_relaxed(pcr, fmc2->io_base + FMC2_PCR);
}

/* Select target */
static int stm32_fmc2_select_chip(struct nand_chip *chip, int chipnr)
{
	struct stm32_fmc2_nfc *fmc2 = to_stm32_nfc(chip->controller);
	struct stm32_fmc2_nand *nand = to_fmc2_nand(chip);
	struct dma_slave_config dma_cfg;
	int ret;

	if (nand->cs_used[chipnr] == fmc2->cs_sel)
		return 0;

	fmc2->cs_sel = nand->cs_used[chipnr];

	/* FMC2 setup routine */
	stm32_fmc2_setup(chip);

	/* Apply timings */
	stm32_fmc2_timings_init(chip);

	if (fmc2->dma_tx_ch && fmc2->dma_rx_ch) {
		memset(&dma_cfg, 0, sizeof(dma_cfg));
		dma_cfg.src_addr = fmc2->data_phys_addr[fmc2->cs_sel];
		dma_cfg.dst_addr = fmc2->data_phys_addr[fmc2->cs_sel];
		dma_cfg.src_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
		dma_cfg.dst_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
		dma_cfg.src_maxburst = 32;
		dma_cfg.dst_maxburst = 32;

		ret = dmaengine_slave_config(fmc2->dma_tx_ch, &dma_cfg);
		if (ret) {
			dev_err(fmc2->dev, "tx DMA engine slave config failed\n");
			return ret;
		}

		ret = dmaengine_slave_config(fmc2->dma_rx_ch, &dma_cfg);
		if (ret) {
			dev_err(fmc2->dev, "rx DMA engine slave config failed\n");
			return ret;
		}
	}

	if (fmc2->dma_ecc_ch) {
		/*
		 * Hamming: we read HECCR register
		 * BCH4/BCH8: we read BCHDSRSx registers
		 */
		memset(&dma_cfg, 0, sizeof(dma_cfg));
		dma_cfg.src_addr = fmc2->io_phys_addr;
		dma_cfg.src_addr += chip->ecc.strength == FMC2_ECC_HAM ?
				    FMC2_HECCR : FMC2_BCHDSR0;
		dma_cfg.src_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;

		ret = dmaengine_slave_config(fmc2->dma_ecc_ch, &dma_cfg);
		if (ret) {
			dev_err(fmc2->dev, "ECC DMA engine slave config failed\n");
			return ret;
		}

		/* Calculate ECC length needed for one sector */
		fmc2->dma_ecc_len = chip->ecc.strength == FMC2_ECC_HAM ?
				    FMC2_HECCR_LEN : FMC2_BCHDSRS_LEN;
	}

	return 0;
}

/* Set bus width to 16-bit or 8-bit */
static void stm32_fmc2_set_buswidth_16(struct stm32_fmc2_nfc *fmc2, bool set)
{
	u32 pcr = readl_relaxed(fmc2->io_base + FMC2_PCR);

	pcr &= ~FMC2_PCR_PWID_MASK;
	if (set)
		pcr |= FMC2_PCR_PWID(FMC2_PCR_PWID_BUSWIDTH_16);
	writel_relaxed(pcr, fmc2->io_base + FMC2_PCR);
}

/* Enable IRQ sources when the sequencer is used */
static inline void stm32_fmc2_enable_seq_irq(struct stm32_fmc2_nfc *fmc2)
{
	u32 csqier = readl_relaxed(fmc2->io_base + FMC2_CSQIER);

	csqier |= FMC2_CSQIER_TCIE;

	writel_relaxed(csqier, fmc2->io_base + FMC2_CSQIER);
}

/* Disable IRQ sources when the sequencer is used */
static inline void stm32_fmc2_disable_seq_irq(struct stm32_fmc2_nfc *fmc2)
{
	u32 csqier = readl_relaxed(fmc2->io_base + FMC2_CSQIER);

	csqier &= ~FMC2_CSQIER_TCIE;

	writel_relaxed(csqier, fmc2->io_base + FMC2_CSQIER);
}

/* Clear IRQ sources when the sequencer is used */
static inline void stm32_fmc2_clear_seq_irq(struct stm32_fmc2_nfc *fmc2)
{
	writel_relaxed(FMC2_CSQICR_CLEAR_IRQ, fmc2->io_base + FMC2_CSQICR);
}

/*
 * ECC Hamming calculation
 * ECC is 3 bytes for 512 bytes of data (supports correction of at most
 * 1 bit error)
 */
static inline void stm32_fmc2_ham_set_ecc(const u32 ecc_sta, u8 *ecc)
{
	ecc[0] = ecc_sta;
	ecc[1] = ecc_sta >> 8;
	ecc[2] = ecc_sta >> 16;
}

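/*
 * The syndrome (read ECC XOR calculated ECC) is handled as 2-bit pairs:
 * the first three pairs encode the faulty bit position and the next nine
 * pairs encode the faulty byte position. A pair equal to 10b sets the
 * corresponding position bit, 01b leaves it clear, and any other value
 * means the error is not correctable.
 */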
static int stm32_fmc2_ham_correct(struct nand_chip *chip, u8 *dat,
				  u8 *read_ecc, u8 *calc_ecc)
{
	u8 bit_position = 0, b0, b1, b2;
	u32 byte_addr = 0, b;
	u32 i, shifting = 1;

	/* Indicate which bit and byte is faulty (if any) */
	b0 = read_ecc[0] ^ calc_ecc[0];
	b1 = read_ecc[1] ^ calc_ecc[1];
	b2 = read_ecc[2] ^ calc_ecc[2];
	b = b0 | (b1 << 8) | (b2 << 16);

	/* No errors */
	if (likely(!b))
		return 0;

	/* Calculate bit position */
	for (i = 0; i < 3; i++) {
		switch (b % 4) {
		case 2:
			bit_position += shifting;
			/* fall through */
		case 1:
			break;
		default:
			return -EBADMSG;
		}
		shifting <<= 1;
		b >>= 2;
	}

	/* Calculate byte position */
	shifting = 1;
	for (i = 0; i < 9; i++) {
		switch (b % 4) {
		case 2:
			byte_addr += shifting;
			/* fall through */
		case 1:
			break;
		default:
			return -EBADMSG;
		}
		shifting <<= 1;
		b >>= 2;
	}

	/* Flip the bit */
	dat[byte_addr] ^= (1 << bit_position);

	return 1;
}

/* BCH algorithm correction */
static int stm32_fmc2_bch_decode(int eccsize, u8 *dat, u32 *ecc_sta)
{
	u32 bchdsr0 = ecc_sta[0];
	u32 bchdsr1 = ecc_sta[1];
	u32 bchdsr2 = ecc_sta[2];
	u32 bchdsr3 = ecc_sta[3];
	u32 bchdsr4 = ecc_sta[4];
	u16 pos[8];
	int i, den;
	unsigned int nb_errs = 0;

	/* No errors found */
	if (likely(!(bchdsr0 & FMC2_BCHDSR0_DEF)))
		return 0;

	/* Too many errors detected */
	if (unlikely(bchdsr0 & FMC2_BCHDSR0_DUE))
		return -EBADMSG;

	pos[0] = bchdsr1 & FMC2_BCHDSR1_EBP1_MASK;
	pos[1] = (bchdsr1 & FMC2_BCHDSR1_EBP2_MASK) >> FMC2_BCHDSR1_EBP2_SHIFT;
	pos[2] = bchdsr2 & FMC2_BCHDSR2_EBP3_MASK;
	pos[3] = (bchdsr2 & FMC2_BCHDSR2_EBP4_MASK) >> FMC2_BCHDSR2_EBP4_SHIFT;
	pos[4] = bchdsr3 & FMC2_BCHDSR3_EBP5_MASK;
	pos[5] = (bchdsr3 & FMC2_BCHDSR3_EBP6_MASK) >> FMC2_BCHDSR3_EBP6_SHIFT;
	pos[6] = bchdsr4 & FMC2_BCHDSR4_EBP7_MASK;
	pos[7] = (bchdsr4 & FMC2_BCHDSR4_EBP8_MASK) >> FMC2_BCHDSR4_EBP8_SHIFT;

	den = (bchdsr0 & FMC2_BCHDSR0_DEN_MASK) >> FMC2_BCHDSR0_DEN_SHIFT;
	for (i = 0; i < den; i++) {
		if (pos[i] < eccsize * 8) {
			change_bit(pos[i], (unsigned long *)dat);
			nb_errs++;
		}
	}

	return nb_errs;
}

/* Sequencer read/write configuration */
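/*
 * Program the sequencer for a page read or page program: the command
 * opcodes, the number of address cycles, the spare area offset and the
 * DMA request enables all depend on the page number, the raw flag and
 * the bus width.
 */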
static void stm32_fmc2_rw_page_init(struct nand_chip *chip, int page,
				    int raw, bool write_data)
{
	struct stm32_fmc2_nfc *fmc2 = to_stm32_nfc(chip->controller);
	struct mtd_info *mtd = nand_to_mtd(chip);
	u32 csqcfgr1, csqcfgr2, csqcfgr3;
	u32 csqar1, csqar2;
	u32 ecc_offset = mtd->writesize + FMC2_BBM_LEN;
	u32 pcr = readl_relaxed(fmc2->io_base + FMC2_PCR);

	if (write_data)
		pcr |= FMC2_PCR_WEN;
	else
		pcr &= ~FMC2_PCR_WEN;
	writel_relaxed(pcr, fmc2->io_base + FMC2_PCR);

	/*
	 * - Set Program Page/Page Read command
	 * - Enable DMA request data
	 * - Set timings
	 */
	csqcfgr1 = FMC2_CSQCFGR1_DMADEN | FMC2_CSQCFGR1_CMD1T;
	if (write_data)
		csqcfgr1 |= FMC2_CSQCFGR1_CMD1(NAND_CMD_SEQIN);
	else
		csqcfgr1 |= FMC2_CSQCFGR1_CMD1(NAND_CMD_READ0) |
			    FMC2_CSQCFGR1_CMD2EN |
			    FMC2_CSQCFGR1_CMD2(NAND_CMD_READSTART) |
			    FMC2_CSQCFGR1_CMD2T;

	/*
	 * - Set Random Data Input/Random Data Read command
	 * - Enable the sequencer to access the Spare data area
	 * - Enable DMA request status decoding for read
	 * - Set timings
	 */
	if (write_data)
		csqcfgr2 = FMC2_CSQCFGR2_RCMD1(NAND_CMD_RNDIN);
	else
		csqcfgr2 = FMC2_CSQCFGR2_RCMD1(NAND_CMD_RNDOUT) |
			   FMC2_CSQCFGR2_RCMD2EN |
			   FMC2_CSQCFGR2_RCMD2(NAND_CMD_RNDOUTSTART) |
			   FMC2_CSQCFGR2_RCMD1T |
			   FMC2_CSQCFGR2_RCMD2T;
	if (!raw) {
		csqcfgr2 |= write_data ? 0 : FMC2_CSQCFGR2_DMASEN;
		csqcfgr2 |= FMC2_CSQCFGR2_SQSDTEN;
	}

	/*
	 * - Set the number of sectors to be written
	 * - Set timings
	 */
	csqcfgr3 = FMC2_CSQCFGR3_SNBR(chip->ecc.steps - 1);
	if (write_data) {
		csqcfgr3 |= FMC2_CSQCFGR3_RAC2T;
		if (chip->options & NAND_ROW_ADDR_3)
			csqcfgr3 |= FMC2_CSQCFGR3_AC5T;
		else
			csqcfgr3 |= FMC2_CSQCFGR3_AC4T;
	}

	/*
	 * Set the first four address cycles
	 * Byte 1 and byte 2 => column, we start at 0x0
	 * Byte 3 and byte 4 => page
	 */
	csqar1 = FMC2_CSQCAR1_ADDC3(page);
	csqar1 |= FMC2_CSQCAR1_ADDC4(page >> 8);

	/*
	 * - Set chip enable number
	 * - Set ECC byte offset in the spare area
	 * - Calculate the number of address cycles to be issued
	 * - Set byte 5 of address cycle if needed
	 */
	csqar2 = FMC2_CSQCAR2_NANDCEN(fmc2->cs_sel);
	if (chip->options & NAND_BUSWIDTH_16)
		csqar2 |= FMC2_CSQCAR2_SAO(ecc_offset >> 1);
	else
		csqar2 |= FMC2_CSQCAR2_SAO(ecc_offset);
	if (chip->options & NAND_ROW_ADDR_3) {
		csqcfgr1 |= FMC2_CSQCFGR1_ACYNBR(5);
		csqar2 |= FMC2_CSQCAR2_ADDC5(page >> 16);
	} else {
		csqcfgr1 |= FMC2_CSQCFGR1_ACYNBR(4);
	}

	writel_relaxed(csqcfgr1, fmc2->io_base + FMC2_CSQCFGR1);
	writel_relaxed(csqcfgr2, fmc2->io_base + FMC2_CSQCFGR2);
	writel_relaxed(csqcfgr3, fmc2->io_base + FMC2_CSQCFGR3);
	writel_relaxed(csqar1, fmc2->io_base + FMC2_CSQAR1);
	writel_relaxed(csqar2, fmc2->io_base + FMC2_CSQAR2);
}

static void stm32_fmc2_dma_callback(void *arg)
{
	complete((struct completion *)arg);
}

/* Read/write data from/to a page */
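/*
 * The page data is mapped in a scatterlist with one entry per ECC sector.
 * On a non-raw read, a second DMA channel drains the ECC status registers
 * into ecc_buf while the sequencer runs. The transfer then waits for the
 * sequencer completion and for the DMA completion(s).
 */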
static int stm32_fmc2_xfer(struct nand_chip *chip, const u8 *buf,
			   int raw, bool write_data)
{
	struct stm32_fmc2_nfc *fmc2 = to_stm32_nfc(chip->controller);
	struct dma_async_tx_descriptor *desc_data, *desc_ecc;
	struct scatterlist *sg;
	struct dma_chan *dma_ch = fmc2->dma_rx_ch;
	enum dma_data_direction dma_data_dir = DMA_FROM_DEVICE;
	enum dma_transfer_direction dma_transfer_dir = DMA_DEV_TO_MEM;
	u32 csqcr = readl_relaxed(fmc2->io_base + FMC2_CSQCR);
	int eccsteps = chip->ecc.steps;
	int eccsize = chip->ecc.size;
	const u8 *p = buf;
	int s, ret;

	/* Configure DMA data */
	if (write_data) {
		dma_data_dir = DMA_TO_DEVICE;
		dma_transfer_dir = DMA_MEM_TO_DEV;
		dma_ch = fmc2->dma_tx_ch;
	}

	for_each_sg(fmc2->dma_data_sg.sgl, sg, eccsteps, s) {
		sg_set_buf(sg, p, eccsize);
		p += eccsize;
	}

	/* dma_map_sg() returns the number of mapped entries, 0 on error */
	ret = dma_map_sg(fmc2->dev, fmc2->dma_data_sg.sgl,
			 eccsteps, dma_data_dir);
	if (!ret)
		return -EIO;

	desc_data = dmaengine_prep_slave_sg(dma_ch, fmc2->dma_data_sg.sgl,
					    eccsteps, dma_transfer_dir,
					    DMA_PREP_INTERRUPT);
	if (!desc_data) {
		ret = -ENOMEM;
		goto err_unmap_data;
	}

	reinit_completion(&fmc2->dma_data_complete);
	reinit_completion(&fmc2->complete);
	desc_data->callback = stm32_fmc2_dma_callback;
	desc_data->callback_param = &fmc2->dma_data_complete;
	ret = dma_submit_error(dmaengine_submit(desc_data));
	if (ret)
		goto err_unmap_data;

	dma_async_issue_pending(dma_ch);

	if (!write_data && !raw) {
		/* Configure DMA ECC status */
		p = fmc2->ecc_buf;
		for_each_sg(fmc2->dma_ecc_sg.sgl, sg, eccsteps, s) {
			sg_set_buf(sg, p, fmc2->dma_ecc_len);
			p += fmc2->dma_ecc_len;
		}

		ret = dma_map_sg(fmc2->dev, fmc2->dma_ecc_sg.sgl,
				 eccsteps, dma_data_dir);
		if (!ret) {
			ret = -EIO;
			goto err_unmap_data;
		}

		desc_ecc = dmaengine_prep_slave_sg(fmc2->dma_ecc_ch,
						   fmc2->dma_ecc_sg.sgl,
						   eccsteps, dma_transfer_dir,
						   DMA_PREP_INTERRUPT);
		if (!desc_ecc) {
			ret = -ENOMEM;
			goto err_unmap_ecc;
		}

		reinit_completion(&fmc2->dma_ecc_complete);
		desc_ecc->callback = stm32_fmc2_dma_callback;
		desc_ecc->callback_param = &fmc2->dma_ecc_complete;
		ret = dma_submit_error(dmaengine_submit(desc_ecc));
		if (ret)
			goto err_unmap_ecc;

		dma_async_issue_pending(fmc2->dma_ecc_ch);
	}

	stm32_fmc2_clear_seq_irq(fmc2);
	stm32_fmc2_enable_seq_irq(fmc2);

	/* Start the transfer */
	csqcr |= FMC2_CSQCR_CSQSTART;
	writel_relaxed(csqcr, fmc2->io_base + FMC2_CSQCR);

	/* Wait for the end of the sequencer transfer */
	if (!wait_for_completion_timeout(&fmc2->complete,
					 msecs_to_jiffies(1000))) {
		dev_err(fmc2->dev, "seq timeout\n");
		stm32_fmc2_disable_seq_irq(fmc2);
		dmaengine_terminate_all(dma_ch);
		if (!write_data && !raw)
			dmaengine_terminate_all(fmc2->dma_ecc_ch);
		ret = -ETIMEDOUT;
		goto err_unmap_ecc;
	}

	/* Wait for the DMA data transfer completion */
	if (!wait_for_completion_timeout(&fmc2->dma_data_complete,
					 msecs_to_jiffies(100))) {
		dev_err(fmc2->dev, "data DMA timeout\n");
		dmaengine_terminate_all(dma_ch);
		ret = -ETIMEDOUT;
	}

	/* Wait for the DMA ECC transfer completion */
	if (!write_data && !raw) {
		if (!wait_for_completion_timeout(&fmc2->dma_ecc_complete,
						 msecs_to_jiffies(100))) {
			dev_err(fmc2->dev, "ECC DMA timeout\n");
			dmaengine_terminate_all(fmc2->dma_ecc_ch);
			ret = -ETIMEDOUT;
		}
	}

err_unmap_ecc:
	if (!write_data && !raw)
		dma_unmap_sg(fmc2->dev, fmc2->dma_ecc_sg.sgl,
			     eccsteps, dma_data_dir);

err_unmap_data:
	dma_unmap_sg(fmc2->dev, fmc2->dma_data_sg.sgl, eccsteps, dma_data_dir);

	return ret;
}

static int stm32_fmc2_sequencer_write(struct nand_chip *chip,
				      const u8 *buf, int oob_required,
				      int page, int raw)
{
	struct mtd_info *mtd = nand_to_mtd(chip);
	int ret;

	/* Configure the sequencer */
	stm32_fmc2_rw_page_init(chip, page, raw, true);

	/* Write the page */
	ret = stm32_fmc2_xfer(chip, buf, raw, true);
	if (ret)
		return ret;

	/* Write oob */
	if (oob_required) {
		ret = nand_change_write_column_op(chip, mtd->writesize,
						  chip->oob_poi, mtd->oobsize,
						  false);
		if (ret)
			return ret;
	}

	return nand_prog_page_end_op(chip);
}

static int stm32_fmc2_sequencer_write_page(struct nand_chip *chip,
					   const u8 *buf,
					   int oob_required,
					   int page)
{
	int ret;

	/* Select the target */
	ret = stm32_fmc2_select_chip(chip, chip->cur_cs);
	if (ret)
		return ret;

	return stm32_fmc2_sequencer_write(chip, buf, oob_required, page, false);
}

static int stm32_fmc2_sequencer_write_page_raw(struct nand_chip *chip,
					       const u8 *buf,
					       int oob_required,
					       int page)
{
	int ret;

	/* Select the target */
	ret = stm32_fmc2_select_chip(chip, chip->cur_cs);
	if (ret)
		return ret;

	return stm32_fmc2_sequencer_write(chip, buf, oob_required, page, true);
}

/* Get a status indicating which sectors have errors */
static inline u16 stm32_fmc2_get_mapping_status(struct stm32_fmc2_nfc *fmc2)
{
	u32 csqemsr = readl_relaxed(fmc2->io_base + FMC2_CSQEMSR);

	return csqemsr & FMC2_CSQEMSR_SEM;
}

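/*
 * Correct a full page: the ECC DMA channel has stored, per sector, either
 * one HECCR snapshot (Hamming) or the five BCHDSRx snapshots (BCH) in
 * ecc_buf, and FMC2_CSQEMSR flags the sectors that need correction.
 */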
static int stm32_fmc2_sequencer_correct(struct nand_chip *chip, u8 *dat,
					u8 *read_ecc, u8 *calc_ecc)
{
	struct mtd_info *mtd = nand_to_mtd(chip);
	struct stm32_fmc2_nfc *fmc2 = to_stm32_nfc(chip->controller);
	int eccbytes = chip->ecc.bytes;
	int eccsteps = chip->ecc.steps;
	int eccstrength = chip->ecc.strength;
	int i, s, eccsize = chip->ecc.size;
	u32 *ecc_sta = (u32 *)fmc2->ecc_buf;
	u16 sta_map = stm32_fmc2_get_mapping_status(fmc2);
	unsigned int max_bitflips = 0;

	for (i = 0, s = 0; s < eccsteps; s++, i += eccbytes, dat += eccsize) {
		int stat = 0;

		if (eccstrength == FMC2_ECC_HAM) {
			/* Ecc_sta = FMC2_HECCR */
			if (sta_map & BIT(s)) {
				stm32_fmc2_ham_set_ecc(*ecc_sta, &calc_ecc[i]);
				stat = stm32_fmc2_ham_correct(chip, dat,
							      &read_ecc[i],
							      &calc_ecc[i]);
			}
			ecc_sta++;
		} else {
			/*
			 * Ecc_sta[0] = FMC2_BCHDSR0
			 * Ecc_sta[1] = FMC2_BCHDSR1
			 * Ecc_sta[2] = FMC2_BCHDSR2
			 * Ecc_sta[3] = FMC2_BCHDSR3
			 * Ecc_sta[4] = FMC2_BCHDSR4
			 */
			if (sta_map & BIT(s))
				stat = stm32_fmc2_bch_decode(eccsize, dat,
							     ecc_sta);
			ecc_sta += 5;
		}

		if (stat == -EBADMSG)
			/* Check for empty pages with bitflips */
			stat = nand_check_erased_ecc_chunk(dat, eccsize,
							   &read_ecc[i],
							   eccbytes,
							   NULL, 0,
							   eccstrength);

		if (stat < 0) {
			mtd->ecc_stats.failed++;
		} else {
			mtd->ecc_stats.corrected += stat;
			max_bitflips = max_t(unsigned int, max_bitflips, stat);
		}
	}

	return max_bitflips;
}

static int stm32_fmc2_sequencer_read_page(struct nand_chip *chip, u8 *buf,
					  int oob_required, int page)
{
	struct mtd_info *mtd = nand_to_mtd(chip);
	struct stm32_fmc2_nfc *fmc2 = to_stm32_nfc(chip->controller);
	u8 *ecc_calc = chip->ecc.calc_buf;
	u8 *ecc_code = chip->ecc.code_buf;
	u16 sta_map;
	int ret;

	/* Select the target */
	ret = stm32_fmc2_select_chip(chip, chip->cur_cs);
	if (ret)
		return ret;

	/* Configure the sequencer */
	stm32_fmc2_rw_page_init(chip, page, 0, false);

	/* Read the page */
	ret = stm32_fmc2_xfer(chip, buf, 0, false);
	if (ret)
		return ret;

	sta_map = stm32_fmc2_get_mapping_status(fmc2);

	/* Check whether any errors happened */
	if (likely(!sta_map)) {
		if (oob_required)
			return nand_change_read_column_op(chip, mtd->writesize,
							  chip->oob_poi,
							  mtd->oobsize, false);

		return 0;
	}

	/* Read oob */
	ret = nand_change_read_column_op(chip, mtd->writesize,
					 chip->oob_poi, mtd->oobsize, false);
	if (ret)
		return ret;

	ret = mtd_ooblayout_get_eccbytes(mtd, ecc_code, chip->oob_poi, 0,
					 chip->ecc.total);
	if (ret)
		return ret;

	/* Correct data */
	return chip->ecc.correct(chip, buf, ecc_code, ecc_calc);
}

static int stm32_fmc2_sequencer_read_page_raw(struct nand_chip *chip, u8 *buf,
					      int oob_required, int page)
{
	struct mtd_info *mtd = nand_to_mtd(chip);
	int ret;

	/* Select the target */
	ret = stm32_fmc2_select_chip(chip, chip->cur_cs);
	if (ret)
		return ret;

	/* Configure the sequencer */
	stm32_fmc2_rw_page_init(chip, page, 1, false);

	/* Read the page */
	ret = stm32_fmc2_xfer(chip, buf, 1, false);
	if (ret)
		return ret;

	/* Read oob */
	if (oob_required)
		return nand_change_read_column_op(chip, mtd->writesize,
						  chip->oob_poi, mtd->oobsize,
						  false);

	return 0;
}

static irqreturn_t stm32_fmc2_irq(int irq, void *dev_id)
{
	struct stm32_fmc2_nfc *fmc2 = (struct stm32_fmc2_nfc *)dev_id;

	stm32_fmc2_disable_seq_irq(fmc2);

	complete(&fmc2->complete);

	return IRQ_HANDLED;
}

static void stm32_fmc2_read_data(struct nand_chip *chip, void *buf,
				 unsigned int len, bool force_8bit)
{
	struct stm32_fmc2_nfc *fmc2 = to_stm32_nfc(chip->controller);
	void __iomem *io_addr_r = fmc2->data_base[fmc2->cs_sel];

	if (force_8bit && chip->options & NAND_BUSWIDTH_16)
		/* Reconfigure bus width to 8-bit */
		stm32_fmc2_set_buswidth_16(fmc2, false);

	if (!IS_ALIGNED((uintptr_t)buf, sizeof(u32))) {
		if (!IS_ALIGNED((uintptr_t)buf, sizeof(u16)) && len) {
			*(u8 *)buf = readb_relaxed(io_addr_r);
			buf += sizeof(u8);
			len -= sizeof(u8);
		}

		if (!IS_ALIGNED((uintptr_t)buf, sizeof(u32)) &&
		    len >= sizeof(u16)) {
			*(u16 *)buf = readw_relaxed(io_addr_r);
			buf += sizeof(u16);
			len -= sizeof(u16);
		}
	}

	/* Buf is aligned */
	while (len >= sizeof(u32)) {
		*(u32 *)buf = readl_relaxed(io_addr_r);
		buf += sizeof(u32);
		len -= sizeof(u32);
	}

	/* Read remaining bytes */
	if (len >= sizeof(u16)) {
		*(u16 *)buf = readw_relaxed(io_addr_r);
		buf += sizeof(u16);
		len -= sizeof(u16);
	}

	if (len)
		*(u8 *)buf = readb_relaxed(io_addr_r);

	if (force_8bit && chip->options & NAND_BUSWIDTH_16)
		/* Reconfigure bus width to 16-bit */
		stm32_fmc2_set_buswidth_16(fmc2, true);
}

static void stm32_fmc2_write_data(struct nand_chip *chip, const void *buf,
				  unsigned int len, bool force_8bit)
{
	struct stm32_fmc2_nfc *fmc2 = to_stm32_nfc(chip->controller);
	void __iomem *io_addr_w = fmc2->data_base[fmc2->cs_sel];

	if (force_8bit && chip->options & NAND_BUSWIDTH_16)
		/* Reconfigure bus width to 8-bit */
		stm32_fmc2_set_buswidth_16(fmc2, false);

	if (!IS_ALIGNED((uintptr_t)buf, sizeof(u32))) {
		if (!IS_ALIGNED((uintptr_t)buf, sizeof(u16)) && len) {
			writeb_relaxed(*(u8 *)buf, io_addr_w);
			buf += sizeof(u8);
			len -= sizeof(u8);
		}

		if (!IS_ALIGNED((uintptr_t)buf, sizeof(u32)) &&
		    len >= sizeof(u16)) {
			writew_relaxed(*(u16 *)buf, io_addr_w);
			buf += sizeof(u16);
			len -= sizeof(u16);
		}
	}

	/* Buf is aligned */
	while (len >= sizeof(u32)) {
		writel_relaxed(*(u32 *)buf, io_addr_w);
		buf += sizeof(u32);
		len -= sizeof(u32);
	}

	/* Write remaining bytes */
	if (len >= sizeof(u16)) {
		writew_relaxed(*(u16 *)buf, io_addr_w);
		buf += sizeof(u16);
		len -= sizeof(u16);
	}

	if (len)
		writeb_relaxed(*(u8 *)buf, io_addr_w);

	if (force_8bit && chip->options & NAND_BUSWIDTH_16)
		/* Reconfigure bus width to 16-bit */
		stm32_fmc2_set_buswidth_16(fmc2, true);
}

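/*
 * Execute a raw NAND operation: command and address cycles are written to
 * the dedicated command/address windows of the selected CS, data cycles go
 * through the common data window, and ready/busy waits are handled by
 * software status polling (nand_soft_waitrdy).
 */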
static int stm32_fmc2_exec_op(struct nand_chip *chip,
			      const struct nand_operation *op,
			      bool check_only)
{
	struct stm32_fmc2_nfc *fmc2 = to_stm32_nfc(chip->controller);
	const struct nand_op_instr *instr = NULL;
	unsigned int op_id, i;
	int ret;

	ret = stm32_fmc2_select_chip(chip, op->cs);
	if (ret)
		return ret;

	if (check_only)
		return ret;

	for (op_id = 0; op_id < op->ninstrs; op_id++) {
		instr = &op->instrs[op_id];

		switch (instr->type) {
		case NAND_OP_CMD_INSTR:
			writeb_relaxed(instr->ctx.cmd.opcode,
				       fmc2->cmd_base[fmc2->cs_sel]);
			break;

		case NAND_OP_ADDR_INSTR:
			for (i = 0; i < instr->ctx.addr.naddrs; i++)
				writeb_relaxed(instr->ctx.addr.addrs[i],
					       fmc2->addr_base[fmc2->cs_sel]);
			break;

		case NAND_OP_DATA_IN_INSTR:
			stm32_fmc2_read_data(chip, instr->ctx.data.buf.in,
					     instr->ctx.data.len,
					     instr->ctx.data.force_8bit);
			break;

		case NAND_OP_DATA_OUT_INSTR:
			stm32_fmc2_write_data(chip, instr->ctx.data.buf.out,
					      instr->ctx.data.len,
					      instr->ctx.data.force_8bit);
			break;

		case NAND_OP_WAITRDY_INSTR:
			ret = nand_soft_waitrdy(chip,
						instr->ctx.waitrdy.timeout_ms);
			break;
		}
	}

	return ret;
}

/* Controller initialization */
static void stm32_fmc2_init(struct stm32_fmc2_nfc *fmc2)
{
	u32 pcr = readl_relaxed(fmc2->io_base + FMC2_PCR);
	u32 bcr1 = readl_relaxed(fmc2->io_base + FMC2_BCR1);

	/* Set CS used to undefined */
	fmc2->cs_sel = -1;

	/* Enable wait feature and nand flash memory bank */
	pcr |= FMC2_PCR_PWAITEN;
	pcr |= FMC2_PCR_PBKEN;

	/* Set bus width to 8-bit mode for identification */
	pcr &= ~FMC2_PCR_PWID_MASK;

	/* ECC logic is disabled */
	pcr &= ~FMC2_PCR_ECCEN;

	/* Default mode */
	pcr &= ~FMC2_PCR_ECCALG;
	pcr &= ~FMC2_PCR_BCHECC;
	pcr &= ~FMC2_PCR_WEN;

	/* Set default ECC sector size */
	pcr &= ~FMC2_PCR_ECCSS_MASK;
	pcr |= FMC2_PCR_ECCSS(FMC2_PCR_ECCSS_2048);

	/* Set default tclr/tar timings */
	pcr &= ~FMC2_PCR_TCLR_MASK;
	pcr |= FMC2_PCR_TCLR(FMC2_PCR_TCLR_DEFAULT);
	pcr &= ~FMC2_PCR_TAR_MASK;
	pcr |= FMC2_PCR_TAR(FMC2_PCR_TAR_DEFAULT);

	/* Enable FMC2 controller */
	bcr1 |= FMC2_BCR1_FMC2EN;

	writel_relaxed(bcr1, fmc2->io_base + FMC2_BCR1);
	writel_relaxed(pcr, fmc2->io_base + FMC2_PCR);
	writel_relaxed(FMC2_PMEM_DEFAULT, fmc2->io_base + FMC2_PMEM);
	writel_relaxed(FMC2_PATT_DEFAULT, fmc2->io_base + FMC2_PATT);
}

/* Controller timings */
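/*
 * All values below are handled in picoseconds: hclkp is the HCLK period in
 * ps, the nand_sdr_timings fields are expressed in ps, and FMC2_TIO and
 * FMC2_TSYNC are fixed delays, also in ps.
 */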
static void stm32_fmc2_calc_timings(struct nand_chip *chip,
				    const struct nand_sdr_timings *sdrt)
{
	struct stm32_fmc2_nfc *fmc2 = to_stm32_nfc(chip->controller);
	struct stm32_fmc2_nand *nand = to_fmc2_nand(chip);
	struct stm32_fmc2_timings *tims = &nand->timings;
	unsigned long hclk = clk_get_rate(fmc2->clk);
	unsigned long hclkp = NSEC_PER_SEC / (hclk / 1000);
	int tar, tclr, thiz, twait, tset_mem, tset_att, thold_mem, thold_att;

	tar = hclkp;
	if (tar < sdrt->tAR_min)
		tar = sdrt->tAR_min;
	tims->tar = DIV_ROUND_UP(tar, hclkp) - 1;
	if (tims->tar > FMC2_PCR_TIMING_MASK)
		tims->tar = FMC2_PCR_TIMING_MASK;

	tclr = hclkp;
	if (tclr < sdrt->tCLR_min)
		tclr = sdrt->tCLR_min;
	tims->tclr = DIV_ROUND_UP(tclr, hclkp) - 1;
	if (tims->tclr > FMC2_PCR_TIMING_MASK)
		tims->tclr = FMC2_PCR_TIMING_MASK;

	tims->thiz = FMC2_THIZ;
	thiz = (tims->thiz + 1) * hclkp;

	/*
	 * tWAIT > tRP
	 * tWAIT > tWP
	 * tWAIT > tREA + tIO
	 */
	twait = hclkp;
	if (twait < sdrt->tRP_min)
		twait = sdrt->tRP_min;
	if (twait < sdrt->tWP_min)
		twait = sdrt->tWP_min;
	if (twait < sdrt->tREA_max + FMC2_TIO)
		twait = sdrt->tREA_max + FMC2_TIO;
	tims->twait = DIV_ROUND_UP(twait, hclkp);
	if (tims->twait == 0)
		tims->twait = 1;
	else if (tims->twait > FMC2_PMEM_PATT_TIMING_MASK)
		tims->twait = FMC2_PMEM_PATT_TIMING_MASK;

	/*
	 * tSETUP_MEM > tCS - tWAIT
	 * tSETUP_MEM > tALS - tWAIT
	 * tSETUP_MEM > tDS - (tWAIT - tHIZ)
	 */
	tset_mem = hclkp;
	if (sdrt->tCS_min > twait && (tset_mem < sdrt->tCS_min - twait))
		tset_mem = sdrt->tCS_min - twait;
	if (sdrt->tALS_min > twait && (tset_mem < sdrt->tALS_min - twait))
		tset_mem = sdrt->tALS_min - twait;
	if (twait > thiz && (sdrt->tDS_min > twait - thiz) &&
	    (tset_mem < sdrt->tDS_min - (twait - thiz)))
		tset_mem = sdrt->tDS_min - (twait - thiz);
	tims->tset_mem = DIV_ROUND_UP(tset_mem, hclkp);
	if (tims->tset_mem == 0)
		tims->tset_mem = 1;
	else if (tims->tset_mem > FMC2_PMEM_PATT_TIMING_MASK)
		tims->tset_mem = FMC2_PMEM_PATT_TIMING_MASK;

	/*
	 * tHOLD_MEM > tCH
	 * tHOLD_MEM > tREH - tSETUP_MEM
	 * tHOLD_MEM > max(tRC, tWC) - (tSETUP_MEM + tWAIT)
	 */
	thold_mem = hclkp;
	if (thold_mem < sdrt->tCH_min)
		thold_mem = sdrt->tCH_min;
	if (sdrt->tREH_min > tset_mem &&
	    (thold_mem < sdrt->tREH_min - tset_mem))
		thold_mem = sdrt->tREH_min - tset_mem;
	if ((sdrt->tRC_min > tset_mem + twait) &&
	    (thold_mem < sdrt->tRC_min - (tset_mem + twait)))
		thold_mem = sdrt->tRC_min - (tset_mem + twait);
	if ((sdrt->tWC_min > tset_mem + twait) &&
	    (thold_mem < sdrt->tWC_min - (tset_mem + twait)))
		thold_mem = sdrt->tWC_min - (tset_mem + twait);
	tims->thold_mem = DIV_ROUND_UP(thold_mem, hclkp);
	if (tims->thold_mem == 0)
		tims->thold_mem = 1;
	else if (tims->thold_mem > FMC2_PMEM_PATT_TIMING_MASK)
		tims->thold_mem = FMC2_PMEM_PATT_TIMING_MASK;

	/*
	 * tSETUP_ATT > tCS - tWAIT
	 * tSETUP_ATT > tCLS - tWAIT
	 * tSETUP_ATT > tALS - tWAIT
	 * tSETUP_ATT > tRHW - tHOLD_MEM
	 * tSETUP_ATT > tDS - (tWAIT - tHIZ)
	 */
	tset_att = hclkp;
	if (sdrt->tCS_min > twait && (tset_att < sdrt->tCS_min - twait))
		tset_att = sdrt->tCS_min - twait;
	if (sdrt->tCLS_min > twait && (tset_att < sdrt->tCLS_min - twait))
		tset_att = sdrt->tCLS_min - twait;
	if (sdrt->tALS_min > twait && (tset_att < sdrt->tALS_min - twait))
		tset_att = sdrt->tALS_min - twait;
	if (sdrt->tRHW_min > thold_mem &&
	    (tset_att < sdrt->tRHW_min - thold_mem))
		tset_att = sdrt->tRHW_min - thold_mem;
	if (twait > thiz && (sdrt->tDS_min > twait - thiz) &&
	    (tset_att < sdrt->tDS_min - (twait - thiz)))
		tset_att = sdrt->tDS_min - (twait - thiz);
	tims->tset_att = DIV_ROUND_UP(tset_att, hclkp);
	if (tims->tset_att == 0)
		tims->tset_att = 1;
	else if (tims->tset_att > FMC2_PMEM_PATT_TIMING_MASK)
		tims->tset_att = FMC2_PMEM_PATT_TIMING_MASK;

	/*
	 * tHOLD_ATT > tALH
	 * tHOLD_ATT > tCH
	 * tHOLD_ATT > tCLH
	 * tHOLD_ATT > tCOH
	 * tHOLD_ATT > tDH
	 * tHOLD_ATT > tWB + tIO + tSYNC - tSETUP_MEM
	 * tHOLD_ATT > tADL - tSETUP_MEM
	 * tHOLD_ATT > tWH - tSETUP_MEM
	 * tHOLD_ATT > tWHR - tSETUP_MEM
	 * tHOLD_ATT > tRC - (tSETUP_ATT + tWAIT)
	 * tHOLD_ATT > tWC - (tSETUP_ATT + tWAIT)
	 */
	thold_att = hclkp;
	if (thold_att < sdrt->tALH_min)
		thold_att = sdrt->tALH_min;
	if (thold_att < sdrt->tCH_min)
		thold_att = sdrt->tCH_min;
	if (thold_att < sdrt->tCLH_min)
		thold_att = sdrt->tCLH_min;
	if (thold_att < sdrt->tCOH_min)
		thold_att = sdrt->tCOH_min;
	if (thold_att < sdrt->tDH_min)
		thold_att = sdrt->tDH_min;
	if ((sdrt->tWB_max + FMC2_TIO + FMC2_TSYNC > tset_mem) &&
	    (thold_att < sdrt->tWB_max + FMC2_TIO + FMC2_TSYNC - tset_mem))
		thold_att = sdrt->tWB_max + FMC2_TIO + FMC2_TSYNC - tset_mem;
	if (sdrt->tADL_min > tset_mem &&
	    (thold_att < sdrt->tADL_min - tset_mem))
		thold_att = sdrt->tADL_min - tset_mem;
	if (sdrt->tWH_min > tset_mem &&
	    (thold_att < sdrt->tWH_min - tset_mem))
		thold_att = sdrt->tWH_min - tset_mem;
	if (sdrt->tWHR_min > tset_mem &&
	    (thold_att < sdrt->tWHR_min - tset_mem))
		thold_att = sdrt->tWHR_min - tset_mem;
	if ((sdrt->tRC_min > tset_att + twait) &&
	    (thold_att < sdrt->tRC_min - (tset_att + twait)))
		thold_att = sdrt->tRC_min - (tset_att + twait);
	if ((sdrt->tWC_min > tset_att + twait) &&
	    (thold_att < sdrt->tWC_min - (tset_att + twait)))
		thold_att = sdrt->tWC_min - (tset_att + twait);
	tims->thold_att = DIV_ROUND_UP(thold_att, hclkp);
	if (tims->thold_att == 0)
		tims->thold_att = 1;
	else if (tims->thold_att > FMC2_PMEM_PATT_TIMING_MASK)
		tims->thold_att = FMC2_PMEM_PATT_TIMING_MASK;
}

static int stm32_fmc2_setup_interface(struct nand_chip *chip, int chipnr,
				      const struct nand_data_interface *conf)
{
	const struct nand_sdr_timings *sdrt;

	sdrt = nand_get_sdr_timings(conf);
	if (IS_ERR(sdrt))
		return PTR_ERR(sdrt);

	if (chipnr == NAND_DATA_IFACE_CHECK_ONLY)
		return 0;

	stm32_fmc2_calc_timings(chip, sdrt);

	/* Apply timings */
	stm32_fmc2_timings_init(chip);

	return 0;
}

/* DMA configuration */
static int stm32_fmc2_dma_setup(struct stm32_fmc2_nfc *fmc2)
{
	int ret;

	fmc2->dma_tx_ch = dma_request_slave_channel(fmc2->dev, "tx");
	fmc2->dma_rx_ch = dma_request_slave_channel(fmc2->dev, "rx");
	fmc2->dma_ecc_ch = dma_request_slave_channel(fmc2->dev, "ecc");

	if (fmc2->dma_ecc_ch) {
		ret = sg_alloc_table(&fmc2->dma_ecc_sg, FMC2_MAX_SG,
				     GFP_KERNEL);
		if (ret)
			return ret;

		/* Allocate a buffer to store ECC status registers */
		fmc2->ecc_buf = devm_kzalloc(fmc2->dev,
					     FMC2_MAX_ECC_BUF_LEN,
					     GFP_KERNEL);
		if (!fmc2->ecc_buf)
			return -ENOMEM;
	} else {
		dev_err(fmc2->dev, "ECC DMA not defined in the device tree\n");
		return -ENOENT;
	}

	if (fmc2->dma_tx_ch && fmc2->dma_rx_ch) {
		ret = sg_alloc_table(&fmc2->dma_data_sg, FMC2_MAX_SG,
				     GFP_KERNEL);
		if (ret)
			return ret;

		init_completion(&fmc2->dma_data_complete);
		init_completion(&fmc2->dma_ecc_complete);
	} else {
		dev_err(fmc2->dev, "rx/tx DMA not defined in the device tree\n");
		return -ENOENT;
	}

	return 0;
}

/* NAND callbacks setup */
static void stm32_fmc2_nand_callbacks_setup(struct nand_chip *chip)
{
	/* Specific callbacks to read/write a page */
	chip->ecc.correct = stm32_fmc2_sequencer_correct;
	chip->ecc.write_page = stm32_fmc2_sequencer_write_page;
	chip->ecc.read_page = stm32_fmc2_sequencer_read_page;
	chip->ecc.write_page_raw = stm32_fmc2_sequencer_write_page_raw;
	chip->ecc.read_page_raw = stm32_fmc2_sequencer_read_page_raw;

	/* Specific configurations depending on the algo used */
	if (chip->ecc.strength == FMC2_ECC_HAM)
		chip->ecc.bytes = chip->options & NAND_BUSWIDTH_16 ? 4 : 3;
	else if (chip->ecc.strength == FMC2_ECC_BCH8)
		chip->ecc.bytes = chip->options & NAND_BUSWIDTH_16 ? 14 : 13;
	else
		chip->ecc.bytes = chip->options & NAND_BUSWIDTH_16 ? 8 : 7;
}

/* FMC2 layout */
static int stm32_fmc2_nand_ooblayout_ecc(struct mtd_info *mtd, int section,
					 struct mtd_oob_region *oobregion)
{
	struct nand_chip *chip = mtd_to_nand(mtd);
	struct nand_ecc_ctrl *ecc = &chip->ecc;

	if (section)
		return -ERANGE;

	oobregion->length = ecc->total;
	oobregion->offset = FMC2_BBM_LEN;

	return 0;
}

static int stm32_fmc2_nand_ooblayout_free(struct mtd_info *mtd, int section,
					  struct mtd_oob_region *oobregion)
{
	struct nand_chip *chip = mtd_to_nand(mtd);
	struct nand_ecc_ctrl *ecc = &chip->ecc;

	if (section)
		return -ERANGE;

	oobregion->length = mtd->oobsize - ecc->total - FMC2_BBM_LEN;
	oobregion->offset = ecc->total + FMC2_BBM_LEN;

	return 0;
}

static const struct mtd_ooblayout_ops stm32_fmc2_nand_ooblayout_ops = {
	.ecc = stm32_fmc2_nand_ooblayout_ecc,
	.free = stm32_fmc2_nand_ooblayout_free,
};

/* FMC2 caps */
static int stm32_fmc2_calc_ecc_bytes(int step_size, int strength)
{
	/* Hamming */
	if (strength == FMC2_ECC_HAM)
		return 4;

	/* BCH8 */
	if (strength == FMC2_ECC_BCH8)
		return 14;

	/* BCH4 */
	return 8;
}

NAND_ECC_CAPS_SINGLE(stm32_fmc2_ecc_caps, stm32_fmc2_calc_ecc_bytes,
		     FMC2_ECC_STEP_SIZE,
		     FMC2_ECC_HAM, FMC2_ECC_BCH4, FMC2_ECC_BCH8);

/* FMC2 controller ops */
static int stm32_fmc2_attach_chip(struct nand_chip *chip)
{
	struct stm32_fmc2_nfc *fmc2 = to_stm32_nfc(chip->controller);
	struct mtd_info *mtd = nand_to_mtd(chip);
	int ret;

	/*
	 * Only NAND_ECC_HW mode is actually supported
	 * Hamming => ecc.strength = 1
	 * BCH4 => ecc.strength = 4
	 * BCH8 => ecc.strength = 8
	 * ECC sector size = 512
	 */
	if (chip->ecc.mode != NAND_ECC_HW) {
		dev_err(fmc2->dev, "nand_ecc_mode is not well defined in the DT\n");
		return -EINVAL;
	}

	ret = nand_ecc_choose_conf(chip, &stm32_fmc2_ecc_caps,
				   mtd->oobsize - FMC2_BBM_LEN);
	if (ret) {
		dev_err(fmc2->dev, "no valid ECC settings found\n");
		return ret;
	}

	if (mtd->writesize / chip->ecc.size > FMC2_MAX_SG) {
		dev_err(fmc2->dev, "nand page size is not supported\n");
		return -EINVAL;
	}

	if (chip->bbt_options & NAND_BBT_USE_FLASH)
		chip->bbt_options |= NAND_BBT_NO_OOB;

	/* NAND callbacks setup */
	stm32_fmc2_nand_callbacks_setup(chip);

	/* Define ECC layout */
	mtd_set_ooblayout(mtd, &stm32_fmc2_nand_ooblayout_ops);

	/* Configure bus width to 16-bit */
	if (chip->options & NAND_BUSWIDTH_16)
		stm32_fmc2_set_buswidth_16(fmc2, true);

	return 0;
}

static const struct nand_controller_ops stm32_fmc2_nand_controller_ops = {
	.attach_chip = stm32_fmc2_attach_chip,
	.exec_op = stm32_fmc2_exec_op,
	.setup_data_interface = stm32_fmc2_setup_interface,
};

/* FMC2 probe */
static int stm32_fmc2_parse_child(struct stm32_fmc2_nfc *fmc2,
				  struct device_node *dn)
{
	struct stm32_fmc2_nand *nand = &fmc2->nand;
	u32 cs;
	int ret, i;

	if (!of_get_property(dn, "reg", &nand->ncs))
		return -EINVAL;

	nand->ncs /= sizeof(u32);
	if (!nand->ncs) {
		dev_err(fmc2->dev, "invalid reg property size\n");
		return -EINVAL;
	}

	for (i = 0; i < nand->ncs; i++) {
		ret = of_property_read_u32_index(dn, "reg", i, &cs);
		if (ret) {
			dev_err(fmc2->dev, "could not retrieve reg property: %d\n",
				ret);
			return ret;
		}

		/* A chip select index must be below FMC2_MAX_CE */
		if (cs >= FMC2_MAX_CE) {
			dev_err(fmc2->dev, "invalid reg value: %d\n", cs);
			return -EINVAL;
		}

		if (fmc2->cs_assigned & BIT(cs)) {
			dev_err(fmc2->dev, "cs already assigned: %d\n", cs);
			return -EINVAL;
		}

		fmc2->cs_assigned |= BIT(cs);
		nand->cs_used[i] = cs;
	}

	nand_set_flash_node(&nand->chip, dn);

	return 0;
}

static int stm32_fmc2_parse_dt(struct stm32_fmc2_nfc *fmc2)
{
	struct device_node *dn = fmc2->dev->of_node;
	struct device_node *child;
	int nchips = of_get_child_count(dn);
	int ret = 0;

	if (!nchips) {
		dev_err(fmc2->dev, "NAND chip not defined\n");
		return -EINVAL;
	}

	if (nchips > 1) {
		dev_err(fmc2->dev, "too many NAND chips defined\n");
		return -EINVAL;
	}

	for_each_child_of_node(dn, child) {
		ret = stm32_fmc2_parse_child(fmc2, child);
		if (ret < 0) {
			of_node_put(child);
			return ret;
		}
	}

	return ret;
}

static int stm32_fmc2_probe(struct platform_device *pdev)
{
	struct device *dev = &pdev->dev;
	struct reset_control *rstc;
	struct stm32_fmc2_nfc *fmc2;
	struct stm32_fmc2_nand *nand;
	struct resource *res;
	struct mtd_info *mtd;
	struct nand_chip *chip;
	int chip_cs, mem_region, ret, irq;

	fmc2 = devm_kzalloc(dev, sizeof(*fmc2), GFP_KERNEL);
	if (!fmc2)
		return -ENOMEM;

	fmc2->dev = dev;
	nand_controller_init(&fmc2->base);
	fmc2->base.ops = &stm32_fmc2_nand_controller_ops;

	ret = stm32_fmc2_parse_dt(fmc2);
	if (ret)
		return ret;

	res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	fmc2->io_base = devm_ioremap_resource(dev, res);
	if (IS_ERR(fmc2->io_base))
		return PTR_ERR(fmc2->io_base);

	fmc2->io_phys_addr = res->start;

	for (chip_cs = 0, mem_region = 1; chip_cs < FMC2_MAX_CE;
	     chip_cs++, mem_region += 3) {
		if (!(fmc2->cs_assigned & BIT(chip_cs)))
			continue;

		res = platform_get_resource(pdev, IORESOURCE_MEM, mem_region);
		fmc2->data_base[chip_cs] = devm_ioremap_resource(dev, res);
		if (IS_ERR(fmc2->data_base[chip_cs]))
			return PTR_ERR(fmc2->data_base[chip_cs]);

		fmc2->data_phys_addr[chip_cs] = res->start;

		res = platform_get_resource(pdev, IORESOURCE_MEM,
					    mem_region + 1);
		fmc2->cmd_base[chip_cs] = devm_ioremap_resource(dev, res);
		if (IS_ERR(fmc2->cmd_base[chip_cs]))
			return PTR_ERR(fmc2->cmd_base[chip_cs]);

		res = platform_get_resource(pdev, IORESOURCE_MEM,
					    mem_region + 2);
		fmc2->addr_base[chip_cs] = devm_ioremap_resource(dev, res);
		if (IS_ERR(fmc2->addr_base[chip_cs]))
			return PTR_ERR(fmc2->addr_base[chip_cs]);
	}

	irq = platform_get_irq(pdev, 0);
	if (irq < 0)
		return irq;

	ret = devm_request_irq(dev, irq, stm32_fmc2_irq, 0,
			       dev_name(dev), fmc2);
	if (ret) {
		dev_err(dev, "failed to request irq\n");
		return ret;
	}

	init_completion(&fmc2->complete);

	fmc2->clk = devm_clk_get(dev, NULL);
	if (IS_ERR(fmc2->clk))
		return PTR_ERR(fmc2->clk);

	ret = clk_prepare_enable(fmc2->clk);
	if (ret) {
		dev_err(dev, "cannot enable the clock\n");
		return ret;
	}

	rstc = devm_reset_control_get(dev, NULL);
	if (!IS_ERR(rstc)) {
		reset_control_assert(rstc);
		reset_control_deassert(rstc);
	}

	/* DMA setup */
	ret = stm32_fmc2_dma_setup(fmc2);
	if (ret)
		return ret;

	/* FMC2 init routine */
	stm32_fmc2_init(fmc2);

	nand = &fmc2->nand;
	chip = &nand->chip;
	mtd = nand_to_mtd(chip);
	mtd->dev.parent = dev;

	chip->controller = &fmc2->base;
	chip->options |= NAND_BUSWIDTH_AUTO | NAND_NO_SUBPAGE_WRITE |
			 NAND_USE_BOUNCE_BUFFER;

	/* Default ECC settings */
	chip->ecc.mode = NAND_ECC_HW;
	chip->ecc.size = FMC2_ECC_STEP_SIZE;
	chip->ecc.strength = FMC2_ECC_BCH8;

	/* Scan to find existence of the device */
	ret = nand_scan(chip, nand->ncs);
	if (ret)
		goto err_scan;

	ret = mtd_device_register(mtd, NULL, 0);
	if (ret)
		goto err_device_register;

	platform_set_drvdata(pdev, fmc2);

	return 0;

err_device_register:
	nand_cleanup(chip);

err_scan:
	if (fmc2->dma_ecc_ch)
		dma_release_channel(fmc2->dma_ecc_ch);
	if (fmc2->dma_tx_ch)
		dma_release_channel(fmc2->dma_tx_ch);
	if (fmc2->dma_rx_ch)
		dma_release_channel(fmc2->dma_rx_ch);

	sg_free_table(&fmc2->dma_data_sg);
	sg_free_table(&fmc2->dma_ecc_sg);

	clk_disable_unprepare(fmc2->clk);

	return ret;
}

static int stm32_fmc2_remove(struct platform_device *pdev)
{
	struct stm32_fmc2_nfc *fmc2 = platform_get_drvdata(pdev);
	struct stm32_fmc2_nand *nand = &fmc2->nand;

	nand_release(&nand->chip);

	if (fmc2->dma_ecc_ch)
		dma_release_channel(fmc2->dma_ecc_ch);
	if (fmc2->dma_tx_ch)
		dma_release_channel(fmc2->dma_tx_ch);
	if (fmc2->dma_rx_ch)
		dma_release_channel(fmc2->dma_rx_ch);

	sg_free_table(&fmc2->dma_data_sg);
	sg_free_table(&fmc2->dma_ecc_sg);

	clk_disable_unprepare(fmc2->clk);

	return 0;
}

static int __maybe_unused stm32_fmc2_suspend(struct device *dev)
{
	struct stm32_fmc2_nfc *fmc2 = dev_get_drvdata(dev);

	clk_disable_unprepare(fmc2->clk);

	pinctrl_pm_select_sleep_state(dev);

	return 0;
}

static int __maybe_unused stm32_fmc2_resume(struct device *dev)
{
	struct stm32_fmc2_nfc *fmc2 = dev_get_drvdata(dev);
	struct stm32_fmc2_nand *nand = &fmc2->nand;
	int chip_cs, ret;

	pinctrl_pm_select_default_state(dev);

	ret = clk_prepare_enable(fmc2->clk);
	if (ret) {
		dev_err(dev, "cannot enable the clock\n");
		return ret;
	}

	stm32_fmc2_init(fmc2);

	for (chip_cs = 0; chip_cs < FMC2_MAX_CE; chip_cs++) {
		if (!(fmc2->cs_assigned & BIT(chip_cs)))
			continue;

		nand_reset(&nand->chip, chip_cs);
	}

	return 0;
}

static SIMPLE_DEV_PM_OPS(stm32_fmc2_pm_ops, stm32_fmc2_suspend,
			 stm32_fmc2_resume);

static const struct of_device_id stm32_fmc2_match[] = {
	{.compatible = "st,stm32mp15-fmc2"},
	{}
};
MODULE_DEVICE_TABLE(of, stm32_fmc2_match);

static struct platform_driver stm32_fmc2_driver = {
	.probe = stm32_fmc2_probe,
	.remove = stm32_fmc2_remove,
	.driver = {
		.name = "stm32_fmc2_nand",
		.of_match_table = stm32_fmc2_match,
		.pm = &stm32_fmc2_pm_ops,
	},
};
module_platform_driver(stm32_fmc2_driver);

MODULE_ALIAS("platform:stm32_fmc2_nand");
MODULE_AUTHOR("Christophe Kerello <christophe.kerello@st.com>");
MODULE_DESCRIPTION("STMicroelectronics STM32 FMC2 nand driver");
MODULE_LICENSE("GPL v2");