1// SPDX-License-Identifier: GPL-2.0
2/*
3 * Copyright (C) STMicroelectronics 2018
4 * Author: Christophe Kerello <christophe.kerello@st.com>
5 */
6
7#include <linux/clk.h>
8#include <linux/dmaengine.h>
9#include <linux/dma-mapping.h>
10#include <linux/errno.h>
11#include <linux/interrupt.h>
12#include <linux/iopoll.h>
13#include <linux/module.h>
14#include <linux/mtd/rawnand.h>
15#include <linux/pinctrl/consumer.h>
16#include <linux/platform_device.h>
17#include <linux/reset.h>
18
19/* Bad block marker length */
20#define FMC2_BBM_LEN 2
21
22/* ECC step size */
23#define FMC2_ECC_STEP_SIZE 512
24
25/* BCHDSRx registers length */
26#define FMC2_BCHDSRS_LEN 20
27
28/* HECCR length */
29#define FMC2_HECCR_LEN 4
30
31/* Max requests done for an 8k NAND page size */
32#define FMC2_MAX_SG 16
33
34/* Max chip enable */
35#define FMC2_MAX_CE 2
36
37/* Max ECC buffer length */
38#define FMC2_MAX_ECC_BUF_LEN (FMC2_BCHDSRS_LEN * FMC2_MAX_SG)
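/*
 * Note: with FMC2_BCHDSRS_LEN = 20 (5 x 32-bit BCHDSRx registers) and
 * FMC2_MAX_SG = 16 sectors, this buffer is 320 bytes, i.e. enough room for
 * the ECC status of every 512-byte sector of an 8k page.
 */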
39
40/* Timings */
41#define FMC2_THIZ 1
42#define FMC2_TIO 8000
43#define FMC2_TSYNC 3000
44#define FMC2_PCR_TIMING_MASK 0xf
45#define FMC2_PMEM_PATT_TIMING_MASK 0xff
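/*
 * FMC2_TIO and FMC2_TSYNC are margins expressed in picoseconds, the same
 * unit as the nand_sdr_timings values they are combined with in
 * stm32_fmc2_calc_timings(). FMC2_THIZ is a raw register field value.
 */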
46
47/* FMC2 Controller Registers */
48#define FMC2_BCR1 0x0
49#define FMC2_PCR 0x80
50#define FMC2_SR 0x84
51#define FMC2_PMEM 0x88
52#define FMC2_PATT 0x8c
53#define FMC2_HECCR 0x94
54#define FMC2_CSQCR 0x200
55#define FMC2_CSQCFGR1 0x204
56#define FMC2_CSQCFGR2 0x208
57#define FMC2_CSQCFGR3 0x20c
58#define FMC2_CSQAR1 0x210
59#define FMC2_CSQAR2 0x214
60#define FMC2_CSQIER 0x220
61#define FMC2_CSQISR 0x224
62#define FMC2_CSQICR 0x228
63#define FMC2_CSQEMSR 0x230
64#define FMC2_BCHIER 0x250
65#define FMC2_BCHISR 0x254
66#define FMC2_BCHICR 0x258
67#define FMC2_BCHPBR1 0x260
68#define FMC2_BCHPBR2 0x264
69#define FMC2_BCHPBR3 0x268
70#define FMC2_BCHPBR4 0x26c
71#define FMC2_BCHDSR0 0x27c
72#define FMC2_BCHDSR1 0x280
73#define FMC2_BCHDSR2 0x284
74#define FMC2_BCHDSR3 0x288
75#define FMC2_BCHDSR4 0x28c
76
77/* Register: FMC2_BCR1 */
78#define FMC2_BCR1_FMC2EN BIT(31)
79
80/* Register: FMC2_PCR */
81#define FMC2_PCR_PWAITEN BIT(1)
82#define FMC2_PCR_PBKEN BIT(2)
83#define FMC2_PCR_PWID_MASK GENMASK(5, 4)
84#define FMC2_PCR_PWID(x) (((x) & 0x3) << 4)
85#define FMC2_PCR_PWID_BUSWIDTH_8 0
86#define FMC2_PCR_PWID_BUSWIDTH_16 1
87#define FMC2_PCR_ECCEN BIT(6)
88#define FMC2_PCR_ECCALG BIT(8)
89#define FMC2_PCR_TCLR_MASK GENMASK(12, 9)
90#define FMC2_PCR_TCLR(x) (((x) & 0xf) << 9)
91#define FMC2_PCR_TCLR_DEFAULT 0xf
92#define FMC2_PCR_TAR_MASK GENMASK(16, 13)
93#define FMC2_PCR_TAR(x) (((x) & 0xf) << 13)
94#define FMC2_PCR_TAR_DEFAULT 0xf
95#define FMC2_PCR_ECCSS_MASK GENMASK(19, 17)
96#define FMC2_PCR_ECCSS(x) (((x) & 0x7) << 17)
97#define FMC2_PCR_ECCSS_512 1
98#define FMC2_PCR_ECCSS_2048 3
99#define FMC2_PCR_BCHECC BIT(24)
100#define FMC2_PCR_WEN BIT(25)
101
102/* Register: FMC2_SR */
103#define FMC2_SR_NWRF BIT(6)
104
105/* Register: FMC2_PMEM */
106#define FMC2_PMEM_MEMSET(x) (((x) & 0xff) << 0)
107#define FMC2_PMEM_MEMWAIT(x) (((x) & 0xff) << 8)
108#define FMC2_PMEM_MEMHOLD(x) (((x) & 0xff) << 16)
109#define FMC2_PMEM_MEMHIZ(x) (((x) & 0xff) << 24)
110#define FMC2_PMEM_DEFAULT 0x0a0a0a0a
111
112/* Register: FMC2_PATT */
113#define FMC2_PATT_ATTSET(x) (((x) & 0xff) << 0)
114#define FMC2_PATT_ATTWAIT(x) (((x) & 0xff) << 8)
115#define FMC2_PATT_ATTHOLD(x) (((x) & 0xff) << 16)
116#define FMC2_PATT_ATTHIZ(x) (((x) & 0xff) << 24)
117#define FMC2_PATT_DEFAULT 0x0a0a0a0a
118
119/* Register: FMC2_CSQCR */
120#define FMC2_CSQCR_CSQSTART BIT(0)
121
122/* Register: FMC2_CSQCFGR1 */
123#define FMC2_CSQCFGR1_CMD2EN BIT(1)
124#define FMC2_CSQCFGR1_DMADEN BIT(2)
125#define FMC2_CSQCFGR1_ACYNBR(x) (((x) & 0x7) << 4)
126#define FMC2_CSQCFGR1_CMD1(x) (((x) & 0xff) << 8)
127#define FMC2_CSQCFGR1_CMD2(x) (((x) & 0xff) << 16)
128#define FMC2_CSQCFGR1_CMD1T BIT(24)
129#define FMC2_CSQCFGR1_CMD2T BIT(25)
130
131/* Register: FMC2_CSQCFGR2 */
132#define FMC2_CSQCFGR2_SQSDTEN BIT(0)
133#define FMC2_CSQCFGR2_RCMD2EN BIT(1)
134#define FMC2_CSQCFGR2_DMASEN BIT(2)
135#define FMC2_CSQCFGR2_RCMD1(x) (((x) & 0xff) << 8)
136#define FMC2_CSQCFGR2_RCMD2(x) (((x) & 0xff) << 16)
137#define FMC2_CSQCFGR2_RCMD1T BIT(24)
138#define FMC2_CSQCFGR2_RCMD2T BIT(25)
139
140/* Register: FMC2_CSQCFGR3 */
141#define FMC2_CSQCFGR3_SNBR(x) (((x) & 0x1f) << 8)
142#define FMC2_CSQCFGR3_AC1T BIT(16)
143#define FMC2_CSQCFGR3_AC2T BIT(17)
144#define FMC2_CSQCFGR3_AC3T BIT(18)
145#define FMC2_CSQCFGR3_AC4T BIT(19)
146#define FMC2_CSQCFGR3_AC5T BIT(20)
147#define FMC2_CSQCFGR3_SDT BIT(21)
148#define FMC2_CSQCFGR3_RAC1T BIT(22)
149#define FMC2_CSQCFGR3_RAC2T BIT(23)
150
151/* Register: FMC2_CSQCAR1 */
152#define FMC2_CSQCAR1_ADDC1(x) (((x) & 0xff) << 0)
153#define FMC2_CSQCAR1_ADDC2(x) (((x) & 0xff) << 8)
154#define FMC2_CSQCAR1_ADDC3(x) (((x) & 0xff) << 16)
155#define FMC2_CSQCAR1_ADDC4(x) (((x) & 0xff) << 24)
156
157/* Register: FMC2_CSQCAR2 */
158#define FMC2_CSQCAR2_ADDC5(x) (((x) & 0xff) << 0)
159#define FMC2_CSQCAR2_NANDCEN(x) (((x) & 0x3) << 10)
160#define FMC2_CSQCAR2_SAO(x) (((x) & 0xffff) << 16)
161
162/* Register: FMC2_CSQIER */
163#define FMC2_CSQIER_TCIE BIT(0)
164
165/* Register: FMC2_CSQICR */
166#define FMC2_CSQICR_CLEAR_IRQ GENMASK(4, 0)
167
168/* Register: FMC2_CSQEMSR */
169#define FMC2_CSQEMSR_SEM GENMASK(15, 0)
170
171/* Register: FMC2_BCHIER */
172#define FMC2_BCHIER_DERIE BIT(1)
173#define FMC2_BCHIER_EPBRIE BIT(4)
174
175/* Register: FMC2_BCHICR */
176#define FMC2_BCHICR_CLEAR_IRQ GENMASK(4, 0)
177
178/* Register: FMC2_BCHDSR0 */
179#define FMC2_BCHDSR0_DUE BIT(0)
180#define FMC2_BCHDSR0_DEF BIT(1)
181#define FMC2_BCHDSR0_DEN_MASK GENMASK(7, 4)
182#define FMC2_BCHDSR0_DEN_SHIFT 4
183
184/* Register: FMC2_BCHDSR1 */
185#define FMC2_BCHDSR1_EBP1_MASK GENMASK(12, 0)
186#define FMC2_BCHDSR1_EBP2_MASK GENMASK(28, 16)
187#define FMC2_BCHDSR1_EBP2_SHIFT 16
188
189/* Register: FMC2_BCHDSR2 */
190#define FMC2_BCHDSR2_EBP3_MASK GENMASK(12, 0)
191#define FMC2_BCHDSR2_EBP4_MASK GENMASK(28, 16)
192#define FMC2_BCHDSR2_EBP4_SHIFT 16
193
194/* Register: FMC2_BCHDSR3 */
195#define FMC2_BCHDSR3_EBP5_MASK GENMASK(12, 0)
196#define FMC2_BCHDSR3_EBP6_MASK GENMASK(28, 16)
197#define FMC2_BCHDSR3_EBP6_SHIFT 16
198
199/* Register: FMC2_BCHDSR4 */
200#define FMC2_BCHDSR4_EBP7_MASK GENMASK(12, 0)
201#define FMC2_BCHDSR4_EBP8_MASK GENMASK(28, 16)
202#define FMC2_BCHDSR4_EBP8_SHIFT 16
203
204enum stm32_fmc2_ecc {
205 FMC2_ECC_HAM = 1,
206 FMC2_ECC_BCH4 = 4,
207 FMC2_ECC_BCH8 = 8
208};
209
210enum stm32_fmc2_irq_state {
211 FMC2_IRQ_UNKNOWN = 0,
212 FMC2_IRQ_BCH,
213 FMC2_IRQ_SEQ
214};
215
216struct stm32_fmc2_timings {
217 u8 tclr;
218 u8 tar;
219 u8 thiz;
220 u8 twait;
221 u8 thold_mem;
222 u8 tset_mem;
223 u8 thold_att;
224 u8 tset_att;
225};
226
227struct stm32_fmc2_nand {
228 struct nand_chip chip;
229 struct stm32_fmc2_timings timings;
230 int ncs;
231 int cs_used[FMC2_MAX_CE];
232};
233
234static inline struct stm32_fmc2_nand *to_fmc2_nand(struct nand_chip *chip)
235{
236 return container_of(chip, struct stm32_fmc2_nand, chip);
237}
238
239struct stm32_fmc2_nfc {
240 struct nand_controller base;
241 struct stm32_fmc2_nand nand;
242 struct device *dev;
243 void __iomem *io_base;
244 void __iomem *data_base[FMC2_MAX_CE];
245 void __iomem *cmd_base[FMC2_MAX_CE];
246 void __iomem *addr_base[FMC2_MAX_CE];
247 phys_addr_t io_phys_addr;
248 phys_addr_t data_phys_addr[FMC2_MAX_CE];
249 struct clk *clk;
250 u8 irq_state;
251
252 struct dma_chan *dma_tx_ch;
253 struct dma_chan *dma_rx_ch;
254 struct dma_chan *dma_ecc_ch;
255 struct sg_table dma_data_sg;
256 struct sg_table dma_ecc_sg;
257 u8 *ecc_buf;
258 int dma_ecc_len;
259
260 struct completion complete;
261 struct completion dma_data_complete;
262 struct completion dma_ecc_complete;
263
264 u8 cs_assigned;
265 int cs_sel;
266};
267
268static inline struct stm32_fmc2_nfc *to_stm32_nfc(struct nand_controller *base)
269{
270 return container_of(base, struct stm32_fmc2_nfc, base);
271}
272
273/* Timings configuration */
274static void stm32_fmc2_timings_init(struct nand_chip *chip)
275{
276 struct stm32_fmc2_nfc *fmc2 = to_stm32_nfc(chip->controller);
277 struct stm32_fmc2_nand *nand = to_fmc2_nand(chip);
278 struct stm32_fmc2_timings *timings = &nand->timings;
279 u32 pcr = readl_relaxed(fmc2->io_base + FMC2_PCR);
280 u32 pmem, patt;
281
282 /* Set tclr/tar timings */
283 pcr &= ~FMC2_PCR_TCLR_MASK;
284 pcr |= FMC2_PCR_TCLR(timings->tclr);
285 pcr &= ~FMC2_PCR_TAR_MASK;
286 pcr |= FMC2_PCR_TAR(timings->tar);
287
288 /* Set tset/twait/thold/thiz timings in common bank */
289 pmem = FMC2_PMEM_MEMSET(timings->tset_mem);
290 pmem |= FMC2_PMEM_MEMWAIT(timings->twait);
291 pmem |= FMC2_PMEM_MEMHOLD(timings->thold_mem);
292 pmem |= FMC2_PMEM_MEMHIZ(timings->thiz);
293
294 /* Set tset/twait/thold/thiz timings in attribute bank */
295 patt = FMC2_PATT_ATTSET(timings->tset_att);
296 patt |= FMC2_PATT_ATTWAIT(timings->twait);
297 patt |= FMC2_PATT_ATTHOLD(timings->thold_att);
298 patt |= FMC2_PATT_ATTHIZ(timings->thiz);
299
300 writel_relaxed(pcr, fmc2->io_base + FMC2_PCR);
301 writel_relaxed(pmem, fmc2->io_base + FMC2_PMEM);
302 writel_relaxed(patt, fmc2->io_base + FMC2_PATT);
303}
304
305/* Controller configuration */
306static void stm32_fmc2_setup(struct nand_chip *chip)
307{
308 struct stm32_fmc2_nfc *fmc2 = to_stm32_nfc(chip->controller);
309 u32 pcr = readl_relaxed(fmc2->io_base + FMC2_PCR);
310
311 /* Configure ECC algorithm (default configuration is Hamming) */
312 pcr &= ~FMC2_PCR_ECCALG;
313 pcr &= ~FMC2_PCR_BCHECC;
314 if (chip->ecc.strength == FMC2_ECC_BCH8) {
315 pcr |= FMC2_PCR_ECCALG;
316 pcr |= FMC2_PCR_BCHECC;
317 } else if (chip->ecc.strength == FMC2_ECC_BCH4) {
318 pcr |= FMC2_PCR_ECCALG;
319 }
320
321 /* Set buswidth */
322 pcr &= ~FMC2_PCR_PWID_MASK;
323 if (chip->options & NAND_BUSWIDTH_16)
324 pcr |= FMC2_PCR_PWID(FMC2_PCR_PWID_BUSWIDTH_16);
325
326 /* Set ECC sector size */
327 pcr &= ~FMC2_PCR_ECCSS_MASK;
328 pcr |= FMC2_PCR_ECCSS(FMC2_PCR_ECCSS_512);
329
330 writel_relaxed(pcr, fmc2->io_base + FMC2_PCR);
331}
332
333/* Select target */
334static int stm32_fmc2_select_chip(struct nand_chip *chip, int chipnr)
335{
336 struct stm32_fmc2_nfc *fmc2 = to_stm32_nfc(chip->controller);
337 struct stm32_fmc2_nand *nand = to_fmc2_nand(chip);
338 struct dma_slave_config dma_cfg;
339 int ret;
340
341 if (nand->cs_used[chipnr] == fmc2->cs_sel)
342 return 0;
343
344 fmc2->cs_sel = nand->cs_used[chipnr];
345
346 /* FMC2 setup routine */
347 stm32_fmc2_setup(chip);
348
349 /* Apply timings */
350 stm32_fmc2_timings_init(chip);
351
352 if (fmc2->dma_tx_ch && fmc2->dma_rx_ch) {
353 memset(&dma_cfg, 0, sizeof(dma_cfg));
354 dma_cfg.src_addr = fmc2->data_phys_addr[fmc2->cs_sel];
355 dma_cfg.dst_addr = fmc2->data_phys_addr[fmc2->cs_sel];
356 dma_cfg.src_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
357 dma_cfg.dst_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
358 dma_cfg.src_maxburst = 32;
359 dma_cfg.dst_maxburst = 32;
360
361 ret = dmaengine_slave_config(fmc2->dma_tx_ch, &dma_cfg);
362 if (ret) {
363 dev_err(fmc2->dev, "tx DMA engine slave config failed\n");
364 return ret;
365 }
366
367 ret = dmaengine_slave_config(fmc2->dma_rx_ch, &dma_cfg);
368 if (ret) {
369 dev_err(fmc2->dev, "rx DMA engine slave config failed\n");
370 return ret;
371 }
372 }
373
374 if (fmc2->dma_ecc_ch) {
375 /*
376 * Hamming: we read HECCR register
377 * BCH4/BCH8: we read BCHDSRSx registers
378 */
379 memset(&dma_cfg, 0, sizeof(dma_cfg));
380 dma_cfg.src_addr = fmc2->io_phys_addr;
381 dma_cfg.src_addr += chip->ecc.strength == FMC2_ECC_HAM ?
382 FMC2_HECCR : FMC2_BCHDSR0;
383 dma_cfg.src_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
384
385 ret = dmaengine_slave_config(fmc2->dma_ecc_ch, &dma_cfg);
386 if (ret) {
387 dev_err(fmc2->dev, "ECC DMA engine slave config failed\n");
388 return ret;
389 }
390
391 /* Calculate ECC length needed for one sector */
392 fmc2->dma_ecc_len = chip->ecc.strength == FMC2_ECC_HAM ?
393 FMC2_HECCR_LEN : FMC2_BCHDSRS_LEN;
394 }
395
396 return 0;
397}
398
399/* Set bus width to 16-bit or 8-bit */
400static void stm32_fmc2_set_buswidth_16(struct stm32_fmc2_nfc *fmc2, bool set)
401{
402 u32 pcr = readl_relaxed(fmc2->io_base + FMC2_PCR);
403
404 pcr &= ~FMC2_PCR_PWID_MASK;
405 if (set)
406 pcr |= FMC2_PCR_PWID(FMC2_PCR_PWID_BUSWIDTH_16);
407 writel_relaxed(pcr, fmc2->io_base + FMC2_PCR);
408}
409
410/* Enable/disable ECC */
411static void stm32_fmc2_set_ecc(struct stm32_fmc2_nfc *fmc2, bool enable)
412{
413 u32 pcr = readl(fmc2->io_base + FMC2_PCR);
414
415 pcr &= ~FMC2_PCR_ECCEN;
416 if (enable)
417 pcr |= FMC2_PCR_ECCEN;
418 writel(pcr, fmc2->io_base + FMC2_PCR);
419}
420
421/* Enable irq sources when the sequencer is used */
422static inline void stm32_fmc2_enable_seq_irq(struct stm32_fmc2_nfc *fmc2)
423{
424 u32 csqier = readl_relaxed(fmc2->io_base + FMC2_CSQIER);
425
426 csqier |= FMC2_CSQIER_TCIE;
427
428 fmc2->irq_state = FMC2_IRQ_SEQ;
429
430 writel_relaxed(csqier, fmc2->io_base + FMC2_CSQIER);
431}
432
433/* Disable irq sources when the sequencer is used */
434static inline void stm32_fmc2_disable_seq_irq(struct stm32_fmc2_nfc *fmc2)
435{
436 u32 csqier = readl_relaxed(fmc2->io_base + FMC2_CSQIER);
437
438 csqier &= ~FMC2_CSQIER_TCIE;
439
440 writel_relaxed(csqier, fmc2->io_base + FMC2_CSQIER);
441
442 fmc2->irq_state = FMC2_IRQ_UNKNOWN;
443}
444
445/* Clear irq sources when the sequencer is used */
446static inline void stm32_fmc2_clear_seq_irq(struct stm32_fmc2_nfc *fmc2)
447{
448 writel_relaxed(FMC2_CSQICR_CLEAR_IRQ, fmc2->io_base + FMC2_CSQICR);
449}
450
451/* Enable irq sources when BCH is used */
452static inline void stm32_fmc2_enable_bch_irq(struct stm32_fmc2_nfc *fmc2,
453 int mode)
454{
455 u32 bchier = readl_relaxed(fmc2->io_base + FMC2_BCHIER);
456
457 if (mode == NAND_ECC_WRITE)
458 bchier |= FMC2_BCHIER_EPBRIE;
459 else
460 bchier |= FMC2_BCHIER_DERIE;
461
462 fmc2->irq_state = FMC2_IRQ_BCH;
463
464 writel_relaxed(bchier, fmc2->io_base + FMC2_BCHIER);
465}
466
467/* Disable irq sources when BCH is used */
468static inline void stm32_fmc2_disable_bch_irq(struct stm32_fmc2_nfc *fmc2)
469{
470 u32 bchier = readl_relaxed(fmc2->io_base + FMC2_BCHIER);
471
472 bchier &= ~FMC2_BCHIER_DERIE;
473 bchier &= ~FMC2_BCHIER_EPBRIE;
474
475 writel_relaxed(bchier, fmc2->io_base + FMC2_BCHIER);
476
477 fmc2->irq_state = FMC2_IRQ_UNKNOWN;
478}
479
480/* Clear irq sources when BCH is used */
481static inline void stm32_fmc2_clear_bch_irq(struct stm32_fmc2_nfc *fmc2)
482{
483 writel_relaxed(FMC2_BCHICR_CLEAR_IRQ, fmc2->io_base + FMC2_BCHICR);
484}
485
486/*
487 * Enable ECC logic and reset syndrome/parity bits previously calculated
488 * Syndrome/parity bits are cleared by setting the ECCEN bit to 0
489 */
490static void stm32_fmc2_hwctl(struct nand_chip *chip, int mode)
491{
492 struct stm32_fmc2_nfc *fmc2 = to_stm32_nfc(chip->controller);
493
494 stm32_fmc2_set_ecc(fmc2, false);
495
496 if (chip->ecc.strength != FMC2_ECC_HAM) {
497 u32 pcr = readl_relaxed(fmc2->io_base + FMC2_PCR);
498
499 if (mode == NAND_ECC_WRITE)
500 pcr |= FMC2_PCR_WEN;
501 else
502 pcr &= ~FMC2_PCR_WEN;
503 writel_relaxed(pcr, fmc2->io_base + FMC2_PCR);
504
505 reinit_completion(&fmc2->complete);
506 stm32_fmc2_clear_bch_irq(fmc2);
507 stm32_fmc2_enable_bch_irq(fmc2, mode);
508 }
509
510 stm32_fmc2_set_ecc(fmc2, true);
511}
512
513/*
514 * ECC Hamming calculation
515 * ECC is 3 bytes for 512 bytes of data (supports correction of at most
516 * a 1-bit error)
517 */
518static inline void stm32_fmc2_ham_set_ecc(const u32 ecc_sta, u8 *ecc)
519{
520 ecc[0] = ecc_sta;
521 ecc[1] = ecc_sta >> 8;
522 ecc[2] = ecc_sta >> 16;
523}
524
525static int stm32_fmc2_ham_calculate(struct nand_chip *chip, const u8 *data,
526 u8 *ecc)
527{
528 struct stm32_fmc2_nfc *fmc2 = to_stm32_nfc(chip->controller);
529 u32 sr, heccr;
530 int ret;
531
532 ret = readl_relaxed_poll_timeout(fmc2->io_base + FMC2_SR,
533 sr, sr & FMC2_SR_NWRF, 10, 1000);
534 if (ret) {
535 dev_err(fmc2->dev, "ham timeout\n");
536 return ret;
537 }
538
539 heccr = readl_relaxed(fmc2->io_base + FMC2_HECCR);
540
541 stm32_fmc2_ham_set_ecc(heccr, ecc);
542
543 /* Disable ECC */
544 stm32_fmc2_set_ecc(fmc2, false);
545
546 return 0;
547}
548
549static int stm32_fmc2_ham_correct(struct nand_chip *chip, u8 *dat,
550 u8 *read_ecc, u8 *calc_ecc)
551{
552 u8 bit_position = 0, b0, b1, b2;
553 u32 byte_addr = 0, b;
554 u32 i, shifting = 1;
555
556 /* Indicate which bit and byte is faulty (if any) */
557 b0 = read_ecc[0] ^ calc_ecc[0];
558 b1 = read_ecc[1] ^ calc_ecc[1];
559 b2 = read_ecc[2] ^ calc_ecc[2];
560 b = b0 | (b1 << 8) | (b2 << 16);
561
562 /* No errors */
563 if (likely(!b))
564 return 0;
565
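 /*
 * The 24-bit syndrome b is decoded two bits at a time: for a single
 * correctable error each bit pair is either 2 (the corresponding
 * position bit is 1) or 1 (the position bit is 0). The first three
 * pairs give the bit position inside the faulty byte, the next nine
 * pairs give the byte address within the 512-byte sector. Any other
 * pair value means the error is not correctable.
 */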
566 /* Calculate bit position */
567 for (i = 0; i < 3; i++) {
568 switch (b % 4) {
569 case 2:
570 bit_position += shifting;
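 /* fall through */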
571 case 1:
572 break;
573 default:
574 return -EBADMSG;
575 }
576 shifting <<= 1;
577 b >>= 2;
578 }
579
580 /* Calculate byte position */
581 shifting = 1;
582 for (i = 0; i < 9; i++) {
583 switch (b % 4) {
584 case 2:
585 byte_addr += shifting;
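 /* fall through */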
586 case 1:
587 break;
588 default:
589 return -EBADMSG;
590 }
591 shifting <<= 1;
592 b >>= 2;
593 }
594
595 /* Flip the bit */
596 dat[byte_addr] ^= (1 << bit_position);
597
598 return 1;
599}
600
601/*
602 * ECC BCH calculation and correction
603 * ECC is 7/13 bytes for 512 bytes of data (supports correction of up to
604 * 4/8 bit errors)
605 */
606static int stm32_fmc2_bch_calculate(struct nand_chip *chip, const u8 *data,
607 u8 *ecc)
608{
609 struct stm32_fmc2_nfc *fmc2 = to_stm32_nfc(chip->controller);
610 u32 bchpbr;
611
612 /* Wait until the BCH code is ready */
613 if (!wait_for_completion_timeout(&fmc2->complete,
614 msecs_to_jiffies(1000))) {
615 dev_err(fmc2->dev, "bch timeout\n");
616 stm32_fmc2_disable_bch_irq(fmc2);
617 return -ETIMEDOUT;
618 }
619
620 /* Read parity bits */
621 bchpbr = readl_relaxed(fmc2->io_base + FMC2_BCHPBR1);
622 ecc[0] = bchpbr;
623 ecc[1] = bchpbr >> 8;
624 ecc[2] = bchpbr >> 16;
625 ecc[3] = bchpbr >> 24;
626
627 bchpbr = readl_relaxed(fmc2->io_base + FMC2_BCHPBR2);
628 ecc[4] = bchpbr;
629 ecc[5] = bchpbr >> 8;
630 ecc[6] = bchpbr >> 16;
631
632 if (chip->ecc.strength == FMC2_ECC_BCH8) {
633 ecc[7] = bchpbr >> 24;
634
635 bchpbr = readl_relaxed(fmc2->io_base + FMC2_BCHPBR3);
636 ecc[8] = bchpbr;
637 ecc[9] = bchpbr >> 8;
638 ecc[10] = bchpbr >> 16;
639 ecc[11] = bchpbr >> 24;
640
641 bchpbr = readl_relaxed(fmc2->io_base + FMC2_BCHPBR4);
642 ecc[12] = bchpbr;
643 }
644
645 /* Disable ECC */
646 stm32_fmc2_set_ecc(fmc2, false);
647
648 return 0;
649}
650
651/* BCH algorithm correction */
652static int stm32_fmc2_bch_decode(int eccsize, u8 *dat, u32 *ecc_sta)
653{
654 u32 bchdsr0 = ecc_sta[0];
655 u32 bchdsr1 = ecc_sta[1];
656 u32 bchdsr2 = ecc_sta[2];
657 u32 bchdsr3 = ecc_sta[3];
658 u32 bchdsr4 = ecc_sta[4];
659 u16 pos[8];
660 int i, den;
661 unsigned int nb_errs = 0;
662
663 /* No errors found */
664 if (likely(!(bchdsr0 & FMC2_BCHDSR0_DEF)))
665 return 0;
666
667 /* Too many errors detected */
668 if (unlikely(bchdsr0 & FMC2_BCHDSR0_DUE))
669 return -EBADMSG;
670
671 pos[0] = bchdsr1 & FMC2_BCHDSR1_EBP1_MASK;
672 pos[1] = (bchdsr1 & FMC2_BCHDSR1_EBP2_MASK) >> FMC2_BCHDSR1_EBP2_SHIFT;
673 pos[2] = bchdsr2 & FMC2_BCHDSR2_EBP3_MASK;
674 pos[3] = (bchdsr2 & FMC2_BCHDSR2_EBP4_MASK) >> FMC2_BCHDSR2_EBP4_SHIFT;
675 pos[4] = bchdsr3 & FMC2_BCHDSR3_EBP5_MASK;
676 pos[5] = (bchdsr3 & FMC2_BCHDSR3_EBP6_MASK) >> FMC2_BCHDSR3_EBP6_SHIFT;
677 pos[6] = bchdsr4 & FMC2_BCHDSR4_EBP7_MASK;
678 pos[7] = (bchdsr4 & FMC2_BCHDSR4_EBP8_MASK) >> FMC2_BCHDSR4_EBP8_SHIFT;
679
680 den = (bchdsr0 & FMC2_BCHDSR0_DEN_MASK) >> FMC2_BCHDSR0_DEN_SHIFT;
681 for (i = 0; i < den; i++) {
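 /*
 * Error positions at or beyond eccsize * 8 fall outside the data
 * area (i.e. in the parity bytes), so there is nothing to flip in
 * the data buffer for them.
 */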
682 if (pos[i] < eccsize * 8) {
683 change_bit(pos[i], (unsigned long *)dat);
684 nb_errs++;
685 }
686 }
687
688 return nb_errs;
689}
690
691static int stm32_fmc2_bch_correct(struct nand_chip *chip, u8 *dat,
692 u8 *read_ecc, u8 *calc_ecc)
693{
694 struct stm32_fmc2_nfc *fmc2 = to_stm32_nfc(chip->controller);
695 u32 ecc_sta[5];
696
697 /* Wait until the decoding error registers are ready */
698 if (!wait_for_completion_timeout(&fmc2->complete,
699 msecs_to_jiffies(1000))) {
700 dev_err(fmc2->dev, "bch timeout\n");
701 stm32_fmc2_disable_bch_irq(fmc2);
702 return -ETIMEDOUT;
703 }
704
705 ecc_sta[0] = readl_relaxed(fmc2->io_base + FMC2_BCHDSR0);
706 ecc_sta[1] = readl_relaxed(fmc2->io_base + FMC2_BCHDSR1);
707 ecc_sta[2] = readl_relaxed(fmc2->io_base + FMC2_BCHDSR2);
708 ecc_sta[3] = readl_relaxed(fmc2->io_base + FMC2_BCHDSR3);
709 ecc_sta[4] = readl_relaxed(fmc2->io_base + FMC2_BCHDSR4);
710
711 /* Disable ECC */
712 stm32_fmc2_set_ecc(fmc2, false);
713
714 return stm32_fmc2_bch_decode(chip->ecc.size, dat, ecc_sta);
715}
716
717static int stm32_fmc2_read_page(struct nand_chip *chip, u8 *buf,
718 int oob_required, int page)
719{
720 struct mtd_info *mtd = nand_to_mtd(chip);
721 int ret, i, s, stat, eccsize = chip->ecc.size;
722 int eccbytes = chip->ecc.bytes;
723 int eccsteps = chip->ecc.steps;
724 int eccstrength = chip->ecc.strength;
725 u8 *p = buf;
726 u8 *ecc_calc = chip->ecc.calc_buf;
727 u8 *ecc_code = chip->ecc.code_buf;
728 unsigned int max_bitflips = 0;
729
730 ret = nand_read_page_op(chip, page, 0, NULL, 0);
731 if (ret)
732 return ret;
733
734 for (i = mtd->writesize + FMC2_BBM_LEN, s = 0; s < eccsteps;
735 s++, i += eccbytes, p += eccsize) {
736 chip->ecc.hwctl(chip, NAND_ECC_READ);
737
738 /* Read the nand page sector (512 bytes) */
739 ret = nand_change_read_column_op(chip, s * eccsize, p,
740 eccsize, false);
741 if (ret)
742 return ret;
743
744 /* Read the corresponding ECC bytes */
745 ret = nand_change_read_column_op(chip, i, ecc_code,
746 eccbytes, false);
747 if (ret)
748 return ret;
749
750 /* Correct the data */
751 stat = chip->ecc.correct(chip, p, ecc_code, ecc_calc);
752 if (stat == -EBADMSG)
753 /* Check for empty pages with bitflips */
754 stat = nand_check_erased_ecc_chunk(p, eccsize,
755 ecc_code, eccbytes,
756 NULL, 0,
757 eccstrength);
758
759 if (stat < 0) {
760 mtd->ecc_stats.failed++;
761 } else {
762 mtd->ecc_stats.corrected += stat;
763 max_bitflips = max_t(unsigned int, max_bitflips, stat);
764 }
765 }
766
767 /* Read oob */
768 if (oob_required) {
769 ret = nand_change_read_column_op(chip, mtd->writesize,
770 chip->oob_poi, mtd->oobsize,
771 false);
772 if (ret)
773 return ret;
774 }
775
776 return max_bitflips;
777}
778
779/* Sequencer read/write configuration */
780static void stm32_fmc2_rw_page_init(struct nand_chip *chip, int page,
781 int raw, bool write_data)
782{
783 struct stm32_fmc2_nfc *fmc2 = to_stm32_nfc(chip->controller);
784 struct mtd_info *mtd = nand_to_mtd(chip);
785 u32 csqcfgr1, csqcfgr2, csqcfgr3;
786 u32 csqar1, csqar2;
787 u32 ecc_offset = mtd->writesize + FMC2_BBM_LEN;
788 u32 pcr = readl_relaxed(fmc2->io_base + FMC2_PCR);
789
790 if (write_data)
791 pcr |= FMC2_PCR_WEN;
792 else
793 pcr &= ~FMC2_PCR_WEN;
794 writel_relaxed(pcr, fmc2->io_base + FMC2_PCR);
795
796 /*
797 * - Set Program Page/Page Read command
798 * - Enable DMA request data
799 * - Set timings
800 */
801 csqcfgr1 = FMC2_CSQCFGR1_DMADEN | FMC2_CSQCFGR1_CMD1T;
802 if (write_data)
803 csqcfgr1 |= FMC2_CSQCFGR1_CMD1(NAND_CMD_SEQIN);
804 else
805 csqcfgr1 |= FMC2_CSQCFGR1_CMD1(NAND_CMD_READ0) |
806 FMC2_CSQCFGR1_CMD2EN |
807 FMC2_CSQCFGR1_CMD2(NAND_CMD_READSTART) |
808 FMC2_CSQCFGR1_CMD2T;
809
810 /*
811 * - Set Random Data Input/Random Data Read command
812 * - Enable the sequencer to access the Spare data area
813 * - Enable DMA request status decoding for read
814 * - Set timings
815 */
816 if (write_data)
817 csqcfgr2 = FMC2_CSQCFGR2_RCMD1(NAND_CMD_RNDIN);
818 else
819 csqcfgr2 = FMC2_CSQCFGR2_RCMD1(NAND_CMD_RNDOUT) |
820 FMC2_CSQCFGR2_RCMD2EN |
821 FMC2_CSQCFGR2_RCMD2(NAND_CMD_RNDOUTSTART) |
822 FMC2_CSQCFGR2_RCMD1T |
823 FMC2_CSQCFGR2_RCMD2T;
824 if (!raw) {
825 csqcfgr2 |= write_data ? 0 : FMC2_CSQCFGR2_DMASEN;
826 csqcfgr2 |= FMC2_CSQCFGR2_SQSDTEN;
827 }
828
829 /*
830 * - Set the number of sectors to be written
831 * - Set timings
832 */
833 csqcfgr3 = FMC2_CSQCFGR3_SNBR(chip->ecc.steps - 1);
834 if (write_data) {
835 csqcfgr3 |= FMC2_CSQCFGR3_RAC2T;
836 if (chip->options & NAND_ROW_ADDR_3)
837 csqcfgr3 |= FMC2_CSQCFGR3_AC5T;
838 else
839 csqcfgr3 |= FMC2_CSQCFGR3_AC4T;
840 }
841
842 /*
843 * Set the first four address cycles
844 * Byte 1 and byte 2 => column, we start at 0x0
845 * Byte 3 and byte 4 => page
846 */
847 csqar1 = FMC2_CSQCAR1_ADDC3(page);
848 csqar1 |= FMC2_CSQCAR1_ADDC4(page >> 8);
849
850 /*
851 * - Set chip enable number
852 * - Set ECC byte offset in the spare area
853 * - Calculate the number of address cycles to be issued
854 * - Set byte 5 of address cycle if needed
855 */
856 csqar2 = FMC2_CSQCAR2_NANDCEN(fmc2->cs_sel);
857 if (chip->options & NAND_BUSWIDTH_16)
858 csqar2 |= FMC2_CSQCAR2_SAO(ecc_offset >> 1);
859 else
860 csqar2 |= FMC2_CSQCAR2_SAO(ecc_offset);
861 if (chip->options & NAND_ROW_ADDR_3) {
862 csqcfgr1 |= FMC2_CSQCFGR1_ACYNBR(5);
863 csqar2 |= FMC2_CSQCAR2_ADDC5(page >> 16);
864 } else {
865 csqcfgr1 |= FMC2_CSQCFGR1_ACYNBR(4);
866 }
867
868 writel_relaxed(csqcfgr1, fmc2->io_base + FMC2_CSQCFGR1);
869 writel_relaxed(csqcfgr2, fmc2->io_base + FMC2_CSQCFGR2);
870 writel_relaxed(csqcfgr3, fmc2->io_base + FMC2_CSQCFGR3);
871 writel_relaxed(csqar1, fmc2->io_base + FMC2_CSQAR1);
872 writel_relaxed(csqar2, fmc2->io_base + FMC2_CSQAR2);
873}
874
875static void stm32_fmc2_dma_callback(void *arg)
876{
877 complete((struct completion *)arg);
878}
879
880/* Read/write data from/to a page */
881static int stm32_fmc2_xfer(struct nand_chip *chip, const u8 *buf,
882 int raw, bool write_data)
883{
884 struct stm32_fmc2_nfc *fmc2 = to_stm32_nfc(chip->controller);
885 struct dma_async_tx_descriptor *desc_data, *desc_ecc;
886 struct scatterlist *sg;
887 struct dma_chan *dma_ch = fmc2->dma_rx_ch;
888 enum dma_data_direction dma_data_dir = DMA_FROM_DEVICE;
889 enum dma_transfer_direction dma_transfer_dir = DMA_DEV_TO_MEM;
890 u32 csqcr = readl_relaxed(fmc2->io_base + FMC2_CSQCR);
891 int eccsteps = chip->ecc.steps;
892 int eccsize = chip->ecc.size;
893 const u8 *p = buf;
894 int s, ret;
895
896 /* Configure DMA data */
897 if (write_data) {
898 dma_data_dir = DMA_TO_DEVICE;
899 dma_transfer_dir = DMA_MEM_TO_DEV;
900 dma_ch = fmc2->dma_tx_ch;
901 }
902
903 for_each_sg(fmc2->dma_data_sg.sgl, sg, eccsteps, s) {
904 sg_set_buf(sg, p, eccsize);
905 p += eccsize;
906 }
907
908 ret = dma_map_sg(fmc2->dev, fmc2->dma_data_sg.sgl,
909 eccsteps, dma_data_dir);
910 if (ret < 0)
911 return ret;
912
913 desc_data = dmaengine_prep_slave_sg(dma_ch, fmc2->dma_data_sg.sgl,
914 eccsteps, dma_transfer_dir,
915 DMA_PREP_INTERRUPT);
916 if (!desc_data) {
917 ret = -ENOMEM;
918 goto err_unmap_data;
919 }
920
921 reinit_completion(&fmc2->dma_data_complete);
922 reinit_completion(&fmc2->complete);
923 desc_data->callback = stm32_fmc2_dma_callback;
924 desc_data->callback_param = &fmc2->dma_data_complete;
925 ret = dma_submit_error(dmaengine_submit(desc_data));
926 if (ret)
927 goto err_unmap_data;
928
929 dma_async_issue_pending(dma_ch);
930
931 if (!write_data && !raw) {
932 /* Configure DMA ECC status */
933 p = fmc2->ecc_buf;
934 for_each_sg(fmc2->dma_ecc_sg.sgl, sg, eccsteps, s) {
935 sg_set_buf(sg, p, fmc2->dma_ecc_len);
936 p += fmc2->dma_ecc_len;
937 }
938
939 ret = dma_map_sg(fmc2->dev, fmc2->dma_ecc_sg.sgl,
940 eccsteps, dma_data_dir);
941 if (ret < 0)
942 goto err_unmap_data;
943
944 desc_ecc = dmaengine_prep_slave_sg(fmc2->dma_ecc_ch,
945 fmc2->dma_ecc_sg.sgl,
946 eccsteps, dma_transfer_dir,
947 DMA_PREP_INTERRUPT);
948 if (!desc_ecc) {
949 ret = -ENOMEM;
950 goto err_unmap_ecc;
951 }
952
953 reinit_completion(&fmc2->dma_ecc_complete);
954 desc_ecc->callback = stm32_fmc2_dma_callback;
955 desc_ecc->callback_param = &fmc2->dma_ecc_complete;
956 ret = dma_submit_error(dmaengine_submit(desc_ecc));
957 if (ret)
958 goto err_unmap_ecc;
959
960 dma_async_issue_pending(fmc2->dma_ecc_ch);
961 }
962
963 stm32_fmc2_clear_seq_irq(fmc2);
964 stm32_fmc2_enable_seq_irq(fmc2);
965
966 /* Start the transfer */
967 csqcr |= FMC2_CSQCR_CSQSTART;
968 writel_relaxed(csqcr, fmc2->io_base + FMC2_CSQCR);
969
970 /* Wait for the end of the sequencer transfer */
971 if (!wait_for_completion_timeout(&fmc2->complete,
972 msecs_to_jiffies(1000))) {
973 dev_err(fmc2->dev, "seq timeout\n");
974 stm32_fmc2_disable_seq_irq(fmc2);
975 dmaengine_terminate_all(dma_ch);
976 if (!write_data && !raw)
977 dmaengine_terminate_all(fmc2->dma_ecc_ch);
978 ret = -ETIMEDOUT;
979 goto err_unmap_ecc;
980 }
981
982 /* Wait DMA data transfer completion */
983 if (!wait_for_completion_timeout(&fmc2->dma_data_complete,
984 msecs_to_jiffies(100))) {
985 dev_err(fmc2->dev, "data DMA timeout\n");
986 dmaengine_terminate_all(dma_ch);
987 ret = -ETIMEDOUT;
988 }
989
990 /* Wait DMA ECC transfer completion */
991 if (!write_data && !raw) {
992 if (!wait_for_completion_timeout(&fmc2->dma_ecc_complete,
993 msecs_to_jiffies(100))) {
994 dev_err(fmc2->dev, "ECC DMA timeout\n");
995 dmaengine_terminate_all(fmc2->dma_ecc_ch);
996 ret = -ETIMEDOUT;
997 }
998 }
999
1000err_unmap_ecc:
1001 if (!write_data && !raw)
1002 dma_unmap_sg(fmc2->dev, fmc2->dma_ecc_sg.sgl,
1003 eccsteps, dma_data_dir);
1004
1005err_unmap_data:
1006 dma_unmap_sg(fmc2->dev, fmc2->dma_data_sg.sgl, eccsteps, dma_data_dir);
1007
1008 return ret;
1009}
1010
1011static int stm32_fmc2_sequencer_write(struct nand_chip *chip,
1012 const u8 *buf, int oob_required,
1013 int page, int raw)
1014{
1015 struct mtd_info *mtd = nand_to_mtd(chip);
1016 int ret;
1017
1018 /* Configure the sequencer */
1019 stm32_fmc2_rw_page_init(chip, page, raw, true);
1020
1021 /* Write the page */
1022 ret = stm32_fmc2_xfer(chip, buf, raw, true);
1023 if (ret)
1024 return ret;
1025
1026 /* Write oob */
1027 if (oob_required) {
1028 ret = nand_change_write_column_op(chip, mtd->writesize,
1029 chip->oob_poi, mtd->oobsize,
1030 false);
1031 if (ret)
1032 return ret;
1033 }
1034
1035 return nand_prog_page_end_op(chip);
1036}
1037
1038static int stm32_fmc2_sequencer_write_page(struct nand_chip *chip,
1039 const u8 *buf,
1040 int oob_required,
1041 int page)
1042{
1043 int ret;
1044
1045 /* Select the target */
1046 ret = stm32_fmc2_select_chip(chip, chip->cur_cs);
1047 if (ret)
1048 return ret;
1049
1050 return stm32_fmc2_sequencer_write(chip, buf, oob_required, page, false);
1051}
1052
1053static int stm32_fmc2_sequencer_write_page_raw(struct nand_chip *chip,
1054 const u8 *buf,
1055 int oob_required,
1056 int page)
1057{
1058 int ret;
1059
1060 /* Select the target */
1061 ret = stm32_fmc2_select_chip(chip, chip->cur_cs);
1062 if (ret)
1063 return ret;
1064
1065 return stm32_fmc2_sequencer_write(chip, buf, oob_required, page, true);
1066}
1067
1068/* Get a status indicating which sectors have errors */
1069static inline u16 stm32_fmc2_get_mapping_status(struct stm32_fmc2_nfc *fmc2)
1070{
1071 u32 csqemsr = readl_relaxed(fmc2->io_base + FMC2_CSQEMSR);
1072
1073 return csqemsr & FMC2_CSQEMSR_SEM;
1074}
1075
1076static int stm32_fmc2_sequencer_correct(struct nand_chip *chip, u8 *dat,
1077 u8 *read_ecc, u8 *calc_ecc)
1078{
1079 struct mtd_info *mtd = nand_to_mtd(chip);
1080 struct stm32_fmc2_nfc *fmc2 = to_stm32_nfc(chip->controller);
1081 int eccbytes = chip->ecc.bytes;
1082 int eccsteps = chip->ecc.steps;
1083 int eccstrength = chip->ecc.strength;
1084 int i, s, eccsize = chip->ecc.size;
1085 u32 *ecc_sta = (u32 *)fmc2->ecc_buf;
1086 u16 sta_map = stm32_fmc2_get_mapping_status(fmc2);
1087 unsigned int max_bitflips = 0;
1088
1089 for (i = 0, s = 0; s < eccsteps; s++, i += eccbytes, dat += eccsize) {
1090 int stat = 0;
1091
1092 if (eccstrength == FMC2_ECC_HAM) {
1093 /* Ecc_sta = FMC2_HECCR */
1094 if (sta_map & BIT(s)) {
1095 stm32_fmc2_ham_set_ecc(*ecc_sta, &calc_ecc[i]);
1096 stat = stm32_fmc2_ham_correct(chip, dat,
1097 &read_ecc[i],
1098 &calc_ecc[i]);
1099 }
1100 ecc_sta++;
1101 } else {
1102 /*
1103 * Ecc_sta[0] = FMC2_BCHDSR0
1104 * Ecc_sta[1] = FMC2_BCHDSR1
1105 * Ecc_sta[2] = FMC2_BCHDSR2
1106 * Ecc_sta[3] = FMC2_BCHDSR3
1107 * Ecc_sta[4] = FMC2_BCHDSR4
1108 */
1109 if (sta_map & BIT(s))
1110 stat = stm32_fmc2_bch_decode(eccsize, dat,
1111 ecc_sta);
1112 ecc_sta += 5;
1113 }
1114
1115 if (stat == -EBADMSG)
1116 /* Check for empty pages with bitflips */
1117 stat = nand_check_erased_ecc_chunk(dat, eccsize,
1118 &read_ecc[i],
1119 eccbytes,
1120 NULL, 0,
1121 eccstrength);
1122
1123 if (stat < 0) {
1124 mtd->ecc_stats.failed++;
1125 } else {
1126 mtd->ecc_stats.corrected += stat;
1127 max_bitflips = max_t(unsigned int, max_bitflips, stat);
1128 }
1129 }
1130
1131 return max_bitflips;
1132}
1133
1134static int stm32_fmc2_sequencer_read_page(struct nand_chip *chip, u8 *buf,
1135 int oob_required, int page)
1136{
1137 struct mtd_info *mtd = nand_to_mtd(chip);
1138 struct stm32_fmc2_nfc *fmc2 = to_stm32_nfc(chip->controller);
1139 u8 *ecc_calc = chip->ecc.calc_buf;
1140 u8 *ecc_code = chip->ecc.code_buf;
1141 u16 sta_map;
1142 int ret;
1143
1144 /* Select the target */
1145 ret = stm32_fmc2_select_chip(chip, chip->cur_cs);
1146 if (ret)
1147 return ret;
1148
1149 /* Configure the sequencer */
1150 stm32_fmc2_rw_page_init(chip, page, 0, false);
1151
1152 /* Read the page */
1153 ret = stm32_fmc2_xfer(chip, buf, 0, false);
1154 if (ret)
1155 return ret;
1156
1157 sta_map = stm32_fmc2_get_mapping_status(fmc2);
1158
1159 /* Check if any errors happened */
1160 if (likely(!sta_map)) {
1161 if (oob_required)
1162 return nand_change_read_column_op(chip, mtd->writesize,
1163 chip->oob_poi,
1164 mtd->oobsize, false);
1165
1166 return 0;
1167 }
1168
1169 /* Read oob */
1170 ret = nand_change_read_column_op(chip, mtd->writesize,
1171 chip->oob_poi, mtd->oobsize, false);
1172 if (ret)
1173 return ret;
1174
1175 ret = mtd_ooblayout_get_eccbytes(mtd, ecc_code, chip->oob_poi, 0,
1176 chip->ecc.total);
1177 if (ret)
1178 return ret;
1179
1180 /* Correct data */
1181 return chip->ecc.correct(chip, buf, ecc_code, ecc_calc);
1182}
1183
1184static int stm32_fmc2_sequencer_read_page_raw(struct nand_chip *chip, u8 *buf,
1185 int oob_required, int page)
1186{
1187 struct mtd_info *mtd = nand_to_mtd(chip);
1188 int ret;
1189
1190 /* Select the target */
1191 ret = stm32_fmc2_select_chip(chip, chip->cur_cs);
1192 if (ret)
1193 return ret;
1194
1195 /* Configure the sequencer */
1196 stm32_fmc2_rw_page_init(chip, page, 1, false);
1197
1198 /* Read the page */
1199 ret = stm32_fmc2_xfer(chip, buf, 1, false);
1200 if (ret)
1201 return ret;
1202
1203 /* Read oob */
1204 if (oob_required)
1205 return nand_change_read_column_op(chip, mtd->writesize,
1206 chip->oob_poi, mtd->oobsize,
1207 false);
1208
1209 return 0;
1210}
1211
1212static irqreturn_t stm32_fmc2_irq(int irq, void *dev_id)
1213{
1214 struct stm32_fmc2_nfc *fmc2 = (struct stm32_fmc2_nfc *)dev_id;
1215
1216 if (fmc2->irq_state == FMC2_IRQ_SEQ)
1217 /* Sequencer is used */
1218 stm32_fmc2_disable_seq_irq(fmc2);
1219 else if (fmc2->irq_state == FMC2_IRQ_BCH)
1220 /* BCH is used */
1221 stm32_fmc2_disable_bch_irq(fmc2);
1222
1223 complete(&fmc2->complete);
1224
1225 return IRQ_HANDLED;
1226}
1227
1228static void stm32_fmc2_read_data(struct nand_chip *chip, void *buf,
1229 unsigned int len, bool force_8bit)
1230{
1231 struct stm32_fmc2_nfc *fmc2 = to_stm32_nfc(chip->controller);
1232 void __iomem *io_addr_r = fmc2->data_base[fmc2->cs_sel];
1233
1234 if (force_8bit && chip->options & NAND_BUSWIDTH_16)
1235 /* Reconfigure bus width to 8-bit */
1236 stm32_fmc2_set_buswidth_16(fmc2, false);
1237
1238 if (!IS_ALIGNED((uintptr_t)buf, sizeof(u32))) {
1239 if (!IS_ALIGNED((uintptr_t)buf, sizeof(u16)) && len) {
1240 *(u8 *)buf = readb_relaxed(io_addr_r);
1241 buf += sizeof(u8);
1242 len -= sizeof(u8);
1243 }
1244
1245 if (!IS_ALIGNED((uintptr_t)buf, sizeof(u32)) &&
1246 len >= sizeof(u16)) {
1247 *(u16 *)buf = readw_relaxed(io_addr_r);
1248 buf += sizeof(u16);
1249 len -= sizeof(u16);
1250 }
1251 }
1252
1253 /* Buf is aligned */
1254 while (len >= sizeof(u32)) {
1255 *(u32 *)buf = readl_relaxed(io_addr_r);
1256 buf += sizeof(u32);
1257 len -= sizeof(u32);
1258 }
1259
1260 /* Read remaining bytes */
1261 if (len >= sizeof(u16)) {
1262 *(u16 *)buf = readw_relaxed(io_addr_r);
1263 buf += sizeof(u16);
1264 len -= sizeof(u16);
1265 }
1266
1267 if (len)
1268 *(u8 *)buf = readb_relaxed(io_addr_r);
1269
1270 if (force_8bit && chip->options & NAND_BUSWIDTH_16)
1271 /* Reconfigure bus width to 16-bit */
1272 stm32_fmc2_set_buswidth_16(fmc2, true);
1273}
1274
1275static void stm32_fmc2_write_data(struct nand_chip *chip, const void *buf,
1276 unsigned int len, bool force_8bit)
1277{
1278 struct stm32_fmc2_nfc *fmc2 = to_stm32_nfc(chip->controller);
1279 void __iomem *io_addr_w = fmc2->data_base[fmc2->cs_sel];
1280
1281 if (force_8bit && chip->options & NAND_BUSWIDTH_16)
1282 /* Reconfigure bus width to 8-bit */
1283 stm32_fmc2_set_buswidth_16(fmc2, false);
1284
1285 if (!IS_ALIGNED((uintptr_t)buf, sizeof(u32))) {
1286 if (!IS_ALIGNED((uintptr_t)buf, sizeof(u16)) && len) {
1287 writeb_relaxed(*(u8 *)buf, io_addr_w);
1288 buf += sizeof(u8);
1289 len -= sizeof(u8);
1290 }
1291
1292 if (!IS_ALIGNED((uintptr_t)buf, sizeof(u32)) &&
1293 len >= sizeof(u16)) {
1294 writew_relaxed(*(u16 *)buf, io_addr_w);
1295 buf += sizeof(u16);
1296 len -= sizeof(u16);
1297 }
1298 }
1299
1300 /* Buf is aligned */
1301 while (len >= sizeof(u32)) {
1302 writel_relaxed(*(u32 *)buf, io_addr_w);
1303 buf += sizeof(u32);
1304 len -= sizeof(u32);
1305 }
1306
1307 /* Write remaining bytes */
1308 if (len >= sizeof(u16)) {
1309 writew_relaxed(*(u16 *)buf, io_addr_w);
1310 buf += sizeof(u16);
1311 len -= sizeof(u16);
1312 }
1313
1314 if (len)
1315 writeb_relaxed(*(u8 *)buf, io_addr_w);
1316
1317 if (force_8bit && chip->options & NAND_BUSWIDTH_16)
1318 /* Reconfigure bus width to 16-bit */
1319 stm32_fmc2_set_buswidth_16(fmc2, true);
1320}
1321
1322static int stm32_fmc2_exec_op(struct nand_chip *chip,
1323 const struct nand_operation *op,
1324 bool check_only)
1325{
1326 struct stm32_fmc2_nfc *fmc2 = to_stm32_nfc(chip->controller);
1327 const struct nand_op_instr *instr = NULL;
1328 unsigned int op_id, i;
1329 int ret;
1330
1331 ret = stm32_fmc2_select_chip(chip, op->cs);
1332 if (ret)
1333 return ret;
1334
1335 if (check_only)
1336 return ret;
1337
1338 for (op_id = 0; op_id < op->ninstrs; op_id++) {
1339 instr = &op->instrs[op_id];
1340
1341 switch (instr->type) {
1342 case NAND_OP_CMD_INSTR:
1343 writeb_relaxed(instr->ctx.cmd.opcode,
1344 fmc2->cmd_base[fmc2->cs_sel]);
1345 break;
1346
1347 case NAND_OP_ADDR_INSTR:
1348 for (i = 0; i < instr->ctx.addr.naddrs; i++)
1349 writeb_relaxed(instr->ctx.addr.addrs[i],
1350 fmc2->addr_base[fmc2->cs_sel]);
1351 break;
1352
1353 case NAND_OP_DATA_IN_INSTR:
1354 stm32_fmc2_read_data(chip, instr->ctx.data.buf.in,
1355 instr->ctx.data.len,
1356 instr->ctx.data.force_8bit);
1357 break;
1358
1359 case NAND_OP_DATA_OUT_INSTR:
1360 stm32_fmc2_write_data(chip, instr->ctx.data.buf.out,
1361 instr->ctx.data.len,
1362 instr->ctx.data.force_8bit);
1363 break;
1364
1365 case NAND_OP_WAITRDY_INSTR:
1366 ret = nand_soft_waitrdy(chip,
1367 instr->ctx.waitrdy.timeout_ms);
1368 break;
1369 }
1370 }
1371
1372 return ret;
1373}
1374
1375/* Controller initialization */
1376static void stm32_fmc2_init(struct stm32_fmc2_nfc *fmc2)
1377{
1378 u32 pcr = readl_relaxed(fmc2->io_base + FMC2_PCR);
1379 u32 bcr1 = readl_relaxed(fmc2->io_base + FMC2_BCR1);
1380
1381 /* Set CS used to undefined */
1382 fmc2->cs_sel = -1;
1383
1384 /* Enable wait feature and nand flash memory bank */
1385 pcr |= FMC2_PCR_PWAITEN;
1386 pcr |= FMC2_PCR_PBKEN;
1387
1388 /* Set buswidth to 8-bit mode for identification */
1389 pcr &= ~FMC2_PCR_PWID_MASK;
1390
1391 /* ECC logic is disabled */
1392 pcr &= ~FMC2_PCR_ECCEN;
1393
1394 /* Default mode */
1395 pcr &= ~FMC2_PCR_ECCALG;
1396 pcr &= ~FMC2_PCR_BCHECC;
1397 pcr &= ~FMC2_PCR_WEN;
1398
1399 /* Set default ECC sector size */
1400 pcr &= ~FMC2_PCR_ECCSS_MASK;
1401 pcr |= FMC2_PCR_ECCSS(FMC2_PCR_ECCSS_2048);
1402
1403 /* Set default tclr/tar timings */
1404 pcr &= ~FMC2_PCR_TCLR_MASK;
1405 pcr |= FMC2_PCR_TCLR(FMC2_PCR_TCLR_DEFAULT);
1406 pcr &= ~FMC2_PCR_TAR_MASK;
1407 pcr |= FMC2_PCR_TAR(FMC2_PCR_TAR_DEFAULT);
1408
1409 /* Enable FMC2 controller */
1410 bcr1 |= FMC2_BCR1_FMC2EN;
1411
1412 writel_relaxed(bcr1, fmc2->io_base + FMC2_BCR1);
1413 writel_relaxed(pcr, fmc2->io_base + FMC2_PCR);
1414 writel_relaxed(FMC2_PMEM_DEFAULT, fmc2->io_base + FMC2_PMEM);
1415 writel_relaxed(FMC2_PATT_DEFAULT, fmc2->io_base + FMC2_PATT);
1416}
1417
1418/* Controller timings */
1419static void stm32_fmc2_calc_timings(struct nand_chip *chip,
1420 const struct nand_sdr_timings *sdrt)
1421{
1422 struct stm32_fmc2_nfc *fmc2 = to_stm32_nfc(chip->controller);
1423 struct stm32_fmc2_nand *nand = to_fmc2_nand(chip);
1424 struct stm32_fmc2_timings *tims = &nand->timings;
1425 unsigned long hclk = clk_get_rate(fmc2->clk);
1426 unsigned long hclkp = NSEC_PER_SEC / (hclk / 1000);
1427 int tar, tclr, thiz, twait, tset_mem, tset_att, thold_mem, thold_att;
1428
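 /*
 * hclkp is the HCLK period expressed in picoseconds, the unit used by
 * nand_sdr_timings, e.g. an illustrative 200 MHz HCLK gives
 * hclkp = 10^9 / 200000 = 5000 ps.
 */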
1429 tar = hclkp;
1430 if (tar < sdrt->tAR_min)
1431 tar = sdrt->tAR_min;
1432 tims->tar = DIV_ROUND_UP(tar, hclkp) - 1;
1433 if (tims->tar > FMC2_PCR_TIMING_MASK)
1434 tims->tar = FMC2_PCR_TIMING_MASK;
1435
1436 tclr = hclkp;
1437 if (tclr < sdrt->tCLR_min)
1438 tclr = sdrt->tCLR_min;
1439 tims->tclr = DIV_ROUND_UP(tclr, hclkp) - 1;
1440 if (tims->tclr > FMC2_PCR_TIMING_MASK)
1441 tims->tclr = FMC2_PCR_TIMING_MASK;
1442
1443 tims->thiz = FMC2_THIZ;
1444 thiz = (tims->thiz + 1) * hclkp;
1445
1446 /*
1447 * tWAIT > tRP
1448 * tWAIT > tWP
1449 * tWAIT > tREA + tIO
1450 */
1451 twait = hclkp;
1452 if (twait < sdrt->tRP_min)
1453 twait = sdrt->tRP_min;
1454 if (twait < sdrt->tWP_min)
1455 twait = sdrt->tWP_min;
1456 if (twait < sdrt->tREA_max + FMC2_TIO)
1457 twait = sdrt->tREA_max + FMC2_TIO;
1458 tims->twait = DIV_ROUND_UP(twait, hclkp);
1459 if (tims->twait == 0)
1460 tims->twait = 1;
1461 else if (tims->twait > FMC2_PMEM_PATT_TIMING_MASK)
1462 tims->twait = FMC2_PMEM_PATT_TIMING_MASK;
1463
1464 /*
1465 * tSETUP_MEM > tCS - tWAIT
1466 * tSETUP_MEM > tALS - tWAIT
1467 * tSETUP_MEM > tDS - (tWAIT - tHIZ)
1468 */
1469 tset_mem = hclkp;
1470 if (sdrt->tCS_min > twait && (tset_mem < sdrt->tCS_min - twait))
1471 tset_mem = sdrt->tCS_min - twait;
1472 if (sdrt->tALS_min > twait && (tset_mem < sdrt->tALS_min - twait))
1473 tset_mem = sdrt->tALS_min - twait;
1474 if (twait > thiz && (sdrt->tDS_min > twait - thiz) &&
1475 (tset_mem < sdrt->tDS_min - (twait - thiz)))
1476 tset_mem = sdrt->tDS_min - (twait - thiz);
1477 tims->tset_mem = DIV_ROUND_UP(tset_mem, hclkp);
1478 if (tims->tset_mem == 0)
1479 tims->tset_mem = 1;
1480 else if (tims->tset_mem > FMC2_PMEM_PATT_TIMING_MASK)
1481 tims->tset_mem = FMC2_PMEM_PATT_TIMING_MASK;
1482
1483 /*
1484 * tHOLD_MEM > tCH
1485 * tHOLD_MEM > tREH - tSETUP_MEM
1486 * tHOLD_MEM > max(tRC, tWC) - (tSETUP_MEM + tWAIT)
1487 */
1488 thold_mem = hclkp;
1489 if (thold_mem < sdrt->tCH_min)
1490 thold_mem = sdrt->tCH_min;
1491 if (sdrt->tREH_min > tset_mem &&
1492 (thold_mem < sdrt->tREH_min - tset_mem))
1493 thold_mem = sdrt->tREH_min - tset_mem;
1494 if ((sdrt->tRC_min > tset_mem + twait) &&
1495 (thold_mem < sdrt->tRC_min - (tset_mem + twait)))
1496 thold_mem = sdrt->tRC_min - (tset_mem + twait);
1497 if ((sdrt->tWC_min > tset_mem + twait) &&
1498 (thold_mem < sdrt->tWC_min - (tset_mem + twait)))
1499 thold_mem = sdrt->tWC_min - (tset_mem + twait);
1500 tims->thold_mem = DIV_ROUND_UP(thold_mem, hclkp);
1501 if (tims->thold_mem == 0)
1502 tims->thold_mem = 1;
1503 else if (tims->thold_mem > FMC2_PMEM_PATT_TIMING_MASK)
1504 tims->thold_mem = FMC2_PMEM_PATT_TIMING_MASK;
1505
1506 /*
1507 * tSETUP_ATT > tCS - tWAIT
1508 * tSETUP_ATT > tCLS - tWAIT
1509 * tSETUP_ATT > tALS - tWAIT
1510 * tSETUP_ATT > tRHW - tHOLD_MEM
1511 * tSETUP_ATT > tDS - (tWAIT - tHIZ)
1512 */
1513 tset_att = hclkp;
1514 if (sdrt->tCS_min > twait && (tset_att < sdrt->tCS_min - twait))
1515 tset_att = sdrt->tCS_min - twait;
1516 if (sdrt->tCLS_min > twait && (tset_att < sdrt->tCLS_min - twait))
1517 tset_att = sdrt->tCLS_min - twait;
1518 if (sdrt->tALS_min > twait && (tset_att < sdrt->tALS_min - twait))
1519 tset_att = sdrt->tALS_min - twait;
1520 if (sdrt->tRHW_min > thold_mem &&
1521 (tset_att < sdrt->tRHW_min - thold_mem))
1522 tset_att = sdrt->tRHW_min - thold_mem;
1523 if (twait > thiz && (sdrt->tDS_min > twait - thiz) &&
1524 (tset_att < sdrt->tDS_min - (twait - thiz)))
1525 tset_att = sdrt->tDS_min - (twait - thiz);
1526 tims->tset_att = DIV_ROUND_UP(tset_att, hclkp);
1527 if (tims->tset_att == 0)
1528 tims->tset_att = 1;
1529 else if (tims->tset_att > FMC2_PMEM_PATT_TIMING_MASK)
1530 tims->tset_att = FMC2_PMEM_PATT_TIMING_MASK;
1531
1532 /*
1533 * tHOLD_ATT > tALH
1534 * tHOLD_ATT > tCH
1535 * tHOLD_ATT > tCLH
1536 * tHOLD_ATT > tCOH
1537 * tHOLD_ATT > tDH
1538 * tHOLD_ATT > tWB + tIO + tSYNC - tSETUP_MEM
1539 * tHOLD_ATT > tADL - tSETUP_MEM
1540 * tHOLD_ATT > tWH - tSETUP_MEM
1541 * tHOLD_ATT > tWHR - tSETUP_MEM
1542 * tHOLD_ATT > tRC - (tSETUP_ATT + tWAIT)
1543 * tHOLD_ATT > tWC - (tSETUP_ATT + tWAIT)
1544 */
1545 thold_att = hclkp;
1546 if (thold_att < sdrt->tALH_min)
1547 thold_att = sdrt->tALH_min;
1548 if (thold_att < sdrt->tCH_min)
1549 thold_att = sdrt->tCH_min;
1550 if (thold_att < sdrt->tCLH_min)
1551 thold_att = sdrt->tCLH_min;
1552 if (thold_att < sdrt->tCOH_min)
1553 thold_att = sdrt->tCOH_min;
1554 if (thold_att < sdrt->tDH_min)
1555 thold_att = sdrt->tDH_min;
1556 if ((sdrt->tWB_max + FMC2_TIO + FMC2_TSYNC > tset_mem) &&
1557 (thold_att < sdrt->tWB_max + FMC2_TIO + FMC2_TSYNC - tset_mem))
1558 thold_att = sdrt->tWB_max + FMC2_TIO + FMC2_TSYNC - tset_mem;
1559 if (sdrt->tADL_min > tset_mem &&
1560 (thold_att < sdrt->tADL_min - tset_mem))
1561 thold_att = sdrt->tADL_min - tset_mem;
1562 if (sdrt->tWH_min > tset_mem &&
1563 (thold_att < sdrt->tWH_min - tset_mem))
1564 thold_att = sdrt->tWH_min - tset_mem;
1565 if (sdrt->tWHR_min > tset_mem &&
1566 (thold_att < sdrt->tWHR_min - tset_mem))
1567 thold_att = sdrt->tWHR_min - tset_mem;
1568 if ((sdrt->tRC_min > tset_att + twait) &&
1569 (thold_att < sdrt->tRC_min - (tset_att + twait)))
1570 thold_att = sdrt->tRC_min - (tset_att + twait);
1571 if ((sdrt->tWC_min > tset_att + twait) &&
1572 (thold_att < sdrt->tWC_min - (tset_att + twait)))
1573 thold_att = sdrt->tWC_min - (tset_att + twait);
1574 tims->thold_att = DIV_ROUND_UP(thold_att, hclkp);
1575 if (tims->thold_att == 0)
1576 tims->thold_att = 1;
1577 else if (tims->thold_att > FMC2_PMEM_PATT_TIMING_MASK)
1578 tims->thold_att = FMC2_PMEM_PATT_TIMING_MASK;
1579}
1580
1581static int stm32_fmc2_setup_interface(struct nand_chip *chip, int chipnr,
1582 const struct nand_data_interface *conf)
1583{
1584 const struct nand_sdr_timings *sdrt;
1585
1586 sdrt = nand_get_sdr_timings(conf);
1587 if (IS_ERR(sdrt))
1588 return PTR_ERR(sdrt);
1589
1590 if (chipnr == NAND_DATA_IFACE_CHECK_ONLY)
1591 return 0;
1592
1593 stm32_fmc2_calc_timings(chip, sdrt);
1594
1595 /* Apply timings */
1596 stm32_fmc2_timings_init(chip);
1597
1598 return 0;
1599}
1600
1601/* DMA configuration */
1602static int stm32_fmc2_dma_setup(struct stm32_fmc2_nfc *fmc2)
1603{
1604 int ret;
1605
1606 fmc2->dma_tx_ch = dma_request_slave_channel(fmc2->dev, "tx");
1607 fmc2->dma_rx_ch = dma_request_slave_channel(fmc2->dev, "rx");
1608 fmc2->dma_ecc_ch = dma_request_slave_channel(fmc2->dev, "ecc");
1609
1610 if (!fmc2->dma_tx_ch || !fmc2->dma_rx_ch || !fmc2->dma_ecc_ch) {
1611 dev_warn(fmc2->dev, "DMAs not defined in the device tree, polling mode is used\n");
1612 return 0;
1613 }
1614
1615 ret = sg_alloc_table(&fmc2->dma_ecc_sg, FMC2_MAX_SG, GFP_KERNEL);
1616 if (ret)
1617 return ret;
1618
1619 /* Allocate a buffer to store ECC status registers */
1620 fmc2->ecc_buf = devm_kzalloc(fmc2->dev, FMC2_MAX_ECC_BUF_LEN,
1621 GFP_KERNEL);
1622 if (!fmc2->ecc_buf)
1623 return -ENOMEM;
1624
1625 ret = sg_alloc_table(&fmc2->dma_data_sg, FMC2_MAX_SG, GFP_KERNEL);
1626 if (ret)
1627 return ret;
1628
1629 init_completion(&fmc2->dma_data_complete);
1630 init_completion(&fmc2->dma_ecc_complete);
1631
1632 return 0;
1633}
1634
1635/* NAND callbacks setup */
1636static void stm32_fmc2_nand_callbacks_setup(struct nand_chip *chip)
1637{
1638 struct stm32_fmc2_nfc *fmc2 = to_stm32_nfc(chip->controller);
1639
1640 /*
1641 * Specific callbacks to read/write a page depending on
1642 * the mode (polling/sequencer) and the algo used (Hamming, BCH).
1643 */
1644 if (fmc2->dma_tx_ch && fmc2->dma_rx_ch && fmc2->dma_ecc_ch) {
1645 /* DMA => use sequencer mode callbacks */
1646 chip->ecc.correct = stm32_fmc2_sequencer_correct;
1647 chip->ecc.write_page = stm32_fmc2_sequencer_write_page;
1648 chip->ecc.read_page = stm32_fmc2_sequencer_read_page;
1649 chip->ecc.write_page_raw = stm32_fmc2_sequencer_write_page_raw;
1650 chip->ecc.read_page_raw = stm32_fmc2_sequencer_read_page_raw;
1651 } else {
1652 /* No DMA => use polling mode callbacks */
1653 chip->ecc.hwctl = stm32_fmc2_hwctl;
1654 if (chip->ecc.strength == FMC2_ECC_HAM) {
1655 /* Hamming is used */
1656 chip->ecc.calculate = stm32_fmc2_ham_calculate;
1657 chip->ecc.correct = stm32_fmc2_ham_correct;
1658 chip->ecc.options |= NAND_ECC_GENERIC_ERASED_CHECK;
1659 } else {
1660 /* BCH is used */
1661 chip->ecc.calculate = stm32_fmc2_bch_calculate;
1662 chip->ecc.correct = stm32_fmc2_bch_correct;
1663 chip->ecc.read_page = stm32_fmc2_read_page;
1664 }
1665 }
1666
1667 /* Specific configurations depending on the algo used */
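 /*
 * Hamming uses 3 ECC bytes per 512-byte sector, BCH4 uses 7 and BCH8
 * uses 13; on a 16-bit bus an extra byte is added, keeping the
 * per-sector ECC size even.
 */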
1668 if (chip->ecc.strength == FMC2_ECC_HAM)
1669 chip->ecc.bytes = chip->options & NAND_BUSWIDTH_16 ? 4 : 3;
1670 else if (chip->ecc.strength == FMC2_ECC_BCH8)
1671 chip->ecc.bytes = chip->options & NAND_BUSWIDTH_16 ? 14 : 13;
1672 else
1673 chip->ecc.bytes = chip->options & NAND_BUSWIDTH_16 ? 8 : 7;
1674}
1675
1676/* FMC2 layout */
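/*
 * Resulting OOB layout: bytes [0..1] hold the bad block marker, the next
 * ecc->total bytes hold the ECC codes, and the remaining bytes are left
 * free for the upper layers.
 */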
1677static int stm32_fmc2_nand_ooblayout_ecc(struct mtd_info *mtd, int section,
1678 struct mtd_oob_region *oobregion)
1679{
1680 struct nand_chip *chip = mtd_to_nand(mtd);
1681 struct nand_ecc_ctrl *ecc = &chip->ecc;
1682
1683 if (section)
1684 return -ERANGE;
1685
1686 oobregion->length = ecc->total;
1687 oobregion->offset = FMC2_BBM_LEN;
1688
1689 return 0;
1690}
1691
1692static int stm32_fmc2_nand_ooblayout_free(struct mtd_info *mtd, int section,
1693 struct mtd_oob_region *oobregion)
1694{
1695 struct nand_chip *chip = mtd_to_nand(mtd);
1696 struct nand_ecc_ctrl *ecc = &chip->ecc;
1697
1698 if (section)
1699 return -ERANGE;
1700
1701 oobregion->length = mtd->oobsize - ecc->total - FMC2_BBM_LEN;
1702 oobregion->offset = ecc->total + FMC2_BBM_LEN;
1703
1704 return 0;
1705}
1706
1707static const struct mtd_ooblayout_ops stm32_fmc2_nand_ooblayout_ops = {
1708 .ecc = stm32_fmc2_nand_ooblayout_ecc,
1709 .free = stm32_fmc2_nand_ooblayout_free,
1710};
1711
1712/* FMC2 caps */
1713static int stm32_fmc2_calc_ecc_bytes(int step_size, int strength)
1714{
1715 /* Hamming */
1716 if (strength == FMC2_ECC_HAM)
1717 return 4;
1718
1719 /* BCH8 */
1720 if (strength == FMC2_ECC_BCH8)
1721 return 14;
1722
1723 /* BCH4 */
1724 return 8;
1725}
1726
1727NAND_ECC_CAPS_SINGLE(stm32_fmc2_ecc_caps, stm32_fmc2_calc_ecc_bytes,
1728 FMC2_ECC_STEP_SIZE,
1729 FMC2_ECC_HAM, FMC2_ECC_BCH4, FMC2_ECC_BCH8);
1730
1731/* FMC2 controller ops */
1732static int stm32_fmc2_attach_chip(struct nand_chip *chip)
1733{
1734 struct stm32_fmc2_nfc *fmc2 = to_stm32_nfc(chip->controller);
1735 struct mtd_info *mtd = nand_to_mtd(chip);
1736 int ret;
1737
1738 /*
1739 * Only NAND_ECC_HW mode is actually supported
1740 * Hamming => ecc.strength = 1
1741 * BCH4 => ecc.strength = 4
1742 * BCH8 => ecc.strength = 8
1743 * ECC sector size = 512
1744 */
1745 if (chip->ecc.mode != NAND_ECC_HW) {
1746 dev_err(fmc2->dev, "nand_ecc_mode is not well defined in the DT\n");
1747 return -EINVAL;
1748 }
1749
1750 ret = nand_ecc_choose_conf(chip, &stm32_fmc2_ecc_caps,
1751 mtd->oobsize - FMC2_BBM_LEN);
1752 if (ret) {
1753 dev_err(fmc2->dev, "no valid ECC settings set\n");
1754 return ret;
1755 }
1756
1757 if (mtd->writesize / chip->ecc.size > FMC2_MAX_SG) {
1758 dev_err(fmc2->dev, "nand page size is not supported\n");
1759 return -EINVAL;
1760 }
1761
1762 if (chip->bbt_options & NAND_BBT_USE_FLASH)
1763 chip->bbt_options |= NAND_BBT_NO_OOB;
1764
1765 /* NAND callbacks setup */
1766 stm32_fmc2_nand_callbacks_setup(chip);
1767
1768 /* Define ECC layout */
1769 mtd_set_ooblayout(mtd, &stm32_fmc2_nand_ooblayout_ops);
1770
1771 /* Configure bus width to 16-bit */
1772 if (chip->options & NAND_BUSWIDTH_16)
1773 stm32_fmc2_set_buswidth_16(fmc2, true);
1774
1775 return 0;
1776}
1777
1778static const struct nand_controller_ops stm32_fmc2_nand_controller_ops = {
1779 .attach_chip = stm32_fmc2_attach_chip,
1780 .exec_op = stm32_fmc2_exec_op,
1781 .setup_data_interface = stm32_fmc2_setup_interface,
1782};
1783
1784/* FMC2 probe */
1785static int stm32_fmc2_parse_child(struct stm32_fmc2_nfc *fmc2,
1786 struct device_node *dn)
1787{
1788 struct stm32_fmc2_nand *nand = &fmc2->nand;
1789 u32 cs;
1790 int ret, i;
1791
1792 if (!of_get_property(dn, "reg", &nand->ncs))
1793 return -EINVAL;
1794
1795 nand->ncs /= sizeof(u32);
1796 if (!nand->ncs) {
1797 dev_err(fmc2->dev, "invalid reg property size\n");
1798 return -EINVAL;
1799 }
1800
1801 for (i = 0; i < nand->ncs; i++) {
1802 ret = of_property_read_u32_index(dn, "reg", i, &cs);
1803 if (ret) {
1804 dev_err(fmc2->dev, "could not retrieve reg property: %d\n",
1805 ret);
1806 return ret;
1807 }
1808
1809 if (cs >= FMC2_MAX_CE) {
1810 dev_err(fmc2->dev, "invalid reg value: %d\n", cs);
1811 return -EINVAL;
1812 }
1813
1814 if (fmc2->cs_assigned & BIT(cs)) {
1815 dev_err(fmc2->dev, "cs already assigned: %d\n", cs);
1816 return -EINVAL;
1817 }
1818
1819 fmc2->cs_assigned |= BIT(cs);
1820 nand->cs_used[i] = cs;
1821 }
1822
1823 nand_set_flash_node(&nand->chip, dn);
1824
1825 return 0;
1826}
1827
1828static int stm32_fmc2_parse_dt(struct stm32_fmc2_nfc *fmc2)
1829{
1830 struct device_node *dn = fmc2->dev->of_node;
1831 struct device_node *child;
1832 int nchips = of_get_child_count(dn);
1833 int ret = 0;
1834
1835 if (!nchips) {
1836 dev_err(fmc2->dev, "NAND chip not defined\n");
1837 return -EINVAL;
1838 }
1839
1840 if (nchips > 1) {
1841 dev_err(fmc2->dev, "too many NAND chips defined\n");
1842 return -EINVAL;
1843 }
1844
1845 for_each_child_of_node(dn, child) {
1846 ret = stm32_fmc2_parse_child(fmc2, child);
1847 if (ret < 0) {
1848 of_node_put(child);
1849 return ret;
1850 }
1851 }
1852
1853 return ret;
1854}
1855
1856static int stm32_fmc2_probe(struct platform_device *pdev)
1857{
1858 struct device *dev = &pdev->dev;
1859 struct reset_control *rstc;
1860 struct stm32_fmc2_nfc *fmc2;
1861 struct stm32_fmc2_nand *nand;
1862 struct resource *res;
1863 struct mtd_info *mtd;
1864 struct nand_chip *chip;
1865 int chip_cs, mem_region, ret, irq;
1866
1867 fmc2 = devm_kzalloc(dev, sizeof(*fmc2), GFP_KERNEL);
1868 if (!fmc2)
1869 return -ENOMEM;
1870
1871 fmc2->dev = dev;
1872 nand_controller_init(&fmc2->base);
1873 fmc2->base.ops = &stm32_fmc2_nand_controller_ops;
1874
1875 ret = stm32_fmc2_parse_dt(fmc2);
1876 if (ret)
1877 return ret;
1878
1879 res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
1880 fmc2->io_base = devm_ioremap_resource(dev, res);
1881 if (IS_ERR(fmc2->io_base))
1882 return PTR_ERR(fmc2->io_base);
1883
1884 fmc2->io_phys_addr = res->start;
1885
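 /*
 * Memory resource 0 holds the controller registers; resources 1-3 and
 * 4-6 are the data/cmd/addr windows of CS0 and CS1 respectively.
 */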
1886 for (chip_cs = 0, mem_region = 1; chip_cs < FMC2_MAX_CE;
1887 chip_cs++, mem_region += 3) {
1888 if (!(fmc2->cs_assigned & BIT(chip_cs)))
1889 continue;
1890
1891 res = platform_get_resource(pdev, IORESOURCE_MEM, mem_region);
1892 fmc2->data_base[chip_cs] = devm_ioremap_resource(dev, res);
1893 if (IS_ERR(fmc2->data_base[chip_cs]))
1894 return PTR_ERR(fmc2->data_base[chip_cs]);
1895
1896 fmc2->data_phys_addr[chip_cs] = res->start;
1897
1898 res = platform_get_resource(pdev, IORESOURCE_MEM,
1899 mem_region + 1);
1900 fmc2->cmd_base[chip_cs] = devm_ioremap_resource(dev, res);
1901 if (IS_ERR(fmc2->cmd_base[chip_cs]))
1902 return PTR_ERR(fmc2->cmd_base[chip_cs]);
1903
1904 res = platform_get_resource(pdev, IORESOURCE_MEM,
1905 mem_region + 2);
1906 fmc2->addr_base[chip_cs] = devm_ioremap_resource(dev, res);
1907 if (IS_ERR(fmc2->addr_base[chip_cs]))
1908 return PTR_ERR(fmc2->addr_base[chip_cs]);
1909 }
1910
1911 irq = platform_get_irq(pdev, 0);
1912 ret = devm_request_irq(dev, irq, stm32_fmc2_irq, 0,
1913 dev_name(dev), fmc2);
1914 if (ret) {
1915 dev_err(dev, "failed to request irq\n");
1916 return ret;
1917 }
1918
1919 init_completion(&fmc2->complete);
1920
1921 fmc2->clk = devm_clk_get(dev, NULL);
1922 if (IS_ERR(fmc2->clk))
1923 return PTR_ERR(fmc2->clk);
1924
1925 ret = clk_prepare_enable(fmc2->clk);
1926 if (ret) {
1927 dev_err(dev, "cannot enable the clock\n");
1928 return ret;
1929 }
1930
1931 rstc = devm_reset_control_get(dev, NULL);
1932 if (!IS_ERR(rstc)) {
1933 reset_control_assert(rstc);
1934 reset_control_deassert(rstc);
1935 }
1936
1937 /* DMA setup */
1938 ret = stm32_fmc2_dma_setup(fmc2);
1939 if (ret)
1940 return ret;
1941
1942 /* FMC2 init routine */
1943 stm32_fmc2_init(fmc2);
1944
1945 nand = &fmc2->nand;
1946 chip = &nand->chip;
1947 mtd = nand_to_mtd(chip);
1948 mtd->dev.parent = dev;
1949
1950 chip->controller = &fmc2->base;
1951 chip->options |= NAND_BUSWIDTH_AUTO | NAND_NO_SUBPAGE_WRITE |
1952 NAND_USE_BOUNCE_BUFFER;
1953
1954 /* Default ECC settings */
1955 chip->ecc.mode = NAND_ECC_HW;
1956 chip->ecc.size = FMC2_ECC_STEP_SIZE;
1957 chip->ecc.strength = FMC2_ECC_BCH8;
1958
1959 /* Scan to find existence of the device */
1960 ret = nand_scan(chip, nand->ncs);
1961 if (ret)
1962 goto err_scan;
1963
1964 ret = mtd_device_register(mtd, NULL, 0);
1965 if (ret)
1966 goto err_device_register;
1967
1968 platform_set_drvdata(pdev, fmc2);
1969
1970 return 0;
1971
1972err_device_register:
1973 nand_cleanup(chip);
1974
1975err_scan:
1976 if (fmc2->dma_ecc_ch)
1977 dma_release_channel(fmc2->dma_ecc_ch);
1978 if (fmc2->dma_tx_ch)
1979 dma_release_channel(fmc2->dma_tx_ch);
1980 if (fmc2->dma_rx_ch)
1981 dma_release_channel(fmc2->dma_rx_ch);
1982
1983 sg_free_table(&fmc2->dma_data_sg);
1984 sg_free_table(&fmc2->dma_ecc_sg);
1985
1986 clk_disable_unprepare(fmc2->clk);
1987
1988 return ret;
1989}
1990
1991static int stm32_fmc2_remove(struct platform_device *pdev)
1992{
1993 struct stm32_fmc2_nfc *fmc2 = platform_get_drvdata(pdev);
1994 struct stm32_fmc2_nand *nand = &fmc2->nand;
1995
1996 nand_release(&nand->chip);
1997
1998 if (fmc2->dma_ecc_ch)
1999 dma_release_channel(fmc2->dma_ecc_ch);
2000 if (fmc2->dma_tx_ch)
2001 dma_release_channel(fmc2->dma_tx_ch);
2002 if (fmc2->dma_rx_ch)
2003 dma_release_channel(fmc2->dma_rx_ch);
2004
2005 sg_free_table(&fmc2->dma_data_sg);
2006 sg_free_table(&fmc2->dma_ecc_sg);
2007
2008 clk_disable_unprepare(fmc2->clk);
2009
2010 return 0;
2011}
2012
2013static int __maybe_unused stm32_fmc2_suspend(struct device *dev)
2014{
2015 struct stm32_fmc2_nfc *fmc2 = dev_get_drvdata(dev);
2016
2017 clk_disable_unprepare(fmc2->clk);
2018
2019 pinctrl_pm_select_sleep_state(dev);
2020
2021 return 0;
2022}
2023
2024static int __maybe_unused stm32_fmc2_resume(struct device *dev)
2025{
2026 struct stm32_fmc2_nfc *fmc2 = dev_get_drvdata(dev);
2027 struct stm32_fmc2_nand *nand = &fmc2->nand;
2028 int chip_cs, ret;
2029
2030 pinctrl_pm_select_default_state(dev);
2031
2032 ret = clk_prepare_enable(fmc2->clk);
2033 if (ret) {
2034 dev_err(dev, "cannot enable the clock\n");
2035 return ret;
2036 }
2037
2038 stm32_fmc2_init(fmc2);
2039
2040 for (chip_cs = 0; chip_cs < FMC2_MAX_CE; chip_cs++) {
2041 if (!(fmc2->cs_assigned & BIT(chip_cs)))
2042 continue;
2043
2044 nand_reset(&nand->chip, chip_cs);
2045 }
2046
2047 return 0;
2048}
2049
2050static SIMPLE_DEV_PM_OPS(stm32_fmc2_pm_ops, stm32_fmc2_suspend,
2051 stm32_fmc2_resume);
2052
2053static const struct of_device_id stm32_fmc2_match[] = {
2054 {.compatible = "st,stm32mp15-fmc2"},
2055 {}
2056};
2057MODULE_DEVICE_TABLE(of, stm32_fmc2_match);
2058
2059static struct platform_driver stm32_fmc2_driver = {
2060 .probe = stm32_fmc2_probe,
2061 .remove = stm32_fmc2_remove,
2062 .driver = {
2063 .name = "stm32_fmc2_nand",
2064 .of_match_table = stm32_fmc2_match,
2065 .pm = &stm32_fmc2_pm_ops,
2066 },
2067};
2068module_platform_driver(stm32_fmc2_driver);
2069
2070MODULE_ALIAS("platform:stm32_fmc2_nand");
2071MODULE_AUTHOR("Christophe Kerello <christophe.kerello@st.com>");
2072MODULE_DESCRIPTION("STMicroelectronics STM32 FMC2 NAND driver");
2073MODULE_LICENSE("GPL v2");