Lines Matching defs:nandc

207 #define dev_cmd_reg_addr(nandc, reg) ((nandc)->props->dev_cmd_reg_start + (reg))
567 static void free_bam_transaction(struct qcom_nand_controller *nandc)
569 struct bam_transaction *bam_txn = nandc->bam_txn;
571 devm_kfree(nandc->dev, bam_txn);
576 alloc_bam_transaction(struct qcom_nand_controller *nandc)
580 unsigned int num_cw = nandc->max_cwperpage;
589 bam_txn_buf = devm_kzalloc(nandc->dev, bam_txn_size, GFP_KERNEL);
612 static void clear_bam_transaction(struct qcom_nand_controller *nandc)
614 struct bam_transaction *bam_txn = nandc->bam_txn;
616 if (!nandc->props->is_bam)
630 sg_init_table(bam_txn->cmd_sgl, nandc->max_cwperpage *
632 sg_init_table(bam_txn->data_sgl, nandc->max_cwperpage *
668 static inline u32 nandc_read(struct qcom_nand_controller *nandc, int offset)
670 return ioread32(nandc->base + offset);
673 static inline void nandc_write(struct qcom_nand_controller *nandc, int offset,
676 iowrite32(val, nandc->base + offset);
679 static inline void nandc_read_buffer_sync(struct qcom_nand_controller *nandc,
682 if (!nandc->props->is_bam)
686 dma_sync_single_for_cpu(nandc->dev, nandc->reg_read_dma,
688 sizeof(*nandc->reg_read_buf),
691 dma_sync_single_for_device(nandc->dev, nandc->reg_read_dma,
693 sizeof(*nandc->reg_read_buf),
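
For orientation: the three helpers above (nandc_read(), nandc_write(), nandc_read_buffer_sync()) are the only accessors for the controller's MMIO window and for the DMA-mapped register read-back buffer. Below is a minimal sketch of how the matched qcom_nandc_setup() lines (file lines 3160-3176) use the first two; the control flow between the matched lines is reconstructed here and is approximate, not quoted from the source.

	/* Sketch: enable BAM mode with a read-modify-write of NAND_CTRL, or
	 * fall back to the legacy data-mover chip select on ADM-based
	 * controllers. Surrounding checks in the real function are omitted. */
	u32 nand_ctrl;

	if (nandc->props->is_bam) {
		nand_ctrl = nandc_read(nandc, NAND_CTRL);
		nandc_write(nandc, NAND_CTRL, nand_ctrl | BAM_MODE_EN);
	} else {
		nandc_write(nandc, NAND_FLASH_CHIP_SELECT, DM_EN);
	}
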
754 struct qcom_nand_controller *nandc = get_qcom_nand_controller(chip);
755 struct nandc_regs *regs = nandc->regs;
774 struct qcom_nand_controller *nandc = get_qcom_nand_controller(chip);
778 if (nandc->props->qpic_v2 && qcom_nandc_is_last_cw(ecc, cw))
783 if (nandc->props->qpic_v2 && qcom_nandc_is_last_cw(ecc, cw))
815 struct qcom_nand_controller *nandc = get_qcom_nand_controller(chip);
844 if (!nandc->props->qpic_v2)
860 static int prepare_bam_async_desc(struct qcom_nand_controller *nandc,
868 struct bam_transaction *bam_txn = nandc->bam_txn;
876 if (chan == nandc->cmd_chan) {
882 } else if (chan == nandc->tx_chan) {
897 ret = dma_map_sg(nandc->dev, sgl, sgl_cnt, desc->dir);
899 dev_err(nandc->dev, "failure in mapping desc\n");
911 dev_err(nandc->dev, "failure in prep desc\n");
912 dma_unmap_sg(nandc->dev, sgl, sgl_cnt, desc->dir);
920 if (chan == nandc->cmd_chan)
925 list_add_tail(&desc->node, &nandc->desc_list);
939 static int prep_bam_dma_desc_cmd(struct qcom_nand_controller *nandc, bool read,
946 struct bam_transaction *bam_txn = nandc->bam_txn;
954 nandc_reg_phys(nandc, reg_off + 4 * i),
956 reg_buf_dma_addr(nandc,
960 nandc_reg_phys(nandc, reg_off + 4 * i),
979 ret = prepare_bam_async_desc(nandc, nandc->cmd_chan,
994 static int prep_bam_dma_desc_data(struct qcom_nand_controller *nandc, bool read,
999 struct bam_transaction *bam_txn = nandc->bam_txn;
1015 ret = prepare_bam_async_desc(nandc, nandc->tx_chan,
1025 static int prep_adm_dma_desc(struct qcom_nand_controller *nandc, bool read,
1053 ret = dma_map_sg(nandc->dev, sgl, 1, desc->dir);
1064 slave_conf.src_addr = nandc->base_dma + reg_off;
1065 if (nandc->data_crci) {
1066 periph_conf.crci = nandc->data_crci;
1072 slave_conf.dst_addr = nandc->base_dma + reg_off;
1073 if (nandc->cmd_crci) {
1074 periph_conf.crci = nandc->cmd_crci;
1080 ret = dmaengine_slave_config(nandc->chan, &slave_conf);
1082 dev_err(nandc->dev, "failed to configure dma channel\n");
1086 dma_desc = dmaengine_prep_slave_sg(nandc->chan, sgl, 1, dir_eng, 0);
1088 dev_err(nandc->dev, "failed to prepare desc\n");
1095 list_add_tail(&desc->node, &nandc->desc_list);
1112 static int read_reg_dma(struct qcom_nand_controller *nandc, int first,
1118 vaddr = nandc->reg_read_buf + nandc->reg_read_pos;
1119 nandc->reg_read_pos += num_regs;
1122 first = dev_cmd_reg_addr(nandc, first);
1124 if (nandc->props->is_bam)
1125 return prep_bam_dma_desc_cmd(nandc, true, first, vaddr,
1131 return prep_adm_dma_desc(nandc, true, first, vaddr,
1143 static int write_reg_dma(struct qcom_nand_controller *nandc, int first,
1147 struct nandc_regs *regs = nandc->regs;
1163 first = dev_cmd_reg_addr(nandc, NAND_DEV_CMD1);
1166 first = dev_cmd_reg_addr(nandc, NAND_DEV_CMD_VLD);
1168 if (nandc->props->is_bam)
1169 return prep_bam_dma_desc_cmd(nandc, false, first, vaddr,
1175 return prep_adm_dma_desc(nandc, false, first, vaddr,
1188 static int read_data_dma(struct qcom_nand_controller *nandc, int reg_off,
1191 if (nandc->props->is_bam)
1192 return prep_bam_dma_desc_data(nandc, true, vaddr, size, flags);
1194 return prep_adm_dma_desc(nandc, true, reg_off, vaddr, size, false);
1206 static int write_data_dma(struct qcom_nand_controller *nandc, int reg_off,
1209 if (nandc->props->is_bam)
1210 return prep_bam_dma_desc_data(nandc, false, vaddr, size, flags);
1212 return prep_adm_dma_desc(nandc, false, reg_off, vaddr, size, false);
1221 struct qcom_nand_controller *nandc = get_qcom_nand_controller(chip);
1223 write_reg_dma(nandc, NAND_ADDR0, 2, 0);
1224 write_reg_dma(nandc, NAND_DEV0_CFG0, 3, 0);
1225 if (!nandc->props->qpic_v2)
1226 write_reg_dma(nandc, NAND_EBI2_ECC_BUF_CFG, 1, 0);
1227 write_reg_dma(nandc, NAND_ERASED_CW_DETECT_CFG, 1, 0);
1228 write_reg_dma(nandc, NAND_ERASED_CW_DETECT_CFG, 1,
1239 struct qcom_nand_controller *nandc = get_qcom_nand_controller(chip);
1244 if (nandc->props->qpic_v2 && qcom_nandc_is_last_cw(ecc, cw))
1247 if (nandc->props->is_bam)
1248 write_reg_dma(nandc, reg, 4, NAND_BAM_NEXT_SGL);
1250 write_reg_dma(nandc, NAND_FLASH_CMD, 1, NAND_BAM_NEXT_SGL);
1251 write_reg_dma(nandc, NAND_EXEC_CMD, 1, NAND_BAM_NEXT_SGL);
1254 read_reg_dma(nandc, NAND_FLASH_STATUS, 2, 0);
1255 read_reg_dma(nandc, NAND_ERASED_CW_DETECT_STATUS, 1,
1258 read_reg_dma(nandc, NAND_FLASH_STATUS, 1, NAND_BAM_NEXT_SGL);
1280 struct qcom_nand_controller *nandc = get_qcom_nand_controller(chip);
1282 write_reg_dma(nandc, NAND_ADDR0, 2, 0);
1283 write_reg_dma(nandc, NAND_DEV0_CFG0, 3, 0);
1284 if (!nandc->props->qpic_v2)
1285 write_reg_dma(nandc, NAND_EBI2_ECC_BUF_CFG, 1,
1295 struct qcom_nand_controller *nandc = get_qcom_nand_controller(chip);
1297 write_reg_dma(nandc, NAND_FLASH_CMD, 1, NAND_BAM_NEXT_SGL);
1298 write_reg_dma(nandc, NAND_EXEC_CMD, 1, NAND_BAM_NEXT_SGL);
1300 read_reg_dma(nandc, NAND_FLASH_STATUS, 1, NAND_BAM_NEXT_SGL);
1302 write_reg_dma(nandc, NAND_FLASH_STATUS, 1, 0);
1303 write_reg_dma(nandc, NAND_READ_STATUS, 1, NAND_BAM_NEXT_SGL);
1307 static int submit_descs(struct qcom_nand_controller *nandc)
1311 struct bam_transaction *bam_txn = nandc->bam_txn;
1314 if (nandc->props->is_bam) {
1316 ret = prepare_bam_async_desc(nandc, nandc->rx_chan, 0);
1322 ret = prepare_bam_async_desc(nandc, nandc->tx_chan,
1329 ret = prepare_bam_async_desc(nandc, nandc->cmd_chan,
1336 list_for_each_entry(desc, &nandc->desc_list, node)
1339 if (nandc->props->is_bam) {
1348 dma_async_issue_pending(nandc->tx_chan);
1349 dma_async_issue_pending(nandc->rx_chan);
1350 dma_async_issue_pending(nandc->cmd_chan);
1356 if (dma_sync_wait(nandc->chan, cookie) != DMA_COMPLETE)
1365 list_for_each_entry_safe(desc, n, &nandc->desc_list, node) {
1368 if (nandc->props->is_bam)
1369 dma_unmap_sg(nandc->dev, desc->bam_sgl,
1372 dma_unmap_sg(nandc->dev, &desc->adm_sgl, 1,
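
The submit_descs() fragments above are the tail end of every operation: the other helpers only queue work, and nothing reaches the hardware until this function runs. The condensed skeleton below fills the gaps between the matched lines from my reading of the driver; the scatterlist-emptiness guards, the dmaengine_submit() loop body (desc->dma_desc), the DMA_PREP_INTERRUPT/DMA_PREP_CMD flags, the BAM completion wait, and the local declarations (ret, desc, cookie) are paraphrased assumptions, not part of this listing.

	if (nandc->props->is_bam) {
		/* One async descriptor per BAM pipe with queued work.  In the
		 * real function each call is guarded by a check that the
		 * corresponding scatterlist group is non-empty, and errors
		 * jump to the unmap/free loop at file lines 1365-1372. */
		ret = prepare_bam_async_desc(nandc, nandc->rx_chan, 0);
		if (!ret)
			ret = prepare_bam_async_desc(nandc, nandc->tx_chan,
						     DMA_PREP_INTERRUPT);
		if (!ret)
			ret = prepare_bam_async_desc(nandc, nandc->cmd_chan,
						     DMA_PREP_CMD);
	}

	/* Hand every queued descriptor to the dmaengine core... */
	list_for_each_entry(desc, &nandc->desc_list, node)
		cookie = dmaengine_submit(desc->dma_desc);

	/* ...then kick the channels and wait for completion. */
	if (nandc->props->is_bam) {
		dma_async_issue_pending(nandc->tx_chan);
		dma_async_issue_pending(nandc->rx_chan);
		dma_async_issue_pending(nandc->cmd_chan);
		/* The BAM path waits on a completion signalled by the command
		 * channel's callback (not among the matched lines). */
	} else if (dma_sync_wait(nandc->chan, cookie) != DMA_COMPLETE) {
		ret = -ETIMEDOUT;
	}
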
1382 static void clear_read_regs(struct qcom_nand_controller *nandc)
1384 nandc->reg_read_pos = 0;
1385 nandc_read_buffer_sync(nandc, false);
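
Taken together, the helpers listed up to this point form a small pipeline: clear the register read-back state, queue register and data transfers as DMA descriptors, submit them, then sync and parse nandc->reg_read_buf. The sketch below condenses that pipeline, modeled on the matched status-polling lines at file lines 2706-2725; the register setup before the queuing calls, the per-codeword loop, and the error handling are simplified, and the local declarations are added for readability, so treat it as illustrative rather than literal.

	int ret;
	u32 flash_status;

	clear_read_regs(nandc);
	clear_bam_transaction(nandc);

	/* Queue the command and EXEC register writes, then a read-back of
	 * NAND_FLASH_STATUS into nandc->reg_read_buf. */
	write_reg_dma(nandc, NAND_FLASH_CMD, 1, NAND_BAM_NEXT_SGL);
	write_reg_dma(nandc, NAND_EXEC_CMD, 1, NAND_BAM_NEXT_SGL);
	read_reg_dma(nandc, NAND_FLASH_STATUS, 1, NAND_BAM_NEXT_SGL);

	ret = submit_descs(nandc);
	if (ret) {
		dev_err(nandc->dev, "failure in submitting status descriptor\n");
		return ret;
	}

	/* Sync the DMA-mapped read-back buffer before the CPU parses it. */
	nandc_read_buffer_sync(nandc, true);
	flash_status = le32_to_cpu(nandc->reg_read_buf[0]);
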
1446 struct qcom_nand_controller *nandc = get_qcom_nand_controller(chip);
1449 nandc_read_buffer_sync(nandc, true);
1452 u32 flash = le32_to_cpu(nandc->reg_read_buf[i]);
1467 struct qcom_nand_controller *nandc = get_qcom_nand_controller(chip);
1474 nandc->buf_count = 0;
1475 nandc->buf_start = 0;
1476 clear_read_regs(nandc);
1479 if (nandc->props->qpic_v2)
1482 clear_bam_transaction(nandc);
1500 if (nandc->props->is_bam) {
1515 read_data_dma(nandc, reg_off, data_buf, data_size1, 0);
1518 read_data_dma(nandc, reg_off, oob_buf, oob_size1, 0);
1521 read_data_dma(nandc, reg_off, data_buf + data_size1, data_size2, 0);
1524 read_data_dma(nandc, reg_off, oob_buf + oob_size1, oob_size2, 0);
1526 ret = submit_descs(nandc);
1528 dev_err(nandc->dev, "failure to read raw cw %d\n", cw);
1614 struct qcom_nand_controller *nandc = get_qcom_nand_controller(chip);
1623 buf = (struct read_stats *)nandc->reg_read_buf;
1624 nandc_read_buffer_sync(nandc, true);
1717 struct qcom_nand_controller *nandc = get_qcom_nand_controller(chip);
1737 if (nandc->props->is_bam) {
1753 read_data_dma(nandc, FLASH_BUF_ACC, data_buf,
1769 read_data_dma(nandc, FLASH_BUF_ACC + data_size,
1779 ret = submit_descs(nandc);
1781 dev_err(nandc->dev, "failure to read page/oob\n");
1795 struct qcom_nand_controller *nandc = get_qcom_nand_controller(chip);
1800 clear_read_regs(nandc);
1805 memset(nandc->data_buffer, 0xff, size);
1812 read_data_dma(nandc, FLASH_BUF_ACC, nandc->data_buffer, size, 0);
1814 ret = submit_descs(nandc);
1816 dev_err(nandc->dev, "failed to copy last codeword\n");
1889 struct qcom_nand_controller *nandc = get_qcom_nand_controller(chip);
1897 nandc->buf_count = 0;
1898 nandc->buf_start = 0;
1900 clear_read_regs(nandc);
1907 clear_bam_transaction(nandc);
1942 struct qcom_nand_controller *nandc = get_qcom_nand_controller(chip);
1948 clear_read_regs(nandc);
1949 clear_bam_transaction(nandc);
1963 struct qcom_nand_controller *nandc = get_qcom_nand_controller(chip);
1974 nandc->buf_count = 0;
1975 nandc->buf_start = 0;
1976 clear_read_regs(nandc);
1977 clear_bam_transaction(nandc);
1998 write_data_dma(nandc, FLASH_BUF_ACC, data_buf, data_size,
2011 write_data_dma(nandc, FLASH_BUF_ACC + data_size,
2021 ret = submit_descs(nandc);
2023 dev_err(nandc->dev, "failure to write page\n");
2037 struct qcom_nand_controller *nandc = get_qcom_nand_controller(chip);
2046 clear_read_regs(nandc);
2047 clear_bam_transaction(nandc);
2073 write_data_dma(nandc, reg_off, data_buf, data_size1,
2078 write_data_dma(nandc, reg_off, oob_buf, oob_size1,
2083 write_data_dma(nandc, reg_off, data_buf, data_size2,
2088 write_data_dma(nandc, reg_off, oob_buf, oob_size2, 0);
2094 ret = submit_descs(nandc);
2096 dev_err(nandc->dev, "failure to write raw page\n");
2114 struct qcom_nand_controller *nandc = get_qcom_nand_controller(chip);
2124 clear_bam_transaction(nandc);
2130 memset(nandc->data_buffer, 0xff, host->cw_data);
2132 mtd_ooblayout_get_databytes(mtd, nandc->data_buffer + data_size, oob,
2139 write_data_dma(nandc, FLASH_BUF_ACC,
2140 nandc->data_buffer, data_size + oob_size, 0);
2143 ret = submit_descs(nandc);
2145 dev_err(nandc->dev, "failure to write oob\n");
2156 struct qcom_nand_controller *nandc = get_qcom_nand_controller(chip);
2170 clear_bam_transaction(nandc);
2176 dev_warn(nandc->dev, "error when trying to read BBM\n");
2182 bad = nandc->data_buffer[bbpos] != 0xff;
2185 bad = bad || (nandc->data_buffer[bbpos + 1] != 0xff);
2193 struct qcom_nand_controller *nandc = get_qcom_nand_controller(chip);
2197 clear_read_regs(nandc);
2198 clear_bam_transaction(nandc);
2205 memset(nandc->data_buffer, 0x00, host->cw_size);
2215 write_data_dma(nandc, FLASH_BUF_ACC,
2216 nandc->data_buffer, host->cw_size, 0);
2219 ret = submit_descs(nandc);
2221 dev_err(nandc->dev, "failure to update BBM\n");
2370 struct qcom_nand_controller *nandc = get_qcom_nand_controller(chip);
2387 dev_err(nandc->dev, "No valid ECC settings possible\n");
2411 if (nandc->props->ecc_modes & ECC_BCH_4BIT) {
2458 if (nandc->props->is_bam)
2459 free_bam_transaction(nandc);
2461 nandc->max_cwperpage = max_t(unsigned int, nandc->max_cwperpage,
2465 if (nandc->props->is_bam) {
2466 nandc->bam_txn = alloc_bam_transaction(nandc);
2467 if (!nandc->bam_txn) {
2468 dev_err(nandc->dev,
2525 if (!nandc->props->qpic_v2)
2530 nandc->regs->erased_cw_detect_cfg_clr =
2532 nandc->regs->erased_cw_detect_cfg_set =
2535 dev_dbg(nandc->dev,
2547 struct qcom_nand_controller *nandc = get_qcom_nand_controller(chip);
2559 if (nandc->props->qpic_v2)
2574 nandc->exec_opwrite = true;
2584 dev_err(nandc->dev, "Opcode not supported: %u\n", opcode);
2662 struct qcom_nand_controller *nandc = get_qcom_nand_controller(chip);
2666 nandc_read_buffer_sync(nandc, true);
2669 flash = le32_to_cpu(nandc->reg_read_buf[0]);
2675 dev_err(nandc->dev, "Timeout waiting for device to be ready:0x%08x\n", flash);
2684 struct qcom_nand_controller *nandc = get_qcom_nand_controller(chip);
2699 num_cw = nandc->exec_opwrite ? ecc->steps : 1;
2700 nandc->exec_opwrite = false;
2702 nandc->buf_count = 0;
2703 nandc->buf_start = 0;
2706 clear_read_regs(nandc);
2707 clear_bam_transaction(nandc);
2712 write_reg_dma(nandc, NAND_FLASH_CMD, 1, NAND_BAM_NEXT_SGL);
2713 write_reg_dma(nandc, NAND_EXEC_CMD, 1, NAND_BAM_NEXT_SGL);
2714 read_reg_dma(nandc, NAND_FLASH_STATUS, 1, NAND_BAM_NEXT_SGL);
2716 ret = submit_descs(nandc);
2718 dev_err(nandc->dev, "failure in submitting status descriptor\n");
2722 nandc_read_buffer_sync(nandc, true);
2725 flash_status = le32_to_cpu(nandc->reg_read_buf[i]);
2747 struct qcom_nand_controller *nandc = get_qcom_nand_controller(chip);
2759 nandc->buf_count = 0;
2760 nandc->buf_start = 0;
2763 clear_read_regs(nandc);
2764 clear_bam_transaction(nandc);
2770 nandc->props->is_bam ? 0 : DM_EN);
2774 write_reg_dma(nandc, NAND_FLASH_CMD, 4, NAND_BAM_NEXT_SGL);
2775 write_reg_dma(nandc, NAND_EXEC_CMD, 1, NAND_BAM_NEXT_SGL);
2777 read_reg_dma(nandc, NAND_READ_ID, 1, NAND_BAM_NEXT_SGL);
2779 ret = submit_descs(nandc);
2781 dev_err(nandc->dev, "failure in submitting read id descriptor\n");
2789 nandc_read_buffer_sync(nandc, true);
2790 memcpy(instr->ctx.data.buf.in, nandc->reg_read_buf, len);
2798 struct qcom_nand_controller *nandc = get_qcom_nand_controller(chip);
2822 nandc->buf_count = 0;
2823 nandc->buf_start = 0;
2826 clear_read_regs(nandc);
2827 clear_bam_transaction(nandc);
2832 write_reg_dma(nandc, NAND_FLASH_CMD, instrs, NAND_BAM_NEXT_SGL);
2833 (q_op.cmd_reg == OP_BLOCK_ERASE) ? write_reg_dma(nandc, NAND_DEV0_CFG0,
2834 2, NAND_BAM_NEXT_SGL) : read_reg_dma(nandc,
2837 write_reg_dma(nandc, NAND_EXEC_CMD, 1, NAND_BAM_NEXT_SGL);
2838 read_reg_dma(nandc, NAND_FLASH_STATUS, 1, NAND_BAM_NEXT_SGL);
2840 ret = submit_descs(nandc);
2842 dev_err(nandc->dev, "failure in submitting misc descriptor\n");
2857 struct qcom_nand_controller *nandc = get_qcom_nand_controller(chip);
2870 nandc->buf_count = 0;
2871 nandc->buf_start = 0;
2873 clear_read_regs(nandc);
2874 clear_bam_transaction(nandc);
2891 if (!nandc->props->qpic_v2)
2895 if (!nandc->props->qpic_v2) {
2897 (nandc->vld & ~READ_START_VLD));
2899 (nandc->cmd1 & ~(0xFF << READ_ADDR))
2905 if (!nandc->props->qpic_v2) {
2906 nandc_set_reg(chip, NAND_DEV_CMD1_RESTORE, nandc->cmd1);
2907 nandc_set_reg(chip, NAND_DEV_CMD_VLD_RESTORE, nandc->vld);
2916 if (!nandc->props->qpic_v2) {
2917 write_reg_dma(nandc, NAND_DEV_CMD_VLD, 1, 0);
2918 write_reg_dma(nandc, NAND_DEV_CMD1, 1, NAND_BAM_NEXT_SGL);
2921 nandc->buf_count = len;
2922 memset(nandc->data_buffer, 0xff, nandc->buf_count);
2926 read_data_dma(nandc, FLASH_BUF_ACC, nandc->data_buffer,
2927 nandc->buf_count, 0);
2930 if (!nandc->props->qpic_v2) {
2931 write_reg_dma(nandc, NAND_DEV_CMD1_RESTORE, 1, 0);
2932 write_reg_dma(nandc, NAND_DEV_CMD_VLD_RESTORE, 1, NAND_BAM_NEXT_SGL);
2935 ret = submit_descs(nandc);
2937 dev_err(nandc->dev, "failure in submitting param page descriptor\n");
2945 memcpy(instr->ctx.data.buf.in, nandc->data_buffer, len);
3019 static void qcom_nandc_unalloc(struct qcom_nand_controller *nandc)
3021 if (nandc->props->is_bam) {
3022 if (!dma_mapping_error(nandc->dev, nandc->reg_read_dma))
3023 dma_unmap_single(nandc->dev, nandc->reg_read_dma,
3025 sizeof(*nandc->reg_read_buf),
3028 if (nandc->tx_chan)
3029 dma_release_channel(nandc->tx_chan);
3031 if (nandc->rx_chan)
3032 dma_release_channel(nandc->rx_chan);
3034 if (nandc->cmd_chan)
3035 dma_release_channel(nandc->cmd_chan);
3037 if (nandc->chan)
3038 dma_release_channel(nandc->chan);
3042 static int qcom_nandc_alloc(struct qcom_nand_controller *nandc)
3046 ret = dma_set_coherent_mask(nandc->dev, DMA_BIT_MASK(32));
3048 dev_err(nandc->dev, "failed to set DMA mask\n");
3058 nandc->buf_size = 532;
3060 nandc->data_buffer = devm_kzalloc(nandc->dev, nandc->buf_size, GFP_KERNEL);
3061 if (!nandc->data_buffer)
3064 nandc->regs = devm_kzalloc(nandc->dev, sizeof(*nandc->regs), GFP_KERNEL);
3065 if (!nandc->regs)
3068 nandc->reg_read_buf = devm_kcalloc(nandc->dev, MAX_REG_RD,
3069 sizeof(*nandc->reg_read_buf),
3071 if (!nandc->reg_read_buf)
3074 if (nandc->props->is_bam) {
3075 nandc->reg_read_dma =
3076 dma_map_single(nandc->dev, nandc->reg_read_buf,
3078 sizeof(*nandc->reg_read_buf),
3080 if (dma_mapping_error(nandc->dev, nandc->reg_read_dma)) {
3081 dev_err(nandc->dev, "failed to DMA MAP reg buffer\n");
3085 nandc->tx_chan = dma_request_chan(nandc->dev, "tx");
3086 if (IS_ERR(nandc->tx_chan)) {
3087 ret = PTR_ERR(nandc->tx_chan);
3088 nandc->tx_chan = NULL;
3089 dev_err_probe(nandc->dev, ret,
3094 nandc->rx_chan = dma_request_chan(nandc->dev, "rx");
3095 if (IS_ERR(nandc->rx_chan)) {
3096 ret = PTR_ERR(nandc->rx_chan);
3097 nandc->rx_chan = NULL;
3098 dev_err_probe(nandc->dev, ret,
3103 nandc->cmd_chan = dma_request_chan(nandc->dev, "cmd");
3104 if (IS_ERR(nandc->cmd_chan)) {
3105 ret = PTR_ERR(nandc->cmd_chan);
3106 nandc->cmd_chan = NULL;
3107 dev_err_probe(nandc->dev, ret,
3118 nandc->max_cwperpage = 1;
3119 nandc->bam_txn = alloc_bam_transaction(nandc);
3120 if (!nandc->bam_txn) {
3121 dev_err(nandc->dev,
3127 nandc->chan = dma_request_chan(nandc->dev, "rxtx");
3128 if (IS_ERR(nandc->chan)) {
3129 ret = PTR_ERR(nandc->chan);
3130 nandc->chan = NULL;
3131 dev_err_probe(nandc->dev, ret,
3137 INIT_LIST_HEAD(&nandc->desc_list);
3138 INIT_LIST_HEAD(&nandc->host_list);
3140 nand_controller_init(&nandc->controller);
3141 nandc->controller.ops = &qcom_nandc_ops;
3145 qcom_nandc_unalloc(nandc);
3150 static int qcom_nandc_setup(struct qcom_nand_controller *nandc)
3155 if (!nandc->props->is_qpic)
3156 nandc_write(nandc, SFLASHC_BURST_CFG, 0);
3158 if (!nandc->props->qpic_v2)
3159 nandc_write(nandc, dev_cmd_reg_addr(nandc, NAND_DEV_CMD_VLD),
3163 if (nandc->props->is_bam) {
3164 nand_ctrl = nandc_read(nandc, NAND_CTRL);
3174 nandc_write(nandc, NAND_CTRL, nand_ctrl | BAM_MODE_EN);
3176 nandc_write(nandc, NAND_FLASH_CHIP_SELECT, DM_EN);
3180 if (!nandc->props->qpic_v2) {
3181 nandc->cmd1 = nandc_read(nandc, dev_cmd_reg_addr(nandc, NAND_DEV_CMD1));
3182 nandc->vld = NAND_DEV_CMD_VLD_VAL;
3190 static int qcom_nand_host_parse_boot_partitions(struct qcom_nand_controller *nandc,
3197 struct device *dev = nandc->dev;
3258 static int qcom_nand_host_init_and_register(struct qcom_nand_controller *nandc,
3264 struct device *dev = nandc->dev;
3292 chip->controller = &nandc->controller;
3307 if (nandc->props->use_codeword_fixup) {
3308 ret = qcom_nand_host_parse_boot_partitions(nandc, host, dn);
3320 static int qcom_probe_nand_devices(struct qcom_nand_controller *nandc)
3322 struct device *dev = nandc->dev;
3334 ret = qcom_nand_host_init_and_register(nandc, host, child);
3340 list_add_tail(&host->node, &nandc->host_list);
3349 struct qcom_nand_controller *nandc = platform_get_drvdata(pdev);
3350 struct device_node *np = nandc->dev->of_node;
3353 if (!nandc->props->is_bam) {
3355 &nandc->cmd_crci);
3357 dev_err(nandc->dev, "command CRCI unspecified\n");
3362 &nandc->data_crci);
3364 dev_err(nandc->dev, "data CRCI unspecified\n");
3374 struct qcom_nand_controller *nandc;
3380 nandc = devm_kzalloc(&pdev->dev, sizeof(*nandc), GFP_KERNEL);
3381 if (!nandc)
3384 platform_set_drvdata(pdev, nandc);
3385 nandc->dev = dev;
3393 nandc->props = dev_data;
3395 nandc->core_clk = devm_clk_get(dev, "core");
3396 if (IS_ERR(nandc->core_clk))
3397 return PTR_ERR(nandc->core_clk);
3399 nandc->aon_clk = devm_clk_get(dev, "aon");
3400 if (IS_ERR(nandc->aon_clk))
3401 return PTR_ERR(nandc->aon_clk);
3407 nandc->base = devm_platform_get_and_ioremap_resource(pdev, 0, &res);
3408 if (IS_ERR(nandc->base))
3409 return PTR_ERR(nandc->base);
3411 nandc->base_phys = res->start;
3412 nandc->base_dma = dma_map_resource(dev, res->start,
3415 if (dma_mapping_error(dev, nandc->base_dma))
3418 ret = clk_prepare_enable(nandc->core_clk);
3422 ret = clk_prepare_enable(nandc->aon_clk);
3426 ret = qcom_nandc_alloc(nandc);
3430 ret = qcom_nandc_setup(nandc);
3434 ret = qcom_probe_nand_devices(nandc);
3441 qcom_nandc_unalloc(nandc);
3443 clk_disable_unprepare(nandc->aon_clk);
3445 clk_disable_unprepare(nandc->core_clk);
3447 dma_unmap_resource(dev, nandc->base_dma, resource_size(res),
3454 struct qcom_nand_controller *nandc = platform_get_drvdata(pdev);
3460 list_for_each_entry(host, &nandc->host_list, node) {
3467 qcom_nandc_unalloc(nandc);
3469 clk_disable_unprepare(nandc->aon_clk);
3470 clk_disable_unprepare(nandc->core_clk);
3472 dma_unmap_resource(&pdev->dev, nandc->base_dma, resource_size(res),
3536 .name = "qcom-nandc",