Lines matching references to dev

34 static inline uint8_t r852_read_reg(struct r852_device *dev, int address)
36 uint8_t reg = readb(dev->mmio + address);
41 static inline void r852_write_reg(struct r852_device *dev,
44 writeb(value, dev->mmio + address);
49 static inline uint32_t r852_read_reg_dword(struct r852_device *dev, int address)
51 uint32_t reg = le32_to_cpu(readl(dev->mmio + address));
56 static inline void r852_write_reg_dword(struct r852_device *dev,
59 writel(cpu_to_le32(value), dev->mmio + address);
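The four accessors above are thin wrappers around readb/writeb and readl/writel on dev->mmio, the ioremap()ed BAR 0. As an illustration of how the rest of the listing uses them, here is a hypothetical read-modify-write helper in the same style; r852_set_reg_bits is not part of the driver:

#include <linux/io.h>
#include <linux/types.h>

/* Hypothetical helper (not in the driver): set bits in a byte-wide
 * register through the accessors above.  Assumes dev->mmio is the
 * mapped BAR 0, as set up in the probe path. */
static inline void r852_set_reg_bits(struct r852_device *dev,
				     int address, uint8_t bits)
{
	uint8_t reg = r852_read_reg(dev, address);

	r852_write_reg(dev, address, reg | bits);
}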
71 static void r852_dma_test(struct r852_device *dev)
73 dev->dma_usable = (r852_read_reg(dev, R852_DMA_CAP) &
76 if (!dev->dma_usable)
81 dev->dma_usable = 0;
 87  * Expects dev->dma_dir and dev->dma_state to be set
89 static void r852_dma_enable(struct r852_device *dev)
94 dma_reg = r852_read_reg_dword(dev, R852_DMA_SETTINGS);
97 if (dev->dma_dir)
100 if (dev->dma_state == DMA_INTERNAL) {
104 r852_write_reg_dword(dev, R852_DMA_ADDR,
105 cpu_to_le32(dev->phys_bounce_buffer));
108 r852_write_reg_dword(dev, R852_DMA_ADDR,
109 cpu_to_le32(dev->phys_dma_addr));
113 r852_read_reg_dword(dev, R852_DMA_ADDR);
115 r852_write_reg_dword(dev, R852_DMA_SETTINGS, dma_reg);
118 dma_irq_reg = r852_read_reg_dword(dev, R852_DMA_IRQ_ENABLE);
119 r852_write_reg_dword(dev, R852_DMA_IRQ_ENABLE,
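r852_dma_enable writes the transfer address to R852_DMA_ADDR and then reads the register straight back (line 113) before committing R852_DMA_SETTINGS. A plausible reading is that the read-back flushes the posted PCI write so the address has reached the device before DMA starts; a minimal sketch of that idiom, with a made-up helper name:

/* Hypothetical helper: MMIO writes over PCI are posted, so read the
 * register back to push the new address out before enabling DMA. */
static void r852_set_dma_addr(struct r852_device *dev, uint32_t addr)
{
	r852_write_reg_dword(dev, R852_DMA_ADDR, addr);
	r852_read_reg_dword(dev, R852_DMA_ADDR);	/* flush posted write */
}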
130 static void r852_dma_done(struct r852_device *dev, int error)
132 WARN_ON(dev->dma_stage == 0);
134 r852_write_reg_dword(dev, R852_DMA_IRQ_STA,
135 r852_read_reg_dword(dev, R852_DMA_IRQ_STA));
137 r852_write_reg_dword(dev, R852_DMA_SETTINGS, 0);
138 r852_write_reg_dword(dev, R852_DMA_IRQ_ENABLE, 0);
141 r852_write_reg_dword(dev, R852_DMA_ADDR,
142 cpu_to_le32(dev->phys_bounce_buffer));
143 r852_read_reg_dword(dev, R852_DMA_ADDR);
145 dev->dma_error = error;
146 dev->dma_stage = 0;
148 if (dev->phys_dma_addr && dev->phys_dma_addr != dev->phys_bounce_buffer)
149 dma_unmap_single(&dev->pci_dev->dev, dev->phys_dma_addr,
151 dev->dma_dir ? DMA_FROM_DEVICE : DMA_TO_DEVICE);
157 static int r852_dma_wait(struct r852_device *dev)
159 long timeout = wait_for_completion_timeout(&dev->dma_done,
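r852_dma_wait sleeps on the dev->dma_done completion that the interrupt handler signals once the transfer finishes. A minimal sketch of that wait-with-timeout pattern, assuming a one-second timeout (the driver's actual timeout value is not among the matched lines):

#include <linux/completion.h>
#include <linux/jiffies.h>
#include <linux/errno.h>

/* Sketch: block until the IRQ handler calls complete(&dev->dma_done),
 * turning a timeout into an error code for the caller. */
static int r852_dma_wait_sketch(struct r852_device *dev)
{
	long timeout = wait_for_completion_timeout(&dev->dma_done,
						   msecs_to_jiffies(1000));

	/* wait_for_completion_timeout() returns 0 on timeout, otherwise
	 * the number of jiffies remaining. */
	return timeout ? 0 : -ETIMEDOUT;
}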
172 static void r852_do_dma(struct r852_device *dev, uint8_t *buf, int do_read)
178 dev->dma_error = 0;
181 dev->dma_dir = do_read;
182 dev->dma_stage = 1;
183 reinit_completion(&dev->dma_done);
189 dev->dma_state = do_read ? DMA_INTERNAL : DMA_MEMORY;
196 dev->phys_dma_addr = dma_map_single(&dev->pci_dev->dev, buf,
199 if (dma_mapping_error(&dev->pci_dev->dev, dev->phys_dma_addr))
205 dev->phys_dma_addr = dev->phys_bounce_buffer;
207 memcpy(dev->bounce_buffer, buf, R852_DMA_LEN);
211 spin_lock_irqsave(&dev->irqlock, flags);
212 r852_dma_enable(dev);
213 spin_unlock_irqrestore(&dev->irqlock, flags);
216 error = r852_dma_wait(dev);
219 r852_dma_done(dev, error);
224 memcpy((void *)buf, dev->bounce_buffer, R852_DMA_LEN);
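r852_do_dma maps the caller's buffer with dma_map_single and falls back to the coherent bounce buffer allocated at probe time when the mapping fails (or when the controller has to go through its internal buffer); r852_dma_done later unmaps the streaming mapping. A condensed sketch of that map/fallback/copy-back pattern, with the register programming and the wait elided:

#include <linux/dma-mapping.h>
#include <linux/string.h>

/* Condensed sketch of the bounce-buffer fallback around one transfer.
 * R852_DMA_LEN, dev->bounce_buffer and dev->phys_* are the fields shown
 * in the listing; the driver itself splits this logic across
 * r852_do_dma and r852_dma_done. */
static void r852_do_dma_sketch(struct r852_device *dev, uint8_t *buf, int do_read)
{
	bool bounce = false;

	dev->phys_dma_addr = dma_map_single(&dev->pci_dev->dev, buf,
			R852_DMA_LEN,
			do_read ? DMA_FROM_DEVICE : DMA_TO_DEVICE);

	if (dma_mapping_error(&dev->pci_dev->dev, dev->phys_dma_addr)) {
		/* Fall back to the coherent buffer from dma_alloc_coherent(). */
		dev->phys_dma_addr = dev->phys_bounce_buffer;
		bounce = true;
		if (!do_read)
			memcpy(dev->bounce_buffer, buf, R852_DMA_LEN);
	}

	/* ... program R852_DMA_ADDR / R852_DMA_SETTINGS, wait for the IRQ ... */

	if (!bounce)
		dma_unmap_single(&dev->pci_dev->dev, dev->phys_dma_addr,
				R852_DMA_LEN,
				do_read ? DMA_FROM_DEVICE : DMA_TO_DEVICE);
	else if (do_read)
		memcpy(buf, dev->bounce_buffer, R852_DMA_LEN);
}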
232 struct r852_device *dev = r852_get_dev(nand_to_mtd(chip));
236 if (dev->card_unstable)
240 if (len == R852_DMA_LEN && dev->dma_usable) {
241 r852_do_dma(dev, (uint8_t *)buf, 0);
248 r852_write_reg_dword(dev, R852_DATALINE, reg);
256 r852_write_reg(dev, R852_DATALINE, *buf++);
266 struct r852_device *dev = r852_get_dev(nand_to_mtd(chip));
269 if (dev->card_unstable) {
277 if (len == R852_DMA_LEN && dev->dma_usable) {
278 r852_do_dma(dev, buf, 1);
285 reg = r852_read_reg_dword(dev, R852_DATALINE);
295 *buf++ = r852_read_reg(dev, R852_DATALINE);
303 struct r852_device *dev = r852_get_dev(nand_to_mtd(chip));
306 if (dev->card_unstable)
309 return r852_read_reg(dev, R852_DATALINE);
317 struct r852_device *dev = r852_get_dev(nand_to_mtd(chip));
319 if (dev->card_unstable)
324 dev->ctlreg &= ~(R852_CTL_DATA | R852_CTL_COMMAND |
328 dev->ctlreg |= R852_CTL_DATA;
331 dev->ctlreg |= R852_CTL_COMMAND;
334 dev->ctlreg |= (R852_CTL_CARDENABLE | R852_CTL_ON);
336 dev->ctlreg &= ~R852_CTL_WRITE;
340 dev->ctlreg |= R852_CTL_WRITE;
342 r852_write_reg(dev, R852_CTL, dev->ctlreg);
347 if (dat == NAND_CMD_SEQIN && (dev->ctlreg & R852_CTL_COMMAND)) {
348 dev->ctlreg |= R852_CTL_WRITE;
349 r852_write_reg(dev, R852_CTL, dev->ctlreg);
353 r852_write_reg(dev, R852_DATALINE, dat);
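Lines 317-353 belong to the legacy cmd_ctrl hook: when the control lines change it rebuilds dev->ctlreg (command vs. data latch, card enable, write direction) and latches it into R852_CTL, and when a command or address byte is attached it writes it out through R852_DATALINE. A sketch of the hook's overall shape, with the bit recomputation abbreviated:

#include <linux/mtd/rawnand.h>

static void r852_cmdctl_sketch(struct nand_chip *chip, int dat, unsigned int ctrl)
{
	struct r852_device *dev = r852_get_dev(nand_to_mtd(chip));

	if (dev->card_unstable)
		return;

	if (ctrl & NAND_CTRL_CHANGE) {
		/* Recompute dev->ctlreg from the CLE/ALE/CE bits in ctrl
		 * (abbreviated here), then latch it into the controller. */
		r852_write_reg(dev, R852_CTL, dev->ctlreg);
	}

	if (dat != NAND_CMD_NONE)
		r852_write_reg(dev, R852_DATALINE, dat);
}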
362 struct r852_device *dev = nand_get_controller_data(chip);
376 if (dev->dma_error) {
378 dev->dma_error = 0;
389 struct r852_device *dev = r852_get_dev(nand_to_mtd(chip));
390 return !(r852_read_reg(dev, R852_CARD_STA) & R852_CARD_STA_BUSY);
400 struct r852_device *dev = r852_get_dev(nand_to_mtd(chip));
402 if (dev->card_unstable)
409 dev->ctlreg |= R852_CTL_ECC_ENABLE;
412 r852_write_reg(dev, R852_CTL,
413 dev->ctlreg | R852_CTL_ECC_ACCESS);
415 r852_read_reg_dword(dev, R852_DATALINE);
416 r852_write_reg(dev, R852_CTL, dev->ctlreg);
421 dev->ctlreg &= ~R852_CTL_ECC_ENABLE;
422 r852_write_reg(dev, R852_CTL, dev->ctlreg);
433 struct r852_device *dev = r852_get_dev(nand_to_mtd(chip));
437 if (dev->card_unstable)
440 dev->ctlreg &= ~R852_CTL_ECC_ENABLE;
441 r852_write_reg(dev, R852_CTL, dev->ctlreg | R852_CTL_ECC_ACCESS);
443 ecc1 = r852_read_reg_dword(dev, R852_DATALINE);
444 ecc2 = r852_read_reg_dword(dev, R852_DATALINE);
454 r852_write_reg(dev, R852_CTL, dev->ctlreg);
469 struct r852_device *dev = r852_get_dev(nand_to_mtd(chip));
471 if (dev->card_unstable)
474 if (dev->dma_error) {
475 dev->dma_error = 0;
479 r852_write_reg(dev, R852_CTL, dev->ctlreg | R852_CTL_ECC_ACCESS);
480 ecc_reg = r852_read_reg_dword(dev, R852_DATALINE);
481 r852_write_reg(dev, R852_CTL, dev->ctlreg);
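The ECC helpers toggle R852_CTL_ECC_ACCESS around reads of R852_DATALINE, so the same data-line register yields ECC dwords instead of page data, then restore dev->ctlreg. A small sketch of that bracketing (how the bytes map into the OOB layout is not shown):

/* Sketch: read the two ECC dwords the engine exposes behind
 * R852_CTL_ECC_ACCESS, restoring the control register afterwards. */
static void r852_read_ecc_regs_sketch(struct r852_device *dev,
				      uint32_t *ecc1, uint32_t *ecc2)
{
	r852_write_reg(dev, R852_CTL, dev->ctlreg | R852_CTL_ECC_ACCESS);
	*ecc1 = r852_read_reg_dword(dev, R852_DATALINE);
	*ecc2 = r852_read_reg_dword(dev, R852_DATALINE);
	r852_write_reg(dev, R852_CTL, dev->ctlreg);
}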
529 static void r852_engine_enable(struct r852_device *dev)
531 if (r852_read_reg_dword(dev, R852_HW) & R852_HW_UNKNOWN) {
532 r852_write_reg(dev, R852_CTL, R852_CTL_RESET | R852_CTL_ON);
533 r852_write_reg_dword(dev, R852_HW, R852_HW_ENABLED);
535 r852_write_reg_dword(dev, R852_HW, R852_HW_ENABLED);
536 r852_write_reg(dev, R852_CTL, R852_CTL_RESET | R852_CTL_ON);
539 r852_write_reg(dev, R852_CTL, 0);
547 static void r852_engine_disable(struct r852_device *dev)
549 r852_write_reg_dword(dev, R852_HW, 0);
550 r852_write_reg(dev, R852_CTL, R852_CTL_RESET);
557 static void r852_card_update_present(struct r852_device *dev)
562 spin_lock_irqsave(&dev->irqlock, flags);
563 reg = r852_read_reg(dev, R852_CARD_STA);
564 dev->card_detected = !!(reg & R852_CARD_STA_PRESENT);
565 spin_unlock_irqrestore(&dev->irqlock, flags);
572 static void r852_update_card_detect(struct r852_device *dev)
574 int card_detect_reg = r852_read_reg(dev, R852_CARD_IRQ_ENABLE);
575 dev->card_unstable = 0;
580 card_detect_reg |= dev->card_detected ?
583 r852_write_reg(dev, R852_CARD_IRQ_ENABLE, card_detect_reg);
589 struct mtd_info *mtd = container_of(sys_dev, struct mtd_info, dev);
590 struct r852_device *dev = r852_get_dev(mtd);
591 char *data = dev->sm ? "smartmedia" : "xd";
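Lines 589-591 are from the sysfs show callback behind dev_attr_media_type, which reports whether the detected card is SmartMedia or xD. A sketch of such a show function; sysfs_emit() is used here for illustration, the driver itself may fill the buffer differently:

#include <linux/device.h>
#include <linux/sysfs.h>
#include <linux/mtd/mtd.h>

static ssize_t media_type_show_sketch(struct device *sys_dev,
				      struct device_attribute *attr, char *buf)
{
	/* The attribute hangs off the mtd device, so walk back to it. */
	struct mtd_info *mtd = container_of(sys_dev, struct mtd_info, dev);
	struct r852_device *dev = r852_get_dev(mtd);

	return sysfs_emit(buf, "%s\n", dev->sm ? "smartmedia" : "xd");
}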
600 static void r852_update_media_status(struct r852_device *dev)
606 spin_lock_irqsave(&dev->irqlock, flags);
607 if (!dev->card_detected) {
609 spin_unlock_irqrestore(&dev->irqlock, flags);
613 readonly = r852_read_reg(dev, R852_CARD_STA) & R852_CARD_STA_RO;
614 reg = r852_read_reg(dev, R852_DMA_CAP);
615 dev->sm = (reg & (R852_DMA1 | R852_DMA2)) && (reg & R852_SMBIT);
618 dev->sm ? "SmartMedia" : "xD",
621 dev->readonly = readonly;
622 spin_unlock_irqrestore(&dev->irqlock, flags);
629 static int r852_register_nand_device(struct r852_device *dev)
631 struct mtd_info *mtd = nand_to_mtd(dev->chip);
633 WARN_ON(dev->card_registered);
635 mtd->dev.parent = &dev->pci_dev->dev;
637 if (dev->readonly)
638 dev->chip->options |= NAND_ROM;
640 r852_engine_enable(dev);
642 if (sm_register_device(mtd, dev->sm))
645 if (device_create_file(&mtd->dev, &dev_attr_media_type)) {
650 dev->card_registered = 1;
653 WARN_ON(mtd_device_unregister(nand_to_mtd(dev->chip)));
654 nand_cleanup(dev->chip);
657 dev->card_detected = 0;
665 static void r852_unregister_nand_device(struct r852_device *dev)
667 struct mtd_info *mtd = nand_to_mtd(dev->chip);
669 if (!dev->card_registered)
672 device_remove_file(&mtd->dev, &dev_attr_media_type);
674 nand_cleanup(dev->chip);
675 r852_engine_disable(dev);
676 dev->card_registered = 0;
682 struct r852_device *dev =
685 r852_card_update_present(dev);
686 r852_update_card_detect(dev);
687 dev->card_unstable = 0;
690 if (dev->card_detected == dev->card_registered)
694 r852_update_media_status(dev);
697 if (dev->card_detected)
698 r852_register_nand_device(dev);
700 r852_unregister_nand_device(dev);
702 r852_update_card_detect(dev);
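Card changes are debounced: the interrupt handler only marks the card unstable and queues card_detect_work with a 100 ms delay (see line 753 below), and the work item then re-reads the present bit and registers or unregisters the NAND device once the socket has settled. A sketch of that two-half pattern, omitting the media-status and IRQ-mask updates:

#include <linux/workqueue.h>
#include <linux/jiffies.h>

/* Sketch: the IRQ handler runs the first half, the freezable
 * workqueue runs the second half after the card has settled. */
static void r852_card_irq_half_sketch(struct r852_device *dev)
{
	dev->card_unstable = 1;
	queue_delayed_work(dev->card_workqueue,
			   &dev->card_detect_work, msecs_to_jiffies(100));
}

static void r852_card_detect_work_sketch(struct work_struct *work)
{
	struct r852_device *dev =
		container_of(work, struct r852_device, card_detect_work.work);

	r852_card_update_present(dev);
	dev->card_unstable = 0;

	/* Nothing to do if detection already matches registration. */
	if (dev->card_detected == dev->card_registered)
		return;

	if (dev->card_detected)
		r852_register_nand_device(dev);
	else
		r852_unregister_nand_device(dev);
}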
706 static void r852_disable_irqs(struct r852_device *dev)
709 reg = r852_read_reg(dev, R852_CARD_IRQ_ENABLE);
710 r852_write_reg(dev, R852_CARD_IRQ_ENABLE, reg & ~R852_CARD_IRQ_MASK);
712 reg = r852_read_reg_dword(dev, R852_DMA_IRQ_ENABLE);
713 r852_write_reg_dword(dev, R852_DMA_IRQ_ENABLE,
716 r852_write_reg(dev, R852_CARD_IRQ_STA, R852_CARD_IRQ_MASK);
717 r852_write_reg_dword(dev, R852_DMA_IRQ_STA, R852_DMA_IRQ_MASK);
723 struct r852_device *dev = (struct r852_device *)data;
728 spin_lock(&dev->irqlock);
731 card_status = r852_read_reg(dev, R852_CARD_IRQ_STA);
732 r852_write_reg(dev, R852_CARD_IRQ_STA, card_status);
737 dev->card_detected = !!(card_status & R852_CARD_IRQ_INSERT);
741 WARN_ON(dev->card_unstable);
745 r852_disable_irqs(dev);
747 if (dev->card_unstable)
751 dev->card_unstable = 1;
752 queue_delayed_work(dev->card_workqueue,
753 &dev->card_detect_work, msecs_to_jiffies(100));
759 dma_status = r852_read_reg_dword(dev, R852_DMA_IRQ_STA);
760 r852_write_reg_dword(dev, R852_DMA_IRQ_STA, dma_status);
768 r852_dma_done(dev, -EIO);
769 complete(&dev->dma_done);
774 WARN_ON_ONCE(dev->dma_stage == 0);
776 if (dev->dma_stage == 0)
780 if (dev->dma_state == DMA_INTERNAL &&
783 dev->dma_state = DMA_MEMORY;
784 dev->dma_stage++;
788 if (dev->dma_state == DMA_MEMORY &&
790 dev->dma_state = DMA_INTERNAL;
791 dev->dma_stage++;
795 if (dev->dma_stage == 2)
796 r852_dma_enable(dev);
799 if (dev->dma_stage == 3) {
800 r852_dma_done(dev, 0);
801 complete(&dev->dma_done);
814 spin_unlock(&dev->irqlock);
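The interrupt handler runs under dev->irqlock, reads each status register and immediately writes the value back to acknowledge it, then dispatches either to the card-detect debounce or to the DMA stage machine that ends in complete(&dev->dma_done). A reduced sketch of the acknowledge-and-dispatch skeleton:

#include <linux/interrupt.h>
#include <linux/spinlock.h>

static irqreturn_t r852_irq_sketch(int irq, void *data)
{
	struct r852_device *dev = data;
	irqreturn_t ret = IRQ_NONE;
	uint8_t card_status;
	uint32_t dma_status;

	spin_lock(&dev->irqlock);

	/* Acknowledge by writing the status bits back, as the listing does. */
	card_status = r852_read_reg(dev, R852_CARD_IRQ_STA);
	r852_write_reg(dev, R852_CARD_IRQ_STA, card_status);

	dma_status = r852_read_reg_dword(dev, R852_DMA_IRQ_STA);
	r852_write_reg_dword(dev, R852_DMA_IRQ_STA, dma_status);

	if (card_status || dma_status)
		ret = IRQ_HANDLED;

	/* ... card-detect debounce and DMA stage handling go here ... */

	spin_unlock(&dev->irqlock);
	return ret;
}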
845 struct r852_device *dev;
855 error = dma_set_mask(&pci_dev->dev, DMA_BIT_MASK(32));
883 dev = kzalloc(sizeof(struct r852_device), GFP_KERNEL);
885 if (!dev)
888 nand_set_controller_data(chip, dev);
889 dev->chip = chip;
890 dev->pci_dev = pci_dev;
891 pci_set_drvdata(pci_dev, dev);
893 nand_controller_init(&dev->controller);
894 dev->controller.ops = &r852_ops;
895 chip->controller = &dev->controller;
897 dev->bounce_buffer = dma_alloc_coherent(&pci_dev->dev, R852_DMA_LEN,
898 &dev->phys_bounce_buffer, GFP_KERNEL);
900 if (!dev->bounce_buffer)
905 dev->mmio = pci_ioremap_bar(pci_dev, 0);
907 if (!dev->mmio)
911 dev->tmp_buffer = kzalloc(SM_SECTOR_SIZE, GFP_KERNEL);
913 if (!dev->tmp_buffer)
916 init_completion(&dev->dma_done);
918 dev->card_workqueue = create_freezable_workqueue(DRV_NAME);
920 if (!dev->card_workqueue)
923 INIT_DELAYED_WORK(&dev->card_detect_work, r852_card_detect_work);
926 r852_engine_disable(dev);
927 r852_disable_irqs(dev);
929 r852_dma_test(dev);
931 dev->irq = pci_dev->irq;
932 spin_lock_init(&dev->irqlock);
934 dev->card_detected = 0;
935 r852_card_update_present(dev);
940 DRV_NAME, dev))
944 queue_delayed_work(dev->card_workqueue,
945 &dev->card_detect_work, 0);
952 destroy_workqueue(dev->card_workqueue);
954 kfree(dev->tmp_buffer);
956 pci_iounmap(pci_dev, dev->mmio);
958 dma_free_coherent(&pci_dev->dev, R852_DMA_LEN, dev->bounce_buffer,
959 dev->phys_bounce_buffer);
961 kfree(dev);
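The probe path acquires its resources in a fixed order (32-bit DMA mask, device structure, coherent bounce buffer, BAR 0 mapping, temporary buffer, workqueue, IRQ) and unwinds them in reverse through goto labels; the remove path below releases them in the same reverse order. A condensed sketch of that acquire/unwind shape, with several steps trimmed:

#include <linux/pci.h>
#include <linux/dma-mapping.h>
#include <linux/slab.h>

static int r852_probe_sketch(struct pci_dev *pci_dev,
			     const struct pci_device_id *id)
{
	struct r852_device *dev;
	int error;

	/* The controller only does 32-bit DMA. */
	error = dma_set_mask(&pci_dev->dev, DMA_BIT_MASK(32));
	if (error)
		return error;

	dev = kzalloc(sizeof(*dev), GFP_KERNEL);
	if (!dev)
		return -ENOMEM;

	dev->bounce_buffer = dma_alloc_coherent(&pci_dev->dev, R852_DMA_LEN,
						&dev->phys_bounce_buffer,
						GFP_KERNEL);
	if (!dev->bounce_buffer) {
		error = -ENOMEM;
		goto err_free_dev;
	}

	dev->mmio = pci_ioremap_bar(pci_dev, 0);
	if (!dev->mmio) {
		error = -ENOMEM;
		goto err_free_bounce;
	}

	/* ... completion, workqueue, IRQ and the card-detect kick-off ... */
	return 0;

err_free_bounce:
	dma_free_coherent(&pci_dev->dev, R852_DMA_LEN, dev->bounce_buffer,
			  dev->phys_bounce_buffer);
err_free_dev:
	kfree(dev);
	return error;
}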
975 struct r852_device *dev = pci_get_drvdata(pci_dev);
979 cancel_delayed_work_sync(&dev->card_detect_work);
980 destroy_workqueue(dev->card_workqueue);
983 r852_unregister_nand_device(dev);
986 r852_disable_irqs(dev);
987 free_irq(dev->irq, dev);
990 kfree(dev->tmp_buffer);
991 pci_iounmap(pci_dev, dev->mmio);
992 dma_free_coherent(&pci_dev->dev, R852_DMA_LEN, dev->bounce_buffer,
993 dev->phys_bounce_buffer);
995 kfree(dev->chip);
996 kfree(dev);
1005 struct r852_device *dev = pci_get_drvdata(pci_dev);
1007 cancel_delayed_work_sync(&dev->card_detect_work);
1008 r852_disable_irqs(dev);
1009 synchronize_irq(dev->irq);
1016 struct r852_device *dev = dev_get_drvdata(device);
1018 if (dev->ctlreg & R852_CTL_CARDENABLE)
1022 cancel_delayed_work_sync(&dev->card_detect_work);
1025 r852_disable_irqs(dev);
1026 r852_engine_disable(dev);
1031 dev->card_unstable = 0;
1037 struct r852_device *dev = dev_get_drvdata(device);
1039 r852_disable_irqs(dev);
1040 r852_card_update_present(dev);
1041 r852_engine_disable(dev);
1045 if (dev->card_detected != dev->card_registered) {
1047 dev->card_detected ? "added" : "removed");
1049 queue_delayed_work(dev->card_workqueue,
1050 &dev->card_detect_work, msecs_to_jiffies(1000));
1055 if (dev->card_registered) {
1056 r852_engine_enable(dev);
1057 nand_select_target(dev->chip, 0);
1058 nand_reset_op(dev->chip);
1059 nand_deselect_target(dev->chip);
1063 r852_update_card_detect(dev);
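On resume the driver cannot assume the same card is still present: it re-reads the detect bit with the engine and IRQs off, defers to card_detect_work if the detection state no longer matches what is registered, and only re-enables the engine and resets the chip when the registered card is still there. A sketch of that recheck, following the field and function names from the listing:

#include <linux/pm.h>
#include <linux/workqueue.h>
#include <linux/jiffies.h>

static int r852_resume_sketch(struct device *device)
{
	struct r852_device *dev = dev_get_drvdata(device);

	r852_disable_irqs(dev);
	r852_card_update_present(dev);
	r852_engine_disable(dev);

	/* The card changed while suspended: let the delayed work handle
	 * (un)registration once the socket has settled. */
	if (dev->card_detected != dev->card_registered) {
		queue_delayed_work(dev->card_workqueue,
				   &dev->card_detect_work,
				   msecs_to_jiffies(1000));
		return 0;
	}

	/* Same card still registered: bring the engine back and reset it. */
	if (dev->card_registered) {
		r852_engine_enable(dev);
		nand_select_target(dev->chip, 0);
		nand_reset_op(dev->chip);
		nand_deselect_target(dev->chip);
	}

	r852_update_card_detect(dev);
	return 0;
}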