Lines matching refs: can
9 #include <linux/can/dev.h>
371 struct can_priv can;
387 struct kvaser_pciefd_can *can[KVASER_PCIEFD_MAX_CAN_CHANNELS];
465 static inline void kvaser_pciefd_send_kcan_cmd(struct kvaser_pciefd_can *can, u32 cmd)
468 FIELD_PREP(KVASER_PCIEFD_KCAN_CMD_SEQ_MASK, ++can->cmd_seq),
469 can->reg_base + KVASER_PCIEFD_KCAN_CMD_REG);
472 static inline void kvaser_pciefd_request_status(struct kvaser_pciefd_can *can)
474 kvaser_pciefd_send_kcan_cmd(can, KVASER_PCIEFD_KCAN_CMD_SRQ);
477 static inline void kvaser_pciefd_abort_flush_reset(struct kvaser_pciefd_can *can)
479 kvaser_pciefd_send_kcan_cmd(can, KVASER_PCIEFD_KCAN_CMD_AT);
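The three helpers above (source lines 465-479) funnel every controller command through one register write that tags the command with an incrementing per-channel sequence number. A minimal sketch of that helper, assuming a companion KVASER_PCIEFD_KCAN_CMD_MASK field for the command code (only the SEQ field and the register offset appear in the matches above); FIELD_PREP() is from <linux/bitfield.h>, iowrite32() from <linux/io.h>:

static inline void kvaser_pciefd_send_kcan_cmd(struct kvaser_pciefd_can *can, u32 cmd)
{
	/* One write arms the command; ++cmd_seq lets later status/ack
	 * packets be matched back to the command that caused them. */
	iowrite32(FIELD_PREP(KVASER_PCIEFD_KCAN_CMD_MASK, cmd) |
		  FIELD_PREP(KVASER_PCIEFD_KCAN_CMD_SEQ_MASK, ++can->cmd_seq),
		  can->reg_base + KVASER_PCIEFD_KCAN_CMD_REG);
}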
482 static void kvaser_pciefd_enable_err_gen(struct kvaser_pciefd_can *can)
487 spin_lock_irqsave(&can->lock, irq);
488 mode = ioread32(can->reg_base + KVASER_PCIEFD_KCAN_MODE_REG);
491 iowrite32(mode, can->reg_base + KVASER_PCIEFD_KCAN_MODE_REG);
493 spin_unlock_irqrestore(&can->lock, irq);
496 static void kvaser_pciefd_disable_err_gen(struct kvaser_pciefd_can *can)
501 spin_lock_irqsave(&can->lock, irq);
502 mode = ioread32(can->reg_base + KVASER_PCIEFD_KCAN_MODE_REG);
504 iowrite32(mode, can->reg_base + KVASER_PCIEFD_KCAN_MODE_REG);
505 spin_unlock_irqrestore(&can->lock, irq);
508 static void kvaser_pciefd_set_tx_irq(struct kvaser_pciefd_can *can)
518 iowrite32(msk, can->reg_base + KVASER_PCIEFD_KCAN_IEN_REG);
528 static void kvaser_pciefd_setup_controller(struct kvaser_pciefd_can *can)
533 spin_lock_irqsave(&can->lock, irq);
534 mode = ioread32(can->reg_base + KVASER_PCIEFD_KCAN_MODE_REG);
535 if (can->can.ctrlmode & CAN_CTRLMODE_FD) {
537 if (can->can.ctrlmode & CAN_CTRLMODE_FD_NON_ISO)
546 if (can->can.ctrlmode & CAN_CTRLMODE_LISTENONLY)
555 iowrite32(mode, can->reg_base + KVASER_PCIEFD_KCAN_MODE_REG);
557 spin_unlock_irqrestore(&can->lock, irq);
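Every mode tweak in this stretch (source lines 482-557) shares one pattern: take the per-channel spinlock with IRQs disabled, read-modify-write KVASER_PCIEFD_KCAN_MODE_REG, release the lock. A hedged sketch of the pattern for the listen-only flag; the helper name and the KVASER_PCIEFD_KCAN_MODE_LOM bit are illustrative and do not come from the matches above:

static void kvaser_pciefd_set_listen_only(struct kvaser_pciefd_can *can, bool on)
{
	unsigned long irq;
	u32 mode;

	spin_lock_irqsave(&can->lock, irq);
	mode = ioread32(can->reg_base + KVASER_PCIEFD_KCAN_MODE_REG);
	if (on)
		mode |= KVASER_PCIEFD_KCAN_MODE_LOM;	/* assumed bit name */
	else
		mode &= ~KVASER_PCIEFD_KCAN_MODE_LOM;
	iowrite32(mode, can->reg_base + KVASER_PCIEFD_KCAN_MODE_REG);
	spin_unlock_irqrestore(&can->lock, irq);
}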
560 static void kvaser_pciefd_start_controller_flush(struct kvaser_pciefd_can *can)
565 spin_lock_irqsave(&can->lock, irq);
566 iowrite32(GENMASK(31, 0), can->reg_base + KVASER_PCIEFD_KCAN_IRQ_REG);
568 can->reg_base + KVASER_PCIEFD_KCAN_IEN_REG);
569 status = ioread32(can->reg_base + KVASER_PCIEFD_KCAN_STAT_REG);
572 kvaser_pciefd_abort_flush_reset(can);
577 mode = ioread32(can->reg_base + KVASER_PCIEFD_KCAN_MODE_REG);
579 iowrite32(mode, can->reg_base + KVASER_PCIEFD_KCAN_MODE_REG);
581 spin_unlock_irqrestore(&can->lock, irq);
584 static int kvaser_pciefd_bus_on(struct kvaser_pciefd_can *can)
589 del_timer(&can->bec_poll_timer);
590 if (!completion_done(&can->flush_comp))
591 kvaser_pciefd_start_controller_flush(can);
593 if (!wait_for_completion_timeout(&can->flush_comp,
595 netdev_err(can->can.dev, "Timeout during bus on flush\n");
599 spin_lock_irqsave(&can->lock, irq);
600 iowrite32(0, can->reg_base + KVASER_PCIEFD_KCAN_IEN_REG);
601 iowrite32(GENMASK(31, 0), can->reg_base + KVASER_PCIEFD_KCAN_IRQ_REG);
603 can->reg_base + KVASER_PCIEFD_KCAN_IEN_REG);
604 mode = ioread32(can->reg_base + KVASER_PCIEFD_KCAN_MODE_REG);
606 iowrite32(mode, can->reg_base + KVASER_PCIEFD_KCAN_MODE_REG);
607 spin_unlock_irqrestore(&can->lock, irq);
609 if (!wait_for_completion_timeout(&can->start_comp,
611 netdev_err(can->can.dev, "Timeout during bus on reset\n");
615 iowrite32(0, can->reg_base + KVASER_PCIEFD_KCAN_IEN_REG);
616 iowrite32(GENMASK(31, 0), can->reg_base + KVASER_PCIEFD_KCAN_IRQ_REG);
618 kvaser_pciefd_set_tx_irq(can);
619 kvaser_pciefd_setup_controller(can);
620 can->can.state = CAN_STATE_ERROR_ACTIVE;
621 netif_wake_queue(can->can.dev);
622 can->bec.txerr = 0;
623 can->bec.rxerr = 0;
624 can->err_rep_cnt = 0;
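Read together, the bus-on lines above (source lines 584-624) describe a fixed restart sequence; the outline below condenses it. The completions it waits on are filled in elsewhere in this listing: flush_comp by the flush-done handler near source line 1482 and start_comp by the status handler near source line 1391.

/*
 * Bus-on sequence, condensed from source lines 584-624:
 *
 *   del_timer(&can->bec_poll_timer);              stop error-counter polling
 *   kvaser_pciefd_start_controller_flush(can);    unless a flush is already done
 *   wait_for_completion_timeout(&can->flush_comp, ...);
 *   mask and ack all KCAN interrupts, request a controller reset via MODE
 *   wait_for_completion_timeout(&can->start_comp, ...);
 *   kvaser_pciefd_set_tx_irq(can);
 *   kvaser_pciefd_setup_controller(can);
 *   can->can.state = CAN_STATE_ERROR_ACTIVE;
 *   netif_wake_queue(can->can.dev);
 *   can->bec.txerr = can->bec.rxerr = can->err_rep_cnt = 0;
 */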
629 static void kvaser_pciefd_pwm_stop(struct kvaser_pciefd_can *can)
635 spin_lock_irqsave(&can->lock, irq);
636 pwm_ctrl = ioread32(can->reg_base + KVASER_PCIEFD_KCAN_PWM_REG);
640 iowrite32(pwm_ctrl, can->reg_base + KVASER_PCIEFD_KCAN_PWM_REG);
641 spin_unlock_irqrestore(&can->lock, irq);
644 static void kvaser_pciefd_pwm_start(struct kvaser_pciefd_can *can)
650 kvaser_pciefd_pwm_stop(can);
651 spin_lock_irqsave(&can->lock, irq);
653 top = can->kv_pcie->bus_freq / (2 * 500000) - 1;
657 iowrite32(pwm_ctrl, can->reg_base + KVASER_PCIEFD_KCAN_PWM_REG);
663 iowrite32(pwm_ctrl, can->reg_base + KVASER_PCIEFD_KCAN_PWM_REG);
664 spin_unlock_irqrestore(&can->lock, irq);
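The divisor at source line 653 pins the transceiver PWM to 500 kHz regardless of the board's bus clock. A worked example with a hypothetical 125 MHz bus_freq (the actual clock value is not part of this listing):

/*
 * top = bus_freq / (2 * 500000) - 1
 *     = 125000000 / 1000000 - 1
 *     = 124
 *
 * The factor 2 in the divisor means the full PWM period is
 * 2 * (top + 1) = 250 bus cycles, i.e. 125 MHz / 250 = 500 kHz.
 */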
670 struct kvaser_pciefd_can *can = netdev_priv(netdev);
676 err = kvaser_pciefd_bus_on(can);
687 struct kvaser_pciefd_can *can = netdev_priv(netdev);
691 if (!completion_done(&can->flush_comp))
692 kvaser_pciefd_start_controller_flush(can);
694 if (!wait_for_completion_timeout(&can->flush_comp,
696 netdev_err(can->can.dev, "Timeout during stop\n");
699 iowrite32(0, can->reg_base + KVASER_PCIEFD_KCAN_IEN_REG);
700 del_timer(&can->bec_poll_timer);
702 can->can.state = CAN_STATE_STOPPED;
709 struct kvaser_pciefd_can *can,
714 int seq = can->echo_idx;
717 if (can->can.ctrlmode & CAN_CTRLMODE_ONE_SHOT)
740 can_get_cc_dlc((struct can_frame *)cf, can->can.ctrlmode));
754 struct kvaser_pciefd_can *can = netdev_priv(netdev);
763 nr_words = kvaser_pciefd_prepare_tx_packet(&packet, can, skb);
765 spin_lock_irqsave(&can->echo_lock, irq_flags);
767 can_put_echo_skb(skb, netdev, can->echo_idx, 0);
770 can->echo_idx = (can->echo_idx + 1) % can->can.echo_skb_max;
774 can->reg_base + KVASER_PCIEFD_KCAN_FIFO_REG);
776 can->reg_base + KVASER_PCIEFD_KCAN_FIFO_REG);
782 iowrite32_rep(can->reg_base +
786 __raw_writel(data_last, can->reg_base +
790 __raw_writel(0, can->reg_base +
795 ioread32(can->reg_base + KVASER_PCIEFD_KCAN_TX_NR_PACKETS_REG));
799 if (count >= can->can.echo_skb_max || can->can.echo_skb[can->echo_idx])
801 spin_unlock_irqrestore(&can->echo_lock, irq_flags);
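The transmit-path lines above (source lines 709-801) show each outgoing skb being parked in the echo ring before the packet is pushed to the KCAN FIFO, with the queue stopped once the ring or the hardware FIFO fills. A condensed, illustrative sketch; the _sketch suffix, the CURRENT field mask and the NETDEV_TX_OK tail are assumptions added for readability:

static netdev_tx_t kvaser_pciefd_start_xmit_sketch(struct sk_buff *skb,
						   struct net_device *netdev)
{
	struct kvaser_pciefd_can *can = netdev_priv(netdev);
	unsigned long irq_flags;
	u32 count;

	spin_lock_irqsave(&can->echo_lock, irq_flags);
	/* Park the skb for the later TX-complete echo, advance the ring. */
	can_put_echo_skb(skb, netdev, can->echo_idx, 0);
	can->echo_idx = (can->echo_idx + 1) % can->can.echo_skb_max;

	/* ... the prepared packet is written to the KCAN FIFO here ... */

	/* Assumed field mask; only the register read appears in the listing. */
	count = FIELD_GET(KVASER_PCIEFD_KCAN_TX_NR_PACKETS_CURRENT_MASK,
			  ioread32(can->reg_base + KVASER_PCIEFD_KCAN_TX_NR_PACKETS_REG));
	if (count >= can->can.echo_skb_max || can->can.echo_skb[can->echo_idx])
		netif_stop_queue(netdev);
	spin_unlock_irqrestore(&can->echo_lock, irq_flags);

	return NETDEV_TX_OK;
}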
806 static int kvaser_pciefd_set_bittiming(struct kvaser_pciefd_can *can, bool data)
814 bt = &can->can.data_bittiming;
816 bt = &can->can.bittiming;
823 spin_lock_irqsave(&can->lock, irq_flags);
824 mode = ioread32(can->reg_base + KVASER_PCIEFD_KCAN_MODE_REG);
827 can->reg_base + KVASER_PCIEFD_KCAN_MODE_REG);
830 ret = readl_poll_timeout(can->reg_base + KVASER_PCIEFD_KCAN_MODE_REG,
833 spin_unlock_irqrestore(&can->lock, irq_flags);
838 iowrite32(btrn, can->reg_base + KVASER_PCIEFD_KCAN_BTRD_REG);
840 iowrite32(btrn, can->reg_base + KVASER_PCIEFD_KCAN_BTRN_REG);
842 iowrite32(mode, can->reg_base + KVASER_PCIEFD_KCAN_MODE_REG);
843 spin_unlock_irqrestore(&can->lock, irq_flags);
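The set_bittiming lines above (source lines 806-843) show the timing update being performed with the controller parked in reset mode, verified via readl_poll_timeout() from <linux/iopoll.h>, before the new value goes to BTRD (data phase) or BTRN (nominal phase). A condensed sketch of that middle section, declarations elided; the KVASER_PCIEFD_KCAN_MODE_RM bit name, the poll bounds and the composition of btrn are assumptions:

	spin_lock_irqsave(&can->lock, irq_flags);
	mode = ioread32(can->reg_base + KVASER_PCIEFD_KCAN_MODE_REG);
	/* Park the controller in reset mode so the timing may be rewritten. */
	iowrite32(mode | KVASER_PCIEFD_KCAN_MODE_RM,
		  can->reg_base + KVASER_PCIEFD_KCAN_MODE_REG);
	ret = readl_poll_timeout(can->reg_base + KVASER_PCIEFD_KCAN_MODE_REG,
				 test, test & KVASER_PCIEFD_KCAN_MODE_RM,
				 0, 10 /* usec, illustrative */);
	if (ret) {
		spin_unlock_irqrestore(&can->lock, irq_flags);
		return -EBUSY;
	}

	if (data)
		iowrite32(btrn, can->reg_base + KVASER_PCIEFD_KCAN_BTRD_REG);
	else
		iowrite32(btrn, can->reg_base + KVASER_PCIEFD_KCAN_BTRN_REG);

	/* Restore the saved mode word, leaving reset mode again. */
	iowrite32(mode, can->reg_base + KVASER_PCIEFD_KCAN_MODE_REG);
	spin_unlock_irqrestore(&can->lock, irq_flags);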
860 struct kvaser_pciefd_can *can = netdev_priv(ndev);
865 if (!can->can.restart_ms)
866 ret = kvaser_pciefd_bus_on(can);
878 struct kvaser_pciefd_can *can = netdev_priv(ndev);
880 bec->rxerr = can->bec.rxerr;
881 bec->txerr = can->bec.txerr;
888 struct kvaser_pciefd_can *can = from_timer(can, data, bec_poll_timer);
890 kvaser_pciefd_enable_err_gen(can);
891 kvaser_pciefd_request_status(can);
892 can->err_rep_cnt = 0;
913 struct kvaser_pciefd_can *can;
921 can = netdev_priv(netdev);
924 can->reg_base = KVASER_PCIEFD_KCAN_CHX_ADDR(pcie, i);
925 can->kv_pcie = pcie;
926 can->cmd_seq = 0;
927 can->err_rep_cnt = 0;
928 can->bec.txerr = 0;
929 can->bec.rxerr = 0;
931 init_completion(&can->start_comp);
932 init_completion(&can->flush_comp);
933 timer_setup(&can->bec_poll_timer, kvaser_pciefd_bec_poll_timer, 0);
936 iowrite32(0, can->reg_base + KVASER_PCIEFD_KCAN_BUS_LOAD_REG);
940 ioread32(can->reg_base + KVASER_PCIEFD_KCAN_TX_NR_PACKETS_REG));
942 can->can.clock.freq = pcie->freq;
943 can->can.echo_skb_max = min(KVASER_PCIEFD_CAN_TX_MAX_COUNT, tx_nr_packets_max - 1);
944 can->echo_idx = 0;
945 spin_lock_init(&can->echo_lock);
946 spin_lock_init(&can->lock);
948 can->can.bittiming_const = &kvaser_pciefd_bittiming_const;
949 can->can.data_bittiming_const = &kvaser_pciefd_bittiming_const;
950 can->can.do_set_bittiming = kvaser_pciefd_set_nominal_bittiming;
951 can->can.do_set_data_bittiming = kvaser_pciefd_set_data_bittiming;
952 can->can.do_set_mode = kvaser_pciefd_set_mode;
953 can->can.do_get_berr_counter = kvaser_pciefd_get_berr_counter;
954 can->can.ctrlmode_supported = CAN_CTRLMODE_LISTENONLY |
959 status = ioread32(can->reg_base + KVASER_PCIEFD_KCAN_STAT_REG);
969 can->can.ctrlmode_supported |= CAN_CTRLMODE_ONE_SHOT;
974 iowrite32(GENMASK(31, 0), can->reg_base + KVASER_PCIEFD_KCAN_IRQ_REG);
976 can->reg_base + KVASER_PCIEFD_KCAN_IEN_REG);
978 pcie->can[i] = can;
979 kvaser_pciefd_pwm_start(can);
990 int err = register_candev(pcie->can[i]->can.dev);
997 unregister_candev(pcie->can[j]->can.dev);
1133 priv = &pcie->can[ch_id]->can;
1173 static void kvaser_pciefd_change_state(struct kvaser_pciefd_can *can,
1179 can_change_state(can->can.dev, cf, tx_state, rx_state);
1182 struct net_device *ndev = can->can.dev;
1185 spin_lock_irqsave(&can->lock, irq_flags);
1186 netif_stop_queue(can->can.dev);
1187 spin_unlock_irqrestore(&can->lock, irq_flags);
1189 if (!can->can.restart_ms) {
1190 kvaser_pciefd_start_controller_flush(can);
1222 static int kvaser_pciefd_rx_error_frame(struct kvaser_pciefd_can *can,
1227 struct net_device *ndev = can->can.dev;
1231 old_state = can->can.state;
1239 kvaser_pciefd_change_state(can, cf, new_state, tx_state, rx_state);
1242 can->can.restart_ms) {
1243 can->can.can_stats.restarts++;
1249 can->err_rep_cnt++;
1250 can->can.can_stats.bus_error++;
1256 can->bec.txerr = bec.txerr;
1257 can->bec.rxerr = bec.rxerr;
1264 kvaser_pciefd_set_skb_timestamp(can->kv_pcie, skb, p->timestamp);
1277 struct kvaser_pciefd_can *can;
1283 can = pcie->can[ch_id];
1284 kvaser_pciefd_rx_error_frame(can, p);
1285 if (can->err_rep_cnt >= KVASER_PCIEFD_MAX_ERR_REP)
1287 kvaser_pciefd_disable_err_gen(can);
1289 mod_timer(&can->bec_poll_timer, KVASER_PCIEFD_BEC_POLL_FREQ);
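The error-packet handler above (source lines 1277-1292) rate-limits hardware error reporting: once err_rep_cnt reaches KVASER_PCIEFD_MAX_ERR_REP it mutes error generation and hands the job to bec_poll_timer, whose callback (source lines 888-892) re-enables error generation, requests a fresh status packet and zeroes err_rep_cnt. A hedged reconstruction of the throttle; the braces and comment are added, and KVASER_PCIEFD_BEC_POLL_FREQ presumably expands to an absolute jiffies expiry, since that is what mod_timer() takes:

	kvaser_pciefd_rx_error_frame(can, p);
	if (can->err_rep_cnt >= KVASER_PCIEFD_MAX_ERR_REP) {
		/* Too many error frames: stop reporting until the poll timer
		 * re-enables error generation and resets err_rep_cnt. */
		kvaser_pciefd_disable_err_gen(can);
		mod_timer(&can->bec_poll_timer, KVASER_PCIEFD_BEC_POLL_FREQ);
	}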
1294 static int kvaser_pciefd_handle_status_resp(struct kvaser_pciefd_can *can,
1300 old_state = can->can.state;
1307 struct net_device *ndev = can->can.dev;
1317 kvaser_pciefd_change_state(can, cf, new_state, tx_state, rx_state);
1320 can->can.restart_ms) {
1321 can->can.can_stats.restarts++;
1325 kvaser_pciefd_set_skb_timestamp(can->kv_pcie, skb, p->timestamp);
1332 can->bec.txerr = bec.txerr;
1333 can->bec.rxerr = bec.rxerr;
1336 mod_timer(&can->bec_poll_timer, KVASER_PCIEFD_BEC_POLL_FREQ);
1344 struct kvaser_pciefd_can *can;
1352 can = pcie->can[ch_id];
1354 status = ioread32(can->reg_base + KVASER_PCIEFD_KCAN_STAT_REG);
1364 can->reg_base + KVASER_PCIEFD_KCAN_IRQ_REG);
1365 kvaser_pciefd_abort_flush_reset(can);
1374 ioread32(can->reg_base + KVASER_PCIEFD_KCAN_TX_NR_PACKETS_REG));
1378 can->reg_base + KVASER_PCIEFD_KCAN_CTRL_REG);
1382 kvaser_pciefd_handle_status_resp(can, p);
1383 if (can->can.state != CAN_STATE_BUS_OFF &&
1384 can->can.state != CAN_STATE_ERROR_ACTIVE) {
1385 mod_timer(&can->bec_poll_timer, KVASER_PCIEFD_BEC_POLL_FREQ);
1390 if (!completion_done(&can->start_comp))
1391 complete(&can->start_comp);
1397 static void kvaser_pciefd_handle_nack_packet(struct kvaser_pciefd_can *can,
1403 skb = alloc_can_err_skb(can->can.dev, &cf);
1404 can->can.dev->stats.tx_errors++;
1408 can->can.can_stats.arbitration_lost++;
1415 kvaser_pciefd_set_skb_timestamp(can->kv_pcie, skb, p->timestamp);
1418 can->can.dev->stats.rx_dropped++;
1419 netdev_warn(can->can.dev, "No memory left for err_skb\n");
1426 struct kvaser_pciefd_can *can;
1433 can = pcie->can[ch_id];
1439 kvaser_pciefd_handle_nack_packet(can, p);
1444 netdev_dbg(can->can.dev, "Packet was flushed\n");
1451 skb = can->can.echo_skb[echo_idx];
1454 len = can_get_echo_skb(can->can.dev, echo_idx, NULL);
1456 ioread32(can->reg_base + KVASER_PCIEFD_KCAN_TX_NR_PACKETS_REG));
1458 if (count < can->can.echo_skb_max && netif_queue_stopped(can->can.dev))
1459 netif_wake_queue(can->can.dev);
1462 can->can.dev->stats.tx_bytes += len;
1463 can->can.dev->stats.tx_packets++;
1473 struct kvaser_pciefd_can *can;
1479 can = pcie->can[ch_id];
1481 if (!completion_done(&can->flush_comp))
1482 complete(&can->flush_comp);
1610 static void kvaser_pciefd_transmit_irq(struct kvaser_pciefd_can *can)
1612 u32 irq = ioread32(can->reg_base + KVASER_PCIEFD_KCAN_IRQ_REG);
1615 netdev_err(can->can.dev, "Tx FIFO overflow\n");
1618 netdev_err(can->can.dev,
1622 netdev_err(can->can.dev, "CAN FD frame in CAN mode\n");
1625 netdev_err(can->can.dev, "Rx FIFO overflow\n");
1627 iowrite32(irq, can->reg_base + KVASER_PCIEFD_KCAN_IRQ_REG);
1644 if (!pcie->can[i]) {
1652 kvaser_pciefd_transmit_irq(pcie->can[i]);
1663 struct kvaser_pciefd_can *can = pcie->can[i];
1665 if (can) {
1666 iowrite32(0, can->reg_base + KVASER_PCIEFD_KCAN_IEN_REG);
1667 kvaser_pciefd_pwm_stop(can);
1668 free_candev(can->can.dev);
1773 struct kvaser_pciefd_can *can = pcie->can[i];
1775 if (can) {
1776 iowrite32(0, can->reg_base + KVASER_PCIEFD_KCAN_IEN_REG);
1777 unregister_candev(can->can.dev);
1778 del_timer(&can->bec_poll_timer);
1779 kvaser_pciefd_pwm_stop(can);
1780 free_candev(can->can.dev);