Lines matching defs:gpi_dev in the Qualcomm GPI DMA engine driver (drivers/dma/qcom/gpi.c): the definition of struct gpi_dev and the lines that reference it.
467 struct gpi_dev {
504 struct gpi_dev *gpi_dev;
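
Lines 467 and 504 establish struct gpi_dev as the per-controller state, with each GPII instance keeping a back-pointer to it. The following is a minimal sketch of the structure reconstructed only from the member accesses visible in this listing (dev, res, regs, ee_base, max_gpii, gpii_mask, ev_factor, gpiis, dma_device); exact types, ordering, and any further members are assumptions.

/* Sketch only: fields inferred from the accesses in this listing. */
struct gpi_dev {
        struct device *dev;             /* &pdev->dev, set at line 2162 */
        struct resource *res;           /* MMIO resource, filled at line 2163 */
        void __iomem *regs;             /* ioremapped register base */
        void __iomem *ee_base;          /* regs minus the per-SoC EE offset (line 2183) */
        u32 max_gpii;                   /* "dma-channels" property (line 2168) */
        u32 gpii_mask;                  /* "dma-channel-mask" property (line 2175) */
        u32 ev_factor;                  /* event ring sizing factor, EV_FACTOR (line 2185) */
        struct gpii *gpiis;             /* per-GPII state array, max_gpii entries */
        struct dma_device dma_device;   /* dmaengine registration (lines 2251-2271) */
};
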
617 devm_free_irq(gpii->gpi_dev->dev, gpii->irq, gpii);
632 ret = devm_request_irq(gpii->gpi_dev->dev, gpii->irq,
636 dev_err(gpii->gpi_dev->dev, "error request irq:%d ret:%d\n",
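
Lines 617-636 show the per-GPII interrupt being requested and released through the devm_* helpers, so the IRQ lifetime is tied to the GPI device while still allowing an early devm_free_irq() during teardown. A hedged sketch of that pattern follows; the stub handler, the IRQF_TRIGGER_HIGH flag, and the "gpi-dma" name are illustrative assumptions, not the driver's actual choices.

/* Hypothetical stub standing in for the driver's GPII interrupt handler. */
static irqreturn_t gpi_irq_stub(int irq, void *data)
{
        return IRQ_HANDLED;
}

static int gpi_request_irq_sketch(struct gpii *gpii)
{
        int ret;

        /* device-managed request, mirroring lines 632-636 */
        ret = devm_request_irq(gpii->gpi_dev->dev, gpii->irq, gpi_irq_stub,
                               IRQF_TRIGGER_HIGH, "gpi-dma", gpii);
        if (ret < 0)
                dev_err(gpii->gpi_dev->dev, "error request irq:%d ret:%d\n",
                        gpii->irq, ret);
        return ret;
}
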
699 dev_dbg(gpii->gpi_dev->dev,
713 dev_err(gpii->gpi_dev->dev, "cmd: %s completion timeout:%u\n",
810 dev_dbg(gpii->gpi_dev->dev, "irq_stts:0x%x\n", irq_stts);
829 dev_err(gpii->gpi_dev->dev, "invalid error status:0x%x\n", irq_stts);
852 dev_err(gpii->gpi_dev->dev, "receive interrupt while in %s state\n",
878 dev_dbg(gpii->gpi_dev->dev,
899 dev_dbg(gpii->gpi_dev->dev, "setting EV state to %s\n",
907 dev_dbg(gpii->gpi_dev->dev, "process CH CTRL interrupts\n");
913 dev_err(gpii->gpi_dev->dev, "Unhandled interrupt status:0x%x\n", type);
945 dev_err(gpii->gpi_dev->dev, "skipping processing event because ch @ %s state\n",
957 dev_dbg(gpii->gpi_dev->dev, "event without a pending descriptor!\n");
959 dev_dbg(gpii->gpi_dev->dev,
964 dev_dbg(gpii->gpi_dev->dev,
1025 dev_err(gpii->gpi_dev->dev, "skipping processing event because ch @ %s state\n",
1036 dev_err(gpii->gpi_dev->dev, "Event without a pending descriptor!\n");
1038 dev_err(gpii->gpi_dev->dev,
1069 dev_err(gpii->gpi_dev->dev, "Error in Transaction\n");
1072 dev_dbg(gpii->gpi_dev->dev, "Transaction Success\n");
1076 dev_dbg(gpii->gpi_dev->dev, "Residue %d\n", result.residue);
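
Lines 1069-1076 are the transaction completion path: the driver distinguishes an errored transfer from a successful one and reports the remaining byte count as the residue. A hedged sketch of that reporting, using the generic dmaengine/virt-dma primitives (dma_cookie_complete() and dmaengine_desc_get_callback_invoke() from drivers/dma/dmaengine.h and virt-dma.h), is below; the function name, parameters, and the omitted vchan locking are assumptions.

static void gpi_complete_desc_sketch(struct gpii *gpii,
                                     struct virt_dma_desc *vd,
                                     u32 remaining, bool error)
{
        struct dmaengine_result result;

        if (error) {
                dev_err(gpii->gpi_dev->dev, "Error in Transaction\n");
                result.result = DMA_TRANS_ABORTED;
        } else {
                dev_dbg(gpii->gpi_dev->dev, "Transaction Success\n");
                result.result = DMA_TRANS_NOERROR;
        }
        result.residue = remaining;
        dev_dbg(gpii->gpi_dev->dev, "Residue %d\n", result.residue);

        /* complete the cookie and run the client callback with the result */
        dma_cookie_complete(&vd->tx);
        dmaengine_desc_get_callback_invoke(&vd->tx, &result);
}
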
1108 dev_dbg(gpii->gpi_dev->dev,
1121 dev_dbg(gpii->gpi_dev->dev, "stale event, not processing\n");
1129 dev_dbg(gpii->gpi_dev->dev, "QUP_NOTIF_EV_TYPE\n");
1132 dev_dbg(gpii->gpi_dev->dev,
1156 dev_err(gpii->gpi_dev->dev, "not processing any events, pm_state:%s\n",
1206 dev_err(gpii->gpi_dev->dev, "Error with cmd:%s ret:%d\n",
1239 dev_err(gpii->gpi_dev->dev, "Error with cmd:%s ret:%d\n",
1259 dev_err(gpii->gpi_dev->dev, "Error with cmd:%s ret:%d\n",
1280 dev_err(gpii->gpi_dev->dev, "Error with cmd:%s ret:%d\n",
1315 dev_err(gpii->gpi_dev->dev, "error with cmd:%s ret:%d\n",
1400 dma_free_coherent(gpii->gpi_dev->dev, ring->alloc_size,
1418 dev_dbg(gpii->gpi_dev->dev,
1423 ring->pre_aligned = dma_alloc_coherent(gpii->gpi_dev->dev,
1427 dev_err(gpii->gpi_dev->dev, "could not alloc size:%zu mem for ring\n",
1446 dev_dbg(gpii->gpi_dev->dev,
1464 dev_err(gpii->gpi_dev->dev, "Error adding ring element to xfer ring\n");
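
Lines 1400-1464 cover the event/transfer rings: the ring is carved out of a single dma_alloc_coherent() buffer that is deliberately over-allocated (ring->pre_aligned, ring->alloc_size) so that the usable base can be aligned, and elements are later queued onto the transfer ring. A hedged sketch of that over-allocate-then-align pattern follows, assuming the ring length is a power of two and that gpi_ring has base/phys_addr/dma_handle members alongside the two fields visible here.

static int gpi_ring_alloc_sketch(struct gpii *gpii, struct gpi_ring *ring,
                                 u32 elements, u32 el_size)
{
        size_t len = elements * el_size;        /* assumed power of two */

        /* over-allocate so the base can be aligned to the ring length */
        ring->alloc_size = len + (len - 1);
        ring->pre_aligned = dma_alloc_coherent(gpii->gpi_dev->dev,
                                               ring->alloc_size,
                                               &ring->dma_handle, GFP_KERNEL);
        if (!ring->pre_aligned) {
                dev_err(gpii->gpi_dev->dev,
                        "could not alloc size:%zu mem for ring\n",
                        ring->alloc_size);
                return -ENOMEM;
        }

        /* align the DMA address up to the ring length and shift the
         * CPU pointer by the same amount */
        ring->phys_addr = (ring->dma_handle + (len - 1)) & ~(dma_addr_t)(len - 1);
        ring->base = ring->pre_aligned + (ring->phys_addr - ring->dma_handle);
        return 0;
}
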
1509 dev_err(gpii->gpi_dev->dev, "Error resetting channel ret:%d\n", ret);
1516 dev_err(gpii->gpi_dev->dev, "Error alloc_channel ret:%d\n", ret);
1527 dev_err(gpii->gpi_dev->dev, "Error Starting Channel ret:%d\n", ret);
1551 dev_dbg(gpii->gpi_dev->dev, "channel is already paused\n");
1587 dev_dbg(gpii->gpi_dev->dev, "channel is already active\n");
1598 dev_err(gpii->gpi_dev->dev, "Error starting chan, ret:%d\n", ret);
1641 struct device *dev = chan->gpii->gpi_dev->dev;
1716 struct device *dev = chan->gpii->gpi_dev->dev;
1797 struct device *dev = gpii->gpi_dev->dev;
1806 dev_err(gpii->gpi_dev->dev, "invalid dma direction: %d\n", direction);
1893 const int ev_factor = gpii->gpi_dev->ev_factor;
1906 dev_err(gpii->gpi_dev->dev, "protocol did not match protocol %u != %u\n",
1925 dev_err(gpii->gpi_dev->dev, "error config. interrupts, ret:%d\n", ret);
1932 dev_err(gpii->gpi_dev->dev, "error alloc_ev_chan:%d\n", ret);
1940 dev_err(gpii->gpi_dev->dev, "Error allocating chan:%d\n", ret);
1949 dev_err(gpii->gpi_dev->dev, "Error start chan:%d\n", ret);
1995 dev_err(gpii->gpi_dev->dev, "error resetting channel:%d\n", ret);
2068 static int gpi_find_avail_gpii(struct gpi_dev *gpi_dev, u32 seid)
2074 for (gpii = 0; gpii < gpi_dev->max_gpii; gpii++) {
2075 if (!((1 << gpii) & gpi_dev->gpii_mask))
2078 tx_chan = &gpi_dev->gpiis[gpii].gchan[GPI_TX_CHAN];
2079 rx_chan = &gpi_dev->gpiis[gpii].gchan[GPI_RX_CHAN];
2088 for (gpii = 0; gpii < gpi_dev->max_gpii; gpii++) {
2089 if (!((1 << gpii) & gpi_dev->gpii_mask))
2092 tx_chan = &gpi_dev->gpiis[gpii].gchan[GPI_TX_CHAN];
2093 rx_chan = &gpi_dev->gpiis[gpii].gchan[GPI_RX_CHAN];
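
Lines 2068-2093 show gpi_find_avail_gpii() making two passes over the GPIIs permitted by gpii_mask. Reading the visible lines, the intent appears to be: first reuse a GPII whose TX/RX channel pair is already bound to the requesting serial engine (so both channels of one SE land on the same GPII), and only then fall back to the first completely unused GPII. A hedged sketch of that search is below; the client_count test, the seid member, and the error code are assumptions.

static int gpi_find_avail_gpii_sketch(struct gpi_dev *gpi_dev, u32 seid)
{
        struct gchan *tx_chan, *rx_chan;
        unsigned int gpii;

        /* pass 1: the other half of this SE's pair may already own a GPII */
        for (gpii = 0; gpii < gpi_dev->max_gpii; gpii++) {
                if (!((1 << gpii) & gpi_dev->gpii_mask))
                        continue;
                tx_chan = &gpi_dev->gpiis[gpii].gchan[GPI_TX_CHAN];
                rx_chan = &gpi_dev->gpiis[gpii].gchan[GPI_RX_CHAN];
                if ((tx_chan->vc.chan.client_count && tx_chan->seid == seid) ||
                    (rx_chan->vc.chan.client_count && rx_chan->seid == seid))
                        return gpii;
        }

        /* pass 2: otherwise take the first GPII with no active clients */
        for (gpii = 0; gpii < gpi_dev->max_gpii; gpii++) {
                if (!((1 << gpii) & gpi_dev->gpii_mask))
                        continue;
                tx_chan = &gpi_dev->gpiis[gpii].gchan[GPI_TX_CHAN];
                rx_chan = &gpi_dev->gpiis[gpii].gchan[GPI_RX_CHAN];
                if (!tx_chan->vc.chan.client_count &&
                    !rx_chan->vc.chan.client_count)
                        return gpii;
        }

        return -EIO;    /* no free GPII for this serial engine */
}
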
2112 struct gpi_dev *gpi_dev = (struct gpi_dev *)of_dma->of_dma_data;
2118 dev_err(gpi_dev->dev, "gpii require minimum 2 args, client passed:%d args\n",
2125 dev_err(gpi_dev->dev, "gpii channel:%d not valid\n", chid);
2132 gpii = gpi_find_avail_gpii(gpi_dev, seid);
2134 dev_err(gpi_dev->dev, "no available gpii instances\n");
2138 gchan = &gpi_dev->gpiis[gpii].gchan[chid];
2140 dev_err(gpi_dev->dev, "gpii:%d chid:%d seid:%d already configured\n",
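
Lines 2112-2140 are the OF translation hook registered at line 2278: it validates the client's dma cells, maps the serial engine ID to a GPII via gpi_find_avail_gpii(), rejects a channel that already has a client, and hands the vchan back to the dmaengine core. A hedged sketch follows; the cell ordering (args[0] = channel, args[1] = serial engine), MAX_CHANNELS_PER_GPII, and the gchan members are assumptions.

static struct dma_chan *gpi_of_dma_xlate_sketch(struct of_phandle_args *args,
                                                struct of_dma *of_dma)
{
        struct gpi_dev *gpi_dev = (struct gpi_dev *)of_dma->of_dma_data;
        struct gchan *gchan;
        u32 seid, chid;
        int gpii;

        if (args->args_count < 2) {
                dev_err(gpi_dev->dev, "gpii require minimum 2 args, client passed:%d args\n",
                        args->args_count);
                return NULL;
        }

        chid = args->args[0];
        if (chid >= MAX_CHANNELS_PER_GPII) {
                dev_err(gpi_dev->dev, "gpii channel:%d not valid\n", chid);
                return NULL;
        }

        seid = args->args[1];
        gpii = gpi_find_avail_gpii(gpi_dev, seid);
        if (gpii < 0) {
                dev_err(gpi_dev->dev, "no available gpii instances\n");
                return NULL;
        }

        gchan = &gpi_dev->gpiis[gpii].gchan[chid];
        if (gchan->vc.chan.client_count) {
                dev_err(gpi_dev->dev, "gpii:%d chid:%d seid:%d already configured\n",
                        gpii, chid, seid);
                return NULL;
        }

        gchan->seid = seid;
        return dma_get_slave_channel(&gchan->vc.chan);
}
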
2153 struct gpi_dev *gpi_dev;
2158 gpi_dev = devm_kzalloc(&pdev->dev, sizeof(*gpi_dev), GFP_KERNEL);
2159 if (!gpi_dev)
2162 gpi_dev->dev = &pdev->dev;
2163 gpi_dev->regs = devm_platform_get_and_ioremap_resource(pdev, 0, &gpi_dev->res);
2164 if (IS_ERR(gpi_dev->regs))
2165 return PTR_ERR(gpi_dev->regs);
2166 gpi_dev->ee_base = gpi_dev->regs;
2168 ret = of_property_read_u32(gpi_dev->dev->of_node, "dma-channels",
2169 &gpi_dev->max_gpii);
2171 dev_err(gpi_dev->dev, "missing 'max-no-gpii' DT node\n");
2175 ret = of_property_read_u32(gpi_dev->dev->of_node, "dma-channel-mask",
2176 &gpi_dev->gpii_mask);
2178 dev_err(gpi_dev->dev, "missing 'gpii-mask' DT node\n");
2182 ee_offset = (uintptr_t)device_get_match_data(gpi_dev->dev);
2183 gpi_dev->ee_base = gpi_dev->ee_base - ee_offset;
2185 gpi_dev->ev_factor = EV_FACTOR;
2187 ret = dma_set_mask(gpi_dev->dev, DMA_BIT_MASK(64));
2189 dev_err(gpi_dev->dev, "Error setting dma_mask to 64, ret:%d\n", ret);
2193 gpi_dev->gpiis = devm_kzalloc(gpi_dev->dev, sizeof(*gpi_dev->gpiis) *
2194 gpi_dev->max_gpii, GFP_KERNEL);
2195 if (!gpi_dev->gpiis)
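
Two details in the probe's property parsing are worth flagging. First, the error strings at lines 2171 and 2178 still name the legacy 'max-no-gpii' and 'gpii-mask' properties even though the reads at lines 2168 and 2175 use the current "dma-channels" and "dma-channel-mask" bindings, so grep for the new names when debugging a missing property. Second, line 2193 sizes the gpiis array with an open-coded multiplication inside devm_kzalloc(); a hedged alternative using the overflow-checking devm_kcalloc() is sketched below.

/* Equivalent zeroed allocation with overflow checking on the element count. */
gpi_dev->gpiis = devm_kcalloc(gpi_dev->dev, gpi_dev->max_gpii,
                              sizeof(*gpi_dev->gpiis), GFP_KERNEL);
if (!gpi_dev->gpiis)
        return -ENOMEM;
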
2199 INIT_LIST_HEAD(&gpi_dev->dma_device.channels);
2200 for (i = 0; i < gpi_dev->max_gpii; i++) {
2201 struct gpii *gpii = &gpi_dev->gpiis[i];
2204 if (!((1 << i) & gpi_dev->gpii_mask))
2208 gpii->ev_cntxt_base_reg = gpi_dev->ee_base + GPII_n_EV_CH_k_CNTXT_0_OFFS(i, 0);
2209 gpii->ev_cntxt_db_reg = gpi_dev->ee_base + GPII_n_EV_CH_k_DOORBELL_0_OFFS(i, 0);
2211 gpii->ev_cmd_reg = gpi_dev->ee_base + GPII_n_EV_CH_CMD_OFFS(i);
2212 gpii->ieob_clr_reg = gpi_dev->ee_base + GPII_n_CNTXT_SRC_IEOB_IRQ_CLR_OFFS(i);
2225 gchan->ch_cntxt_base_reg = gpi_dev->ee_base +
2227 gchan->ch_cntxt_db_reg = gpi_dev->ee_base +
2229 gchan->ch_cmd_reg = gpi_dev->ee_base + GPII_n_CH_CMD_OFFS(i);
2232 vchan_init(&gchan->vc, &gpi_dev->dma_device);
2244 gpii->regs = gpi_dev->ee_base;
2245 gpii->gpi_dev = gpi_dev;
2248 platform_set_drvdata(pdev, gpi_dev);
2251 dma_cap_zero(gpi_dev->dma_device.cap_mask);
2252 dma_cap_set(DMA_SLAVE, gpi_dev->dma_device.cap_mask);
2255 gpi_dev->dma_device.directions = BIT(DMA_DEV_TO_MEM) | BIT(DMA_MEM_TO_DEV);
2256 gpi_dev->dma_device.residue_granularity = DMA_RESIDUE_GRANULARITY_DESCRIPTOR;
2257 gpi_dev->dma_device.src_addr_widths = DMA_SLAVE_BUSWIDTH_8_BYTES;
2258 gpi_dev->dma_device.dst_addr_widths = DMA_SLAVE_BUSWIDTH_8_BYTES;
2259 gpi_dev->dma_device.device_alloc_chan_resources = gpi_alloc_chan_resources;
2260 gpi_dev->dma_device.device_free_chan_resources = gpi_free_chan_resources;
2261 gpi_dev->dma_device.device_tx_status = dma_cookie_status;
2262 gpi_dev->dma_device.device_issue_pending = gpi_issue_pending;
2263 gpi_dev->dma_device.device_prep_slave_sg = gpi_prep_slave_sg;
2264 gpi_dev->dma_device.device_config = gpi_peripheral_config;
2265 gpi_dev->dma_device.device_terminate_all = gpi_terminate_all;
2266 gpi_dev->dma_device.dev = gpi_dev->dev;
2267 gpi_dev->dma_device.device_pause = gpi_pause;
2268 gpi_dev->dma_device.device_resume = gpi_resume;
2271 ret = dma_async_device_register(&gpi_dev->dma_device);
2273 dev_err(gpi_dev->dev, "async_device_register failed ret:%d", ret);
2277 ret = of_dma_controller_register(gpi_dev->dev->of_node,
2278 gpi_of_dma_xlate, gpi_dev);
2280 dev_err(gpi_dev->dev, "of_dma_controller_reg failed ret:%d", ret);
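
Lines 2251-2280 finish probe by advertising a slave-DMA capable dma_device and registering gpi_of_dma_xlate() as the OF translation hook. From a client's point of view (a GENI SPI/I2C/UART controller, for example), the callbacks wired up at lines 2259-2268 are reached through the standard dmaengine slave API. The hedged sketch below shows that consumer side only for illustration; the "tx" channel name and the prep flags are assumptions, and a real GPI client would also pass its protocol-specific peripheral_config through dmaengine_slave_config() (device_config, line 2264) before preparing descriptors.

#include <linux/dmaengine.h>
#include <linux/err.h>

static int gpi_client_xfer_sketch(struct device *dev, struct scatterlist *sgl,
                                  unsigned int nents)
{
        struct dma_async_tx_descriptor *desc;
        struct dma_chan *chan;
        dma_cookie_t cookie;

        /* resolved through gpi_of_dma_xlate() registered at line 2278 */
        chan = dma_request_chan(dev, "tx");
        if (IS_ERR(chan))
                return PTR_ERR(chan);

        /* ends up in gpi_prep_slave_sg() via device_prep_slave_sg (line 2263) */
        desc = dmaengine_prep_slave_sg(chan, sgl, nents, DMA_MEM_TO_DEV,
                                       DMA_PREP_INTERRUPT);
        if (!desc) {
                dma_release_channel(chan);
                return -EINVAL;
        }

        cookie = dmaengine_submit(desc);
        if (dma_submit_error(cookie)) {
                dma_release_channel(chan);
                return -EINVAL;
        }

        /* kicks gpi_issue_pending() via device_issue_pending (line 2262) */
        dma_async_issue_pending(chan);
        return 0;
}
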