Lines Matching defs:xbar (TI DMA crossbar driver: ti_am335x_xbar_* / ti_dra7_xbar_*)
43 u32 xbar_events; /* maximum number of events to select in xbar */
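
For context, the fields referenced by the matches suggest a per-device state structure roughly like the sketch below; the field set is inferred from the matched lines, the layout and comments are not verbatim.

        struct ti_am335x_xbar_data {
                void __iomem *iomem;            /* mapped crossbar (event mux) registers */
                struct dma_router dmarouter;    /* router handed to the DMA core */
                u32 xbar_events;                /* maximum number of events to select in xbar */
                u32 dma_requests;               /* number of DMA request lines behind the xbar */
        };
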
67 struct ti_am335x_xbar_data *xbar = dev_get_drvdata(dev);
73 ti_am335x_xbar_write(xbar->iomem, map->dma_line, 0);
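
Lines 67 and 73 sit inside the router's route_free callback. A minimal sketch of that callback, assuming the mapping is tracked in a small kzalloc'd struct (here called ti_am335x_xbar_map, with the dma_line and mux_val fields used on line 121):

        static void ti_am335x_xbar_free(struct device *dev, void *route_data)
        {
                struct ti_am335x_xbar_data *xbar = dev_get_drvdata(dev);
                struct ti_am335x_xbar_map *map = route_data;

                /* Clear the event mux for this DMA request line, then drop the map */
                ti_am335x_xbar_write(xbar->iomem, map->dma_line, 0);
                kfree(map);
        }
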
81 struct ti_am335x_xbar_data *xbar = platform_get_drvdata(pdev);
87 if (dma_spec->args[2] >= xbar->xbar_events) {
93 if (dma_spec->args[0] >= xbar->dma_requests) {
121 ti_am335x_xbar_write(xbar->iomem, map->dma_line, map->mux_val);
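
Lines 81-121 belong to the route-allocate callback: the consumer's dma_spec is validated against the limits read at probe time, a map is allocated, and the selected event is programmed into the crossbar. A sketch of that flow; the rewrite of dma_spec towards the DMA master is elided, and the exact error paths are assumptions, only the matched lines are verbatim:

        static void *ti_am335x_xbar_route_allocate(struct of_phandle_args *dma_spec,
                                                   struct of_dma *ofdma)
        {
                struct platform_device *pdev = of_find_device_by_node(ofdma->of_node);
                struct ti_am335x_xbar_data *xbar = platform_get_drvdata(pdev);
                struct ti_am335x_xbar_map *map;

                /* args[0]: DMA request line, args[2]: crossbar event number */
                if (dma_spec->args[2] >= xbar->xbar_events)
                        return ERR_PTR(-EINVAL);
                if (dma_spec->args[0] >= xbar->dma_requests)
                        return ERR_PTR(-EINVAL);

                map = kzalloc(sizeof(*map), GFP_NOWAIT);
                if (!map)
                        return ERR_PTR(-ENOMEM);

                map->dma_line = dma_spec->args[0];
                map->mux_val = dma_spec->args[2];

                /* Route the selected event to the chosen DMA request line */
                ti_am335x_xbar_write(xbar->iomem, map->dma_line, map->mux_val);

                return map;
        }
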
136 struct ti_am335x_xbar_data *xbar;
143 xbar = devm_kzalloc(&pdev->dev, sizeof(*xbar), GFP_KERNEL);
144 if (!xbar)
161 &xbar->dma_requests)) {
165 xbar->dma_requests = TI_AM335X_XBAR_LINES;
169 if (of_property_read_u32(node, "dma-requests", &xbar->xbar_events)) {
173 xbar->xbar_events = TI_AM335X_XBAR_LINES;
180 xbar->iomem = iomem;
182 xbar->dmarouter.dev = &pdev->dev;
183 xbar->dmarouter.route_free = ti_am335x_xbar_free;
185 platform_set_drvdata(pdev, xbar);
188 for (i = 0; i < xbar->dma_requests; i++)
189 ti_am335x_xbar_write(xbar->iomem, i, 0);
192 &xbar->dmarouter);
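
The probe matches (lines 136-192) show the setup order: allocate the state, read the request/event counts from "dma-requests" properties (falling back to TI_AM335X_XBAR_LINES), take over the ioremapped registers, clear every mux, and register the router. A condensed sketch of the tail of probe; which node each "dma-requests" read targets (DMA master vs. crossbar node) is an assumption here:

        if (of_property_read_u32(dma_node, "dma-requests", &xbar->dma_requests))
                xbar->dma_requests = TI_AM335X_XBAR_LINES;
        if (of_property_read_u32(node, "dma-requests", &xbar->xbar_events))
                xbar->xbar_events = TI_AM335X_XBAR_LINES;

        xbar->iomem = iomem;
        xbar->dmarouter.dev = &pdev->dev;
        xbar->dmarouter.route_free = ti_am335x_xbar_free;
        platform_set_drvdata(pdev, xbar);

        /* Clear the event mux for every DMA request line */
        for (i = 0; i < xbar->dma_requests; i++)
                ti_am335x_xbar_write(xbar->iomem, i, 0);

        return of_dma_router_register(node, ti_am335x_xbar_route_allocate,
                                      &xbar->dmarouter);
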
219 static inline void ti_dra7_xbar_write(void __iomem *iomem, int xbar, u16 val)
221 writew_relaxed(val, iomem + (xbar * 2));
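
Unlike the AM335x helper, each DRA7 crossbar output has its own 16-bit mux register, so the write above lands at byte offset xbar * 2. Routing a crossbar input to an output is then a single relaxed write, as line 289 does; the variable names below are illustrative:

        /* feed crossbar input 'in' to output (DMA request) 'out' */
        ti_dra7_xbar_write(xbar->iomem, out, in);
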
226 struct ti_dra7_xbar_data *xbar = dev_get_drvdata(dev);
232 ti_dra7_xbar_write(xbar->iomem, map->xbar_out, xbar->safe_val);
233 mutex_lock(&xbar->mutex);
234 clear_bit(map->xbar_out, xbar->dma_inuse);
235 mutex_unlock(&xbar->mutex);
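
Lines 226-235 form the DRA7 route_free callback: the output is parked on the safe value and its bit in the in-use bitmap is released under the mutex. A minimal sketch, assuming the map struct is the one allocated by the route-allocate path:

        static void ti_dra7_xbar_free(struct device *dev, void *route_data)
        {
                struct ti_dra7_xbar_data *xbar = dev_get_drvdata(dev);
                struct ti_dra7_xbar_map *map = route_data;

                /* Park the output on the safe (unused) input */
                ti_dra7_xbar_write(xbar->iomem, map->xbar_out, xbar->safe_val);

                mutex_lock(&xbar->mutex);
                clear_bit(map->xbar_out, xbar->dma_inuse);
                mutex_unlock(&xbar->mutex);

                kfree(map);
        }
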
243 struct ti_dra7_xbar_data *xbar = platform_get_drvdata(pdev);
246 if (dma_spec->args[0] >= xbar->xbar_requests) {
268 mutex_lock(&xbar->mutex);
269 map->xbar_out = find_first_zero_bit(xbar->dma_inuse,
270 xbar->dma_requests);
271 if (map->xbar_out == xbar->dma_requests) {
272 mutex_unlock(&xbar->mutex);
279 set_bit(map->xbar_out, xbar->dma_inuse);
280 mutex_unlock(&xbar->mutex);
284 dma_spec->args[0] = map->xbar_out + xbar->dma_offset;
289 ti_dra7_xbar_write(xbar->iomem, map->xbar_out, map->xbar_in);
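
On the allocate side (lines 243-289) the requested crossbar input (dma_spec->args[0]) is bounds-checked, a free output is claimed from the dma_inuse bitmap under the mutex, the consumer's dma_spec is rewritten to that output plus dma_offset, and the mux is programmed. Sketch of the core sequence; the error codes and the cleanup of the map are simplified assumptions:

        if (dma_spec->args[0] >= xbar->xbar_requests)
                return ERR_PTR(-EINVAL);

        mutex_lock(&xbar->mutex);
        map->xbar_out = find_first_zero_bit(xbar->dma_inuse,
                                            xbar->dma_requests);
        if (map->xbar_out == xbar->dma_requests) {
                /* Every crossbar output is already claimed */
                mutex_unlock(&xbar->mutex);
                kfree(map);
                return ERR_PTR(-ENOMEM);
        }
        set_bit(map->xbar_out, xbar->dma_inuse);
        mutex_unlock(&xbar->mutex);

        map->xbar_in = dma_spec->args[0];

        /* Hand the consumer the real DMA request number behind this output */
        dma_spec->args[0] = map->xbar_out + xbar->dma_offset;

        ti_dra7_xbar_write(xbar->iomem, map->xbar_out, map->xbar_in);

        return map;
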
328 struct ti_dra7_xbar_data *xbar;
338 xbar = devm_kzalloc(&pdev->dev, sizeof(*xbar), GFP_KERNEL);
339 if (!xbar)
356 &xbar->dma_requests)) {
360 xbar->dma_requests = TI_DRA7_XBAR_OUTPUTS;
364 xbar->dma_inuse = devm_kcalloc(&pdev->dev,
365 BITS_TO_LONGS(xbar->dma_requests),
367 if (!xbar->dma_inuse)
370 if (of_property_read_u32(node, "dma-requests", &xbar->xbar_requests)) {
374 xbar->xbar_requests = TI_DRA7_XBAR_INPUTS;
378 xbar->safe_val = (u16)safe_val;
404 xbar->dma_inuse);
413 xbar->iomem = iomem;
415 xbar->dmarouter.dev = &pdev->dev;
416 xbar->dmarouter.route_free = ti_dra7_xbar_free;
417 xbar->dma_offset = *(u32 *)match->data;
419 mutex_init(&xbar->mutex);
420 platform_set_drvdata(pdev, xbar);
423 for (i = 0; i < xbar->dma_requests; i++) {
424 if (!test_bit(i, xbar->dma_inuse))
425 ti_dra7_xbar_write(xbar->iomem, i, xbar->safe_val);
429 &xbar->dmarouter);
432 for (i = 0; i < xbar->dma_requests; i++) {
433 if (!test_bit(i, xbar->dma_inuse))
434 ti_dra7_xbar_write(xbar->iomem, i, i);
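
The DRA7 probe matches (lines 328-434) end with two symmetric loops: before registering the router, every output not reserved in dma_inuse is parked on safe_val; if of_dma_router_register() fails, the same outputs are restored to the identity mapping (output i fed by input i). That the second loop is the error path is an inference from the line order; a condensed sketch:

        mutex_init(&xbar->mutex);
        platform_set_drvdata(pdev, xbar);

        /* Park every non-reserved output on the safe value */
        for (i = 0; i < xbar->dma_requests; i++) {
                if (!test_bit(i, xbar->dma_inuse))
                        ti_dra7_xbar_write(xbar->iomem, i, xbar->safe_val);
        }

        ret = of_dma_router_register(node, ti_dra7_xbar_route_allocate,
                                     &xbar->dmarouter);
        if (ret) {
                /* Registration failed: restore the default 1:1 mapping */
                for (i = 0; i < xbar->dma_requests; i++) {
                        if (!test_bit(i, xbar->dma_inuse))
                                ti_dra7_xbar_write(xbar->iomem, i, i);
                }
        }

        return ret;
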