Lines matching defs:xbar (TI DMA crossbar driver: AM335x and DRA7 variants)
42 u32 xbar_events; /* maximum number of events to select in xbar */
66 struct ti_am335x_xbar_data *xbar = dev_get_drvdata(dev);
72 ti_am335x_xbar_write(xbar->iomem, map->dma_line, 0);
80 struct ti_am335x_xbar_data *xbar = platform_get_drvdata(pdev);
86 if (dma_spec->args[2] >= xbar->xbar_events) {
92 if (dma_spec->args[0] >= xbar->dma_requests) {
120 ti_am335x_xbar_write(xbar->iomem, map->dma_line, map->mux_val);
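Lines 86, 92, and 120 above are the heart of the AM335x route_allocate path: the DT cells are range-checked against the probed xbar_events and dma_requests values, and only then is the event-to-line mapping programmed. A minimal sketch of that shape follows; the ti_am335x_xbar_map structure, its allocation, and the error handling are not part of the listing and are assumptions here (the driver's own struct definitions are taken as in scope).

/*
 * Sketch of the AM335x route_allocate shape implied by lines 86/92/120.
 * Only the two range checks and the final crossbar write come from the
 * listing; everything else is illustrative.
 */
#include <linux/err.h>
#include <linux/of_dma.h>
#include <linux/of_platform.h>
#include <linux/platform_device.h>
#include <linux/slab.h>

static void *am335x_route_allocate_sketch(struct of_phandle_args *dma_spec,
					  struct of_dma *ofdma)
{
	struct platform_device *pdev = of_find_device_by_node(ofdma->of_node);
	struct ti_am335x_xbar_data *xbar = platform_get_drvdata(pdev);
	struct ti_am335x_xbar_map *map;	/* assumed: u16 dma_line, u8 mux_val */

	if (dma_spec->args[2] >= xbar->xbar_events)	/* crossbar event number */
		return ERR_PTR(-EINVAL);
	if (dma_spec->args[0] >= xbar->dma_requests)	/* DMA request line */
		return ERR_PTR(-EINVAL);

	map = kzalloc(sizeof(*map), GFP_NOWAIT);
	if (!map)
		return ERR_PTR(-ENOMEM);

	map->dma_line = (u16)dma_spec->args[0];
	map->mux_val = (u8)dma_spec->args[2];

	/* Route the selected event to the chosen DMA request line (line 120). */
	ti_am335x_xbar_write(xbar->iomem, map->dma_line, map->mux_val);
	return map;
}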
135 struct ti_am335x_xbar_data *xbar;
142 xbar = devm_kzalloc(&pdev->dev, sizeof(*xbar), GFP_KERNEL);
143 if (!xbar)
160 &xbar->dma_requests)) {
164 xbar->dma_requests = TI_AM335X_XBAR_LINES;
168 if (of_property_read_u32(node, "dma-requests", &xbar->xbar_events)) {
172 xbar->xbar_events = TI_AM335X_XBAR_LINES;
179 xbar->iomem = iomem;
181 xbar->dmarouter.dev = &pdev->dev;
182 xbar->dmarouter.route_free = ti_am335x_xbar_free;
184 platform_set_drvdata(pdev, xbar);
187 for (i = 0; i < xbar->dma_requests; i++)
188 ti_am335x_xbar_write(xbar->iomem, i, 0);
191 &xbar->dmarouter);
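Lines 135-191 outline the AM335x probe flow: allocate the driver data, read "dma-requests" (falling back to TI_AM335X_XBAR_LINES when absent), map the crossbar registers, clear every line to 0, and register the DMA router with ti_am335x_xbar_free as the route_free callback. A condensed sketch follows; the device-tree node lookups, which node each property is read from, and most error paths are abbreviated and should be treated as assumptions.

/*
 * Condensed probe sketch assembled from lines 135-191. DT master-node
 * handling is abbreviated; am335x_route_allocate_sketch() is the sketch
 * shown earlier, standing in for the driver's real allocate callback.
 */
static int am335x_xbar_probe_sketch(struct platform_device *pdev)
{
	struct device_node *node = pdev->dev.of_node;
	struct ti_am335x_xbar_data *xbar;
	void __iomem *iomem;
	int i;

	xbar = devm_kzalloc(&pdev->dev, sizeof(*xbar), GFP_KERNEL);
	if (!xbar)
		return -ENOMEM;

	/*
	 * Lines 160-172 show both counts read from a "dma-requests"
	 * property with the same fallback; the exact source node for
	 * each read is abbreviated here.
	 */
	if (of_property_read_u32(node, "dma-requests", &xbar->dma_requests))
		xbar->dma_requests = TI_AM335X_XBAR_LINES;
	if (of_property_read_u32(node, "dma-requests", &xbar->xbar_events))
		xbar->xbar_events = TI_AM335X_XBAR_LINES;

	iomem = devm_platform_ioremap_resource(pdev, 0);
	if (IS_ERR(iomem))
		return PTR_ERR(iomem);
	xbar->iomem = iomem;

	xbar->dmarouter.dev = &pdev->dev;
	xbar->dmarouter.route_free = ti_am335x_xbar_free;
	platform_set_drvdata(pdev, xbar);

	/* Reset every crossbar line before exposing the router (lines 187-188). */
	for (i = 0; i < xbar->dma_requests; i++)
		ti_am335x_xbar_write(xbar->iomem, i, 0);

	return of_dma_router_register(node, am335x_route_allocate_sketch,
				      &xbar->dmarouter);
}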
218 static inline void ti_dra7_xbar_write(void __iomem *iomem, int xbar, u16 val)
220 writew_relaxed(val, iomem + (xbar * 2));
225 struct ti_dra7_xbar_data *xbar = dev_get_drvdata(dev);
231 ti_dra7_xbar_write(xbar->iomem, map->xbar_out, xbar->safe_val);
232 mutex_lock(&xbar->mutex);
233 clear_bit(map->xbar_out, xbar->dma_inuse);
234 mutex_unlock(&xbar->mutex);
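Lines 225-234 are the DRA7 route_free path: the released crossbar output is parked on safe_val (a dummy event) and its bit in the dma_inuse bitmap is cleared under the mutex so the allocator can reuse it. ti_dra7_xbar_write() at lines 218-220 also shows the register layout assumed throughout this half of the driver: one 16-bit register per crossbar output, hence the iomem + (xbar * 2) offset. A sketch of the free callback, with the map type and its final kfree() assumed:

/*
 * Sketch of the DRA7 route_free shape from lines 225-234; the map
 * structure and the trailing kfree() are assumptions.
 */
static void dra7_xbar_free_sketch(struct device *dev, void *route_data)
{
	struct ti_dra7_xbar_data *xbar = dev_get_drvdata(dev);
	struct ti_dra7_xbar_map *map = route_data;

	/* Park the output on the safe (dummy) event ... */
	ti_dra7_xbar_write(xbar->iomem, map->xbar_out, xbar->safe_val);

	/* ... then return the output to the allocator's bitmap. */
	mutex_lock(&xbar->mutex);
	clear_bit(map->xbar_out, xbar->dma_inuse);
	mutex_unlock(&xbar->mutex);

	kfree(map);
}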
242 struct ti_dra7_xbar_data *xbar = platform_get_drvdata(pdev);
245 if (dma_spec->args[0] >= xbar->xbar_requests) {
267 mutex_lock(&xbar->mutex);
268 map->xbar_out = find_first_zero_bit(xbar->dma_inuse,
269 xbar->dma_requests);
270 if (map->xbar_out == xbar->dma_requests) {
271 mutex_unlock(&xbar->mutex);
278 set_bit(map->xbar_out, xbar->dma_inuse);
279 mutex_unlock(&xbar->mutex);
283 dma_spec->args[0] = map->xbar_out + xbar->dma_offset;
288 ti_dra7_xbar_write(xbar->iomem, map->xbar_out, map->xbar_in);
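Lines 242-288 show the DRA7 allocator: the requested event is range-checked against xbar_requests, a free crossbar output is claimed from the dma_inuse bitmap under the mutex, dma_spec->args[0] is rewritten to the chosen output plus dma_offset so the DMA controller sees its own request-line numbering, and the event is finally routed to that output. A sketch of that sequence, with the map structure, its allocation, and the error reporting assumed:

/*
 * Sketch of the DRA7 route_allocate shape from lines 242-288. The two
 * bitmap operations and the final write are from the listing; the rest
 * is illustrative.
 */
static void *dra7_route_allocate_sketch(struct of_phandle_args *dma_spec,
					struct of_dma *ofdma)
{
	struct platform_device *pdev = of_find_device_by_node(ofdma->of_node);
	struct ti_dra7_xbar_data *xbar = platform_get_drvdata(pdev);
	struct ti_dra7_xbar_map *map;	/* assumed: u16 xbar_in, int xbar_out */

	if (dma_spec->args[0] >= xbar->xbar_requests)	/* event number */
		return ERR_PTR(-EINVAL);

	map = kzalloc(sizeof(*map), GFP_NOWAIT);
	if (!map)
		return ERR_PTR(-ENOMEM);

	/* Claim a free crossbar output under the mutex (lines 267-279). */
	mutex_lock(&xbar->mutex);
	map->xbar_out = find_first_zero_bit(xbar->dma_inuse,
					    xbar->dma_requests);
	if (map->xbar_out == xbar->dma_requests) {
		/* Every output is already in use. */
		mutex_unlock(&xbar->mutex);
		kfree(map);
		return ERR_PTR(-ENOMEM);
	}
	set_bit(map->xbar_out, xbar->dma_inuse);
	mutex_unlock(&xbar->mutex);

	map->xbar_in = (u16)dma_spec->args[0];

	/* The DMA controller is handed its own request-line number ... */
	dma_spec->args[0] = map->xbar_out + xbar->dma_offset;
	/* ... while the crossbar routes the requested event to that output. */
	ti_dra7_xbar_write(xbar->iomem, map->xbar_out, map->xbar_in);

	return map;
}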
327 struct ti_dra7_xbar_data *xbar;
337 xbar = devm_kzalloc(&pdev->dev, sizeof(*xbar), GFP_KERNEL);
338 if (!xbar)
355 &xbar->dma_requests)) {
359 xbar->dma_requests = TI_DRA7_XBAR_OUTPUTS;
363 xbar->dma_inuse = devm_kcalloc(&pdev->dev,
364 BITS_TO_LONGS(xbar->dma_requests),
366 if (!xbar->dma_inuse)
369 if (of_property_read_u32(node, "dma-requests", &xbar->xbar_requests)) {
373 xbar->xbar_requests = TI_DRA7_XBAR_INPUTS;
377 xbar->safe_val = (u16)safe_val;
403 xbar->dma_inuse);
412 xbar->iomem = iomem;
414 xbar->dmarouter.dev = &pdev->dev;
415 xbar->dmarouter.route_free = ti_dra7_xbar_free;
416 xbar->dma_offset = *(u32 *)match->data;
418 mutex_init(&xbar->mutex);
419 platform_set_drvdata(pdev, xbar);
422 for (i = 0; i < xbar->dma_requests; i++) {
423 if (!test_bit(i, xbar->dma_inuse))
424 ti_dra7_xbar_write(xbar->iomem, i, xbar->safe_val);
428 &xbar->dmarouter);
431 for (i = 0; i < xbar->dma_requests; i++) {
432 if (!test_bit(i, xbar->dma_inuse))
433 ti_dra7_xbar_write(xbar->iomem, i, i);
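The DRA7 probe excerpt (lines 327-433) follows the AM335x outline with three additions visible in the listing: a dma_inuse bitmap sized with BITS_TO_LONGS(dma_requests) at lines 363-366 (which line 403 appears to populate with reserved outputs), a safe_val set at line 377 and used to park every unused output at line 424, and an unwind loop at lines 431-433 that reprograms each unused output back to the matching input (output i fed by event i) if router registration fails. A sketch of just those two loops, with the surrounding probe context assumed:

/*
 * Sketch of the DRA7 init and unwind loops from lines 422-433; the
 * caller, node, and xbar setup are assumed to match the probe above.
 * dra7_route_allocate_sketch() stands in for the driver's allocate
 * callback.
 */
static int dra7_xbar_register_sketch(struct device_node *node,
				     struct ti_dra7_xbar_data *xbar)
{
	int i, ret;

	/* Park every output that is not reserved on the safe (dummy) event. */
	for (i = 0; i < xbar->dma_requests; i++) {
		if (!test_bit(i, xbar->dma_inuse))
			ti_dra7_xbar_write(xbar->iomem, i, xbar->safe_val);
	}

	ret = of_dma_router_register(node, dra7_route_allocate_sketch,
				     &xbar->dmarouter);
	if (ret) {
		/*
		 * Registration failed: reprogram the unreserved outputs to
		 * the i -> i mapping, as lines 431-433 do.
		 */
		for (i = 0; i < xbar->dma_requests; i++) {
			if (!test_bit(i, xbar->dma_inuse))
				ti_dra7_xbar_write(xbar->iomem, i, i);
		}
	}

	return ret;
}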