Lines Matching refs:safe
54 /* safe buffer info */
56 void *safe;
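The two matched lines above come from the per-mapping bookkeeping record. Only the `safe` pointer itself matches the search, but the other accesses in this listing (buf->ptr, buf->size, buf->safe_dma_addr, buf->pool, buf->safe_dma_addr) imply roughly the layout below. The struct name and exact field order are an illustrative reconstruction, not the verbatim source:

    #include <linux/list.h>
    #include <linux/types.h>

    struct dma_pool;

    /* one record per bounced mapping (illustrative reconstruction) */
    struct safe_buffer {
            struct list_head node;          /* linked into the device's list of live mappings */
            void            *ptr;           /* original ("unsafe") buffer supplied by the driver */
            size_t           size;          /* length of the mapping */
            int              direction;     /* DMA_TO_DEVICE / DMA_FROM_DEVICE / DMA_BIDIRECTIONAL */

            /* safe buffer info */
            struct dma_pool *pool;          /* pool the safe copy came from, or NULL if coherent-alloc'd */
            void            *safe;          /* kernel virtual address of the bounce copy */
            dma_addr_t       safe_dma_addr; /* bus address handed to the device */
    };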
104 /* allocate a 'safe' buffer and keep track of it */
137 buf->safe = dma_pool_alloc(pool->pool, GFP_ATOMIC,
140 buf->safe = dma_alloc_coherent(dev, size, &buf->safe_dma_addr,
144 if (buf->safe == NULL) {
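Lines 104-144 are the allocation path: a bounce buffer is carved out of a preallocated DMA pool when the request is small enough, falls back to dma_alloc_coherent() otherwise, and the result is tracked so it can be found again at unmap time. A minimal sketch of that pattern, continuing the hypothetical struct safe_buffer above; the dmabounce_device_info type, its fields, and the function name are likewise assumptions, and locking around the list is omitted for brevity:

    #include <linux/dma-mapping.h>
    #include <linux/dmapool.h>
    #include <linux/slab.h>

    /* assumed per-device bookkeeping; not the verbatim source */
    struct dmabounce_device_info {
            struct device    *dev;
            struct dma_pool  *small_pool;   /* pool for small bounce buffers */
            size_t            small_size;   /* largest size the pool can satisfy */
            struct list_head  safe_buffers; /* live struct safe_buffer records */
    };

    /* allocate a 'safe' buffer and keep track of it */
    static struct safe_buffer *
    alloc_safe_buffer(struct dmabounce_device_info *info, void *ptr, size_t size)
    {
            struct safe_buffer *buf;

            buf = kmalloc(sizeof(*buf), GFP_ATOMIC);
            if (!buf)
                    return NULL;

            buf->ptr  = ptr;
            buf->size = size;

            if (size <= info->small_size) {
                    /* fast path: take the safe copy from the preallocated pool */
                    buf->pool = info->small_pool;
                    buf->safe = dma_pool_alloc(info->small_pool, GFP_ATOMIC,
                                               &buf->safe_dma_addr);
            } else {
                    /* oversized request: one-off coherent allocation instead */
                    buf->pool = NULL;
                    buf->safe = dma_alloc_coherent(info->dev, size,
                                                   &buf->safe_dma_addr, GFP_ATOMIC);
            }

            if (buf->safe == NULL) {
                    kfree(buf);
                    return NULL;
            }

            /* track it so unmap/sync can find this mapping later */
            list_add(&buf->node, &info->safe_buffers);
            return buf;
    }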
165 /* determine if a buffer is from our "safe" pool */
199 dma_pool_free(buf->pool->pool, buf->safe, buf->safe_dma_addr);
201 dma_free_coherent(device_info->dev, buf->size, buf->safe,
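Line 165 marks the lookup that decides whether a dma_addr_t handed back by a driver refers to one of these tracked bounce buffers, and lines 199-201 free the safe copy back to whichever allocator produced it. Note that line 199 shows buf->pool->pool, i.e. the real code wraps the dma_pool in its own pool descriptor; the hedged sketch below flattens that to a bare struct dma_pool pointer and continues the assumed types from above:

    /* determine if a buffer is from our "safe" pool: walk the tracked mappings */
    static struct safe_buffer *
    find_safe_buffer(struct dmabounce_device_info *info, dma_addr_t safe_dma_addr)
    {
            struct safe_buffer *buf;

            list_for_each_entry(buf, &info->safe_buffers, node)
                    if (buf->safe_dma_addr == safe_dma_addr)
                            return buf;

            return NULL;    /* not ours: the mapping never needed bouncing */
    }

    static void
    free_safe_buffer(struct dmabounce_device_info *info, struct safe_buffer *buf)
    {
            list_del(&buf->node);

            if (buf->pool)
                    dma_pool_free(buf->pool, buf->safe, buf->safe_dma_addr);
            else
                    dma_free_coherent(info->dev, buf->size, buf->safe,
                                      buf->safe_dma_addr);

            kfree(buf);
    }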
263 buf->safe, buf->safe_dma_addr);
267 dev_dbg(dev, "%s: copy unsafe %p to safe %p, size %d\n",
268 __func__, ptr, buf->safe, size);
269 memcpy(buf->safe, ptr, size);
284 buf->safe, buf->safe_dma_addr);
292 dev_dbg(dev, "%s: copy back safe %p to unsafe %p size %d\n",
293 __func__, buf->safe, ptr, size);
294 memcpy(ptr, buf->safe, size);
310 * allocate a 'safe' buffer and copy the unsafe buffer into it.
311 * substitute the safe buffer for the unsafe one.
312 * (basically move the buffer from an unsafe area to a safe one)
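Lines 310-312 summarize the map path that the copy lines at 263-269 belong to: when the driver's buffer sits in memory the device cannot reach, a safe buffer is allocated, the data is copied into it for DMA_TO_DEVICE and DMA_BIDIRECTIONAL transfers (the memcpy at line 269), and the device is handed the safe buffer's bus address instead of the original one. A sketch of that substitution, under the assumed types above; the function name and the DMA_MAPPING_ERROR return convention are illustrative, not taken from this file:

    static dma_addr_t
    map_single_bounced(struct dmabounce_device_info *info, void *ptr, size_t size,
                       enum dma_data_direction dir)
    {
            struct safe_buffer *buf;

            buf = alloc_safe_buffer(info, ptr, size);
            if (!buf)
                    return DMA_MAPPING_ERROR;   /* assumed error convention */

            /* move the buffer from an unsafe area to a safe one */
            if (dir == DMA_TO_DEVICE || dir == DMA_BIDIRECTIONAL) {
                    dev_dbg(info->dev, "%s: copy unsafe %p to safe %p, size %zu\n",
                            __func__, ptr, buf->safe, size);
                    memcpy(buf->safe, ptr, size);
            }

            /* substitute the safe buffer for the unsafe one */
            return buf->safe_dma_addr;
    }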
344 * see if a mapped address was really a "safe" buffer and if so, copy
345 * the data from the safe buffer back to the unsafe buffer and free up
346 * the safe buffer. (basically return things back to the way they
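Lines 344-346 describe the inverse path that the copy lines at 284-294 belong to: on unmap, the address is checked against the tracked safe buffers, and if it matches, data the device may have written is copied back into the driver's original buffer (the memcpy at line 294) before the safe buffer is released. Roughly, with the same assumed helpers:

    static void
    unmap_single_bounced(struct dmabounce_device_info *info, dma_addr_t dma_addr,
                         size_t size, enum dma_data_direction dir)
    {
            /* see if a mapped address was really a "safe" buffer */
            struct safe_buffer *buf = find_safe_buffer(info, dma_addr);

            if (!buf)
                    return;     /* plain mapping, nothing to undo */

            /* copy the data from the safe buffer back to the unsafe buffer */
            if (dir == DMA_FROM_DEVICE || dir == DMA_BIDIRECTIONAL) {
                    dev_dbg(info->dev, "%s: copy back safe %p to unsafe %p size %zu\n",
                            __func__, buf->safe, buf->ptr, size);
                    memcpy(buf->ptr, buf->safe, size);
            }

            /* ...and free up the safe buffer */
            free_safe_buffer(info, buf);
    }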
385 buf->safe, buf->safe_dma_addr);
390 dev_dbg(dev, "%s: copy back safe %p to unsafe %p size %d\n",
391 __func__, buf->safe + off, buf->ptr + off, sz);
392 memcpy(buf->ptr + off, buf->safe + off, sz);
425 buf->safe, buf->safe_dma_addr);
430 dev_dbg(dev, "%s: copy out unsafe %p to safe %p, size %d\n",
431 __func__, buf->ptr + off, buf->safe + off, sz);
432 memcpy(buf->safe + off, buf->ptr + off, sz);
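The last two groups (lines 385-392 and 425-432) are the partial-sync paths. They perform the same copies as map/unmap but only over the offset and length the caller asks about, and without tearing the mapping down: sync-for-cpu copies from the safe buffer back to the driver's buffer, sync-for-device copies the other way before the device reads it. A combined sketch of both directions, again with assumed names (the real code exposes separate for_cpu/for_device entry points rather than a bool flag):

    static void
    sync_single_bounced(struct dmabounce_device_info *info, dma_addr_t dma_addr,
                        unsigned long off, size_t sz, enum dma_data_direction dir,
                        bool for_cpu)
    {
            struct safe_buffer *buf = find_safe_buffer(info, dma_addr);

            if (!buf)
                    return;     /* not a bounced mapping */

            if (for_cpu && (dir == DMA_FROM_DEVICE || dir == DMA_BIDIRECTIONAL)) {
                    /* device has written: copy back safe -> unsafe for the CPU to read */
                    memcpy(buf->ptr + off, buf->safe + off, sz);
            } else if (!for_cpu && (dir == DMA_TO_DEVICE || dir == DMA_BIDIRECTIONAL)) {
                    /* CPU has written: copy out unsafe -> safe before the device reads */
                    memcpy(buf->safe + off, buf->ptr + off, sz);
            }
    }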