Home
last modified time | relevance | path

Searched refs:device (Results 1 – 6 of 6) sorted by relevance

/crypto/async_tx/
async_tx.c:50 dma_has_cap(tx_type, depend_tx->chan->device->cap_mask)) in __async_tx_find_channel()
69 struct dma_device *device = chan->device; in async_tx_channel_switch() local
82 device->device_issue_pending(chan); in async_tx_channel_switch()
89 if (dma_has_cap(DMA_INTERRUPT, device->cap_mask)) in async_tx_channel_switch()
90 intr_tx = device->device_prep_dma_interrupt(chan, 0); in async_tx_channel_switch()
116 device->device_issue_pending(chan); in async_tx_channel_switch()
224 struct dma_device *device; in async_trigger_callback() local
230 device = chan->device; in async_trigger_callback()
235 if (device && !dma_has_cap(DMA_INTERRUPT, device->cap_mask)) in async_trigger_callback()
236 device = NULL; in async_trigger_callback()
[all …]
async_xor.c:26 struct dma_device *dma = chan->device; in do_async_xor()
146 if (!is_dma_xor_aligned(device, offset, 0, len)) in dma_xor_aligned_offsets()
153 if (!is_dma_xor_aligned(device, src_offs[i], 0, len)) in dma_xor_aligned_offsets()
189 struct dma_device *device = chan ? chan->device : NULL; in async_xor_offs() local
194 if (device) in async_xor_offs()
197 if (unmap && dma_xor_aligned_offsets(device, offset, in async_xor_offs()
321 struct dma_device *device = chan ? chan->device : NULL; in async_xor_val_offs() local
327 if (device) in async_xor_val_offs()
330 if (unmap && src_cnt <= device->max_xor && in async_xor_val_offs()
358 tx = device->device_prep_dma_xor_val(chan, in async_xor_val_offs()
[all …]
async_memcpy.c:38 struct dma_device *device = chan ? chan->device : NULL; in async_memcpy() local
42 if (device) in async_memcpy()
43 unmap = dmaengine_get_unmap_data(device->dev, 2, GFP_NOWAIT); in async_memcpy()
45 if (unmap && is_dma_copy_aligned(device, src_offset, dest_offset, len)) { in async_memcpy()
54 unmap->addr[0] = dma_map_page(device->dev, src, src_offset, len, in async_memcpy()
57 unmap->addr[1] = dma_map_page(device->dev, dest, dest_offset, len, in async_memcpy()
61 tx = device->device_prep_dma_memcpy(chan, unmap->addr[1], in async_memcpy()
async_pq.c:42 struct dma_device *dma = chan->device; in do_async_gen_syndrome()
184 struct dma_device *device = chan ? chan->device : NULL; in async_gen_syndrome() local
189 if (device) in async_gen_syndrome()
194 (src_cnt <= dma_maxpq(device, 0) || in async_gen_syndrome()
195 dma_maxpq(device, DMA_PREP_CONTINUE) > 0) && in async_gen_syndrome()
213 unmap->addr[j] = dma_map_page(device->dev, blocks[i], in async_gen_syndrome()
303 struct dma_device *device = chan ? chan->device : NULL; in async_syndrome_val() local
311 if (device) in async_syndrome_val()
314 if (unmap && disks <= dma_maxpq(device, 0) && in async_syndrome_val()
316 struct device *dev = device->dev; in async_syndrome_val()
[all …]
async_raid6_recov.c:24 struct dma_device *dma = chan ? chan->device : NULL; in async_sum_product()
34 struct device *dev = dma->dev; in async_sum_product()
93 struct dma_device *dma = chan ? chan->device : NULL; in async_mult()
103 struct device *dev = dma->dev; in async_mult()
/crypto/
crypto_engine.c:433 struct crypto_engine *crypto_engine_alloc_init_and_set(struct device *dev, in crypto_engine_alloc_init_and_set()
484 struct crypto_engine *crypto_engine_alloc_init(struct device *dev, bool rt) in crypto_engine_alloc_init()

Completed in 10 milliseconds