Home
last modified time | relevance | path

Searched refs:gfp_flags (Results 1 – 25 of 135) sorted by relevance

Pages: 1 2 3 4 5 6

/drivers/infiniband/hw/hns/
A Dhns_roce_alloc.c72 gfp_t gfp_flags; in hns_roce_buf_alloc() local
79 gfp_flags = (flags & HNS_ROCE_BUF_NOSLEEP) ? GFP_ATOMIC : GFP_KERNEL; in hns_roce_buf_alloc()
80 buf = kzalloc(sizeof(*buf), gfp_flags); in hns_roce_buf_alloc()
96 trunks = kcalloc(ntrunk, sizeof(*trunks), gfp_flags); in hns_roce_buf_alloc()
106 &trunks[i].map, gfp_flags); in hns_roce_buf_alloc()
/drivers/scsi/aic94xx/
A Daic94xx_task.c38 gfp_t gfp_flags) in asd_map_scatterlist() argument
77 gfp_flags); in asd_map_scatterlist()
349 gfp_t gfp_flags) in asd_build_ata_ascb() argument
399 gfp_flags); in asd_build_ata_ascb()
412 gfp_t gfp_flags) in asd_build_smp_ascb() argument
462 gfp_t gfp_flags) in asd_build_ssp_ascb() argument
522 int asd_execute_task(struct sas_task *task, gfp_t gfp_flags) in asd_execute_task() argument
535 ascb = asd_ascb_alloc_list(asd_ha, &res, gfp_flags); in asd_execute_task()
555 res = asd_build_ata_ascb(a, t, gfp_flags); in asd_execute_task()
558 res = asd_build_smp_ascb(a, t, gfp_flags); in asd_execute_task()
[all …]
A Daic94xx_hwi.c270 static int asd_alloc_edbs(struct asd_ha_struct *asd_ha, gfp_t gfp_flags) in asd_alloc_edbs() argument
276 gfp_flags); in asd_alloc_edbs()
282 gfp_flags); in asd_alloc_edbs()
302 gfp_t gfp_flags) in asd_alloc_escbs() argument
309 gfp_flags); in asd_alloc_escbs()
314 escb = asd_ascb_alloc_list(asd_ha, &escbs, gfp_flags); in asd_alloc_escbs()
1029 gfp_t gfp_flags) in asd_ascb_alloc() argument
1036 ascb = kmem_cache_zalloc(asd_ascb_cache, gfp_flags); in asd_ascb_alloc()
1041 gfp_flags, in asd_ascb_alloc()
1085 gfp_t gfp_flags) in asd_ascb_alloc_list() argument
[all …]
A Daic94xx.h56 int asd_execute_task(struct sas_task *task, gfp_t gfp_flags);
/drivers/md/dm-vdo/
A Dmemory-alloc.c213 const gfp_t gfp_flags = GFP_KERNEL | __GFP_ZERO | __GFP_RETRY_MAYFAIL; in vdo_allocate_memory() local
232 p = kmalloc(size, gfp_flags | __GFP_NOWARN); in vdo_allocate_memory()
240 p = kmalloc(size, gfp_flags); in vdo_allocate_memory()
260 p = __vmalloc(size, gfp_flags | __GFP_NOWARN); in vdo_allocate_memory()
266 p = __vmalloc(size, gfp_flags); in vdo_allocate_memory()
/drivers/scsi/libsas/
A Dsas_event.c165 gfp_t gfp_flags) in sas_notify_port_event() argument
172 ev = sas_alloc_event(phy, gfp_flags); in sas_notify_port_event()
192 gfp_t gfp_flags) in sas_notify_phy_event() argument
199 ev = sas_alloc_event(phy, gfp_flags); in sas_notify_phy_event()
/drivers/crypto/marvell/cesa/
A Dtdma.c227 u32 size, u32 flags, gfp_t gfp_flags) in mv_cesa_dma_add_result_op() argument
231 tdma = mv_cesa_dma_add_desc(chain, gfp_flags); in mv_cesa_dma_add_result_op()
294 u32 flags, gfp_t gfp_flags) in mv_cesa_dma_add_data_transfer() argument
298 tdma = mv_cesa_dma_add_desc(chain, gfp_flags); in mv_cesa_dma_add_data_transfer()
336 gfp_t gfp_flags) in mv_cesa_dma_add_op_transfers() argument
356 flags, gfp_flags); in mv_cesa_dma_add_op_transfers()
A Dcesa.h858 u32 size, u32 flags, gfp_t gfp_flags);
867 u32 flags, gfp_t gfp_flags);
875 gfp_t gfp_flags);
/drivers/gpu/drm/ttm/
A Dttm_device.c154 int ttm_global_swapout(struct ttm_operation_ctx *ctx, gfp_t gfp_flags) in ttm_global_swapout() argument
162 ret = ttm_device_swapout(bdev, ctx, gfp_flags); in ttm_global_swapout()
173 gfp_t gfp_flags) in ttm_device_swapout() argument
184 lret = ttm_bo_swapout(bdev, ctx, man, gfp_flags, 1); in ttm_device_swapout()
A Dttm_pool.c135 static struct page *ttm_pool_alloc_page(struct ttm_pool *pool, gfp_t gfp_flags, in ttm_pool_alloc_page() argument
148 gfp_flags |= __GFP_NOMEMALLOC | __GFP_NORETRY | __GFP_NOWARN | in ttm_pool_alloc_page()
152 p = alloc_pages_node(pool->nid, gfp_flags, order); in ttm_pool_alloc_page()
166 &dma->addr, gfp_flags, attr); in ttm_pool_alloc_page()
706 gfp_t gfp_flags = GFP_USER; in __ttm_pool_alloc() local
717 gfp_flags |= __GFP_ZERO; in __ttm_pool_alloc()
720 gfp_flags |= __GFP_RETRY_MAYFAIL; in __ttm_pool_alloc()
723 gfp_flags |= GFP_DMA32; in __ttm_pool_alloc()
725 gfp_flags |= GFP_HIGHUSER; in __ttm_pool_alloc()
748 p = ttm_pool_alloc_page(pool, gfp_flags, order); in __ttm_pool_alloc()
A Dttm_tt.c321 gfp_t gfp_flags) in ttm_tt_swapout() argument
337 gfp_flags &= mapping_gfp_mask(swap_space); in ttm_tt_swapout()
344 to_page = shmem_read_mapping_page_gfp(swap_space, i, gfp_flags); in ttm_tt_swapout()
A Dttm_bo.c1099 gfp_t gfp_flags; member
1182 ret = ttm_tt_swapout(bo->bdev, bo->ttm, swapout_walk->gfp_flags); in ttm_bo_swapout_cb()
1216 struct ttm_resource_manager *man, gfp_t gfp_flags, in ttm_bo_swapout() argument
1227 .gfp_flags = gfp_flags, in ttm_bo_swapout()
/drivers/usb/gadget/udc/
A Dgr_udc.c350 req = kzalloc(sizeof(*req), gfp_flags); in gr_alloc_request()
437 dma_addr_t data, unsigned size, gfp_t gfp_flags) in gr_add_dma_desc() argument
441 desc = gr_alloc_dma_desc(ep, gfp_flags); in gr_add_dma_desc()
476 gfp_t gfp_flags) in gr_setup_out_desc_list() argument
529 gfp_t gfp_flags) in gr_setup_in_desc_list() argument
556 ret = gr_add_dma_desc(ep, req, 0, 0, gfp_flags); in gr_setup_in_desc_list()
612 ret = gr_setup_in_desc_list(ep, req, gfp_flags); in gr_queue()
614 ret = gr_setup_out_desc_list(ep, req, gfp_flags); in gr_queue()
635 gfp_t gfp_flags) in gr_queue_int() argument
640 return gr_queue(ep, req, gfp_flags); in gr_queue_int()
[all …]
/drivers/infiniband/sw/rxe/
A Drxe_pool.c123 gfp_t gfp_flags; in __rxe_add_to_pool() local
137 gfp_flags = sleepable ? GFP_KERNEL : GFP_ATOMIC; in __rxe_add_to_pool()
142 &pool->next, gfp_flags); in __rxe_add_to_pool()
/drivers/greybus/
A Doperation.c363 size_t payload_size, gfp_t gfp_flags) in gb_operation_message_alloc() argument
376 message = kmem_cache_zalloc(gb_message_cache, gfp_flags); in gb_operation_message_alloc()
380 message->buffer = kzalloc(message_size, gfp_flags); in gb_operation_message_alloc()
522 unsigned long op_flags, gfp_t gfp_flags) in gb_operation_create_common() argument
527 operation = kmem_cache_zalloc(gb_operation_cache, gfp_flags); in gb_operation_create_common()
533 gfp_flags); in gb_operation_create_common()
541 gfp_flags)) { in gb_operation_create_common()
/drivers/media/usb/uvc/
A Duvc_video.c1774 struct uvc_urb *uvc_urb, gfp_t gfp_flags) in uvc_alloc_urb_buffer() argument
1779 gfp_flags, &uvc_urb->dma, in uvc_alloc_urb_buffer()
1797 unsigned int size, unsigned int psize, gfp_t gfp_flags) in uvc_alloc_urb_buffers() argument
1895 struct usb_host_endpoint *ep, gfp_t gfp_flags) in uvc_init_video_isoc() argument
1913 urb = usb_alloc_urb(npackets, gfp_flags); in uvc_init_video_isoc()
1948 struct usb_host_endpoint *ep, gfp_t gfp_flags) in uvc_init_video_bulk() argument
1977 urb = usb_alloc_urb(0, gfp_flags); in uvc_init_video_bulk()
1999 gfp_t gfp_flags) in uvc_video_start_transfer() argument
2079 ret = uvc_init_video_isoc(stream, best_ep, gfp_flags); in uvc_video_start_transfer()
2091 ret = uvc_init_video_bulk(stream, ep, gfp_flags); in uvc_video_start_transfer()
[all …]
/drivers/usb/gadget/function/
A Du_ether.c143 rx_submit(struct eth_dev *dev, struct usb_request *req, gfp_t gfp_flags) in rx_submit() argument
188 skb = __netdev_alloc_skb(dev->net, size + NET_IP_ALIGN, gfp_flags); in rx_submit()
206 retval = usb_ep_queue(out, req, gfp_flags); in rx_submit()
371 static void rx_fill(struct eth_dev *dev, gfp_t gfp_flags) in rx_fill() argument
383 if (rx_submit(dev, req, gfp_flags) < 0) { in rx_fill()
602 static void eth_start(struct eth_dev *dev, gfp_t gfp_flags) in eth_start() argument
607 rx_fill(dev, gfp_flags); in eth_start()
/drivers/net/ethernet/sun/
A Dsunbmac.h324 static inline struct sk_buff *big_mac_alloc_skb(unsigned int length, gfp_t gfp_flags) in big_mac_alloc_skb() argument
328 skb = alloc_skb(length + 64, gfp_flags); in big_mac_alloc_skb()
/drivers/md/
A Draid1-10.c44 gfp_t gfp_flags) in resync_alloc_pages() argument
49 rp->pages[i] = alloc_page(gfp_flags); in resync_alloc_pages()
/drivers/net/usb/
A Dcdc-phonet.c116 static int rx_submit(struct usbpn_dev *pnd, struct urb *req, gfp_t gfp_flags) in rx_submit() argument
122 page = __dev_alloc_page(gfp_flags | __GFP_NOMEMALLOC); in rx_submit()
129 err = usb_submit_urb(req, gfp_flags); in rx_submit()
/drivers/dma/
A Ddmatest.c224 gfp_t gfp_flags; member
543 d->raw[i] = kmalloc(buf_size + align, d->gfp_flags); in dmatest_alloc_test_data()
665 src->gfp_flags = GFP_KERNEL; in dmatest_func()
666 dst->gfp_flags = GFP_KERNEL; in dmatest_func()
668 src->gfp_flags = GFP_DMA; in dmatest_func()
669 dst->gfp_flags = GFP_DMA; in dmatest_func()
/drivers/target/
A Dtarget_core_tmr.c30 gfp_t gfp_flags) in core_tmr_alloc_req() argument
34 tmr = kzalloc(sizeof(struct se_tmr_req), gfp_flags); in core_tmr_alloc_req()
/drivers/gpu/drm/amd/amdgpu/
A Damdgpu_gart.c121 gfp_t gfp_flags = GFP_KERNEL | __GFP_ZERO; in amdgpu_gart_table_ram_alloc() local
133 p = alloc_pages(gfp_flags, order); in amdgpu_gart_table_ram_alloc()
/drivers/media/common/videobuf2/
A Dvideobuf2-dma-sg.c61 gfp_t gfp_flags) in vb2_dma_sg_alloc_compacted() argument
79 __GFP_NOWARN | gfp_flags, order); in vb2_dma_sg_alloc_compacted()
133 ret = vb2_dma_sg_alloc_compacted(buf, vb->vb2_queue->gfp_flags); in vb2_dma_sg_alloc()
/drivers/mtd/ubi/
A Dubi.h1086 ubi_alloc_vid_buf(const struct ubi_device *ubi, gfp_t gfp_flags) in ubi_alloc_vid_buf() argument
1091 vidb = kzalloc(sizeof(*vidb), gfp_flags); in ubi_alloc_vid_buf()
1095 buf = kmalloc(ubi->vid_hdr_alsize, gfp_flags); in ubi_alloc_vid_buf()

Completed in 66 milliseconds

Pages: 1 2 3 4 5 6