| /drivers/net/ethernet/chelsio/cxgb4/ |
| A D | l2t.c |
  159  memcpy(e->dmac, e->neigh->ha, sizeof(e->dmac));  in write_l2e()
  272  for (e = d->rover, end = &d->l2tab[d->l2t_size]; e != end; ++e)  in alloc_l2e()
  276  for (e = d->l2tab; atomic_read(&e->refcnt); ++e)  in alloc_l2e()
  304  for (e = &d->l2tab[0], end = &d->l2tab[d->l2t_size]; e != end; ++e) {  in find_or_alloc_l2e()
  440  for (e = d->l2tab[hash].first; e; e = e->next)  in cxgb4_l2t_get()
  442  e->vlan == vlan && e->lport == lport) {  in cxgb4_l2t_get()
  520  for (e = d->l2tab[hash].first; e; e = e->next)  in t4_l2t_update()
  708  e->idx + d->l2t_start, ip, e->dmac,  in l2t_seq_show()
  709  e->vlan & VLAN_VID_MASK, vlan_prio(e), e->lport,  in l2t_seq_show()
  710  l2e_state(e), atomic_read(&e->refcnt),  in l2t_seq_show()
  [all …]
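
The alloc_l2e() matches above show a rover allocation: the search for a free slot (an entry whose refcount is zero) resumes where the previous search left off, so allocations spread across the table instead of clustering at index 0. A minimal userspace sketch of that pattern, using simplified stand-in types rather than the real cxgb4 structures:

```c
#include <stddef.h>

/* Illustrative stand-ins, not the cxgb4 definitions. */
struct l2e { int refcnt; };

struct l2tab {
	struct l2e *tab;    /* array of n entries */
	struct l2e *rover;  /* where the previous search stopped */
	size_t n;
};

/* Rover scan: look for a free entry (refcnt == 0) from the rover
 * to the end of the table, then wrap around to the beginning.
 * Returns NULL when every entry is in use. */
static struct l2e *alloc_l2e(struct l2tab *d)
{
	struct l2e *e, *end = &d->tab[d->n];

	for (e = d->rover; e != end; ++e)
		if (e->refcnt == 0)
			goto found;
	for (e = d->tab; e != d->rover; ++e)
		if (e->refcnt == 0)
			goto found;
	return NULL;
found:
	d->rover = (e + 1 == end) ? d->tab : e + 1;
	e->refcnt = 1;
	return e;
}
```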
|
| A D | smt.c |
  70  for (e = &s->smtab[0], end = &s->smtab[s->smt_size]; e != end; ++e) {  in find_or_alloc_smte()
  73  first_free = e;  in find_or_alloc_smte()
  86  e = first_free;  in find_or_alloc_smte()
  95  return e;  in find_or_alloc_smte()
  115  t4_smte_free(e);  in cxgb4_smt_release()
  196  row = e->idx;  in write_smt_entry()
  217  if (e) {  in t4_smt_alloc_switching()
  220  e->refcnt = 1;  in t4_smt_alloc_switching()
  222  e->pfvf = pfvf;  in t4_smt_alloc_switching()
  226  ++e->refcnt;  in t4_smt_alloc_switching()
  [all …]
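
The find_or_alloc_smte() matches sketch a single-pass find-or-allocate: while scanning for an entry that already matches the key, the loop also remembers the first free slot (`first_free = e`), which is claimed only if no match turns up. A hedged sketch of the idea; the key type and match test are simplified (the real SMT code compares MAC addresses):

```c
#include <stddef.h>

struct smte { int refcnt; unsigned int key; };

/* One pass over the table: return an existing match if present,
 * otherwise the first free slot seen along the way, or NULL if
 * the table is full. */
static struct smte *find_or_alloc(struct smte *tab, size_t n,
				  unsigned int key)
{
	struct smte *e, *end = &tab[n], *first_free = NULL;

	for (e = &tab[0]; e != end; ++e) {
		if (e->refcnt == 0) {
			if (!first_free)
				first_free = e;
		} else if (e->key == key) {
			return e;	/* reuse the existing entry */
		}
	}
	return first_free;	/* caller initializes it as new */
}
```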
|
| A D | sched.c |
  130  for (e = &s->tab[0]; e != end; ++e) {  in t4_sched_entry_lookup()
  328  if (!e)  in t4_sched_class_unbind_all()
  489  for (e = &s->tab[0]; e != end; ++e) {  in t4_sched_class_lookup()
  491  found = e;  in t4_sched_class_lookup()
  505  for (e = &s->tab[0]; e != end; ++e) {  in t4_sched_class_lookup()
  550  if (!e) {  in t4_sched_class_alloc()
  555  if (!e)  in t4_sched_class_alloc()
  564  memcpy(&e->info, &np, sizeof(e->info));  in t4_sched_class_alloc()
  569  return e;  in t4_sched_class_alloc()
  639  memset(&e->info, 0, sizeof(e->info));  in cxgb4_sched_class_free()
  [all …]
|
| /drivers/net/ethernet/chelsio/cxgb3/ |
| A D | l2t.c |
  105  memcpy(e->dmac, e->neigh->ha, sizeof(e->dmac));  in setup_l2e_send_pending()
  190  for (e = d->rover, end = &d->l2tab[d->nentries]; e != end; ++e)  in alloc_l2e()
  194  for (e = &d->l2tab[1]; atomic_read(&e->refcnt); ++e) ;  in alloc_l2e()
  204  int hash = arp_hash(e->addr, e->ifindex, d);  in alloc_l2e()
  213  return e;  in alloc_l2e()
  255  if (memcmp(e->dmac, neigh->ha, sizeof(e->dmac)) ||  in reuse_entry()
  297  for (e = d->l2tab[hash].first; e; e = e->next)  in t3_l2t_get()
  298  if (e->addr == addr && e->ifindex == ifidx &&  in t3_l2t_get()
  308  if (e) {  in t3_l2t_get()
  372  for (e = d->l2tab[hash].first; e; e = e->next)  in t3_l2t_update()
  [all …]
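
The t3_l2t_get()/t3_l2t_update() matches show the other common access path in these L2 tables: a chained hash lookup, where each bucket heads a singly linked list and entries are matched on the full (addr, ifindex) key. A small sketch of that bucket walk; the hash function and key fields are simplified (the driver uses arp_hash()):

```c
#include <stddef.h>

struct l2t_entry {
	unsigned int addr;
	int ifindex;
	struct l2t_entry *next;	/* hash-chain link */
};

struct bucket { struct l2t_entry *first; };

/* Walk one hash chain, matching on the full key; the hash only
 * picks the bucket, so every key field must still be compared. */
static struct l2t_entry *l2t_lookup(struct bucket *l2tab, size_t nbuckets,
				    unsigned int addr, int ifidx)
{
	struct l2t_entry *e;
	size_t hash = (addr ^ (unsigned int)ifidx) % nbuckets;

	for (e = l2tab[hash].first; e; e = e->next)
		if (e->addr == addr && e->ifindex == ifidx)
			return e;
	return NULL;
}
```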
|
| /drivers/media/test-drivers/vidtv/ |
| A D | vidtv_s302m.c |
  184  if (e->sync && e->sync->is_video_encoder) {  in vidtv_s302m_alloc_au()
  269  if (e->src_buf_offset > e->src_buf_sz) {  in vidtv_s302m_get_sample()
  277  if (e->src_buf_offset >= e->src_buf_sz) {  in vidtv_s302m_get_sample()
  280  e->last_sample_cb(e->sample_count);  in vidtv_s302m_get_sample()
  285  sample = *(u16 *)(e->src_buf + e->src_buf_offset);  in vidtv_s302m_get_sample()
  410  if (e->sync && e->sync->is_video_encoder) {  in vidtv_s302m_encode()
  448  e = kzalloc(sizeof(*e), GFP_KERNEL);  in vidtv_s302m_encoder_init()
  449  if (!e)  in vidtv_s302m_encoder_init()
  501  return e;  in vidtv_s302m_encoder_init()
  508  kfree(e);  in vidtv_s302m_encoder_init()
  [all …]
|
| /drivers/md/ |
| A D | dm-cache-policy-smq.c |
  98  BUG_ON(e < es->begin || e >= es->end);  in to_index()
  221  for (e = l_head(es, l); e; e = l_next(es, e))  in l_pop_head()
  234  for (e = l_tail(es, l); e; e = l_prev(es, e))  in l_pop_tail()
  303  l_add_tail(q->es, q->qs + e->level, e);  in q_push()
  313  l_add_head(q->es, q->qs + e->level, e);  in q_push_front()
  328  l_del(q->es, q->qs + e->level, e);  in q_del()
  344  for (e = l_head(q->es, q->qs + level); e; e = l_next(q->es, e)) {  in q_peek()
  378  for (e = l_head(q->es, q->qs + level); e; e = l_next(q->es, e))  in __redist_pop_from()
  380  l_del(q->es, q->qs + e->level, e);  in __redist_pop_from()
  637  for (e = h_head(ht, h); e; e = h_next(ht, e)) {  in __h_lookup()
  [all …]
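
The l_head()/l_next() walks above, and the bounds check in to_index(), come from smq's index-threaded lists: entries live in one preallocated "entry space", and list links are stored as array indices rather than pointers, translated back to entry pointers on each step. A condensed sketch of that scheme; the index width, sentinel value, and helper names are simplified from the kernel source:

```c
#include <stddef.h>

#define INDEXER_NULL ((unsigned short)-1)

/* Entries sit in one array (the "entry space"); links are
 * indices into it, not pointers. */
struct entry { unsigned short next, prev; };

struct entry_space { struct entry *begin, *end; };
struct ilist { unsigned short head, tail; };

static struct entry *to_entry(struct entry_space *es, unsigned short i)
{
	return i == INDEXER_NULL ? NULL : es->begin + i;
}

static unsigned short to_index(struct entry_space *es, struct entry *e)
{
	/* the kernel asserts e lies inside [es->begin, es->end) */
	return (unsigned short)(e - es->begin);
}

static struct entry *l_head(struct entry_space *es, struct ilist *l)
{
	return to_entry(es, l->head);
}

static struct entry *l_next(struct entry_space *es, struct entry *e)
{
	return to_entry(es, e->next);
}
```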
|
| A D | dm-writecache.c |
  621  e->rb_node.rb_left : e->rb_node.rb_right);  in writecache_find_entry()
  649  e = e2;  in writecache_find_entry()
  745  return e;  in writecache_pop_from_freelist()
  820  e = e2;  in writecache_flush()
  1341  if (e) {  in writecache_map_remap_origin()
  1414  e = f;  in writecache_bio_copy_ssd()
  1732  e = c->e;  in __writecache_endio_ssd()
  1742  e++;  in __writecache_endio_ssd()
  1875  e = f;  in __writecache_writeback_pmem()
  1917  c->e = e;  in __writecache_writeback_ssd()
  [all …]
|
| /drivers/mtd/ubi/ |
| A D | wl.c |
  524  e->pnum, e->ec);  in serve_prot_queue()
  603  e->pnum, e->ec, torture);  in schedule_erase()
  610  wl_wrk->e = e;  in schedule_erase()
  639  wl_wrk.e = e;  in do_sync_erase()
  1768  ubi->lookuptbl[e->pnum] = e;
  1852  ubi->lookuptbl[e->pnum] = e;
  1869  ubi->lookuptbl[e->pnum] = e;
  1873  e->pnum, e->ec);
  1877  e->pnum, e->ec);
  2061  e->pnum, e->ec, root);
  [all …]
|
| A D | fastmap-wl.c |
  33  if (e->pnum < UBI_FM_MAX_START && e->ec < max_ec) {  in find_anchor_wl_entry()
  34  victim = e;  in find_anchor_wl_entry()
  87  if (!e)  in ubi_wl_get_fm_peb()
  97  return e;  in ubi_wl_get_fm_peb()
  227  if (!e)  in ubi_refill_pools_and_lock()
  392  if (!e) {  in need_wear_leveling()
  396  ec = e->ec;  in need_wear_leveling()
  398  ec = e->ec;  in need_wear_leveling()
  510  if (!e) {  in ubi_wl_put_fm_peb()
  511  e = fm_e;  in ubi_wl_put_fm_peb()
  [all …]
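
find_anchor_wl_entry() is a minimum search: among entries whose physical eraseblock number lies below UBI_FM_MAX_START, pick the one with the lowest erase counter as the anchor candidate. A sketch of that selection loop over a plain array (the kernel version walks an rb-tree, and `max_ec` keeps the source's variable name even though it tracks the lowest count seen so far):

```c
#include <limits.h>
#include <stddef.h>

struct wl_entry { int pnum; int ec; };	/* block number, erase count */

/* Pick the least-worn entry whose pnum is below the limit;
 * returns NULL when no entry qualifies. */
static struct wl_entry *find_anchor(struct wl_entry *tab, size_t n,
				    int max_pnum)
{
	struct wl_entry *e, *victim = NULL;
	int max_ec = INT_MAX;	/* lowest ec accepted so far */

	for (e = tab; e != &tab[n]; ++e) {
		if (e->pnum < max_pnum && e->ec < max_ec) {
			victim = e;
			max_ec = e->ec;
		}
	}
	return victim;
}
```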
|
| /drivers/net/ethernet/mellanox/mlx5/core/esw/ |
| A D | indir_table.c |
  201  e->recirc_grp = mlx5_create_flow_group(e->ft, in);  in mlx5_create_indir_recirc_group()
  224  e->fwd_grp = mlx5_create_flow_group(e->ft, in);  in mlx5_create_indir_fwd_group()
  261  e = kzalloc(sizeof(*e), GFP_KERNEL);  in mlx5_esw_indir_table_entry_create()
  262  if (!e)  in mlx5_esw_indir_table_entry_create()
  275  e->ft = ft;  in mlx5_esw_indir_table_entry_create()
  296  return e;  in mlx5_esw_indir_table_entry_create()
  306  kfree(e);  in mlx5_esw_indir_table_entry_create()
  332  if (e) {  in mlx5_esw_indir_table_get()
  363  if (!e)  in mlx5_esw_indir_table_put()
  371  if (e->fwd_ref || e->recirc_rule)  in mlx5_esw_indir_table_put()
  [all …]
|
| A D | vporttbl.c |
  75  return e;  in esw_vport_tbl_lookup()
  94  if (e) {  in mlx5_esw_vporttbl_get()
  95  e->num_rules++;  in mlx5_esw_vporttbl_get()
  99  e = kzalloc(sizeof(*e), GFP_KERNEL);  in mlx5_esw_vporttbl_get()
  100  if (!e) {  in mlx5_esw_vporttbl_get()
  116  e->fdb = fdb;  in mlx5_esw_vporttbl_get()
  118  e->key = skey;  in mlx5_esw_vporttbl_get()
  122  return e->fdb;  in mlx5_esw_vporttbl_get()
  125  kfree(e);  in mlx5_esw_vporttbl_get()
  142  if (!e || --e->num_rules)  in mlx5_esw_vporttbl_put()
  [all …]
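
mlx5_esw_vporttbl_get()/..._put() show the standard get/put lifecycle for a shared table: get returns the existing object and bumps num_rules, allocating only on first use, and put frees the object once the count drops to zero (`if (!e || --e->num_rules)`). A schematic version, with the hashed key lookup, locking, and real flow-table creation all omitted; `ep` here is a hypothetical stand-in for the driver's lookup slot:

```c
#include <stdlib.h>

struct vport_tbl_entry {
	unsigned int num_rules;	/* rules currently using this table */
	void *fdb;		/* the shared flow table */
};

/* Get: reuse the existing table and bump the count, or create
 * it on first use. */
static void *tbl_get(struct vport_tbl_entry **ep)
{
	struct vport_tbl_entry *e = *ep;

	if (e) {
		e->num_rules++;
		return e->fdb;
	}
	e = calloc(1, sizeof(*e));
	if (!e)
		return NULL;
	e->num_rules = 1;
	e->fdb = e;		/* placeholder for the real table */
	*ep = e;
	return e->fdb;
}

/* Put: drop one reference; free only when nobody uses it. */
static void tbl_put(struct vport_tbl_entry **ep)
{
	struct vport_tbl_entry *e = *ep;

	if (!e || --e->num_rules)
		return;
	free(e);
	*ep = NULL;
}
```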
|
| /drivers/edac/ |
| A D | edac_mc.c |
  806  int pos[EDAC_MAX_LAYERS] = { e->top_layer, e->mid_layer, e->low_layer };  in edac_inc_ce_error()
  820  int pos[EDAC_MAX_LAYERS] = { e->top_layer, e->mid_layer, e->low_layer };  in edac_inc_ue_error()
  840  e->error_count, e->msg,  in edac_ce_error()
  842  e->label, e->location, e->page_frame_number, e->offset_in_page,  in edac_ce_error()
  843  e->grain, e->syndrome,  in edac_ce_error()
  877  e->error_count, e->msg,  in edac_ue_error()
  879  e->label, e->location, e->page_frame_number, e->offset_in_page,  in edac_ue_error()
  891  e->label, e->location, e->page_frame_number, e->offset_in_page,  in edac_ue_error()
  931  trace_mc_event(e->type, e->msg, e->label, e->error_count,  in edac_raw_mc_handle_error()
  932  mci->mc_idx, e->top_layer, e->mid_layer,  in edac_raw_mc_handle_error()
  [all …]
|
| /drivers/gpu/drm/amd/amdgpu/ |
| A D | amdgpu_ring_mux.c |
  94  if (!e) {  in amdgpu_mux_resubmit_chunks()
  223  if (!e) {  in amdgpu_ring_mux_set_wptr()
  235  e->sw_cptr = e->sw_wptr;  in amdgpu_ring_mux_set_wptr()
  258  if (!e) {  in amdgpu_ring_mux_get_wptr()
  288  if (!e) {  in amdgpu_ring_mux_get_rptr()
  307  e->sw_rptr = e->sw_cptr;  in amdgpu_ring_mux_get_rptr()
  310  e->sw_rptr = e->sw_wptr;  in amdgpu_ring_mux_get_rptr()
  443  if (!e) {  in amdgpu_ring_mux_start_ib()
  469  if (!e) {  in scan_and_remove_signaled_chunk()
  492  if (!e) {  in amdgpu_ring_mux_ib_mark_offset()
  [all …]
|
| A D | amdgpu_sync.c |
  135  struct amdgpu_sync_entry *e;  in amdgpu_sync_add_later()  local
  139  dma_fence_put(e->fence);  in amdgpu_sync_add_later()
  164  struct amdgpu_sync_entry *e;  in amdgpu_sync_fence()  local
  173  if (!e)  in amdgpu_sync_fence()
  177  e->fence = dma_fence_get(f);  in amdgpu_sync_fence()
  302  hash_del(&e->node);  in amdgpu_sync_entry_free()
  303  dma_fence_put(e->fence);  in amdgpu_sync_entry_free()
  365  f = e->fence;  in amdgpu_sync_get_fence()
  367  hash_del(&e->node);  in amdgpu_sync_get_fence()
  395  f = e->fence;  in amdgpu_sync_clone()
  [all …]
|
| /drivers/net/fddi/skfp/ |
| A D | ecm.c |
  97  smc->e.path_test = PT_PASSED ;  in ecm_init()
  98  smc->e.trace_prop = 0 ;  in ecm_init()
  99  smc->e.sb_flag = 0 ;  in ecm_init()
  187  smc->e.trace_prop = 0 ;  in ecm_fsm()
  360  smc->e.sb_flag = FALSE ;  in ecm_fsm()
  366  else if (!smc->e.sb_flag &&  in ecm_fsm()
  369  smc->e.sb_flag = TRUE ;  in ecm_fsm()
  459  smc->e.trace_prop = 0 ;  in prop_actions()
  472  while (smc->e.trace_prop) {  in prop_actions()
  474  smc->e.trace_prop);  in prop_actions()
  [all …]
|
| /drivers/ssb/ |
| A D | driver_chipcommon_sflash.c |
  90  const struct ssb_sflash_tbl_e *e;  in ssb_sflash_init()  local
  107  for (e = ssb_sflash_sst_tbl; e->name; e++) {  in ssb_sflash_init()
  108  if (e->id == id2)  in ssb_sflash_init()
  115  for (e = ssb_sflash_st_tbl; e->name; e++) {  in ssb_sflash_init()
  116  if (e->id == id)  in ssb_sflash_init()
  121  if (!e->name) {  in ssb_sflash_init()
  132  for (e = ssb_sflash_at_tbl; e->name; e++) {  in ssb_sflash_init()
  133  if (e->id == id)  in ssb_sflash_init()
  136  if (!e->name) {  in ssb_sflash_init()
  149  sflash->blocksize = e->blocksize;  in ssb_sflash_init()
  [all …]
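
This scan, and its near-identical bcma twin further down, walk sentinel-terminated ID tables: the array's final element has a NULL name, so the same `e->name` test both ends the loop and afterwards distinguishes "found" from "unknown chip". A self-contained sketch of the idiom; the table contents here are illustrative values, not the driver's real flash IDs:

```c
#include <stdio.h>

struct sflash_tbl_e {
	const char *name;	/* NULL name terminates the table */
	unsigned int id;
	unsigned int blocksize;
};

static const struct sflash_tbl_e st_tbl[] = {
	{ "chip-a", 0x11, 0x10000 },	/* illustrative entries */
	{ "chip-b", 0x12, 0x10000 },
	{ NULL }			/* sentinel */
};

int main(void)
{
	const struct sflash_tbl_e *e;
	unsigned int id = 0x12;

	for (e = st_tbl; e->name; e++)
		if (e->id == id)
			break;
	if (!e->name) {		/* hit the sentinel: no match */
		printf("unknown flash id 0x%x\n", id);
		return 1;
	}
	printf("%s, block size %u\n", e->name, e->blocksize);
	return 0;
}
```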
|
| /drivers/net/ethernet/mellanox/mlx5/core/en/ |
| A D | tc_tun_encap.c |
  350  if (e)  in mlx5e_get_next_matching_encap()
  351  mlx5e_encap_put(netdev_priv(e->out_dev), e);  in mlx5e_get_next_matching_encap()
  359  e = next;  in mlx5e_get_next_matching_encap()
  549  if (!e)  in mlx5e_detach_encap()
  708  if (flow->encaps[i].e != e)  in is_duplicated_encap_entry()
  857  if (e) {  in mlx5e_attach_encap()
  867  e = kzalloc(sizeof(*e), GFP_KERNEL);  in mlx5e_attach_encap()
  868  if (!e) {  in mlx5e_attach_encap()
  922  flow->encaps[out_index].e = e;  in mlx5e_attach_encap()
  936  if (e)  in mlx5e_attach_encap()
  [all …]
|
| A D | tc_tun.c |
  188  if (!e->tunnel) {  in mlx5e_gen_ip_tunnel_header()
  193  return e->tunnel->generate_ip_tun_hdr(buf, ip_proto, e);  in mlx5e_gen_ip_tunnel_header()
  248  e->tunnel->calc_hlen(e);  in mlx5e_tc_tun_create_header_ipv4()
  296  &ip->protocol, e);  in mlx5e_tc_tun_create_header_ipv4()
  329  mlx5e_rep_encap_entry_detach(netdev_priv(e->out_dev), e);  in mlx5e_tc_tun_create_header_ipv4()
  364  e->tunnel->calc_hlen(e);  in mlx5e_tc_tun_update_header_ipv4()
  401  &ip->protocol, e);  in mlx5e_tc_tun_update_header_ipv4()
  406  kfree(e->encap_header);  in mlx5e_tc_tun_update_header_ipv4()
  519  e->tunnel->calc_hlen(e);  in mlx5e_tc_tun_create_header_ipv6()
  599  mlx5e_rep_encap_entry_detach(netdev_priv(e->out_dev), e);  in mlx5e_tc_tun_create_header_ipv6()
  [all …]
|
| /drivers/scsi/ |
| A D | scsi_proc.c |
  124  return e;  in scsi_lookup_proc_entry()
  136  return e ? e->proc_dir : NULL;  in scsi_template_proc_dir()
  164  if (!e) {  in scsi_proc_hostdir_add()
  165  e = kzalloc(sizeof(*e), GFP_KERNEL);  in scsi_proc_hostdir_add()
  166  if (!e) {  in scsi_proc_hostdir_add()
  183  e = NULL;  in scsi_proc_hostdir_add()
  188  kfree(e);  in scsi_proc_hostdir_add()
  205  if (e && !--e->present) {  in scsi_proc_hostdir_rm()
  208  kfree(e);  in scsi_proc_hostdir_rm()
  229  if (!e)  in scsi_proc_host_add()
  [all …]
|
| /drivers/accel/amdxdna/ |
| A D | aie2_error.c |
  221  queue_work(e->wq, &e->work);  in aie2_error_async_cb()
  227  drm_clflush_virt_range(e->buf, e->size); /* device can access */  in aie2_error_event_send()
  228  return aie2_register_asyn_event_msg(e->ndev, e->addr, e->size, e,  in aie2_error_event_send()
  236  struct async_event *e;  in aie2_error_worker()  local
  242  xdna = e->ndev->xdna;  in aie2_error_worker()
  250  e->buf, 0x100, false);  in aie2_error_worker()
  268  if (aie2_error_event_send(e))  in aie2_error_worker()
  276  struct async_event *e;  in aie2_error_async_events_send()  local
  337  e->ndev = ndev;  in aie2_error_async_events_alloc()
  338  e->wq = events->wq;  in aie2_error_async_events_alloc()
  [all …]
|
| /drivers/net/ethernet/airoha/ |
| A D | airoha_ppe.c |
  622  airoha_ppe_foe_commit_entry(ppe, &e->data, e->hash);  in airoha_ppe_foe_remove_flow()
  627  kfree(e);  in airoha_ppe_foe_remove_flow()
  746  e->hash = hash;  in airoha_ppe_foe_insert_entry()
  755  if (e)  in airoha_ppe_foe_insert_entry()
  1070  e = kzalloc(sizeof(*e), GFP_KERNEL);  in airoha_ppe_flow_offload_replace()
  1071  if (!e)  in airoha_ppe_flow_offload_replace()
  1075  memcpy(&e->data, &hwe, sizeof(e->data));  in airoha_ppe_flow_offload_replace()
  1091  kfree(e);  in airoha_ppe_flow_offload_replace()
  1104  if (!e)  in airoha_ppe_flow_offload_destroy()
  1110  kfree(e);  in airoha_ppe_flow_offload_destroy()
  [all …]
|
| /drivers/bcma/ |
| A D | driver_chipcommon_sflash.c |
  93  const struct bcma_sflash_tbl_e *e;  in bcma_sflash_init()  local
  110  for (e = bcma_sflash_sst_tbl; e->name; e++) {  in bcma_sflash_init()
  111  if (e->id == id2)  in bcma_sflash_init()
  118  for (e = bcma_sflash_st_tbl; e->name; e++) {  in bcma_sflash_init()
  119  if (e->id == id)  in bcma_sflash_init()
  124  if (!e->name) {  in bcma_sflash_init()
  134  for (e = bcma_sflash_at_tbl; e->name; e++) {  in bcma_sflash_init()
  135  if (e->id == id)  in bcma_sflash_init()
  138  if (!e->name) {  in bcma_sflash_init()
  149  sflash->blocksize = e->blocksize;  in bcma_sflash_init()
  [all …]
|
| /drivers/gpu/drm/xe/ |
| A D | xe_gt_sriov_pf_monitor.c |
  26  int e;  in xe_gt_sriov_pf_monitor_flr()  local
  31  for (e = 0; e < XE_GUC_KLV_NUM_THRESHOLDS; e++)  in xe_gt_sriov_pf_monitor_flr()
  36  enum xe_guc_klv_threshold_index e)  in pf_update_event_counter()  argument
  41  gt->sriov.pf.vfs[vfid].monitor.guc.events[e]++;  in pf_update_event_counter()
  47  int e;  in pf_handle_vf_threshold_event()  local
  53  if (unlikely(e < 0)) {  in pf_handle_vf_threshold_event()
  63  pf_update_event_counter(gt, vfid, e);  in pf_handle_vf_threshold_event()
  122  int e;  in xe_gt_sriov_pf_monitor_print_events()  local
  129  for (e = 0; e < XE_GUC_KLV_NUM_THRESHOLDS; e++)  in xe_gt_sriov_pf_monitor_print_events()
  130  if (data->guc.events[e])  in xe_gt_sriov_pf_monitor_print_events()
  [all …]
|
| /drivers/net/wireless/mediatek/mt7601u/ |
| A D | dma.c |
  165  put_page(e->p);  in mt7601u_rx_process_entry()
  166  e->p = new_p;  in mt7601u_rx_process_entry()
  182  buf = &q->e[q->start];  in mt7601u_rx_get_pending_entry()
  231  if (e->urb->status)  in mt7601u_rx_tasklet()
  264  skb = q->e[q->start].skb;  in mt7601u_complete_tx()
  265  q->e[q->start].skb = NULL;  in mt7601u_complete_tx()
  323  e = &q->e[q->end];  in mt7601u_dma_submit_tx()
  341  e->skb = skb;  in mt7601u_dma_submit_tx()
  454  if (!dev->rx_q.e[i].urb || !dev->rx_q.e[i].p)  in mt7601u_alloc_rx()
  467  if (q->e[i].skb)  in mt7601u_free_tx_queue()
  [all …]
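
The mt7601u queues pair two indices over a fixed array of entries: submission claims the slot at `end` (mt7601u_dma_submit_tx), completion retires the slot at `start` (mt7601u_complete_tx), and both advance modulo the ring size. A minimal sketch of that producer/consumer ring; the size and field names are simplified stand-ins:

```c
#include <stddef.h>

#define RING_SIZE 16

struct q_entry { void *skb; };

struct tx_queue {
	struct q_entry e[RING_SIZE];	/* fixed ring of entries */
	unsigned int start;		/* oldest in-flight entry */
	unsigned int end;		/* next free slot */
	unsigned int used;
};

/* Producer: claim the slot at 'end' and advance it. */
static int submit(struct tx_queue *q, void *skb)
{
	struct q_entry *e;

	if (q->used == RING_SIZE)
		return -1;		/* ring full */
	e = &q->e[q->end];
	e->skb = skb;
	q->end = (q->end + 1) % RING_SIZE;
	q->used++;
	return 0;
}

/* Consumer: completions retire entries from 'start'. */
static void *complete(struct tx_queue *q)
{
	void *skb;

	if (!q->used)
		return NULL;
	skb = q->e[q->start].skb;
	q->e[q->start].skb = NULL;
	q->start = (q->start + 1) % RING_SIZE;
	q->used--;
	return skb;
}
```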
|
| /drivers/virt/coco/efi_secret/ |
| A D | efi_secret.c |
  85  return e->len - sizeof(*e);  in secret_entry_data_len()
  99  if (e)  in efi_secret_bin_file_show()
  100  seq_write(file, e->data, secret_entry_data_len(e));  in efi_secret_bin_file_show()
  123  if (e) {  in efi_secret_unlink()
  125  wipe_memory(e->data, secret_entry_data_len(e));  in efi_secret_unlink()
  126  e->guid = NULL_GUID;  in efi_secret_unlink()
  194  struct secret_entry *e;  in efi_secret_securityfs_setup()  local
  238  e = (struct secret_entry *)ptr;  in efi_secret_securityfs_setup()
  239  if (e->len < sizeof(*e) || e->len > (unsigned int)bytes_left) {  in efi_secret_securityfs_setup()
  258  ptr += e->len;  in efi_secret_securityfs_setup()
  [all …]
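
efi_secret_securityfs_setup() walks a table of variable-length records: each entry starts with its own total length, so the parser validates `len` against both the header size and the bytes remaining before advancing by `e->len` — the check at line 239 above. A sketch of that defensive TLV-style walk, with the record layout simplified (the real entry also carries a GUID):

```c
#include <stdint.h>
#include <stdio.h>

struct secret_entry {
	uint32_t len;		/* total record length, header included */
	uint8_t data[];		/* len - sizeof(header) payload bytes */
};

/* Walk length-prefixed records in buf; a len smaller than the
 * header or larger than what remains means corruption, so stop. */
int walk_entries(uint8_t *buf, size_t size)
{
	uint8_t *ptr = buf;
	size_t bytes_left = size;

	while (bytes_left >= sizeof(struct secret_entry)) {
		struct secret_entry *e = (struct secret_entry *)ptr;

		if (e->len < sizeof(*e) || e->len > bytes_left)
			return -1;	/* malformed table */
		printf("entry with %zu payload bytes\n",
		       (size_t)(e->len - sizeof(*e)));
		ptr += e->len;
		bytes_left -= e->len;
	}
	return 0;
}
```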
|