/linux/drivers/uio/
  uio_dmem_genirq.c
      39  struct mutex alloc_lock;  member
      55  mutex_lock(&priv->alloc_lock);  in uio_dmem_genirq_open()
      68  mutex_unlock(&priv->alloc_lock);  in uio_dmem_genirq_open()
      84  mutex_lock(&priv->alloc_lock);  in uio_dmem_genirq_release()
      99  mutex_unlock(&priv->alloc_lock);  in uio_dmem_genirq_release()
     200  mutex_init(&priv->alloc_lock);  in uio_dmem_genirq_probe()
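
The hits above follow a common shape: a mutex initialized in probe() serializes buffer allocation in open() against freeing in release(). A minimal sketch of that pattern, with every name except alloc_lock invented for illustration:

/*
 * Hypothetical sketch of the open/release pattern suggested by the hits
 * above; not the uio_dmem_genirq code itself. The mutex keeps a racing
 * release() from freeing the buffer an open() is still setting up.
 */
#include <linux/errno.h>
#include <linux/mutex.h>
#include <linux/slab.h>

struct demo_dev {
	struct mutex alloc_lock;	/* serializes open() vs release() */
	void *buf;			/* allocated on first open */
};

static int demo_open(struct demo_dev *d)
{
	int ret = 0;

	mutex_lock(&d->alloc_lock);
	if (!d->buf) {
		d->buf = kzalloc(4096, GFP_KERNEL);
		if (!d->buf)
			ret = -ENOMEM;
	}
	mutex_unlock(&d->alloc_lock);
	return ret;
}

static void demo_release(struct demo_dev *d)
{
	mutex_lock(&d->alloc_lock);
	kfree(d->buf);
	d->buf = NULL;
	mutex_unlock(&d->alloc_lock);
}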
|
/linux/mm/
  swap_slots.c
     148  mutex_init(&cache->alloc_lock);  in alloc_swap_slot_cache()
     176  mutex_lock(&cache->alloc_lock);  in drain_slots_cache_cpu()
     184  mutex_unlock(&cache->alloc_lock);  in drain_slots_cache_cpu()
     330  mutex_lock(&cache->alloc_lock);  in folio_alloc_swap()
     341  mutex_unlock(&cache->alloc_lock);  in folio_alloc_swap()
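
These hits suggest a slot cache whose contents are only touched under a mutex (the swap_slots.h entry further down documents it as protecting slots, nr and cur). A rough sketch of such a cache, with hypothetical sizes and field names:

/*
 * Illustrative sketch, not the kernel code: a cache whose array, count
 * and cursor are only read or written with alloc_lock held, mirroring
 * the "protects slots, nr, cur" comment in swap_slots.h below.
 */
#include <linux/mutex.h>

#define DEMO_CACHE_SLOTS 64

struct demo_slot_cache {
	struct mutex alloc_lock;	/* protects slots, nr, cur */
	unsigned long slots[DEMO_CACHE_SLOTS];
	int nr;				/* cached entries remaining */
	int cur;			/* next entry to hand out */
};

/* Hand out one cached slot, or 0 if the cache is empty. */
static unsigned long demo_cache_alloc(struct demo_slot_cache *c)
{
	unsigned long slot = 0;

	mutex_lock(&c->alloc_lock);
	if (c->nr > 0) {
		slot = c->slots[c->cur++];
		c->nr--;
	}
	mutex_unlock(&c->alloc_lock);
	return slot;
}

/* Drop everything the cache still holds, e.g. when a CPU goes offline. */
static void demo_cache_drain(struct demo_slot_cache *c)
{
	mutex_lock(&c->alloc_lock);
	c->nr = 0;
	c->cur = 0;
	mutex_unlock(&c->alloc_lock);
}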
|
  vmscan.c
    2884  lockdep_assert_held(&task->alloc_lock);  in lru_gen_migrate_mm()
|
/linux/drivers/misc/
  hpilo.c
     547  spin_lock_irqsave(&hw->alloc_lock, flags);  in ilo_close()
     549  spin_unlock_irqrestore(&hw->alloc_lock, flags);  in ilo_close()
     594  spin_lock_irqsave(&hw->alloc_lock, flags);  in ilo_open()
     597  spin_unlock_irqrestore(&hw->alloc_lock, flags);  in ilo_open()
     603  spin_lock_irqsave(&hw->alloc_lock, flags);  in ilo_open()
     605  spin_unlock_irqrestore(&hw->alloc_lock, flags);  in ilo_open()
     651  spin_lock(&hw->alloc_lock);  in ilo_isr()
     656  spin_unlock(&hw->alloc_lock);  in ilo_isr()
     676  spin_unlock(&hw->alloc_lock);  in ilo_isr()
     806  spin_lock_init(&ilo_hw->alloc_lock);  in ilo_probe()

  hpilo.h
      61  spinlock_t alloc_lock;  member
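
The hpilo hits show a typical split for a lock shared with an interrupt handler: ilo_open()/ilo_close() take it with spin_lock_irqsave() so the ISR cannot preempt them on the same CPU and deadlock, while ilo_isr() itself uses the plain spin_lock(). A hedged sketch of that pattern (struct and handler names are made up):

/*
 * Sketch only: process-context code disables local interrupts around the
 * lock it shares with the hard-irq handler; the handler, already running
 * with its interrupt masked, takes the plain spin_lock().
 */
#include <linux/interrupt.h>
#include <linux/spinlock.h>

struct demo_hw {
	spinlock_t alloc_lock;
	unsigned int inflight;
};

static void demo_open_path(struct demo_hw *hw)
{
	unsigned long flags;

	spin_lock_irqsave(&hw->alloc_lock, flags);
	hw->inflight++;
	spin_unlock_irqrestore(&hw->alloc_lock, flags);
}

static irqreturn_t demo_isr(int irq, void *data)
{
	struct demo_hw *hw = data;

	spin_lock(&hw->alloc_lock);	/* hard-irq context: plain lock suffices */
	if (hw->inflight)
		hw->inflight--;
	spin_unlock(&hw->alloc_lock);
	return IRQ_HANDLED;
}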
|
/linux/init/
  init_task.c
     138  .alloc_lock = __SPIN_LOCK_UNLOCKED(init_task.alloc_lock),
     173  &init_task.alloc_lock),
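
init_task is defined statically, so its alloc_lock is set up with the compile-time __SPIN_LOCK_UNLOCKED() initializer rather than spin_lock_init(); fork.c (listed below) does the runtime initialization for every other task. A small sketch contrasting the two, using a hypothetical demo_obj:

/*
 * Sketch of the two initialization styles: a build-time initializer for
 * a lock embedded in a statically defined object, and spin_lock_init()
 * for objects created at run time.
 */
#include <linux/spinlock.h>

struct demo_obj {
	spinlock_t alloc_lock;
};

/* Static object: the lock is initialized at build time. */
static struct demo_obj demo_static = {
	.alloc_lock = __SPIN_LOCK_UNLOCKED(demo_static.alloc_lock),
};

/* Dynamically created object: the lock must be initialized explicitly. */
static void demo_obj_init(struct demo_obj *obj)
{
	spin_lock_init(&obj->alloc_lock);
}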
|
/linux/include/linux/sched/
  task.h
     229  spin_lock(&p->alloc_lock);  in task_lock()
     234  spin_unlock(&p->alloc_lock);  in task_unlock()
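
From the two hits above, task_lock() and task_unlock() reduce to thin wrappers around the task's alloc_lock, which is the lock the Yama, ioprio and cgroup entries elsewhere in this listing assert or document as held. A sketch of that shape (not a verbatim copy of include/linux/sched/task.h):

/*
 * Reconstructed from the hits above: taking "the task lock" means taking
 * p->alloc_lock, which guards task fields such as ->comm (see the Yama
 * entry below).
 */
#include <linux/sched.h>
#include <linux/spinlock.h>

static inline void demo_task_lock(struct task_struct *p)
{
	spin_lock(&p->alloc_lock);
}

static inline void demo_task_unlock(struct task_struct *p)
{
	spin_unlock(&p->alloc_lock);
}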
|
/linux/include/linux/
  swap_slots.h
      15  struct mutex alloc_lock;  /* protects slots, nr, cur */  member
|
  ioprio.h
      66  lockdep_assert_held(&p->alloc_lock);  in __get_task_ioprio()
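
Helpers that read fields guarded by the task's alloc_lock can encode the locking contract with lockdep_assert_held(), as __get_task_ioprio() does here and lru_gen_migrate_mm() does above, so a caller that forgot task_lock() trips a lockdep splat instead of racing silently. A tiny hypothetical example of the same annotation:

/*
 * Sketch of the annotation style, not kernel code: the accessor is only
 * valid while the caller holds task_lock(p), i.e. p->alloc_lock.
 */
#include <linux/lockdep.h>
#include <linux/sched.h>

static inline char demo_comm_first_char(struct task_struct *p)
{
	/* Caller must hold task_lock(p). */
	lockdep_assert_held(&p->alloc_lock);
	return p->comm[0];
}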
|
  sched.h
    1182  spinlock_t alloc_lock;  member
|
/linux/drivers/infiniband/hw/hfi1/
  pio.c
     700  spin_lock_init(&sc->alloc_lock);  in sc_alloc()
     887  spin_lock_irq(&sc->alloc_lock);  in sc_disable()
     931  spin_unlock_irq(&sc->alloc_lock);  in sc_disable()
    1261  spin_lock_irqsave(&sc->alloc_lock, flags);  in sc_enable()
    1332  spin_unlock_irqrestore(&sc->alloc_lock, flags);  in sc_enable()
    1387  spin_lock_irqsave(&sc->alloc_lock, flags);  in sc_stop()
    1391  spin_unlock_irqrestore(&sc->alloc_lock, flags);  in sc_stop()
    1420  spin_lock_irqsave(&sc->alloc_lock, flags);  in sc_buffer_alloc()
    1422  spin_unlock_irqrestore(&sc->alloc_lock, flags);  in sc_buffer_alloc()
    1431  spin_unlock_irqrestore(&sc->alloc_lock, flags);  in sc_buffer_alloc()
    [all …]

  pio.h
      73  spinlock_t alloc_lock ____cacheline_aligned_in_smp;
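
pio.h marks this spinlock ____cacheline_aligned_in_smp, which is the usual way to keep a lock taken on a hot path (here sc_buffer_alloc()) on its own cache line so it does not false-share with neighbouring fields. An illustrative layout, with an invented struct:

/*
 * Sketch of the placement idea only; the real send-context layout in
 * pio.h is different. The annotation starts the hot lock and the state
 * it guards on a fresh cache line, away from rarely written fields.
 */
#include <linux/cache.h>
#include <linux/spinlock.h>

struct demo_send_ctxt {
	/* Rarely written configuration fields share a line. */
	unsigned int index;
	unsigned int credits;

	/* Hot allocation state starts on its own cache line. */
	spinlock_t alloc_lock ____cacheline_aligned_in_smp;
	unsigned long fill;
	unsigned long free;
};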
|
/linux/drivers/net/ethernet/mellanox/mlx5/core/
  cmd.c
     164  spin_lock_irqsave(&cmd->alloc_lock, flags);  in cmd_alloc_index()
     171  spin_unlock_irqrestore(&cmd->alloc_lock, flags);  in cmd_alloc_index()
     178  lockdep_assert_held(&cmd->alloc_lock);  in cmd_free_index()
     192  spin_lock_irqsave(&cmd->alloc_lock, flags);  in cmd_ent_put()
     203  spin_unlock_irqrestore(&cmd->alloc_lock, flags);  in cmd_ent_put()
    1021  spin_lock_irqsave(&cmd->alloc_lock, flags);  in cmd_work_handler()
    1024  spin_unlock_irqrestore(&cmd->alloc_lock, flags);  in cmd_work_handler()
    1782  spin_lock_irqsave(&dev->cmd.alloc_lock, flags);  in mlx5_cmd_trigger_completions()
    1795  spin_unlock_irqrestore(&dev->cmd.alloc_lock, flags);  in mlx5_cmd_trigger_completions()
    1804  spin_unlock_irqrestore(&dev->cmd.alloc_lock, flags);  in mlx5_cmd_trigger_completions()
    [all …]
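
The mlx5 hits hint at a bitmap-style index allocator: cmd_alloc_index() takes alloc_lock itself, while cmd_free_index() merely asserts that its caller already holds it. A speculative sketch of that division of responsibility (bitmap size and struct layout are made up):

/*
 * Sketch only: the allocator locks internally; the free helper relies on
 * the caller, which has more bookkeeping to do under the same lock, and
 * documents that with lockdep_assert_held().
 */
#include <linux/bitmap.h>
#include <linux/errno.h>
#include <linux/lockdep.h>
#include <linux/spinlock.h>

#define DEMO_MAX_CMDS 32

struct demo_cmd {
	spinlock_t alloc_lock;
	DECLARE_BITMAP(bitmask, DEMO_MAX_CMDS);	/* set bit == free slot */
};

static int demo_alloc_index(struct demo_cmd *cmd)
{
	unsigned long flags;
	int ret;

	spin_lock_irqsave(&cmd->alloc_lock, flags);
	ret = find_first_bit(cmd->bitmask, DEMO_MAX_CMDS);
	if (ret < DEMO_MAX_CMDS)
		clear_bit(ret, cmd->bitmask);
	else
		ret = -ENOMEM;
	spin_unlock_irqrestore(&cmd->alloc_lock, flags);
	return ret;
}

static void demo_free_index(struct demo_cmd *cmd, int idx)
{
	lockdep_assert_held(&cmd->alloc_lock);	/* caller holds alloc_lock */
	set_bit(idx, cmd->bitmask);
}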
|
/linux/arch/powerpc/kvm/
  book3s_64_vio.c
     227  mutex_lock(&stt->alloc_lock);  in kvm_spapr_get_tce_page()
     235  mutex_unlock(&stt->alloc_lock);  in kvm_spapr_get_tce_page()
     328  mutex_init(&stt->alloc_lock);  in kvm_vm_ioctl_create_spapr_tce()
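
Here a mutex guards lazy allocation of backing pages: kvm_spapr_get_tce_page() appears to allocate a page on first use, rechecking under alloc_lock so two racing callers do not both install one. A simplified, hypothetical version of that double-checked pattern:

/*
 * Sketch of the double-checked lazy allocation shape; the struct is a
 * stand-in, not the real TCE table, and memory-ordering annotations are
 * omitted for brevity.
 */
#include <linux/gfp.h>
#include <linux/mm.h>
#include <linux/mutex.h>

struct demo_table {
	struct mutex alloc_lock;
	struct page *pages[16];
};

static struct page *demo_get_page(struct demo_table *t, unsigned int idx)
{
	struct page *page = t->pages[idx];

	if (page)
		return page;

	mutex_lock(&t->alloc_lock);
	page = t->pages[idx];
	if (!page) {	/* nobody beat us to it: allocate and publish */
		page = alloc_page(GFP_KERNEL | __GFP_ZERO);
		if (page)
			t->pages[idx] = page;
	}
	mutex_unlock(&t->alloc_lock);
	return page;
}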
|
/linux/fs/bcachefs/
  btree_cache.c
     708  if (bc->alloc_lock == current) {  in bch2_btree_cache_cannibalize_unlock()
     710  bc->alloc_lock = NULL;  in bch2_btree_cache_cannibalize_unlock()
     722  if (try_cmpxchg(&bc->alloc_lock, &old, current) || old == current)  in bch2_btree_cache_cannibalize_lock()
     734  if (try_cmpxchg(&bc->alloc_lock, &old, current) || old == current) {  in bch2_btree_cache_cannibalize_lock()
     867  if (bc->alloc_lock == current) {  in bch2_btree_node_mem_alloc()
    1478  prt_printf(out, "cannibalize lock:\t%p\n", bc->alloc_lock);  in bch2_btree_cache_to_text()

  btree_types.h
     203  struct task_struct *alloc_lock;  member
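
Unlike every other entry in this listing, bcachefs's alloc_lock is not a spinlock or mutex but a struct task_struct pointer: a task claims the "cannibalize lock" with try_cmpxchg(), the owner may re-enter, and unlocking just clears the pointer. A stripped-down sketch of that idea (the real code also handles waiters and error paths):

/*
 * Sketch only: an ownership token implemented as a task pointer. The
 * cmpxchg succeeds when the token was free; the extra comparison makes
 * it re-entrant for the current owner.
 */
#include <linux/atomic.h>
#include <linux/sched.h>

struct demo_cache {
	struct task_struct *alloc_lock;
};

static bool demo_cannibalize_trylock(struct demo_cache *c)
{
	struct task_struct *old = NULL;

	/* Succeeds if the lock was free or we already own it. */
	return try_cmpxchg(&c->alloc_lock, &old, current) || old == current;
}

static void demo_cannibalize_unlock(struct demo_cache *c)
{
	if (c->alloc_lock == current)
		c->alloc_lock = NULL;
}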
|
/linux/security/yama/
  yama_lsm.c
      81  assert_spin_locked(&target->alloc_lock);  /* for target->comm */  in report_access()
|
/linux/arch/powerpc/include/asm/
  kvm_host.h
     198  struct mutex alloc_lock;  member
|
/linux/include/linux/mlx5/
  driver.h
     316  spinlock_t alloc_lock;  member
|
/linux/drivers/net/ethernet/mellanox/mlx4/
  resource_tracker.c
     321  spin_lock(&res_alloc->alloc_lock);  in mlx4_grant_resource()
     374  spin_unlock(&res_alloc->alloc_lock);  in mlx4_grant_resource()
     390  spin_lock(&res_alloc->alloc_lock);  in mlx4_release_resource()
     419  spin_unlock(&res_alloc->alloc_lock);  in mlx4_release_resource()
     551  spin_lock_init(&res_alloc->alloc_lock);  in mlx4_init_resource_tracker()

  mlx4.h
     543  spinlock_t alloc_lock;  /* protect quotas */  member
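
The mlx4.h comment marks this lock as protecting quotas: mlx4_grant_resource() presumably checks an allocation count against a quota under alloc_lock, and mlx4_release_resource() gives the count back. A hypothetical sketch of that accounting:

/*
 * Sketch only; field names and the error code are invented, not taken
 * from the mlx4 resource tracker.
 */
#include <linux/errno.h>
#include <linux/spinlock.h>

struct demo_resource_alloc {
	spinlock_t alloc_lock;	/* protects quota accounting */
	int allocated;
	int quota;
};

static int demo_grant_resource(struct demo_resource_alloc *ra, int count)
{
	int err = 0;

	spin_lock(&ra->alloc_lock);
	if (ra->allocated + count > ra->quota)
		err = -EDQUOT;
	else
		ra->allocated += count;
	spin_unlock(&ra->alloc_lock);
	return err;
}

static void demo_release_resource(struct demo_resource_alloc *ra, int count)
{
	spin_lock(&ra->alloc_lock);
	ra->allocated -= count;
	spin_unlock(&ra->alloc_lock);
}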
|
/linux/kernel/
  fork.c
    2266  spin_lock_init(&p->alloc_lock);  in copy_process()
    2315  seqcount_spinlock_init(&p->mems_allowed_seq, &p->alloc_lock);  in copy_process()
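
copy_process() both initializes the new task's alloc_lock and ties mems_allowed_seq to it with seqcount_spinlock_init(): writers update the protected data under alloc_lock inside a write section, while readers sample it locklessly and retry. A self-contained sketch of that seqcount-plus-spinlock pattern, using stand-in fields:

/*
 * Sketch of the pattern the two fork.c lines set up; the struct and the
 * two-word value are hypothetical, standing in for mems_allowed.
 */
#include <linux/seqlock.h>
#include <linux/spinlock.h>

struct demo_task {
	spinlock_t alloc_lock;
	seqcount_spinlock_t seq;	/* writers hold alloc_lock */
	unsigned long value[2];
};

static void demo_task_init(struct demo_task *t)
{
	spin_lock_init(&t->alloc_lock);
	seqcount_spinlock_init(&t->seq, &t->alloc_lock);
}

/* Writer: update under the lock, inside a write section. */
static void demo_update(struct demo_task *t, unsigned long a, unsigned long b)
{
	spin_lock(&t->alloc_lock);
	write_seqcount_begin(&t->seq);
	t->value[0] = a;
	t->value[1] = b;
	write_seqcount_end(&t->seq);
	spin_unlock(&t->alloc_lock);
}

/* Reader: lockless, retries if a writer raced with us. */
static void demo_read(struct demo_task *t, unsigned long out[2])
{
	unsigned int seq;

	do {
		seq = read_seqcount_begin(&t->seq);
		out[0] = t->value[0];
		out[1] = t->value[1];
	} while (read_seqcount_retry(&t->seq, seq));
}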
|
/linux/Documentation/admin-guide/cgroup-v1/
  cgroups.rst
     535  - while holding the task's alloc_lock (via task_lock())
|