Searched refs:next (Results 1 – 12 of 12) sorted by relevance
/hyp/core/util/src/list.c
     47  list_node_t *next = atomic_load_relaxed(&prev->next);  in list_insert_at_head_explicit() (local)
     50  atomic_store_relaxed(&node->next, next);  in list_insert_at_head_explicit()
     73  atomic_store_relaxed(&node->next, next);  in list_insert_at_tail_explicit()
    104  list_node_t *next = atomic_load_relaxed(&head->next);  in list_insert_in_order_explicit() (local)
    111  prev = next;  in list_insert_in_order_explicit()
    112  next = atomic_load_relaxed(&prev->next);  in list_insert_in_order_explicit()
    120  atomic_store_relaxed(&node->next, next);  in list_insert_in_order_explicit()
    145  list_node_t *next = atomic_load_relaxed(&prev->next);  in list_insert_after_node_explicit() (local)
    148  atomic_store_relaxed(&node->next, next);  in list_insert_after_node_explicit()
    174  list_node_t *next = atomic_load_relaxed(&node->next);  in list_delete_node() (local)
    [all …]
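The list.c matches all follow one pattern: load the successor with a relaxed atomic, point the new node at it, then publish the new node so concurrent readers never observe a half-linked entry. A minimal sketch of the insert-at-head step, assuming C11 atomics in place of the in-tree atomic_load_relaxed()/atomic_store_relaxed() wrappers and an external lock serialising writers (names here are illustrative, not the real hyp API):

#include <stdatomic.h>

/* Illustrative node type; the real list_node_t comes from the hyp headers. */
typedef struct list_node {
    _Atomic(struct list_node *) next;
} list_node_t;

/* Insert 'node' directly after 'head', mirroring lines 47-50 above. */
static void
list_insert_at_head_sketch(list_node_t *head, list_node_t *node)
{
    list_node_t *next = atomic_load_explicit(&head->next,
                                             memory_order_relaxed);

    /* Link the new node to its successor before publishing it. */
    atomic_store_explicit(&node->next, next, memory_order_relaxed);

    /* Publish last; the real code may want release ordering here, but the
     * snippets above use relaxed stores under the writer-side lock. */
    atomic_store_explicit(&head->next, node, memory_order_relaxed);
}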
/hyp/mem/allocator_list/src/freelist.c
    180  node->next = (*head)->next;  in list_add()
    373  next->next = (*current)->next;  in allocate_from_node()
    377  (*previous)->next = next;  in allocate_from_node()
    387  next->next = (*current)->next;  in allocate_from_node()
    389  (*current)->next = next;  in allocate_from_node()
    535  previous->next = remove->next;  in list_remove()
    653  next = (*head)->next;  in deallocate_block()
    658  next = next->next;  in deallocate_block()
    682  freed_node->next = next->next;  in deallocate_block()
    705  freed_node->next = next;  in deallocate_block()
    [all …]
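The freelist.c hits are free-list surgery: pushing a node onto the list (line 180), splitting a node during allocation, bypassing a node on removal (line 535), and re-linking a freed block between its neighbours in deallocate_block(). A hedged sketch of the removal step, assuming a node layout with only a size and a next pointer (the real allocator_node_t may carry more):

#include <stddef.h>

/* Guessed node shape; per the snippets, it has at least size and next. */
typedef struct freelist_node {
    size_t                size;
    struct freelist_node *next;
} freelist_node_t;

/* Unlink 'remove' from a singly-linked free list, as in list_remove()
 * around line 535: walk with a previous pointer, then bypass the node. */
static void
freelist_unlink_sketch(freelist_node_t **head, freelist_node_t *remove)
{
    if (*head == remove) {
        *head = remove->next;
        return;
    }

    for (freelist_node_t *prev = *head; prev != NULL; prev = prev->next) {
        if (prev->next == remove) {
            prev->next = remove->next;
            break;
        }
    }
}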
/hyp/interfaces/util/include/list.h
     63  for ((node) = atomic_load_relaxed(&(list)->head.next); \
     65  (node) = atomic_load_relaxed(&(node)->next))
     68  list_node_t *n = atomic_load_relaxed(&list->head.next); \
     72  n = atomic_load_relaxed(&n->next), \
     83  list_node_t *n = load(&list->head.next); \
     86  n = load(&n->next); \
     90  n = load(&n->next))
     96  util_cpp_unique_ident(next), \
    106  util_cpp_unique_ident(next), \
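list.h packages the same traversal into foreach-style macros: start from list->head.next, stop at NULL, advance through each node's next, with variants that take an explicit load function and that cache the next pointer so the body may delete the current node. A stripped-down sketch of the basic form, omitting the util_cpp_unique_ident() machinery used by the real macros:

#include <stdatomic.h>
#include <stddef.h>

typedef struct list_node {
    _Atomic(struct list_node *) next;
} list_node_t;

typedef struct list {
    list_node_t head;
} list_t;

/* Simplified foreach with relaxed loads and no deletion safety. */
#define list_foreach_sketch(node, list)                                    \
    for ((node) = atomic_load_explicit(&(list)->head.next,                 \
                                       memory_order_relaxed);              \
         (node) != NULL;                                                   \
         (node) = atomic_load_explicit(&(node)->next, memory_order_relaxed))

/* Usage: list_node_t *n; list_foreach_sketch(n, &my_list) { ... } */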
/hyp/core/task_queue/src/task_queue.c
     95  task_queue_entry_t *next = task_queue_entry_bf_get_next(&entry->bf);  in task_queue_cancel() (local)
     98  task_queue_entry_bf_set_next(&prev->bf, next);  in task_queue_cancel()
     99  task_queue_entry_bf_set_prev(&next->bf, prev);  in task_queue_cancel()
    132  task_queue_entry_t *next =  in task_queue_handle_ipi_received() (local)
    134  task_queue_entry_bf_set_next(&head->bf, next);  in task_queue_handle_ipi_received()
    135  task_queue_entry_bf_set_prev(&next->bf, head);  in task_queue_handle_ipi_received()
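task_queue.c keeps both links of its doubly-linked entries packed in a bitfield, so next and prev are accessed through generated bf_get/bf_set helpers; the cancel path at lines 95-99 is an ordinary doubly-linked unlink expressed through them. A plain-pointer rendering of that step (type and field names below are simplified stand-ins):

/* Hypothetical flat version of the bitfield-based entry. */
typedef struct tq_entry {
    struct tq_entry *prev;
    struct tq_entry *next;
} tq_entry_t;

/* Unlink 'entry' by pointing its neighbours at each other, as in
 * task_queue_cancel(); the real code reads and writes the links via
 * task_queue_entry_bf_get_next() / ..._set_next() / ..._set_prev(). */
static void
task_queue_unlink_sketch(tq_entry_t *entry)
{
    tq_entry_t *prev = entry->prev;
    tq_entry_t *next = entry->next;

    prev->next = next;
    next->prev = prev;
}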
/hyp/mem/memdb_gpt/src/memdb.c
    164  *next = entry_dst.next;  in atomic_entry_read()
    263  uintptr_t next;  in unlock_levels() (local)
    276  assert(curr_entry.next != entry.next);  in unlock_levels()
    285  if (cmp_entry.next == curr_entry.next) {  in unlock_levels()
   1488  &next);  in find_common_level()
   1693  uintptr_t next;  in memdb_insert() (local)
   1708  &next);  in memdb_insert()
   1939  &next);  in memdb_lookup()
   2021  &next);  in memdb_do_walk()
   2139  &next);  in memdb_range_walk()
    [all …]
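In the GPT-backed memdb, next mostly appears as an out-parameter: the entry-read helper hands back the address of the next level through &next, and the insert/lookup/walk paths then descend into it. A hedged sketch of that shape, with an invented entry layout purely for illustration:

#include <stdbool.h>
#include <stdint.h>

/* Invented slot layout: a type tag plus the child-level address. */
typedef struct {
    uint8_t   type;
    uintptr_t next;
} entry_sketch_t;

/* Read one slot and return the child address via an out-parameter, in the
 * spirit of the &next call sites at lines 1488, 1708, 1939, 2021 and 2139. */
static bool
entry_read_sketch(const entry_sketch_t *level, unsigned int index,
                  uint8_t *type, uintptr_t *next)
{
    *type = level[index].type;
    *next = level[index].next;

    return (*next != 0U);
}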
/hyp/core/scheduler_fprr/src/scheduler_fprr.c
    558  thread_t *next = target;  in select_yield_target() (local)
    568  next = yield_to;  in select_yield_target()
    577  return next;  in select_yield_target()
    903  assert(next != thread_get_self());  in scheduler_fprr_handle_thread_context_switch_pre()
    908  scheduler_lock_nopreempt(next);  in scheduler_fprr_handle_thread_context_switch_pre()
    909  cpu_index_t affinity = next->scheduler_affinity;  in scheduler_fprr_handle_thread_context_switch_pre()
    916  (can_be_scheduled(next) || (next == idle_thread()));  in scheduler_fprr_handle_thread_context_switch_pre()
    923  assert(!sched_state_get_exited(&next->scheduler_state));  in scheduler_fprr_handle_thread_context_switch_pre()
    924  sched_state_set_running(&next->scheduler_state, true);  in scheduler_fprr_handle_thread_context_switch_pre()
    925  CPULOCAL(running_thread) = next;  in scheduler_fprr_handle_thread_context_switch_pre()
    [all …]
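The scheduler matches split into two groups: select_yield_target() choosing which thread (next) should receive the CPU, and the context_switch_pre handler locking next, asserting it has not exited, marking it running and recording it as the per-CPU running_thread. A rough sketch of the second group, with the locking and CPULOCAL() machinery replaced by simplified stand-ins:

#include <assert.h>
#include <stdbool.h>

/* Simplified stand-in for thread_t and the per-CPU running-thread slot. */
typedef struct thread {
    bool running;
    bool exited;
} thread_t;

static thread_t *running_thread_this_cpu;

/* Bookkeeping on the incoming thread, loosely following lines 903-925;
 * the real handler also takes the scheduler lock (nopreempt), checks the
 * thread's affinity and allows the idle thread via can_be_scheduled(). */
static void
context_switch_pre_sketch(thread_t *next, thread_t *self)
{
    assert(next != self);
    assert(!next->exited);

    next->running           = true;
    running_thread_this_cpu = next;
}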
/hyp/core/timer_null/src/timer_queue_null.c
     48  timer->next = NULL;  in timer_init_object()
/hyp/core/rcu_bitmap/src/rcu_bitmap.c
    112  rcu_entry->next = batch->heads[rcu_update_class];  in rcu_enqueue()
    493  rcu_entry_t *next = entry->next;  in rcu_bitmap_update() (local)
    499  entry = next;  in rcu_bitmap_update()
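rcu_bitmap.c chains deferred updates through next: rcu_enqueue() pushes an entry onto the head of its per-class batch list, and rcu_bitmap_update() walks that list, caching entry->next before running the callback so the entry may be freed or re-queued from inside it. A minimal sketch of both halves, with illustrative type and callback names:

#include <stddef.h>

/* Illustrative batch entry; the real rcu_entry_t is chained per update
 * class inside the batch structure. */
typedef struct rcu_entry_sketch {
    struct rcu_entry_sketch *next;
} rcu_entry_sketch_t;

typedef void (*rcu_update_fn_t)(rcu_entry_sketch_t *entry);

/* Push onto the head of the class list, as at line 112. */
static void
rcu_enqueue_sketch(rcu_entry_sketch_t **class_head, rcu_entry_sketch_t *entry)
{
    entry->next = *class_head;
    *class_head = entry;
}

/* Drain the batch: save next before the callback runs, since the callback
 * may free or re-enqueue the entry (lines 493-499). */
static void
rcu_drain_sketch(rcu_entry_sketch_t *entry, rcu_update_fn_t update)
{
    while (entry != NULL) {
        rcu_entry_sketch_t *next = entry->next;

        update(entry);
        entry = next;
    }
}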
/hyp/vm/arm_vm_amu/aarch64/src/arm_vm_amu.c
    108  arm_vm_amu_handle_thread_context_switch_pre(thread_t *next)  in arm_vm_amu_handle_thread_context_switch_pre() (argument)
    115  if (compiler_unexpected((next->kind == THREAD_KIND_VCPU) &&  in arm_vm_amu_handle_thread_context_switch_pre()
    117  &next->vcpu_options)))) {  in arm_vm_amu_handle_thread_context_switch_pre()
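Here next is simply the thread being switched to: the pre-switch handler only does AMU work when the incoming thread is a VCPU with the relevant vcpu option set. Schematically (the option field below is a stand-in for the real vcpu_options bitfield check):

#include <stdbool.h>

typedef enum { THREAD_KIND_NONE, THREAD_KIND_VCPU } thread_kind_t;

typedef struct thread {
    thread_kind_t kind;
    bool          amu_option_set; /* stand-in for the vcpu_options check */
} thread_t;

/* Gate the AMU context-switch work on the incoming thread, mirroring the
 * compiler_unexpected() condition at lines 115-117. */
static void
amu_context_switch_pre_sketch(thread_t *next)
{
    if ((next->kind == THREAD_KIND_VCPU) && next->amu_option_set) {
        /* ... AMU save/restore or disable work would go here ... */
    }
}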
/hyp/misc/log_standard/src/string_util.c
    559  const char *next = NULL;  in check_align() (local)
    561  next = fmt + 1;  in check_align()
    565  if ((next != NULL) && (strnidx(stopper, len, *next) < len)) {  in check_align()
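In the formatter, next just peeks one character ahead of the current format position so check_align() can test whether that character is one of the stopper characters (via the in-tree strnidx() helper). A small sketch of the same peek, with the strnidx() lookup approximated by a plain scan:

#include <stdbool.h>
#include <stddef.h>

/* Return true if the character after *fmt is one of the first 'len'
 * stopper characters, in the spirit of check_align() at lines 559-565. */
static bool
next_is_stopper_sketch(const char *fmt, const char *stopper, size_t len)
{
    const char *next = NULL;

    if (*fmt != '\0') {
        next = fmt + 1;
    }

    if (next == NULL) {
        return false;
    }

    for (size_t i = 0U; (i < len) && (stopper[i] != '\0'); i++) {
        if (stopper[i] == *next) {
            return true;
        }
    }

    return false;
}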
/hyp/mem/memdb/tests/test.c
     61  void *next = &tmp_entry.next;  in print_level() (local)
     67  print_level(next);  in print_level()
     82  uintptr_t * next = &tmp_root.next;  in print_memdb()
     87  if (next == NULL) {  in print_memdb()
     98  memdb_level_t *level = (memdb_level_t *)next;  in print_memdb()
/hyp/mem/memdb/src/memdb_tests.c
   1347  *dummy_heap = (allocator_node_t){ .size = dummy_size, .next = NULL };  in memdb_test9()
Completed in 62 milliseconds