
Searched refs:thread (Results 1 – 25 of 41) sorted by relevance


/kernel/
sched.c
63 ARG_UNUSED(thread); in thread_runq()
81 _priq_run_add(thread_runq(thread), thread); in runq_add()
88 _priq_run_remove(thread_runq(thread), thread); in runq_remove()
113 runq_add(thread); in queue_thread()
189 thread = mirqp; in next_up()
206 return (thread != NULL) ? thread : _current_cpu->idle_thread; in next_up()
251 return thread; in next_up()
347 if (!z_is_thread_queued(thread) && z_is_thread_ready(thread)) { in ready_thread()
654 return thread; in z_unpend1_no_timeout()
944 for (thread = z_waitq_head(wait_q); thread != NULL; thread = z_waitq_head(wait_q)) { in z_unpend_all()
[all …]
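
The sched.c hits above are internal run-queue plumbing (runq_add()/runq_remove(), next_up()). For orientation, a minimal sketch of the public-facing code that ultimately exercises these paths, creating and readying a thread, is shown below; the stack size, priority, and entry function are arbitrary choices, not taken from this page.

#include <zephyr/kernel.h>

#define WORKER_STACK_SIZE 1024   /* arbitrary for this sketch */
#define WORKER_PRIORITY   5      /* arbitrary preemptible priority */

K_THREAD_STACK_DEFINE(worker_stack, WORKER_STACK_SIZE);
static struct k_thread worker_thread;

static void worker_entry(void *p1, void *p2, void *p3)
{
    ARG_UNUSED(p1);
    ARG_UNUSED(p2);
    ARG_UNUSED(p3);

    while (1) {
        /* Sleeping removes the thread from the run queue; waking it
         * re-adds it through the ready_thread()/runq_add() path above. */
        k_sleep(K_MSEC(100));
    }
}

int start_worker(void)
{
    k_tid_t tid = k_thread_create(&worker_thread, worker_stack,
                                  K_THREAD_STACK_SIZEOF(worker_stack),
                                  worker_entry, NULL, NULL, NULL,
                                  WORKER_PRIORITY, 0, K_NO_WAIT);

    return (tid != NULL) ? 0 : -1;
}
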
cpu_mask.c
30 thread->base.cpu_mask |= enable_mask; in cpu_mask_mod()
31 thread->base.cpu_mask &= ~disable_mask; in cpu_mask_mod()
38 int m = thread->base.cpu_mask; in cpu_mask_mod()
47 int k_thread_cpu_mask_clear(k_tid_t thread) in k_thread_cpu_mask_clear() argument
49 return cpu_mask_mod(thread, 0, 0xffffffff); in k_thread_cpu_mask_clear()
52 int k_thread_cpu_mask_enable_all(k_tid_t thread) in k_thread_cpu_mask_enable_all() argument
54 return cpu_mask_mod(thread, 0xffffffff, 0); in k_thread_cpu_mask_enable_all()
59 return cpu_mask_mod(thread, BIT(cpu), 0); in k_thread_cpu_mask_enable()
64 return cpu_mask_mod(thread, 0, BIT(cpu)); in k_thread_cpu_mask_disable()
67 int k_thread_cpu_pin(k_tid_t thread, int cpu) in k_thread_cpu_pin() argument
[all …]
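
cpu_mask.c above implements the CONFIG_SCHED_CPU_MASK affinity calls (k_thread_cpu_mask_*(), k_thread_cpu_pin()). A hedged sketch of a typical combination, pinning a not-yet-started thread to one CPU, follows; stack size, priority, and the CPU index are my own, and the mask may only be changed while the thread is not runnable, hence the K_FOREVER start delay.

#include <zephyr/kernel.h>

K_THREAD_STACK_DEFINE(pinned_stack, 1024);   /* size is arbitrary */
static struct k_thread pinned_thread;

extern void pinned_entry(void *p1, void *p2, void *p3);   /* assumed defined elsewhere */

int start_pinned_on_cpu1(void)
{
    /* Create the thread but do not start it yet: the CPU mask can only
     * be modified while the thread is not runnable. */
    k_tid_t tid = k_thread_create(&pinned_thread, pinned_stack,
                                  K_THREAD_STACK_SIZEOF(pinned_stack),
                                  pinned_entry, NULL, NULL, NULL,
                                  5, 0, K_FOREVER);

    int ret = k_thread_cpu_pin(tid, 1);   /* clear the mask, then enable CPU 1 */

    if (ret == 0) {
        k_thread_start(tid);
    }
    return ret;
}
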
float.c
11 int z_impl_k_float_disable(struct k_thread *thread) in z_impl_k_float_disable() argument
14 return arch_float_disable(thread); in z_impl_k_float_disable()
16 ARG_UNUSED(thread); in z_impl_k_float_disable()
21 int z_impl_k_float_enable(struct k_thread *thread, unsigned int options) in z_impl_k_float_enable() argument
24 return arch_float_enable(thread, options); in z_impl_k_float_enable()
26 ARG_UNUSED(thread); in z_impl_k_float_enable()
33 static inline int z_vrfy_k_float_disable(struct k_thread *thread) in z_vrfy_k_float_disable() argument
35 K_OOPS(K_SYSCALL_OBJ(thread, K_OBJ_THREAD)); in z_vrfy_k_float_disable()
36 return z_impl_k_float_disable(thread); in z_vrfy_k_float_disable()
42 K_OOPS(K_SYSCALL_OBJ(thread, K_OBJ_THREAD)); in z_vrfy_k_float_enable()
[all …]
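
float.c provides the k_float_enable()/k_float_disable() syscalls matched above, which defer to arch_float_enable()/arch_float_disable() where the architecture supports runtime FPU sharing. A minimal usage sketch follows; passing 0 for options is an assumption, as some architectures expect arch-specific flags here.

#include <zephyr/kernel.h>

/* Mark a thread as an FPU user, do the float-heavy work, then release the
 * FPU context. Returns a negative errno on architectures or configurations
 * without runtime enable/disable support. */
int run_with_fpu(struct k_thread *thread)
{
    int ret = k_float_enable(thread, 0);   /* 0: no arch-specific options assumed */

    if (ret != 0) {
        return ret;
    }

    /* ... floating point work executes in `thread` ... */

    return k_float_disable(thread);
}
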
usage.c
68 if (thread->base.usage.longest < thread->base.usage.current) { in sched_thread_update_usage()
69 thread->base.usage.longest = thread->base.usage.current; in sched_thread_update_usage()
85 thread->base.usage.current = 0; in z_sched_usage_start()
183 if (thread == cpu->current) { in z_sched_thread_usage()
232 CHECKIF(thread == NULL) { in k_thread_runtime_stats_enable()
253 CHECKIF(thread == NULL) { in k_thread_runtime_stats_disable()
263 if (thread == cpu->current) { in k_thread_runtime_stats_disable()
360 struct k_thread *thread; in z_thread_stats_query() local
373 struct k_thread *thread; in z_thread_stats_reset() local
418 struct k_thread *thread; in z_thread_stats_disable() local
[all …]
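
usage.c is the accounting back end for thread runtime statistics (CONFIG_THREAD_RUNTIME_STATS). A sketch of querying those counters from application code; only the execution_cycles field is read, and the printk() formatting is incidental.

#include <zephyr/kernel.h>
#include <zephyr/sys/printk.h>

void report_thread_usage(k_tid_t tid)
{
    k_thread_runtime_stats_t stats;

    if (k_thread_runtime_stats_get(tid, &stats) == 0) {
        printk("thread %p used %llu cycles\n", (void *)tid,
               (unsigned long long)stats.execution_cycles);
    }
}

void report_total_usage(void)
{
    k_thread_runtime_stats_t all;

    if (k_thread_runtime_stats_all_get(&all) == 0) {
        printk("all threads: %llu cycles\n",
               (unsigned long long)all.execution_cycles);
    }
}
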
timeslicing.c
30 if (thread->base.slice_ticks != 0) { in slice_time()
31 ret = thread->base.slice_ticks; in slice_time()
34 ARG_UNUSED(thread); in slice_time()
41 bool ret = thread_is_preemptible(thread) in thread_is_sliceable()
42 && slice_time(thread) != 0 in thread_is_sliceable()
45 && !z_is_idle_thread_object(thread); in thread_is_sliceable()
48 ret |= thread->base.slice_ticks != 0; in thread_is_sliceable()
74 if (thread_is_sliceable(thread)) { in z_reset_time_slice()
95 thread->base.slice_expired = expired; in k_thread_time_slice_set()
96 thread->base.slice_data = data; in k_thread_time_slice_set()
[all …]
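
timeslicing.c covers both the global round-robin slice and, with CONFIG_TIMESLICE_PER_THREAD, the per-thread slice_ticks/slice_expired fields seen above. A sketch of the global API is below; the 10 ms slice and the priority ceiling of 0 are arbitrary.

#include <zephyr/kernel.h>

void enable_round_robin(void)
{
    /* Give threads at priority 0 and below (numerically greater or equal)
     * a 10 ms slice before the scheduler rotates to the next thread of
     * equal priority. Passing a slice of 0 disables time slicing again. */
    k_sched_time_slice_set(10, 0);
}
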
thread.c
126 return thread->base.prio; in z_impl_k_thread_priority_get()
141 if (thread == NULL) { in z_impl_k_thread_name_set()
142 thread = _current; in z_impl_k_thread_name_set()
156 ARG_UNUSED(thread); in z_impl_k_thread_name_set()
171 if (thread != NULL) { in z_vrfy_k_thread_name_set()
198 ARG_UNUSED(thread); in k_thread_name_get()
209 ARG_UNUSED(thread); in z_impl_k_thread_name_copy()
301 len = strlen(thread->name); in z_vrfy_k_thread_name_copy()
308 ARG_UNUSED(thread); in z_vrfy_k_thread_name_copy()
1068 ARG_UNUSED(thread); in do_thread_cleanup()
[all …]
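
thread.c carries the thread name and priority accessors matched above (behind CONFIG_THREAD_NAME). A small usage sketch; the 32-byte buffer is arbitrary, with CONFIG_THREAD_MAX_NAME_LEN bounding what is actually copied.

#include <zephyr/kernel.h>
#include <zephyr/sys/printk.h>

void label_and_inspect(k_tid_t tid)
{
    char name[32];

    /* Both name calls fail gracefully when CONFIG_THREAD_NAME is disabled. */
    (void)k_thread_name_set(tid, "worker");

    if (k_thread_name_copy(tid, name, sizeof(name)) == 0) {
        printk("thread %p is '%s', priority %d\n",
               (void *)tid, name, k_thread_priority_get(tid));
    }
}
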
thread_monitor.c
15 void z_thread_monitor_exit(struct k_thread *thread) in z_thread_monitor_exit() argument
19 if (thread == _kernel.threads) { in z_thread_monitor_exit()
26 (thread != prev_thread->next_thread)) { in z_thread_monitor_exit()
30 prev_thread->next_thread = thread->next_thread; in z_thread_monitor_exit()
43 struct k_thread *thread; in thread_foreach_helper() local
54 for (thread = _kernel.threads; thread; thread = thread->next_thread) { in thread_foreach_helper()
57 bool on_cpu = (thread->base.cpu == cpu); in thread_foreach_helper()
67 user_cb(thread, user_data); in thread_foreach_helper()
70 user_cb(thread, user_data); in thread_foreach_helper()
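
thread_monitor.c backs k_thread_foreach() and the related CONFIG_THREAD_MONITOR iterators that walk the _kernel.threads list shown above. A sketch of the callback pattern; counting threads is just a placeholder payload.

#include <zephyr/kernel.h>
#include <zephyr/sys/printk.h>

static void count_cb(const struct k_thread *thread, void *user_data)
{
    int *count = user_data;

    ARG_UNUSED(thread);
    (*count)++;
}

void print_thread_count(void)
{
    int count = 0;

    k_thread_foreach(count_cb, &count);   /* requires CONFIG_THREAD_MONITOR */
    printk("%d threads registered\n", count);
}
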
events.c
126 event_data->head = thread; in event_walk_op()
137 struct k_thread *thread; in k_event_post_internal() local
165 thread = data.head; in k_event_post_internal()
169 thread->events = events; in k_event_post_internal()
170 next = thread->next_event_link; in k_event_post_internal()
172 thread = next; in k_event_post_internal()
173 } while (thread != NULL); in k_event_post_internal()
247 struct k_thread *thread; in k_event_wait_internal() local
290 thread->events = events; in k_event_wait_internal()
291 thread->event_options = options; in k_event_wait_internal()
[all …]
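
events.c is the k_event implementation; event_walk_op() and k_event_post_internal() above wake pended threads whose event masks match. A minimal producer/consumer sketch, with the bit assignments invented for illustration.

#include <zephyr/kernel.h>

#define EVT_RX_DONE  BIT(0)
#define EVT_TX_DONE  BIT(1)

K_EVENT_DEFINE(io_events);

void producer(void)
{
    /* Wakes any thread waiting on a mask that includes EVT_RX_DONE. */
    k_event_post(&io_events, EVT_RX_DONE);
}

void consumer(void)
{
    /* Wait for either bit; `true` resets the event word before waiting. */
    uint32_t got = k_event_wait(&io_events, EVT_RX_DONE | EVT_TX_DONE,
                                true, K_FOREVER);

    if ((got & EVT_RX_DONE) != 0U) {
        /* handle RX completion */
    }
}
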
mem_domain.c
260 k_tid_t thread) in add_thread_locked() argument
265 __ASSERT_NO_MSG(thread != NULL); in add_thread_locked()
269 &thread->mem_domain_info.mem_domain_q_node); in add_thread_locked()
270 thread->mem_domain_info.mem_domain = domain; in add_thread_locked()
273 ret = arch_mem_domain_thread_add(thread); in add_thread_locked()
283 __ASSERT_NO_MSG(thread != NULL); in remove_thread_locked()
285 thread, thread->mem_domain_info.mem_domain); in remove_thread_locked()
289 ret = arch_mem_domain_thread_remove(thread); in remove_thread_locked()
316 ret = remove_thread_locked(thread); in z_mem_domain_exit_thread()
330 ret = remove_thread_locked(thread); in k_mem_domain_add_thread()
[all …]
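
mem_domain.c tracks which threads belong to which memory domain (the add_thread_locked()/remove_thread_locked() helpers above). A hedged sketch of the public side under CONFIG_USERSPACE; the empty partition list keeps the example short, and real code would pass an array of struct k_mem_partition pointers.

#include <zephyr/kernel.h>

static struct k_mem_domain app_domain;

int move_thread_to_app_domain(k_tid_t tid)
{
    int ret = k_mem_domain_init(&app_domain, 0, NULL);

    if (ret != 0) {
        return ret;
    }

    /* Removes the thread from its current domain and adds it here,
     * mirroring the locked add/remove helpers matched above. */
    return k_mem_domain_add_thread(&app_domain, tid);
}
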
fatal.c
49 static const char *thread_name_get(struct k_thread *thread) in thread_name_get() argument
51 const char *thread_name = (thread != NULL) ? k_thread_name_get(thread) : NULL; in thread_name_get()
92 struct k_thread *thread = IS_ENABLED(CONFIG_MULTITHREADING) ? in z_fatal_error() local
114 LOG_ERR("Current thread: %p (%s)", thread, thread_name_get(thread)); in z_fatal_error()
117 coredump(reason, esf, thread); in z_fatal_error()
177 k_thread_abort(thread); in z_fatal_error()
sem.c
98 struct k_thread *thread; in z_impl_k_sem_give() local
103 thread = z_unpend_first_thread(&sem->wait_q); in z_impl_k_sem_give()
105 if (unlikely(thread != NULL)) { in z_impl_k_sem_give()
106 arch_thread_return_value_set(thread, 0); in z_impl_k_sem_give()
107 z_ready_thread(thread); in z_impl_k_sem_give()
168 struct k_thread *thread; in z_impl_k_sem_reset() local
173 thread = z_unpend_first_thread(&sem->wait_q); in z_impl_k_sem_reset()
174 if (thread == NULL) { in z_impl_k_sem_reset()
178 arch_thread_return_value_set(thread, -EAGAIN); in z_impl_k_sem_reset()
179 z_ready_thread(thread); in z_impl_k_sem_reset()
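
sem.c's z_impl_k_sem_give() above hands the semaphore directly to the first pended thread when one exists. The canonical give/take pairing, for reference; the initial count of 0 and limit of 1 are a common but arbitrary choice.

#include <zephyr/kernel.h>

K_SEM_DEFINE(data_ready, 0, 1);   /* initial count 0, limit 1 */

void producer_or_isr(void)
{
    /* If a consumer is pended, this readies it through the
     * z_unpend_first_thread()/z_ready_thread() path shown above. */
    k_sem_give(&data_ready);
}

void consumer_loop(void)
{
    while (1) {
        if (k_sem_take(&data_ready, K_FOREVER) == 0) {
            /* process the data */
        }
    }
}
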
futex.c
31 struct k_thread *thread; in z_impl_k_futex_wake() local
42 thread = z_unpend_first_thread(&futex_data->wait_q); in z_impl_k_futex_wake()
43 if (thread != NULL) { in z_impl_k_futex_wake()
45 arch_thread_return_value_set(thread, 0); in z_impl_k_futex_wake()
46 z_ready_thread(thread); in z_impl_k_futex_wake()
48 } while (thread && wake_all); in z_impl_k_futex_wake()
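
futex.c services k_futex_wake() for user-mode callers. A hedged sketch of the wait/wake pairing; it assumes CONFIG_USERSPACE, that my_futex lives in memory the waiting thread may access, and that -EAGAIN (value already changed) is an acceptable outcome for the waiter.

#include <zephyr/kernel.h>
#include <zephyr/sys/atomic.h>
#include <errno.h>

static struct k_futex my_futex;   /* must reside in user-accessible memory */

void waiter(void)
{
    /* Blocks only if the futex value is still 0 when the kernel checks it. */
    int ret = k_futex_wait(&my_futex, 0, K_FOREVER);

    if (ret == 0 || ret == -EAGAIN) {
        /* woken, or the value changed before we slept */
    }
}

void waker(void)
{
    atomic_set(&my_futex.val, 1);
    (void)k_futex_wake(&my_futex, false);   /* wake a single waiter */
}
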
ipi.c
26 atomic_val_t ipi_mask_create(struct k_thread *thread) in ipi_mask_create() argument
54 executable_on_cpu = ((thread->base.cpu_mask & BIT(i)) != 0); in ipi_mask_create()
59 (((z_sched_prio_cmp(cpu_thread, thread) < 0) && in ipi_mask_create()
61 thread_is_metairq(thread)) && executable_on_cpu) { in ipi_mask_create()
Kconfig
20 since the main thread cannot pend, it being the only thread in the
193 This option allows each thread to store the thread stack info into
425 thread.
428 bool "Store current thread in thread local storage (TLS)"
432 Use thread local storage to store the current thread. This avoids a
505 int "Max length of a thread name"
743 thread structure.
750 allows a thread to send a byte stream to another thread. Pipes can
754 thread structure.
984 per thread.
[all …]
userspace_handler.c
57 struct k_thread *thread) in z_vrfy_k_object_access_grant() argument
61 K_OOPS(K_SYSCALL_OBJ_INIT(thread, K_OBJ_THREAD)); in z_vrfy_k_object_access_grant()
65 k_thread_perms_set(ko, thread); in z_vrfy_k_object_access_grant()
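
userspace_handler.c is the syscall verification shim for k_object_access_grant(). From supervisor code, granting a user thread access to a kernel object is a single call; the semaphore here is just a stand-in object.

#include <zephyr/kernel.h>

K_SEM_DEFINE(shared_sem, 0, 1);

void grant_sem_to(k_tid_t user_thread)
{
    /* After this, the user-mode thread passes the K_SYSCALL_OBJ permission
     * check above when it operates on shared_sem. */
    k_object_access_grant(&shared_sem, user_thread);
}
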
dynamic.c
105 static void dyn_cb(const struct k_thread *thread, void *user_data) in dyn_cb() argument
109 if (data->stack == (k_thread_stack_t *)thread->stack_info.start) { in dyn_cb()
111 (void *)thread->stack_info.start); in dyn_cb()
112 data->tid = (k_tid_t)thread; in dyn_cb()
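
dynamic.c implements dynamically allocated thread stacks (CONFIG_DYNAMIC_THREAD); dyn_cb() above matches a stack pointer back to its owning thread. A hedged allocation sketch with an arbitrary size, no allocation flags, and the entry function assumed elsewhere.

#include <zephyr/kernel.h>
#include <errno.h>

extern void dyn_entry(void *p1, void *p2, void *p3);   /* assumed defined elsewhere */

static struct k_thread dyn_thread;

int spawn_dynamic(void)
{
    k_thread_stack_t *stack = k_thread_stack_alloc(2048, 0);

    if (stack == NULL) {
        return -ENOMEM;
    }

    /* The stack would later be released with k_thread_stack_free() once
     * the thread has exited. */
    k_thread_create(&dyn_thread, stack, 2048, dyn_entry,
                    NULL, NULL, NULL, 5, 0, K_NO_WAIT);
    return 0;
}
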
condvar.c
50 struct k_thread *thread = z_unpend_first_thread(&condvar->wait_q); in z_impl_k_condvar_signal() local
52 if (unlikely(thread != NULL)) { in z_impl_k_condvar_signal()
55 arch_thread_return_value_set(thread, 0); in z_impl_k_condvar_signal()
56 z_ready_thread(thread); in z_impl_k_condvar_signal()
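
condvar.c readies at most one waiter per k_condvar_signal(), as the z_unpend_first_thread() call above shows. The standard mutex-plus-condvar wait loop looks roughly like this; the data_available predicate is a placeholder.

#include <zephyr/kernel.h>
#include <stdbool.h>

K_MUTEX_DEFINE(state_lock);
K_CONDVAR_DEFINE(state_changed);

static bool data_available;

void waiter_side(void)
{
    k_mutex_lock(&state_lock, K_FOREVER);
    while (!data_available) {
        /* Atomically releases the mutex and pends on the condvar, then
         * re-acquires the mutex before returning. */
        k_condvar_wait(&state_changed, &state_lock, K_FOREVER);
    }
    /* ... consume the data ... */
    data_available = false;
    k_mutex_unlock(&state_lock);
}

void signaler_side(void)
{
    k_mutex_lock(&state_lock, K_FOREVER);
    data_available = true;
    k_condvar_signal(&state_changed);   /* readies at most one waiter */
    k_mutex_unlock(&state_lock);
}
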
/kernel/include/
kthread.h
40 #define z_thread_monitor_exit(thread) \ argument
50 k_thread_start(thread); in thread_schedule_new()
52 z_add_thread_timeout(thread, delay); in thread_schedule_new()
56 k_thread_start(thread); in thread_schedule_new()
73 ARG_UNUSED(thread); in thread_is_metairq()
98 uint8_t state = thread->base.thread_state; in z_is_thread_prevented_from_running()
126 thread->base.thread_state |= _THREAD_QUEUED; in z_mark_thread_as_queued()
179 thread->base.user_options |= K_ESSENTIAL; in z_thread_essential_set()
189 thread->base.user_options &= ~K_ESSENTIAL; in z_thread_essential_clear()
250 return thread->base.is_idle; in z_is_idle_thread_object()
[all …]
ksched.h
145 return thread->base.pended_on; in pended_on_thread()
151 _priq_wait_remove(&pended_on_thread(thread)->waitq, thread); in unpend_thread_no_timeout()
152 z_mark_thread_as_not_pending(thread); in unpend_thread_no_timeout()
153 thread->base.pended_on = NULL; in unpend_thread_no_timeout()
164 struct k_thread *thread = NULL; in z_unpend_first_thread() local
171 if (unlikely(thread != NULL)) { in z_unpend_first_thread()
172 unpend_thread_no_timeout(thread); in z_unpend_first_thread()
173 z_abort_thread_timeout(thread); in z_unpend_first_thread()
177 return thread; in z_unpend_first_thread()
333 ARG_UNUSED(thread); in z_sched_usage_switch()
[all …]
priority_q.h
111 if (z_sched_prio_cmp(thread, t) > 0) { in z_priq_simple_add()
124 sys_dlist_remove(&thread->base.qnode_dlist); in z_priq_simple_remove()
160 struct k_thread *thread = NULL; in z_priq_simple_best() local
166 return thread; in z_priq_simple_best()
175 struct k_thread *thread; in z_priq_simple_mask_best() local
179 return thread; in z_priq_simple_mask_best()
202 thread->base.order_key = pq->next_order_key; in z_priq_rb_add()
240 struct k_thread *thread = NULL; in z_priq_rb_best() local
246 return thread; in z_priq_rb_best()
293 struct k_thread *thread) in z_priq_mq_add() argument
[all …]
timeout_q.h
59 static inline k_ticks_t z_add_thread_timeout(struct k_thread *thread, k_timeout_t ticks) in z_add_thread_timeout() argument
61 return z_add_timeout(&thread->base.timeout, z_thread_timeout, ticks); in z_add_thread_timeout()
64 static inline void z_abort_thread_timeout(struct k_thread *thread) in z_abort_thread_timeout() argument
66 z_abort_timeout(&thread->base.timeout); in z_abort_thread_timeout()
69 static inline bool z_is_aborted_thread_timeout(struct k_thread *thread) in z_is_aborted_thread_timeout() argument
72 return z_is_aborted_timeout(&thread->base.timeout); in z_is_aborted_thread_timeout()
90 static inline k_ticks_t z_add_thread_timeout(struct k_thread *thread, k_timeout_t ticks)
92 ARG_UNUSED(thread);
kernel_arch_interface.h
76 void arch_new_thread(struct k_thread *thread, k_thread_stack_t *stack,
160 arch_thread_return_value_set(struct k_thread *thread, unsigned int value);
201 int arch_float_disable(struct k_thread *thread);
221 int arch_float_enable(struct k_thread *thread, unsigned int options);
230 int arch_coprocessors_disable(struct k_thread *thread);
256 int arch_thread_priv_stack_space_get(const struct k_thread *thread, size_t *stack_size,
622 int arch_thread_name_set(struct k_thread *thread, const char *str);
661 uintptr_t arch_coredump_stack_ptr_get(const struct k_thread *thread);
672 void arch_coredump_priv_stack_dump(struct k_thread *thread);
kernel_internal.h
116 arch_thread_return_value_set(struct k_thread *thread, unsigned int value) in arch_thread_return_value_set() argument
118 thread->swap_retval = value; in arch_thread_return_value_set()
123 z_thread_return_value_set_with_data(struct k_thread *thread, in z_thread_return_value_set_with_data() argument
127 arch_thread_return_value_set(thread, value); in z_thread_return_value_set_with_data()
128 thread->base.swap_data = data; in z_thread_return_value_set_with_data()
165 void z_mem_domain_init_thread(struct k_thread *thread);
168 void z_mem_domain_exit_thread(struct k_thread *thread);
292 void k_thread_abort_cleanup(struct k_thread *thread);
304 void k_thread_abort_cleanup_check_reuse(struct k_thread *thread);
kswap.h
29 void z_smp_release_global_lock(struct k_thread *thread);
53 static inline void z_sched_switch_spin(struct k_thread *thread) in z_sched_switch_spin() argument
56 volatile void **shp = (void *)&thread->switch_handle; in z_sched_switch_spin()
197 static inline void z_sched_switch_spin(struct k_thread *thread) in z_sched_switch_spin() argument
199 ARG_UNUSED(thread); in z_sched_switch_spin()
/kernel/paging/
statistics.c
112 void z_impl_k_mem_paging_thread_stats_get(struct k_thread *thread, in z_impl_k_mem_paging_thread_stats_get() argument
115 if ((thread == NULL) || (stats == NULL)) { in z_impl_k_mem_paging_thread_stats_get()
120 memcpy(stats, &thread->paging_stats, sizeof(thread->paging_stats)); in z_impl_k_mem_paging_thread_stats_get()
125 void z_vrfy_k_mem_paging_thread_stats_get(struct k_thread *thread, in z_vrfy_k_mem_paging_thread_stats_get() argument
128 K_OOPS(K_SYSCALL_OBJ(thread, K_OBJ_THREAD)); in z_vrfy_k_mem_paging_thread_stats_get()
130 z_impl_k_mem_paging_thread_stats_get(thread, stats); in z_vrfy_k_mem_paging_thread_stats_get()
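
statistics.c exposes per-thread demand-paging counters (CONFIG_DEMAND_PAGING_THREAD_STATS). A hedged sketch of reading them; the header location and the layout of struct k_mem_paging_stats_t beyond the page-fault count are assumptions that may differ between Zephyr versions.

#include <zephyr/kernel.h>
#include <zephyr/kernel/mm/demand_paging.h>   /* assumed header location */
#include <zephyr/sys/printk.h>

void dump_paging_stats(struct k_thread *thread)
{
    struct k_mem_paging_stats_t stats;

    k_mem_paging_thread_stats_get(thread, &stats);
    printk("thread %p: %lu page faults\n", (void *)thread,
           stats.pagefaults.cnt);
}
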
