Lines Matching refs:current_thread

86         _curthr = _percpu->current_thread; \
97 _curthr = _percpu->current_thread; \
441 pcpu->current_thread = RT_NULL; in rt_system_scheduler_init()
609 rt_thread_t current_thread) in _prepare_context_switch_locked() argument
621 RT_SCHED_CTX(current_thread).oncpu = RT_CPU_DETACHED; in _prepare_context_switch_locked()
624 if ((RT_SCHED_CTX(current_thread).stat & RT_THREAD_STAT_MASK) == RT_THREAD_RUNNING) in _prepare_context_switch_locked()
627 if (RT_SCHED_CTX(current_thread).bind_cpu == RT_CPUS_NR in _prepare_context_switch_locked()
628 || RT_SCHED_CTX(current_thread).bind_cpu == cpu_id) in _prepare_context_switch_locked()
631 if (RT_SCHED_PRIV(current_thread).current_priority < highest_ready_priority) in _prepare_context_switch_locked()
633 to_thread = current_thread; in _prepare_context_switch_locked()
636 else if (RT_SCHED_PRIV(current_thread).current_priority == highest_ready_priority && in _prepare_context_switch_locked()
637 (RT_SCHED_CTX(current_thread).stat & RT_THREAD_STAT_YIELD_MASK) == 0) in _prepare_context_switch_locked()
639 to_thread = current_thread; in _prepare_context_switch_locked()
644 _sched_insert_thread_locked(current_thread); in _prepare_context_switch_locked()
650 _sched_insert_thread_locked(current_thread); in _prepare_context_switch_locked()
654 RT_SCHED_CTX(current_thread).stat &= ~RT_THREAD_STAT_YIELD_MASK; in _prepare_context_switch_locked()
668 if (to_thread != current_thread) in _prepare_context_switch_locked()
672 RT_OBJECT_HOOK_CALL(rt_scheduler_hook, (current_thread, to_thread)); in _prepare_context_switch_locked()
680 RT_OBJECT_HOOK_CALL(rt_scheduler_switch_hook, (current_thread)); in _prepare_context_switch_locked()
706 static void _sched_thread_preprocess_signal(struct rt_thread *current_thread) in _sched_thread_preprocess_signal() argument
709 if (rt_sched_thread_is_suspended(current_thread)) in _sched_thread_preprocess_signal()
712 if ((RT_SCHED_CTX(current_thread).stat & RT_THREAD_STAT_SIGNAL_MASK) & RT_THREAD_STAT_SIGNAL_PENDING) in _sched_thread_preprocess_signal()
715 rt_thread_wakeup(current_thread); in _sched_thread_preprocess_signal()
717 rt_thread_resume(current_thread); in _sched_thread_preprocess_signal()
737 static void _sched_thread_process_signal(struct rt_thread *current_thread) in _sched_thread_process_signal() argument
743 if (RT_SCHED_CTX(current_thread).stat & RT_THREAD_STAT_SIGNAL_PENDING) in _sched_thread_process_signal()
747 RT_SCHED_CTX(current_thread).stat &= ~RT_THREAD_STAT_SIGNAL_PENDING; in _sched_thread_process_signal()
804 struct rt_thread *current_thread; in rt_sched_unlock_n_resched() local
811 current_thread = pcpu->current_thread; in rt_sched_unlock_n_resched()
813 if (!current_thread) in rt_sched_unlock_n_resched()
817 SCHEDULER_EXIT_CRITICAL(current_thread); in rt_sched_unlock_n_resched()
827 SCHEDULER_EXIT_CRITICAL(current_thread); in rt_sched_unlock_n_resched()
833 SCHED_THREAD_PREPROCESS_SIGNAL_LOCKED(current_thread); in rt_sched_unlock_n_resched()
836 if (RT_SCHED_CTX(current_thread).critical_lock_nest > 1) in rt_sched_unlock_n_resched()
841 SET_CRITICAL_SWITCH_FLAG(pcpu, current_thread); in rt_sched_unlock_n_resched()
844 SCHEDULER_EXIT_CRITICAL(current_thread); in rt_sched_unlock_n_resched()
849 CLR_CRITICAL_SWITCH_FLAG(pcpu, current_thread); in rt_sched_unlock_n_resched()
852 to_thread = _prepare_context_switch_locked(cpu_id, pcpu, current_thread); in rt_sched_unlock_n_resched()
861 RT_NAME_MAX, current_thread->parent.name, current_thread->sp); in rt_sched_unlock_n_resched()
863 rt_hw_context_switch((rt_ubase_t)&current_thread->sp, in rt_sched_unlock_n_resched()
869 SCHEDULER_EXIT_CRITICAL(current_thread); in rt_sched_unlock_n_resched()
877 SCHED_THREAD_PROCESS_SIGNAL(current_thread); in rt_sched_unlock_n_resched()
900 struct rt_thread *current_thread; in rt_schedule() local
910 current_thread = pcpu->current_thread; in rt_schedule()
921 SCHEDULER_ENTER_CRITICAL(current_thread); in rt_schedule()
924 SCHED_THREAD_PREPROCESS_SIGNAL(pcpu, current_thread); in rt_schedule()
927 if (RT_SCHED_CTX(current_thread).critical_lock_nest > 1) in rt_schedule()
929 SET_CRITICAL_SWITCH_FLAG(pcpu, current_thread); in rt_schedule()
931 SCHEDULER_EXIT_CRITICAL(current_thread); in rt_schedule()
938 CLR_CRITICAL_SWITCH_FLAG(pcpu, current_thread); in rt_schedule()
948 to_thread = _prepare_context_switch_locked(cpu_id, pcpu, current_thread); in rt_schedule()
957 RT_NAME_MAX, current_thread->parent.name, current_thread->sp); in rt_schedule()
959 rt_hw_context_switch((rt_ubase_t)&current_thread->sp, in rt_schedule()
966 SCHEDULER_EXIT_CRITICAL(current_thread); in rt_schedule()
974 SCHED_THREAD_PROCESS_SIGNAL(current_thread); in rt_schedule()
995 struct rt_thread *current_thread; in rt_scheduler_do_irq_switch() local
1001 current_thread = pcpu->current_thread; in rt_scheduler_do_irq_switch()
1004 SCHEDULER_ENTER_CRITICAL(current_thread); in rt_scheduler_do_irq_switch()
1006 SCHED_THREAD_PREPROCESS_SIGNAL(pcpu, current_thread); in rt_scheduler_do_irq_switch()
1012 SCHEDULER_EXIT_CRITICAL(current_thread); in rt_scheduler_do_irq_switch()
1018 if (RT_SCHED_CTX(current_thread).critical_lock_nest > 1) in rt_scheduler_do_irq_switch()
1020 SET_CRITICAL_SWITCH_FLAG(pcpu, current_thread); in rt_scheduler_do_irq_switch()
1021 SCHEDULER_EXIT_CRITICAL(current_thread); in rt_scheduler_do_irq_switch()
1026 CLR_CRITICAL_SWITCH_FLAG(pcpu, current_thread); in rt_scheduler_do_irq_switch()
1032 to_thread = _prepare_context_switch_locked(cpu_id, pcpu, current_thread); in rt_scheduler_do_irq_switch()
1040 RT_NAME_MAX, current_thread->parent.name, current_thread->sp); in rt_scheduler_do_irq_switch()
1042 rt_hw_context_switch_interrupt(context, (rt_ubase_t)&current_thread->sp, in rt_scheduler_do_irq_switch()
1049 SCHEDULER_EXIT_CRITICAL(current_thread); in rt_scheduler_do_irq_switch()
1054 SCHEDULER_EXIT_CRITICAL(current_thread); in rt_scheduler_do_irq_switch()
1187 rt_thread_t from_thread = pcpu->current_thread; in rt_sched_post_ctx_switch()
1200 pcpu->current_thread = thread; in rt_sched_post_ctx_switch()
1225 rt_thread_t current_thread = pcpu->current_thread; in rt_exit_critical_safe() local
1226 if (current_thread && !_critical_error_occurred) in rt_exit_critical_safe()
1228 if (critical_level != RT_SCHED_CTX(current_thread).critical_lock_nest) in rt_exit_critical_safe()
1235 __func__, RT_SCHED_CTX(current_thread).critical_lock_nest, in rt_exit_critical_safe()
1295 struct rt_thread *current_thread; in rt_enter_critical() local
1305 current_thread = pcpu->current_thread; in rt_enter_critical()
1308 current_thread = rt_hw_thread_self(); in rt_enter_critical()
1312 if (!current_thread) in rt_enter_critical()
1320 RT_SCHED_CTX(current_thread).critical_lock_nest++; in rt_enter_critical()
1321 critical_level = RT_SCHED_CTX(current_thread).critical_lock_nest; in rt_enter_critical()
1348 struct rt_thread *current_thread; in rt_exit_critical() local
1359 current_thread = pcpu->current_thread; in rt_exit_critical()
1362 current_thread = rt_hw_thread_self(); in rt_exit_critical()
1366 if (!current_thread) in rt_exit_critical()
1373 RT_SCHED_CTX(current_thread).critical_lock_nest--; in rt_exit_critical()
1376 if (RT_SCHED_CTX(current_thread).critical_lock_nest == 0) in rt_exit_critical()
1379 need_resched = IS_CRITICAL_SWITCH_PEND(pcpu, current_thread); in rt_exit_critical()
1380 CLR_CRITICAL_SWITCH_FLAG(pcpu, current_thread); in rt_exit_critical()
1390 RT_ASSERT(RT_SCHED_CTX(current_thread).critical_lock_nest > 0); in rt_exit_critical()
1406 struct rt_thread *current_thread; in rt_critical_level() local
1410 current_thread = rt_cpu_self()->current_thread; in rt_critical_level()
1412 if (current_thread) in rt_critical_level()
1415 critical_lvl = RT_SCHED_CTX(current_thread).critical_lock_nest; in rt_critical_level()