/*
 * Copyright (c) 2019 Carlo Caione <ccaione@baylibre.com>
 *
 * SPDX-License-Identifier: Apache-2.0
 */
/**
 * @file
 * @brief Private kernel definitions (ARM)
 *
 * This file contains private kernel function definitions and various
 * other definitions for the 32-bit ARM Cortex-M processor architecture
 * family.
 *
 * This file is also included by assembly language files which must #define
 * _ASMLANGUAGE before including this header file.  Note that kernel
 * assembly source files obtain structure offset values via "absolute symbols"
 * in the offsets.o module.
 */

#ifndef ZEPHYR_ARCH_ARM_INCLUDE_CORTEX_M_KERNEL_ARCH_FUNC_H_
#define ZEPHYR_ARCH_ARM_INCLUDE_CORTEX_M_KERNEL_ARCH_FUNC_H_

#include <zephyr/platform/hooks.h>

#ifdef __cplusplus
extern "C" {
#endif

#ifndef _ASMLANGUAGE
extern void z_arm_fault_init(void);
extern void z_arm_cpu_idle_init(void);
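/* Helpers for initializing the MPU and for programming the static and the
 * per-thread (dynamic) memory regions.
 */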
#ifdef CONFIG_ARM_MPU
extern void z_arm_configure_static_mpu_regions(void);
extern void z_arm_configure_dynamic_mpu_regions(struct k_thread *thread);
extern int z_arm_mpu_init(void);
#endif /* CONFIG_ARM_MPU */
#ifdef CONFIG_ARM_AARCH32_MMU
extern int z_arm_mmu_init(void);
#endif /* CONFIG_ARM_AARCH32_MMU */

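/* Perform Cortex-M specific kernel initialization: set up the interrupt
 * stack, exception and fault handling, the CPU idle infrastructure and,
 * when enabled, the MPU with its static memory regions. Called once
 * during early kernel initialization.
 */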
static ALWAYS_INLINE void arch_kernel_init(void)
{
	z_arm_interrupt_stack_setup();
	z_arm_exc_setup();
	z_arm_fault_init();
	z_arm_cpu_idle_init();
	z_arm_clear_faults();
#if defined(CONFIG_ARM_MPU)
	z_arm_mpu_init();
	/* Configure static memory map. This will program MPU regions,
	 * to set up access permissions for fixed memory sections, such
	 * as Application Memory or No-Cacheable SRAM area.
	 *
	 * This function is invoked once, upon system initialization.
	 */
	z_arm_configure_static_mpu_regions();
#endif /* CONFIG_ARM_MPU */

#ifdef CONFIG_SOC_PER_CORE_INIT_HOOK
	soc_per_core_init_hook();
#endif /* CONFIG_SOC_PER_CORE_INIT_HOOK */
}

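/* Record the value that arch_swap() will return to the given thread when
 * the thread is next switched back in.
 */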
static ALWAYS_INLINE void arch_thread_return_value_set(struct k_thread *thread, unsigned int value)
{
	thread->arch.swap_return_value = value;
}

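/* With CONFIG_MULTITHREADING disabled, the kernel branches straight to
 * main() instead of switching to the main thread through the scheduler.
 */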
#if !defined(CONFIG_MULTITHREADING)
extern FUNC_NORETURN void z_arm_switch_to_main_no_multithreading(k_thread_entry_t main_func,
								 void *p1, void *p2, void *p3);

#define ARCH_SWITCH_TO_MAIN_NO_MULTITHREADING z_arm_switch_to_main_no_multithreading

#endif /* !CONFIG_MULTITHREADING */

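/* Drop the calling thread to unprivileged (user) mode and start executing
 * user_entry on the thread's user stack; does not return.
 */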
extern FUNC_NORETURN void z_arm_userspace_enter(k_thread_entry_t user_entry, void *p1, void *p2,
						void *p3, uint32_t stack_end, uint32_t stack_start);

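/* Architecture-specific fatal error handler, invoked from the fault and
 * exception handling code with the exception stack frame, if one is
 * available.
 */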
extern void z_arm_fatal_error(unsigned int reason, const struct arch_esf *esf);

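/* Initiate a cooperative context switch: pend PendSV and unlock interrupts
 * so the exception can be taken. The return value is whatever was stored
 * for this thread via arch_thread_return_value_set() (-EAGAIN by default).
 */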
static ALWAYS_INLINE int arch_swap(unsigned int key)
{
	/* store off key and return value */
	_current->arch.basepri = key;
	_current->arch.swap_return_value = -EAGAIN;

	/* set pending bit to make sure we will take a PendSV exception */
	SCB->ICSR |= SCB_ICSR_PENDSVSET_Msk;

	/* clear mask or enable all irqs to take a pendsv */
	irq_unlock(0);

	/* Context switch is performed here. Returning implies the
	 * thread has been context-switched-in again.
	 */
	return _current->arch.swap_return_value;
}

#endif /* _ASMLANGUAGE */

#ifdef __cplusplus
}
#endif

#endif /* ZEPHYR_ARCH_ARM_INCLUDE_CORTEX_M_KERNEL_ARCH_FUNC_H_ */