/arch/riscv/core/

ipi_clint.c
     26  unsigned int id = _current_cpu->id;  in arch_sched_directed_ipi()  (local)
     30  if ((i != id) && _kernel.cpus[i].arch.online && ((cpu_bitmap & BIT(i)) != 0)) {  in arch_sched_directed_ipi()
     53  atomic_val_t pending_ipi = atomic_clear(&cpu_pending_ipi[_current_cpu->id]);  in sched_ipi_handler()
     82  atomic_val_t *pending_ipi = &cpu_pending_ipi[_current_cpu->id];  in arch_spin_relax()
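The ipi_clint.c matches outline the directed-IPI pattern: each CPU owns an atomic word of pending IPI reasons, the sender sets bits for every other online CPU selected by the bitmap, and the handler atomically clears and consumes them. Below is a minimal, self-contained sketch of that pattern; the CPU count, the IPI_SCHED bit, and raise_hw_ipi() are illustrative stand-ins, not the actual Zephyr CLINT driver code.

```c
/*
 * Minimal sketch of the pending-IPI bookkeeping pattern above
 * (simplified stand-in, not the actual Zephyr CLINT driver).
 */
#include <stdatomic.h>
#include <stdbool.h>
#include <stdio.h>

#define NUM_CPUS  4
#define BIT(n)    (1UL << (n))
#define IPI_SCHED BIT(0)               /* assumed "reschedule" reason bit */

static atomic_ulong cpu_pending_ipi[NUM_CPUS];
static bool cpu_online[NUM_CPUS] = { true, true, true, true };

/* Hypothetical stand-in for raising the hardware IPI (e.g. CLINT MSIP). */
static void raise_hw_ipi(unsigned int cpu) { (void)cpu; }

static void sched_directed_ipi(unsigned long cpu_bitmap, unsigned int self)
{
        for (unsigned int i = 0; i < NUM_CPUS; i++) {
                /* Skip ourselves and offline CPUs, honour the caller's bitmap. */
                if ((i != self) && cpu_online[i] && ((cpu_bitmap & BIT(i)) != 0)) {
                        atomic_fetch_or(&cpu_pending_ipi[i], IPI_SCHED);
                        raise_hw_ipi(i);
                }
        }
}

static void sched_ipi_handler(unsigned int self)
{
        /* Atomically consume every pending IPI reason for this CPU. */
        unsigned long pending = atomic_exchange(&cpu_pending_ipi[self], 0);

        if (pending & IPI_SCHED) {
                printf("cpu%u: reschedule requested\n", self);
        }
}

int main(void)
{
        sched_directed_ipi(BIT(1) | BIT(2), 0);
        sched_ipi_handler(1);
        sched_ipi_handler(2);
        return 0;
}
```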
fpu.c
     38  buf[3] = '0' + _current_cpu->id;  in DBG()
    150  if (i == _current_cpu->id) {  in flush_owned_fpu()
coredump.c
     74  .id = COREDUMP_ARCH_HDR_ID,  in arch_coredump_info_dump()
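Every coredump.c hit in this listing sets `.id = COREDUMP_ARCH_HDR_ID` on an architecture header that precedes the register payload. The sketch below shows that tagged-header pattern; the struct layout, the header value, and coredump_buffer_output() are simplified stand-ins, not the exact Zephyr coredump API.

```c
/*
 * Sketch of the tagged arch-header block written by the various
 * arch_coredump_info_dump() implementations in this listing.
 * Field names, the header value and coredump_buffer_output() are
 * illustrative stand-ins, not the exact Zephyr coredump API.
 */
#include <stdint.h>
#include <stdio.h>

#define COREDUMP_ARCH_HDR_ID 'A'       /* assumed block identifier */

struct arch_hdr {
        char     id;                   /* block tag, e.g. COREDUMP_ARCH_HDR_ID */
        uint16_t hdr_version;          /* layout version of the payload */
        uint16_t num_bytes;            /* size of the register payload */
};

struct arch_regs {                     /* illustrative register block */
        uint32_t pc;
        uint32_t sp;
};

/* Stand-in for the real coredump output backend. */
static void coredump_buffer_output(const void *buf, size_t len)
{
        fwrite(buf, 1, len, stdout);
}

static void arch_coredump_info_dump(const struct arch_regs *regs)
{
        struct arch_hdr hdr = {
                .id = COREDUMP_ARCH_HDR_ID,
                .hdr_version = 1,
                .num_bytes = sizeof(*regs),
        };

        coredump_buffer_output(&hdr, sizeof(hdr));
        coredump_buffer_output(regs, sizeof(*regs));
}

int main(void)
{
        struct arch_regs regs = { .pc = 0x1000, .sp = 0x2000 };

        arch_coredump_info_dump(&regs);
        return 0;
}
```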
thread.c
    235  _kernel.cpus[0].id = 0;  in z_riscv_switch_to_main_no_multithreading()

/arch/arc/core/

thread.c
    299  _kernel.cpus[0].id = 0;  in z_arc_switch_to_main_no_multithreading()
    342  unsigned int id;  in arc_vpx_lock()  (local)
    346  id = _current_cpu->id;  in arc_vpx_lock()
    348  __ASSERT(!arch_is_in_isr() && (_current->base.cpu_mask == BIT(id)), "");  in arc_vpx_lock()
    357  return k_sem_take(&vpx_sem[id], timeout);  in arc_vpx_lock()
    363  unsigned int id;  in arc_vpx_unlock()  (local)
    369  id = _current_cpu->id;  in arc_vpx_unlock()
    378  k_sem_give(&vpx_sem[id]);  in arc_vpx_unlock()
    381  void arc_vpx_unlock_force(unsigned int id)  in arc_vpx_unlock_force()  (argument)
    383  __ASSERT(id < CONFIG_MP_MAX_NUM_CPUS, "");  in arc_vpx_unlock_force()
    [all …]
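The arc_vpx_lock()/arc_vpx_unlock() matches show a per-CPU lock: the caller must be pinned to exactly one CPU (cpu_mask == BIT(id)), and a per-CPU semaphore guards that CPU's VPX unit. A simplified sketch of the idea follows; the flag array stands in for the k_sem semaphores used by the real ARC code.

```c
/*
 * Simplified sketch of the per-CPU lock pattern in arc_vpx_lock()/unlock():
 * the caller must be pinned to exactly one CPU, and a per-CPU lock guards
 * that CPU's VPX unit. The flag array below is a stand-in for the k_sem
 * semaphores used by the real code.
 */
#include <assert.h>
#include <stdbool.h>
#include <stdio.h>

#define NUM_CPUS 2
#define BIT(n)   (1U << (n))

static bool vpx_in_use[NUM_CPUS];      /* stand-in for vpx_sem[] */

static int vpx_lock(unsigned int cpu_id, unsigned int thread_cpu_mask)
{
        /*
         * The real code asserts the thread is pinned to this CPU only,
         * so the lock it takes matches the VPX unit it will use.
         */
        assert(thread_cpu_mask == BIT(cpu_id));

        if (vpx_in_use[cpu_id]) {
                return -1;             /* the real code blocks with a timeout */
        }
        vpx_in_use[cpu_id] = true;
        return 0;
}

static void vpx_unlock(unsigned int cpu_id)
{
        vpx_in_use[cpu_id] = false;
}

int main(void)
{
        if (vpx_lock(0, BIT(0)) == 0) {
                printf("VPX on cpu0 locked\n");
                vpx_unlock(0);
        }
        return 0;
}
```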
irq_manage.c
    158  #define IRQ_NUM_TO_IDU_NUM(id) ((id) - ARC_CONNECT_IDU_IRQ_START)  (argument)
    159  #define IRQ_IS_COMMON(id) ((id) >= ARC_CONNECT_IDU_IRQ_START)  (argument)
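These two macros translate a global IRQ number into a zero-based IDU (interrupt distribution unit) index and test whether an IRQ is routed through the IDU at all. A small usage sketch follows; the ARC_CONNECT_IDU_IRQ_START value is assumed here purely for illustration.

```c
/*
 * Usage sketch for the two macros above. The ARC_CONNECT_IDU_IRQ_START
 * value is assumed, chosen only for the example.
 */
#include <stdio.h>

#define ARC_CONNECT_IDU_IRQ_START 24   /* assumed value */

#define IRQ_NUM_TO_IDU_NUM(id) ((id) - ARC_CONNECT_IDU_IRQ_START)
#define IRQ_IS_COMMON(id)      ((id) >= ARC_CONNECT_IDU_IRQ_START)

int main(void)
{
        unsigned int irq = 30;

        if (IRQ_IS_COMMON(irq)) {
                /*
                 * Shared ("common") interrupts are programmed through the
                 * IDU with a zero-based index, not the global IRQ number.
                 */
                printf("IRQ %u -> IDU line %u\n", irq, IRQ_NUM_TO_IDU_NUM(irq));
        }
        return 0;
}
```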
irq_offload.c
     28  #define CURR_CPU (IS_ENABLED(CONFIG_SMP) ? arch_curr_cpu()->id : 0)

/arch/arm/core/mmu/

arm_mmu_priv.h
     89  uint32_t id : 2; /* [00] */  (member)
    105  uint32_t id : 2; /* [00] */  (member)
    114  uint32_t id : 2; /* [00] */  (member)
    126  uint32_t id : 2; /* [00] */  (member)
    137  uint32_t id : 2; /* [00] */  (member)
    150  uint32_t id : 2; /* [00] */  (member)
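Every descriptor layout in arm_mmu_priv.h starts with the same 2-bit `id` field, so the type of an entry can be read through a common view before the remaining bits are interpreted, which is what the arm_mmu.c matches below do with `undefined.id`. A self-contained sketch of that union-of-bitfields pattern follows; the id values and the field widths other than `id` are illustrative, not the real ARM short-descriptor format.

```c
/*
 * Sketch of the "2-bit type tag common to every descriptor layout"
 * pattern. The id values and the field widths other than id are
 * illustrative, not the real ARM short-descriptor format.
 */
#include <stdint.h>
#include <stdio.h>

#define PTE_ID_INVALID 0u
#define PTE_ID_L2_PT   1u
#define PTE_ID_SECTION 2u

union l1_entry {
        uint32_t word;
        struct {
                uint32_t id   : 2;     /* [01:00] type tag, common to all views */
                uint32_t rsvd : 30;
        } undefined;
        struct {
                uint32_t id    : 2;    /* [01:00] */
                uint32_t attrs : 10;   /* illustrative attribute bits */
                uint32_t base  : 20;   /* illustrative base address bits */
        } section;
};

int main(void)
{
        union l1_entry e = { .word = 0 };

        e.section.id   = PTE_ID_SECTION;
        e.section.base = 0x800;

        /* The tag can always be read back through the common view. */
        if (e.undefined.id == PTE_ID_SECTION) {
                printf("section entry, base 0x%x\n",
                       (unsigned int)e.section.base << 12);
        }
        return 0;
}
```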
arm_mmu.c
    414  __ASSERT(l1_page_table.entries[l1_index].undefined.id == ARM_MMU_PTE_ID_INVALID,  in arm_mmu_l1_map_section()
    416  l1_page_table.entries[l1_index].undefined.id,  in arm_mmu_l1_map_section()
    419  l1_page_table.entries[l1_index].l1_section_1m.id =  in arm_mmu_l1_map_section()
    487  perms_attrs.id_mask = (l1_page_table.entries[l1_index].l1_section_1m.id ==  in arm_mmu_remap_l1_section_to_l2_table()
    521  l1_page_table.entries[l1_index].l2_page_table_ref.id = ARM_MMU_PTE_ID_L2_PT;  in arm_mmu_remap_l1_section_to_l2_table()
    572  if (l1_page_table.entries[l1_index].undefined.id == ARM_MMU_PTE_ID_INVALID ||  in arm_mmu_l2_map_page()
    602  l1_page_table.entries[l1_index].l2_page_table_ref.id = ARM_MMU_PTE_ID_L2_PT;  in arm_mmu_l2_map_page()
    612  } else if (l1_page_table.entries[l1_index].undefined.id == ARM_MMU_PTE_ID_L2_PT) {  in arm_mmu_l2_map_page()
    656  l2_page_table->entries[l2_index].l2_page_4k.id =  in arm_mmu_l2_map_page()
    688  if (l1_page_table.entries[l1_index].undefined.id != ARM_MMU_PTE_ID_L2_PT) {  in arm_mmu_l2_unmap_page()
    [all …]

/arch/x86/core/intel64/

irq_offload.c
     26  uint8_t cpu_id = _current_cpu->id;  in dispatcher()
     36  uint8_t cpu_id = _current_cpu->id;  in arch_irq_offload()

coredump.c
     53  .id = COREDUMP_ARCH_HDR_ID,  in arch_coredump_info_dump()

/arch/arm64/core/

macro_priv.inc
     15  * Get CPU id
     25  * Get CPU logic id by looking up cpu_node_list
     28  * xreg1: logic id (0 ~ DT_CHILD_NUM_STATUS_OKAY(DT_PATH(cpus)) - 1)
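The macro_priv.inc comments describe deriving a logical CPU id by looking up the hardware id in cpu_node_list. The real code does this in assembly; a C rendering of the same lookup is sketched below, with the table contents and hardware id values made up for the example.

```c
/*
 * C rendering of the lookup the assembly macro performs: find the
 * hardware CPU id in cpu_node_list and return its index as the
 * logical id. Table contents and values are made up for the example.
 */
#include <stdint.h>
#include <stdio.h>

#define NUM_CPUS 4

/* Assumed per-CPU hardware ids (e.g. MPIDR affinity values). */
static const uint64_t cpu_node_list[NUM_CPUS] = { 0x0, 0x1, 0x100, 0x101 };

static int hw_id_to_logical_id(uint64_t hw_id)
{
        for (int i = 0; i < NUM_CPUS; i++) {
                if (cpu_node_list[i] == hw_id) {
                        return i;      /* logical id: 0 .. NUM_CPUS - 1 */
                }
        }
        return -1;                     /* unknown CPU */
}

int main(void)
{
        printf("hw id 0x100 -> logical id %d\n", hw_id_to_logical_id(0x100));
        return 0;
}
```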
fpu.c
     37  buf[3] = '0' + _current_cpu->id;  in DBG()
    109  if (i == _current_cpu->id) {  in flush_owned_fpu()

coredump.c
     58  .id = COREDUMP_ARCH_HDR_ID,  in arch_coredump_info_dump()

/arch/arm/core/cortex_a_r/

macro_priv.inc
     12  * Get CPU id
     22  * Get CPU logic id by looking up cpu_node_list
     25  * reg1: logic id (0 ~ CONFIG_MP_MAX_NUM_CPUS - 1)

/arch/xtensa/core/

irq_offload.c
     19  uint8_t cpu_id = _current_cpu->id;  in irq_offload_isr()
     29  uint8_t cpu_id = _current_cpu->id;  in arch_irq_offload()

vector_handlers.c
     74  cpu_id = arch_curr_cpu()->id;  in xtensa_is_outside_stack_bounds()
    246  arch_curr_cpu()->id, cause,  in print_fatal_exception()
    600  arch_sched_directed_ipi(BIT(cpu->id));  in xtensa_excint1_c()

/arch/x86/core/ia32/

intstub.S
    418  .macro __INT_STUB_NUM id
    419  z_dynamic_irq_stub_\id:
    421  .macro INT_STUB_NUM id
    422  __INT_STUB_NUM %id

coredump.c
     41  .id = COREDUMP_ARCH_HDR_ID,  in arch_coredump_info_dump()

/arch/x86/core/

pcie.c
    294  union acpi_dmar_id id;  in arch_pcie_msi_vector_connect()  (local)
    300  id.bits.bus = PCIE_BDF_TO_BUS(vector->bdf);  in arch_pcie_msi_vector_connect()
    301  id.bits.device = PCIE_BDF_TO_DEV(vector->bdf);  in arch_pcie_msi_vector_connect()
    302  id.bits.function = PCIE_BDF_TO_FUNC(vector->bdf);  in arch_pcie_msi_vector_connect()
    305  flags, id.raw);  in arch_pcie_msi_vector_connect()
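The pcie.c matches decompose a PCIe BDF into the bus/device/function bitfields of a DMAR source-id union and then pass the packed `raw` value on. A self-contained sketch of that packing follows; the PCIE_BDF_TO_* helpers and the BDF bit positions are assumed for illustration.

```c
/*
 * Sketch of packing a PCIe bus/device/function triple into a 16-bit
 * source id through a union of bitfields, mirroring the pattern above.
 * The PCIE_BDF_TO_* helpers and the BDF bit positions are assumed.
 */
#include <stdint.h>
#include <stdio.h>

union dmar_source_id {
        struct {
                uint16_t function : 3;
                uint16_t device   : 5;
                uint16_t bus      : 8;
        } bits;
        uint16_t raw;
};

/* Assumed BDF encoding: bus [23:16], device [15:11], function [10:8]. */
#define PCIE_BDF_TO_BUS(bdf)  (((bdf) >> 16) & 0xFFu)
#define PCIE_BDF_TO_DEV(bdf)  (((bdf) >> 11) & 0x1Fu)
#define PCIE_BDF_TO_FUNC(bdf) (((bdf) >> 8) & 0x07u)

int main(void)
{
        uint32_t bdf = (0x02u << 16) | (0x1Fu << 11) | (0x1u << 8); /* 02:1f.1 */
        union dmar_source_id id;

        id.bits.bus      = PCIE_BDF_TO_BUS(bdf);
        id.bits.device   = PCIE_BDF_TO_DEV(bdf);
        id.bits.function = PCIE_BDF_TO_FUNC(bdf);

        printf("source id raw = 0x%04x\n", (unsigned int)id.raw);
        return 0;
}
```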
/arch/arm/core/

irq_offload.c
     39  : [id] "i" (_SVC_CALL_IRQ_OFFLOAD)  in arch_irq_offload()

/arch/arm64/core/cortex_r/

arm_mpu.c
    339  (void)flush_dynamic_regions_to_mpu(sys_dyn_regions[arch_curr_cpu()->id],  in z_arm64_mm_init()
    340  sys_dyn_regions_num[arch_curr_cpu()->id]);  in z_arm64_mm_init()
    453  int num = sys_dyn_regions_num[arch_curr_cpu()->id];  in dup_dynamic_regions()
    461  dst[i] = sys_dyn_regions[arch_curr_cpu()->id][i];  in dup_dynamic_regions()
    834  int cpuid = arch_curr_cpu()->id;  in z_arm64_swap_mem_domains()
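The arm_mpu.c matches index per-CPU tables of dynamic MPU regions by `arch_curr_cpu()->id`, both when flushing them to hardware and when duplicating them into a scratch buffer. A simplified sketch of that per-CPU bookkeeping follows; types and array sizes are illustrative.

```c
/*
 * Simplified sketch of the per-CPU dynamic-region bookkeeping: each CPU
 * keeps its own copy of the dynamic regions, indexed by its id. Types
 * and array sizes are illustrative.
 */
#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define NUM_CPUS        2
#define MAX_DYN_REGIONS 8

struct mpu_region {
        uintptr_t base;
        size_t size;
};

static struct mpu_region sys_dyn_regions[NUM_CPUS][MAX_DYN_REGIONS];
static int sys_dyn_regions_num[NUM_CPUS];

/* Copy the given CPU's dynamic regions into a scratch buffer. */
static int dup_dynamic_regions(struct mpu_region *dst, int max, int cpu_id)
{
        int num = sys_dyn_regions_num[cpu_id];

        if (num > max) {
                return -1;
        }
        memcpy(dst, sys_dyn_regions[cpu_id], (size_t)num * sizeof(*dst));
        return num;
}

int main(void)
{
        struct mpu_region copy[MAX_DYN_REGIONS];

        sys_dyn_regions[0][0] = (struct mpu_region){ .base = 0x20000000u, .size = 0x1000 };
        sys_dyn_regions_num[0] = 1;

        printf("copied %d region(s)\n",
               dup_dynamic_regions(copy, MAX_DYN_REGIONS, 0));
        return 0;
}
```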
/arch/arm/core/cortex_m/

coredump.c
     48  .id = COREDUMP_ARCH_HDR_ID,  in arch_coredump_info_dump()

/arch/arc/

CMakeLists.txt
     10  # See https://gcc.gnu.org/bugzilla/show_bug.cgi?id=63691

/arch/xtensa/include/

kernel_arch_func.h
    137  int32_t curr_cpu = _current_cpu->id;  in arch_cohere_stacks()