Searched refs:mmfr0 (Results 1 – 9 of 9) sorted by relevance
/linux/arch/arm64/include/asm/
cpufeature.h
     678  u64 mmfr0;                                              in system_supports_4kb_granule() local
     682  val = cpuid_feature_extract_unsigned_field(mmfr0,       in system_supports_4kb_granule()
     691  u64 mmfr0;                                              in system_supports_64kb_granule() local
     695  val = cpuid_feature_extract_unsigned_field(mmfr0,       in system_supports_64kb_granule()
     704  u64 mmfr0;                                              in system_supports_16kb_granule() local
     722  u64 mmfr0;                                              in system_supports_mixed_endian() local
    1026  u64 mmfr0;                                              in cpu_has_lpa2() local
    1029  mmfr0 = read_sysreg(id_aa64mmfr0_el1);                  in cpu_has_lpa2()
    1030  mmfr0 &= ~id_aa64mmfr0_override.mask;                   in cpu_has_lpa2()
    1031  mmfr0 |= id_aa64mmfr0_override.val;                     in cpu_has_lpa2()
    [all …]
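Every hit above follows the same pattern: read ID_AA64MMFR0_EL1, then pull a 4-bit field out of it with cpuid_feature_extract_unsigned_field() (cpu_has_lpa2() additionally folds the boot-time id_aa64mmfr0_override into the raw value first, lines 1029-1031). Below is a minimal userspace sketch of that field extraction, assuming the architectural TGRAN16/TGRAN64/TGRAN4 offsets; the helper name and the sample register value are invented for illustration.

/*
 * Standalone sketch, not kernel code: mirrors how the cpufeature.h
 * helpers extract 4-bit granule-support fields from ID_AA64MMFR0_EL1.
 * Field offsets follow the Arm ARM; the register value is made up.
 */
#include <stdint.h>
#include <stdio.h>

#define TGRAN16_SHIFT 20    /* 16KB granule: 0x0 = no, 0x1 = yes, 0x2 = yes + 52-bit */
#define TGRAN64_SHIFT 24    /* 64KB granule: 0x0 = yes, 0xf = no */
#define TGRAN4_SHIFT  28    /*  4KB granule: 0x0 = yes, 0x1 = yes + 52-bit, 0xf = no */

static unsigned int extract_unsigned_field(uint64_t reg, int shift)
{
    return (unsigned int)((reg >> shift) & 0xf);    /* ID register fields are 4 bits wide */
}

int main(void)
{
    uint64_t mmfr0 = 0x0000000000100022ULL;    /* hypothetical sample value */

    printf("TGRAN4  = 0x%x\n", extract_unsigned_field(mmfr0, TGRAN4_SHIFT));
    printf("TGRAN64 = 0x%x\n", extract_unsigned_field(mmfr0, TGRAN64_SHIFT));
    printf("TGRAN16 = 0x%x\n", extract_unsigned_field(mmfr0, TGRAN16_SHIFT));
    return 0;
}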
kvm_pgtable.h
     40  static inline u64 kvm_get_parange(u64 mmfr0)               in kvm_get_parange() argument
     43  u64 parange = cpuid_feature_extract_unsigned_field(mmfr0,  in kvm_get_parange()
    503  u64 kvm_get_vtcr(u64 mmfr0, u64 mmfr1, u32 phys_shift);
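kvm_get_parange() extracts the PARANGE field (bits [3:0] of ID_AA64MMFR0_EL1), which encodes the physical address range the CPU implements. A hedged sketch of that encoding, in the spirit of the kernel's id_aa64mmfr0_parange_to_phys_shift(); the function name and the fallback for unknown encodings are illustrative.

#include <stdint.h>
#include <stdio.h>

/* Sketch: translate ID_AA64MMFR0_EL1.PARANGE (bits [3:0]) into PA bits. */
static unsigned int parange_to_phys_bits(uint64_t mmfr0)
{
    switch (mmfr0 & 0xf) {
    case 0x0: return 32;
    case 0x1: return 36;
    case 0x2: return 40;
    case 0x3: return 42;
    case 0x4: return 44;
    case 0x5: return 48;
    case 0x6: return 52;
    case 0x7: return 56;
    default:  return 48;    /* illustrative fallback, not the kernel's choice */
    }
}

int main(void)
{
    printf("PARANGE=0x5 -> %u PA bits\n", parange_to_phys_bits(0x5));
    return 0;
}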
/linux/arch/arm64/kvm/
reset.c
    285  u64 mmfr0;                                                in kvm_set_ipa_limit() local
    287  mmfr0 = read_sanitised_ftr_reg(SYS_ID_AA64MMFR0_EL1);     in kvm_set_ipa_limit()
    288  parange = cpuid_feature_extract_unsigned_field(mmfr0,     in kvm_set_ipa_limit()
    302  switch (cpuid_feature_extract_unsigned_field(mmfr0, ID_AA64MMFR0_EL1_TGRAN_2_SHIFT)) {  in kvm_set_ipa_limit()
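Line 302 switches on the stage-2 variant of the granule field (ID_AA64MMFR0_EL1_TGRAN_2_SHIFT resolves to the TGRANx_2 field for the running page size) to decide whether the host page size is usable for stage-2 tables. A sketch of that decision for the 4KB case, assuming the architectural encoding (0x0 = same as the stage-1 field, 0x1 = unsupported, 0x2/0x3 = supported, 0x3 adding 52-bit); the helper is invented.

#include <stdbool.h>
#include <stdint.h>

#define TGRAN4_2_SHIFT 40    /* stage-2 4KB granule field, bits [43:40] (assumed) */

/* Sketch only: does the stage-2 field say 4KB paging is usable? */
static bool stage2_supports_4k(uint64_t mmfr0, bool stage1_ok)
{
    switch ((mmfr0 >> TGRAN4_2_SHIFT) & 0xf) {
    case 0x0: return stage1_ok;    /* "as stage 1" encoding */
    case 0x1: return false;        /* explicitly unsupported at stage 2 */
    case 0x2:
    case 0x3: return true;         /* supported (0x3: with 52-bit addresses) */
    default:  return false;        /* treat unknown encodings as unsupported */
    }
}

int main(void)
{
    uint64_t mmfr0 = 0x2ULL << TGRAN4_2_SHIFT;    /* hypothetical value */

    return stage2_supports_4k(mmfr0, true) ? 0 : 1;
}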
mmu.c
    869  u64 mmfr0, mmfr1;                                         in kvm_init_ipa_range() local
    891  mmfr0 = read_sanitised_ftr_reg(SYS_ID_AA64MMFR0_EL1);     in kvm_init_ipa_range()
    893  mmu->vtcr = kvm_get_vtcr(mmfr0, mmfr1, phys_shift);       in kvm_init_ipa_range()
arm.c
    2013  u64 mmfr0 = read_sanitised_ftr_reg(SYS_ID_AA64MMFR0_EL1);     in cpu_prepare_hyp_mode() local
    2037  tcr |= FIELD_PREP(TCR_EL2_PS_MASK, kvm_get_parange(mmfr0));   in cpu_prepare_hyp_mode()
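FIELD_PREP() (linux/bitfield.h) shifts a value into the bit position described by a mask; line 2037 uses it to place the parange encoding into the PS field of TCR_EL2. A standalone sketch of the same masking idea, assuming PS occupies bits [18:16]; the field_prep macro below is a simplified stand-in, not the kernel's implementation.

#include <stdint.h>
#include <stdio.h>

#define TCR_EL2_PS_MASK  (0x7ULL << 16)    /* TCR_EL2.PS, bits [18:16] (assumed) */

/* Simplified stand-in for FIELD_PREP(): shift the value under the mask. */
#define field_prep(mask, val) (((uint64_t)(val) << __builtin_ctzll(mask)) & (mask))

int main(void)
{
    uint64_t tcr = 0;
    unsigned int parange = 0x5;    /* hypothetical: 48-bit PA encoding */

    tcr |= field_prep(TCR_EL2_PS_MASK, parange);
    printf("tcr = 0x%llx\n", (unsigned long long)tcr);
    return 0;
}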
/linux/arch/arm/kernel/
setup.c
    254  unsigned int mmfr0 = read_cpuid_ext(CPUID_EXT_MMFR0);     in __get_cpu_architecture() local
    255  if ((mmfr0 & 0x0000000f) >= 0x00000003 ||                 in __get_cpu_architecture()
    256      (mmfr0 & 0x000000f0) >= 0x00000030)                   in __get_cpu_architecture()
    258  else if ((mmfr0 & 0x0000000f) == 0x00000002 ||            in __get_cpu_architecture()
    259      (mmfr0 & 0x000000f0) == 0x00000020)                   in __get_cpu_architecture()
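On 32-bit ARM, __get_cpu_architecture() reads ID_MMFR0 and looks at its VMSA (bits [3:0]) and PMSA (bits [7:4]) fields: a value of 3 or higher in either means a v7-class MMU/MPU, 2 means v6. A small sketch of that decision; the helper name and sample value are made up.

#include <stdint.h>
#include <stdio.h>

/* Sketch: classify the CPU from ID_MMFR0's VMSA/PMSA support fields. */
static const char *arch_from_mmfr0(uint32_t mmfr0)
{
    uint32_t vmsa = mmfr0 & 0x0000000f;    /* VMSA support, bits [3:0] */
    uint32_t pmsa = mmfr0 & 0x000000f0;    /* PMSA support, bits [7:4] */

    if (vmsa >= 0x00000003 || pmsa >= 0x00000030)
        return "ARMv7";
    else if (vmsa == 0x00000002 || pmsa == 0x00000020)
        return "ARMv6";
    return "pre-v6 or unknown";
}

int main(void)
{
    printf("%s\n", arch_from_mmfr0(0x00100003));    /* hypothetical value */
    return 0;
}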
/linux/arch/arm64/mm/
init.c
    282  u64 mmfr0 = read_cpuid(ID_AA64MMFR0_EL1);                 in arm64_memblock_init() local
    284  mmfr0, ID_AA64MMFR0_EL1_PARANGE_SHIFT);                   in arm64_memblock_init()
/linux/arch/arm64/kernel/
cpufeature.c
    1837  static bool has_lpa2_at_stage1(u64 mmfr0)                in has_lpa2_at_stage1() argument
    1841  tgran = cpuid_feature_extract_unsigned_field(mmfr0,      in has_lpa2_at_stage1()
    1846  static bool has_lpa2_at_stage2(u64 mmfr0)                in has_lpa2_at_stage2() argument
    1850  tgran = cpuid_feature_extract_unsigned_field(mmfr0,      in has_lpa2_at_stage2()
    1857  u64 mmfr0;                                               in has_lpa2() local
    1859  mmfr0 = read_sanitised_ftr_reg(SYS_ID_AA64MMFR0_EL1);    in has_lpa2()
    1860  return has_lpa2_at_stage1(mmfr0) && has_lpa2_at_stage2(mmfr0);  in has_lpa2()
    3370  u64 safe_mmfr1, mmfr0, mmfr1;                            in verify_hyp_capabilities() local
    3378  mmfr0 = read_cpuid(ID_AA64MMFR0_EL1);                    in verify_hyp_capabilities()
    3390  parange = cpuid_feature_extract_unsigned_field(mmfr0,    in verify_hyp_capabilities()
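has_lpa2() reports LPA2 only when both the stage-1 and stage-2 granule fields advertise 52-bit support for the running page size. A sketch of that combined check for the 4KB case, assuming the architectural encodings (TGRAN4 == 0x1 and TGRAN4_2 == 0x3 mean 52-bit capable); the names and sample value are illustrative.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define TGRAN4_SHIFT    28    /* stage-1 4KB granule, bits [31:28] */
#define TGRAN4_2_SHIFT  40    /* stage-2 4KB granule, bits [43:40] */

static bool lpa2_at_stage1(uint64_t mmfr0)
{
    return ((mmfr0 >> TGRAN4_SHIFT) & 0xf) == 0x1;      /* 4KB with 52-bit VA/PA */
}

static bool lpa2_at_stage2(uint64_t mmfr0)
{
    return ((mmfr0 >> TGRAN4_2_SHIFT) & 0xf) == 0x3;    /* stage-2 4KB with 52-bit */
}

int main(void)
{
    uint64_t mmfr0 = (0x3ULL << TGRAN4_2_SHIFT) | (0x1ULL << TGRAN4_SHIFT);

    printf("LPA2: %s\n",
           lpa2_at_stage1(mmfr0) && lpa2_at_stage2(mmfr0) ? "yes" : "no");
    return 0;
}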
/linux/arch/arm64/kvm/hyp/
pgtable.c
    592  u64 kvm_get_vtcr(u64 mmfr0, u64 mmfr1, u32 phys_shift)    in kvm_get_vtcr() argument
    597  vtcr |= kvm_get_parange(mmfr0) << VTCR_EL2_PS_SHIFT;      in kvm_get_vtcr()
Completed in 32 milliseconds