1 /*
2  * Copyright (c) 2018-2022, ARM Limited and Contributors. All rights reserved.
3  *
4  * SPDX-License-Identifier: BSD-3-Clause
5  */
6 
7 #include <common/debug.h>
8 #include <common/runtime_svc.h>
9 #include <lib/cpus/errata_report.h>
10 #include <lib/cpus/wa_cve_2017_5715.h>
11 #include <lib/cpus/wa_cve_2018_3639.h>
12 #include <lib/cpus/wa_cve_2022_23960.h>
13 #include <lib/smccc.h>
14 #include <services/arm_arch_svc.h>
15 #include <smccc_helpers.h>
16 #include <plat/common/platform.h>
17 
smccc_version(void)18 static int32_t smccc_version(void)
19 {
20 	return MAKE_SMCCC_VERSION(SMCCC_MAJOR_VERSION, SMCCC_MINOR_VERSION);
21 }
22 
/*
 * SMCCC_ARCH_FEATURES handler: report whether the architectural function
 * identified by arg1 is implemented, and for the CVE workaround IDs,
 * whether mitigation is required on the calling PE.
 *
 * Return values follow the SMCCC_ARCH_FEATURES convention:
 *   SMC_ARCH_CALL_SUCCESS (0) - function implemented / mitigation required;
 *   1                         - implemented but mitigation not required on
 *                               this PE (workaround queries);
 *   SMC_ARCH_CALL_NOT_REQUIRED - implemented, no mitigation needed at all;
 *   SMC_UNK                   - function not implemented.
 */
static int32_t smccc_arch_features(u_register_t arg1)
{
	switch (arg1) {
	case SMCCC_VERSION:
	case SMCCC_ARCH_FEATURES:
		return SMC_ARCH_CALL_SUCCESS;
	case SMCCC_ARCH_SOC_ID:
		/* SOC_ID support is a platform decision; delegate the query. */
		return plat_is_smccc_feature_available(arg1);
#if WORKAROUND_CVE_2017_5715
	case SMCCC_ARCH_WORKAROUND_1:
		/* 1: this PE does not need the mitigation; 0: it does (or
		 * the erratum status could not be determined). */
		if (check_wa_cve_2017_5715() == ERRATA_NOT_APPLIES)
			return 1;
		return 0; /* ERRATA_APPLIES || ERRATA_MISSING */
#endif

#if WORKAROUND_CVE_2018_3639
	case SMCCC_ARCH_WORKAROUND_2: {
#if DYNAMIC_WORKAROUND_CVE_2018_3639
		unsigned long long ssbs;

		/*
		 * Firmware doesn't have to carry out dynamic workaround if the
		 * PE implements architectural Speculation Store Bypass Safe
		 * (SSBS) feature.
		 */
		ssbs = (read_id_aa64pfr1_el1() >> ID_AA64PFR1_EL1_SSBS_SHIFT) &
			ID_AA64PFR1_EL1_SSBS_MASK;

		/*
		 * If architectural SSBS is available on this PE, no firmware
		 * mitigation via SMCCC_ARCH_WORKAROUND_2 is required.
		 */
		if (ssbs != SSBS_UNAVAILABLE)
			return 1;

		/*
		 * On a platform where at least one CPU requires
		 * dynamic mitigation but others are either unaffected
		 * or permanently mitigated, report the latter as not
		 * needing dynamic mitigation.
		 */
		if (wa_cve_2018_3639_get_disable_ptr() == NULL)
			return 1;
		/*
		 * If we get here, this CPU requires dynamic mitigation
		 * so report it as such.
		 */
		return 0;
#else
		/* Either the CPUs are unaffected or permanently mitigated */
		return SMC_ARCH_CALL_NOT_REQUIRED;
#endif
	}
#endif

#if (WORKAROUND_CVE_2022_23960 || WORKAROUND_CVE_2017_5715)
	case SMCCC_ARCH_WORKAROUND_3:
		/*
		 * SMCCC_ARCH_WORKAROUND_3 should also take into account
		 * CVE-2017-5715 since this SMC can be used instead of
		 * SMCCC_ARCH_WORKAROUND_1.
		 */
		if ((check_smccc_arch_wa3_applies() == ERRATA_NOT_APPLIES) &&
		    (check_wa_cve_2017_5715() == ERRATA_NOT_APPLIES)) {
			return 1;
		}
		return 0; /* ERRATA_APPLIES || ERRATA_MISSING */
#endif

	/* Fallthrough */

	default:
		/* Unrecognized or unimplemented function ID. */
		return SMC_UNK;
	}
}
98 
99 /* return soc revision or soc version on success otherwise
100  * return invalid parameter */
smccc_arch_id(u_register_t arg1)101 static int32_t smccc_arch_id(u_register_t arg1)
102 {
103 	if (arg1 == SMCCC_GET_SOC_REVISION) {
104 		return plat_get_soc_revision();
105 	}
106 	if (arg1 == SMCCC_GET_SOC_VERSION) {
107 		return plat_get_soc_version();
108 	}
109 	return SMC_ARCH_CALL_INVAL_PARAM;
110 }
111 
112 /*
113  * Top-level Arm Architectural Service SMC handler.
114  */
/*
 * Top-level Arm Architectural Service SMC handler.
 *
 * Dispatches on smc_fid and returns results to the caller's context via
 * the SMC_RET* macros on 'handle'. x1 carries the sub-function argument
 * for SMCCC_ARCH_FEATURES and SMCCC_ARCH_SOC_ID; x2-x4, cookie and flags
 * are unused by the cases below (kept to match the runtime service
 * handler prototype).
 */
static uintptr_t arm_arch_svc_smc_handler(uint32_t smc_fid,
	u_register_t x1,
	u_register_t x2,
	u_register_t x3,
	u_register_t x4,
	void *cookie,
	void *handle,
	u_register_t flags)
{
	switch (smc_fid) {
	case SMCCC_VERSION:
		SMC_RET1(handle, smccc_version());
	case SMCCC_ARCH_FEATURES:
		SMC_RET1(handle, smccc_arch_features(x1));
	case SMCCC_ARCH_SOC_ID:
		SMC_RET1(handle, smccc_arch_id(x1));
#if WORKAROUND_CVE_2017_5715
	case SMCCC_ARCH_WORKAROUND_1:
		/*
		 * The workaround has already been applied on affected PEs
		 * during entry to EL3. On unaffected PEs, this function
		 * has no effect.
		 */
		SMC_RET0(handle);
#endif
#if WORKAROUND_CVE_2018_3639
	case SMCCC_ARCH_WORKAROUND_2:
		/*
		 * The workaround has already been applied on affected PEs
		 * requiring dynamic mitigation during entry to EL3.
		 * On unaffected or statically mitigated PEs, this function
		 * has no effect.
		 */
		SMC_RET0(handle);
#endif
#if (WORKAROUND_CVE_2022_23960 || WORKAROUND_CVE_2017_5715)
	case SMCCC_ARCH_WORKAROUND_3:
		/*
		 * The workaround has already been applied on affected PEs
		 * during entry to EL3. On unaffected PEs, this function
		 * has no effect.
		 */
		SMC_RET0(handle);
#endif
	default:
		/* Unknown function ID in the Arm Architecture range. */
		WARN("Unimplemented Arm Architecture Service Call: 0x%x \n",
			smc_fid);
		SMC_RET1(handle, SMC_UNK);
	}
}
165 
/*
 * Register the Arm Architecture Service calls (function IDs in the
 * OEN_ARM_START..OEN_ARM_END range, fast calls only) as a runtime
 * service. No init hook is needed (NULL).
 */
DECLARE_RT_SVC(
		arm_arch_svc,
		OEN_ARM_START,
		OEN_ARM_END,
		SMC_TYPE_FAST,
		NULL,
		arm_arch_svc_smc_handler
);
175