@@ -52,6 +52,7 @@
#define EXCP_KERNEL_TRAP 9 /* Jumped to kernel code page. */
#define EXCP_STREX 10
#define EXCP_HVC 11 /* HyperVisor Call */
+#define EXCP_SMC 12 /* Secure Monitor Call */

#define ARMV7M_EXCP_RESET 1
#define ARMV7M_EXCP_NMI 2
@@ -477,6 +477,7 @@ void aarch64_cpu_do_interrupt(CPUState *cs)
    case EXCP_UDEF:
    case EXCP_SWI:
    case EXCP_HVC:
+    case EXCP_SMC:
        env->cp15.esr_el[new_el] = env->exception.syndrome;
        break;
    case EXCP_IRQ:
@@ -3333,6 +3333,12 @@ unsigned int arm_excp_target_el(CPUState *cs, unsigned int excp_idx)
    case EXCP_HVC:
        target_el = MAX(target_el, 2);
        break;
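+    /* SMC exceptions target EL3; HCR_EL2.TSC re-routes NS EL1 SMCs to EL2.  */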
+    case EXCP_SMC:
+        target_el = 3;
+        if (!secure && cur_el == 1 && (env->cp15.hcr_el2 & HCR_TSC)) {
+            target_el = 2;
+        }
+        break;
    }
    return target_el;
}
@@ -51,6 +51,7 @@ DEF_HELPER_3(exception_with_syndrome, void, env, i32, i32)
DEF_HELPER_1(wfi, void, env)
DEF_HELPER_1(wfe, void, env)
DEF_HELPER_2(hvc, void, env, i32)
+DEF_HELPER_2(smc, void, env, i32)
DEF_HELPER_3(cpsr_write, void, env, i32, i32)
DEF_HELPER_1(cpsr_read, i32, env)
@@ -54,6 +54,7 @@ static const char * const excnames[] = {
    [EXCP_KERNEL_TRAP] = "QEMU intercept of kernel commpage",
    [EXCP_STREX] = "QEMU intercept of STREX",
    [EXCP_HVC] = "Hypervisor Call",
+    [EXCP_SMC] = "Secure Monitor Call",
};

static inline void arm_log_exception(int idx)
@@ -210,6 +211,11 @@ static inline uint32_t syn_aa64_hvc(uint32_t imm16)
    return (EC_AA64_HVC << ARM_EL_EC_SHIFT) | ARM_EL_IL | (imm16 & 0xffff);
}

+static inline uint32_t syn_aa64_smc(uint32_t imm16)
+{
+    return (EC_AA64_SMC << ARM_EL_EC_SHIFT) | ARM_EL_IL | (imm16 & 0xffff);
+}
+
static inline uint32_t syn_aa32_svc(uint32_t imm16, bool is_thumb)
{
    return (EC_AA32_SVC << ARM_EL_EC_SHIFT) | (imm16 & 0xffff)
@@ -404,6 +404,37 @@ void HELPER(hvc)(CPUARMState *env, uint32_t syndrome)
    raise_exception(env, EXCP_HVC);
}

+void HELPER(smc)(CPUARMState *env, uint32_t syndrome)
+{
+    int cur_el = arm_current_pl(env);
+    /* FIXME: Use real secure state.  */
+    bool secure = false;
+    bool smd = env->cp15.scr_el3 & SCR_SMD;
+    /* On ARMv8 AArch32, SMD only applies to NS state.
+     * On ARMv7, SMD only applies to NS state and only if EL2 is available.
+     * For ARMv7 without EL2, we force SMD to zero so we don't need to
+     * re-check the EL2 condition here.
+     */
+    bool udef = is_a64(env) ? smd : (!secure && smd);
+
+    /* In NS EL1, HCR-controlled routing to EL2 has priority over SMD.  */
+    if (!secure && cur_el == 1 && (env->cp15.hcr_el2 & HCR_TSC)) {
+        /* When routing the SMC to EL2, the trap is taken at the SMC insn,
+         * so roll the PC back to point at it.
+         */
+        env->pc -= 4;
+        udef = false;
+    }
+
+    /* We've already checked that EL3 exists at translation time.  */
+    if (udef) {
+        /* The UNDEF is taken on the SMC itself, so roll the PC back to
+         * the SMC insn.
+         */
+        env->pc -= 4;
+        env->exception.syndrome = syn_uncategorized();
+        raise_exception(env, EXCP_UDEF);
+    }
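+
+    /* Otherwise take the SMC exception; arm_excp_target_el() routes it to
+     * EL3, or to EL2 when HCR_EL2.TSC traps NS EL1 calls.
+     */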
+    env->exception.syndrome = syndrome;
+    raise_exception(env, EXCP_SMC);
+}
+
void HELPER(exception_return)(CPUARMState *env)
{
    int cur_el = arm_current_pl(env);
@@ -1456,6 +1456,16 @@ static void disas_exc(DisasContext *s, uint32_t insn)
            gen_helper_hvc(cpu_env, tmp);
            tcg_temp_free_i32(tmp);
            break;
+        case 3:
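+            /* SMC; UNDEFINED if EL3 is not implemented or when executed at EL0.  */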
+            if (!arm_dc_feature(s, ARM_FEATURE_EL3) || s->current_pl == 0) {
+                unallocated_encoding(s);
+                break;
+            }
+            tmp = tcg_const_i32(syn_aa64_smc(imm16));
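+            /* PC currently points at the insn after the SMC; the helper rolls
+             * it back by 4 when the call is trapped to EL2 or UNDEFs.
+             */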
+            gen_a64_set_pc_im(s->pc);
+            gen_helper_smc(cpu_env, tmp);
+            tcg_temp_free_i32(tmp);
+            break;
        default:
            unallocated_encoding(s);
            break;