@@ -53,6 +53,7 @@
#define EXCP_STREX 10
#define EXCP_HVC 11 /* HyperVisor Call */
#define EXCP_HYP_TRAP 12
+#define EXCP_SMC 13 /* Secure Monitor Call */
#define ARMV7M_EXCP_RESET 1
#define ARMV7M_EXCP_NMI 2
@@ -478,6 +478,7 @@ void aarch64_cpu_do_interrupt(CPUState *cs)
case EXCP_SWI:
case EXCP_HVC:
case EXCP_HYP_TRAP:
+ case EXCP_SMC:
env->cp15.esr_el[new_el] = env->exception.syndrome;
break;
case EXCP_IRQ:
@@ -3667,6 +3667,9 @@ unsigned int arm_excp_target_el(CPUState *cs, unsigned int excp_idx)
case EXCP_HYP_TRAP:
target_el = 2;
break;
+ case EXCP_SMC:
+ target_el = 3;
+ break;
default:
target_el = MAX(cur_el, 1);
break;
@@ -51,6 +51,7 @@ DEF_HELPER_3(exception_with_syndrome, void, env, i32, i32)
DEF_HELPER_1(wfi, void, env)
DEF_HELPER_1(wfe, void, env)
DEF_HELPER_1(pre_hvc, void, env)
+DEF_HELPER_2(pre_smc, void, env, i32)
DEF_HELPER_3(cpsr_write, void, env, i32, i32)
DEF_HELPER_1(cpsr_read, i32, env)
@@ -55,6 +55,7 @@ static const char * const excnames[] = {
[EXCP_STREX] = "QEMU intercept of STREX",
[EXCP_HVC] = "Hypervisor Call",
[EXCP_HYP_TRAP] = "Hypervisor Trap",
+ [EXCP_SMC] = "Secure Monitor Call",
};
static inline void arm_log_exception(int idx)
@@ -222,6 +223,11 @@ static inline uint32_t syn_aa64_hvc(uint32_t imm16)
return (EC_AA64_HVC << ARM_EL_EC_SHIFT) | ARM_EL_IL | (imm16 & 0xffff);
}
+static inline uint32_t syn_aa64_smc(uint32_t imm16)
+{
+ return (EC_AA64_SMC << ARM_EL_EC_SHIFT) | ARM_EL_IL | (imm16 & 0xffff);
+}
+
static inline uint32_t syn_aa32_svc(uint32_t imm16, bool is_thumb)
{
return (EC_AA32_SVC << ARM_EL_EC_SHIFT) | (imm16 & 0xffff)
@@ -405,6 +405,32 @@ void HELPER(pre_hvc)(CPUARMState *env)
}
}
+void HELPER(pre_smc)(CPUARMState *env, uint32_t syndrome)
+{
+ int cur_el = arm_current_pl(env);
+ /* FIXME: Use real secure state. */
+ bool secure = false;
+ bool smd = env->cp15.scr_el3 & SCR_SMD;
+ /* On ARMv8 AArch32, SMD only applies to NS state.
+ * On ARMv7, SMD only applies to NS state, and only if EL2 is available.
+ * On ARMv7 without EL2, we force SMD to zero so that we don't need to
+ * re-check the EL2 condition here.
+ */
+ bool undef = is_a64(env) ? smd : (!secure && smd);
+
+ /* In NS EL1, HCR controlled routing to EL2 has priority over SMD. */
+ if (!secure && cur_el == 1 && (env->cp15.hcr_el2 & HCR_TSC)) {
+ env->exception.syndrome = syndrome;
+ raise_exception(env, EXCP_HYP_TRAP);
+ }
+
+ /* We've already checked that EL3 exists at translation time. */
+ if (undef) {
+ env->exception.syndrome = syn_uncategorized();
+ raise_exception(env, EXCP_UDEF);
+ }
+}
+
void HELPER(exception_return)(CPUARMState *env)
{
int cur_el = arm_current_pl(env);
@@ -1470,6 +1470,7 @@ static void disas_exc(DisasContext *s, uint32_t insn)
int opc = extract32(insn, 21, 3);
int op2_ll = extract32(insn, 0, 5);
int imm16 = extract32(insn, 5, 16);
+ TCGv_i32 tmp;
switch (opc) {
case 0:
@@ -1496,6 +1497,22 @@ static void disas_exc(DisasContext *s, uint32_t insn)
gen_ss_advance(s);
gen_exception_insn(s, 0, EXCP_HVC, syn_aa64_hvc(imm16));
break;
+ case 3:
+ if (!arm_dc_feature(s, ARM_FEATURE_EL3) || s->current_pl == 0) {
+ unallocated_encoding(s);
+ break;
+ }
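+ /* The pre SMC helper handles traps to EL2 (HCR_EL2.TSC) and runtime
+ * UNDEFs (SCR.SMD), so the PC must point at this SMC instruction
+ * before calling it. */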
+ gen_a64_set_pc_im(s->pc - 4);
+ tmp = tcg_const_i32(syn_aa64_smc(imm16));
+ gen_helper_pre_smc(cpu_env, tmp);
+ tcg_temp_free_i32(tmp);
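+ /* Architecture requires ss advance before we do the actual work. */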
+ gen_ss_advance(s);
+ gen_exception_insn(s, 0, EXCP_SMC, syn_aa64_smc(imm16));
+ break;
default:
unallocated_encoding(s);
break;