u32 return_offset = (is_thumb) ? 2 : 4;
new_spsr_value = cpsr;
- new_lr_value = *vcpu_pc(vcpu) - return_offset;
+ new_lr_value = *vcpu_pc(vcpu) + return_offset;
*vcpu_cpsr(vcpu) = (cpsr & ~MODE_MASK) | UND_MODE;
*vcpu_cpsr(vcpu) |= PSR_I_BIT;
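The hunk above fixes the 32-bit undefined-instruction injection path. Per
ARMv8 ARM table G1-10, LR_und must hold the address of the instruction
after the undefined one: the faulting PC plus 4 in ARM state or plus 2 in
Thumb state. The old code subtracted the offset, so a guest handler
retrying the instruction with the canonical "subs pc, lr, #4" (or "#2" in
Thumb) would land two instructions before the fault. A minimal sketch of
the corrected computation, not the kernel's code, with hypothetical names
(fault_pc, undef_return_address):

	#include <stdbool.h>

	/* LR_und = undefined instruction address + 4 (ARM) or + 2 (Thumb) */
	static inline unsigned long undef_return_address(unsigned long fault_pc,
							 bool is_thumb)
	{
		return fault_pc + (is_thumb ? 2 : 4);
	}

The next hunk applies the same table-G1-10 offsets to the 32-bit abort
injection path.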
unsigned long new_spsr_value;
unsigned long cpsr = *vcpu_cpsr(vcpu);
u32 sctlr = vcpu->arch.cp15[c1_SCTLR];
- bool is_thumb = (cpsr & PSR_T_BIT);
u32 vect_offset;
- u32 return_offset = (is_thumb) ? 4 : 0;
+ u32 return_offset = (is_pabt) ? 4 : 8;
bool is_lpae;
new_spsr_value = cpsr;
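In the abort path the return offset depends on the abort type, not on the
instruction set state: LR_abt is the faulting address plus 4 for a prefetch
abort and plus 8 for a data abort, in both ARM and Thumb state, which is
why the now-unused is_thumb is dropped. A guest handler then retries the
access with "subs pc, lr, #4" (prefetch) or "subs pc, lr, #8" (data). A
sketch under the same hypothetical naming as above:

	#include <stdbool.h>

	/* LR_abt = faulting address + 4 (prefetch abort) or + 8 (data abort) */
	static inline unsigned long abt_return_address(unsigned long fault_pc,
						       bool is_pabt)
	{
		return fault_pc + (is_pabt ? 4 : 8);
	}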
#define PSTATE_FAULT_BITS_64	(PSR_MODE_EL1h | PSR_A_BIT | PSR_F_BIT | \
				 PSR_I_BIT | PSR_D_BIT)
#define EL1_EXCEPT_SYNC_OFFSET	0x200
+/*
+ * Table taken from ARMv8 ARM DDI0487B-B, table G1-10.
+ */
+static const u8 return_offsets[8][2] = {
+ [0] = { 0, 0 }, /* Reset, unused */
+ [1] = { 4, 2 }, /* Undefined */
+ [2] = { 0, 0 }, /* SVC, unused */
+ [3] = { 4, 4 }, /* Prefetch abort */
+ [4] = { 8, 8 }, /* Data abort */
+ [5] = { 0, 0 }, /* HVC, unused */
+ [6] = { 4, 4 }, /* IRQ, unused */
+ [7] = { 4, 4 }, /* FIQ, unused */
+};
+
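The new table is indexed by the AArch32 exception vector offset divided by
four (0x4 is Undefined, 0xc prefetch abort, 0x10 data abort), with column 0
for ARM state and column 1 for Thumb state; prepare_fault32() below does
the lookup as return_offsets[vect_offset >> 2][is_thumb]. A worked example,
assuming the table is in scope:

	#include <assert.h>

	static void return_offset_examples(void)
	{
		assert(return_offsets[0x4 >> 2][1] == 2);	/* Undefined, Thumb */
		assert(return_offsets[0xc >> 2][0] == 4);	/* Prefetch abort, ARM */
		assert(return_offsets[0x10 >> 2][0] == 8);	/* Data abort, ARM */
	}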
static void prepare_fault32(struct kvm_vcpu *vcpu, u32 mode, u32 vect_offset)
{
unsigned long cpsr;
unsigned long new_spsr_value = *vcpu_cpsr(vcpu);
bool is_thumb = (new_spsr_value & COMPAT_PSR_T_BIT);
- u32 return_offset = (is_thumb) ? 4 : 0;
+ u32 return_offset = return_offsets[vect_offset >> 2][is_thumb];
u32 sctlr = vcpu_cp15(vcpu, c1_SCTLR);
cpsr = mode | COMPAT_PSR_I_BIT;