diff --git a/sys/arm64/arm64/exception.S b/sys/arm64/arm64/exception.S
index fd1dfd2c2dd9..123f73b49734 100644
--- a/sys/arm64/arm64/exception.S
+++ b/sys/arm64/arm64/exception.S
@@ -175,7 +175,7 @@ ENTRY(handle_el1h_sync)
 	mov x1, sp
 	bl do_el1h_sync
 	restore_registers 1
-	eret
+	ERET
 END(handle_el1h_sync)
 
 ENTRY(handle_el1h_irq)
@@ -183,7 +183,7 @@ ENTRY(handle_el1h_irq)
 	mov x0, sp
 	bl intr_irq_handler
 	restore_registers 1
-	eret
+	ERET
 END(handle_el1h_irq)
 
 ENTRY(handle_el0_sync)
@@ -194,7 +194,7 @@ ENTRY(handle_el0_sync)
 	bl do_el0_sync
 	do_ast
 	restore_registers 0
-	eret
+	ERET
 END(handle_el0_sync)
 
 ENTRY(handle_el0_irq)
@@ -203,7 +203,7 @@ ENTRY(handle_el0_irq)
 	bl intr_irq_handler
 	do_ast
 	restore_registers 0
-	eret
+	ERET
 END(handle_el0_irq)
 
 ENTRY(handle_serror)
diff --git a/sys/arm64/arm64/swtch.S b/sys/arm64/arm64/swtch.S
index d9921f7e9528..144cc0873f68 100644
--- a/sys/arm64/arm64/swtch.S
+++ b/sys/arm64/arm64/swtch.S
@@ -253,7 +253,7 @@ ENTRY(fork_trampoline)
 	 * No need for interrupts reenabling since PSR
 	 * will be set to the desired value anyway.
 	 */
-	eret
+	ERET
 END(fork_trampoline)
 
 
diff --git a/sys/arm64/include/asm.h b/sys/arm64/include/asm.h
index 0528a3787dcd..d947301d5865 100644
--- a/sys/arm64/include/asm.h
+++ b/sys/arm64/include/asm.h
@@ -90,4 +90,16 @@
 	.inst 0xd500409f | (1 << 8);	/* Set PAN */	\
 999:
 
+/*
+ * Some AArch64 CPUs speculate past an eret instruction. As the user may
+ * control the registers at this point add a speculation barrier usable on
+ * all AArch64 CPUs after the eret instruction.
+ * TODO: ARMv8.5 adds a specific instruction for this, we could use that
+ * if we know we are running on something that supports it.
+ */
+#define	ERET	\
+	eret;	\
+	dsb	sy;	\
+	isb
+
 #endif /* _MACHINE_ASM_H_ */