@@ -367,3 +367,85 @@ static void emit_load_be16(u8 cond, u8 r_res, u8 r_addr, struct jit_ctx *ctx)
 
 static inline void emit_swap16(u8 r_dst __maybe_unused,
 			       u8 r_src __maybe_unused,
+			       struct jit_ctx *ctx __maybe_unused)
+{
+#ifdef __LITTLE_ENDIAN
+	emit(ARM_REV16(r_dst, r_src), ctx);
+#endif
+}
+
+#endif /* __LINUX_ARM_ARCH__ < 6 */
+
+
+/* Compute the immediate value for a PC-relative branch. */
+static inline u32 b_imm(unsigned tgt, struct jit_ctx *ctx)
+{
+	u32 imm;
+
+	if (ctx->target == NULL)
+		return 0;
+	/*
+	 * BPF allows only forward jumps and the offset of the target is
+	 * still the one computed during the first pass.
+	 */
+	imm = ctx->offsets[tgt] + ctx->prologue_bytes - (ctx->idx * 4 + 8);
+
+	return imm >> 2;
+}
+
+#define OP_IMM3(op, r1, r2, imm_val, ctx)				\
+	do {								\
+		imm12 = imm8m(imm_val);					\
+		if (imm12 < 0) {					\
+			emit_mov_i_no8m(r_scratch, imm_val, ctx);	\
+			emit(op ## _R((r1), (r2), r_scratch), ctx);	\
+		} else {						\
+			emit(op ## _I((r1), (r2), imm12), ctx);		\
+		}							\
+	} while (0)
+
+static inline void emit_err_ret(u8 cond, struct jit_ctx *ctx)
+{
+	if (ctx->ret0_fp_idx >= 0) {
+		_emit(cond, ARM_B(b_imm(ctx->ret0_fp_idx, ctx)), ctx);
+		/* NOP to keep the size constant between passes */
+		emit(ARM_MOV_R(ARM_R0, ARM_R0), ctx);
+	} else {
+		_emit(cond, ARM_MOV_I(ARM_R0, 0), ctx);
+		_emit(cond, ARM_B(b_imm(ctx->skf->len, ctx)), ctx);
+	}
+}
+
+static inline void emit_blx_r(u8 tgt_reg, struct jit_ctx *ctx)
+{
+#if __LINUX_ARM_ARCH__ < 5
+	emit(ARM_MOV_R(ARM_LR, ARM_PC), ctx);
+
+	if (elf_hwcap & HWCAP_THUMB)
+		emit(ARM_BX(tgt_reg), ctx);
+	else
+		emit(ARM_MOV_R(ARM_PC, tgt_reg), ctx);
+#else
+	emit(ARM_BLX_R(tgt_reg), ctx);
+#endif
+}
+
+static inline void emit_udiv(u8 rd, u8 rm, u8 rn, struct jit_ctx *ctx)
+{
+#if __LINUX_ARM_ARCH__ == 7
+	if (elf_hwcap & HWCAP_IDIVA) {
+		emit(ARM_UDIV(rd, rm, rn), ctx);
+		return;
+	}
+#endif
+	if (rm != ARM_R0)
+		emit(ARM_MOV_R(ARM_R0, rm), ctx);
+	if (rn != ARM_R1)
+		emit(ARM_MOV_R(ARM_R1, rn), ctx);
+
+	ctx->seen |= SEEN_CALL;
+	emit_mov_i(ARM_R3, (u32)jit_udiv, ctx);
+	emit_blx_r(ARM_R3, ctx);
+
+	if (rd != ARM_R0)
+		emit(ARM_MOV_R(rd, ARM_R0), ctx);