|
@@ -252,3 +252,105 @@ void paravirt_cpu_asm_init(const struct pv_cpu_asm_switch *cpu_asm_switch);
|
|
|
"r15", "r16", "r17"
|
|
|
|
|
|
/*
 * General-register clobber list for a paravirt call taking two
 * arguments: r8 and r9 are excluded (commented out) because they
 * carry the arguments and are tied to the asm outputs in
 * PARAVIRT_BR2 below, so they must not also appear as clobbers.
 */
#define PARAVIRT_REG_CLOBBERS2					\
	"r2", "r3", /*"r8", "r9",*/ "r10", "r11", "r14",	\
	"r15", "r16", "r17"
|
|
|
+
|
|
|
/*
 * General-register clobber list with r8-r14 excluded — presumably
 * for a five-argument variant that passes values in r8-r14 (the
 * matching PARAVIRT_BRn user is outside this view; confirm).
 */
#define PARAVIRT_REG_CLOBBERS5					\
	"r2", "r3", /*"r8", "r9", "r10", "r11", "r14",*/	\
	"r15", "r16", "r17"
|
|
|
+
|
|
|
/* Branch registers clobbered by the patched call sequence. */
#define PARAVIRT_BR_CLOBBERS					\
	"b6", "b7"
|
|
|
+
|
|
|
/* Predicate registers clobbered (p6-p15). */
#define PARAVIRT_PR_CLOBBERS					\
	"p6", "p7", "p8", "p9", "p10", "p11", "p12", "p13", "p14", "p15"
|
|
|
+
|
|
|
/* Application registers clobbered (only ar.ccv). */
#define PARAVIRT_AR_CLOBBERS					\
	"ar.ccv"
|
|
|
+
|
|
|
/*
 * Full clobber list for a zero-argument paravirt call: scratch
 * general registers (PARAVIRT_REG_CLOBBERS0, defined above this
 * view) plus branch, predicate, and application registers, and
 * "memory" since the callee may have arbitrary side effects.
 */
#define PARAVIRT_CLOBBERS0					\
	PARAVIRT_REG_CLOBBERS0,					\
	PARAVIRT_BR_CLOBBERS,					\
	PARAVIRT_PR_CLOBBERS,					\
	PARAVIRT_AR_CLOBBERS,					\
	"memory"
|
|
|
+
|
|
|
/* Full clobber list for a one-argument paravirt call (arg in r8). */
#define PARAVIRT_CLOBBERS1					\
	PARAVIRT_REG_CLOBBERS1,					\
	PARAVIRT_BR_CLOBBERS,					\
	PARAVIRT_PR_CLOBBERS,					\
	PARAVIRT_AR_CLOBBERS,					\
	"memory"
|
|
|
+
|
|
|
/* Full clobber list for a two-argument paravirt call (args in r8/r9). */
#define PARAVIRT_CLOBBERS2					\
	PARAVIRT_REG_CLOBBERS2,					\
	PARAVIRT_BR_CLOBBERS,					\
	PARAVIRT_PR_CLOBBERS,					\
	PARAVIRT_AR_CLOBBERS,					\
	"memory"
|
|
|
+
|
|
|
/*
 * Full clobber list for the five-argument case — r8-r14 are kept
 * out of the register clobbers because they carry arguments
 * (see PARAVIRT_REG_CLOBBERS5).
 */
#define PARAVIRT_CLOBBERS5					\
	PARAVIRT_REG_CLOBBERS5,					\
	PARAVIRT_BR_CLOBBERS,					\
	PARAVIRT_PR_CLOBBERS,					\
	PARAVIRT_AR_CLOBBERS,					\
	"memory"
|
|
|
+
|
|
|
/*
 * Emit a paravirt branch for an operation taking no arguments and
 * returning nothing useful.  r8 is declared as an asm output
 * (ia64_clobber) purely so the compiler knows it is trashed; the
 * value is discarded.
 * NOTE(review): paravirt_alt_bundle()/__PARAVIRT_BR are defined
 * outside this view — presumably they emit an annotated bundle that
 * can be binary-patched per PARAVIRT_TYPE(type); confirm there.
 */
#define PARAVIRT_BR0(op, type)					\
	register unsigned long ia64_clobber asm ("r8");		\
	asm volatile (paravirt_alt_bundle(__PARAVIRT_BR,	\
					  PARAVIRT_TYPE(type))	\
		      : "=r"(ia64_clobber)			\
		      : PARAVIRT_OP(op)				\
		      : PARAVIRT_CLOBBERS0)
|
|
|
+
|
|
|
/*
 * Like PARAVIRT_BR0, but the operation produces a result: the value
 * left in r8 is bound to ia64_intri_res for the surrounding code to
 * return.  Same clobber set as the non-returning variant.
 */
#define PARAVIRT_BR0_RET(op, type)				\
	register unsigned long ia64_intri_res asm ("r8");	\
	asm volatile (paravirt_alt_bundle(__PARAVIRT_BR,	\
					  PARAVIRT_TYPE(type))	\
		      : "=r"(ia64_intri_res)			\
		      : PARAVIRT_OP(op)				\
		      : PARAVIRT_CLOBBERS1)
|
|
|
+
|
|
|
/*
 * One-argument, no-result paravirt branch.  The argument is forced
 * into r8 (__##arg1); ia64_clobber is also pinned to r8 and tied to
 * the input via the "0" matching constraint, so r8 is both the
 * argument slot and a declared-clobbered output.  r8 therefore does
 * not appear in PARAVIRT_CLOBBERS1's register list.
 */
#define PARAVIRT_BR1(op, type, arg1)				\
	register unsigned long __##arg1 asm ("r8") = arg1;	\
	register unsigned long ia64_clobber asm ("r8");		\
	asm volatile (paravirt_alt_bundle(__PARAVIRT_BR,	\
					  PARAVIRT_TYPE(type))	\
		      : "=r"(ia64_clobber)			\
		      : PARAVIRT_OP(op), "0"(__##arg1)		\
		      : PARAVIRT_CLOBBERS1)
|
|
|
+
|
|
|
/*
 * One-argument paravirt branch that returns a value: the argument
 * goes in via r8 ("0" ties input to output) and the result comes
 * back in r8 as ia64_intri_res.
 */
#define PARAVIRT_BR1_RET(op, type, arg1)			\
	register unsigned long ia64_intri_res asm ("r8");	\
	register unsigned long __##arg1 asm ("r8") = arg1;	\
	asm volatile (paravirt_alt_bundle(__PARAVIRT_BR,	\
					  PARAVIRT_TYPE(type))	\
		      : "=r"(ia64_intri_res)			\
		      : PARAVIRT_OP(op), "0"(__##arg1)		\
		      : PARAVIRT_CLOBBERS1)
|
|
|
+
|
|
|
/*
 * Same as PARAVIRT_BR1 but for an argument of pointer type
 * (void *): the pointer is passed in r8 and r8 is declared
 * clobbered via the tied output.
 */
#define PARAVIRT_BR1_VOID(op, type, arg1)			\
	register void *__##arg1 asm ("r8") = arg1;		\
	register unsigned long ia64_clobber asm ("r8");		\
	asm volatile (paravirt_alt_bundle(__PARAVIRT_BR,	\
					  PARAVIRT_TYPE(type))	\
		      : "=r"(ia64_clobber)			\
		      : PARAVIRT_OP(op), "0"(__##arg1)		\
		      : PARAVIRT_CLOBBERS1)
|
|
|
+
|
|
|
/*
 * Two-argument, no-result paravirt branch.  arg1/arg2 are pinned to
 * r8/r9; two dummy outputs on the same registers, tied with the
 * "0"/"1" matching constraints, tell the compiler both registers
 * are consumed and trashed.  Hence r8 and r9 are commented out of
 * PARAVIRT_REG_CLOBBERS2.
 */
#define PARAVIRT_BR2(op, type, arg1, arg2)			\
	register unsigned long __##arg1 asm ("r8") = arg1;	\
	register unsigned long __##arg2 asm ("r9") = arg2;	\
	register unsigned long ia64_clobber1 asm ("r8");	\
	register unsigned long ia64_clobber2 asm ("r9");	\
	asm volatile (paravirt_alt_bundle(__PARAVIRT_BR,	\
					  PARAVIRT_TYPE(type))	\
		      : "=r"(ia64_clobber1), "=r"(ia64_clobber2)	\
		      : PARAVIRT_OP(op), "0"(__##arg1), "1"(__##arg2)	\
		      : PARAVIRT_CLOBBERS2)
|
|
|
+
|
|
|
+
|
|
|
+#define PARAVIRT_DEFINE_CPU_OP0(op, type) \
|
|
|
+ static inline void \
|
|
|
+ paravirt_ ## op (void) \
|