hv: lib: refine inline assembly use in bitmap operation

Try to minimize the C code in inline assembly functions. Now only
constructing a data structure and returning a value are permitted.

Tracked-On: #861
Signed-off-by: Li, Fei1 <fei1.li@intel.com>
This commit is contained in:
Li, Fei1 2018-12-21 19:02:31 +08:00 committed by wenlingz
parent ddd07b955b
commit 36dcb0f605
3 changed files with 50 additions and 78 deletions

View File

@ -46,12 +46,12 @@ static uint16_t vmx_vpid_nr = VMX_MIN_NR_VPID;
#define INVEPT_TYPE_ALL_CONTEXTS 2UL #define INVEPT_TYPE_ALL_CONTEXTS 2UL
#define VMFAIL_INVALID_EPT_VPID \ #define VMFAIL_INVALID_EPT_VPID \
" jnc 1f\n" \ " jnc 1f\n" \
" movl $1, (%0)\n" /* CF: error = 1 */ \ " mov $1, %0\n" /* CF: error = 1 */ \
" jmp 3f\n" \ " jmp 3f\n" \
"1: jnz 2f\n" \ "1: jnz 2f\n" \
" movl $2, (%0)\n" /* ZF: error = 2 */ \ " mov $2, %0\n" /* ZF: error = 2 */ \
" jmp 3f\n" \ " jmp 3f\n" \
"2: movl $0, (%0)\n" \ "2: mov $0, %0\n" \
"3:" "3:"
struct invvpid_operand { struct invvpid_operand {
@ -66,13 +66,15 @@ struct invept_desc {
uint64_t res; uint64_t res;
}; };
static inline void asm_invvpid(const struct invvpid_operand operand, uint64_t type, int32_t *error) static inline int32_t asm_invvpid(const struct invvpid_operand operand, uint64_t type)
{ {
int32_t error;
asm volatile ("invvpid %1, %2\n" asm volatile ("invvpid %1, %2\n"
VMFAIL_INVALID_EPT_VPID VMFAIL_INVALID_EPT_VPID
: "+r" (error) : "=r" (error)
: "m" (operand), "r" (type) : "m" (operand), "r" (type)
: "memory"); : "memory");
return error;
} }
/* /*
@ -80,24 +82,22 @@ static inline void asm_invvpid(const struct invvpid_operand operand, uint64_t ty
*/ */
static inline void local_invvpid(uint64_t type, uint16_t vpid, uint64_t gva) static inline void local_invvpid(uint64_t type, uint16_t vpid, uint64_t gva)
{ {
int32_t error = 0;
const struct invvpid_operand operand = { vpid, 0U, 0U, gva }; const struct invvpid_operand operand = { vpid, 0U, 0U, gva };
asm_invvpid(operand, type, &error); if (asm_invvpid(operand, type) != 0) {
if (error != 0) {
pr_dbg("%s, failed. type = %llu, vpid = %u", __func__, type, vpid); pr_dbg("%s, failed. type = %llu, vpid = %u", __func__, type, vpid);
} }
} }
static inline void asm_invept(uint64_t type, struct invept_desc desc, int32_t *error) static inline int32_t asm_invept(uint64_t type, struct invept_desc desc)
{ {
int32_t error;
asm volatile ("invept %1, %2\n" asm volatile ("invept %1, %2\n"
VMFAIL_INVALID_EPT_VPID VMFAIL_INVALID_EPT_VPID
: "+r" (error) : "=r" (error)
: "m" (desc), "r" (type) : "m" (desc), "r" (type)
: "memory"); : "memory");
return error;
} }
/* /*
@ -105,11 +105,7 @@ static inline void asm_invept(uint64_t type, struct invept_desc desc, int32_t *e
*/ */
static inline void local_invept(uint64_t type, struct invept_desc desc) static inline void local_invept(uint64_t type, struct invept_desc desc)
{ {
int32_t error = 0; if (asm_invept(type, desc) != 0) {
asm_invept(type, desc, &error);
if (error != 0) {
pr_dbg("%s, failed. type = %llu, eptp = 0x%llx", __func__, type, desc.eptp); pr_dbg("%s, failed. type = %llu, eptp = 0x%llx", __func__, type, desc.eptp);
} }
} }

View File

@ -292,22 +292,18 @@ static inline uint64_t sidt(void)
} }
/* Read MSR */ /* Read MSR */
static inline void cpu_msr_read(uint32_t reg, uint64_t *msr_val_ptr) static inline uint64_t cpu_msr_read(uint32_t reg)
{ {
uint32_t msrl, msrh; uint32_t msrl, msrh;
asm volatile (" rdmsr ":"=a"(msrl), "=d"(msrh) : "c" (reg)); asm volatile (" rdmsr ":"=a"(msrl), "=d"(msrh) : "c" (reg));
*msr_val_ptr = ((uint64_t)msrh << 32U) | msrl; return (((uint64_t)msrh << 32U) | msrl);
} }
/* Write MSR */ /* Write MSR */
static inline void cpu_msr_write(uint32_t reg, uint64_t msr_val) static inline void cpu_msr_write(uint32_t reg, uint64_t msr_val)
{ {
uint32_t msrl, msrh; asm volatile (" wrmsr " : : "c" (reg), "a" ((uint32_t)msr_val), "d" ((uint32_t)(msr_val >> 32U)));
msrl = (uint32_t)msr_val;
msrh = (uint32_t)(msr_val >> 32U);
asm volatile (" wrmsr " : : "c" (reg), "a" (msrl), "d" (msrh));
} }
static inline void pause_cpu(void) static inline void pause_cpu(void)
@ -429,8 +425,7 @@ static inline uint64_t cpu_rsp_get(void)
{ {
uint64_t ret; uint64_t ret;
asm volatile("movq %%rsp, %0" asm volatile("movq %%rsp, %0" : "=r"(ret));
: "=r"(ret));
return ret; return ret;
} }
@ -438,34 +433,23 @@ static inline uint64_t cpu_rbp_get(void)
{ {
uint64_t ret; uint64_t ret;
asm volatile("movq %%rbp, %0" asm volatile("movq %%rbp, %0" : "=r"(ret));
: "=r"(ret));
return ret; return ret;
} }
static inline uint64_t static inline uint64_t msr_read(uint32_t reg_num)
msr_read(uint32_t reg_num)
{ {
uint64_t msr_val; return cpu_msr_read(reg_num);
cpu_msr_read(reg_num, &msr_val);
return msr_val;
} }
static inline void static inline void msr_write(uint32_t reg_num, uint64_t value64)
msr_write(uint32_t reg_num, uint64_t value64)
{ {
cpu_msr_write(reg_num, value64); cpu_msr_write(reg_num, value64);
} }
static inline void static inline void write_xcr(int32_t reg, uint64_t val)
write_xcr(int32_t reg, uint64_t val)
{ {
uint32_t low, high; asm volatile("xsetbv" : : "c" (reg), "a" ((uint32_t)val), "d" ((uint32_t)(val >> 32U)));
low = (uint32_t)val;
high = (uint32_t)(val >> 32U);
asm volatile("xsetbv" : : "c" (reg), "a" (low), "d" (high));
} }
static inline void stac(void) static inline void stac(void)

View File

@ -39,7 +39,7 @@
**/ **/
#define INVALID_BIT_INDEX 0xffffU #define INVALID_BIT_INDEX 0xffffU
/** /*
* *
* fls32 - Find the Last (most significant) bit Set in value and * fls32 - Find the Last (most significant) bit Set in value and
* return the bit index of that bit. * return the bit index of that bit.
@ -61,33 +61,30 @@
* when 'value' was zero, bit operations function can't find bit * when 'value' was zero, bit operations function can't find bit
* set and return the invalid bit index directly. * set and return the invalid bit index directly.
* *
* **/ */
static inline uint16_t fls32(uint32_t value) static inline uint16_t fls32(uint32_t value)
{ {
uint32_t ret = 0U; uint32_t ret;
if (value == 0U) { asm volatile("bsrl %1,%0\n\t"
return (INVALID_BIT_INDEX); "jnz 1f\n\t"
} "mov %2,%0\n"
asm volatile("bsrl %1,%0" "1:" : "=r" (ret)
: "=r" (ret) : "rm" (value), "i" (INVALID_BIT_INDEX));
: "rm" (value));
return (uint16_t)ret; return (uint16_t)ret;
} }
static inline uint16_t fls64(uint64_t value) static inline uint16_t fls64(uint64_t value)
{ {
uint64_t ret = 0UL; uint64_t ret = 0UL;
if (value == 0UL) { asm volatile("bsrq %1,%0\n\t"
ret = (INVALID_BIT_INDEX); "jnz 1f\n\t"
} else { "mov %2,%0\n"
asm volatile("bsrq %1,%0" "1:" : "=r" (ret)
: "=r" (ret) : "rm" (value), "i" (INVALID_BIT_INDEX));
: "rm" (value));
}
return (uint16_t)ret; return (uint16_t)ret;
} }
/** /*
* *
* ffs64 - Find the First (least significant) bit Set in value(Long type) * ffs64 - Find the First (least significant) bit Set in value(Long type)
* and return the index of that bit. * and return the index of that bit.
@ -111,16 +108,15 @@ static inline uint16_t fls64(uint64_t value)
* when 'value' was zero, bit operations function can't find bit * when 'value' was zero, bit operations function can't find bit
* set and return the invalid bit index directly. * set and return the invalid bit index directly.
* *
* **/ */
static inline uint16_t ffs64(uint64_t value) static inline uint16_t ffs64(uint64_t value)
{ {
uint64_t ret = 0UL; uint64_t ret;
if (value == 0UL) { asm volatile("bsfq %1,%0\n\t"
return (INVALID_BIT_INDEX); "jnz 1f\n\t"
} "mov %2,%0\n"
asm volatile("bsfq %1,%0" "1:" : "=r" (ret)
: "=r" (ret) : "rm" (value), "i" (INVALID_BIT_INDEX));
: "rm" (value));
return (uint16_t)ret; return (uint16_t)ret;
} }
@ -147,7 +143,7 @@ static inline uint64_t ffz64_ex(const uint64_t *addr, uint64_t size)
return size; return size;
} }
/** /*
* Counts leading zeros. * Counts leading zeros.
* *
* The number of leading zeros is defined as the number of * The number of leading zeros is defined as the number of
@ -171,7 +167,7 @@ static inline uint16_t clz(uint32_t value)
} }
} }
/** /*
* Counts leading zeros (64 bit version). * Counts leading zeros (64 bit version).
* *
* @param value:The 64 bit value to count the number of leading zeros. * @param value:The 64 bit value to count the number of leading zeros.
@ -232,26 +228,22 @@ build_bitmap_clear(bitmap32_clear_lock, "l", uint32_t, BUS_LOCK)
* Note:Input parameter nr shall be less than 64. If nr>=64, it will * Note:Input parameter nr shall be less than 64. If nr>=64, it will
* be truncated. * be truncated.
*/ */
static inline bool bitmap_test(uint16_t nr_arg, const volatile uint64_t *addr) static inline bool bitmap_test(uint16_t nr, const volatile uint64_t *addr)
{ {
uint16_t nr;
int32_t ret = 0; int32_t ret = 0;
nr = nr_arg & 0x3fU;
asm volatile("btq %q2,%1\n\tsbbl %0, %0" asm volatile("btq %q2,%1\n\tsbbl %0, %0"
: "=r" (ret) : "=r" (ret)
: "m" (*addr), "r" ((uint64_t)nr) : "m" (*addr), "r" ((uint64_t)(nr & 0x3fU))
: "cc", "memory"); : "cc", "memory");
return (ret != 0); return (ret != 0);
} }
static inline bool bitmap32_test(uint16_t nr_arg, const volatile uint32_t *addr) static inline bool bitmap32_test(uint16_t nr, const volatile uint32_t *addr)
{ {
uint16_t nr;
int32_t ret = 0; int32_t ret = 0;
nr = nr_arg & 0x1fU;
asm volatile("btl %2,%1\n\tsbbl %0, %0" asm volatile("btl %2,%1\n\tsbbl %0, %0"
: "=r" (ret) : "=r" (ret)
: "m" (*addr), "r" ((uint32_t)nr) : "m" (*addr), "r" ((uint32_t)(nr & 0x1fU))
: "cc", "memory"); : "cc", "memory");
return (ret != 0); return (ret != 0);
} }