mirror of https://github.com/projectacrn/acrn-hypervisor.git
synced 2025-06-22 13:37:10 +00:00
hv: lib: refine inline assembly use in bitmap operation

Try to minimize the C code in inline assembly functions. Now only
constructing a data structure and returning a value is permitted.

Tracked-On: #861
Signed-off-by: Li, Fei1 <fei1.li@intel.com>

This commit is contained in:
parent ddd07b955b
commit 36dcb0f605
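
The rule the message describes, in isolation: an inline-assembly wrapper should not write results through pointer parameters or carry extra statements around the asm block; it should only build a value and return it, leaving checks to the caller. A minimal before/after sketch of that shape (rdtsc is used here purely as a stand-in instruction and is not part of this patch):

#include <stdint.h>

/* Before: the wrapper writes its result through a pointer parameter,
 * leaving extra C statements next to the inline assembly. */
static inline void rdtsc_before(uint64_t *val)
{
	uint32_t lo, hi;

	asm volatile ("rdtsc" : "=a" (lo), "=d" (hi));
	*val = ((uint64_t)hi << 32U) | lo;
}

/* After: the wrapper only constructs the value and returns it, so the
 * caller can test or use the return value directly. */
static inline uint64_t rdtsc_after(void)
{
	uint32_t lo, hi;

	asm volatile ("rdtsc" : "=a" (lo), "=d" (hi));
	return (((uint64_t)hi << 32U) | lo);
}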
@@ -46,12 +46,12 @@ static uint16_t vmx_vpid_nr = VMX_MIN_NR_VPID;
 #define INVEPT_TYPE_ALL_CONTEXTS 2UL
 #define VMFAIL_INVALID_EPT_VPID \
 	" jnc 1f\n" \
-	" movl $1, (%0)\n" /* CF: error = 1 */ \
+	" mov $1, %0\n" /* CF: error = 1 */ \
 	" jmp 3f\n" \
 	"1: jnz 2f\n" \
-	" movl $2, (%0)\n" /* ZF: error = 2 */ \
+	" mov $2, %0\n" /* ZF: error = 2 */ \
 	" jmp 3f\n" \
-	"2: movl $0, (%0)\n" \
+	"2: mov $0, %0\n" \
 	"3:"
 
 struct invvpid_operand {
@@ -66,13 +66,15 @@ struct invept_desc {
 	uint64_t res;
 };
 
-static inline void asm_invvpid(const struct invvpid_operand operand, uint64_t type, int32_t *error)
+static inline int32_t asm_invvpid(const struct invvpid_operand operand, uint64_t type)
 {
+	int32_t error;
 	asm volatile ("invvpid %1, %2\n"
 			VMFAIL_INVALID_EPT_VPID
-			: "+r" (error)
+			: "=r" (error)
 			: "m" (operand), "r" (type)
 			: "memory");
+	return error;
 }
 
 /*
@@ -80,24 +82,22 @@ static inline void asm_invvpid(const struct invvpid_operand operand, uint64_t ty
  */
 static inline void local_invvpid(uint64_t type, uint16_t vpid, uint64_t gva)
 {
-	int32_t error = 0;
-
 	const struct invvpid_operand operand = { vpid, 0U, 0U, gva };
 
-	asm_invvpid(operand, type, &error);
-
-	if (error != 0) {
+	if (asm_invvpid(operand, type) != 0) {
 		pr_dbg("%s, failed. type = %llu, vpid = %u", __func__, type, vpid);
 	}
 }
 
-static inline void asm_invept(uint64_t type, struct invept_desc desc, int32_t *error)
+static inline int32_t asm_invept(uint64_t type, struct invept_desc desc)
 {
+	int32_t error;
 	asm volatile ("invept %1, %2\n"
 			VMFAIL_INVALID_EPT_VPID
-			: "+r" (error)
+			: "=r" (error)
 			: "m" (desc), "r" (type)
 			: "memory");
+	return error;
 }
 
 /*
@@ -105,11 +105,7 @@ static inline void asm_invept(uint64_t type, struct invept_desc desc, int32_t *e
  */
 static inline void local_invept(uint64_t type, struct invept_desc desc)
 {
-	int32_t error = 0;
-
-	asm_invept(type, desc, &error);
-
-	if (error != 0) {
+	if (asm_invept(type, desc) != 0) {
 		pr_dbg("%s, failed. type = %llu, eptp = 0x%llx", __func__, type, desc.eptp);
 	}
 }
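
For readers unfamiliar with the VMFAIL_INVALID_EPT_VPID macro above: per the Intel SDM, VMX instructions such as invvpid and invept report VMfailInvalid by setting CF and VMfailValid by setting ZF. The macro maps those flags onto the integer codes that local_invvpid()/local_invept() now check via the return value. An illustrative mapping only; the names below are hypothetical and not part of the patch:

/* Hypothetical names for the codes produced by VMFAIL_INVALID_EPT_VPID. */
#define VMX_SUCCEED        0	/* neither CF nor ZF set after the instruction */
#define VMX_FAIL_INVALID   1	/* CF set: VMfailInvalid (bad operand/descriptor) */
#define VMX_FAIL_VALID     2	/* ZF set: VMfailValid (error number stored in the VMCS) */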
@@ -292,22 +292,18 @@ static inline uint64_t sidt(void)
 }
 
 /* Read MSR */
-static inline void cpu_msr_read(uint32_t reg, uint64_t *msr_val_ptr)
+static inline uint64_t cpu_msr_read(uint32_t reg)
 {
 	uint32_t msrl, msrh;
 
 	asm volatile (" rdmsr ":"=a"(msrl), "=d"(msrh) : "c" (reg));
-	*msr_val_ptr = ((uint64_t)msrh << 32U) | msrl;
+	return (((uint64_t)msrh << 32U) | msrl);
 }
 
 /* Write MSR */
 static inline void cpu_msr_write(uint32_t reg, uint64_t msr_val)
 {
-	uint32_t msrl, msrh;
-
-	msrl = (uint32_t)msr_val;
-	msrh = (uint32_t)(msr_val >> 32U);
-	asm volatile (" wrmsr " : : "c" (reg), "a" (msrl), "d" (msrh));
+	asm volatile (" wrmsr " : : "c" (reg), "a" ((uint32_t)msr_val), "d" ((uint32_t)(msr_val >> 32U)));
 }
 
 static inline void pause_cpu(void)
@@ -429,8 +425,7 @@ static inline uint64_t cpu_rsp_get(void)
 {
 	uint64_t ret;
 
-	asm volatile("movq %%rsp, %0"
-			: "=r"(ret));
+	asm volatile("movq %%rsp, %0" : "=r"(ret));
 	return ret;
 }
 
@@ -438,34 +433,23 @@ static inline uint64_t cpu_rbp_get(void)
 {
 	uint64_t ret;
 
-	asm volatile("movq %%rbp, %0"
-			: "=r"(ret));
+	asm volatile("movq %%rbp, %0" : "=r"(ret));
 	return ret;
 }
 
-static inline uint64_t
-msr_read(uint32_t reg_num)
+static inline uint64_t msr_read(uint32_t reg_num)
 {
-	uint64_t msr_val;
-
-	cpu_msr_read(reg_num, &msr_val);
-	return msr_val;
+	return cpu_msr_read(reg_num);
 }
 
-static inline void
-msr_write(uint32_t reg_num, uint64_t value64)
+static inline void msr_write(uint32_t reg_num, uint64_t value64)
 {
 	cpu_msr_write(reg_num, value64);
 }
 
-static inline void
-write_xcr(int32_t reg, uint64_t val)
+static inline void write_xcr(int32_t reg, uint64_t val)
 {
-	uint32_t low, high;
-
-	low = (uint32_t)val;
-	high = (uint32_t)(val >> 32U);
-	asm volatile("xsetbv" : : "c" (reg), "a" (low), "d" (high));
+	asm volatile("xsetbv" : : "c" (reg), "a" ((uint32_t)val), "d" ((uint32_t)(val >> 32U)));
 }
 
 static inline void stac(void)
@@ -39,7 +39,7 @@
  **/
 #define INVALID_BIT_INDEX 0xffffU
 
-/**
+/*
  *
  * fls32 - Find the Last (most significant) bit Set in value and
  * return the bit index of that bit.
@@ -61,33 +61,30 @@
  * when 'value' was zero, bit operations function can't find bit
  * set and return the invalid bit index directly.
  *
- * **/
+ */
 static inline uint16_t fls32(uint32_t value)
 {
-	uint32_t ret = 0U;
-	if (value == 0U) {
-		return (INVALID_BIT_INDEX);
-	}
-	asm volatile("bsrl %1,%0"
-			: "=r" (ret)
-			: "rm" (value));
+	uint32_t ret;
+	asm volatile("bsrl %1,%0\n\t"
+			"jnz 1f\n\t"
+			"mov %2,%0\n"
+			"1:" : "=r" (ret)
+			: "rm" (value), "i" (INVALID_BIT_INDEX));
 	return (uint16_t)ret;
 }
 
 static inline uint16_t fls64(uint64_t value)
 {
 	uint64_t ret = 0UL;
-	if (value == 0UL) {
-		ret = (INVALID_BIT_INDEX);
-	} else {
-		asm volatile("bsrq %1,%0"
-				: "=r" (ret)
-				: "rm" (value));
-	}
+	asm volatile("bsrq %1,%0\n\t"
+			"jnz 1f\n\t"
+			"mov %2,%0\n"
+			"1:" : "=r" (ret)
+			: "rm" (value), "i" (INVALID_BIT_INDEX));
 	return (uint16_t)ret;
 }
 
-/**
+/*
  *
  * ffs64 - Find the First (least significant) bit Set in value(Long type)
  * and return the index of that bit.
@@ -111,16 +108,15 @@ static inline uint16_t fls64(uint64_t value)
  * when 'value' was zero, bit operations function can't find bit
  * set and return the invalid bit index directly.
  *
- * **/
+ */
 static inline uint16_t ffs64(uint64_t value)
 {
-	uint64_t ret = 0UL;
-	if (value == 0UL) {
-		return (INVALID_BIT_INDEX);
-	}
-	asm volatile("bsfq %1,%0"
-			: "=r" (ret)
-			: "rm" (value));
+	uint64_t ret;
+	asm volatile("bsfq %1,%0\n\t"
+			"jnz 1f\n\t"
+			"mov %2,%0\n"
+			"1:" : "=r" (ret)
+			: "rm" (value), "i" (INVALID_BIT_INDEX));
 	return (uint16_t)ret;
 }
 
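
The rewritten fls32/fls64/ffs64 above lean on an architectural detail: bsr/bsf set ZF when the source operand is zero and leave the destination undefined, so the "jnz 1f" skips the fallback "mov" of INVALID_BIT_INDEX exactly when a set bit was found. A standalone sketch of the same idiom with a small usage check; fls32_sketch is an illustrative copy for this note, not the header's function:

#include <assert.h>
#include <stdint.h>

#define INVALID_BIT_INDEX 0xffffU

static inline uint16_t fls32_sketch(uint32_t value)
{
	uint32_t ret;

	/* bsrl leaves ZF set (and ret undefined) when value == 0,
	 * so jnz only skips the fallback mov when a bit was found. */
	asm volatile("bsrl %1,%0\n\t"
			"jnz 1f\n\t"
			"mov %2,%0\n"
			"1:" : "=r" (ret)
			: "rm" (value), "i" (INVALID_BIT_INDEX)
			: "cc");
	return (uint16_t)ret;
}

int main(void)
{
	assert(fls32_sketch(0x80000001U) == 31U);	/* most significant set bit */
	assert(fls32_sketch(1U) == 0U);
	assert(fls32_sketch(0U) == INVALID_BIT_INDEX);	/* zero input falls back */
	return 0;
}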
@@ -147,7 +143,7 @@ static inline uint64_t ffz64_ex(const uint64_t *addr, uint64_t size)
 
 	return size;
 }
-/**
+/*
  * Counts leading zeros.
  *
  * The number of leading zeros is defined as the number of
@@ -171,7 +167,7 @@ static inline uint16_t clz(uint32_t value)
 	}
 }
 
-/**
+/*
  * Counts leading zeros (64 bit version).
  *
  * @param value:The 64 bit value to count the number of leading zeros.
@@ -232,26 +228,22 @@ build_bitmap_clear(bitmap32_clear_lock, "l", uint32_t, BUS_LOCK)
  * Note:Input parameter nr shall be less than 64. If nr>=64, it will
  * be truncated.
  */
-static inline bool bitmap_test(uint16_t nr_arg, const volatile uint64_t *addr)
+static inline bool bitmap_test(uint16_t nr, const volatile uint64_t *addr)
 {
-	uint16_t nr;
 	int32_t ret = 0;
-	nr = nr_arg & 0x3fU;
 	asm volatile("btq %q2,%1\n\tsbbl %0, %0"
 			: "=r" (ret)
-			: "m" (*addr), "r" ((uint64_t)nr)
+			: "m" (*addr), "r" ((uint64_t)(nr & 0x3fU))
 			: "cc", "memory");
 	return (ret != 0);
 }
 
-static inline bool bitmap32_test(uint16_t nr_arg, const volatile uint32_t *addr)
+static inline bool bitmap32_test(uint16_t nr, const volatile uint32_t *addr)
 {
-	uint16_t nr;
 	int32_t ret = 0;
-	nr = nr_arg & 0x1fU;
 	asm volatile("btl %2,%1\n\tsbbl %0, %0"
 			: "=r" (ret)
-			: "m" (*addr), "r" ((uint32_t)nr)
+			: "m" (*addr), "r" ((uint32_t)(nr & 0x1fU))
 			: "cc", "memory");
 	return (ret != 0);
 }
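
The bt/sbb pair kept in bitmap_test()/bitmap32_test() works because bt copies the selected bit into CF, and sbb of a register from itself yields 0 when CF is clear and -1 (all ones) when CF is set; the hunk above merely folds the nr masking into the input operand. A standalone sketch with a usage check; bitmap_test_sketch is an illustrative copy for this note, not the header's function:

#include <assert.h>
#include <stdbool.h>
#include <stdint.h>

/* btq copies bit (nr & 0x3f) of *addr into CF; sbbl turns CF into 0 or -1. */
static inline bool bitmap_test_sketch(uint16_t nr, const volatile uint64_t *addr)
{
	int32_t ret = 0;

	asm volatile("btq %q2,%1\n\tsbbl %0, %0"
			: "=r" (ret)
			: "m" (*addr), "r" ((uint64_t)(nr & 0x3fU))
			: "cc", "memory");
	return (ret != 0);
}

int main(void)
{
	volatile uint64_t bits = 0x10UL;	/* only bit 4 set */

	assert(bitmap_test_sketch(4U, &bits));
	assert(!bitmap_test_sketch(5U, &bits));
	assert(bitmap_test_sketch(68U, &bits));	/* 68 & 0x3f == 4: nr is truncated */
	return 0;
}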