hv: reconstruct bits library for multi-arch support

Extract common interface to include/lib/bits.h, and invoke the variant
implementation of arch.
Re-implement unlocked functions as C in common library.
Rename bitmap*lock() to bitmap*(), bitmap*nolock() to bitmap*non_atomic().

Tracked-On: #8803
Signed-off-by: Haoyu Tang <haoyu.tang@intel.com>
Reviewed-by: Yifan Liu  <yifan1.liu@intel.com>
Acked-by: Wang, Yu1 <yu1.wang@intel.com>
This commit is contained in:
Haoyu Tang
2025-09-05 11:34:02 +08:00
committed by acrnsi-robot
parent 090aaf4c34
commit a226b5f0ec
37 changed files with 404 additions and 260 deletions

View File

@@ -25,20 +25,10 @@
*
* $FreeBSD$
*/
#ifndef BITS_H
#define BITS_H
#ifndef X86_LIB_BITS_H
#define X86_LIB_BITS_H
#include <atomic.h>
/**
*
* INVALID_BIT_INDEX means when input parameter is zero,
* bit operations function can't find bit set and return
* the invalid bit index directly.
*
**/
#define INVALID_BIT_INDEX 0xffffU
/*
*
* fls32 - Find the Last (most significant) bit Set in value and
@@ -62,7 +52,7 @@
* set and return the invalid bit index directly.
*
*/
static inline uint16_t fls32(uint32_t value)
static inline int16_t arch_fls32(uint32_t value)
{
uint32_t ret;
asm volatile("bsrl %1,%0\n\t"
@@ -73,7 +63,7 @@ static inline uint16_t fls32(uint32_t value)
return (uint16_t)ret;
}
static inline uint16_t fls64(uint64_t value)
static inline uint16_t arch_fls64(uint64_t value)
{
uint64_t ret = 0UL;
asm volatile("bsrq %1,%0\n\t"
@@ -109,7 +99,7 @@ static inline uint16_t fls64(uint64_t value)
* set and return the invalid bit index directly.
*
*/
static inline uint16_t ffs64(uint64_t value)
static inline uint16_t arch_ffs64(uint64_t value)
{
uint64_t ret;
asm volatile("bsfq %1,%0\n\t"
@@ -120,109 +110,48 @@ static inline uint16_t ffs64(uint64_t value)
return (uint16_t)ret;
}
/* Bit scan forward: index of the least significant '0' bit in value. */
static inline uint16_t ffz64(uint64_t value)
{
	uint64_t inverted = ~value;

	return ffs64(inverted);
}
/*
 * Find the index of the first zero bit in a uint64_t array.
 *
 * @pre: the size must be a multiple of 64.
 *
 * @return the bit index of the first '0' bit, or 'size' when the
 *         array contains no zero bit within the given range.
 */
static inline uint64_t ffz64_ex(const uint64_t *addr, uint64_t size)
{
	uint64_t word_idx = 0UL;

	while ((word_idx << 6U) < size) {
		if (addr[word_idx] != ~0UL) {
			/* Found a word with at least one clear bit. */
			return (word_idx << 6U) + ffz64(addr[word_idx]);
		}
		word_idx++;
	}

	return size;
}
/*
 * Counts leading zeros.
 *
 * The number of leading zeros is defined as the number of
 * most significant bits which are not '1'. E.g.:
 *    clz(0x80000000)==0
 *    clz(0x40000000)==1
 *    ...
 *    clz(0x00000001)==31
 *    clz(0x00000000)==32
 *
 * @param value: The 32 bit value to count the number of leading zeros.
 *
 * @return The number of leading zeros in 'value'.
 */
static inline uint16_t clz(uint32_t value)
{
	if (value == 0U) {
		/* No bit set at all: by convention the result is the full width. */
		return 32U;
	}

	return 31U - fls32(value);
}
/*
 * Counts leading zeros (64 bit version).
 *
 * @param value: The 64 bit value to count the number of leading zeros.
 *
 * @return The number of leading zeros in 'value'.
 */
static inline uint16_t clz64(uint64_t value)
{
	if (value == 0UL) {
		/* No bit set at all: by convention the result is the full width. */
		return 64U;
	}

	return 63U - fls64(value);
}
/*
 * (*addr) |= (1UL<<nr);
 * Note:Input parameter nr shall be less than 64.
 * Note:Input parameter nr shall be less than 64.
 * If nr>=64, it will be truncated.
 */
/*
 * NOTE(review): this span interleaves the pre- and post-rename versions of
 * the macro from a diff (duplicated function signature and 'nr' declaration
 * lines, plus both the old *_nolock/*_lock and the new arch_* instantiation
 * sets). It is diff residue, not compilable C -- reconcile against the real
 * header before use.
 */
#define build_bitmap_set(name, op_len, op_type, lock) \
static inline void name(uint16_t nr_arg, volatile op_type *addr) \
static inline void name(uint32_t nr_arg, volatile op_type *addr) \
{ \
uint16_t nr; \
uint32_t nr; \
nr = nr_arg & ((8U * sizeof(op_type)) - 1U); \
asm volatile(lock "or" op_len " %1,%0" \
: "+m" (*addr) \
: "r" ((op_type)(1UL<<nr)) \
: "cc", "memory"); \
}
build_bitmap_set(bitmap_set_nolock, "q", uint64_t, "")
build_bitmap_set(bitmap_set_lock, "q", uint64_t, BUS_LOCK)
build_bitmap_set(bitmap32_set_nolock, "l", uint32_t, "")
build_bitmap_set(bitmap32_set_lock, "l", uint32_t, BUS_LOCK)
build_bitmap_set(arch_bitmap_set, "q", uint64_t, BUS_LOCK)
build_bitmap_set(arch_bitmap32_set, "l", uint32_t, BUS_LOCK)
/*
 * (*addr) &= ~(1UL<<nr);
 * Note:Input parameter nr shall be less than 64.
 * Note:Input parameter nr shall be less than 64.
 * If nr>=64, it will be truncated.
 */
/*
 * NOTE(review): this span interleaves the pre- and post-rename versions of
 * the macro from a diff (duplicated '#define' and signature lines, duplicated
 * 'nr' declarations, plus both the old *_nolock/*_lock and the new arch_*
 * instantiation sets). It is diff residue, not compilable C -- reconcile
 * against the real header before use.
 */
#define build_bitmap_clear(name, op_len, op_type, lock) \
static inline void name(uint16_t nr_arg, volatile op_type *addr) \
#define build_bitmap_clear(name, op_len, op_type, lock) \
static inline void name(uint32_t nr_arg, volatile op_type *addr) \
{ \
uint16_t nr; \
uint32_t nr; \
nr = nr_arg & ((8U * sizeof(op_type)) - 1U); \
asm volatile(lock "and" op_len " %1,%0" \
: "+m" (*addr) \
: "r" ((op_type)(~(1UL<<(nr)))) \
: "cc", "memory"); \
}
build_bitmap_clear(bitmap_clear_nolock, "q", uint64_t, "")
build_bitmap_clear(bitmap_clear_lock, "q", uint64_t, BUS_LOCK)
build_bitmap_clear(bitmap32_clear_nolock, "l", uint32_t, "")
build_bitmap_clear(bitmap32_clear_lock, "l", uint32_t, BUS_LOCK)
build_bitmap_clear(arch_bitmap_clear, "q", uint64_t, BUS_LOCK)
build_bitmap_clear(arch_bitmap32_clear, "l", uint32_t, BUS_LOCK)
/*
* return !!((*addr) & (1UL<<nr));
* Note:Input parameter nr shall be less than 64. If nr>=64, it will
* be truncated.
*/
static inline bool bitmap_test(uint16_t nr, const volatile uint64_t *addr)
static inline bool arch_bitmap_test(uint32_t nr, const volatile uint64_t *addr)
{
int32_t ret = 0;
asm volatile("btq %q2,%1\n\tsbbl %0, %0"
@@ -232,7 +161,7 @@ static inline bool bitmap_test(uint16_t nr, const volatile uint64_t *addr)
return (ret != 0);
}
static inline bool bitmap32_test(uint16_t nr, const volatile uint32_t *addr)
static inline bool arch_bitmap32_test(uint32_t nr, const volatile uint32_t *addr)
{
int32_t ret = 0;
asm volatile("btl %2,%1\n\tsbbl %0, %0"
@@ -250,9 +179,9 @@ static inline bool bitmap32_test(uint16_t nr, const volatile uint32_t *addr)
* will be truncated.
*/
#define build_bitmap_testandset(name, op_len, op_type, lock) \
static inline bool name(uint16_t nr_arg, volatile op_type *addr) \
static inline bool name(uint32_t nr_arg, volatile op_type *addr) \
{ \
uint16_t nr; \
uint32_t nr; \
int32_t ret=0; \
nr = nr_arg & ((8U * sizeof(op_type)) - 1U); \
asm volatile(lock "bts" op_len " %2,%1\n\tsbbl %0,%0" \
@@ -261,10 +190,8 @@ static inline bool name(uint16_t nr_arg, volatile op_type *addr) \
: "cc", "memory"); \
return (ret != 0); \
}
build_bitmap_testandset(bitmap_test_and_set_nolock, "q", uint64_t, "")
build_bitmap_testandset(bitmap_test_and_set_lock, "q", uint64_t, BUS_LOCK)
build_bitmap_testandset(bitmap32_test_and_set_nolock, "l", uint32_t, "")
build_bitmap_testandset(bitmap32_test_and_set_lock, "l", uint32_t, BUS_LOCK)
build_bitmap_testandset(arch_bitmap_test_and_set, "q", uint64_t, BUS_LOCK)
build_bitmap_testandset(arch_bitmap32_test_and_set, "l", uint32_t, BUS_LOCK)
/*
* bool ret = (*addr) & (1UL<<nr);
@@ -274,9 +201,9 @@ build_bitmap_testandset(bitmap32_test_and_set_lock, "l", uint32_t, BUS_LOCK)
* it will be truncated.
*/
#define build_bitmap_testandclear(name, op_len, op_type, lock) \
static inline bool name(uint16_t nr_arg, volatile op_type *addr) \
static inline bool name(uint32_t nr_arg, volatile op_type *addr) \
{ \
uint16_t nr; \
uint32_t nr; \
int32_t ret=0; \
nr = nr_arg & ((8U * sizeof(op_type)) - 1U); \
asm volatile(lock "btr" op_len " %2,%1\n\tsbbl %0,%0" \
@@ -285,14 +212,6 @@ static inline bool name(uint16_t nr_arg, volatile op_type *addr) \
: "cc", "memory"); \
return (ret != 0); \
}
build_bitmap_testandclear(bitmap_test_and_clear_nolock, "q", uint64_t, "")
build_bitmap_testandclear(bitmap_test_and_clear_lock, "q", uint64_t, BUS_LOCK)
build_bitmap_testandclear(bitmap32_test_and_clear_nolock, "l", uint32_t, "")
build_bitmap_testandclear(bitmap32_test_and_clear_lock, "l", uint32_t, BUS_LOCK)
/*
 * Count the number of bits set ('1') in a 64-bit bitmap.
 *
 * @param bits: the 64-bit value whose population count is taken.
 *
 * @return the number of set bits, in the range [0, 64].
 */
static inline uint16_t bitmap_weight(uint64_t bits)
{
	/*
	 * Use the 'll' builtin: __builtin_popcountl() takes 'unsigned long',
	 * which is only 32 bits wide on ILP32/LLP64 data models and would
	 * silently truncate the upper half of 'bits'. On LP64 targets the
	 * generated code is identical.
	 */
	return (uint16_t)__builtin_popcountll(bits);
}
#endif /* BITS_H*/
build_bitmap_testandclear(arch_bitmap_test_and_clear, "q", uint64_t, BUS_LOCK)
build_bitmap_testandclear(arch_bitmap32_test_and_clear, "l", uint32_t, BUS_LOCK)
#endif /* X86_LIB_BITS_H */