HV: code format for memory.c

Replace blank-space indentation with tabs.

Signed-off-by: Yonghua Huang <yonghua.huang@intel.com>
Acked-by: Eddie Dong <eddie.dong@intel.com>
This commit is contained in:
Yonghua Huang 2018-07-09 21:35:14 +08:00 committed by lijinxia
parent d3e8c29d0e
commit eb7cf14bcf

View File

@ -16,19 +16,19 @@ Malloc_Heap[CONFIG_HEAP_SIZE] __aligned(CONFIG_MALLOC_ALIGN);
#define MALLOC_HEAP_BUFF_SIZE CONFIG_MALLOC_ALIGN #define MALLOC_HEAP_BUFF_SIZE CONFIG_MALLOC_ALIGN
#define MALLOC_HEAP_TOTAL_BUFF (CONFIG_HEAP_SIZE/MALLOC_HEAP_BUFF_SIZE) #define MALLOC_HEAP_TOTAL_BUFF (CONFIG_HEAP_SIZE/MALLOC_HEAP_BUFF_SIZE)
#define MALLOC_HEAP_BITMAP_SIZE \ #define MALLOC_HEAP_BITMAP_SIZE \
INT_DIV_ROUNDUP(MALLOC_HEAP_TOTAL_BUFF, BITMAP_WORD_SIZE) INT_DIV_ROUNDUP(MALLOC_HEAP_TOTAL_BUFF, BITMAP_WORD_SIZE)
static uint32_t Malloc_Heap_Bitmap[MALLOC_HEAP_BITMAP_SIZE]; static uint32_t Malloc_Heap_Bitmap[MALLOC_HEAP_BITMAP_SIZE];
static uint32_t Malloc_Heap_Contiguity_Bitmap[MALLOC_HEAP_BITMAP_SIZE]; static uint32_t Malloc_Heap_Contiguity_Bitmap[MALLOC_HEAP_BITMAP_SIZE];
struct mem_pool Memory_Pool = { struct mem_pool Memory_Pool = {
.start_addr = Malloc_Heap, .start_addr = Malloc_Heap,
.spinlock = {.head = 0U, .tail = 0U}, .spinlock = {.head = 0U, .tail = 0U},
.size = CONFIG_HEAP_SIZE, .size = CONFIG_HEAP_SIZE,
.buff_size = MALLOC_HEAP_BUFF_SIZE, .buff_size = MALLOC_HEAP_BUFF_SIZE,
.total_buffs = MALLOC_HEAP_TOTAL_BUFF, .total_buffs = MALLOC_HEAP_TOTAL_BUFF,
.bmp_size = MALLOC_HEAP_BITMAP_SIZE, .bmp_size = MALLOC_HEAP_BITMAP_SIZE,
.bitmap = Malloc_Heap_Bitmap, .bitmap = Malloc_Heap_Bitmap,
.contiguity_bitmap = Malloc_Heap_Contiguity_Bitmap .contiguity_bitmap = Malloc_Heap_Contiguity_Bitmap
}; };
/************************************************************************/ /************************************************************************/
@ -40,281 +40,282 @@ Paging_Heap[CONFIG_NUM_ALLOC_PAGES][CPU_PAGE_SIZE] __aligned(CPU_PAGE_SIZE);
#define PAGING_HEAP_BUFF_SIZE CPU_PAGE_SIZE #define PAGING_HEAP_BUFF_SIZE CPU_PAGE_SIZE
#define PAGING_HEAP_TOTAL_BUFF CONFIG_NUM_ALLOC_PAGES #define PAGING_HEAP_TOTAL_BUFF CONFIG_NUM_ALLOC_PAGES
#define PAGING_HEAP_BITMAP_SIZE \ #define PAGING_HEAP_BITMAP_SIZE \
INT_DIV_ROUNDUP(PAGING_HEAP_TOTAL_BUFF, BITMAP_WORD_SIZE) INT_DIV_ROUNDUP(PAGING_HEAP_TOTAL_BUFF, BITMAP_WORD_SIZE)
static uint32_t Paging_Heap_Bitmap[PAGING_HEAP_BITMAP_SIZE]; static uint32_t Paging_Heap_Bitmap[PAGING_HEAP_BITMAP_SIZE];
static uint32_t Paging_Heap_Contiguity_Bitmap[MALLOC_HEAP_BITMAP_SIZE]; static uint32_t Paging_Heap_Contiguity_Bitmap[MALLOC_HEAP_BITMAP_SIZE];
struct mem_pool Paging_Memory_Pool = { struct mem_pool Paging_Memory_Pool = {
.start_addr = Paging_Heap, .start_addr = Paging_Heap,
.spinlock = {.head = 0U, .tail = 0U}, .spinlock = {.head = 0U, .tail = 0U},
.size = CONFIG_NUM_ALLOC_PAGES * CPU_PAGE_SIZE, .size = CONFIG_NUM_ALLOC_PAGES * CPU_PAGE_SIZE,
.buff_size = PAGING_HEAP_BUFF_SIZE, .buff_size = PAGING_HEAP_BUFF_SIZE,
.total_buffs = PAGING_HEAP_TOTAL_BUFF, .total_buffs = PAGING_HEAP_TOTAL_BUFF,
.bmp_size = PAGING_HEAP_BITMAP_SIZE, .bmp_size = PAGING_HEAP_BITMAP_SIZE,
.bitmap = Paging_Heap_Bitmap, .bitmap = Paging_Heap_Bitmap,
.contiguity_bitmap = Paging_Heap_Contiguity_Bitmap .contiguity_bitmap = Paging_Heap_Contiguity_Bitmap
}; };
static void *allocate_mem(struct mem_pool *pool, unsigned int num_bytes) static void *allocate_mem(struct mem_pool *pool, unsigned int num_bytes)
{ {
void *memory = NULL; void *memory = NULL;
uint32_t idx; uint32_t idx;
uint16_t bit_idx; uint16_t bit_idx;
uint32_t requested_buffs; uint32_t requested_buffs;
/* Check if provided memory pool exists */ /* Check if provided memory pool exists */
if (pool == NULL) if (pool == NULL)
return NULL; return NULL;
/* Acquire the pool lock */ /* Acquire the pool lock */
spinlock_obtain(&pool->spinlock); spinlock_obtain(&pool->spinlock);
/* Calculate number of buffers to be allocated from memory pool */ /* Calculate number of buffers to be allocated from memory pool */
requested_buffs = INT_DIV_ROUNDUP(num_bytes, pool->buff_size); requested_buffs = INT_DIV_ROUNDUP(num_bytes, pool->buff_size);
for (idx = 0U; idx < pool->bmp_size; idx++) { for (idx = 0U; idx < pool->bmp_size; idx++) {
/* Find the first occurrence of requested_buffs number of free /* Find the first occurrence of requested_buffs number of free
* buffers. The 0th bit in bitmap represents a free buffer. * buffers. The 0th bit in bitmap represents a free buffer.
*/ */
for (bit_idx = ffz64(pool->bitmap[idx]); for (bit_idx = ffz64(pool->bitmap[idx]);
bit_idx < BITMAP_WORD_SIZE; bit_idx++) { bit_idx < BITMAP_WORD_SIZE; bit_idx++) {
/* Check if selected buffer is free */ /* Check if selected buffer is free */
if ((pool->bitmap[idx] & (1U << bit_idx)) != 0U) if ((pool->bitmap[idx] & (1U << bit_idx)) != 0U)
continue; continue;
/* Declare temporary variables to be used locally in /* Declare temporary variables to be used locally in
* this block * this block
*/ */
uint32_t i; uint32_t i;
uint16_t tmp_bit_idx = bit_idx; uint16_t tmp_bit_idx = bit_idx;
uint32_t tmp_idx = idx; uint32_t tmp_idx = idx;
/* Check requested_buffs number of buffers availability /* Check requested_buffs number of buffers availability
* in memory-pool right after selected buffer * in memory-pool right after selected buffer
*/ */
for (i = 1; i < requested_buffs; i++) { for (i = 1; i < requested_buffs; i++) {
/* Check if tmp_bit_idx is out-of-range */ /* Check if tmp_bit_idx is out-of-range */
tmp_bit_idx++; tmp_bit_idx++;
if (tmp_bit_idx == BITMAP_WORD_SIZE) { if (tmp_bit_idx == BITMAP_WORD_SIZE) {
/* Break the loop if tmp_idx is /* Break the loop if tmp_idx is
* out-of-range * out-of-range
*/ */
tmp_idx++; tmp_idx++;
if (tmp_idx == pool->bmp_size) if (tmp_idx == pool->bmp_size)
break; break;
/* Reset tmp_bit_idx */ /* Reset tmp_bit_idx */
tmp_bit_idx = 0U; tmp_bit_idx = 0U;
} }
/* Break if selected buffer is not free */ /* Break if selected buffer is not free */
if ((pool->bitmap[tmp_idx] & (1U << tmp_bit_idx)) != 0U) if ((pool->bitmap[tmp_idx]
break; & (1U << tmp_bit_idx)) != 0U)
} break;
}
/* Check if requested_buffs number of free contiguous /* Check if requested_buffs number of free contiguous
* buffers are found in memory pool * buffers are found in memory pool
*/ */
if (i == requested_buffs) { if (i == requested_buffs) {
/* Get start address of first buffer among /* Get start address of first buffer among
* selected free contiguous buffer in the * selected free contiguous buffer in the
* memory pool * memory pool
*/ */
memory = (char *)pool->start_addr + memory = (char *)pool->start_addr +
pool->buff_size * (idx * BITMAP_WORD_SIZE + pool->buff_size *
bit_idx); (idx * BITMAP_WORD_SIZE +
bit_idx);
/* Update allocation bitmaps information for /* Update allocation bitmaps information for
* selected buffers * selected buffers
*/ */
for (i = 0U; i < requested_buffs; i++) { for (i = 0U; i < requested_buffs; i++) {
/* Set allocation bit in bitmap for /* Set allocation bit in bitmap for
* this buffer * this buffer
*/ */
pool->bitmap[idx] |= (1U << bit_idx); pool->bitmap[idx] |= (1U << bit_idx);
/* Set contiguity information for this /* Set contiguity information for this
* buffer in contiguity-bitmap * buffer in contiguity-bitmap
*/ */
if (i < (requested_buffs - 1)) { if (i < (requested_buffs - 1)) {
/* Set contiguity bit to 1 if /* Set contiguity bit to 1 if
* this buffer is not the last * this buffer is not the last
* of selected contiguous * of selected contiguous
* buffers array * buffers array
*/ */
pool->contiguity_bitmap[idx] |= pool->contiguity_bitmap[idx] |=
(1U << bit_idx); (1U << bit_idx);
} else { } else {
/* Set contiguity bit to 0 if /* Set contiguity bit to 0 if
* this buffer is not the last * this buffer is not the last
* of selected contiguous * of selected contiguous
* buffers array * buffers array
*/ */
pool->contiguity_bitmap[idx] &= pool->contiguity_bitmap[idx] &=
~(1U << bit_idx); ~(1U << bit_idx);
} }
/* Check if bit_idx is out-of-range */ /* Check if bit_idx is out-of-range */
bit_idx++; bit_idx++;
if (bit_idx == BITMAP_WORD_SIZE) { if (bit_idx == BITMAP_WORD_SIZE) {
/* Increment idx */ /* Increment idx */
idx++; idx++;
/* Reset bit_idx */ /* Reset bit_idx */
bit_idx = 0U; bit_idx = 0U;
} }
} }
/* Release the pool lock. */ /* Release the pool lock. */
spinlock_release(&pool->spinlock); spinlock_release(&pool->spinlock);
return memory; return memory;
} }
/* Update bit_idx and idx */ /* Update bit_idx and idx */
bit_idx = tmp_bit_idx; bit_idx = tmp_bit_idx;
idx = tmp_idx; idx = tmp_idx;
} }
} }
/* Release the pool lock. */ /* Release the pool lock. */
spinlock_release(&pool->spinlock); spinlock_release(&pool->spinlock);
return (void *)NULL; return (void *)NULL;
} }
static void deallocate_mem(struct mem_pool *pool, void *ptr) static void deallocate_mem(struct mem_pool *pool, void *ptr)
{ {
uint32_t *bitmask, *contiguity_bitmask; uint32_t *bitmask, *contiguity_bitmask;
uint32_t bmp_idx, bit_idx, buff_idx; uint32_t bmp_idx, bit_idx, buff_idx;
if ((pool != NULL) && (ptr != NULL)) { if ((pool != NULL) && (ptr != NULL)) {
/* Acquire the pool lock */ /* Acquire the pool lock */
spinlock_obtain(&pool->spinlock); spinlock_obtain(&pool->spinlock);
/* Map the buffer address to its index. */ /* Map the buffer address to its index. */
buff_idx = ((char *)ptr - (char *)pool->start_addr) / buff_idx = ((char *)ptr - (char *)pool->start_addr) /
pool->buff_size; pool->buff_size;
/* De-allocate all allocated contiguous memory buffers */ /* De-allocate all allocated contiguous memory buffers */
while (buff_idx < pool->total_buffs) { while (buff_idx < pool->total_buffs) {
/* Translate the buffer index to bitmap index. */ /* Translate the buffer index to bitmap index. */
bmp_idx = buff_idx / BITMAP_WORD_SIZE; bmp_idx = buff_idx / BITMAP_WORD_SIZE;
bit_idx = buff_idx % BITMAP_WORD_SIZE; bit_idx = buff_idx % BITMAP_WORD_SIZE;
/* Get bitmap's reference for this buffer */ /* Get bitmap's reference for this buffer */
bitmask = &pool->bitmap[bmp_idx]; bitmask = &pool->bitmap[bmp_idx];
contiguity_bitmask = &pool->contiguity_bitmap[bmp_idx]; contiguity_bitmask = &pool->contiguity_bitmap[bmp_idx];
/* Mark the buffer as free */ /* Mark the buffer as free */
if ((*bitmask & (1U << bit_idx)) != 0U) if ((*bitmask & (1U << bit_idx)) != 0U)
*bitmask ^= (1U << bit_idx); *bitmask ^= (1U << bit_idx);
else else
break; break;
/* Reset the Contiguity bit of buffer */ /* Reset the Contiguity bit of buffer */
if ((*contiguity_bitmask & (1U << bit_idx)) != 0U) if ((*contiguity_bitmask & (1U << bit_idx)) != 0U)
*contiguity_bitmask ^= (1U << bit_idx); *contiguity_bitmask ^= (1U << bit_idx);
else else
break; break;
/* Increment buff_idx */ /* Increment buff_idx */
buff_idx++; buff_idx++;
} }
/* Release the pool lock. */ /* Release the pool lock. */
spinlock_release(&pool->spinlock); spinlock_release(&pool->spinlock);
} }
} }
void *malloc(unsigned int num_bytes) void *malloc(unsigned int num_bytes)
{ {
void *memory = NULL; void *memory = NULL;
/* Check if bytes requested extend page-size */ /* Check if bytes requested extend page-size */
if (num_bytes < CPU_PAGE_SIZE) { if (num_bytes < CPU_PAGE_SIZE) {
/* Request memory allocation from smaller segmented memory pool /*
*/ * Request memory allocation from smaller segmented memory pool
memory = allocate_mem(&Memory_Pool, num_bytes); */
} else { memory = allocate_mem(&Memory_Pool, num_bytes);
int page_num = } else {
(num_bytes + CPU_PAGE_SIZE - 1) >> CPU_PAGE_SHIFT; int page_num =
/* Request memory allocation through alloc_page */ (num_bytes + CPU_PAGE_SIZE - 1) >> CPU_PAGE_SHIFT;
memory = alloc_pages(page_num); /* Request memory allocation through alloc_page */
} memory = alloc_pages(page_num);
}
/* Check if memory allocation is successful */ /* Check if memory allocation is successful */
if (memory == NULL) if (memory == NULL)
pr_err("%s: failed to alloc 0x%x Bytes", __func__, num_bytes); pr_err("%s: failed to alloc 0x%x Bytes", __func__, num_bytes);
/* Return memory pointer to caller */ /* Return memory pointer to caller */
return memory; return memory;
} }
void *alloc_pages(unsigned int page_num) void *alloc_pages(unsigned int page_num)
{ {
void *memory = NULL; void *memory = NULL;
/* Request memory allocation from Page-aligned memory pool */ /* Request memory allocation from Page-aligned memory pool */
memory = allocate_mem(&Paging_Memory_Pool, page_num * CPU_PAGE_SIZE); memory = allocate_mem(&Paging_Memory_Pool, page_num * CPU_PAGE_SIZE);
/* Check if memory allocation is successful */ /* Check if memory allocation is successful */
if (memory == NULL) if (memory == NULL)
pr_err("%s: failed to alloc %d pages", __func__, page_num); pr_err("%s: failed to alloc %d pages", __func__, page_num);
return memory; return memory;
} }
/* Convenience wrapper: allocate exactly one page. */
void *alloc_page(void)
{
	return alloc_pages(1);
}
void *calloc(unsigned int num_elements, unsigned int element_size) void *calloc(unsigned int num_elements, unsigned int element_size)
{ {
void *memory = malloc(num_elements * element_size); void *memory = malloc(num_elements * element_size);
/* Determine if memory was allocated */ /* Determine if memory was allocated */
if (memory != NULL) { if (memory != NULL) {
/* Zero all the memory */ /* Zero all the memory */
(void)memset(memory, 0, num_elements * element_size); (void)memset(memory, 0, num_elements * element_size);
} }
/* Return pointer to memory */ /* Return pointer to memory */
return memory; return memory;
} }
void free(void *ptr) void free(void *ptr)
{ {
/* Check if ptr belongs to 16-Bytes aligned Memory Pool */ /* Check if ptr belongs to 16-Bytes aligned Memory Pool */
if ((Memory_Pool.start_addr < ptr) && if ((Memory_Pool.start_addr < ptr) &&
(ptr < (Memory_Pool.start_addr + (ptr < (Memory_Pool.start_addr +
(Memory_Pool.total_buffs * Memory_Pool.buff_size)))) { (Memory_Pool.total_buffs * Memory_Pool.buff_size)))) {
/* Free buffer in 16-Bytes aligned Memory Pool */ /* Free buffer in 16-Bytes aligned Memory Pool */
deallocate_mem(&Memory_Pool, ptr); deallocate_mem(&Memory_Pool, ptr);
} }
/* Check if ptr belongs to page aligned Memory Pool */ /* Check if ptr belongs to page aligned Memory Pool */
else if ((Paging_Memory_Pool.start_addr < ptr) && else if ((Paging_Memory_Pool.start_addr < ptr) &&
(ptr < (Paging_Memory_Pool.start_addr + (ptr < (Paging_Memory_Pool.start_addr +
(Paging_Memory_Pool.total_buffs * (Paging_Memory_Pool.total_buffs *
Paging_Memory_Pool.buff_size)))) { Paging_Memory_Pool.buff_size)))) {
/* Free buffer in page aligned Memory Pool */ /* Free buffer in page aligned Memory Pool */
deallocate_mem(&Paging_Memory_Pool, ptr); deallocate_mem(&Paging_Memory_Pool, ptr);
} }
} }
/* Find the first occurrence of byte c (converted to unsigned char) in the
 * first n bytes of void_s.  Returns a pointer to the matching byte, or
 * NULL if it does not occur in the range.
 */
void *memchr(const void *void_s, int c, size_t n)
{
	const unsigned char target = (unsigned char)c;
	unsigned char *cursor = (unsigned char *)void_s;
	size_t remaining;

	for (remaining = n; remaining != 0U; remaining--) {
		if (*cursor == target)
			return ((void *)cursor);
		cursor++;
	}

	return NULL;
}
/*********************************************************************** /***********************************************************************
@ -344,24 +345,22 @@ void *memchr(const void *void_s, int c, size_t n)
void *memcpy_s(void *d, size_t dmax, const void *s, size_t slen) void *memcpy_s(void *d, size_t dmax, const void *s, size_t slen)
{ {
uint8_t *dest8; uint8_t *dest8;
uint8_t *src8; uint8_t *src8;
if (slen == 0U || dmax == 0U || dmax < slen) { if (slen == 0U || dmax == 0U || dmax < slen)
ASSERT(false); ASSERT(false);
}
if ((d > s && d <= s + slen - 1) if ((d > s && d <= s + slen - 1)
|| (d < s && s <= d + dmax - 1)) { || (d < s && s <= d + dmax - 1))
ASSERT(false); ASSERT(false);
}
/*same memory block, no need to copy*/ /*same memory block, no need to copy*/
if (d == s) if (d == s)
return d; return d;
dest8 = (uint8_t *)d; dest8 = (uint8_t *)d;
src8 = (uint8_t *)s; src8 = (uint8_t *)s;
/*small data block*/ /*small data block*/
if (slen < 8U) { if (slen < 8U) {
@ -372,8 +371,8 @@ void *memcpy_s(void *d, size_t dmax, const void *s, size_t slen)
slen--; slen--;
} }
return d; return d;
} }
/*make sure 8bytes-aligned for at least one addr.*/ /*make sure 8bytes-aligned for at least one addr.*/
if ((!MEM_ALIGNED_CHECK(src8, 8)) && (!MEM_ALIGNED_CHECK(dest8, 8))) { if ((!MEM_ALIGNED_CHECK(src8, 8)) && (!MEM_ALIGNED_CHECK(dest8, 8))) {
@ -384,17 +383,17 @@ void *memcpy_s(void *d, size_t dmax, const void *s, size_t slen)
} }
} }
/*copy main data blocks, with rep prefix*/ /*copy main data blocks, with rep prefix*/
if (slen > 8U) { if (slen > 8U) {
uint32_t ecx; uint32_t ecx;
asm volatile ("cld; rep; movsq" asm volatile ("cld; rep; movsq"
: "=&c"(ecx), "=&D"(dest8), "=&S"(src8) : "=&c"(ecx), "=&D"(dest8), "=&S"(src8)
: "0" (slen / 8), "1" (dest8), "2" (src8) : "0" (slen / 8), "1" (dest8), "2" (src8)
: "memory"); : "memory");
slen = slen % 8U; slen = slen % 8U;
} }
/*tail bytes*/ /*tail bytes*/
while (slen != 0U) { while (slen != 0U) {
@ -404,19 +403,19 @@ void *memcpy_s(void *d, size_t dmax, const void *s, size_t slen)
slen--; slen--;
} }
return d; return d;
} }
void *memset(void *base, uint8_t v, size_t n) void *memset(void *base, uint8_t v, size_t n)
{ {
uint8_t *dest_p; uint8_t *dest_p;
size_t n_q; size_t n_q;
size_t count; size_t count;
dest_p = (uint8_t *)base; dest_p = (uint8_t *)base;
if ((dest_p == NULL) || (n == 0U)) if ((dest_p == NULL) || (n == 0U))
return NULL; return NULL;
/*do the few bytes to get uint64_t alignment*/ /*do the few bytes to get uint64_t alignment*/
count = n; count = n;
@ -425,12 +424,12 @@ void *memset(void *base, uint8_t v, size_t n)
dest_p++; dest_p++;
} }
/*64-bit mode*/ /*64-bit mode*/
n_q = count >> 3U; n_q = count >> 3U;
asm volatile("cld ; rep ; stosq ; movl %3,%%ecx ; rep ; stosb" asm volatile("cld ; rep ; stosq ; movl %3,%%ecx ; rep ; stosb"
: "+c"(n_q), "+D"(dest_p) : "+c"(n_q), "+D"(dest_p)
: "a" (v * 0x0101010101010101U), : "a" (v * 0x0101010101010101U),
"r"((unsigned int)count & 7U)); "r"((unsigned int)count & 7U));
return (void *)dest_p; return (void *)dest_p;
} }