mm: The alignment length in mm is consistent with Kasan

The preceding field will otherwise cause the mm alignment to be inconsistent with the kasan alignment.

Signed-off-by: wangmingrong1 <wangmingrong1@xiaomi.com>
commit 3f9dd51ec8
parent 3e6649856b

4 changed files with 34 additions and 18 deletions
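What the message above describes can be reduced to a few lines of arithmetic: a chunk boundary offset from an aligned base by only sizeof(mmsize_t) is, in general, not a multiple of MM_ALIGN, which is the granularity the KASan shadow uses after this change. The sketch below is standalone and not NuttX code; MM_ALIGN = 8 and a 4-byte mmsize_t are assumed example values.

/* Sketch of the inconsistency: a boundary offset by sizeof(mmsize_t) from
 * an aligned chunk base is generally not representable in an MM_ALIGN-
 * granular shadow.  MM_ALIGN = 8 and a 4-byte mmsize_t are assumptions.
 */

#include <stdint.h>
#include <stdio.h>

#define MM_ALIGN 8u
typedef uint32_t mmsize_t;

int main(void)
{
  uintptr_t chunk    = 0x1000;                    /* MM_ALIGN-aligned base */
  uintptr_t boundary = chunk + sizeof(mmsize_t);  /* after `preceding`     */

  /* 4 % 8 != 0: this boundary falls in the middle of one shadow granule,
   * so poison/unpoison ranges derived from it cannot be exact.
   */

  printf("boundary %% MM_ALIGN = %u\n", (unsigned)(boundary % MM_ALIGN));
  return 0;
}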
Changed file 1 of 4 (bit-granular KASan shadow implementation; file paths are not shown in this view):

@@ -25,6 +25,7 @@
  ****************************************************************************/
 
 #include <nuttx/nuttx.h>
+#include <nuttx/mm/mm.h>
 #include <nuttx/mm/kasan.h>
 #include <nuttx/compiler.h>
 #include <nuttx/spinlock.h>
@@ -44,10 +45,8 @@
 #define KASAN_LAST_WORD_MASK(end) \
   (UINTPTR_MAX >> (-(end) & (KASAN_BITS_PER_WORD - 1)))
 
-#define KASAN_SHADOW_SCALE (sizeof(uintptr_t))
-
 #define KASAN_SHADOW_SIZE(size) \
-  (KASAN_BYTES_PER_WORD * ((size) / KASAN_SHADOW_SCALE / KASAN_BITS_PER_WORD))
+  (KASAN_BYTES_PER_WORD * ((size) / MM_ALIGN / KASAN_BITS_PER_WORD))
 #define KASAN_REGION_SIZE(size) \
   (sizeof(struct kasan_region_s) + KASAN_SHADOW_SIZE(size))
 
@@ -87,7 +86,7 @@ kasan_mem_to_shadow(FAR const void *ptr, size_t size,
     {
       DEBUGASSERT(addr + size <= g_region[i]->end);
       addr -= g_region[i]->begin;
-      addr /= KASAN_SHADOW_SCALE;
+      addr /= MM_ALIGN;
       *bit = addr % KASAN_BITS_PER_WORD;
       return &g_region[i]->shadow[addr / KASAN_BITS_PER_WORD];
     }
@@ -110,15 +109,15 @@ kasan_is_poisoned(FAR const void *addr, size_t size)
       return kasan_global_is_poisoned(addr, size);
     }
 
-  if (size <= KASAN_SHADOW_SCALE)
+  if (size <= MM_ALIGN)
     {
       return ((*p >> bit) & 1);
     }
 
   nbit = KASAN_BITS_PER_WORD - bit % KASAN_BITS_PER_WORD;
   mask = KASAN_FIRST_WORD_MASK(bit);
-  size = ALIGN_UP(size, KASAN_SHADOW_SCALE);
-  size /= KASAN_SHADOW_SCALE;
+  size = ALIGN_UP(size, MM_ALIGN);
+  size /= MM_ALIGN;
 
   while (size >= nbit)
     {
@@ -155,6 +154,9 @@ static void kasan_set_poison(FAR const void *addr, size_t size,
   unsigned int nbit;
   uintptr_t mask;
 
+  DEBUGASSERT((uintptr_t)addr % MM_ALIGN == 0);
+  DEBUGASSERT(size % MM_ALIGN == 0);
+
   p = kasan_mem_to_shadow(addr, size, &bit);
   if (p == NULL)
     {
@@ -163,7 +165,7 @@ static void kasan_set_poison(FAR const void *addr, size_t size,
 
   nbit = KASAN_BITS_PER_WORD - bit % KASAN_BITS_PER_WORD;
   mask = KASAN_FIRST_WORD_MASK(bit);
-  size /= KASAN_SHADOW_SCALE;
+  size /= MM_ALIGN;
 
   flags = spin_lock_irqsave(&g_lock);
   while (size >= nbit)
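For orientation, a minimal standalone sketch of what the bit-granular shadow in the file above computes after the change: one shadow bit now covers MM_ALIGN bytes of heap instead of sizeof(uintptr_t) bytes, and the bit/word indexing mirrors kasan_mem_to_shadow(). The MM_ALIGN value below is an assumed example, not taken from the source.

/* Standalone illustration of the bit-granular shadow indexing used above.
 * Names mirror the diff; MM_ALIGN = 8 is an assumed example value.
 */

#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

#define MM_ALIGN             8u
#define KASAN_BYTES_PER_WORD (sizeof(uintptr_t))
#define KASAN_BITS_PER_WORD  (KASAN_BYTES_PER_WORD * 8)

/* One bit of shadow per MM_ALIGN bytes of heap, packed into uintptr_t words */

#define KASAN_SHADOW_SIZE(size) \
  (KASAN_BYTES_PER_WORD * ((size) / MM_ALIGN / KASAN_BITS_PER_WORD))

static void mem_to_shadow(uintptr_t begin, uintptr_t addr,
                          size_t *word, unsigned int *bit)
{
  uintptr_t off = (addr - begin) / MM_ALIGN;   /* MM_ALIGN-sized granules */

  *bit  = off % KASAN_BITS_PER_WORD;           /* bit inside the word     */
  *word = off / KASAN_BITS_PER_WORD;           /* index of the word       */
}

int main(void)
{
  size_t word;
  unsigned int bit;

  mem_to_shadow(0x1000, 0x1000 + 3 * MM_ALIGN, &word, &bit);
  printf("word=%zu bit=%u shadow bytes for 4 KiB=%zu\n",
         word, bit, KASAN_SHADOW_SIZE((size_t)4096));
  return 0;
}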
Changed file 2 of 4 (byte-granular KASan shadow implementation; file paths are not shown in this view):

@@ -22,6 +22,7 @@
  * Included Files
  ****************************************************************************/
 
+#include <nuttx/mm/mm.h>
 #include <nuttx/mm/kasan.h>
 #include <nuttx/compiler.h>
 #include <nuttx/spinlock.h>
@@ -45,10 +46,8 @@
 
 #define kasan_random_tag() (1 + rand() % ((1 << (64 - KASAN_TAG_SHIFT)) - 2))
 
-#define KASAN_SHADOW_SCALE (sizeof(uintptr_t))
-
 #define KASAN_SHADOW_SIZE(size) \
-  ((size) + KASAN_SHADOW_SCALE - 1) / KASAN_SHADOW_SCALE
+  ((size) + MM_ALIGN - 1) / MM_ALIGN
 #define KASAN_REGION_SIZE(size) \
   (sizeof(struct kasan_region_s) + KASAN_SHADOW_SIZE(size))
 
@@ -89,7 +88,7 @@ kasan_mem_to_shadow(FAR const void *ptr, size_t size)
     {
       DEBUGASSERT(addr + size <= g_region[i]->end);
       addr -= g_region[i]->begin;
-      return &g_region[i]->shadow[addr / KASAN_SHADOW_SCALE];
+      return &g_region[i]->shadow[addr / MM_ALIGN];
     }
 }
 
@@ -135,6 +134,9 @@ static void kasan_set_poison(FAR const void *addr,
   irqstate_t flags;
   FAR uint8_t *p;
 
+  DEBUGASSERT((uintptr_t)addr % MM_ALIGN == 0);
+  DEBUGASSERT(size % MM_ALIGN == 0);
+
   p = kasan_mem_to_shadow(addr, size);
   if (p == NULL)
     {
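This second file uses a byte-granular shadow: after the change one shadow byte covers MM_ALIGN bytes of heap, so the index is simply offset / MM_ALIGN and the shadow size rounds up. A short standalone sketch, again with MM_ALIGN = 8 assumed:

/* Standalone illustration of byte-granular shadow indexing after the change:
 * one shadow byte per MM_ALIGN bytes of heap.  MM_ALIGN = 8 is assumed.
 */

#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

#define MM_ALIGN 8u

/* Shadow bytes needed for `size` bytes of heap, rounded up */

#define KASAN_SHADOW_SIZE(size) (((size) + MM_ALIGN - 1) / MM_ALIGN)

static size_t mem_to_shadow(uintptr_t begin, uintptr_t addr)
{
  return (addr - begin) / MM_ALIGN;    /* index of the covering shadow byte */
}

int main(void)
{
  uintptr_t begin = 0x2000;

  printf("shadow index = %zu, shadow bytes for 100 = %zu\n",
         mem_to_shadow(begin, begin + 37), (size_t)KASAN_SHADOW_SIZE(100));
  return 0;
}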
Changed file 3 of 4 (mm heap node definitions; file paths are not shown in this view):

@@ -143,7 +143,7 @@
  * previous freenode
  */
 
-#define MM_ALLOCNODE_OVERHEAD (MM_SIZEOF_ALLOCNODE - sizeof(mmsize_t))
+#define MM_ALLOCNODE_OVERHEAD (MM_SIZEOF_ALLOCNODE - MM_ALIGN)
 
 /* Get the node size */
 
@@ -173,7 +173,12 @@ typedef size_t mmsize_t;
 
 struct mm_allocnode_s
 {
-  mmsize_t preceding;                       /* Physical preceding chunk size */
+  union
+  {
+    mmsize_t preceding;                     /* Physical preceding chunk size */
+    uint8_t align[MM_ALIGN];
+  };
+
   mmsize_t size;                            /* Size of this chunk */
 #if CONFIG_MM_BACKTRACE >= 0
   pid_t pid;                                /* The pid for caller */
@@ -182,13 +187,19 @@ struct mm_allocnode_s
   FAR void *backtrace[CONFIG_MM_BACKTRACE]; /* The backtrace buffer for caller */
 # endif
 #endif
-};
+}
+aligned_data(MM_ALIGN);
 
 /* This describes a free chunk */
 
 struct mm_freenode_s
 {
-  mmsize_t preceding;                       /* Physical preceding chunk size */
+  union
+  {
+    mmsize_t preceding;                     /* Physical preceding chunk size */
+    uint8_t align[MM_ALIGN];
+  };
+
   mmsize_t size;                            /* Size of this chunk */
 #if CONFIG_MM_BACKTRACE >= 0
   pid_t pid;                                /* The pid for caller */
@@ -199,7 +210,8 @@ struct mm_freenode_s
 #endif
   FAR struct mm_freenode_s *flink;          /* Supports a doubly linked list */
   FAR struct mm_freenode_s *blink;
-};
+}
+aligned_data(MM_ALIGN);
 
 static_assert(MM_SIZEOF_ALLOCNODE <= MM_MIN_CHUNK,
               "Error size for struct mm_allocnode_s\n");
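The struct change above pads the preceding field with a union so that it occupies MM_ALIGN bytes, and the trailing aligned_data(MM_ALIGN) (which on GCC-family compilers expands to an alignment attribute) rounds the whole node up to a multiple of MM_ALIGN, keeping it consistent with the new MM_ALLOCNODE_OVERHEAD definition. A toy standalone version of the same technique, with MM_ALIGN = 8 and uint32_t fields assumed, and the attribute written out directly:

/* Toy demonstration of the union + alignment-attribute padding used for
 * mm_allocnode_s/mm_freenode_s above.  MM_ALIGN = 8 and uint32_t fields
 * are illustrative assumptions; aligned_data() is written out as the
 * GCC/Clang attribute it expands to in NuttX.
 */

#include <assert.h>
#include <stddef.h>
#include <stdint.h>

#define MM_ALIGN 8

struct toy_node_s
{
  union
  {
    uint32_t preceding;          /* real field: previous chunk size  */
    uint8_t  align[MM_ALIGN];    /* pads the field to MM_ALIGN bytes */
  };

  uint32_t size;                 /* this chunk's size                */
} __attribute__((aligned(MM_ALIGN)));

/* Both the header size and the offset of the fields after `preceding`
 * are now multiples of MM_ALIGN, matching the MM_ALIGN-granular shadow.
 */

static_assert(sizeof(struct toy_node_s) % MM_ALIGN == 0,
              "node size must be a multiple of MM_ALIGN");
static_assert(offsetof(struct toy_node_s, size) == MM_ALIGN,
              "size field must start on an MM_ALIGN boundary");

int main(void)
{
  return 0;
}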
Changed file 4 of 4 (heap region initialization; file paths are not shown in this view):

@@ -150,7 +150,7 @@ void mm_addregion(FAR struct mm_heap_s *heap, FAR void *heapstart,
 
   heapbase = MM_ALIGN_UP((uintptr_t)heapstart + 2 * MM_SIZEOF_ALLOCNODE) -
              2 * MM_SIZEOF_ALLOCNODE;
-  heapsize = heapsize - (heapbase - (uintptr_t)heapstart);
+  heapsize = MM_ALIGN_DOWN(heapsize - (heapbase - (uintptr_t)heapstart));
 
   /* Register KASan for access rights check. We need to register after
    * address alignment.
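The mm_addregion() change rounds the usable heap size down as well as the base up, so the region handed to KASan both starts and ends on an MM_ALIGN boundary. A simplified standalone sketch of that arithmetic; the ALIGN_UP/ALIGN_DOWN helpers and MM_ALIGN = 8 are assumptions standing in for NuttX's MM_ALIGN_UP()/MM_ALIGN_DOWN():

/* Sketch of the base/size alignment done in mm_addregion().  MM_ALIGN = 8
 * and the helper names are assumptions standing in for NuttX's
 * MM_ALIGN_UP()/MM_ALIGN_DOWN().
 */

#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

#define MM_ALIGN         8u
#define ALIGN_UP(x, a)   (((x) + ((a) - 1)) & ~((uintptr_t)(a) - 1))
#define ALIGN_DOWN(x, a) ((x) & ~((uintptr_t)(a) - 1))

int main(void)
{
  uintptr_t heapstart = 0x20000004;        /* deliberately misaligned start */
  size_t    heapsize  = 0x10003;

  uintptr_t heapbase = ALIGN_UP(heapstart, MM_ALIGN);

  /* Round the size down too, so the end of the region is also an
   * MM_ALIGN boundary and every poison/unpoison range stays aligned.
   */

  size_t usable = ALIGN_DOWN(heapsize - (heapbase - heapstart), MM_ALIGN);

  printf("base=%#lx end=%#lx\n",
         (unsigned long)heapbase, (unsigned long)(heapbase + usable));
  return 0;
}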