Diffstat (limited to 'libsanitizer/asan/asan_allocator2.cc')
-rw-r--r--  libsanitizer/asan/asan_allocator2.cc  |  57
 1 file changed, 29 insertions(+), 28 deletions(-)
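In brief, the patch below migrates the allocator's internal interfaces from StackTrace* to BufferedStackTrace* and adopts the updated stack-depot API: StackDepotGet now returns a StackTrace by value instead of filling an out-parameter, and StackDepotPut takes a whole StackTrace rather than a (trace, size) pair.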
diff --git a/libsanitizer/asan/asan_allocator2.cc b/libsanitizer/asan/asan_allocator2.cc
index 78c1ec113a6..33d9fea70cb 100644
--- a/libsanitizer/asan/asan_allocator2.cc
+++ b/libsanitizer/asan/asan_allocator2.cc
@@ -180,20 +180,19 @@ uptr AsanChunkView::UsedSize() { return chunk_->UsedSize(); }
uptr AsanChunkView::AllocTid() { return chunk_->alloc_tid; }
uptr AsanChunkView::FreeTid() { return chunk_->free_tid; }
-static void GetStackTraceFromId(u32 id, StackTrace *stack) {
+static StackTrace GetStackTraceFromId(u32 id) {
CHECK(id);
- uptr size = 0;
- const uptr *trace = StackDepotGet(id, &size);
- CHECK(trace);
- stack->CopyFrom(trace, size);
+ StackTrace res = StackDepotGet(id);
+ CHECK(res.trace);
+ return res;
}
-void AsanChunkView::GetAllocStack(StackTrace *stack) {
- GetStackTraceFromId(chunk_->alloc_context_id, stack);
+StackTrace AsanChunkView::GetAllocStack() {
+ return GetStackTraceFromId(chunk_->alloc_context_id);
}
-void AsanChunkView::GetFreeStack(StackTrace *stack) {
- GetStackTraceFromId(chunk_->free_context_id, stack);
+StackTrace AsanChunkView::GetFreeStack() {
+ return GetStackTraceFromId(chunk_->free_context_id);
}
struct QuarantineCallback;
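For context on the hunk above: the old depot lookup returned the frame
pointer and wrote the frame count through an out-parameter, which forced a
copy into the caller's StackTrace; the new lookup returns both together. A
minimal sketch of the two call shapes, assuming pared-down declarations
rather than the real sanitizer_common headers:

  // Hypothetical, simplified view of the value type the depot returns.
  struct StackTrace {
    const uptr *trace;  // frames, owned by the depot's persistent storage
    uptr size;          // number of frames
  };

  // Before:  const uptr *trace = StackDepotGet(id, &size);
  //          stack->CopyFrom(trace, size);
  // After:   StackTrace res = StackDepotGet(id);
  //          CHECK(res.trace);  // a null trace still signals a failed lookup
  // No copy is needed; the caller returns the small struct by value.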
@@ -261,7 +260,7 @@ void ReInitializeAllocator() {
quarantine.Init((uptr)flags()->quarantine_size, kMaxThreadLocalQuarantine);
}
-static void *Allocate(uptr size, uptr alignment, StackTrace *stack,
+static void *Allocate(uptr size, uptr alignment, BufferedStackTrace *stack,
AllocType alloc_type, bool can_fill) {
if (UNLIKELY(!asan_inited))
AsanInitFromRtl();
@@ -353,7 +352,7 @@ static void *Allocate(uptr size, uptr alignment, StackTrace *stack,
meta[1] = chunk_beg;
}
- m->alloc_context_id = StackDepotPut(stack->trace, stack->size);
+ m->alloc_context_id = StackDepotPut(*stack);
uptr size_rounded_down_to_granularity = RoundDownTo(size, SHADOW_GRANULARITY);
// Unpoison the bulk of the memory region.
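The store side of the depot changes symmetrically: instead of unpacking the
trace into a pointer/length pair at every call site, the caller passes the
stack object itself. A sketch with an assumed signature, not quoted from the
headers:

  // Before:  u32 StackDepotPut(const uptr *trace, uptr size);
  //          m->alloc_context_id = StackDepotPut(stack->trace, stack->size);
  // After:   u32 StackDepotPut(StackTrace stack);
  //          m->alloc_context_id = StackDepotPut(*stack);
  // Passing *stack is valid if BufferedStackTrace derives from StackTrace;
  // the argument slices down to the base's (trace, size) view, which is
  // all the depot needs.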
@@ -389,15 +388,16 @@ static void *Allocate(uptr size, uptr alignment, StackTrace *stack,
return res;
}
-static void ReportInvalidFree(void *ptr, u8 chunk_state, StackTrace *stack) {
+static void ReportInvalidFree(void *ptr, u8 chunk_state,
+ BufferedStackTrace *stack) {
if (chunk_state == CHUNK_QUARANTINE)
ReportDoubleFree((uptr)ptr, stack);
else
ReportFreeNotMalloced((uptr)ptr, stack);
}
-static void AtomicallySetQuarantineFlag(AsanChunk *m,
- void *ptr, StackTrace *stack) {
+static void AtomicallySetQuarantineFlag(AsanChunk *m, void *ptr,
+ BufferedStackTrace *stack) {
u8 old_chunk_state = CHUNK_ALLOCATED;
// Flip the chunk_state atomically to avoid race on double-free.
if (!atomic_compare_exchange_strong((atomic_uint8_t*)m, &old_chunk_state,
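A side note on the hunk above: the chunk state is flipped with an atomic
compare-and-swap so that, of two racing frees, exactly one wins and the
loser falls through to ReportInvalidFree. A standalone sketch of that
pattern in portable C++ (hypothetical names, not the allocator's real
types):

  #include <atomic>
  #include <cstdio>

  enum ChunkState : unsigned char { ALLOCATED = 2, QUARANTINE = 3 };

  // Returns true only for the single caller that moves the chunk from
  // ALLOCATED to QUARANTINE; a repeated or concurrent free sees the CAS
  // fail and can report a double-free instead.
  bool TryQuarantine(std::atomic<unsigned char> *state) {
    unsigned char expected = ALLOCATED;
    return state->compare_exchange_strong(expected, QUARANTINE,
                                          std::memory_order_acquire);
  }

  int main() {
    std::atomic<unsigned char> state{ALLOCATED};
    std::printf("first free:  %d\n", TryQuarantine(&state));   // 1
    std::printf("second free: %d\n", TryQuarantine(&state));   // 0
  }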
@@ -408,8 +408,8 @@ static void AtomicallySetQuarantineFlag(AsanChunk *m,
// Expects the chunk to already be marked as quarantined by using
// AtomicallySetQuarantineFlag.
-static void QuarantineChunk(AsanChunk *m, void *ptr,
- StackTrace *stack, AllocType alloc_type) {
+static void QuarantineChunk(AsanChunk *m, void *ptr, BufferedStackTrace *stack,
+ AllocType alloc_type) {
CHECK_EQ(m->chunk_state, CHUNK_QUARANTINE);
if (m->alloc_type != alloc_type && flags()->alloc_dealloc_mismatch)
@@ -421,7 +421,7 @@ static void QuarantineChunk(AsanChunk *m, void *ptr,
CHECK_EQ(m->free_tid, kInvalidTid);
AsanThread *t = GetCurrentThread();
m->free_tid = t ? t->tid() : 0;
- m->free_context_id = StackDepotPut(stack->trace, stack->size);
+ m->free_context_id = StackDepotPut(*stack);
// Poison the region.
PoisonShadow(m->Beg(),
RoundUpTo(m->UsedSize(), SHADOW_GRANULARITY),
@@ -445,7 +445,7 @@ static void QuarantineChunk(AsanChunk *m, void *ptr,
}
}
-static void Deallocate(void *ptr, uptr delete_size, StackTrace *stack,
+static void Deallocate(void *ptr, uptr delete_size, BufferedStackTrace *stack,
AllocType alloc_type) {
uptr p = reinterpret_cast<uptr>(ptr);
if (p == 0) return;
@@ -462,7 +462,8 @@ static void Deallocate(void *ptr, uptr delete_size, StackTrace *stack,
QuarantineChunk(m, ptr, stack, alloc_type);
}
-static void *Reallocate(void *old_ptr, uptr new_size, StackTrace *stack) {
+static void *Reallocate(void *old_ptr, uptr new_size,
+ BufferedStackTrace *stack) {
CHECK(old_ptr && new_size);
uptr p = reinterpret_cast<uptr>(old_ptr);
uptr chunk_beg = p - kChunkHeaderSize;
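The p - kChunkHeaderSize arithmetic above reflects the allocator's layout:
per-chunk metadata sits immediately before the user pointer, so the header
is recovered by stepping back a constant. A toy illustration (hypothetical
header type, not ASan's real AsanChunk):

  #include <cstdint>

  struct Header {  // assumed stand-in for the real chunk header
    std::uint32_t state;
    std::uint32_t alloc_tid;
  };

  // Map a user pointer back to the header placed directly in front of it.
  Header *HeaderOf(void *user_ptr) {
    return reinterpret_cast<Header *>(
        reinterpret_cast<std::uintptr_t>(user_ptr) - sizeof(Header));
  }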
@@ -575,25 +576,25 @@ void PrintInternalAllocatorStats() {
allocator.PrintStats();
}
-void *asan_memalign(uptr alignment, uptr size, StackTrace *stack,
+void *asan_memalign(uptr alignment, uptr size, BufferedStackTrace *stack,
AllocType alloc_type) {
return Allocate(size, alignment, stack, alloc_type, true);
}
-void asan_free(void *ptr, StackTrace *stack, AllocType alloc_type) {
+void asan_free(void *ptr, BufferedStackTrace *stack, AllocType alloc_type) {
Deallocate(ptr, 0, stack, alloc_type);
}
-void asan_sized_free(void *ptr, uptr size, StackTrace *stack,
+void asan_sized_free(void *ptr, uptr size, BufferedStackTrace *stack,
AllocType alloc_type) {
Deallocate(ptr, size, stack, alloc_type);
}
-void *asan_malloc(uptr size, StackTrace *stack) {
+void *asan_malloc(uptr size, BufferedStackTrace *stack) {
return Allocate(size, 8, stack, FROM_MALLOC, true);
}
-void *asan_calloc(uptr nmemb, uptr size, StackTrace *stack) {
+void *asan_calloc(uptr nmemb, uptr size, BufferedStackTrace *stack) {
if (CallocShouldReturnNullDueToOverflow(size, nmemb))
return AllocatorReturnNull();
void *ptr = Allocate(nmemb * size, 8, stack, FROM_MALLOC, false);
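Note the guard above the multiplication: nmemb * size is only computed after
CallocShouldReturnNullDueToOverflow has ruled out wraparound. The check
presumably amounts to the standard division test; a self-contained version:

  #include <cstdint>
  #include <limits>

  // True when nmemb * size would overflow, i.e. when calloc must fail
  // rather than allocate a block smaller than requested.
  bool CallocOverflows(std::uintptr_t nmemb, std::uintptr_t size) {
    if (nmemb == 0) return false;
    return size > std::numeric_limits<std::uintptr_t>::max() / nmemb;
  }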
@@ -604,7 +605,7 @@ void *asan_calloc(uptr nmemb, uptr size, StackTrace *stack) {
return ptr;
}
-void *asan_realloc(void *p, uptr size, StackTrace *stack) {
+void *asan_realloc(void *p, uptr size, BufferedStackTrace *stack) {
if (p == 0)
return Allocate(size, 8, stack, FROM_MALLOC, true);
if (size == 0) {
@@ -614,11 +615,11 @@ void *asan_realloc(void *p, uptr size, StackTrace *stack) {
return Reallocate(p, size, stack);
}
-void *asan_valloc(uptr size, StackTrace *stack) {
+void *asan_valloc(uptr size, BufferedStackTrace *stack) {
return Allocate(size, GetPageSizeCached(), stack, FROM_MALLOC, true);
}
-void *asan_pvalloc(uptr size, StackTrace *stack) {
+void *asan_pvalloc(uptr size, BufferedStackTrace *stack) {
uptr PageSize = GetPageSizeCached();
size = RoundUpTo(size, PageSize);
if (size == 0) {
@@ -629,7 +630,7 @@ void *asan_pvalloc(uptr size, StackTrace *stack) {
}
int asan_posix_memalign(void **memptr, uptr alignment, uptr size,
- StackTrace *stack) {
+ BufferedStackTrace *stack) {
void *ptr = Allocate(size, alignment, stack, FROM_MALLOC, true);
CHECK(IsAligned((uptr)ptr, alignment));
*memptr = ptr;
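Finally, on why every entry point can tighten its parameter from StackTrace*
to BufferedStackTrace* without changing any call in this file: in
sanitizer_common a BufferedStackTrace is, roughly, a StackTrace plus owned
storage for the frames, so it converts to the base wherever one is expected.
A rough sketch of the relationship (member and constant names assumed):

  struct StackTrace {
    const uptr *trace;
    uptr size;
  };

  struct BufferedStackTrace : public StackTrace {
    uptr trace_buffer[kStackTraceMax];  // frames live inside the object
    // Unwind(...) fills trace_buffer and points trace at it.
  };

Requiring the buffered type documents that callers pass a trace captured
into local storage (typically via ASan's GET_STACK_TRACE_* macros), while
StackDepotPut still only needs the base (trace, size) view.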