Add asan integration to arenas.

This commit is contained in:
Alexander Kuzmenkov 2019-06-24 16:24:34 +03:00
parent 47e1537b20
commit f6ee2ea4e4
2 changed files with 37 additions and 1 deletion

View File

@ -5,6 +5,7 @@
#include <vector>
#include <boost/noncopyable.hpp>
#include <common/likely.h>
#include <sanitizer/asan_interface.h>
#include <Core/Defines.h>
#include <Common/memcpySmall.h>
#include <Common/ProfileEvents.h>
@ -53,10 +54,18 @@ private:
pos = begin;
end = begin + size_ - pad_right;
prev = prev_;
ASAN_POISON_MEMORY_REGION(begin, size_);
}
~Chunk()
{
/// We must unpoison the memory before returning to the allocator,
/// because the allocator might not have asan integration, and the
/// memory would stay poisoned forever. If the allocator supports
/// asan, it will correctly poison the memory by itself.
ASAN_UNPOISON_MEMORY_REGION(begin, size());
Allocator<false>::free(begin, size());
if (prev)
@ -126,6 +135,7 @@ public:
char * res = head->pos;
head->pos += size;
ASAN_UNPOISON_MEMORY_REGION(res, size + pad_right);
return res;
}
@ -142,6 +152,7 @@ public:
{
head->pos = static_cast<char *>(head_pos);
head->pos += size;
ASAN_UNPOISON_MEMORY_REGION(res, size + pad_right);
return res;
}
@ -161,6 +172,7 @@ public:
/// Give back the most recently allocated `size` bytes to the arena.
/// The reclaimed region (plus the right padding) is re-poisoned so that
/// ASan reports any subsequent use of the rolled-back memory.
void rollback(size_t size)
{
    char * const new_pos = head->pos - size;
    head->pos = new_pos;
    ASAN_POISON_MEMORY_REGION(new_pos, size + pad_right);
}
/** Begin or expand allocation of contiguous piece of memory without alignment.
@ -187,6 +199,7 @@ public:
if (!begin)
begin = res;
ASAN_UNPOISON_MEMORY_REGION(res, size + pad_right);
return res;
}
@ -218,6 +231,8 @@ public:
if (!begin)
begin = res;
ASAN_UNPOISON_MEMORY_REGION(res, size + pad_right);
return res;
}
@ -226,7 +241,10 @@ public:
{
char * res = alloc(new_size);
if (old_data)
{
memcpy(res, old_data, old_size);
ASAN_POISON_MEMORY_REGION(old_data, old_size);
}
return res;
}
@ -234,7 +252,10 @@ public:
{
char * res = alignedAlloc(new_size, alignment);
if (old_data)
{
memcpy(res, old_data, old_size);
ASAN_POISON_MEMORY_REGION(old_data, old_size);
}
return res;
}

View File

@ -1,5 +1,6 @@
#pragma once
#include <sanitizer/asan_interface.h>
#include <Common/Arena.h>
#include <Common/BitHelpers.h>
@ -63,7 +64,13 @@ public:
/// If there is a free block.
if (auto & free_block_ptr = free_lists[list_idx])
{
/// Let's take it. And change the head of the list to the next item in the list.
/// Let's take it. And change the head of the list to the next
/// item in the list. We poisoned the free block before putting
/// it into the free list, so we have to unpoison it before
/// reading anything.
ASAN_UNPOISON_MEMORY_REGION(free_block_ptr,
std::max(size, sizeof(Block)));
const auto res = free_block_ptr->data;
free_block_ptr = free_block_ptr->next;
return res;
@ -86,6 +93,14 @@ public:
const auto old_head = free_block_ptr;
free_block_ptr = reinterpret_cast<Block *>(ptr);
free_block_ptr->next = old_head;
/// The requested size may be less than the size of the block, but
/// we still want to poison the entire block.
/// Strictly speaking, the free blocks must be unpoisoned in
/// destructor, to support an underlying allocator that doesn't
/// integrate with asan. We don't do that, and rely on the fact that
/// our underlying allocator is Arena, which does have asan integration.
ASAN_POISON_MEMORY_REGION(ptr, 1ULL << (list_idx + 1));
}
/// Size of the allocated pool in bytes