#pragma once

#include <string.h>
#include <cassert>
#include <memory>
#include <vector>
#include <boost/noncopyable.hpp>
#include <Core/Defines.h>
#if __has_include(<sanitizer/asan_interface.h>) && defined(ADDRESS_SANITIZER)
#    include <sanitizer/asan_interface.h>
#endif
#include <Common/memcpySmall.h>
#include <Common/ProfileEvents.h>
#include <Common/Allocator.h>


namespace ProfileEvents
{
    extern const Event ArenaAllocChunks;
    extern const Event ArenaAllocBytes;
}

namespace DB
{

/** Memory pool to append something. For example, short strings.
  * Usage scenario:
  * - put a lot of strings into the pool and keep their addresses;
  * - the addresses remain valid for the lifetime of the pool;
  * - when the pool is destroyed, all memory is freed;
  * - memory is allocated and freed in large MemoryChunks;
  * - freeing individual parts of the data is not possible (if you need that, look at ArenaWithFreeLists);
  */
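/// Illustrative sketch (not part of the original header) of the usage scenario above;
/// 'pool', 'key' and 'counter' are hypothetical names used only for the example:
///
///     Arena pool;
///     const char * key = pool.insert("hello", 5);  /// the copy stays valid while 'pool' lives
///     UInt64 * counter = pool.alloc<UInt64>();      /// aligned allocation of a POD object
///     *counter = 0;
///     /// ... use 'key' and 'counter'; all memory is freed when 'pool' is destroyed.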
class Arena : private boost::noncopyable
{
private:
    /// Padding allows using 'memcpySmallAllowReadWriteOverflow15' instead of 'memcpy'.
    static constexpr size_t pad_right = 15;

    /// Contiguous MemoryChunk of memory and pointer to free space inside it. Member of singly-linked list.
    struct alignas(16) MemoryChunk : private Allocator<false>    /// empty base optimization
    {
        char * begin;
        char * pos;
        char * end; /// does not include padding.

        MemoryChunk * prev;

        MemoryChunk(size_t size_, MemoryChunk * prev_)
        {
            ProfileEvents::increment(ProfileEvents::ArenaAllocChunks);
            ProfileEvents::increment(ProfileEvents::ArenaAllocBytes, size_);

            begin = reinterpret_cast<char *>(Allocator<false>::alloc(size_));
            pos = begin;
            end = begin + size_ - pad_right;
            prev = prev_;

            ASAN_POISON_MEMORY_REGION(begin, size_);
        }

        ~MemoryChunk()
        {
            /// We must unpoison the memory before returning it to the allocator,
            /// because the allocator might not have asan integration, and the
            /// memory would stay poisoned forever. If the allocator supports
            /// asan, it will correctly poison the memory by itself.
            ASAN_UNPOISON_MEMORY_REGION(begin, size());

            Allocator<false>::free(begin, size());

            if (prev)
                delete prev;
        }

        size_t size() const { return end + pad_right - begin; }
        size_t remaining() const { return end - pos; }
    };

    size_t growth_factor;
    size_t linear_growth_threshold;

    /// Last contiguous MemoryChunk of memory.
    MemoryChunk * head;
    size_t size_in_bytes;
    size_t page_size;

    static size_t roundUpToPageSize(size_t s, size_t page_size)
    {
        return (s + page_size - 1) / page_size * page_size;
    }

    /// If the size of the MemoryChunk is less than 'linear_growth_threshold', use exponential growth,
    /// otherwise use linear growth (to not allocate too much excess memory).
    size_t nextSize(size_t min_next_size) const
    {
        size_t size_after_grow = 0;

        if (head->size() < linear_growth_threshold)
        {
            size_after_grow = std::max(min_next_size, head->size() * growth_factor);
        }
        else
        {
            // allocContinue() combined with linear growth results in quadratic
            // behavior: we append the data by small amounts, and when it
            // doesn't fit, we create a new MemoryChunk and copy all the previous data
            // into it. The number of times we do this is directly proportional
            // to the total size of data that is going to be serialized. To make
            // the copying happen less often, round the next size up to the
            // linear_growth_threshold.
            size_after_grow = ((min_next_size + linear_growth_threshold - 1)
                    / linear_growth_threshold) * linear_growth_threshold;
        }

        assert(size_after_grow >= min_next_size);
        return roundUpToPageSize(size_after_grow, page_size);
    }

    /// Add next contiguous MemoryChunk of memory with size not less than specified.
    void NO_INLINE addMemoryChunk(size_t min_size)
    {
        head = new MemoryChunk(nextSize(min_size + pad_right), head);
        size_in_bytes += head->size();
    }

    friend class ArenaAllocator;
    template <size_t> friend class AlignedArenaAllocator;

public:
    explicit Arena(size_t initial_size_ = 4096, size_t growth_factor_ = 2, size_t linear_growth_threshold_ = 128 * 1024 * 1024)
        : growth_factor(growth_factor_), linear_growth_threshold(linear_growth_threshold_),
        head(new MemoryChunk(initial_size_, nullptr)), size_in_bytes(head->size()),
        page_size(static_cast<size_t>(::getPageSize()))
    {
    }

    ~Arena()
    {
        delete head;
    }

    /// Get piece of memory, without alignment.
    char * alloc(size_t size)
    {
        if (unlikely(head->pos + size > head->end))
            addMemoryChunk(size);

        char * res = head->pos;
        head->pos += size;
        ASAN_UNPOISON_MEMORY_REGION(res, size + pad_right);
        return res;
    }

    /// Get piece of memory with alignment.
    char * alignedAlloc(size_t size, size_t alignment)
    {
        do
        {
            void * head_pos = head->pos;
            size_t space = head->end - head->pos;

            auto * res = static_cast<char *>(std::align(alignment, size, head_pos, space));
            if (res)
            {
                head->pos = static_cast<char *>(head_pos);
                head->pos += size;
                ASAN_UNPOISON_MEMORY_REGION(res, size + pad_right);
                return res;
            }

            addMemoryChunk(size + alignment);
        } while (true);
    }

    template <typename T>
    T * alloc()
    {
        return reinterpret_cast<T *>(alignedAlloc(sizeof(T), alignof(T)));
    }

    /** Rollback the just performed allocation.
      * The size passed must not be greater than what was just allocated.
      * Return the resulting head pointer, so that the caller can assert that
      * the allocation it intended to roll back was indeed the last one.
      */
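    /// Illustrative sketch (not part of the original header): an alloc/rollback pair, using the
    /// returned pointer to check that the rolled-back allocation was indeed the last one:
    ///
    ///     char * res = arena.alloc(16);
    ///     /// ... the 16 bytes turn out not to be needed ...
    ///     void * new_head = arena.rollback(16);
    ///     assert(new_head == res);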
    void * rollback(size_t size)
    {
        head->pos -= size;
        ASAN_POISON_MEMORY_REGION(head->pos, size + pad_right);
        return head->pos;
    }

    /** Begin or expand a contiguous range of memory.
      * 'range_start' is the start of the range. If nullptr, a new range is
      * allocated.
      * If there is no space in the current MemoryChunk to expand the range,
      * the entire range is copied to a new, bigger MemoryChunk, and the value
      * of 'range_start' is updated.
      * If the optional 'start_alignment' is specified, the start of the range is
      * kept aligned to this value.
      *
      * NOTE This method is usable only for the last allocation made on this
      * Arena. For earlier allocations, see the 'realloc' method.
      */
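    /// Illustrative sketch (not part of the original header): appending several pieces into one
    /// contiguous region, e.g. when serializing data of unknown total size; 'pieces' is a
    /// hypothetical container of std::string_view used only for the example:
    ///
    ///     const char * range_start = nullptr;
    ///     for (const auto & piece : pieces)
    ///     {
    ///         char * dest = arena.allocContinue(piece.size(), range_start);
    ///         memcpy(dest, piece.data(), piece.size());
    ///     }
    ///     /// 'range_start' now points to all the pieces laid out contiguously in the arena.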
    char * allocContinue(size_t additional_bytes, char const *& range_start,
        size_t start_alignment = 0)
    {
        /*
         * Allocating zero bytes doesn't make much sense. Also, a zero-sized
         * range might break the invariant that the range begins at least before
         * the current MemoryChunk end.
         */
        assert(additional_bytes > 0);

        if (!range_start)
        {
            // Start a new memory range.
            char * result = start_alignment
                ? alignedAlloc(additional_bytes, start_alignment)
                : alloc(additional_bytes);

            range_start = result;
            return result;
        }

        // Extend an existing memory range with 'additional_bytes'.

        // This method only works for extending the last allocation. For lack of
        // original size, check a weaker condition: that 'begin' is at least in
        // the current MemoryChunk.
        assert(range_start >= head->begin);
        assert(range_start < head->end);

        if (head->pos + additional_bytes <= head->end)
        {
            // The new size fits into the last MemoryChunk, so just alloc the
            // additional size. We can alloc without alignment here, because it
            // only applies to the start of the range, and we don't change it.
            return alloc(additional_bytes);
        }

        // New range doesn't fit into this MemoryChunk, will copy to a new one.
        //
        // Note: among other things, this method is used to provide a hack-ish
        // implementation of realloc over Arenas in ArenaAllocators. It wastes a
        // lot of memory -- quadratically so when we reach the linear allocation
        // threshold. This deficiency is intentionally left as is, and should be
        // solved not by complicating this method, but by rethinking the
        // approach to memory management for aggregate function states, so that
        // we can provide a proper realloc().
        const size_t existing_bytes = head->pos - range_start;
        const size_t new_bytes = existing_bytes + additional_bytes;
        const char * old_range = range_start;

        char * new_range = start_alignment
            ? alignedAlloc(new_bytes, start_alignment)
            : alloc(new_bytes);

        memcpy(new_range, old_range, existing_bytes);

        range_start = new_range;
        return new_range + existing_bytes;
    }

    /// NOTE Old memory region is wasted.
    char * realloc(const char * old_data, size_t old_size, size_t new_size)
    {
        char * res = alloc(new_size);
        if (old_data)
        {
            memcpy(res, old_data, old_size);
            ASAN_POISON_MEMORY_REGION(old_data, old_size);
        }
        return res;
    }

    char * alignedRealloc(const char * old_data, size_t old_size, size_t new_size, size_t alignment)
    {
        char * res = alignedAlloc(new_size, alignment);
        if (old_data)
        {
            memcpy(res, old_data, old_size);
            ASAN_POISON_MEMORY_REGION(old_data, old_size);
        }
        return res;
    }

    /// Insert string without alignment.
    const char * insert(const char * data, size_t size)
    {
        char * res = alloc(size);
        memcpy(res, data, size);
        return res;
    }

    const char * alignedInsert(const char * data, size_t size, size_t alignment)
    {
        char * res = alignedAlloc(size, alignment);
        memcpy(res, data, size);
        return res;
    }

    /// Total size of all MemoryChunks, in bytes.
    size_t size() const
    {
        return size_in_bytes;
    }

    /// Bad method, don't use it -- the MemoryChunks are not your business, the entire
    /// purpose of the arena code is to manage them for you, so if you find
    /// yourself having to use this method, probably you're doing something wrong.
    size_t remainingSpaceInCurrentMemoryChunk() const
    {
        return head->remaining();
    }
};


using ArenaPtr = std::shared_ptr<Arena>;
using Arenas = std::vector<ArenaPtr>;


}