Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Improve slow path performance for allocation #143

Merged
merged 39 commits into from
Mar 31, 2020
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
39 commits
Select commit Hold shift + click to select a range
1e454cc
Remote dealloc refactor.
mjp41 Mar 9, 2020
91b7e08
Clang format
mjp41 Mar 9, 2020
ffb7b82
Clang format again.
mjp41 Mar 16, 2020
78f40d4
Improve remote dealloc
mjp41 Mar 9, 2020
a22e438
CR feedback.
mjp41 Mar 10, 2020
0274b71
Clang format.
mjp41 Mar 10, 2020
d080654
Inline remote_dealloc
mjp41 Mar 10, 2020
bac6336
Improve fast path in Slab::alloc
mjp41 Mar 10, 2020
a52aca6
Refactor initialisation to help fast path.
mjp41 Mar 10, 2020
bd8c443
Fixup
mjp41 Mar 10, 2020
267a726
Minor tidy to statically sized dealloc.
mjp41 Mar 10, 2020
52c0ff0
Refactor semi-slow path for alloc
mjp41 Mar 10, 2020
e563bfb
Test initial operation of a thread
mjp41 Mar 12, 2020
a1d139c
Correctly handle reusing get_noncachable
mjp41 Mar 12, 2020
f9e0f64
Fix large alloc stats
mjp41 Mar 12, 2020
37d7e15
Fix TLS init on large alloc path
mjp41 Mar 12, 2020
075874e
Fixup slab refactor
mjp41 Mar 12, 2020
68b49df
Minor refactor.
mjp41 Mar 12, 2020
f8b77a8
Minor refactor
mjp41 Mar 12, 2020
d656232
Add Bump ptrs to allocator
mjp41 Mar 12, 2020
54dcb20
Bug fix
mjp41 Mar 13, 2020
bd19484
Change to a cycle non-empty list
mjp41 Mar 13, 2020
ed69bbb
Comments.
mjp41 Mar 16, 2020
06032f2
Update differences
mjp41 Mar 16, 2020
941e28a
Rename first allocation
mjp41 Mar 16, 2020
8a8a2f6
Fixup for thread alloc.
mjp41 Mar 16, 2020
2215815
Clangformat + CR feedback
mjp41 Mar 16, 2020
a857b92
More CR
mjp41 Mar 16, 2020
841314e
Revert "More CR"
mjp41 Mar 16, 2020
baff3ef
CR attempt two.
mjp41 Mar 16, 2020
6bf7115
Fix assert
mjp41 Mar 16, 2020
193e27a
Bug fix found by CI.
mjp41 Mar 16, 2020
6d60feb
Clang tidy.
mjp41 Mar 16, 2020
04e74c4
Merge branch 'master' into alloc_slow_optimise
mjp41 Mar 19, 2020
0c40c84
Use a ptrdiff to help with zero init.
mjp41 Mar 25, 2020
65bb8c1
Make GlobalPlaceholder zero init
mjp41 Mar 25, 2020
50486c0
Comment.
mjp41 Mar 25, 2020
4fd24db
Merge remote-tracking branch 'origin/master' into alloc_slow_optimise
mjp41 Mar 25, 2020
4b19611
Clang format.
mjp41 Mar 25, 2020
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 4 additions & 1 deletion difference.md
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,10 @@ This document outlines the changes that have diverged from
4. We now store a direct pointer to the next element in each slabs free list
rather than a relative offset into the slab. This enables list
calculation on the fast path.


5. There is a single bump-ptr per size class that is part of the
allocator structure. The per-size-class slab list now contains only slabs
with a free list; slabs that have just a bump ptr are not included.

[2-4] Are changes that are directly inspired by
[mimalloc](http://github.com/microsoft/mimalloc).
20 changes: 20 additions & 0 deletions src/ds/address.h
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,15 @@ namespace snmalloc
return reinterpret_cast<T*>(reinterpret_cast<char*>(base) + diff);
}

/**
 * Adjust a typed pointer by a signed byte offset and return the
 * resulting pointer (of the same type).
 */
template<typename T>
inline T* pointer_offset_signed(T* base, ptrdiff_t diff)
{
  char* byte_ptr = reinterpret_cast<char*>(base);
  return reinterpret_cast<T*>(byte_ptr + diff);
}

/**
* Cast from a pointer type to an address.
*/
Expand Down Expand Up @@ -115,4 +124,15 @@ namespace snmalloc
return static_cast<size_t>(
static_cast<char*>(cursor) - static_cast<char*>(base));
}

/**
 * Compute the signed distance, in units of char, from `base` to
 * `cursor`. Unlike an unsigned difference, this remains meaningful
 * when the two pointers come from different allocations.
 */
inline ptrdiff_t pointer_diff_signed(void* base, void* cursor)
{
  char* c = static_cast<char*>(cursor);
  char* b = static_cast<char*>(base);
  return static_cast<ptrdiff_t>(c - b);
}

} // namespace snmalloc
4 changes: 2 additions & 2 deletions src/ds/bits.h
Original file line number Diff line number Diff line change
Expand Up @@ -329,7 +329,7 @@ namespace snmalloc
*
* `std::min` is in `<algorithm>`, so pulls in a lot of unneccessary code
* We write our own to reduce the code that potentially needs reviewing.
**/
*/
template<typename T>
constexpr inline T min(T t1, T t2)
{
Expand All @@ -341,7 +341,7 @@ namespace snmalloc
*
* `std::max` is in `<algorithm>`, so pulls in a lot of unneccessary code
* We write our own to reduce the code that potentially needs reviewing.
**/
*/
template<typename T>
constexpr inline T max(T t1, T t2)
{
Expand Down
122 changes: 122 additions & 0 deletions src/ds/cdllist.h
Original file line number Diff line number Diff line change
@@ -0,0 +1,122 @@
#pragma once

#include "defines.h"

#include <cstdint>
#include <type_traits>

namespace snmalloc
{
/**
* Special class for cyclic doubly linked non-empty linked list
*
* This code assumes there is always one element in the list. The client
* must ensure there is a sentinel element.
*/
class CDLLNode
{
/**
* to_next is used to handle a zero initialised data structure.
* This means that `is_empty` works even when the constructor hasn't
* been run.
*/
ptrdiff_t to_next = 0;

// TODO: CHERI will need a real pointer too
// CDLLNode* next = nullptr;
CDLLNode* prev = nullptr;

/**
* Store the successor as a signed byte offset from `this` (see
* `to_next` above). An offset of zero therefore means the node
* points at itself.
*/
void set_next(CDLLNode* c)
{
// TODO: CHERI will need a real pointer too
// next = c;
to_next = pointer_diff_signed(this, c);
}

public:
/**
* Single element cyclic list. This is the empty case.
*/
CDLLNode()
{
set_next(this);
prev = this;
}

/**
* Returns true when the node's successor offset is zero, i.e. the
* node links to itself. This also holds for a zero-initialised node
* whose constructor has not run (see `to_next`).
*/
SNMALLOC_FAST_PATH bool is_empty()
{
return to_next == 0;
}

/**
* Removes this element from the cyclic list it is part of.
* Must not be called on an empty (self-linked) node.
*/
SNMALLOC_FAST_PATH void remove()
{
SNMALLOC_ASSERT(!is_empty());
debug_check();
get_next()->prev = prev;
prev->set_next(get_next());
// As this is no longer in the list, check invariant for
// neighbouring element.
get_next()->debug_check();

#ifndef NDEBUG
// Poison this node's links in debug builds so that accidental reuse
// after removal is caught.
set_next(nullptr);
prev = nullptr;
#endif
}

/**
* Returns the successor, reconstructed from the signed offset
* relative to `this`.
*/
SNMALLOC_FAST_PATH CDLLNode* get_next()
{
// TODO: CHERI will require a real pointer
// return next;
return pointer_offset_signed(this, to_next);
}

// Returns the predecessor (stored as an ordinary pointer).
SNMALLOC_FAST_PATH CDLLNode* get_prev()
{
return prev;
}

/**
* Inserts `item` immediately after this node.
* Statement order matters: `item`'s links are set before this node's
* `set_next`, because `get_next()` must still observe the old
* successor until the final update.
*/
SNMALLOC_FAST_PATH void insert_next(CDLLNode* item)
{
debug_check();
item->set_next(get_next());
get_next()->prev = item;
item->prev = this;
set_next(item);
debug_check();
}

/**
* Inserts `item` immediately before this node.
* As with `insert_next`, the neighbours' links are rewritten before
* this node's own `prev` is updated.
*/
SNMALLOC_FAST_PATH void insert_prev(CDLLNode* item)
{
debug_check();
item->prev = prev;
prev->set_next(item);
item->set_next(this);
prev = item;
debug_check();
}

/**
* Checks the lists invariants
* x->next->prev = x
* for all x in the list.
* Compiled out entirely in release (NDEBUG) builds.
*/
void debug_check()
{
#ifndef NDEBUG
CDLLNode* item = get_next();
CDLLNode* p = this;

do
{
SNMALLOC_ASSERT(item->prev == p);
p = item;
item = item->get_next();
} while (item != this);
#endif
}
};
} // namespace snmalloc
8 changes: 4 additions & 4 deletions src/ds/dllist.h
Original file line number Diff line number Diff line change
Expand Up @@ -94,12 +94,12 @@ namespace snmalloc
return *this;
}

bool is_empty()
// Returns true when the list contains no elements, i.e. `head` still
// holds the terminator value.
SNMALLOC_FAST_PATH bool is_empty()
{
return head == Terminator();
}

T* get_head()
// Returns the first element of the list without removing it
// (the terminator value when the list is empty).
SNMALLOC_FAST_PATH T* get_head()
{
return head;
}
Expand All @@ -109,7 +109,7 @@ namespace snmalloc
return tail;
}

T* pop()
SNMALLOC_FAST_PATH T* pop()
{
T* item = head;

Expand Down Expand Up @@ -169,7 +169,7 @@ namespace snmalloc
#endif
}

void remove(T* item)
SNMALLOC_FAST_PATH void remove(T* item)
{
#ifndef NDEBUG
debug_check_contains(item);
Expand Down
2 changes: 1 addition & 1 deletion src/ds/helpers.h
Original file line number Diff line number Diff line change
Expand Up @@ -48,7 +48,7 @@ namespace snmalloc
*
* Wraps on read. This allows code to trust the value is in range, even when
* there is a memory corruption.
**/
*/
template<size_t length, typename T>
class Mod
{
Expand Down
Loading