unique_range.h
#ifndef INCLUDED_ALLOCATORS_UNIQUE_RANGE
#define INCLUDED_ALLOCATORS_UNIQUE_RANGE

#include "lib/lib_api.h"
#include "lib/alignment.h"	// allocationAlignment
#include "lib/sysdep/vm.h"

// we usually don't hold multiple references to allocations, so unique_ptr
// can be used instead of the more complex (ICC generated incorrect code on
// 2 occasions) and expensive shared_ptr.
// a custom deleter is required because allocators such as ReserveAddressSpace need to
// pass the size to their deleter. we want to mix pointers from various allocators, but
// unique_ptr's deleter is fixed at compile-time, so it would need to be general enough
// to handle all allocators.
// storing the size and a function pointer would be one such solution, with the added
// bonus of no longer requiring a complete type at the invocation of ~unique_ptr.
// however, this inflates the pointer size to 3 words. if only a few allocator types
// are needed, we can replace the function pointer with an index stashed into the
// lower bits of the pointer (safe because all allocations' addresses are multiples
// of allocationAlignment).
typedef intptr_t IdxDeleter;
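
// Editor's note (not part of the original header): a worked example of the scheme
// described above, assuming for illustration that allocationAlignment == 16. An
// allocation at address 0x1000 tagged with deleter index 3 is stored as
// 0x1000 | 3 == 0x1003; masking with ~(16-1) recovers the address 0x1000, and
// 0x1003 % 16 recovers the index 3. UniqueRange::get() and get_deleter() below do
// exactly this with allocationAlignment.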

// no-op deleter (use when returning part of an existing allocation)
static const IdxDeleter idxDeleterNone = 0;

typedef void (*UniqueRangeDeleter)(void* pointer, size_t size);

/**
 * register a deleter, returning its index within the table.
 *
 * @param deleter function pointer. must be uniquely associated with
 * the idxDeleter storage location.
 * @param idxDeleter location where to store the next available index.
 * if it is already non-zero, skip the call to this function to
 * avoid overhead.
 *
 * thread-safe. idxDeleter is used for mutual exclusion between
 * multiple callers for the same deleter. concurrent registration of
 * different deleters is also safe due to atomic increments.
 *
 * halts the program if more than allocationAlignment deleters are
 * to be registered.
 **/
LIB_API void RegisterUniqueRangeDeleter(UniqueRangeDeleter deleter, volatile IdxDeleter* idxDeleter);

LIB_API NOTHROW_DECLARE void CallUniqueRangeDeleter(void* pointer, size_t size, IdxDeleter idxDeleter);
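
// Editor's sketch (not part of the original header): the lazy-registration pattern the
// doc comment above recommends. FreeArena and s_idxArenaDeleter are hypothetical names
// for some allocator's deleter and the cached index associated with it; real code would
// keep the index in a single translation unit.
static inline void FreeArena(void* pointer, size_t size)
{
	// hypothetical: return [pointer, pointer+size) to its arena
	(void)pointer;
	(void)size;
}

static volatile IdxDeleter s_idxArenaDeleter;	// zero-initialized, i.e. not yet registered

static inline IdxDeleter ArenaDeleterIdx()
{
	// only pay for the (thread-safe) registration on first use
	if(s_idxArenaDeleter == 0)
		RegisterUniqueRangeDeleter(FreeArena, &s_idxArenaDeleter);
	return s_idxArenaDeleter;
}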


// unfortunately, unique_ptr allows constructing without a custom deleter. to ensure callers can
// rely upon pointers being associated with a size, we introduce a `UniqueRange' replacement.
// its interface is identical to unique_ptr except for the constructors, the addition of
// size() and the removal of operator bool (which avoids implicit casts to int).
class UniqueRange
{
public:
	typedef void* pointer;
	typedef void element_type;

	UniqueRange()
	{
		Clear();
	}

	UniqueRange(pointer p, size_t size, IdxDeleter deleter)
	{
		Set(p, size, deleter);
	}

	UniqueRange(RVALUE_REF(UniqueRange) rvalue)
	{
		Pilfer(LVALUE(rvalue));
	}

	UniqueRange& operator=(RVALUE_REF(UniqueRange) rvalue)
	{
		UniqueRange& lvalue = LVALUE(rvalue);
		if(this != &lvalue)
		{
			Delete();
			Pilfer(lvalue);
		}
		return *this;
	}

	~UniqueRange()
	{
		Delete();
	}

	pointer get() const
	{
		return pointer(address_ & ~(allocationAlignment-1));
	}

	IdxDeleter get_deleter() const
	{
		return IdxDeleter(address_ % allocationAlignment);
	}

	size_t size() const
	{
		return size_;
	}

	// side effect: subsequent get_deleter will return idxDeleterNone
	pointer release()	// relinquish ownership
	{
		pointer ret = get();
		Clear();
		return ret;
	}

	void reset()
	{
		Delete();
		Clear();
	}

	void reset(pointer p, size_t size, IdxDeleter deleter)
	{
		Delete();
		Set(p, size, deleter);
	}

	void swap(UniqueRange& rhs)
	{
		std::swap(address_, rhs.address_);
		std::swap(size_, rhs.size_);
	}

	// don't define construction and assignment from lvalue,
	// but the declarations must be accessible
	UniqueRange(const UniqueRange&);
	UniqueRange& operator=(const UniqueRange&);

private:
	void Set(pointer p, size_t size, IdxDeleter deleter)
	{
		ASSERT((uintptr_t(p) % allocationAlignment) == 0);
		ASSERT(size_t(deleter) < allocationAlignment);

		address_ = uintptr_t(p) | deleter;
		size_ = size;

		ASSERT(get() == p);
		ASSERT(get_deleter() == deleter);
		ASSERT(this->size() == size);
	}

	void Clear()
	{
		Set(0, 0, idxDeleterNone);
	}

	void Pilfer(UniqueRange& victim)
	{
		const size_t size = victim.size();
		const IdxDeleter idxDeleter = victim.get_deleter();
		pointer p = victim.release();
		Set(p, size, idxDeleter);
		victim.Clear();
	}

	void Delete()
	{
		CallUniqueRangeDeleter(get(), size(), get_deleter());
	}

	// (IdxDeleter is stored in the lower bits of address since size might not even be a multiple of 4.)
	uintptr_t address_;
	size_t size_;
};
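
// Editor's usage sketch (not part of the original header), assuming `existingStorage' is
// part of an allocation already owned elsewhere and aligned to allocationAlignment
// (Set() asserts this): the no-op deleter lets a UniqueRange describe such a sub-range,
// and the interface then reads like unique_ptr plus size().
static inline void UniqueRangeUsageSketch(void* existingStorage, size_t size)
{
	UniqueRange range(existingStorage, size, idxDeleterNone);
	ASSERT(range.get() == existingStorage);
	ASSERT(range.size() == size);
	range.reset();	// nothing to free with idxDeleterNone; the range is now empty
}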

namespace std {

static inline void swap(UniqueRange& p1, UniqueRange& p2)
{
	p1.swap(p2);
}

static inline void swap(RVALUE_REF(UniqueRange) p1, UniqueRange& p2)
{
	p2.swap(LVALUE(p1));
}

static inline void swap(UniqueRange& p1, RVALUE_REF(UniqueRange) p2)
{
	p1.swap(LVALUE(p2));
}

}

LIB_API UniqueRange AllocateAligned(size_t size, size_t alignment);

LIB_API UniqueRange AllocateVM(size_t size, vm::PageType pageSize = vm::kDefault, int prot = PROT_READ|PROT_WRITE);
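
// Editor's sketch (not part of the original header) of the convenience allocators above.
// Assumes a build in which RVALUE_REF expands to a real rvalue reference, so the returned
// temporaries are moved into the locals rather than copied.
static inline void AllocateSketch()
{
	// aligned heap allocation; freed via its registered deleter on scope exit
	UniqueRange buffer = AllocateAligned(4096, allocationAlignment);
	static_cast<char*>(buffer.get())[0] = 1;	// use the memory

	// virtual-memory allocation with default page type and read+write protection
	UniqueRange mapping = AllocateVM(64*4096);
	(void)mapping;
}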


#endif	// #ifndef INCLUDED_ALLOCATORS_UNIQUE_RANGE