/**
	FreeList allocator proxy templates used to prevent memory fragmentation
	in the base allocator.

	Copyright: © 2012-2013 RejectedSoftware e.K.
	           © 2014-2015 Etienne Cimon
	License: Subject to the terms of the MIT license.
	Authors: Sönke Ludwig, Etienne Cimon
*/
module memutils.freelist;

import memutils.allocators;
import memutils.memory;
import memutils.helpers : destructRecurse, min, memset, memcpy;
import memutils.utils : ObjectAllocator, Malloc;
import std.typetuple;

nothrow:
@trusted:

static enum minExponent = 3;
static enum freeListCount = 12;

struct AutoFreeListAllocator(Base) {
nothrow:
@trusted:
	private {
		static if (is(Base == MallocAllocator))
			FreeListMallocAllocator*[freeListCount] m_freeLists;
		else
			FreeListAlloc!(Base)*[freeListCount] m_freeLists;
		Base* m_baseAlloc;
		bool recursing;
	}

	static if (is(Base == MallocAllocator)) {} else {

		this(Base* base) {
			m_baseAlloc = base;
			foreach (i; iotaTuple!freeListCount) {
				m_freeLists[i] = ObjectAllocator!(FreeListAlloc!Base, Base)(base).alloc(nthFreeListSize!(i));
			}
			recursing = false;
		}
	}

	~this() {
		if (!m_baseAlloc) return;
		foreach (fl; m_freeLists)
			destructRecurse(*fl);
	}

	void[] alloc(size_t sz, bool must_zeroise = true)
	{
		if (!m_baseAlloc) {
			m_baseAlloc = getAllocator!Base();
			foreach (i; iotaTuple!freeListCount) {
				static if (is(Base == MallocAllocator))
					m_freeLists[i] = ObjectAllocator!(FreeListMallocAllocator, Malloc)().alloc(nthFreeListSize!(i));
				else
					m_freeLists[i] = ObjectAllocator!(FreeListAlloc!Base, Malloc)().alloc(nthFreeListSize!(i));
			}
			recursing = false;
		}
		logTrace("AFL alloc ", sz);
		foreach (i; iotaTuple!freeListCount)
			if (sz <= nthFreeListSize!(i)) {
				auto ret = m_freeLists[i].alloc().ptr[0 .. sz];
				if (must_zeroise) memset(ret.ptr, 0, ret.length);
				return ret;
			}
		if (sz > nthFreeListSize!(freeListCount-1)) {
			auto ret = m_baseAlloc.alloc(sz, false);
			if (must_zeroise) memset(ret.ptr, 0, ret.length);
			return ret;
		}
		assert(false, "Alloc failed");
	}

	void[] realloc(void[] data, size_t sz, bool must_zeroise = true)
	{
		foreach (fl; m_freeLists) {
			if (data.length <= fl.elementSize) {
				// just grow the slice if it still fits into the free list slot
				if (sz <= fl.elementSize) {
					if (must_zeroise && sz > data.length) memset(data.ptr + data.length, 0, sz - data.length);
					return data.ptr[0 .. sz];
				}

				// otherwise re-allocate
				auto newd = alloc(sz);
				assert(newd.ptr+sz <= data.ptr || newd.ptr >= data.ptr+data.length, "New block overlaps old one!?");
				auto len = min(data.length, sz);
				memcpy(newd.ptr, data.ptr, len);

				free(data);
				if (must_zeroise) memset(newd.ptr + len, 0, sz - len);
				return newd;
			}
		}
		// forward large blocks to the base allocator
		auto ret = m_baseAlloc.realloc(data, sz, true);
		return ret;
	}

	void free(void[] data, bool must_zeroise = false)
	{
		logTrace("AFL free ", data.length);
		foreach (i; iotaTuple!freeListCount) {
			if (data.length <= nthFreeListSize!i) {
				m_freeLists[i].free(data.ptr[0 .. nthFreeListSize!i], must_zeroise);
				return;
			}
		}
		if (data.length > nthFreeListSize!(freeListCount-1)) {
			m_baseAlloc.free(data, must_zeroise);
			return;
		}
		assert(false, "Free failed");
	}

	private static pure size_t nthFreeListSize(size_t i)() { return 1 << (i + minExponent); }
	private template iotaTuple(size_t i) {
		static if (i > 1) alias iotaTuple = TypeTuple!(iotaTuple!(i-1), i-1);
		else alias iotaTuple = TypeTuple!(0);
	}
}

struct FreeListAlloc(Base)
{
	import memutils.vector : Vector;
	import memutils.utils : Malloc;

	private static struct FreeListSlot { FreeListSlot* next; }
	private {
		size_t m_elemSize;
		Base* m_baseAlloc;
		FreeListSlot* m_firstFree = null;
		size_t space;
		version(DebugLeaks) HashMap!(size_t, size_t, Malloc) m_owned;
		size_t m_nalloc = 0;
		size_t m_nfree = 0;
	}
nothrow:
@trusted:
	~this() {
		version(DebugLeaks) //if (!thread_isMainThread)
		{
			if (m_owned.length > 0)
			{
				//import std.stdio : writeln;
				//foreach(const ref size_t ptr, const ref size_t size; m_owned)
				//	writeln( cast(void*)ptr, " : ", size);
				//asm { int 3; }
			}
		}
		while (m_firstFree) {
			auto slot = m_firstFree;
			m_firstFree = slot.next;
			slot.next = null;
			m_baseAlloc.free( (cast(void*)slot)[0 .. m_elemSize] );
			m_nfree--;
		}
		//foreach(size_t slot; m_owned[])
		//	m_baseAlloc.free( (cast(void*)slot)[0 .. m_elemSize]);
	}

	this(size_t elem_size, Base* base = null)
	{
		//assert(elem_size >= size_t.sizeof);
		m_elemSize = elem_size;
		if (base)
			m_baseAlloc = base;
		else m_baseAlloc = getAllocator!Base();
		//logTrace("Create FreeListAlloc %d", m_elemSize);
	}

	@property size_t elementSize() const { return m_elemSize; }

	void[] alloc(size_t sz)
	{
		assert(sz == m_elemSize, "Invalid allocation size.");
		return alloc();
	}

	void[] alloc()
	{
		void[] mem;
		if (m_nfree == 0) m_firstFree = null;
		if (m_firstFree) {
			auto slot = m_firstFree;
			if (--m_nfree == 0)
				m_firstFree = null;
			else {
				m_firstFree = slot.next;
			}
			mem = (cast(void*)slot)[0 .. m_elemSize];
		} else {
			mem = m_baseAlloc.alloc(m_elemSize);
			//logInfo("Alloc %d bytes: alloc: %d, free: %d", SZ, s_nalloc, s_nfree);
		}
		version(DebugLeaks) //if (!thread_isMainThread)
		{
			//import std.stdio : writeln;
			//Exception ex = new Exception("");
			//try throw ex; catch (Exception e) {
			//	writeln(mem.ptr, " : ", e.toString());
			//}
			m_owned[cast(size_t)mem.ptr] = m_elemSize;
		}
		m_nalloc++;
		//logInfo("Alloc %d bytes: alloc: %d, free: %d", SZ, s_nalloc, s_nfree);

		return mem;
	}

	void[] realloc(void[] mem, size_t sz, bool must_zeroise = false)
	{
		version(DebugLeaks) //if (!thread_isMainThread)
			m_owned[cast(size_t)mem.ptr] = sz;
		assert(mem.length == m_elemSize);
		assert(sz == m_elemSize);
		return mem;
	}

	void free(void[] mem, bool must_zeroise = false)
	{
		assert(mem.length == m_elemSize, "Memory block passed to free has wrong size.");
		if (must_zeroise) memset(mem.ptr, 0, mem.length);
		auto s = cast(FreeListSlot*)mem.ptr;
		s.next = m_firstFree;

		version(DebugLeaks) //if (!thread_isMainThread)
			m_owned.remove(cast(size_t)mem.ptr);
		m_firstFree = s;
		m_nalloc--;
		m_nfree++;
	}
}

private
struct FreeListMallocAllocator
{
nothrow:
@trusted:
	import memutils.utils : Malloc;

	private static struct FreeListSlot { FreeListSlot* next; }
	private {
		size_t m_elemSize;
		MallocAllocator* m_baseAlloc;
		FreeListSlot* m_firstFree = null;
		size_t space;
		version(DebugLeaks) HashMap!(size_t, size_t, Malloc) m_owned;
		size_t m_nalloc = 0;
		size_t m_nfree = 0;
	}

	~this() {
		version(DebugLeaks) //if (!thread_isMainThread)
		{
			if (m_owned.length > 0)
			{
				//import std.stdio : writeln;
				//foreach(const ref size_t ptr, const ref size_t size; m_owned)
				//	writeln( cast(void*)ptr, " : ", size);
				//asm { int 3; }
			}
		}
		while (m_firstFree) {
			auto slot = m_firstFree;
			m_firstFree = slot.next;
			slot.next = null;
			m_baseAlloc.free( (cast(void*)slot)[0 .. m_elemSize] );
			m_nfree--;
		}
		//foreach(size_t slot; m_owned[])
		//	m_baseAlloc.free( (cast(void*)slot)[0 .. m_elemSize]);
	}

	this(size_t elem_size)
	{
		//assert(elem_size >= size_t.sizeof);
		m_elemSize = elem_size;
		m_baseAlloc = getAllocator!MallocAllocator();
		//logTrace("Create FreeListAlloc %d", m_elemSize);
	}

	@property size_t elementSize() const { return m_elemSize; }

	void[] alloc(size_t sz)
	{
		assert(sz == m_elemSize, "Invalid allocation size.");
		return alloc();
	}

	void[] alloc()
	{
		void[] mem;
		if (m_nfree == 0) m_firstFree = null;
		if (m_firstFree) {
			auto slot = m_firstFree;
			if (--m_nfree == 0)
				m_firstFree = null;
			else {
				m_firstFree = slot.next;
			}
			mem = (cast(void*)slot)[0 .. m_elemSize];
		} else {
			mem = m_baseAlloc.alloc(m_elemSize);
			//logInfo("Alloc %d bytes: alloc: %d, free: %d", SZ, s_nalloc, s_nfree);
		}
		version(DebugLeaks) //if (!thread_isMainThread)
		{
			//import std.stdio : writeln;
			//Exception ex = new Exception("");
			//try throw ex; catch (Exception e) {
			//	writeln(mem.ptr, " : ", e.toString());
			//}
			m_owned[cast(size_t)mem.ptr] = m_elemSize;
		}
		m_nalloc++;
		//logInfo("Alloc %d bytes: alloc: %d, free: %d", SZ, s_nalloc, s_nfree);

		return mem;
	}

	void[] realloc(void[] mem, size_t sz, bool must_zeroise = false)
	{
		version(DebugLeaks) //if (!thread_isMainThread)
			m_owned[cast(size_t)mem.ptr] = sz;
		assert(mem.length == m_elemSize);
		assert(sz == m_elemSize);
		return mem;
	}

	void free(void[] mem, bool must_zeroise = false)
	{
		assert(mem.length == m_elemSize, "Memory block passed to free has wrong size.");
		if (must_zeroise) memset(mem.ptr, 0, mem.length);
		auto s = cast(FreeListSlot*)mem.ptr;
		s.next = m_firstFree;

		version(DebugLeaks) //if (!thread_isMainThread)
			m_owned.remove(cast(size_t)mem.ptr);
		m_firstFree = s;
		m_nalloc--;
		m_nfree++;
	}
}
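
// ---------------------------------------------------------------------------
// Minimal usage sketches. They assume MallocAllocator (from memutils.memory)
// as the base allocator purely for illustration; any allocator with the same
// alloc/realloc/free interface can serve as Base.
// ---------------------------------------------------------------------------

// AutoFreeListAllocator rounds small requests up to power-of-two buckets
// (2^minExponent = 8 bytes up to 2^(minExponent+freeListCount-1) = 16384 bytes)
// and forwards anything larger to the base allocator.
unittest {
	AutoFreeListAllocator!MallocAllocator afl;

	// Served from the 32-byte free-list bucket and zeroised by default.
	void[] small = afl.alloc(24);
	assert(small.length == 24);
	afl.free(small);

	// Larger than the biggest bucket: forwarded to the base allocator.
	void[] large = afl.alloc(64 * 1024);
	assert(large.length == 64 * 1024);
	afl.free(large);
}

// FreeListAlloc hands out fixed-size blocks and recycles freed blocks on the
// next alloc() instead of returning them to the base allocator right away.
unittest {
	auto base = getAllocator!MallocAllocator();
	auto fl = FreeListAlloc!MallocAllocator(64, base);

	void[] a = fl.alloc();
	assert(a.length == 64 && fl.elementSize == 64);
	fl.free(a);

	// The freed slot is reused for the next allocation of the same size.
	void[] b = fl.alloc(64);
	assert(b.ptr == a.ptr);
	fl.free(b);
}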