// Written in the D programming language.
/**
Source: $(PHOBOSSRC std/experimental/allocator/building_blocks/segregator.d)
*/
module std.experimental.allocator.building_blocks.segregator;

import std.experimental.allocator.common;

/**
Dispatches allocations (and deallocations) between two allocators ($(D
SmallAllocator) and `LargeAllocator`) depending on the size allocated, as
follows. All allocations smaller than or equal to `threshold` will be
dispatched to `SmallAllocator`. The others will go to `LargeAllocator`.

If both allocators are `shared`, the `Segregator` will also offer $(D
shared) methods.
*/
struct Segregator(size_t threshold, SmallAllocator, LargeAllocator)
{
    import std.algorithm.comparison : min;
    import std.traits : hasMember, ReturnType;
    import std.typecons : Ternary;

    static if (stateSize!SmallAllocator) private SmallAllocator _small;
    else private alias _small = SmallAllocator.instance;
    static if (stateSize!LargeAllocator) private LargeAllocator _large;
    else private alias _large = LargeAllocator.instance;

    version (StdDdoc)
    {
        /**
        The alignment offered is the minimum of the two allocators' alignments.
        */
        enum uint alignment;
        /**
        This method is defined only if at least one of the allocators defines
        it. The good allocation size is obtained from `SmallAllocator` if $(D
        s <= threshold), or `LargeAllocator` otherwise. (If one of the
        allocators does not define `goodAllocSize`, the default
        implementation in this module applies.)
        */
        static size_t goodAllocSize(size_t s);
        /**
        The memory is obtained from `SmallAllocator` if $(D s <= threshold),
        or `LargeAllocator` otherwise.
        */
        void[] allocate(size_t);
        /**
        This method is defined only if both allocators define it, and forwards
        to `SmallAllocator` or `LargeAllocator` appropriately.
        */
        void[] alignedAllocate(size_t, uint);
        /**
        This method is defined only if at least one of the allocators defines
        it. If `SmallAllocator` defines `expand` and $(D b.length +
        delta <= threshold), the call is forwarded to `SmallAllocator`. If $(D
        LargeAllocator) defines `expand` and $(D b.length > threshold), the
        call is forwarded to `LargeAllocator`. Otherwise, the call returns
        `false`.
        */
        bool expand(ref void[] b, size_t delta);
        /**
        This method is defined only if at least one of the allocators defines
        it. If `SmallAllocator` defines `reallocate` and $(D b.length <=
        threshold && s <= threshold), the call is forwarded to $(D
        SmallAllocator). If `LargeAllocator` defines `reallocate` and $(D
        b.length > threshold && s > threshold), the call is forwarded to $(D
        LargeAllocator). Otherwise, the request crosses the threshold and is
        handled by the default cross-allocator `reallocate`, which allocates a
        new block, copies the contents, and deallocates the old block.
        */
        bool reallocate(ref void[] b, size_t s);
        /**
        This method is defined only if at least one of the allocators defines
        it, and works similarly to `reallocate`.
        */
        bool alignedReallocate(ref void[] b, size_t s, uint a);
        /**
        This method is defined only if both allocators define it. The call is
        forwarded to `SmallAllocator` if $(D b.length <= threshold), or $(D
        LargeAllocator) otherwise.
        */
        Ternary owns(void[] b);
        /**
        This function is defined only if both allocators define it, and forwards
        appropriately depending on `b.length`.
        */
        bool deallocate(void[] b);
        /**
        This function is defined only if both allocators define it, and calls
        `deallocateAll` for them in turn.
        */
        bool deallocateAll();
        /**
        This function is defined only if both allocators define it, and returns
        the conjunction of `empty` calls for the two.
        */
        Ternary empty();
    }

    /**
    Composite allocators involving nested instantiations of `Segregator` make
    it difficult to access individual sub-allocators stored within. $(D
    allocatorForSize) simplifies the task by supplying the allocator nested
    inside a `Segregator` that is responsible for a specific size `s`.

    Example:
    ----
    alias A = Segregator!(300,
        Segregator!(200, A1, A2),
        A3);
    A a;
    static assert(is(typeof(a.allocatorForSize!10) == A1));
    static assert(is(typeof(a.allocatorForSize!250) == A2));
    static assert(is(typeof(a.allocatorForSize!301) == A3));
    ----
    */
    ref auto allocatorForSize(size_t s)()
    {
        static if (s <= threshold)
            static if (is(SmallAllocator == Segregator!(Args), Args...))
                return _small.allocatorForSize!s;
            else return _small;
        else
            static if (is(LargeAllocator == Segregator!(Args), Args...))
                return _large.allocatorForSize!s;
            else return _large;
    }

    enum uint alignment = min(SmallAllocator.alignment,
        LargeAllocator.alignment);

    private template Impl()
    {
        size_t goodAllocSize(size_t s)
        {
            return s <= threshold
                ? _small.goodAllocSize(s)
                : _large.goodAllocSize(s);
        }

        void[] allocate(size_t s)
        {
            return s <= threshold ? _small.allocate(s) : _large.allocate(s);
        }

        static if (hasMember!(SmallAllocator, "alignedAllocate")
                && hasMember!(LargeAllocator, "alignedAllocate"))
        void[] alignedAllocate(size_t s, uint a)
        {
            return s <= threshold
                ? _small.alignedAllocate(s, a)
                : _large.alignedAllocate(s, a);
        }

        static if (hasMember!(SmallAllocator, "expand")
                || hasMember!(LargeAllocator, "expand"))
        bool expand(ref void[] b, size_t delta)
        {
            if (!delta) return true;
            if (b.length + delta <= threshold)
            {
                // Old and new allocations handled by _small
                static if (hasMember!(SmallAllocator, "expand"))
                    return _small.expand(b, delta);
                else
                    return false;
            }
            if (b.length > threshold)
            {
                // Old and new allocations handled by _large
                static if (hasMember!(LargeAllocator, "expand"))
                    return _large.expand(b, delta);
                else
                    return false;
            }
            // Oops, cross-allocator transgression
            return false;
        }

        static if (hasMember!(SmallAllocator, "reallocate")
                || hasMember!(LargeAllocator, "reallocate"))
        bool reallocate(ref void[] b, size_t s)
        {
            static if (hasMember!(SmallAllocator, "reallocate"))
                if (b.length <= threshold && s <= threshold)
                {
                    // Old and new allocations handled by _small
                    return _small.reallocate(b, s);
                }
            static if (hasMember!(LargeAllocator, "reallocate"))
                if (b.length > threshold && s > threshold)
                {
                    // Old and new allocations handled by _large
                    return _large.reallocate(b, s);
                }
            // Cross-allocator transgression
            return .reallocate(this, b, s);
        }

        static if (hasMember!(SmallAllocator, "alignedReallocate")
                || hasMember!(LargeAllocator, "alignedReallocate"))
        bool alignedReallocate(ref void[] b, size_t s, uint a)
        {
            static if (hasMember!(SmallAllocator, "alignedReallocate"))
                if (b.length <= threshold && s <= threshold)
                {
                    // Old and new allocations handled by _small
                    return _small.alignedReallocate(b, s, a);
                }
            static if (hasMember!(LargeAllocator, "alignedReallocate"))
                if (b.length > threshold && s > threshold)
                {
                    // Old and new allocations handled by _large
                    return _large.alignedReallocate(b, s, a);
                }
            // Cross-allocator transgression
            return .alignedReallocate(this, b, s, a);
        }

        static if (hasMember!(SmallAllocator, "allocateZeroed")
                || hasMember!(LargeAllocator, "allocateZeroed"))
        package(std) void[] allocateZeroed()(size_t s)
        {
            if (s <= threshold)
            {
                static if (hasMember!(SmallAllocator, "allocateZeroed"))
                    return _small.allocateZeroed(s);
                else
                {
                    auto b = _small.allocate(s);
                    (() @trusted => (cast(ubyte[]) b)[] = 0)(); // OK even if b is null.
                    return b;
                }
            }
            else
            {
                static if (hasMember!(LargeAllocator, "allocateZeroed"))
                    return _large.allocateZeroed(s);
                else
                {
                    auto b = _large.allocate(s);
                    (() @trusted => (cast(ubyte[]) b)[] = 0)(); // OK even if b is null.
                    return b;
                }
            }
        }

        static if (hasMember!(SmallAllocator, "owns")
                && hasMember!(LargeAllocator, "owns"))
        Ternary owns(void[] b)
        {
            return Ternary(b.length <= threshold
                ? _small.owns(b) : _large.owns(b));
        }

        static if (hasMember!(SmallAllocator, "deallocate")
                && hasMember!(LargeAllocator, "deallocate"))
        bool deallocate(void[] data)
        {
            return data.length <= threshold
                ? _small.deallocate(data)
                : _large.deallocate(data);
        }

        static if (hasMember!(SmallAllocator, "deallocateAll")
                && hasMember!(LargeAllocator, "deallocateAll"))
        bool deallocateAll()
        {
            // Use & instead of && to evaluate both
            return _small.deallocateAll() & _large.deallocateAll();
        }

        static if (hasMember!(SmallAllocator, "empty")
                && hasMember!(LargeAllocator, "empty"))
        Ternary empty()
        {
            return _small.empty & _large.empty;
        }

        static if (hasMember!(SmallAllocator, "resolveInternalPointer")
                && hasMember!(LargeAllocator, "resolveInternalPointer"))
        Ternary resolveInternalPointer(const void* p, ref void[] result)
        {
            Ternary r = _small.resolveInternalPointer(p, result);
            return r == Ternary.no ? _large.resolveInternalPointer(p, result) : r;
        }
    }

    private enum sharedMethods =
        !stateSize!SmallAllocator
        && !stateSize!LargeAllocator
        && is(typeof(SmallAllocator.instance) == shared)
        && is(typeof(LargeAllocator.instance) == shared);

    static if (sharedMethods)
    {
        static shared Segregator instance;
        shared { mixin Impl!(); }
    }
    else
    {
        static if (!stateSize!SmallAllocator && !stateSize!LargeAllocator)
            __gshared Segregator instance;
        mixin Impl!();
    }
}

///
@system unittest
{
    import std.experimental.allocator.building_blocks.free_list : FreeList;
    import std.experimental.allocator.gc_allocator : GCAllocator;
    import std.experimental.allocator.mallocator : Mallocator;
    alias A =
        Segregator!(
            1024 * 4,
            Segregator!(
                128, FreeList!(Mallocator, 0, 128),
                GCAllocator),
            Segregator!(
                1024 * 1024, Mallocator,
                GCAllocator)
            );
    A a;
    auto b = a.allocate(200);
    assert(b.length == 200);
    a.deallocate(b);
}
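
// A sketch added for illustration (not part of the original tests): the
// dispatch rule at the boundary. A request of exactly `threshold` bytes is
// served by the small allocator; one byte more goes to the large allocator.
@system unittest
{
    import std.experimental.allocator.building_blocks.region : BorrowedRegion;

    auto a = Segregator!(64, BorrowedRegion!(), BorrowedRegion!())(
                BorrowedRegion!()(new ubyte[1024]),
                BorrowedRegion!()(new ubyte[1024]));

    auto small = a.allocate(64); // == threshold, served by the small region
    auto large = a.allocate(65); // > threshold, served by the large region
    assert(small.length == 64 && large.length == 65);
    // Deallocation dispatches on the block's length in the same way.
    assert(a.deallocate(large));
    assert(a.deallocate(small));
}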

/**
A `Segregator` with more than three arguments expands to a composition of
elemental `Segregator`s, as illustrated by the following example:

----
alias A =
    Segregator!(
        n1, A1,
        n2, A2,
        n3, A3,
        A4
    );
----

With this definition, allocation requests for `n1` bytes or less are directed
to `A1`; requests between $(D n1 + 1) and `n2` bytes (inclusive) are
directed to `A2`; requests between $(D n2 + 1) and `n3` bytes (inclusive)
are directed to `A3`; and requests for more than `n3` bytes are directed
to `A4`. If some particular range should not be handled, `NullAllocator`
may be used appropriately.

*/
template Segregator(Args...)
if (Args.length > 3)
{
    // Binary search
    private enum cutPoint = ((Args.length - 2) / 4) * 2;
    static if (cutPoint >= 2)
    {
        alias Segregator = .Segregator!(
            Args[cutPoint],
            .Segregator!(Args[0 .. cutPoint], Args[cutPoint + 1]),
            .Segregator!(Args[cutPoint + 2 .. $])
        );
    }
    else
    {
        // Favor small sizes
        alias Segregator = .Segregator!(
            Args[0],
            Args[1],
            .Segregator!(Args[2 .. $])
        );
    }
}

///
@system unittest
{
    import std.experimental.allocator.building_blocks.free_list : FreeList;
    import std.experimental.allocator.gc_allocator : GCAllocator;
    import std.experimental.allocator.mallocator : Mallocator;
    alias A =
        Segregator!(
            128, FreeList!(Mallocator, 0, 128),
            1024 * 4, GCAllocator,
            1024 * 1024, Mallocator,
            GCAllocator
        );
    A a;
    auto b = a.allocate(201);
    assert(b.length == 201);
    a.deallocate(b);
}
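
// A sketch added for illustration (not one of the original tests): the
// multi-argument form is an alias for nested `Segregator`s as described
// above. With five arguments the "favor small sizes" branch applies, so the
// remaining pairs nest inside the large-allocator slot.
@system unittest
{
    import std.experimental.allocator.gc_allocator : GCAllocator;
    import std.experimental.allocator.mallocator : Mallocator;

    static assert(is(
        Segregator!(128, Mallocator, 1024 * 4, GCAllocator, Mallocator)
        == Segregator!(128, Mallocator,
            Segregator!(1024 * 4, GCAllocator, Mallocator))));
}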

@system unittest
{
    import std.experimental.allocator.gc_allocator : GCAllocator;
    import std.experimental.allocator.building_blocks.kernighan_ritchie : KRRegion;
    Segregator!(128, GCAllocator, KRRegion!GCAllocator) alloc;
    assert((() nothrow @safe @nogc => alloc.goodAllocSize(1))()
            == GCAllocator.instance.goodAllocSize(1));

    // Note: both allocators are stateless with a `shared` instance, so the
    // methods are generated as `shared`; a shared object is needed to call them
    shared Segregator!(128, GCAllocator, GCAllocator) sharedAlloc;
    assert((() nothrow @safe @nogc => sharedAlloc.goodAllocSize(1))()
            == GCAllocator.instance.goodAllocSize(1));
}

@system unittest
{
    import std.experimental.allocator.building_blocks.bitmapped_block : BitmappedBlock;
    import std.typecons : Ternary;

    alias A =
        Segregator!(
            128, BitmappedBlock!(4096),
            BitmappedBlock!(4096)
        );

    A a = A(
            BitmappedBlock!(4096)(new ubyte[4096 * 1024]),
            BitmappedBlock!(4096)(new ubyte[4096 * 1024])
    );

    assert(a.empty == Ternary.yes);
    auto b = a.allocate(42);
    assert(b.length == 42);
    assert(a.empty == Ternary.no);
    assert(a.alignedReallocate(b, 256, 512));
    assert(b.length == 256);
    assert(a.alignedReallocate(b, 42, 512));
    assert(b.length == 42);
    assert((() pure nothrow @safe @nogc => a.owns(b))() == Ternary.yes);
    assert((() pure nothrow @safe @nogc => a.owns(null))() == Ternary.no);
    // Ensure deallocate inherits from parent allocators
    assert((() nothrow @nogc => a.deallocate(b))());
    assert(a.empty == Ternary.yes);

    // Test that deallocateAll inherits from parents
    auto c = a.allocate(42);
    assert(c.length == 42);
    assert((() pure nothrow @safe @nogc => a.expand(c, 58))());
    assert(c.length == 100);
    assert(a.empty == Ternary.no);
    assert((() nothrow @nogc => a.deallocateAll())());
    assert(a.empty == Ternary.yes);
}
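
// A sketch added for illustration (not one of the original tests): an
// `expand` that would grow a small block past the threshold can be served by
// neither sub-allocator, so it returns false and leaves the block unchanged.
@system unittest
{
    import std.experimental.allocator.building_blocks.bitmapped_block : BitmappedBlock;

    alias A =
        Segregator!(
            128, BitmappedBlock!(64),
            BitmappedBlock!(4096)
        );

    A a = A(
            BitmappedBlock!(64)(new ubyte[64 * 1024]),
            BitmappedBlock!(4096)(new ubyte[4096 * 1024])
    );

    auto b = a.allocate(100);
    assert(b.length == 100);
    // 100 + 100 > threshold, but the block does not live in the large allocator.
    assert(!a.expand(b, 100));
    assert(b.length == 100);
    assert(a.deallocate(b));
}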

@system unittest
{
    import std.experimental.allocator.gc_allocator : GCAllocator;
    import std.typecons : Ternary;

    shared Segregator!(1024 * 4, GCAllocator, GCAllocator) a;

    auto b = a.allocate(201);
    assert(b.length == 201);

    void[] p;
    assert((() nothrow @safe @nogc => a.resolveInternalPointer(&b[0], p))() == Ternary.yes);
    assert((() nothrow @safe @nogc => a.resolveInternalPointer(null, p))() == Ternary.no);

    // Ensure deallocate inherits from parent allocators
    assert((() nothrow @nogc => a.deallocate(b))());
}

@system unittest
{
    import std.experimental.allocator.building_blocks.bitmapped_block : BitmappedBlockWithInternalPointers;
    import std.typecons : Ternary;

    alias A =
        Segregator!(
            10_240, BitmappedBlockWithInternalPointers!(4096),
            BitmappedBlockWithInternalPointers!(4096)
        );

    A a = A(
            BitmappedBlockWithInternalPointers!(4096)(new ubyte[4096 * 1024]),
            BitmappedBlockWithInternalPointers!(4096)(new ubyte[4096 * 1024])
    );

    assert((() nothrow @safe @nogc => a.empty)() == Ternary.yes);
    auto b = a.allocate(201);
    assert(b.length == 201);
    assert((() nothrow @safe @nogc => a.empty)() == Ternary.no);
    assert((() nothrow @nogc => a.deallocate(b))());
}

// Test that reallocate infers from parent
@system unittest
{
    import std.experimental.allocator.mallocator : Mallocator;

    alias a = Segregator!(10_240, Mallocator, Mallocator).instance;

    auto b = a.allocate(42);
    assert(b.length == 42);
    assert((() nothrow @nogc => a.reallocate(b, 100))());
    assert(b.length == 100);
    assert((() nothrow @nogc => a.deallocate(b))());
}
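
// A sketch added for illustration (not one of the original tests): a
// reallocation that crosses the threshold cannot be completed by either
// sub-allocator alone, so the default cross-allocator path moves the data.
@system unittest
{
    import std.experimental.allocator.mallocator : Mallocator;

    alias a = Segregator!(128, Mallocator, Mallocator).instance;

    auto b = a.allocate(100); // <= threshold, served by the small side
    assert(b.length == 100);
    // Growing past the threshold allocates anew on the large side and copies.
    assert(a.reallocate(b, 200));
    assert(b.length == 200);
    assert(a.deallocate(b));
}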

@system unittest
{
    import std.experimental.allocator.building_blocks.region : BorrowedRegion;
    import std.typecons : Ternary;

    auto a = Segregator!(10_240, BorrowedRegion!(), BorrowedRegion!())(
                BorrowedRegion!()(new ubyte[4096 * 1024]),
                BorrowedRegion!()(new ubyte[4096 * 1024]));

    assert((() nothrow @safe @nogc => a.empty)() == Ternary.yes);
    auto b = a.alignedAllocate(42, 8);
    assert(b.length == 42);
    assert((() nothrow @nogc => a.alignedReallocate(b, 100, 8))());
    assert(b.length == 100);
    assert((() nothrow @safe @nogc => a.empty)() == Ternary.no);
    assert((() nothrow @nogc => a.deallocate(b))());
}
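
// A sketch added for illustration (not one of the original tests):
// `allocatorForSize` drills through nested `Segregator`s to the sub-allocator
// responsible for a given size. Stateless allocators are reached through
// their (possibly `shared`) `instance`, hence the `Unqual`.
@system unittest
{
    import std.experimental.allocator.building_blocks.kernighan_ritchie : KRRegion;
    import std.experimental.allocator.gc_allocator : GCAllocator;
    import std.traits : Unqual;

    alias A = Segregator!(1024,
        Segregator!(128, GCAllocator, KRRegion!GCAllocator),
        GCAllocator);
    A a;
    static assert(is(Unqual!(typeof(a.allocatorForSize!64)) == GCAllocator));
    static assert(is(typeof(a.allocatorForSize!200) == KRRegion!GCAllocator));
    static assert(is(Unqual!(typeof(a.allocatorForSize!2048)) == GCAllocator));
}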