diff --git a/src/containers/cyclicbuffer.d b/src/containers/cyclicbuffer.d index ee16790..40538af 100644 --- a/src/containers/cyclicbuffer.d +++ b/src/containers/cyclicbuffer.d @@ -10,7 +10,7 @@ module containers.cyclicbuffer; private import core.exception : onRangeError; private import std.experimental.allocator.mallocator : Mallocator; private import std.range.primitives : empty, front, back, popFront, popBack; -private import containers.internal.node : shouldAddGCRange; +private import containers.internal.node : shouldAddGCRange,isNoGCAllocator; /** * Array that provides constant time (amortized) appending and popping @@ -23,11 +23,19 @@ private import containers.internal.node : shouldAddGCRange; */ struct CyclicBuffer(T, Allocator = Mallocator, bool supportGC = shouldAddGCRange!T) { - @disable this(this); + + static if(isNoGCAllocator!(Allocator) && !supportGC) { + @nogc: + } +static if (__VERSION__ > 2086) { + @disable this(ref CyclicBuffer); +} else { + this(this) @disable; +} private import std.conv : emplace; private import std.experimental.allocator.common : stateSize; - private import std.traits : isImplicitlyConvertible, hasElaborateDestructor; + private import std.traits : isImplicitlyConvertible, hasElaborateDestructor,hasFunctionAttributes; static if (stateSize!Allocator != 0) { diff --git a/src/containers/dynamicarray.d b/src/containers/dynamicarray.d index 90c16f4..0ba0e2a 100644 --- a/src/containers/dynamicarray.d +++ b/src/containers/dynamicarray.d @@ -8,8 +8,8 @@ module containers.dynamicarray; private import core.lifetime : move, moveEmplace, copyEmplace, emplace; -private import std.traits : isCopyable; -private import containers.internal.node : shouldAddGCRange; +private import std.traits : isCopyable,hasFunctionAttributes; +private import containers.internal.node : shouldAddGCRange, isNoGCAllocator; private import std.experimental.allocator.mallocator : Mallocator; /** @@ -24,10 +24,16 @@ private import std.experimental.allocator.mallocator : 
Mallocator; */ struct DynamicArray(T, Allocator = Mallocator, bool supportGC = shouldAddGCRange!T) { + static if(isNoGCAllocator!(Allocator) && !supportGC) { + @nogc: + } + this(this) @disable; private import std.experimental.allocator.common : stateSize; + + static if (is(typeof((T[] a, const T[] b) => a[0 .. b.length] = b[0 .. $]))) { /// Either `const(T)` or `T`. @@ -64,9 +70,6 @@ struct DynamicArray(T, Allocator = Mallocator, bool supportGC = shouldAddGCRange ~this() { - import std.experimental.allocator.mallocator : Mallocator; - import containers.internal.node : shouldAddGCRange; - if (arr is null) return; @@ -113,9 +116,6 @@ struct DynamicArray(T, Allocator = Mallocator, bool supportGC = shouldAddGCRange */ void insertBack(T value) { - import std.experimental.allocator.mallocator : Mallocator; - import containers.internal.node : shouldAddGCRange; - if (arr.length == 0) { arr = cast(typeof(arr)) allocator.allocate(T.sizeof * 4); @@ -275,8 +275,14 @@ struct DynamicArray(T, Allocator = Mallocator, bool supportGC = shouldAddGCRange foreach (ref target; toFill) emplace(&target); } - else - toFill[] = T.init; + else { + foreach (ref target; toFill){ + target = T.init; + } + // this does not work in 2.102.2, see: https://issues.dlang.org/show_bug.cgi?id=24196 + // toFill[] = T.init; + } + } /** @@ -346,8 +352,8 @@ struct DynamicArray(T, Allocator = Mallocator, bool supportGC = shouldAddGCRange } else { - import core.exception : RangeError; - throw new RangeError("Out of range index used to remove element"); + import core.exception : onRangeError; + onRangeError("Out of range index used to remove element"); } } @@ -692,7 +698,7 @@ version(emsi_containers_unittest) @nogc unittest assert(Counter.count == 3); } -version(emsi_containers_unittest) @nogc unittest +version(emsi_containers_unittest) @nogc unittest { struct S { int i = 42; @disable this(this); } DynamicArray!S a; diff --git a/src/containers/hashmap.d b/src/containers/hashmap.d index eb9c684..e44134f 100644 ---
a/src/containers/hashmap.d +++ b/src/containers/hashmap.d @@ -9,9 +9,9 @@ module containers.hashmap; private import core.lifetime : move; private import containers.internal.hash; -private import containers.internal.node : shouldAddGCRange; +private import containers.internal.node : shouldAddGCRange,isNoGCAllocator; private import std.experimental.allocator.mallocator : Mallocator; -private import std.traits : isBasicType, Unqual; +private import std.traits : isBasicType, Unqual,hasFunctionAttributes; /** * Associative array / hash map. @@ -27,7 +27,14 @@ struct HashMap(K, V, Allocator = Mallocator, alias hashFunction = generateHash!K bool supportGC = shouldAddGCRange!K || shouldAddGCRange!V, bool storeHash = true) { + static if(isNoGCAllocator!(Allocator) && !supportGC) { + @nogc: + } +static if (__VERSION__ > 2086) { + @disable this(ref HashMap); +} else { this(this) @disable; +} private import std.experimental.allocator.common : stateSize; @@ -71,9 +78,7 @@ struct HashMap(K, V, Allocator = Mallocator, alias hashFunction = generateHash!K static if (is(typeof(allocator is null))) assert(allocator !is null, "Allocator must not be null"); } - } - else - { + } else { /** * Constructs an HashMap with an initial bucket count of bucketCount. bucketCount * must be a power of two. 
@@ -663,7 +668,7 @@ version(emsi_containers_unittest) unittest string name; } - void someFunc(const scope ref HashMap!(string,Foo) map) @safe + void someFunc(const ref HashMap!(string,Foo) map) @safe { foreach (kv; map.byKeyValue()) { diff --git a/src/containers/hashset.d b/src/containers/hashset.d index cf1c3e8..7779f78 100644 --- a/src/containers/hashset.d +++ b/src/containers/hashset.d @@ -8,7 +8,7 @@ module containers.hashset; private import containers.internal.hash : generateHash, hashToIndex; -private import containers.internal.node : shouldAddGCRange; +private import containers.internal.node : shouldAddGCRange,isNoGCAllocator; private import std.experimental.allocator.mallocator : Mallocator; private import std.traits : isBasicType; @@ -25,7 +25,14 @@ struct HashSet(T, Allocator = Mallocator, alias hashFunction = generateHash!T, bool supportGC = shouldAddGCRange!T, bool storeHash = !isBasicType!T) { + static if(isNoGCAllocator!(Allocator) && !supportGC) { + @nogc: + } +static if (__VERSION__ > 2086) { + @disable this(ref HashSet); +} else { this(this) @disable; +} private import std.experimental.allocator.common : stateSize; diff --git a/src/containers/immutablehashset.d b/src/containers/immutablehashset.d index e6cf5ea..cbd6f81 100644 --- a/src/containers/immutablehashset.d +++ b/src/containers/immutablehashset.d @@ -7,6 +7,9 @@ module containers.immutablehashset; +private import containers.internal.node : shouldAddGCRange; + + /** * The immutable hash set is useful for constructing a read-only collection that * supports quickly determining if an element is present. @@ -20,7 +23,11 @@ struct ImmutableHashSet(T, alias hashFunction) /// @disable this(); /// - @disable this(this); +static if (__VERSION__ > 2086) { + @disable this(ref ImmutableHashSet); +} else { + this(this) @disable; +} /** * Constructs an immutable hash set from the given values. 
The values must @@ -155,7 +162,6 @@ private: import std.experimental.allocator.mallocator : Mallocator; import std.traits : isBasicType, hasMember; - import containers.internal.node : shouldAddGCRange; import core.memory : GC; static struct Node diff --git a/src/containers/internal/node.d b/src/containers/internal/node.d index d18ec04..1941c8f 100644 --- a/src/containers/internal/node.d +++ b/src/containers/internal/node.d @@ -53,7 +53,7 @@ version (X86_64) template shouldNullSlot(T) { import std.traits; - enum shouldNullSlot = isPointer!T || is (T == class) || is (T == interface) || isDynamicArray!T + enum shouldNullSlot = isPointer!T || is (T == class) || is (T == interface) || isDynamicArray!T || is(T == delegate); // closures or class methods should be nulled so the GC can reclaim them } @@ -63,6 +63,14 @@ template shouldAddGCRange(T) { enum shouldAddGCRange = hasIndirections!T; } + +template isNoGCAllocator(Allocator) +{ + import std.traits : hasFunctionAttributes; + enum isNoGCAllocator = hasFunctionAttributes!(Allocator.deallocate, "@nogc") + && hasFunctionAttributes!(Allocator.allocate, "@nogc"); +} + static assert (shouldAddGCRange!string); static assert (!shouldAddGCRange!int); diff --git a/src/containers/openhashset.d b/src/containers/openhashset.d index 8a74afa..8ff746c 100644 --- a/src/containers/openhashset.d +++ b/src/containers/openhashset.d @@ -7,7 +7,7 @@ module containers.openhashset; private import containers.internal.hash; -private import containers.internal.node : shouldAddGCRange; +private import containers.internal.node : shouldAddGCRange,isNoGCAllocator; private import std.experimental.allocator.common : stateSize; private import std.experimental.allocator.mallocator : Mallocator; @@ -25,10 +25,17 @@ private import std.experimental.allocator.mallocator : Mallocator; struct OpenHashSet(T, Allocator = Mallocator, alias hashFunction = generateHash!T, bool supportGC = shouldAddGCRange!T) { + static if(isNoGCAllocator!(Allocator) && !supportGC) { + @nogc: + }
/** * Disallow copy construction */ +static if (__VERSION__ > 2086) { + @disable this(ref OpenHashSet); +} else { this(this) @disable; +} static if (stateSize!Allocator != 0) { diff --git a/src/containers/simdset.d b/src/containers/simdset.d index 8effa3a..5270ac3 100644 --- a/src/containers/simdset.d +++ b/src/containers/simdset.d @@ -7,7 +7,7 @@ module containers.simdset; private import std.experimental.allocator.mallocator : Mallocator; - +private import containers.internal.node : isNoGCAllocator; /** * Set implementation that is well suited for small sets and simple items. * @@ -24,10 +24,18 @@ private import std.experimental.allocator.mallocator : Mallocator; version (D_InlineAsm_X86_64) struct SimdSet(T, Allocator = Mallocator) if (T.sizeof == 1 || T.sizeof == 2 || T.sizeof == 4 || T.sizeof == 8) { + static if(isNoGCAllocator!(Allocator)) { + @nogc: + } +static if (__VERSION__ > 2086) { + @disable this(ref SimdSet); +} else { this(this) @disable; +} private import std.experimental.allocator.common : stateSize; + static if (stateSize!Allocator != 0) { /// No default construction if an allocator must be provided. diff --git a/src/containers/slist.d b/src/containers/slist.d index a0d1de8..bdcee73 100644 --- a/src/containers/slist.d +++ b/src/containers/slist.d @@ -7,7 +7,7 @@ module containers.slist; -private import containers.internal.node : shouldAddGCRange; +private import containers.internal.node : shouldAddGCRange,isNoGCAllocator; private import std.experimental.allocator.mallocator : Mallocator; /** @@ -20,8 +20,15 @@ private import std.experimental.allocator.mallocator : Mallocator; */ struct SList(T, Allocator = Mallocator, bool supportGC = shouldAddGCRange!T) { + static if(isNoGCAllocator!(Allocator) && !supportGC) { + @nogc: + } /// Disable copying. 
+static if (__VERSION__ > 2086) { + @disable this(ref SList); +} else { this(this) @disable; +} private import std.experimental.allocator.common : stateSize; diff --git a/src/containers/treemap.d b/src/containers/treemap.d index b59ebc2..18c3179 100644 --- a/src/containers/treemap.d +++ b/src/containers/treemap.d @@ -7,7 +7,7 @@ module containers.treemap; -private import containers.internal.node : shouldAddGCRange; +private import containers.internal.node : shouldAddGCRange,isNoGCAllocator; private import std.experimental.allocator.mallocator : Mallocator; /** @@ -23,7 +23,15 @@ private import std.experimental.allocator.mallocator : Mallocator; struct TreeMap(K, V, Allocator = Mallocator, alias less = "a < b", bool supportGC = shouldAddGCRange!K || shouldAddGCRange!V, size_t cacheLineSize = 64) { + static if(isNoGCAllocator!(Allocator) && !supportGC){ + @nogc: + } + +static if (__VERSION__ > 2086) { + @disable this(ref TreeMap); +} else { this(this) @disable; +} private import std.experimental.allocator.common : stateSize; diff --git a/src/containers/ttree.d b/src/containers/ttree.d index 5e7e908..ce7d0f7 100644 --- a/src/containers/ttree.d +++ b/src/containers/ttree.d @@ -7,7 +7,7 @@ module containers.ttree; -private import containers.internal.node : shouldAddGCRange; +private import containers.internal.node : shouldAddGCRange,isNoGCAllocator; private import containers.internal.mixins : AllocatorState; private import std.experimental.allocator.mallocator : Mallocator; @@ -37,11 +37,18 @@ private import std.experimental.allocator.mallocator : Mallocator; struct TTree(T, Allocator = Mallocator, bool allowDuplicates = false, alias less = "a < b", bool supportGC = shouldAddGCRange!T, size_t cacheLineSize = 64) { + static if(isNoGCAllocator!(Allocator) && !supportGC) { + @nogc: + } /** * T-Trees are not copyable due to the way they manage memory and interact * with allocators. 
*/ +static if (__VERSION__ > 2086) { + @disable this(ref TTree); +} else { this(this) @disable; +} static if (stateSize!Allocator != 0) { @@ -899,7 +906,7 @@ private: return r; } - void rotateLeft(ref Node* root, AllocatorType allocator) @safe + void rotateLeft(ref Node* root, AllocatorType allocator) @trusted { Node* newRoot; if (right.left !is null && right.right is null) @@ -927,7 +934,7 @@ private: cleanup(newRoot, root, allocator); } - void rotateRight(ref Node* root, AllocatorType allocator) @safe + void rotateRight(ref Node* root, AllocatorType allocator) @trusted { Node* newRoot; if (left.right !is null && left.left is null) @@ -944,6 +951,7 @@ private: } else { + newRoot = left; newRoot.parent = this.parent; left = newRoot.right; diff --git a/src/containers/unrolledlist.d b/src/containers/unrolledlist.d index 15d963b..abb68b1 100644 --- a/src/containers/unrolledlist.d +++ b/src/containers/unrolledlist.d @@ -8,7 +8,7 @@ module containers.unrolledlist; private import core.lifetime : move; -private import containers.internal.node : shouldAddGCRange; +private import containers.internal.node : shouldAddGCRange,isNoGCAllocator; private import std.experimental.allocator.mallocator : Mallocator; version (X86_64) @@ -32,7 +32,15 @@ version (X86_64) struct UnrolledList(T, Allocator = Mallocator, bool supportGC = shouldAddGCRange!T, size_t cacheLineSize = 64) { + static if(isNoGCAllocator!(Allocator) && !supportGC) { + @nogc: + } + +static if (__VERSION__ > 2086) { + @disable this(ref UnrolledList); +} else { this(this) @disable; +} private import std.experimental.allocator.common : stateSize;