From 44c8e1b39dc43478fe2b9feb5470a590ffe4ac2e Mon Sep 17 00:00:00 2001
From: Berea George-Alexandru
Date: Fri, 16 May 2025 23:27:31 +0300
Subject: [PATCH 1/2] Added nothrow @nogc @safe to the allocate function and
 all the functions it calls

---
 .../building_blocks/kernighan_ritchie.d       | 30 ++++++++++++-------
 1 file changed, 20 insertions(+), 10 deletions(-)

diff --git a/std/experimental/allocator/building_blocks/kernighan_ritchie.d b/std/experimental/allocator/building_blocks/kernighan_ritchie.d
index 167cf1bc6bc..ccc7f3fd434 100644
--- a/std/experimental/allocator/building_blocks/kernighan_ritchie.d
+++ b/std/experimental/allocator/building_blocks/kernighan_ritchie.d
@@ -111,34 +111,38 @@ struct KRRegion(ParentAllocator = NullAllocator)
 
         this(this) @disable;
 
+        nothrow @nogc @safe
         void[] payload() inout
         {
-            return (cast(ubyte*) &this)[0 .. size];
+            return (() @trusted => (cast(ubyte*) &this)[0 .. size])();
         }
 
+        nothrow @nogc @safe
         bool adjacent(in Node* right) const
         {
             assert(right);
             auto p = payload;
-            return p.ptr < right && right < p.ptr + p.length + Node.sizeof;
+            return p.ptr < right && right < (() @trusted => (p.ptr + p.length + Node.sizeof))();
         }
 
+        nothrow @nogc @safe
         bool coalesce(void* memoryEnd = null)
        {
             // Coalesce the last node before the memory end with any possible gap
             if (memoryEnd
-                && memoryEnd < payload.ptr + payload.length + Node.sizeof)
+                && memoryEnd < (() @trusted => (payload.ptr + payload.length + Node.sizeof))())
             {
-                size += memoryEnd - (payload.ptr + payload.length);
+                size += (() @trusted => (memoryEnd - (payload.ptr + payload.length)))();
                 return true;
             }
 
             if (!adjacent(next)) return false;
-            size = (cast(ubyte*) next + next.size) - cast(ubyte*) &this;
+            size = (() @trusted => ((cast(ubyte*) next + next.size) - cast(ubyte*) &this))();
             next = next.next;
             return true;
         }
 
+        nothrow @nogc @safe
         Tuple!(void[], Node*) allocateHere(size_t bytes)
         {
             assert(bytes >= Node.sizeof);
@@ -152,7 +156,7 @@ struct KRRegion(ParentAllocator = NullAllocator)
             if (leftover >= Node.sizeof)
             {
                 // There's room for another node
-                auto newNode = cast(Node*) ((cast(ubyte*) &this) + bytes);
+                auto newNode = (() @trusted => cast(Node*) ((cast(ubyte*) &this) + bytes))();
                 newNode.size = leftover;
                 newNode.next = next == &this ? newNode : next;
                 assert(next);
@@ -174,8 +178,8 @@ struct KRRegion(ParentAllocator = NullAllocator)
     else alias parent = ParentAllocator.instance;
     private void[] payload;
     private Node* root;
-    private bool regionMode() const { return bytesUsedRegionMode != size_t.max; }
-    private void cancelRegionMode() { bytesUsedRegionMode = size_t.max; }
+    nothrow @nogc @safe private bool regionMode() const { return bytesUsedRegionMode != size_t.max; }
+    nothrow @nogc @safe private void cancelRegionMode() { bytesUsedRegionMode = size_t.max; }
     private size_t bytesUsedRegionMode = 0;
 
     auto byNodePtr()
@@ -257,6 +261,7 @@ struct KRRegion(ParentAllocator = NullAllocator)
         }
     }
 
+    nothrow @nogc @safe
     private Node* sortFreelist(Node* root)
     {
         // Find a monotonic run
@@ -274,6 +279,7 @@ struct KRRegion(ParentAllocator = NullAllocator)
         return merge(root, tail);
     }
 
+    nothrow @nogc @safe
     private Node* merge(Node* left, Node* right)
     {
         assert(left != right);
@@ -290,6 +296,7 @@ struct KRRegion(ParentAllocator = NullAllocator)
         return result;
     }
 
+    nothrow @nogc @safe
     private void coalesceAndMakeCircular()
     {
         for (auto n = root;;)
@@ -368,6 +375,7 @@ struct KRRegion(ParentAllocator = NullAllocator)
     Otherwise, sorts the free list accumulated so far and switches
     strategy for future allocations to KR style.
     */
+    nothrow @nogc @safe
     void switchToFreeList()
     {
         if (!regionMode) return;
@@ -396,6 +404,7 @@ struct KRRegion(ParentAllocator = NullAllocator)
 
     Returns: A word-aligned buffer of `n` bytes, or `null`.
     */
+    nothrow @nogc @safe
     void[] allocate(size_t n)
     {
         if (!n || !root) return null;
@@ -413,7 +422,7 @@ struct KRRegion(ParentAllocator = NullAllocator)
             immutable balance = root.size - actualBytes;
             if (balance >= Node.sizeof)
             {
-                auto newRoot = cast(Node*) (result + actualBytes);
+                auto newRoot = (() @trusted => cast(Node*) ((cast(ubyte*) result) + actualBytes))();
                 newRoot.next = root.next;
                 newRoot.size = balance;
                 root = newRoot;
@@ -423,7 +432,7 @@ struct KRRegion(ParentAllocator = NullAllocator)
                 root = null;
                 switchToFreeList;
             }
-            return result[0 .. n];
+            return (() @trusted => result[0 .. n])();
         }
 
         // Not enough memory, switch to freelist mode and fall through
@@ -554,6 +563,7 @@ struct KRRegion(ParentAllocator = NullAllocator)
     at the front of the free list. These blocks get coalesced, whether
     `allocateAll` succeeds or fails due to fragmentation.
     */
+    nothrow @nogc @safe
     void[] allocateAll()
    {
         if (regionMode) switchToFreeList;

From 73020b172ae46fb40edb6f66d719404b7c1dea44 Mon Sep 17 00:00:00 2001
From: Berea George-Alexandru
Date: Sat, 17 May 2025 14:13:35 +0300
Subject: [PATCH 2/2] Removed unnecessary lambdas

---
 .../building_blocks/kernighan_ritchie.d          | 16 ++++++++--------
 1 file changed, 8 insertions(+), 8 deletions(-)

diff --git a/std/experimental/allocator/building_blocks/kernighan_ritchie.d b/std/experimental/allocator/building_blocks/kernighan_ritchie.d
index ccc7f3fd434..3d04bb6f053 100644
--- a/std/experimental/allocator/building_blocks/kernighan_ritchie.d
+++ b/std/experimental/allocator/building_blocks/kernighan_ritchie.d
@@ -111,33 +111,33 @@ struct KRRegion(ParentAllocator = NullAllocator)
 
         this(this) @disable;
 
-        nothrow @nogc @safe
+        nothrow @nogc @trusted
         void[] payload() inout
         {
-            return (() @trusted => (cast(ubyte*) &this)[0 .. size])();
+            return (cast(ubyte*) &this)[0 .. size];
         }
 
-        nothrow @nogc @safe
+        nothrow @nogc @trusted
         bool adjacent(in Node* right) const
         {
             assert(right);
             auto p = payload;
-            return p.ptr < right && right < (() @trusted => (p.ptr + p.length + Node.sizeof))();
+            return p.ptr < right && right < p.ptr + p.length + Node.sizeof;
         }
 
-        nothrow @nogc @safe
+        nothrow @nogc @trusted
         bool coalesce(void* memoryEnd = null)
         {
             // Coalesce the last node before the memory end with any possible gap
             if (memoryEnd
-                && memoryEnd < (() @trusted => (payload.ptr + payload.length + Node.sizeof))())
+                && memoryEnd < payload.ptr + payload.length + Node.sizeof)
             {
-                size += (() @trusted => (memoryEnd - (payload.ptr + payload.length)))();
+                size += memoryEnd - (payload.ptr + payload.length);
                 return true;
             }
 
             if (!adjacent(next)) return false;
-            size = (() @trusted => ((cast(ubyte*) next + next.size) - cast(ubyte*) &this))();
+            size = (cast(ubyte*) next + next.size) - cast(ubyte*) &this;
             next = next.next;
             return true;
         }
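
Note on the two idioms above: PATCH 1 keeps each function @safe and confines
the unverifiable pointer arithmetic to immediately-invoked () @trusted => ...
lambdas, while PATCH 2 removes the lambdas wherever they ended up wrapping
essentially the whole function body and marks those functions @trusted
outright. A @trusted lambda keeps the rest of the body under @safe checking,
but when the lambda spans the entire body the two forms are equivalent and
the lambda is pure noise. A minimal self-contained sketch of both idioms
follows; the struct Span and its members are hypothetical stand-ins for
illustration, not code from the patch:

struct Span
{
    size_t size;

    // PATCH 1 idiom: the function itself is @safe; only the cast and
    // slice, which the compiler cannot verify, run inside an
    // immediately-called @trusted lambda.
    nothrow @nogc @safe
    void[] bytesSafe()
    {
        return (() @trusted => (cast(ubyte*) &this)[0 .. size])();
    }

    // PATCH 2 idiom: the unverifiable expression is the whole body,
    // so the lambda adds no checking and the function is marked
    // @trusted directly.
    nothrow @nogc @trusted
    void[] bytesTrusted()
    {
        return (cast(ubyte*) &this)[0 .. size];
    }
}

@safe nothrow @nogc unittest
{
    auto s = Span(Span.sizeof);
    // Both variants are callable from @safe nothrow @nogc code,
    // which is what the patch series establishes for allocate().
    assert(s.bytesSafe().length == Span.sizeof);
    assert(s.bytesTrusted().length == Span.sizeof);
}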