diff --git a/src/passes/Heap2Local.cpp b/src/passes/Heap2Local.cpp
index 9c7ca32cf26..a92fd20af0e 100644
--- a/src/passes/Heap2Local.cpp
+++ b/src/passes/Heap2Local.cpp
@@ -857,6 +857,12 @@ struct Struct2Local : PostWalker<Struct2Local> {
     }
 
     if (curr->desc) {
+      auto descTrap = [&]() {
+        replaceCurrent(builder.blockify(builder.makeDrop(curr->ref),
+                                        builder.makeDrop(curr->desc),
+                                        builder.makeUnreachable()));
+      };
+
       // If we are doing a ref.cast_desc of the optimized allocation, but the
       // allocation does not have a descriptor, then we know the cast must fail.
       // We also know the cast must fail (except for nulls it might let through)
@@ -888,21 +894,16 @@
         } else {
           // Either the cast does not allow nulls or we know the value isn't
           // null anyway, so the cast certainly fails.
-          replaceCurrent(builder.blockify(builder.makeDrop(curr->ref),
-                                          builder.makeDrop(curr->desc),
-                                          builder.makeUnreachable()));
+          descTrap();
         }
-      } else {
-        assert(allocIsCastRef);
+      } else if (allocIsCastRef) {
         if (!Type::isSubType(allocation->type, curr->type)) {
           // The cast fails, so it must trap. We mark such failing casts as
           // fully consuming their inputs, so we cannot just emit the explicit
           // descriptor equality check below because it would appear to be able
           // to propagate the optimized allocation on to the parent (as a null
           // value, which might not validate).
-          replaceCurrent(builder.blockify(builder.makeDrop(curr->ref),
-                                          builder.makeDrop(curr->desc),
-                                          builder.makeUnreachable()));
+          descTrap();
         } else {
           // The cast succeeds iff the optimized allocation's descriptor is the
           // same as the given descriptor and traps otherwise.
@@ -915,6 +916,15 @@
             builder.makeRefNull(allocation->type.getHeapType()),
             builder.makeUnreachable())));
         }
+      } else {
+        // The allocation is neither the ref nor the descriptor input to this
+        // cast. This can happen if a previous operation led to the StructNew
+        // being dropped, as a result of it being used in unreachable code (it
+        // ends up happening because some of the initial analysis, like Parents,
+        // is stale; we could also recompute Parents after each Struct2Local,
+        // but it is simple enough to handle this with a trap).
+        assert(curr->type == Type::unreachable);
+        descTrap();
       }
     } else {
       // We know this RefCast receives our allocation, so we can see whether it
diff --git a/test/lit/passes/heap2local-desc.wast b/test/lit/passes/heap2local-desc.wast
index 639285c3092..aee2d11f1a2 100644
--- a/test/lit/passes/heap2local-desc.wast
+++ b/test/lit/passes/heap2local-desc.wast
@@ -1144,3 +1144,77 @@
   )
 )
 
+;; A chain of descriptors, where initial optimizations influence later ones.
+(module
+  (rec
+    ;; CHECK:      (rec
+    ;; CHECK-NEXT:  (type $A (shared (descriptor $B (struct))))
+    (type $A (shared (descriptor $B (struct))))
+    ;; CHECK:       (type $B (sub (shared (describes $A (descriptor $C (struct))))))
+    (type $B (sub (shared (describes $A (descriptor $C (struct))))))
+    ;; CHECK:       (type $C (sub (shared (describes $B (struct)))))
+    (type $C (sub (shared (describes $B (struct)))))
+  )
+
+  ;; CHECK:      (type $3 (func (result (ref (shared any)))))
+
+  ;; CHECK:      (func $test (type $3) (result (ref (shared any)))
+  ;; CHECK-NEXT:  (local $temp (ref $C))
+  ;; CHECK-NEXT:  (local $1 (ref (shared none)))
+  ;; CHECK-NEXT:  (local $2 (ref (shared none)))
+  ;; CHECK-NEXT:  (drop
+  ;; CHECK-NEXT:   (block (result (ref null (shared none)))
+  ;; CHECK-NEXT:    (ref.null (shared none))
+  ;; CHECK-NEXT:   )
+  ;; CHECK-NEXT:  )
+  ;; CHECK-NEXT:  (block
+  ;; CHECK-NEXT:   (drop
+  ;; CHECK-NEXT:    (block
+  ;; CHECK-NEXT:     (drop
+  ;; CHECK-NEXT:      (block (result (ref null (shared none)))
+  ;; CHECK-NEXT:       (local.set $2
+  ;; CHECK-NEXT:        (ref.as_non_null
+  ;; CHECK-NEXT:         (ref.null (shared none))
+  ;; CHECK-NEXT:        )
+  ;; CHECK-NEXT:       )
+  ;; CHECK-NEXT:       (local.set $1
+  ;; CHECK-NEXT:        (local.get $2)
+  ;; CHECK-NEXT:       )
+  ;; CHECK-NEXT:       (ref.null (shared none))
+  ;; CHECK-NEXT:      )
+  ;; CHECK-NEXT:     )
+  ;; CHECK-NEXT:     (drop
+  ;; CHECK-NEXT:      (ref.null (shared none))
+  ;; CHECK-NEXT:     )
+  ;; CHECK-NEXT:     (unreachable)
+  ;; CHECK-NEXT:    )
+  ;; CHECK-NEXT:   )
+  ;; CHECK-NEXT:   (drop
+  ;; CHECK-NEXT:    (block (result (ref null (shared none)))
+  ;; CHECK-NEXT:     (ref.null (shared none))
+  ;; CHECK-NEXT:    )
+  ;; CHECK-NEXT:   )
+  ;; CHECK-NEXT:   (unreachable)
+  ;; CHECK-NEXT:  )
+  ;; CHECK-NEXT: )
+  (func $test (result (ref (shared any)))
+    (local $temp (ref $C))
+    (local.set $temp
+      ;; We optimize this first, making the |local.get| below unreachable, and
+      ;; making that inner ref.cast_desc unreachable, which leads to the
+      ;; |struct.new_default $B| being dropped, and in particular having a new
+      ;; parent (the drop). We should not get confused and error internally.
+      (struct.new_default $C)
+    )
+    (ref.cast_desc (ref $B)
+      (ref.cast_desc (ref $B)
+        (struct.new_default $B
+          (ref.null (shared none))
+        )
+        (local.get $temp)
+      )
+      (struct.new_default $C)
+    )
+  )
+)
+
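For reference, a minimal sketch of the shape the new descTrap helper builds: it consumes both of the cast's operands and then traps, i.e. it emits (block (drop ...) (drop ...) (unreachable)). It uses only Builder calls that already appear in the diff (blockify, makeDrop, makeUnreachable); the free function makeDescTrap and the standalone file layout are hypothetical, for illustration only, and are not part of the patch.

// Hypothetical illustration of the descTrap pattern from the patch above:
// drop the cast's reference and descriptor operands, then emit unreachable,
// since the cast is known to trap at runtime.
#include "wasm-builder.h"
#include "wasm.h"

namespace wasm {

Expression* makeDescTrap(Builder& builder, Expression* ref, Expression* desc) {
  return builder.blockify(builder.makeDrop(ref),
                          builder.makeDrop(desc),
                          builder.makeUnreachable());
}

} // namespace wasm

Factoring the pattern into one helper keeps the three call sites that now use descTrap (the certainly-failing cast, the impossible-subtype case, and the new stale-Parents case) consistent about consuming both operands before trapping.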