From d4d05ceb418c525b0d07e76b81b8694ac2f5b309 Mon Sep 17 00:00:00 2001 From: Daniel Bevenius Date: Wed, 16 Sep 2020 06:12:54 +0200 Subject: [PATCH] [deps] V8: cherry-pick 71736859756b2bd0444bdb0a87a Original commit message: [heap] Add large_object_threshold to AllocateRaw This commit adds a check in Heap::AllocateRaw when setting the large_object variable, when the AllocationType is of type kCode, to take into account the size of the CodeSpace's area size. The motivation for this change is that without this check it is possible that size_in_bytes is less than 128, and hence not considered a large object, but it might be larger than the available space in code_space->AreaSize(), which will cause the object to be created in the CodeLargeObjectSpace. This will later cause a segmentation fault when calling the following chain of functions: if (!large_object) { MemoryChunk::FromHeapObject(heap_object) ->GetCodeObjectRegistry() ->RegisterNewlyAllocatedCodeObject(heap_object.address()); } We (Red Hat) ran into this issue when running Node.js v12.16.1 in combination with yarn on aarch64 (this was the only architecture that this happed on). 
Bug: v8:10808 Change-Id: I0c396b0eb64bc4cc91d9a3be521254f3130eac7b Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2390665 Commit-Queue: Ulan Degenbaev Reviewed-by: Ulan Degenbaev Cr-Commit-Position: refs/heads/master@{#69876} Refs: https://github.com/v8/v8/commit/71736859756b2bd0444bdb0a87a61a0b090cbba2 --- deps/v8/src/heap/heap-inl.h | 13 +++-- deps/v8/src/heap/heap.h | 6 ++- deps/v8/test/cctest/heap/heap-tester.h | 1 + deps/v8/test/cctest/heap/test-heap.cc | 69 ++++++++++++++++++++++++++ 4 files changed, 83 insertions(+), 6 deletions(-) diff --git a/deps/v8/src/heap/heap-inl.h b/deps/v8/src/heap/heap-inl.h index 39f5ec6c66e7f5..b56ebc03d58417 100644 --- a/deps/v8/src/heap/heap-inl.h +++ b/deps/v8/src/heap/heap-inl.h @@ -192,7 +192,12 @@ AllocationResult Heap::AllocateRaw(int size_in_bytes, AllocationType type, IncrementObjectCounters(); #endif - bool large_object = size_in_bytes > kMaxRegularHeapObjectSize; + size_t large_object_threshold = + AllocationType::kCode == type + ? 
std::min(kMaxRegularHeapObjectSize, code_space()->AreaSize()) + : kMaxRegularHeapObjectSize; + bool large_object = + static_cast<size_t>(size_in_bytes) > large_object_threshold; HeapObject object; AllocationResult allocation; @@ -225,10 +230,10 @@ AllocationResult Heap::AllocateRaw(int size_in_bytes, AllocationType type, allocation = old_space_->AllocateRaw(size_in_bytes, alignment, origin); } } else if (AllocationType::kCode == type) { - if (size_in_bytes <= code_space()->AreaSize() && !large_object) { - allocation = code_space_->AllocateRawUnaligned(size_in_bytes); - } else { + if (large_object) { allocation = code_lo_space_->AllocateRaw(size_in_bytes); + } else { + allocation = code_space_->AllocateRawUnaligned(size_in_bytes); } } else if (AllocationType::kMap == type) { allocation = map_space_->AllocateRawUnaligned(size_in_bytes); diff --git a/deps/v8/src/heap/heap.h b/deps/v8/src/heap/heap.h index 888d174c02fb4c..0165fa6970faa4 100644 --- a/deps/v8/src/heap/heap.h +++ b/deps/v8/src/heap/heap.h @@ -1404,8 +1404,10 @@ class Heap { // Heap object allocation tracking. ========================================== // =========================================================================== - void AddHeapObjectAllocationTracker(HeapObjectAllocationTracker* tracker); - void RemoveHeapObjectAllocationTracker(HeapObjectAllocationTracker* tracker); + V8_EXPORT_PRIVATE void AddHeapObjectAllocationTracker( + HeapObjectAllocationTracker* tracker); + V8_EXPORT_PRIVATE void RemoveHeapObjectAllocationTracker( + HeapObjectAllocationTracker* tracker); bool has_heap_object_allocation_tracker() const { return !allocation_trackers_.empty(); } diff --git a/deps/v8/test/cctest/heap/heap-tester.h b/deps/v8/test/cctest/heap/heap-tester.h index 998e3ff011fa3e..0061ce9d94eed7 100644 --- a/deps/v8/test/cctest/heap/heap-tester.h +++ b/deps/v8/test/cctest/heap/heap-tester.h @@ -11,6 +11,7 @@ // Tests that should have access to private methods of {v8::internal::Heap}.
// Those tests need to be defined using HEAP_TEST(Name) { ... }. #define HEAP_TEST_METHODS(V) \ + V(CodeLargeObjectSpace) \ V(CompactionFullAbortedPage) \ V(CompactionPartiallyAbortedPage) \ V(CompactionPartiallyAbortedPageIntraAbortedPointers) \ diff --git a/deps/v8/test/cctest/heap/test-heap.cc b/deps/v8/test/cctest/heap/test-heap.cc index b9a4b2101cc775..bbf62f85eb3fb1 100644 --- a/deps/v8/test/cctest/heap/test-heap.cc +++ b/deps/v8/test/cctest/heap/test-heap.cc @@ -7002,6 +7002,75 @@ TEST(Regress978156) { marking_state->GreyToBlack(filler); } +TEST(GarbageCollectionWithLocalHeap) { + FLAG_local_heaps = true; + ManualGCScope manual_gc_scope; + CcTest::InitializeVM(); + + Heap* heap = CcTest::i_isolate()->heap(); + + LocalHeap local_heap(heap); + CcTest::CollectGarbage(OLD_SPACE); + { ParkedScope parked_scope(&local_heap); } + CcTest::CollectGarbage(OLD_SPACE); +} + +TEST(Regress10698) { + ManualGCScope manual_gc_scope; + CcTest::InitializeVM(); + Heap* heap = CcTest::i_isolate()->heap(); + Factory* factory = CcTest::i_isolate()->factory(); + HandleScope handle_scope(CcTest::i_isolate()); + // This is modeled after the manual allocation folding of heap numbers in + // JSON parser (See commit ba7b25e). + // Step 1. Allocate a byte array in the old space. + Handle<ByteArray> array = + factory->NewByteArray(kTaggedSize, AllocationType::kOld); + // Step 2. Start incremental marking. + SimulateIncrementalMarking(heap, false); + // Step 3. Allocate another byte array. It will be black. + factory->NewByteArray(kTaggedSize, AllocationType::kOld); + Address address = reinterpret_cast<Address>
(array->GetDataStartAddress()); + HeapObject filler = HeapObject::FromAddress(address); + // Step 4. Set the filler at the end of the first array. + // It will have an impossible markbit pattern because the second markbit + // will be taken from the second array. + filler.set_map_after_allocation(*factory->one_pointer_filler_map()); +} + +class TestAllocationTracker : public HeapObjectAllocationTracker { + public: + explicit TestAllocationTracker(int expected_size) + : expected_size_(expected_size) {} + + void AllocationEvent(Address addr, int size) { + CHECK(expected_size_ == size); + address_ = addr; + } + + Address address() { return address_; } + + private: + int expected_size_; + Address address_; +}; + +HEAP_TEST(CodeLargeObjectSpace) { + Heap* heap = CcTest::heap(); + int size_in_bytes = kMaxRegularHeapObjectSize + kSystemPointerSize; + TestAllocationTracker allocation_tracker{size_in_bytes}; + heap->AddHeapObjectAllocationTracker(&allocation_tracker); + + AllocationResult allocation = heap->AllocateRaw( + size_in_bytes, AllocationType::kCode, AllocationOrigin::kGeneratedCode, + AllocationAlignment::kCodeAligned); + + CHECK(allocation.ToObjectChecked().address() == allocation_tracker.address()); + heap->CreateFillerObjectAt(allocation.ToObjectChecked().address(), + size_in_bytes, ClearRecordedSlots::kNo); + heap->RemoveHeapObjectAllocationTracker(&allocation_tracker); +} + } // namespace heap } // namespace internal } // namespace v8