
Commit f454e14

deps: V8: cherry-pick ac0fe8ec from upstream
Original commit message:

    [interpreter] Remove TryInstallOptimizedCode

    Removes the interrupt check and runtime call to TryInstallOptimizedCode
    from the optimization marker checks (i.e. CompileLazy and
    InterpreterEntryTrampoline). Instead, we rely on the other interrupt
    sources (in particular stack checks at function entries and loop
    headers) to install optimized code for us.

    This will hopefully not cause regressions, as we have plenty of other
    interrupt checks, but it may delay optimized code execution for some
    function by one function call.

    Bug: v8:6933
    Change-Id: Ieadfff7ae2078d2a84085294158ad9a706eb9c64
    Reviewed-on: https://chromium-review.googlesource.com/723475
    Reviewed-by: Ross McIlroy <rmcilroy@chromium.org>
    Commit-Queue: Leszek Swirski <leszeks@chromium.org>
    Cr-Commit-Position: refs/heads/master@{#48667}

Ref: https://bugs.chromium.org/p/v8/issues/detail?id=6933
Ref: v8/v8@ac0fe8e
PR-URL: #17695
Reviewed-By: Michaël Zasso <targos@protonmail.com>
Reviewed-By: Ben Noordhuis <info@bnoordhuis.nl>
Reviewed-By: James M Snell <jasnell@gmail.com>
1 parent bbaea12 commit f454e14
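For orientation, here is a minimal, self-contained C++ model of what this patch changes in MaybeTailCallOptimizedCodeSlot, the builtin edited below for every architecture. The OptimizationMarker values mirror V8's enum of that era, but the functions and the bool parameter are hypothetical stand-ins for generated assembly, not V8 APIs:

#include <iostream>

// Mirrors the V8 marker values handled by MaybeTailCallOptimizedCodeSlot.
enum class OptimizationMarker {
  kCompileOptimized,
  kCompileOptimizedConcurrent,
  kInOptimizationQueue,
};

// Hypothetical stand-in for GenerateTailCallToReturnedCode(masm, runtime_fn).
void tail_call_runtime(const char* runtime_fn) {
  std::cout << "tail call to Runtime::k" << runtime_fn << "\n";
}

// Before this patch: for kInOptimizationQueue, the builtin compared sp
// against the stack limit and, if an interrupt appeared pending, called
// Runtime::kTryInstallOptimizedCode.
void marker_check_before(OptimizationMarker marker, bool below_stack_limit) {
  switch (marker) {
    case OptimizationMarker::kCompileOptimized:
      tail_call_runtime("CompileOptimized_NotConcurrent");
      break;
    case OptimizationMarker::kCompileOptimizedConcurrent:
      tail_call_runtime("CompileOptimized_Concurrent");
      break;
    case OptimizationMarker::kInOptimizationQueue:
      if (below_stack_limit) tail_call_runtime("TryInstallOptimizedCode");
      // otherwise fall through to the unoptimized entry
      break;
  }
}

// After this patch: kInOptimizationQueue always falls through; a later
// stack check or loop-header interrupt installs the optimized code.
void marker_check_after(OptimizationMarker marker) {
  switch (marker) {
    case OptimizationMarker::kCompileOptimized:
      tail_call_runtime("CompileOptimized_NotConcurrent");
      break;
    case OptimizationMarker::kCompileOptimizedConcurrent:
      tail_call_runtime("CompileOptimized_Concurrent");
      break;
    case OptimizationMarker::kInOptimizationQueue:
      break;  // __ jmp(&fallthrough);
  }
}

int main() {
  marker_check_before(OptimizationMarker::kInOptimizationQueue, true);
  marker_check_after(OptimizationMarker::kInOptimizationQueue);
}

The hunks below make exactly this change once per architecture port; only the assembler dialect differs.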

11 files changed: +25 -106 lines changed


common.gypi

Lines changed: 1 addition & 1 deletion

@@ -27,7 +27,7 @@
 
     # Reset this number to 0 on major V8 upgrades.
     # Increment by one for each non-official patch applied to deps/v8.
-    'v8_embedder_string': '-node.4',
+    'v8_embedder_string': '-node.5',
 
     # Enable disassembler for `--print-code` v8 options
     'v8_enable_disassembler': 1,

deps/v8/src/builtins/arm/builtins-arm.cc

Lines changed: 3 additions & 10 deletions

@@ -782,22 +782,15 @@ static void MaybeTailCallOptimizedCodeSlot(MacroAssembler* masm,
                               Runtime::kCompileOptimized_Concurrent);
 
   {
-    // Otherwise, the marker is InOptimizationQueue.
+    // Otherwise, the marker is InOptimizationQueue, so fall through hoping
+    // that an interrupt will eventually update the slot with optimized code.
     if (FLAG_debug_code) {
       __ cmp(
           optimized_code_entry,
           Operand(Smi::FromEnum(OptimizationMarker::kInOptimizationQueue)));
       __ Assert(eq, kExpectedOptimizationSentinel);
     }
-    // Checking whether the queued function is ready for install is
-    // optional, since we come across interrupts and stack checks elsewhere.
-    // However, not checking may delay installing ready functions, and
-    // always checking would be quite expensive. A good compromise is to
-    // first check against stack limit as a cue for an interrupt signal.
-    __ LoadRoot(scratch2, Heap::kStackLimitRootIndex);
-    __ cmp(sp, Operand(scratch2));
-    __ b(hs, &fallthrough);
-    GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode);
+    __ jmp(&fallthrough);
   }
 }
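The FLAG_debug_code assert above compares the slot against a Smi-encoded sentinel rather than a raw integer. A rough sketch of that encoding, assuming the classic V8 convention of a small integer shifted left past a zero tag bit; the actual enum value and shift width vary by build configuration, so both are illustrative here:

#include <cassert>
#include <cstdint>

// Illustrative marker value; the real enum value is an implementation detail.
enum class OptimizationMarker : intptr_t { kInOptimizationQueue = 3 };

// Simplified Smi::FromEnum: tag a small integer by shifting past a 0 low bit.
constexpr intptr_t SmiFromEnum(OptimizationMarker m) {
  return static_cast<intptr_t>(m) << 1;  // low bit 0 marks a Smi, not a pointer
}

int main() {
  intptr_t slot = SmiFromEnum(OptimizationMarker::kInOptimizationQueue);
  // Models: __ cmp(optimized_code_entry, Operand(Smi::FromEnum(...)));
  //         __ Assert(eq, kExpectedOptimizationSentinel);
  assert((slot & 1) == 0);   // tag bit says Smi, not a heap-object pointer
  assert((slot >> 1) == 3);  // decodes back to the marker value
  return 0;
}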

deps/v8/src/builtins/arm64/builtins-arm64.cc

Lines changed: 3 additions & 10 deletions

@@ -788,22 +788,15 @@ static void MaybeTailCallOptimizedCodeSlot(MacroAssembler* masm,
                               Runtime::kCompileOptimized_Concurrent);
 
   {
-    // Otherwise, the marker is InOptimizationQueue.
+    // Otherwise, the marker is InOptimizationQueue, so fall through hoping
+    // that an interrupt will eventually update the slot with optimized code.
     if (FLAG_debug_code) {
       __ Cmp(
           optimized_code_entry,
           Operand(Smi::FromEnum(OptimizationMarker::kInOptimizationQueue)));
       __ Assert(eq, kExpectedOptimizationSentinel);
     }
-
-    // Checking whether the queued function is ready for install is optional,
-    // since we come across interrupts and stack checks elsewhere. However,
-    // not checking may delay installing ready functions, and always checking
-    // would be quite expensive. A good compromise is to first check against
-    // stack limit as a cue for an interrupt signal.
-    __ CompareRoot(masm->StackPointer(), Heap::kStackLimitRootIndex);
-    __ B(hs, &fallthrough);
-    GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode);
+    __ B(&fallthrough);
   }
 }

deps/v8/src/builtins/ia32/builtins-ia32.cc

Lines changed: 3 additions & 12 deletions

@@ -698,24 +698,15 @@ static void MaybeTailCallOptimizedCodeSlot(MacroAssembler* masm,
                               Runtime::kCompileOptimized_Concurrent);
 
   {
-    // Otherwise, the marker is InOptimizationQueue.
+    // Otherwise, the marker is InOptimizationQueue, so fall through hoping
+    // that an interrupt will eventually update the slot with optimized code.
     if (FLAG_debug_code) {
       __ cmp(
           optimized_code_entry,
           Immediate(Smi::FromEnum(OptimizationMarker::kInOptimizationQueue)));
       __ Assert(equal, kExpectedOptimizationSentinel);
     }
-
-    // Checking whether the queued function is ready for install is optional,
-    // since we come across interrupts and stack checks elsewhere. However,
-    // not checking may delay installing ready functions, and always checking
-    // would be quite expensive. A good compromise is to first check against
-    // stack limit as a cue for an interrupt signal.
-    ExternalReference stack_limit =
-        ExternalReference::address_of_stack_limit(masm->isolate());
-    __ cmp(esp, Operand::StaticVariable(stack_limit));
-    __ j(above_equal, &fallthrough);
-    GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode);
+    __ jmp(&fallthrough);
   }
 }

deps/v8/src/builtins/mips/builtins-mips.cc

Lines changed: 3 additions & 10 deletions

@@ -760,21 +760,14 @@ static void MaybeTailCallOptimizedCodeSlot(MacroAssembler* masm,
                               Runtime::kCompileOptimized_Concurrent);
 
   {
-    // Otherwise, the marker is InOptimizationQueue.
+    // Otherwise, the marker is InOptimizationQueue, so fall through hoping
+    // that an interrupt will eventually update the slot with optimized code.
     if (FLAG_debug_code) {
       __ Assert(
           eq, kExpectedOptimizationSentinel, optimized_code_entry,
           Operand(Smi::FromEnum(OptimizationMarker::kInOptimizationQueue)));
     }
-
-    // Checking whether the queued function is ready for install is optional,
-    // since we come across interrupts and stack checks elsewhere. However,
-    // not checking may delay installing ready functions, and always checking
-    // would be quite expensive. A good compromise is to first check against
-    // stack limit as a cue for an interrupt signal.
-    __ LoadRoot(at, Heap::kStackLimitRootIndex);
-    __ Branch(&fallthrough, hs, sp, Operand(at));
-    GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode);
+    __ jmp(&fallthrough);
   }
 }

deps/v8/src/builtins/mips64/builtins-mips64.cc

Lines changed: 3 additions & 10 deletions

@@ -760,21 +760,14 @@ static void MaybeTailCallOptimizedCodeSlot(MacroAssembler* masm,
                               Runtime::kCompileOptimized_Concurrent);
 
   {
-    // Otherwise, the marker is InOptimizationQueue.
+    // Otherwise, the marker is InOptimizationQueue, so fall through hoping
+    // that an interrupt will eventually update the slot with optimized code.
     if (FLAG_debug_code) {
       __ Assert(
           eq, kExpectedOptimizationSentinel, optimized_code_entry,
           Operand(Smi::FromEnum(OptimizationMarker::kInOptimizationQueue)));
     }
-
-    // Checking whether the queued function is ready for install is optional,
-    // since we come across interrupts and stack checks elsewhere. However,
-    // not checking may delay installing ready functions, and always checking
-    // would be quite expensive. A good compromise is to first check against
-    // stack limit as a cue for an interrupt signal.
-    __ LoadRoot(t0, Heap::kStackLimitRootIndex);
-    __ Branch(&fallthrough, hs, sp, Operand(t0));
-    GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode);
+    __ jmp(&fallthrough);
  }
 }

deps/v8/src/builtins/ppc/builtins-ppc.cc

Lines changed: 3 additions & 11 deletions

@@ -780,23 +780,15 @@ static void MaybeTailCallOptimizedCodeSlot(MacroAssembler* masm,
                               Runtime::kCompileOptimized_Concurrent);
 
   {
-    // Otherwise, the marker is InOptimizationQueue.
+    // Otherwise, the marker is InOptimizationQueue, so fall through hoping
+    // that an interrupt will eventually update the slot with optimized code.
     if (FLAG_debug_code) {
       __ CmpSmiLiteral(
           optimized_code_entry,
           Smi::FromEnum(OptimizationMarker::kInOptimizationQueue), r0);
       __ Assert(eq, kExpectedOptimizationSentinel);
     }
-
-    // Checking whether the queued function is ready for install is optional,
-    // since we come across interrupts and stack checks elsewhere. However,
-    // not checking may delay installing ready functions, and always checking
-    // would be quite expensive. A good compromise is to first check against
-    // stack limit as a cue for an interrupt signal.
-    __ LoadRoot(ip, Heap::kStackLimitRootIndex);
-    __ cmpl(sp, ip);
-    __ bge(&fallthrough);
-    GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode);
+    __ b(&fallthrough);
   }
 }

deps/v8/src/builtins/s390/builtins-s390.cc

Lines changed: 3 additions & 10 deletions

@@ -783,22 +783,15 @@ static void MaybeTailCallOptimizedCodeSlot(MacroAssembler* masm,
                               Runtime::kCompileOptimized_Concurrent);
 
   {
-    // Otherwise, the marker is InOptimizationQueue.
+    // Otherwise, the marker is InOptimizationQueue, so fall through hoping
+    // that an interrupt will eventually update the slot with optimized code.
     if (FLAG_debug_code) {
       __ CmpSmiLiteral(
           optimized_code_entry,
           Smi::FromEnum(OptimizationMarker::kInOptimizationQueue), r0);
       __ Assert(eq, kExpectedOptimizationSentinel);
     }
-
-    // Checking whether the queued function is ready for install is optional,
-    // since we come across interrupts and stack checks elsewhere. However,
-    // not checking may delay installing ready functions, and always checking
-    // would be quite expensive. A good compromise is to first check against
-    // stack limit as a cue for an interrupt signal.
-    __ CmpLogicalP(sp, RootMemOperand(Heap::kStackLimitRootIndex));
-    __ bge(&fallthrough, Label::kNear);
-    GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode);
+    __ b(&fallthrough, Label::kNear);
   }
 }

deps/v8/src/builtins/x64/builtins-x64.cc

Lines changed: 3 additions & 10 deletions

@@ -781,21 +781,14 @@ static void MaybeTailCallOptimizedCodeSlot(MacroAssembler* masm,
                               Runtime::kCompileOptimized_Concurrent);
 
   {
-    // Otherwise, the marker is InOptimizationQueue.
+    // Otherwise, the marker is InOptimizationQueue, so fall through hoping
+    // that an interrupt will eventually update the slot with optimized code.
     if (FLAG_debug_code) {
       __ SmiCompare(optimized_code_entry,
                     Smi::FromEnum(OptimizationMarker::kInOptimizationQueue));
       __ Assert(equal, kExpectedOptimizationSentinel);
     }
-
-    // Checking whether the queued function is ready for install is optional,
-    // since we come across interrupts and stack checks elsewhere. However,
-    // not checking may delay installing ready functions, and always checking
-    // would be quite expensive. A good compromise is to first check against
-    // stack limit as a cue for an interrupt signal.
-    __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
-    __ j(above_equal, &fallthrough);
-    GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode);
+    __ jmp(&fallthrough);
   }
 }

deps/v8/src/runtime/runtime-compiler.cc

Lines changed: 0 additions & 21 deletions

@@ -302,27 +302,6 @@ RUNTIME_FUNCTION(Runtime_CompileForOnStackReplacement) {
   return NULL;
 }
 
-
-RUNTIME_FUNCTION(Runtime_TryInstallOptimizedCode) {
-  HandleScope scope(isolate);
-  DCHECK_EQ(1, args.length());
-  CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
-
-  // First check if this is a real stack overflow.
-  StackLimitCheck check(isolate);
-  if (check.JsHasOverflowed(kStackSpaceRequiredForCompilation * KB)) {
-    return isolate->StackOverflow();
-  }
-
-  // Only try to install optimized functions if the interrupt was InstallCode.
-  if (isolate->stack_guard()->CheckAndClearInstallCode()) {
-    isolate->optimizing_compile_dispatcher()->InstallOptimizedFunctions();
-  }
-
-  return (function->IsOptimized()) ? function->code()
-                                   : function->shared()->code();
-}
-
 static Object* CompileGlobalEval(Isolate* isolate, Handle<String> source,
                                  Handle<SharedFunctionInfo> outer_info,
                                  LanguageMode language_mode,
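With Runtime_TryInstallOptimizedCode deleted, installation rides entirely on the interrupt path the commit message points at: the concurrent compiler requests an interrupt, and the next ordinary stack check at a function entry or loop header services it. A minimal sketch of that mechanism, with hypothetical names (StackGuard, RequestInstallCode, StackCheck) standing in for V8's stack-guard machinery:

#include <iostream>

// Hypothetical model of the surviving installation path: the concurrent
// compiler flags an interrupt, and the next stack check services it.
class StackGuard {
 public:
  // Called by the optimizing-compile dispatcher when optimized code is ready.
  void RequestInstallCode() { install_code_requested_ = true; }

  // Called from code generated at function entries and loop headers.
  void StackCheck() {
    if (install_code_requested_) {
      install_code_requested_ = false;
      // In real V8 this is roughly
      // optimizing_compile_dispatcher()->InstallOptimizedFunctions().
      std::cout << "installing queued optimized functions\n";
    }
  }

 private:
  bool install_code_requested_ = false;
};

int main() {
  StackGuard guard;
  guard.RequestInstallCode();  // background compilation finished
  guard.StackCheck();          // next function entry / loop header picks it up
  return 0;
}

This is why the commit message expects at most a one-call delay: a function whose optimized code is already queued falls through to unoptimized code once, and a subsequent stack check installs the optimized version.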
