Index: src/x64/code-stubs-x64.cc
diff --git a/src/x64/code-stubs-x64.cc b/src/x64/code-stubs-x64.cc
index 9813657e1a80f05c3b947268f52fa9ec82be55f8..9a0ba101ac5ade5e1c05fa7b26f08dba20f48e46 100644
--- a/src/x64/code-stubs-x64.cc
+++ b/src/x64/code-stubs-x64.cc
@@ -773,6 +773,9 @@ void BinaryOpStub::GenerateTypeTransition(MacroAssembler* masm) {
 
 
 void BinaryOpStub::Generate(MacroAssembler* masm) {
+  // Explicitly allow generation of nested stubs. It is safe here.
+  AllowStubCallsScope allow_stub_calls(masm, true);
+
   switch (operands_type_) {
     case BinaryOpIC::UNINITIALIZED:
       GenerateTypeTransition(masm);
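The hunk above enables nested stub generation for the whole of BinaryOpStub::Generate by constructing an AllowStubCallsScope with (masm, true). Below is a minimal, self-contained sketch of the RAII pattern this presumably relies on; MiniAssembler, AllowStubCallsScopeSketch, and the accessor names are stand-ins for illustration, not V8's actual MacroAssembler API. The scope records the assembler's previous allow-stub-calls flag, overrides it for the enclosing block, and restores it on destruction.

#include <cassert>

// Stand-in for v8's MacroAssembler: only the flag the scope manipulates.
class MiniAssembler {
 public:
  bool allow_stub_calls() const { return allow_stub_calls_; }
  void set_allow_stub_calls(bool value) { allow_stub_calls_ = value; }

 private:
  bool allow_stub_calls_ = false;
};

// Sketch of the assumed behaviour of AllowStubCallsScope: override the flag
// for the lifetime of the scope, then restore the previous value.
class AllowStubCallsScopeSketch {
 public:
  AllowStubCallsScopeSketch(MiniAssembler* masm, bool allow)
      : masm_(masm), previous_(masm->allow_stub_calls()) {
    masm_->set_allow_stub_calls(allow);
  }
  ~AllowStubCallsScopeSketch() { masm_->set_allow_stub_calls(previous_); }

 private:
  MiniAssembler* masm_;
  bool previous_;
};

int main() {
  MiniAssembler masm;
  {
    AllowStubCallsScopeSketch allow_stub_calls(&masm, true);
    assert(masm.allow_stub_calls());   // Nested stub calls permitted here.
  }
  assert(!masm.allow_stub_calls());    // Previous value restored on exit.
  return 0;
}

The RAII form keeps the flag correct on every exit path out of Generate, including early returns from the individual BinaryOpIC cases.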
@@ -3349,7 +3352,7 @@ bool CEntryStub::NeedsImmovableCode() {
 }
 
 
-bool CEntryStub::CompilingCallsToThisStubIsGCSafe() {
+bool CEntryStub::IsPregenerated() {
   return result_size_ == 1;
 }
 
@@ -5578,7 +5581,7 @@ struct AheadOfTimeWriteBarrierStubList kAheadOfTime[] = {
 };
 
 
-bool RecordWriteStub::CompilingCallsToThisStubIsGCSafe() {
+bool RecordWriteStub::IsPregenerated() {
   for (AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime;
        !entry->object.is(no_reg);
        entry++) {