| Index: src/x64/code-stubs-x64.cc
| diff --git a/src/x64/code-stubs-x64.cc b/src/x64/code-stubs-x64.cc
| index da1d740ed5a1bd88900007db93a30512134f976c..8bc4b59ca4ff1877d881d1df1e49fa514c33fb34 100644
| --- a/src/x64/code-stubs-x64.cc
| +++ b/src/x64/code-stubs-x64.cc
| @@ -2115,10 +2115,30 @@ void CallICStub::Generate(MacroAssembler* masm) {
|
| // The checks. First, does rdi match the recorded monomorphic target?
| __ SmiToInteger32(rdx, rdx);
| - __ cmpp(rdi, FieldOperand(rbx, rdx, times_pointer_size,
| - FixedArray::kHeaderSize));
| + __ movp(rcx,
| + FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize));
| +
| + // We don't know that we have a weak cell. We might have a private symbol
| + // or an AllocationSite, but the memory is safe to examine.
| + // AllocationSite::kTransitionInfoOffset - contains a Smi or pointer to
| + // FixedArray.
| + // WeakCell::kValueOffset - contains a JSFunction or Smi(0)
| + // Symbol::kHashFieldSlot - if the low bit is 1, then the hash is not
| + // computed, meaning that it can't appear to be a pointer. If the low bit is
| + // 0, then hash is computed, but the 0 bit prevents the field from appearing
| + // to be a pointer.
| + STATIC_ASSERT(WeakCell::kSize >= kPointerSize);
| + STATIC_ASSERT(AllocationSite::kTransitionInfoOffset ==
| + WeakCell::kValueOffset &&
| + WeakCell::kValueOffset == Symbol::kHashFieldSlot);
| +
| + __ cmpp(rdi, FieldOperand(rcx, WeakCell::kValueOffset));
| __ j(not_equal, &extra_checks_or_miss);
|
| + // The compare above could have been a SMI/SMI comparison. Guard against this
| + // convincing us that we have a monomorphic JSFunction.
| + __ JumpIfSmi(rdi, &extra_checks_or_miss);
| +
| __ bind(&have_js_function);
| if (CallAsMethod()) {
| EmitContinueIfStrictOrNative(masm, &cont);
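The hunk above is the heart of the change: instead of comparing rdi against the raw feedback-vector slot, the stub loads the slot into rcx and compares rdi against the field at WeakCell::kValueOffset. The STATIC_ASSERTs are what make that single load safe: a WeakCell, an AllocationSite, and a private symbol all place a harmless-to-read field at the same offset, and the trailing JumpIfSmi rejects the case where the loaded field and rdi are both Smis. Below is a minimal standalone C++ sketch of that idea; the names (Tagged, kHeapObjectTag, LooksMonomorphicFor, the struct layouts) are illustrative assumptions for this sketch, not V8's actual definitions.

    #include <cstddef>
    #include <cstdint>
    #include <cstdio>

    // Hypothetical tagged-value model; the type and constant names are
    // illustrative, not V8's actual definitions.
    using Tagged = std::uintptr_t;
    constexpr Tagged kHeapObjectTag = 1;  // low bit set: heap pointer; clear: Smi

    inline bool IsSmi(Tagged v) { return (v & 1) == 0; }

    // Three unrelated heap-object layouts that deliberately share the offset of
    // their first field, mirroring the STATIC_ASSERTs in the hunk above.
    struct WeakCell       { Tagged value; };            // JSFunction or Smi(0)
    struct AllocationSite { Tagged transition_info; };  // Smi or pointer to FixedArray
    struct Symbol         { Tagged hash_field; };       // low bit marks "hash not computed"

    static_assert(offsetof(WeakCell, value) == offsetof(AllocationSite, transition_info) &&
                      offsetof(WeakCell, value) == offsetof(Symbol, hash_field),
                  "a single load at one offset is safe for all three layouts");

    // Model of the fast-path check. The feedback slot is assumed to hold one of
    // the three heap objects above, so reading the shared field is always safe.
    bool LooksMonomorphicFor(Tagged feedback_slot, Tagged callee) {
      const WeakCell* cell =
          reinterpret_cast<const WeakCell*>(feedback_slot - kHeapObjectTag);
      Tagged recorded = cell->value;         // FieldOperand(rcx, WeakCell::kValueOffset)
      if (recorded != callee) return false;  // __ cmpp + __ j(not_equal, ...)
      return !IsSmi(callee);                 // __ JumpIfSmi(rdi, ...): a Smi/Smi match
                                             // must not look like a JSFunction
    }

    int main() {
      Tagged fake_function = 0x1001;  // odd value: stands in for a tagged JSFunction
      WeakCell cell{fake_function};
      Tagged slot = reinterpret_cast<Tagged>(&cell) + kHeapObjectTag;
      std::printf("same callee:      %d\n", LooksMonomorphicFor(slot, fake_function));
      std::printf("different callee: %d\n", LooksMonomorphicFor(slot, Tagged{42} << 1));
    }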
| @@ -2147,8 +2167,6 @@ void CallICStub::Generate(MacroAssembler* masm) {
| __ bind(&extra_checks_or_miss);
| Label uninitialized, miss;
|
| - __ movp(rcx, FieldOperand(rbx, rdx, times_pointer_size,
| - FixedArray::kHeaderSize));
| __ Cmp(rcx, TypeFeedbackVector::MegamorphicSentinel(isolate));
| __ j(equal, &slow_start);
|
| @@ -2191,14 +2209,20 @@ void CallICStub::Generate(MacroAssembler* masm) {
| // Update stats.
| __ SmiAddConstant(FieldOperand(rbx, with_types_offset), Smi::FromInt(1));
|
| - // Store the function.
| - __ movp(FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize),
| - rdi);
| + // Store the function. Use a stub since we need a frame for allocation.
| + // rbx - vector
| + // rdx - slot (needs to be in smi form)
| + // rdi - function
| + {
| + FrameScope scope(masm, StackFrame::INTERNAL);
| + CreateWeakCellStub create_stub(isolate);
| +
| + __ Integer32ToSmi(rdx, rdx);
| + __ Push(rdi);
| + __ CallStub(&create_stub);
| + __ Pop(rdi);
| + }
|
| - // Update the write barrier.
| - __ movp(rax, rdi);
| - __ RecordWriteArray(rbx, rax, rdx, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
| - OMIT_SMI_CHECK);
| __ jmp(&have_js_function);
|
| // We are here because tracing is on or we encountered a MISS case we can't
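In the uninitialized path above, the old code stored the JSFunction directly into the vector slot and emitted a write barrier; the new code instead calls CreateWeakCellStub inside an INTERNAL frame (the comment notes a frame is needed because the stub allocates), converts the slot index back to Smi form with Integer32ToSmi, and preserves the callee by pushing and popping rdi around the call. A rough C++ model of the resulting slot update is sketched below; JSFunction, WeakCell, FeedbackSlot and the weak-reference behaviour are illustrative assumptions standing in for the real heap objects, which this diff does not define.

    #include <cstddef>
    #include <cstdio>
    #include <memory>
    #include <vector>

    // Hypothetical stand-ins, not V8 types.
    struct JSFunction {};

    // A real WeakCell holds its target weakly; weak_ptr models that here, so a
    // cached callee does not by itself keep the function alive.
    struct WeakCell {
      std::weak_ptr<JSFunction> value;
    };

    using FeedbackSlot = std::shared_ptr<WeakCell>;

    // Conceptual effect of the stub call: allocate a WeakCell wrapping the
    // function and store the cell in the feedback-vector slot, instead of
    // storing the function directly and emitting a write barrier.
    void RecordMonomorphicTarget(std::vector<FeedbackSlot>& vector, std::size_t slot,
                                 const std::shared_ptr<JSFunction>& function) {
      // In generated code this allocation can trigger GC, which is why the stub
      // enters an INTERNAL frame and preserves the callee register.
      auto cell = std::make_shared<WeakCell>();
      cell->value = function;
      vector[slot] = std::move(cell);
    }

    int main() {
      auto fn = std::make_shared<JSFunction>();
      std::vector<FeedbackSlot> feedback_vector(1);
      RecordMonomorphicTarget(feedback_vector, 0, fn);
      fn.reset();  // drop the last strong reference
      bool alive = feedback_vector[0] && !feedback_vector[0]->value.expired();
      std::printf("still reachable via IC: %s\n", alive ? "yes" : "no");  // prints "no"
    }

The Push/Pop of rdi in the hunk corresponds to keeping the function in hand across an allocating call; the sketch needs no such bookkeeping because ordinary C++ ownership handles it.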
| @@ -2260,6 +2284,7 @@ void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
| // It is important that the store buffer overflow stubs are generated first.
| ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate);
| CreateAllocationSiteStub::GenerateAheadOfTime(isolate);
| + CreateWeakCellStub::GenerateAheadOfTime(isolate);
| BinaryOpICStub::GenerateAheadOfTime(isolate);
| BinaryOpICWithAllocationSiteStub::GenerateAheadOfTime(isolate);
| }