Index: src/arm64/code-stubs-arm64.cc
diff --git a/src/arm64/code-stubs-arm64.cc b/src/arm64/code-stubs-arm64.cc
index 40bd0504c27046d283d829f2d8c89e28be979940..a3ab71d61e40c05ff4510b15374492c3641cfc81 100644
--- a/src/arm64/code-stubs-arm64.cc
+++ b/src/arm64/code-stubs-arm64.cc
@@ -1971,212 +1971,6 @@ void CallConstructStub::Generate(MacroAssembler* masm) {
   __ Jump(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
 }
 
-// Note: feedback_vector and slot are clobbered after the call.
-static void IncrementCallCount(MacroAssembler* masm, Register feedback_vector,
-                               Register slot) {
-  __ Add(feedback_vector, feedback_vector,
-         Operand::UntagSmiAndScale(slot, kPointerSizeLog2));
-  __ Add(feedback_vector, feedback_vector,
-         Operand(FixedArray::kHeaderSize + kPointerSize));
-  __ Ldr(slot, FieldMemOperand(feedback_vector, 0));
-  __ Add(slot, slot, Operand(Smi::FromInt(1)));
-  __ Str(slot, FieldMemOperand(feedback_vector, 0));
-}
-
-void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) {
-  // x0 - number of arguments
-  // x1 - function
-  // x3 - slot id
-  // x2 - vector
-  // x4 - allocation site (loaded from vector[slot])
-  Register function = x1;
-  Register feedback_vector = x2;
-  Register index = x3;
-  Register allocation_site = x4;
-  Register scratch = x5;
-
-  __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, scratch);
-  __ Cmp(function, scratch);
-  __ B(ne, miss);
-
-  // Increment the call count for monomorphic function calls.
-  IncrementCallCount(masm, feedback_vector, index);
-
-  // Set up arguments for the array constructor stub.
-  Register allocation_site_arg = feedback_vector;
-  Register new_target_arg = index;
-  __ Mov(allocation_site_arg, allocation_site);
-  __ Mov(new_target_arg, function);
-  ArrayConstructorStub stub(masm->isolate());
-  __ TailCallStub(&stub);
-}
-
-
-void CallICStub::Generate(MacroAssembler* masm) {
-  ASM_LOCATION("CallICStub");
-
-  // x0 - number of arguments
-  // x1 - function
-  // x3 - slot id (Smi)
-  // x2 - vector
-  Label extra_checks_or_miss, call, call_function, call_count_incremented;
-
-  Register function = x1;
-  Register feedback_vector = x2;
-  Register index = x3;
-
-  // The checks. First, does x1 match the recorded monomorphic target?
-  __ Add(x4, feedback_vector,
-         Operand::UntagSmiAndScale(index, kPointerSizeLog2));
-  __ Ldr(x4, FieldMemOperand(x4, FixedArray::kHeaderSize));
-
-  // We don't know that we have a weak cell. We might have a private symbol
-  // or an AllocationSite, but the memory is safe to examine.
-  // AllocationSite::kTransitionInfoOffset - contains a Smi or pointer to
-  // FixedArray.
-  // WeakCell::kValueOffset - contains a JSFunction or Smi(0)
-  // Symbol::kHashFieldSlot - if the low bit is 1, then the hash is not
-  // computed, meaning that it can't appear to be a pointer. If the low bit is
-  // 0, then hash is computed, but the 0 bit prevents the field from appearing
-  // to be a pointer.
-  STATIC_ASSERT(WeakCell::kSize >= kPointerSize);
-  STATIC_ASSERT(AllocationSite::kTransitionInfoOffset ==
-                    WeakCell::kValueOffset &&
-                WeakCell::kValueOffset == Symbol::kHashFieldSlot);
-
-  __ Ldr(x5, FieldMemOperand(x4, WeakCell::kValueOffset));
-  __ Cmp(x5, function);
-  __ B(ne, &extra_checks_or_miss);
-
-  // The compare above could have been a SMI/SMI comparison. Guard against this
-  // convincing us that we have a monomorphic JSFunction.
-  __ JumpIfSmi(function, &extra_checks_or_miss);
-
-  __ Bind(&call_function);
-
-  // Increment the call count for monomorphic function calls.
-  IncrementCallCount(masm, feedback_vector, index);
-
-  __ Jump(masm->isolate()->builtins()->CallFunction(convert_mode(),
-                                                    tail_call_mode()),
-          RelocInfo::CODE_TARGET);
-
-  __ bind(&extra_checks_or_miss);
-  Label uninitialized, miss, not_allocation_site;
-
-  __ JumpIfRoot(x4, Heap::kmegamorphic_symbolRootIndex, &call);
-
-  __ Ldr(x5, FieldMemOperand(x4, HeapObject::kMapOffset));
-  __ JumpIfNotRoot(x5, Heap::kAllocationSiteMapRootIndex, &not_allocation_site);
-
-  HandleArrayCase(masm, &miss);
-
-  __ bind(&not_allocation_site);
-
-  // The following cases attempt to handle MISS cases without going to the
-  // runtime.
-  if (FLAG_trace_ic) {
-    __ jmp(&miss);
-  }
-
-  // TODO(mvstanton): the code below is effectively disabled. Investigate.
-  __ JumpIfRoot(x4, Heap::kuninitialized_symbolRootIndex, &miss);
-
-  // We are going megamorphic. If the feedback is a JSFunction, it is fine
-  // to handle it here. More complex cases are dealt with in the runtime.
-  __ AssertNotSmi(x4);
-  __ JumpIfNotObjectType(x4, x5, x5, JS_FUNCTION_TYPE, &miss);
-  __ Add(x4, feedback_vector,
-         Operand::UntagSmiAndScale(index, kPointerSizeLog2));
-  __ LoadRoot(x5, Heap::kmegamorphic_symbolRootIndex);
-  __ Str(x5, FieldMemOperand(x4, FixedArray::kHeaderSize));
-
-  __ Bind(&call);
-
-  // Increment the call count for megamorphic function calls.
-  IncrementCallCount(masm, feedback_vector, index);
-
-  __ Bind(&call_count_incremented);
-  __ Jump(masm->isolate()->builtins()->Call(convert_mode(), tail_call_mode()),
-          RelocInfo::CODE_TARGET);
-
-  __ bind(&uninitialized);
-
-  // We are going monomorphic, provided we actually have a JSFunction.
-  __ JumpIfSmi(function, &miss);
-
-  // Goto miss case if we do not have a function.
-  __ JumpIfNotObjectType(function, x5, x5, JS_FUNCTION_TYPE, &miss);
-
-  // Make sure the function is not the Array() function, which requires special
-  // behavior on MISS.
-  __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, x5);
-  __ Cmp(function, x5);
-  __ B(eq, &miss);
-
-  // Make sure the function belongs to the same native context.
-  __ Ldr(x4, FieldMemOperand(function, JSFunction::kContextOffset));
-  __ Ldr(x4, ContextMemOperand(x4, Context::NATIVE_CONTEXT_INDEX));
-  __ Ldr(x5, NativeContextMemOperand());
-  __ Cmp(x4, x5);
-  __ B(ne, &miss);
-
-  // Store the function. Use a stub since we need a frame for allocation.
-  // x2 - vector
-  // x3 - slot
-  // x1 - function
-  // x0 - number of arguments
-  {
-    FrameScope scope(masm, StackFrame::INTERNAL);
-    CreateWeakCellStub create_stub(masm->isolate());
-    __ SmiTag(x0);
-    __ Push(x0);
-    __ Push(feedback_vector, index);
-
-    __ Push(cp, function);
-    __ CallStub(&create_stub);
-    __ Pop(cp, function);
-
-    __ Pop(feedback_vector, index);
-    __ Pop(x0);
-    __ SmiUntag(x0);
-  }
-
-  __ B(&call_function);
-
-  // We are here because tracing is on or we encountered a MISS case we can't
-  // handle here.
-  __ bind(&miss);
-  GenerateMiss(masm);
-
-  // The runtime increments the call count in the vector for us.
-  __ B(&call_count_incremented);
-}
-
-
-void CallICStub::GenerateMiss(MacroAssembler* masm) {
-  ASM_LOCATION("CallICStub[Miss]");
-
-  FrameScope scope(masm, StackFrame::INTERNAL);
-
-  // Preserve the number of arguments as Smi.
-  __ SmiTag(x0);
-
-  // Push the receiver and the function and feedback info.
-  __ Push(x0, x1, x2, x3);
-
-  // Call the entry.
-  __ CallRuntime(Runtime::kCallIC_Miss);
-
-  // Move result to edi and exit the internal frame.
-  __ Mov(x1, x0);
-
-  // Restore number of arguments.
-  __ Pop(x0);
-  __ SmiUntag(x0);
-}
-
-
 void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
   // If the receiver is a smi trigger the non-string case.
   if (check_mode_ == RECEIVER_IS_UNKNOWN) {