Index: src/x64/code-stubs-x64.cc
diff --git a/src/x64/code-stubs-x64.cc b/src/x64/code-stubs-x64.cc
index 9cf02fafd9473b5fd0f7d7ed23fa3bf524a573bf..d1ea51bf8bc3b435acab16642190cf0a04a73cfc 100644
--- a/src/x64/code-stubs-x64.cc
+++ b/src/x64/code-stubs-x64.cc
@@ -166,6 +166,26 @@ void KeyedLoadFieldStub::InitializeInterfaceDescriptor(
 }
 
 
+void StringLengthStub::InitializeInterfaceDescriptor(
+    Isolate* isolate,
+    CodeStubInterfaceDescriptor* descriptor) {
+  static Register registers[] = { rax, rcx };
+  descriptor->register_param_count_ = 2;
+  descriptor->register_params_ = registers;
+  descriptor->deoptimization_handler_ = NULL;
+}
+
+
+void KeyedStringLengthStub::InitializeInterfaceDescriptor(
+    Isolate* isolate,
+    CodeStubInterfaceDescriptor* descriptor) {
+  static Register registers[] = { rdx, rax };
+  descriptor->register_param_count_ = 2;
+  descriptor->register_params_ = registers;
+  descriptor->deoptimization_handler_ = NULL;
+}
+
+
 void KeyedStoreFastElementStub::InitializeInterfaceDescriptor(
     Isolate* isolate,
     CodeStubInterfaceDescriptor* descriptor) {
@@ -913,35 +933,6 @@ void FunctionPrototypeStub::Generate(MacroAssembler* masm) {
 }
 
 
-void StringLengthStub::Generate(MacroAssembler* masm) {
-  Label miss;
-  Register receiver;
-  if (kind() == Code::KEYED_LOAD_IC) {
-    // ----------- S t a t e -------------
-    // -- rax : key
-    // -- rdx : receiver
-    // -- rsp[0] : return address
-    // -----------------------------------
-    __ Cmp(rax, masm->isolate()->factory()->length_string());
-    __ j(not_equal, &miss);
-    receiver = rdx;
-  } else {
-    ASSERT(kind() == Code::LOAD_IC);
-    // ----------- S t a t e -------------
-    // -- rax : receiver
-    // -- rcx : name
-    // -- rsp[0] : return address
-    // -----------------------------------
-    receiver = rax;
-  }
-
-  StubCompiler::GenerateLoadStringLength(masm, receiver, r8, r9, &miss);
-  __ bind(&miss);
-  StubCompiler::TailCallBuiltin(
-      masm, BaseLoadStoreStubCompiler::MissBuiltin(kind()));
-}
-
-
 void StoreArrayLengthStub::Generate(MacroAssembler* masm) {
   // ----------- S t a t e -------------
   // -- rax : value
@@ -1062,7 +1053,7 @@ void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
 }
 
 
-void ArgumentsAccessStub::GenerateNewNonStrictFast(MacroAssembler* masm) {
+void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) {
   // Stack layout:
   // rsp[0] : return address
   // rsp[8] : number of parameters (tagged)
@@ -1123,7 +1114,7 @@ void ArgumentsAccessStub::GenerateNewNonStrictFast(MacroAssembler* masm) {
   __ lea(r8, Operand(r8, rcx, times_pointer_size, FixedArray::kHeaderSize));
 
   // 3. Arguments object.
-  __ addq(r8, Immediate(Heap::kArgumentsObjectSize));
+  __ addq(r8, Immediate(Heap::kSloppyArgumentsObjectSize));
 
   // Do the allocation of all three objects in one go.
   __ Allocate(r8, rax, rdx, rdi, &runtime, TAG_OBJECT);
@@ -1137,7 +1128,7 @@ void ArgumentsAccessStub::GenerateNewNonStrictFast(MacroAssembler* masm) {
   __ testq(rbx, rbx);
   __ j(not_zero, &has_mapped_parameters, Label::kNear);
 
-  const int kIndex = Context::ARGUMENTS_BOILERPLATE_INDEX;
+  const int kIndex = Context::SLOPPY_ARGUMENTS_BOILERPLATE_INDEX;
   __ movp(rdi, Operand(rdi, Context::SlotOffset(kIndex)));
   __ jmp(&copy, Label::kNear);
 
@@ -1174,7 +1165,7 @@ void ArgumentsAccessStub::GenerateNewNonStrictFast(MacroAssembler* masm) {
   // Set up the elements pointer in the allocated arguments object.
   // If we allocated a parameter map, edi will point there, otherwise to the
   // backing store.
-  __ lea(rdi, Operand(rax, Heap::kArgumentsObjectSize));
+  __ lea(rdi, Operand(rax, Heap::kSloppyArgumentsObjectSize));
   __ movp(FieldOperand(rax, JSObject::kElementsOffset), rdi);
 
   // rax = address of new object (tagged)
@@ -1187,7 +1178,7 @@ void ArgumentsAccessStub::GenerateNewNonStrictFast(MacroAssembler* masm) {
   __ testq(rbx, rbx);
   __ j(zero, &skip_parameter_map);
 
-  __ LoadRoot(kScratchRegister, Heap::kNonStrictArgumentsElementsMapRootIndex);
+  __ LoadRoot(kScratchRegister, Heap::kSloppyArgumentsElementsMapRootIndex);
   // rbx contains the untagged argument count. Add 2 and tag to write.
   __ movp(FieldOperand(rdi, FixedArray::kMapOffset), kScratchRegister);
   __ Integer64PlusConstantToSmi(r9, rbx, 2);
@@ -1280,7 +1271,7 @@ void ArgumentsAccessStub::GenerateNewNonStrictFast(MacroAssembler* masm) {
 }
 
 
-void ArgumentsAccessStub::GenerateNewNonStrictSlow(MacroAssembler* masm) {
+void ArgumentsAccessStub::GenerateNewSloppySlow(MacroAssembler* masm) {
   // rsp[0] : return address
   // rsp[8] : number of parameters
   // rsp[16] : receiver displacement
@@ -1343,7 +1334,7 @@ void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
   __ j(zero, &add_arguments_object, Label::kNear);
   __ lea(rcx, Operand(rcx, times_pointer_size, FixedArray::kHeaderSize));
   __ bind(&add_arguments_object);
-  __ addq(rcx, Immediate(Heap::kArgumentsObjectSizeStrict));
+  __ addq(rcx, Immediate(Heap::kStrictArgumentsObjectSize));
 
   // Do the allocation of both objects in one go.
   __ Allocate(rcx, rax, rdx, rbx, &runtime, TAG_OBJECT);
@@ -1352,7 +1343,7 @@ void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
   __ movp(rdi, Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
   __ movp(rdi, FieldOperand(rdi, GlobalObject::kNativeContextOffset));
   const int offset =
-      Context::SlotOffset(Context::STRICT_MODE_ARGUMENTS_BOILERPLATE_INDEX);
+      Context::SlotOffset(Context::STRICT_ARGUMENTS_BOILERPLATE_INDEX);
   __ movp(rdi, Operand(rdi, offset));
 
   // Copy the JS object part.
@@ -1378,7 +1369,7 @@ void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
 
   // Set up the elements pointer in the allocated arguments object and
   // initialize the header in the elements fixed array.
-  __ lea(rdi, Operand(rax, Heap::kArgumentsObjectSizeStrict));
+  __ lea(rdi, Operand(rax, Heap::kStrictArgumentsObjectSize));
   __ movp(FieldOperand(rax, JSObject::kElementsOffset), rdi);
   __ LoadRoot(kScratchRegister, Heap::kFixedArrayMapRootIndex);
   __ movp(FieldOperand(rdi, FixedArray::kMapOffset), kScratchRegister);
@@ -2267,7 +2258,8 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) {
 
 void CallFunctionStub::Generate(MacroAssembler* masm) {
   // rbx : feedback vector
-  // rdx : (only if rbx is not undefined) slot in feedback vector (Smi)
+  // rdx : (only if rbx is not the megamorphic symbol) slot in feedback
+  //       vector (Smi)
   // rdi : the function to call
   Isolate* isolate = masm->isolate();
   Label slow, non_function, wrap, cont;
@@ -2327,7 +2319,7 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
   if (RecordCallTarget()) {
     // If there is a call target cache, mark it megamorphic in the
     // non-function case. MegamorphicSentinel is an immortal immovable
-    // object (undefined) so no write barrier is needed.
+    // object (megamorphic symbol) so no write barrier is needed.
     __ SmiToInteger32(rdx, rdx);
     __ Move(FieldOperand(rbx, rdx, times_pointer_size,
                          FixedArray::kHeaderSize),
@@ -2379,7 +2371,8 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
 void CallConstructStub::Generate(MacroAssembler* masm) {
   // rax : number of arguments
   // rbx : feedback vector
-  // rdx : (only if rbx is not undefined) slot in feedback vector (Smi)
+  // rdx : (only if rbx is not the megamorphic symbol) slot in feedback
+  //       vector (Smi)
   // rdi : constructor function
   Label slow, non_function_call;
 
@@ -4537,7 +4530,7 @@ void RecordWriteStub::GenerateIncremental(MacroAssembler* masm, Mode mode) {
     // remembered set.
     CheckNeedsToInformIncrementalMarker(
         masm, kUpdateRememberedSetOnNoNeedToInformIncrementalMarker, mode);
-    InformIncrementalMarker(masm, mode);
+    InformIncrementalMarker(masm);
     regs_.Restore(masm);
     __ RememberedSetHelper(object_,
                            address_,
@@ -4550,13 +4543,13 @@ void RecordWriteStub::GenerateIncremental(MacroAssembler* masm, Mode mode) {
 
   CheckNeedsToInformIncrementalMarker(
       masm, kReturnOnNoNeedToInformIncrementalMarker, mode);
-  InformIncrementalMarker(masm, mode);
+  InformIncrementalMarker(masm);
   regs_.Restore(masm);
   __ ret(0);
 }
 
 
-void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm, Mode mode) {
+void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm) {
   regs_.SaveCallerSaveRegisters(masm, save_fp_regs_mode_);
   Register address =
       arg_reg_1.is(regs_.address()) ? kScratchRegister : regs_.address();
@@ -4572,18 +4565,10 @@ void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm, Mode mode) {
 
   AllowExternalCallThatCantCauseGC scope(masm);
   __ PrepareCallCFunction(argument_count);
-  if (mode == INCREMENTAL_COMPACTION) {
-    __ CallCFunction(
-        ExternalReference::incremental_evacuation_record_write_function(
-            masm->isolate()),
-        argument_count);
-  } else {
-    ASSERT(mode == INCREMENTAL);
-    __ CallCFunction(
-        ExternalReference::incremental_marking_record_write_function(
-            masm->isolate()),
-        argument_count);
-  }
+  __ CallCFunction(
+      ExternalReference::incremental_marking_record_write_function(
+          masm->isolate()),
+      argument_count);
   regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode_);
 }
 
@@ -5003,15 +4988,14 @@ void ArrayConstructorStub::GenerateDispatchToArrayStub(
 void ArrayConstructorStub::Generate(MacroAssembler* masm) {
   // ----------- S t a t e -------------
   // -- rax : argc
-  // -- rbx : feedback vector (fixed array or undefined)
+  // -- rbx : feedback vector (fixed array or megamorphic symbol)
   // -- rdx : slot index (if ebx is fixed array)
   // -- rdi : constructor
   // -- rsp[0] : return address
   // -- rsp[8] : last argument
   // -----------------------------------
-  Handle<Object> undefined_sentinel(
-      masm->isolate()->heap()->undefined_value(),
-      masm->isolate());
+  Handle<Object> megamorphic_sentinel =
+      TypeFeedbackInfo::MegamorphicSentinel(masm->isolate());
 
   if (FLAG_debug_code) {
     // The array construct code is only set for the global and natives
@@ -5026,24 +5010,26 @@ void ArrayConstructorStub::Generate(MacroAssembler* masm) {
     __ CmpObjectType(rcx, MAP_TYPE, rcx);
     __ Check(equal, kUnexpectedInitialMapForArrayFunction);
 
-    // We should either have undefined in rbx or a valid fixed array.
+    // We should either have the megamorphic symbol in rbx or a valid
+    // fixed array.
     Label okay_here;
     Handle<Map> fixed_array_map = masm->isolate()->factory()->fixed_array_map();
-    __ Cmp(rbx, undefined_sentinel);
+    __ Cmp(rbx, megamorphic_sentinel);
     __ j(equal, &okay_here);
     __ Cmp(FieldOperand(rbx, 0), fixed_array_map);
     __ Assert(equal, kExpectedFixedArrayInRegisterRbx);
 
-    // rdx should be a smi if we don't have undefined in rbx.
+    // rdx should be a smi if we don't have the megamorphic symbol in rbx.
     __ AssertSmi(rdx);
 
     __ bind(&okay_here);
   }
 
   Label no_info;
-  // If the feedback slot is undefined, or contains anything other than an
-  // AllocationSite, call an array constructor that doesn't use AllocationSites.
-  __ Cmp(rbx, undefined_sentinel);
+  // If the feedback slot is the megamorphic sentinel, or contains anything
+  // other than an AllocationSite, call an array constructor that doesn't use
+  // AllocationSites.
+  __ Cmp(rbx, megamorphic_sentinel);
   __ j(equal, &no_info);
   __ SmiToInteger32(rdx, rdx);
   __ movp(rbx, FieldOperand(rbx, rdx, times_pointer_size,