Index: src/code-stubs.cc
diff --git a/src/code-stubs.cc b/src/code-stubs.cc
index 2c87107ede4b3d538909e10ac19fdbc768cdce15..c97ba3db2d59ed3458505b1f359f3e8432db8a34 100644
--- a/src/code-stubs.cc
+++ b/src/code-stubs.cc
@@ -617,16 +617,16 @@ void StringLengthStub::GenerateAssembly(
   assembler.Return(result);
 }
 
-#define BINARY_OP_STUB(Name)                                                 \
-  void Name::GenerateAssembly(compiler::CodeAssemblerState* state) const {   \
-    typedef BinaryOpWithVectorDescriptor Descriptor;                         \
-    CodeStubAssembler assembler(state);                                      \
-    assembler.Return(Generate(&assembler,                                    \
-                              assembler.Parameter(Descriptor::kLeft),        \
-                              assembler.Parameter(Descriptor::kRight),       \
-                              assembler.Parameter(Descriptor::kSlot),        \
-                              assembler.Parameter(Descriptor::kVector),      \
-                              assembler.Parameter(Descriptor::kContext)));   \
+#define BINARY_OP_STUB(Name)                                                  \
+  void Name::GenerateAssembly(compiler::CodeAssemblerState* state) const {    \
+    typedef BinaryOpWithVectorDescriptor Descriptor;                          \
+    CodeStubAssembler assembler(state);                                       \
+    assembler.Return(Generate(                                                \
+        &assembler, assembler.Parameter(Descriptor::kLeft),                   \
+        assembler.Parameter(Descriptor::kRight),                              \
+        assembler.ChangeUint32ToWord(assembler.Parameter(Descriptor::kSlot)), \
+        assembler.Parameter(Descriptor::kVector),                             \
+        assembler.Parameter(Descriptor::kContext)));                          \
   }
 BINARY_OP_STUB(AddWithFeedbackStub)
 BINARY_OP_STUB(SubtractWithFeedbackStub)
@@ -1232,27 +1232,26 @@ compiler::Node* DivideWithFeedbackStub::Generate(
 
       // Do floating point division if {divisor} is zero.
       assembler->GotoIf(
-          assembler->WordEqual(divisor, assembler->IntPtrConstant(0)),
-          &bailout);
+          assembler->WordEqual(divisor, assembler->SmiConstant(0)), &bailout);
 
       // Do floating point division {dividend} is zero and {divisor} is
       // negative.
       Label dividend_is_zero(assembler), dividend_is_not_zero(assembler);
       assembler->Branch(
-          assembler->WordEqual(dividend, assembler->IntPtrConstant(0)),
+          assembler->WordEqual(dividend, assembler->SmiConstant(0)),
           &dividend_is_zero, &dividend_is_not_zero);
 
       assembler->Bind(&dividend_is_zero);
       {
         assembler->GotoIf(
-            assembler->IntPtrLessThan(divisor, assembler->IntPtrConstant(0)),
+            assembler->SmiLessThan(divisor, assembler->SmiConstant(0)),
             &bailout);
         assembler->Goto(&dividend_is_not_zero);
       }
       assembler->Bind(&dividend_is_not_zero);
 
-      Node* untagged_divisor = assembler->SmiUntag(divisor);
-      Node* untagged_dividend = assembler->SmiUntag(dividend);
+      Node* untagged_divisor = assembler->SmiToWord32(divisor);
+      Node* untagged_dividend = assembler->SmiToWord32(dividend);
 
       // Do floating point division if {dividend} is kMinInt (or kMinInt - 1
       // if the Smi size is 31) and {divisor} is -1.
@@ -1282,7 +1281,7 @@ compiler::Node* DivideWithFeedbackStub::Generate(
                         &bailout);
       var_type_feedback.Bind(
           assembler->Int32Constant(BinaryOperationFeedback::kSignedSmall));
-      var_result.Bind(assembler->SmiTag(untagged_result));
+      var_result.Bind(assembler->SmiFromWord32(untagged_result));
       assembler->Goto(&end);
 
       // Bailout: convert {dividend} and {divisor} to double and do double
@@ -2144,7 +2143,7 @@ void LoadIndexedInterceptorStub::GenerateAssembly(
   Node* context = assembler.Parameter(Descriptor::kContext);
 
   Label if_keyispositivesmi(&assembler), if_keyisinvalid(&assembler);
-  assembler.Branch(assembler.WordIsPositiveSmi(key), &if_keyispositivesmi,
+  assembler.Branch(assembler.TaggedIsPositiveSmi(key), &if_keyispositivesmi,
                    &if_keyisinvalid);
   assembler.Bind(&if_keyispositivesmi);
   assembler.TailCallRuntime(Runtime::kLoadElementWithInterceptor, context,
@@ -2206,7 +2205,7 @@ compiler::Node* FastCloneShallowObjectStub::GenerateFastPath(
   Node* boilerplate_map = assembler->LoadMap(boilerplate);
   Node* instance_size = assembler->LoadMapInstanceSize(boilerplate_map);
   Node* size_in_words = assembler->WordShr(object_size, kPointerSizeLog2);
-  assembler->GotoUnless(assembler->Word32Equal(instance_size, size_in_words),
+  assembler->GotoUnless(assembler->WordEqual(instance_size, size_in_words),
                         call_runtime);
 
   Node* copy = assembler->Allocate(allocation_size);
@@ -2582,10 +2581,11 @@ compiler::Node* FastNewClosureStub::Generate(CodeStubAssembler* assembler,
       assembler->isolate()->builtins()->builtin(Builtins::kCompileLazy));
   Node* lazy_builtin = assembler->HeapConstant(lazy_builtin_handle);
   Node* lazy_builtin_entry = assembler->IntPtrAdd(
-      lazy_builtin,
+      assembler->BitcastTaggedToWord(lazy_builtin),
       assembler->IntPtrConstant(Code::kHeaderSize - kHeapObjectTag));
   assembler->StoreObjectFieldNoWriteBarrier(
-      result, JSFunction::kCodeEntryOffset, lazy_builtin_entry);
+      result, JSFunction::kCodeEntryOffset, lazy_builtin_entry,
+      MachineType::PointerRepresentation());
   assembler->StoreObjectFieldNoWriteBarrier(result,
                                             JSFunction::kNextFunctionLinkOffset,
                                             assembler->UndefinedConstant());
@@ -2595,9 +2595,11 @@ compiler::Node* FastNewClosureStub::Generate(CodeStubAssembler* assembler,
 
 void FastNewClosureStub::GenerateAssembly(
     compiler::CodeAssemblerState* state) const {
+  typedef compiler::Node Node;
   CodeStubAssembler assembler(state);
-  assembler.Return(
-      Generate(&assembler, assembler.Parameter(0), assembler.Parameter(1)));
+  Node* shared = assembler.Parameter(Descriptor::kSharedFunctionInfo);
+  Node* context = assembler.Parameter(Descriptor::kContext);
+  assembler.Return(Generate(&assembler, shared, context));
 }
 
 // static
@@ -2606,21 +2608,23 @@ compiler::Node* FastNewFunctionContextStub::Generate(
     compiler::Node* slots, compiler::Node* context) {
   typedef compiler::Node Node;
 
+  slots = assembler->ChangeUint32ToWord(slots);
+
+  // TODO(ishell): Use CSA::OptimalParameterMode() here.
+  CodeStubAssembler::ParameterMode mode = CodeStubAssembler::INTPTR_PARAMETERS;
   Node* min_context_slots =
-      assembler->Int32Constant(Context::MIN_CONTEXT_SLOTS);
-  Node* length = assembler->Int32Add(slots, min_context_slots);
-  Node* size = assembler->Int32Add(
-      assembler->Word32Shl(length, assembler->Int32Constant(kPointerSizeLog2)),
-      assembler->Int32Constant(FixedArray::kHeaderSize));
+      assembler->IntPtrConstant(Context::MIN_CONTEXT_SLOTS);
+  Node* length = assembler->IntPtrAdd(slots, min_context_slots);
+  Node* size =
+      assembler->GetFixedArrayAllocationSize(length, FAST_ELEMENTS, mode);
 
   // Create a new closure from the given function info in new space
   Node* function_context = assembler->Allocate(size);
 
   assembler->StoreMapNoWriteBarrier(function_context,
                                     Heap::kFunctionContextMapRootIndex);
-  assembler->StoreObjectFieldNoWriteBarrier(function_context,
-                                            Context::kLengthOffset,
-                                            assembler->SmiFromWord32(length));
+  assembler->StoreObjectFieldNoWriteBarrier(
+      function_context, Context::kLengthOffset, assembler->SmiTag(length));
 
   // Set up the fixed slots.
   assembler->StoreFixedArrayElement(function_context, Context::CLOSURE_INDEX,
@@ -2642,9 +2646,10 @@ compiler::Node* FastNewFunctionContextStub::Generate(
   assembler->BuildFastFixedArrayForEach(
       function_context, FAST_ELEMENTS, min_context_slots, length,
       [assembler, undefined](Node* context, Node* offset) {
-        assembler->StoreNoWriteBarrier(MachineType::PointerRepresentation(),
-                                       context, offset, undefined);
-      });
+        assembler->StoreNoWriteBarrier(MachineRepresentation::kTagged, context,
+                                       offset, undefined);
+      },
+      mode);
 
   return function_context;
 }
@@ -2654,7 +2659,7 @@ void FastNewFunctionContextStub::GenerateAssembly(
   typedef compiler::Node Node;
   CodeStubAssembler assembler(state);
   Node* function = assembler.Parameter(Descriptor::kFunction);
-  Node* slots = assembler.Parameter(FastNewFunctionContextDescriptor::kSlots);
+  Node* slots = assembler.Parameter(Descriptor::kSlots);
   Node* context = assembler.Parameter(Descriptor::kContext);
 
   assembler.Return(Generate(&assembler, function, slots, context));
@@ -2731,14 +2736,10 @@ compiler::Node* NonEmptyShallowClone(CodeStubAssembler* assembler,
   typedef compiler::Node Node;
   typedef CodeStubAssembler::ParameterMode ParameterMode;
 
-  ParameterMode param_mode = CodeStubAssembler::SMI_PARAMETERS;
+  ParameterMode param_mode = assembler->OptimalParameterMode();
 
   Node* length = assembler->LoadJSArrayLength(boilerplate);
-
-  if (assembler->Is64()) {
-    capacity = assembler->SmiUntag(capacity);
-    param_mode = CodeStubAssembler::INTEGER_PARAMETERS;
-  }
+  capacity = assembler->UntagParameter(capacity, param_mode);
 
   Node *array, *elements;
   std::tie(array, elements) =
@@ -2756,9 +2757,7 @@ compiler::Node* NonEmptyShallowClone(CodeStubAssembler* assembler,
         assembler->LoadObjectField(boilerplate_elements, offset));
   }
 
-  if (assembler->Is64()) {
-    length = assembler->SmiUntag(length);
-  }
+  length = assembler->UntagParameter(length, param_mode);
 
   assembler->Comment("copy boilerplate elements");
   assembler->CopyFixedArrayElements(kind, boilerplate_elements, elements,
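
Note on the Smi conversions above: V8 stores small integers (Smis) tagged, with
the 32-bit payload shifted into the upper half of the word on 64-bit targets
(and shifted left by one bit on 32-bit targets), so a tagged Smi is generally
not equal to the raw machine word it encodes. The sketch below is a minimal,
standalone illustration of that scheme for the 64-bit case; the helper names
are hypothetical stand-ins for the CodeStubAssembler operations the patch uses
(SmiTag/SmiFromWord32, SmiToWord32), not V8's actual implementation.

// smi_tagging_sketch.cc -- illustrative only; assumes the 64-bit Smi layout.
#include <cassert>
#include <cstdint>

using Word = int64_t;            // models an IntPtr-typed CSA node
constexpr int kSmiShift = 32;    // 64-bit builds; 31-bit Smis use a shift of 1

Word SmiTag(int32_t value) {     // analogue of SmiTag / SmiFromWord32
  return static_cast<Word>(value) << kSmiShift;
}

int32_t SmiUntag32(Word tagged) {  // analogue of SmiToWord32
  return static_cast<int32_t>(tagged >> kSmiShift);
}

int main() {
  // A tagged Smi is not the raw integer it encodes; only 0 happens to match,
  // which is why the zero checks are switched to Smi-typed constants and
  // comparisons rather than raw IntPtr ones.
  assert(SmiTag(0) == 0);
  assert(SmiTag(7) != 7);

  // Division works on untagged 32-bit values and re-tags the result,
  // mirroring the SmiToWord32 / Int32Div / SmiFromWord32 sequence in the stub.
  int32_t dividend = SmiUntag32(SmiTag(42));
  int32_t divisor = SmiUntag32(SmiTag(6));
  assert(SmiUntag32(SmiTag(dividend / divisor)) == 7);
  return 0;
}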