Chromium Code Reviews
Index: src/code-stub-assembler.cc
diff --git a/src/code-stub-assembler.cc b/src/code-stub-assembler.cc
index dac2939e991c59a5f54fc979d6872d4ec0f2b3a3..7e2a97fb0d777e3208e5a8bcc9f02e33f61e492d 100644
--- a/src/code-stub-assembler.cc
+++ b/src/code-stub-assembler.cc
@@ -527,6 +527,12 @@ Node* CodeStubAssembler::TaggedIsSmi(Node* a) {
                     IntPtrConstant(0));
 }
+Node* CodeStubAssembler::TaggedIsNotSmi(Node* a) {
+  return WordNotEqual(
+      WordAnd(BitcastTaggedToWord(a), IntPtrConstant(kSmiTagMask)),
+      IntPtrConstant(0));
+}
+
 Node* CodeStubAssembler::WordIsPositiveSmi(Node* a) {
   return WordEqual(WordAnd(a, IntPtrConstant(kSmiTagMask | kSmiSignMask)),
                    IntPtrConstant(0));
@@ -655,8 +661,9 @@ void CodeStubAssembler::BranchIfJSObject(Node* object, Label* if_true,
                      if_true, if_false);
 }
-void CodeStubAssembler::BranchIfFastJSArray(Node* object, Node* context,
-                                            Label* if_true, Label* if_false) {
+void CodeStubAssembler::BranchIfFastJSArray(
+    Node* object, Node* context, CodeStubAssembler::FastJSArrayAccessMode mode,
+    Label* if_true, Label* if_false) {
   // Bailout if receiver is a Smi.
   GotoIf(TaggedIsSmi(object), if_false);
@@ -672,8 +679,9 @@ void CodeStubAssembler::BranchIfFastJSArray(Node* object, Node* context,
   GotoUnless(IsFastElementsKind(elements_kind), if_false);
   // Check prototype chain if receiver does not have packed elements.
-  GotoUnless(IsHoleyFastElementsKind(elements_kind), if_true);
-
+  if (mode == FastJSArrayAccessMode::INBOUNDS_READ) {
+    GotoUnless(IsHoleyFastElementsKind(elements_kind), if_true);
+  }
   BranchIfPrototypesHaveNoElements(map, if_true, if_false);
 }
@@ -1362,6 +1370,80 @@ Node* CodeStubAssembler::StoreFixedDoubleArrayElement(
   return StoreNoWriteBarrier(rep, object, offset, value);
 }
+Node* CodeStubAssembler::BuildAppendJSArray(ElementsKind kind, Node* context,
+                                            Node* array,
+                                            CodeStubArguments& args,
+                                            Variable& arg_index,
+                                            Label* bailout) {
+  Comment("BuildAppendJSArray: %s", ElementsKindToString(kind));
+  Label pre_bailout(this);
+  Label success(this);
+  Variable elements(this, MachineRepresentation::kTagged);
+  ParameterMode mode = OptimalParameterMode();
+  Variable capacity(this, OptimalParameterRepresentation());
+  Variable length(this, OptimalParameterRepresentation());
+  length.Bind(UntagParameter(LoadJSArrayLength(array), mode));
+  elements.Bind(LoadElements(array));
+  capacity.Bind(
|
Jakob Kummerow (2016/11/23 17:17:06): I think |capacity| could simply be a Node*, I see
danno (2016/11/29 14:39:59): Done.
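The comment above is truncated, but the suggestion appears to be that |capacity| is bound exactly once and never re-bound, so a plain Node* suffices. A minimal sketch of that cleanup (an assumption about the intent, not the landed code):

    // Hypothetical: |capacity| as a plain Node*. It would then also drop out
    // of the |grow_vars| list below, since only re-bound Variables need to be
    // merged at the |fits| label.
    Node* capacity =
        UntagParameter(LoadFixedArrayBaseLength(elements.value()), mode);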
+      UntagParameter(LoadFixedArrayBaseLength(elements.value()), mode));
+
+  // Resize the capacity of the fixed array if it doesn't fit.
+  CodeStubAssembler::VariableList grow_vars({&capacity, &elements}, zone());
+  Label fits(this, grow_vars);
+  Node* first = arg_index.value();
+  Node* growth = IntPtrSubFoldConstants(args.GetLength(), first);
+  Node* new_length = IntPtrAdd(
+      mode == INTPTR_PARAMETERS ? growth : SmiTag(growth), length.value());
+  GotoUnless(IntPtrGreaterThanOrEqual(new_length, capacity.value()), &fits);
+  Node* new_capacity = CalculateNewElementsCapacity(
+      IntPtrAdd(new_length, IntPtrOrSmiConstant(1, mode)), mode);
+  elements.Bind(GrowElementsCapacity(array, elements.value(), kind, kind,
+                                     capacity.value(), new_capacity, mode,
+                                     &pre_bailout));
+  Goto(&fits);
+  Bind(&fits);
+
+  // Push each argument onto the end of the array now that there is enough
+  // capacity.
+  CodeStubAssembler::VariableList push_vars({&length, &elements}, zone());
+  args.ForEach(
+      push_vars,
+      [kind, array, context, mode, &length, &elements, &pre_bailout](
Jakob Kummerow (2016/11/23 17:17:06): nit: |array| and |context| appear to be unused, dr
danno (2016/11/29 14:39:59): Done.
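The nit is truncated, but it evidently asks for the unused |array| and |context| captures to be dropped. Assuming nothing else in the lambda body uses them, the capture list would shrink to:

    [kind, mode, &length, &elements, &pre_bailout](
        CodeStubAssembler* assembler, Node* arg) { /* ... */ }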
+          CodeStubAssembler* assembler, Node* arg) {
+        if (IsFastSmiElementsKind(kind)) {
+          assembler->GotoUnless(assembler->TaggedIsSmi(arg), &pre_bailout);
Jakob Kummerow (2016/11/23 17:17:05): nit: now that you have TaggedIsNotSmi, you could u
danno (2016/11/29 14:39:59): Done.
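The truncated nit presumably suggests using the TaggedIsNotSmi predicate introduced at the top of this patch, turning the double negation into a direct branch:

    // Same control flow, stated positively:
    assembler->GotoIf(assembler->TaggedIsNotSmi(arg), &pre_bailout);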
+        } else if (IsFastDoubleElementsKind(kind)) {
+          assembler->GotoIfNotNumber(arg, &pre_bailout);
+        }
+        if (IsFastDoubleElementsKind(kind)) {
+          assembler->StoreFixedDoubleArrayElement(
+              elements.value(), length.value(),
+              assembler->ChangeNumberToFloat64(arg), mode);
+        } else {
+          WriteBarrierMode barrier_mode = IsFastSmiElementsKind(kind)
+                                              ? SKIP_WRITE_BARRIER
+                                              : UPDATE_WRITE_BARRIER;
+          assembler->StoreFixedArrayElement(elements.value(), length.value(),
+                                            arg, barrier_mode, mode);
+        }
+        assembler->Increment(length, 1, mode);
+      },
+      first, nullptr);
+  length.Bind(TagParameter(length.value(), mode));
+  StoreObjectFieldNoWriteBarrier(array, JSArray::kLengthOffset, length.value());
+  Goto(&success);
+
+  Bind(&pre_bailout);
+  length.Bind(TagParameter(length.value(), mode));
+  Node* diff = SmiSub(length.value(), LoadJSArrayLength(array));
+  StoreObjectFieldNoWriteBarrier(array, JSArray::kLengthOffset, length.value());
+  arg_index.Bind(IntPtrAdd(arg_index.value(), SmiUntag(diff)));
+  Goto(bailout);
+
+  Bind(&success);
+  return length.value();
+}
+
 Node* CodeStubAssembler::AllocateHeapNumber(MutableMode mode) {
   Node* result = Allocate(HeapNumber::kSize, kNone);
   Heap::RootListIndex heap_map_index =
@@ -2130,9 +2212,8 @@ void CodeStubAssembler::CopyStringCharacters(
             rep, to_string,
             index_same ? offset : current_to_offset.value(), value);
         if (!index_same) {
-          current_to_offset.Bind(assembler->IntPtrAdd(
-              current_to_offset.value(),
-              assembler->IntPtrConstant(to_increment)));
+          assembler->Increment(current_to_offset, to_increment,
+                               INTPTR_PARAMETERS);
Jakob Kummerow (2016/11/23 17:17:05): nit: INTPTR_PARAMETERS is the default, no need to
danno (2016/11/29 14:39:59): Done.
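Since INTPTR_PARAMETERS is the default value of Increment's mode parameter, the call can simply read:

    assembler->Increment(current_to_offset, to_increment);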
         }
       },
       from_increment, IndexAdvanceMode::kPost);
@@ -2554,6 +2635,25 @@ Node* CodeStubAssembler::ToThisString(Node* context, Node* value,
   return var_value.value();
 }
+Node* CodeStubAssembler::ChangeNumberToFloat64(compiler::Node* value) {
+  Variable result(this, MachineRepresentation::kFloat64);
+  Label smi(this);
+  Label done(this, &result);
+  GotoIf(TaggedIsSmi(value), &smi);
+  result.Bind(
+      LoadObjectField(value, HeapNumber::kValueOffset, MachineType::Float64()));
Jakob Kummerow (2016/11/23 17:17:05): This doesn't canonicalize the hole NaN. Use Float6
danno (2016/11/29 14:39:59): Done.
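The suggested helper is cut off after "Float6". One plausible reading, assuming a NaN-canonicalizing operation such as Float64SilenceNaN is what was meant, so that the hole NaN bit pattern cannot leak out of the load:

    // Hypothetical fix: canonicalize any NaN read from the HeapNumber.
    result.Bind(Float64SilenceNaN(LoadObjectField(
        value, HeapNumber::kValueOffset, MachineType::Float64())));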
+  Goto(&done);
+
+  Bind(&smi);
+  {
+    result.Bind(ChangeInt32ToFloat64(SmiUntag(value)));
+    Goto(&done);
+  }
+
+  Bind(&done);
+  return result.value();
+}
+
 Node* CodeStubAssembler::ToThisValue(Node* context, Node* value,
                                      PrimitiveType primitive_type,
                                      char const* method_name) {
@@ -3970,6 +4070,16 @@ void CodeStubAssembler::DecrementCounter(StatsCounter* counter, int delta) {
   }
 }
+void CodeStubAssembler::Increment(Variable& variable, int value,
+                                  ParameterMode mode) {
+  DCHECK_IMPLIES(mode == INTPTR_PARAMETERS,
+                 variable.rep() == MachineType::PointerRepresentation());
+  DCHECK_IMPLIES(mode == SMI_PARAMETERS,
+                 variable.rep() == MachineRepresentation::kTagged ||
+                     variable.rep() == MachineRepresentation::kTaggedSigned);
+  variable.Bind(IntPtrAdd(variable.value(), IntPtrOrSmiConstant(value, mode)));
+}
+
 void CodeStubAssembler::Use(Label* label) {
   GotoIf(Word32Equal(Int32Constant(0), Int32Constant(1)), label);
 }
@@ -4080,7 +4190,6 @@ void CodeStubAssembler::NameDictionaryLookup(Node* dictionary,
   Goto(&loop);
   Bind(&loop);
   {
-    Node* count = var_count.value();
     Node* entry = var_entry.value();
     Node* index = EntryToIndex<Dictionary>(entry);
@@ -4092,10 +4201,9 @@ void CodeStubAssembler::NameDictionaryLookup(Node* dictionary,
     GotoIf(WordEqual(current, unique_name), if_found);
     // See Dictionary::NextProbe().
-    count = IntPtrAdd(count, IntPtrConstant(1));
-    entry = WordAnd(IntPtrAdd(entry, count), mask);
+    Increment(var_count);
+    entry = WordAnd(IntPtrAdd(entry, var_count.value()), mask);
-    var_count.Bind(count);
     var_entry.Bind(entry);
     Goto(&loop);
   }
@@ -4160,7 +4268,6 @@ void CodeStubAssembler::NumberDictionaryLookup(Node* dictionary,
   Goto(&loop);
   Bind(&loop);
   {
-    Node* count = var_count.value();
     Node* entry = var_entry->value();
     Node* index = EntryToIndex<Dictionary>(entry);
@@ -4188,10 +4295,9 @@ void CodeStubAssembler::NumberDictionaryLookup(Node* dictionary,
     Bind(&next_probe);
     // See Dictionary::NextProbe().
-    count = IntPtrAdd(count, IntPtrConstant(1));
-    entry = WordAnd(IntPtrAdd(entry, count), mask);
+    Increment(var_count);
+    entry = WordAnd(IntPtrAdd(entry, var_count.value()), mask);
-    var_count.Bind(count);
     var_entry->Bind(entry);
     Goto(&loop);
   }
@@ -7247,11 +7353,11 @@ void CodeStubAssembler::BuildFastLoop(
   Bind(&loop);
   {
     if (mode == IndexAdvanceMode::kPre) {
-      var.Bind(IntPtrAdd(var.value(), IntPtrConstant(increment)));
+      Increment(var, increment);
     }
     body(this, var.value());
    if (mode == IndexAdvanceMode::kPost) {
-      var.Bind(IntPtrAdd(var.value(), IntPtrConstant(increment)));
+      Increment(var, increment);
    }
     Branch(WordNotEqual(var.value(), end_index), &loop, &after_loop);
   }
@@ -8913,6 +9019,22 @@ compiler::Node* CodeStubAssembler::NumberInc(compiler::Node* value) {
   return var_result.value();
 }
+void CodeStubAssembler::GotoIfNotNumber(compiler::Node* input,
+                                        Label* is_not_number) {
+  Label is_number(this);
+  GotoIf(TaggedIsSmi(input), &is_number);
+  Node* input_map = LoadMap(input);
+  Branch(Word32NotEqual(input_map, HeapNumberMapConstant()), is_not_number,
Jakob Kummerow (2016/11/23 17:17:06): Maps are words, not word32s! Even better: Branch(
danno (2016/11/29 14:39:59): Done.
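Maps are tagged pointers, so the comparison must be pointer-width; the truncated suggestion also starts a Branch on equality rather than inequality. A sketch of the likely shape:

    // Word-sized map comparison, branching directly on equality:
    Branch(WordEqual(input_map, HeapNumberMapConstant()), &is_number,
           is_not_number);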
+         &is_number);
+  Bind(&is_number);
+}
+
+void CodeStubAssembler::GotoIfNumber(compiler::Node* input, Label* is_number) {
+  GotoIf(TaggedIsSmi(input), is_number);
+  Node* input_map = LoadMap(input);
+  GotoIf(Word32Equal(input_map, HeapNumberMapConstant()), is_number);
Jakob Kummerow (2016/11/23 17:17:06): Same here
danno (2016/11/29 14:39:59): Done.
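Applying the same pointer-width fix here:

    GotoIf(WordEqual(input_map, HeapNumberMapConstant()), is_number);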
+}
+
 compiler::Node* CodeStubAssembler::CreateArrayIterator(
     compiler::Node* array, compiler::Node* array_map,
     compiler::Node* array_type, compiler::Node* context, IterationKind mode) {
@@ -8969,7 +9091,8 @@ compiler::Node* CodeStubAssembler::CreateArrayIterator(
   Bind(&if_isgeneric);
   {
     Label if_isfast(this), if_isslow(this);
-    BranchIfFastJSArray(array, context, &if_isfast, &if_isslow);
+    BranchIfFastJSArray(array, context, FastJSArrayAccessMode::INBOUNDS_READ,
+                        &if_isfast, &if_isslow);
     Bind(&if_isfast);
     {
@@ -9003,7 +9126,10 @@ compiler::Node* CodeStubAssembler::CreateArrayIterator(
     Bind(&if_isgeneric);
     {
       Label if_isfast(this), if_isslow(this);
-      BranchIfFastJSArray(array, context, &if_isfast, &if_isslow);
+      BranchIfFastJSArray(
+          array, context,
+          CodeStubAssembler::FastJSArrayAccessMode::INBOUNDS_READ, &if_isfast,
Jakob Kummerow (2016/11/23 17:17:06): nit: s/CodeStubAssembler:://
danno (2016/11/29 14:39:59): Done.
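With the redundant class qualifier dropped inside the member function, the call collapses to:

    BranchIfFastJSArray(array, context, FastJSArrayAccessMode::INBOUNDS_READ,
                        &if_isfast, &if_isslow);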
+          &if_isslow);
       Bind(&if_isfast);
       {
@@ -9147,13 +9273,13 @@ CodeStubArguments::CodeStubArguments(CodeStubAssembler* assembler,
   }
 }
-compiler::Node* CodeStubArguments::GetReceiver() {
+compiler::Node* CodeStubArguments::GetReceiver() const {
   return assembler_->Load(MachineType::AnyTagged(), arguments_,
                           assembler_->IntPtrConstant(kPointerSize));
 }
 compiler::Node* CodeStubArguments::AtIndex(
-    compiler::Node* index, CodeStubAssembler::ParameterMode mode) {
+    compiler::Node* index, CodeStubAssembler::ParameterMode mode) const {
   typedef compiler::Node Node;
   Node* negated_index = assembler_->IntPtrSubFoldConstants(
       assembler_->IntPtrOrSmiConstant(0, mode), index);
@@ -9162,7 +9288,7 @@ compiler::Node* CodeStubArguments::AtIndex(
   return assembler_->Load(MachineType::AnyTagged(), arguments_, offset);
 }
-compiler::Node* CodeStubArguments::AtIndex(int index) {
+compiler::Node* CodeStubArguments::AtIndex(int index) const {
   return AtIndex(assembler_->IntPtrConstant(index));
 }