Chromium Code Reviews
| Index: src/code-stub-assembler.cc |
| diff --git a/src/code-stub-assembler.cc b/src/code-stub-assembler.cc |
| index 688f4c1a29d8ae961e7bf50c223b2f40ba27b6fb..065cafc2b29c3abb6a8a47ffea942f7dfe7403b4 100644 |
| --- a/src/code-stub-assembler.cc |
| +++ b/src/code-stub-assembler.cc |
| @@ -1583,6 +1583,9 @@ Node* CodeStubAssembler::AllocateHeapNumberWithValue(Node* value, |
| Node* CodeStubAssembler::AllocateSeqOneByteString(int length, |
| AllocationFlags flags) { |
| Comment("AllocateSeqOneByteString"); |
| + if (length == 0) { |
| + return LoadRoot(Heap::kempty_stringRootIndex); |
| + } |
| Node* result = Allocate(SeqOneByteString::SizeFor(length), flags); |
| DCHECK(Heap::RootIsImmortalImmovable(Heap::kOneByteStringMapRootIndex)); |
| StoreMapNoWriteBarrier(result, Heap::kOneByteStringMapRootIndex); |
| @@ -1602,8 +1605,10 @@ Node* CodeStubAssembler::AllocateSeqOneByteString(Node* context, Node* length, |
| Variable var_result(this, MachineRepresentation::kTagged); |
| // Compute the SeqOneByteString size and check if it fits into new space. |
| - Label if_sizeissmall(this), if_notsizeissmall(this, Label::kDeferred), |
| - if_join(this); |
| + Label if_lengthiszero(this), if_sizeissmall(this), |
| + if_notsizeissmall(this, Label::kDeferred), if_join(this); |
| + GotoIf(WordEqual(length, IntPtrOrSmiConstant(0, mode)), &if_lengthiszero); |
| + |
| Node* raw_size = GetArrayAllocationSize( |
| length, UINT8_ELEMENTS, mode, |
| SeqOneByteString::kHeaderSize + kObjectAlignmentMask); |
| @@ -1636,6 +1641,12 @@ Node* CodeStubAssembler::AllocateSeqOneByteString(Node* context, Node* length, |
| Goto(&if_join); |
| } |
| + Bind(&if_lengthiszero); |
| + { |
| + var_result.Bind(LoadRoot(Heap::kempty_stringRootIndex)); |
| + Goto(&if_join); |
| + } |
| + |
| Bind(&if_join); |
| return var_result.value(); |
| } |
| @@ -1643,6 +1654,9 @@ Node* CodeStubAssembler::AllocateSeqOneByteString(Node* context, Node* length, |
| Node* CodeStubAssembler::AllocateSeqTwoByteString(int length, |
| AllocationFlags flags) { |
| Comment("AllocateSeqTwoByteString"); |
| + if (length == 0) { |
| + return LoadRoot(Heap::kempty_stringRootIndex); |
| + } |
| Node* result = Allocate(SeqTwoByteString::SizeFor(length), flags); |
| DCHECK(Heap::RootIsImmortalImmovable(Heap::kStringMapRootIndex)); |
| StoreMapNoWriteBarrier(result, Heap::kStringMapRootIndex); |
| @@ -1662,8 +1676,10 @@ Node* CodeStubAssembler::AllocateSeqTwoByteString(Node* context, Node* length, |
| Variable var_result(this, MachineRepresentation::kTagged); |
| // Compute the SeqTwoByteString size and check if it fits into new space. |
| - Label if_sizeissmall(this), if_notsizeissmall(this, Label::kDeferred), |
| - if_join(this); |
| + Label if_lengthiszero(this), if_sizeissmall(this), |
| + if_notsizeissmall(this, Label::kDeferred), if_join(this); |
| + GotoIf(WordEqual(length, IntPtrOrSmiConstant(0, mode)), &if_lengthiszero); |
| + |
| Node* raw_size = GetArrayAllocationSize( |
| length, UINT16_ELEMENTS, mode, |
| SeqOneByteString::kHeaderSize + kObjectAlignmentMask); |
| @@ -1698,6 +1714,12 @@ Node* CodeStubAssembler::AllocateSeqTwoByteString(Node* context, Node* length, |
| Goto(&if_join); |
| } |
| + Bind(&if_lengthiszero); |
| + { |
| + var_result.Bind(LoadRoot(Heap::kempty_stringRootIndex)); |
| + Goto(&if_join); |
| + } |
| + |
| Bind(&if_join); |
| return var_result.value(); |
| } |
| @@ -3149,14 +3171,29 @@ Node* CodeStubAssembler::StringCharCodeAt(Node* string, Node* index) { |
| Bind(&if_stringisnotexternal); |
| { |
| - // The {string} is a SlicedString, continue with its parent. |
| - Node* string_offset = |
| - LoadAndUntagObjectField(string, SlicedString::kOffsetOffset); |
| - Node* string_parent = |
| - LoadObjectField(string, SlicedString::kParentOffset); |
| - var_index.Bind(IntPtrAdd(index, string_offset)); |
| - var_string.Bind(string_parent); |
| - Goto(&loop); |
| + Label if_stringissliced(this), if_stringisthin(this); |
| + Branch( |
| + Word32Equal(Word32And(string_instance_type, |
| + Int32Constant(kStringRepresentationMask)), |
| + Int32Constant(kSlicedStringTag)), |
| + &if_stringissliced, &if_stringisthin); |
| + Bind(&if_stringissliced); |
| + { |
| + // The {string} is a SlicedString, continue with its parent. |
| + Node* string_offset = |
| + LoadAndUntagObjectField(string, SlicedString::kOffsetOffset); |
| + Node* string_parent = |
| + LoadObjectField(string, SlicedString::kParentOffset); |
| + var_index.Bind(IntPtrAdd(index, string_offset)); |
| + var_string.Bind(string_parent); |
| + Goto(&loop); |
| + } |
| + Bind(&if_stringisthin); |
| + { |
| + // The {string} is a ThinString, continue with its actual value. |
| + var_string.Bind(LoadObjectField(string, ThinString::kActualOffset)); |
| + Goto(&loop); |
| + } |
| } |
| } |
| } |
| @@ -3332,7 +3369,8 @@ Node* CodeStubAssembler::SubString(Node* context, Node* string, Node* from, |
| // and put the underlying string into var_string. |
| // If the string is not indirect, it can only be sequential or external. |
| - STATIC_ASSERT(kIsIndirectStringMask == (kSlicedStringTag & kConsStringTag)); |
| + STATIC_ASSERT(kIsIndirectStringMask == |
| + (kSlicedStringTag & kConsStringTag & kThinStringTag)); |
| STATIC_ASSERT(kIsIndirectStringMask != 0); |
| Label underlying_unpacked(this); |
| GotoIf(Word32Equal( |
| @@ -3342,11 +3380,11 @@ Node* CodeStubAssembler::SubString(Node* context, Node* string, Node* from, |
| // The subject string is either a sliced or cons string. |
|
Igor Sheludko
2016/12/20 23:46:58
or thin string.
Jakob Kummerow
2017/01/04 12:45:06
Done.
|
| - Label sliced_string(this); |
| + Label sliced_string(this), thin_or_sliced(this); |
| GotoIf(Word32NotEqual( |
| - Word32And(instance_type, Int32Constant(kSlicedNotConsMask)), |
| - Int32Constant(0)), |
| - &sliced_string); |
| + Word32And(instance_type, Int32Constant(kStringRepresentationMask)), |
| + Int32Constant(kConsStringTag)), |
| + &thin_or_sliced); |
| // Cons string. Check whether it is flat, then fetch first part. |
| // Flat cons strings have an empty second part. |
| @@ -3362,6 +3400,18 @@ Node* CodeStubAssembler::SubString(Node* context, Node* string, Node* from, |
| Goto(&underlying_unpacked); |
| } |
| + Bind(&thin_or_sliced); |
| + { |
| + GotoIf(Word32Equal(Word32And(instance_type, |
|
Igor Sheludko
2016/12/20 23:46:58
Maybe reuse the result of "Word32And(instance_type, Int32Constant(kStringRepresentationMask))" computed above instead of recomputing it here? [comment truncated in extraction; quoted expression reconstructed from the surrounding diff]
Jakob Kummerow
2017/01/04 12:45:06
Done.
|
| + Int32Constant(kStringRepresentationMask)), |
| + Int32Constant(kSlicedStringTag)), |
| + &sliced_string); |
| + Node* actual_string = LoadObjectField(string, ThinString::kActualOffset); |
| + var_string.Bind(actual_string); |
| + var_instance_type.Bind(LoadInstanceType(actual_string)); |
| + Goto(&underlying_unpacked); |
| + } |
| + |
| Bind(&sliced_string); |
| { |
| // Fetch parent and correct start index by offset. |
| @@ -4318,17 +4368,19 @@ void CodeStubAssembler::Use(Label* label) { |
| void CodeStubAssembler::TryToName(Node* key, Label* if_keyisindex, |
| Variable* var_index, Label* if_keyisunique, |
| - Label* if_bailout) { |
| + Variable* var_unique, Label* if_bailout) { |
| DCHECK_EQ(MachineType::PointerRepresentation(), var_index->rep()); |
| + DCHECK_EQ(MachineRepresentation::kTagged, var_unique->rep()); |
| Comment("TryToName"); |
| - Label if_hascachedindex(this), if_keyisnotindex(this); |
| + Label if_hascachedindex(this), if_keyisnotindex(this), if_thinstring(this); |
| // Handle Smi and HeapNumber keys. |
| var_index->Bind(TryToIntptr(key, &if_keyisnotindex)); |
| Goto(if_keyisindex); |
| Bind(&if_keyisnotindex); |
| Node* key_map = LoadMap(key); |
| + var_unique->Bind(key); |
| // Symbols are unique. |
| GotoIf(IsSymbolMap(key_map), if_keyisunique); |
| Node* key_instance_type = LoadMapInstanceType(key_map); |
| @@ -4345,6 +4397,12 @@ void CodeStubAssembler::TryToName(Node* key, Label* if_keyisindex, |
| Node* not_an_index = |
| Word32And(hash, Int32Constant(Name::kIsNotArrayIndexMask)); |
| GotoIf(Word32Equal(not_an_index, Int32Constant(0)), if_bailout); |
| + // Check if we have a ThinString. |
| + GotoIf(Word32Equal(key_instance_type, Int32Constant(THIN_STRING_TYPE)), |
| + &if_thinstring); |
| + GotoIf( |
| + Word32Equal(key_instance_type, Int32Constant(THIN_ONE_BYTE_STRING_TYPE)), |
| + &if_thinstring); |
| // Finally, check if |key| is internalized. |
| STATIC_ASSERT(kNotInternalizedTag != 0); |
| Node* not_internalized = |
| @@ -4352,6 +4410,10 @@ void CodeStubAssembler::TryToName(Node* key, Label* if_keyisindex, |
| GotoIf(Word32NotEqual(not_internalized, Int32Constant(0)), if_bailout); |
| Goto(if_keyisunique); |
| + Bind(&if_thinstring); |
| + var_unique->Bind(LoadObjectField(key, ThinString::kActualOffset)); |
| + Goto(if_keyisunique); |
| + |
| Bind(&if_hascachedindex); |
| var_index->Bind(DecodeWordFromWord32<Name::ArrayIndexValueBits>(hash)); |
| Goto(if_keyisindex); |
| @@ -5201,9 +5263,11 @@ void CodeStubAssembler::TryPrototypeChainLookup( |
| } |
| Variable var_index(this, MachineType::PointerRepresentation()); |
| + Variable var_unique(this, MachineRepresentation::kTagged); |
| Label if_keyisindex(this), if_iskeyunique(this); |
| - TryToName(key, &if_keyisindex, &var_index, &if_iskeyunique, if_bailout); |
| + TryToName(key, &if_keyisindex, &var_index, &if_iskeyunique, &var_unique, |
| + if_bailout); |
| Bind(&if_iskeyunique); |
| { |
| @@ -5225,8 +5289,8 @@ void CodeStubAssembler::TryPrototypeChainLookup( |
| Label next_proto(this); |
| lookup_property_in_holder(receiver, var_holder.value(), holder_map, |
| - holder_instance_type, key, &next_proto, |
| - if_bailout); |
| + holder_instance_type, var_unique.value(), |
| + &next_proto, if_bailout); |
| Bind(&next_proto); |
| // Bailout if it can be an integer indexed exotic case. |