Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 // Copyright 2016 the V8 project authors. All rights reserved. | 1 // Copyright 2016 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 #include "src/code-stub-assembler.h" | 4 #include "src/code-stub-assembler.h" |
| 5 #include "src/code-factory.h" | 5 #include "src/code-factory.h" |
| 6 #include "src/frames-inl.h" | 6 #include "src/frames-inl.h" |
| 7 #include "src/frames.h" | 7 #include "src/frames.h" |
| 8 | 8 |
| 9 namespace v8 { | 9 namespace v8 { |
| 10 namespace internal { | 10 namespace internal { |
| (...skipping 176 matching lines...) | |
| 187 Node* CodeStubAssembler::IntPtrRoundUpToPowerOfTwo32(Node* value) { | 187 Node* CodeStubAssembler::IntPtrRoundUpToPowerOfTwo32(Node* value) { |
| 188 Comment("IntPtrRoundUpToPowerOfTwo32"); | 188 Comment("IntPtrRoundUpToPowerOfTwo32"); |
| 189 CSA_ASSERT(this, UintPtrLessThanOrEqual(value, IntPtrConstant(0x80000000u))); | 189 CSA_ASSERT(this, UintPtrLessThanOrEqual(value, IntPtrConstant(0x80000000u))); |
| 190 value = IntPtrSub(value, IntPtrConstant(1)); | 190 value = IntPtrSub(value, IntPtrConstant(1)); |
| 191 for (int i = 1; i <= 16; i *= 2) { | 191 for (int i = 1; i <= 16; i *= 2) { |
| 192 value = WordOr(value, WordShr(value, IntPtrConstant(i))); | 192 value = WordOr(value, WordShr(value, IntPtrConstant(i))); |
| 193 } | 193 } |
| 194 return IntPtrAdd(value, IntPtrConstant(1)); | 194 return IntPtrAdd(value, IntPtrConstant(1)); |
| 195 } | 195 } |
| 196 | 196 |
| 197 Node* CodeStubAssembler::MatchesParameterMode(Node* value, ParameterMode mode) { | |
| 198 return (mode == SMI_PARAMETERS) ? TaggedIsSmi(value) : Int32Constant(1); | |
| 199 } | |
| 200 | |
| 197 Node* CodeStubAssembler::WordIsPowerOfTwo(Node* value) { | 201 Node* CodeStubAssembler::WordIsPowerOfTwo(Node* value) { |
| 198 // value && !(value & (value - 1)) | 202 // value && !(value & (value - 1)) |
| 199 return WordEqual( | 203 return WordEqual( |
| 200 Select( | 204 Select( |
| 201 WordEqual(value, IntPtrConstant(0)), | 205 WordEqual(value, IntPtrConstant(0)), |
| 202 [=] { return IntPtrConstant(1); }, | 206 [=] { return IntPtrConstant(1); }, |
| 203 [=] { return WordAnd(value, IntPtrSub(value, IntPtrConstant(1))); }, | 207 [=] { return WordAnd(value, IntPtrSub(value, IntPtrConstant(1))); }, |
| 204 MachineType::PointerRepresentation()), | 208 MachineType::PointerRepresentation()), |
| 205 IntPtrConstant(0)); | 209 IntPtrConstant(0)); |
| 206 } | 210 } |
| (...skipping 223 matching lines...) | |
| 430 | 434 |
| 431 Node* CodeStubAssembler::SmiTag(Node* value) { | 435 Node* CodeStubAssembler::SmiTag(Node* value) { |
| 432 int32_t constant_value; | 436 int32_t constant_value; |
| 433 if (ToInt32Constant(value, constant_value) && Smi::IsValid(constant_value)) { | 437 if (ToInt32Constant(value, constant_value) && Smi::IsValid(constant_value)) { |
| 434 return SmiConstant(Smi::FromInt(constant_value)); | 438 return SmiConstant(Smi::FromInt(constant_value)); |
| 435 } | 439 } |
| 436 return BitcastWordToTaggedSigned(WordShl(value, SmiShiftBitsConstant())); | 440 return BitcastWordToTaggedSigned(WordShl(value, SmiShiftBitsConstant())); |
| 437 } | 441 } |
| 438 | 442 |
| 439 Node* CodeStubAssembler::SmiUntag(Node* value) { | 443 Node* CodeStubAssembler::SmiUntag(Node* value) { |
| 444 CSA_SLOW_ASSERT(this, TaggedIsSmi(value)); | |
| 440 return WordSar(BitcastTaggedToWord(value), SmiShiftBitsConstant()); | 445 return WordSar(BitcastTaggedToWord(value), SmiShiftBitsConstant()); |
| 441 } | 446 } |
| 442 | 447 |
| 443 Node* CodeStubAssembler::SmiToWord32(Node* value) { | 448 Node* CodeStubAssembler::SmiToWord32(Node* value) { |
| 449 CSA_SLOW_ASSERT(this, TaggedIsSmi(value)); | |
| 444 Node* result = SmiUntag(value); | 450 Node* result = SmiUntag(value); |
| 445 return TruncateWordToWord32(result); | 451 return TruncateWordToWord32(result); |
| 446 } | 452 } |
| 447 | 453 |
| 448 Node* CodeStubAssembler::SmiToFloat64(Node* value) { | 454 Node* CodeStubAssembler::SmiToFloat64(Node* value) { |
| 455 CSA_SLOW_ASSERT(this, TaggedIsSmi(value)); | |
| 449 return ChangeInt32ToFloat64(SmiToWord32(value)); | 456 return ChangeInt32ToFloat64(SmiToWord32(value)); |
| 450 } | 457 } |
| 451 | 458 |
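The new TaggedIsSmi assertions above rely on the usual Smi encoding: the low (tag) bit is 0, and the payload is shifted left by one bit on 32-bit targets or by 32 bits on 64-bit targets. A minimal non-CSA sketch of the tagging arithmetic, assuming a 64-bit build:

```cpp
#include <cassert>
#include <cstdint>

// Illustration only, assuming a 64-bit build: the 32-bit Smi payload lives
// in the upper half of the word and the low (tag) bit is 0.
constexpr int kSmiTagSize = 1;
constexpr int kSmiShiftSize = 31;  // 0 on 32-bit targets.
constexpr int kSmiShiftBits = kSmiTagSize + kSmiShiftSize;

intptr_t SmiTag(int32_t value) {
  // Shift in unsigned arithmetic to avoid UB on negative values.
  return static_cast<intptr_t>(static_cast<uint64_t>(int64_t{value})
                               << kSmiShiftBits);
}

int32_t SmiUntag(intptr_t tagged) {
  return static_cast<int32_t>(tagged >> kSmiShiftBits);  // Arithmetic shift.
}

bool TaggedIsSmi(intptr_t tagged) {
  return (tagged & ((1 << kSmiTagSize) - 1)) == 0;  // Tag bit must be 0.
}

int main() {
  intptr_t smi = SmiTag(-42);
  assert(TaggedIsSmi(smi));
  assert(SmiUntag(smi) == -42);
  // SmiToWord32 is SmiUntag truncated to 32 bits; SmiToFloat64 then converts
  // that integer to a double.
  return 0;
}
```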
| 452 Node* CodeStubAssembler::SmiMax(Node* a, Node* b) { | 459 Node* CodeStubAssembler::SmiMax(Node* a, Node* b) { |
| 453 return SelectTaggedConstant(SmiLessThan(a, b), b, a); | 460 return SelectTaggedConstant(SmiLessThan(a, b), b, a); |
| 454 } | 461 } |
| 455 | 462 |
| 456 Node* CodeStubAssembler::SmiMin(Node* a, Node* b) { | 463 Node* CodeStubAssembler::SmiMin(Node* a, Node* b) { |
| 457 return SelectTaggedConstant(SmiLessThan(a, b), a, b); | 464 return SelectTaggedConstant(SmiLessThan(a, b), a, b); |
| 458 } | 465 } |
| 459 | 466 |
| 460 Node* CodeStubAssembler::SmiMod(Node* a, Node* b) { | 467 Node* CodeStubAssembler::SmiMod(Node* a, Node* b) { |
| 468 CSA_SLOW_ASSERT(this, TaggedIsSmi(a)); | |
| 469 CSA_SLOW_ASSERT(this, TaggedIsSmi(b)); | |
| 461 VARIABLE(var_result, MachineRepresentation::kTagged); | 470 VARIABLE(var_result, MachineRepresentation::kTagged); |
| 462 Label return_result(this, &var_result), | 471 Label return_result(this, &var_result), |
| 463 return_minuszero(this, Label::kDeferred), | 472 return_minuszero(this, Label::kDeferred), |
| 464 return_nan(this, Label::kDeferred); | 473 return_nan(this, Label::kDeferred); |
| 465 | 474 |
| 466 // Untag {a} and {b}. | 475 // Untag {a} and {b}. |
| 467 a = SmiToWord32(a); | 476 a = SmiToWord32(a); |
| 468 b = SmiToWord32(b); | 477 b = SmiToWord32(b); |
| 469 | 478 |
| 470 // Return NaN if {b} is zero. | 479 // Return NaN if {b} is zero. |
| (...skipping 39 matching lines...) | |
| 510 | 519 |
| 511 BIND(&return_minuszero); | 520 BIND(&return_minuszero); |
| 512 var_result.Bind(MinusZeroConstant()); | 521 var_result.Bind(MinusZeroConstant()); |
| 513 Goto(&return_result); | 522 Goto(&return_result); |
| 514 | 523 |
| 515 BIND(&return_nan); | 524 BIND(&return_nan); |
| 516 var_result.Bind(NanConstant()); | 525 var_result.Bind(NanConstant()); |
| 517 Goto(&return_result); | 526 Goto(&return_result); |
| 518 | 527 |
| 519 BIND(&return_result); | 528 BIND(&return_result); |
| 529 CSA_SLOW_ASSERT(this, IsNumber(var_result.value())); | |
| 520 return var_result.value(); | 530 return var_result.value(); |
| 521 } | 531 } |
| 522 | 532 |
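The deferred -0 and NaN paths above implement the ECMAScript remainder semantics for integer inputs, which is also why the result can end up as a heap number rather than a Smi. A scalar sketch of those semantics (not CSA code), assuming int32 inputs:

```cpp
#include <cassert>
#include <cmath>
#include <cstdint>
#include <limits>

// Scalar sketch of the cases SmiMod handles; illustration only.
double JsIntegerMod(int32_t a, int32_t b) {
  if (b == 0) return std::numeric_limits<double>::quiet_NaN();
  // INT32_MIN % -1 would overflow in C++; in JS the result is simply -0.
  if (a == std::numeric_limits<int32_t>::min() && b == -1) return -0.0;
  int32_t r = a % b;
  // A zero remainder keeps the sign of the dividend: -4 % 2 is -0 in JS.
  if (r == 0 && a < 0) return -0.0;
  return static_cast<double>(r);
}

int main() {
  assert(std::isnan(JsIntegerMod(5, 0)));
  assert(std::signbit(JsIntegerMod(-4, 2)));  // -0
  assert(JsIntegerMod(7, 3) == 1);
  assert(JsIntegerMod(-7, 3) == -1);          // Sign follows the dividend.
  return 0;
}
```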
| 523 Node* CodeStubAssembler::SmiMul(Node* a, Node* b) { | 533 Node* CodeStubAssembler::SmiMul(Node* a, Node* b) { |
| 524 VARIABLE(var_result, MachineRepresentation::kTagged); | 534 VARIABLE(var_result, MachineRepresentation::kTagged); |
| 525 VARIABLE(var_lhs_float64, MachineRepresentation::kFloat64); | 535 VARIABLE(var_lhs_float64, MachineRepresentation::kFloat64); |
| 526 VARIABLE(var_rhs_float64, MachineRepresentation::kFloat64); | 536 VARIABLE(var_rhs_float64, MachineRepresentation::kFloat64); |
| 527 Label return_result(this, &var_result); | 537 Label return_result(this, &var_result); |
| 528 | 538 |
| 529 // Both {a} and {b} are Smis. Convert them to integers and multiply. | 539 // Both {a} and {b} are Smis. Convert them to integers and multiply. |
| (...skipping 41 matching lines...) | |
| 571 { | 581 { |
| 572 var_lhs_float64.Bind(SmiToFloat64(a)); | 582 var_lhs_float64.Bind(SmiToFloat64(a)); |
| 573 var_rhs_float64.Bind(SmiToFloat64(b)); | 583 var_rhs_float64.Bind(SmiToFloat64(b)); |
| 574 Node* value = Float64Mul(var_lhs_float64.value(), var_rhs_float64.value()); | 584 Node* value = Float64Mul(var_lhs_float64.value(), var_rhs_float64.value()); |
| 575 Node* result = AllocateHeapNumberWithValue(value); | 585 Node* result = AllocateHeapNumberWithValue(value); |
| 576 var_result.Bind(result); | 586 var_result.Bind(result); |
| 577 Goto(&return_result); | 587 Goto(&return_result); |
| 578 } | 588 } |
| 579 | 589 |
| 580 BIND(&return_result); | 590 BIND(&return_result); |
| 591 CSA_SLOW_ASSERT(this, IsNumber(var_result.value())); | |
| 581 return var_result.value(); | 592 return var_result.value(); |
| 582 } | 593 } |
| 583 | 594 |
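SmiMul keeps the product as a Smi only when it is representable; overflow and the -0 case both force a heap number, computed via the Float64 path above. A scalar sketch of that decision, assuming the 32-bit Smi payload of a 64-bit build; BoxedNumber is a hypothetical stand-in for the tagged result:

```cpp
#include <cassert>
#include <cstdint>

// Hypothetical stand-in for the tagged result: either a Smi-representable
// integer or a boxed double (a heap number in the real code).
struct BoxedNumber {
  bool is_smi;
  int32_t smi;
  double number;
};

BoxedNumber SmiMulSketch(int32_t a, int32_t b) {
  int64_t product = static_cast<int64_t>(a) * b;
  bool fits = product >= INT32_MIN && product <= INT32_MAX;
  // A zero product with a negative operand is -0, which no Smi can represent.
  bool is_minus_zero = product == 0 && (a < 0 || b < 0);
  if (fits && !is_minus_zero) return {true, static_cast<int32_t>(product), 0.0};
  // Otherwise redo the multiplication in double precision and box it.
  return {false, 0, static_cast<double>(a) * static_cast<double>(b)};
}

int main() {
  assert(SmiMulSketch(6, 7).is_smi);
  assert(!SmiMulSketch(1 << 30, 4).is_smi);  // Overflows the 32-bit payload.
  assert(!SmiMulSketch(0, -5).is_smi);       // -0 needs a heap number.
  return 0;
}
```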
| 584 Node* CodeStubAssembler::TrySmiDiv(Node* dividend, Node* divisor, | 595 Node* CodeStubAssembler::TrySmiDiv(Node* dividend, Node* divisor, |
| 585 Label* bailout) { | 596 Label* bailout) { |
| 597 CSA_SLOW_ASSERT(this, TaggedIsSmi(dividend)); | |
| 598 CSA_SLOW_ASSERT(this, TaggedIsSmi(divisor)); | |
| 599 | |
| 586 // Both {a} and {b} are Smis. Bailout to floating point division if {divisor} | 600 // Both {a} and {b} are Smis. Bailout to floating point division if {divisor} |
| 587 // is zero. | 601 // is zero. |
| 588 GotoIf(WordEqual(divisor, SmiConstant(0)), bailout); | 602 GotoIf(WordEqual(divisor, SmiConstant(0)), bailout); |
| 589 | 603 |
| 590 // Do floating point division if {dividend} is zero and {divisor} is | 604 // Do floating point division if {dividend} is zero and {divisor} is |
| 591 // negative. | 605 // negative. |
| 592 Label dividend_is_zero(this), dividend_is_not_zero(this); | 606 Label dividend_is_zero(this), dividend_is_not_zero(this); |
| 593 Branch(WordEqual(dividend, SmiConstant(0)), &dividend_is_zero, | 607 Branch(WordEqual(dividend, SmiConstant(0)), &dividend_is_zero, |
| 594 &dividend_is_not_zero); | 608 &dividend_is_not_zero); |
| 595 | 609 |
| (...skipping 57 matching lines...) | |
| 653 } | 667 } |
| 654 | 668 |
| 655 Node* CodeStubAssembler::WordIsWordAligned(Node* word) { | 669 Node* CodeStubAssembler::WordIsWordAligned(Node* word) { |
| 656 return WordEqual(IntPtrConstant(0), | 670 return WordEqual(IntPtrConstant(0), |
| 657 WordAnd(word, IntPtrConstant((1 << kPointerSizeLog2) - 1))); | 671 WordAnd(word, IntPtrConstant((1 << kPointerSizeLog2) - 1))); |
| 658 } | 672 } |
| 659 | 673 |
| 660 void CodeStubAssembler::BranchIfPrototypesHaveNoElements( | 674 void CodeStubAssembler::BranchIfPrototypesHaveNoElements( |
| 661 Node* receiver_map, Label* definitely_no_elements, | 675 Node* receiver_map, Label* definitely_no_elements, |
| 662 Label* possibly_elements) { | 676 Label* possibly_elements) { |
| 677 CSA_SLOW_ASSERT(this, IsMap(receiver_map)); | |
| 663 VARIABLE(var_map, MachineRepresentation::kTagged, receiver_map); | 678 VARIABLE(var_map, MachineRepresentation::kTagged, receiver_map); |
| 664 Label loop_body(this, &var_map); | 679 Label loop_body(this, &var_map); |
| 665 Node* empty_elements = LoadRoot(Heap::kEmptyFixedArrayRootIndex); | 680 Node* empty_elements = LoadRoot(Heap::kEmptyFixedArrayRootIndex); |
| 666 Goto(&loop_body); | 681 Goto(&loop_body); |
| 667 | 682 |
| 668 BIND(&loop_body); | 683 BIND(&loop_body); |
| 669 { | 684 { |
| 670 Node* map = var_map.value(); | 685 Node* map = var_map.value(); |
| 671 Node* prototype = LoadMapPrototype(map); | 686 Node* prototype = LoadMapPrototype(map); |
| 672 GotoIf(WordEqual(prototype, NullConstant()), definitely_no_elements); | 687 GotoIf(WordEqual(prototype, NullConstant()), definitely_no_elements); |
| (...skipping 297 matching lines...) | |
| 970 return Load(rep, frame_pointer, IntPtrConstant(offset)); | 985 return Load(rep, frame_pointer, IntPtrConstant(offset)); |
| 971 } | 986 } |
| 972 | 987 |
| 973 Node* CodeStubAssembler::LoadBufferObject(Node* buffer, int offset, | 988 Node* CodeStubAssembler::LoadBufferObject(Node* buffer, int offset, |
| 974 MachineType rep) { | 989 MachineType rep) { |
| 975 return Load(rep, buffer, IntPtrConstant(offset)); | 990 return Load(rep, buffer, IntPtrConstant(offset)); |
| 976 } | 991 } |
| 977 | 992 |
| 978 Node* CodeStubAssembler::LoadObjectField(Node* object, int offset, | 993 Node* CodeStubAssembler::LoadObjectField(Node* object, int offset, |
| 979 MachineType rep) { | 994 MachineType rep) { |
| 995 CSA_SLOW_ASSERT(this, TaggedIsNotSmi(object)); | |
| 980 return Load(rep, object, IntPtrConstant(offset - kHeapObjectTag)); | 996 return Load(rep, object, IntPtrConstant(offset - kHeapObjectTag)); |
| 981 } | 997 } |
| 982 | 998 |
| 983 Node* CodeStubAssembler::LoadObjectField(Node* object, Node* offset, | 999 Node* CodeStubAssembler::LoadObjectField(Node* object, Node* offset, |
| 984 MachineType rep) { | 1000 MachineType rep) { |
| 1001 CSA_SLOW_ASSERT(this, TaggedIsNotSmi(object)); | |
| 985 return Load(rep, object, IntPtrSub(offset, IntPtrConstant(kHeapObjectTag))); | 1002 return Load(rep, object, IntPtrSub(offset, IntPtrConstant(kHeapObjectTag))); |
| 986 } | 1003 } |
| 987 | 1004 |
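The `- kHeapObjectTag` adjustments exist because heap object references carry a tag of 1 in their low bit, so the raw field address is `reference + offset - 1`. LoadAndUntagObjectField just below additionally relies on the 64-bit Smi layout, where the payload sits in the upper 32 bits of the field, so on little-endian it can load only that half (hence `offset += kPointerSize / 2`). A non-CSA sketch of the address arithmetic, with a made-up two-field object:

```cpp
#include <cassert>
#include <cstddef>
#include <cstdint>
#include <cstring>

constexpr intptr_t kHeapObjectTag = 1;  // Low bit set on heap object refs.
constexpr int kPointerSize = 8;         // Assuming a 64-bit build.

// A fake two-field "heap object": a map word followed by a Smi field whose
// 32-bit payload lives in the upper half of the word (64-bit Smi layout).
struct FakeObject {
  uintptr_t map_word;
  intptr_t smi_field;
};

// LoadObjectField: untag the reference, then read the full word at offset.
intptr_t LoadField(uintptr_t tagged_ref, size_t offset) {
  intptr_t result;
  std::memcpy(&result,
              reinterpret_cast<const void*>(tagged_ref - kHeapObjectTag + offset),
              sizeof(result));
  return result;
}

// LoadAndUntagObjectField on 64-bit little-endian: read only the upper half,
// which already holds the untagged 32-bit payload.
int32_t LoadAndUntagField(uintptr_t tagged_ref, size_t offset) {
  int32_t result;
  std::memcpy(&result,
              reinterpret_cast<const void*>(tagged_ref - kHeapObjectTag + offset +
                                            kPointerSize / 2),
              sizeof(result));
  return result;
}

int main() {
  FakeObject obj{0, static_cast<intptr_t>(123) << 32};
  uintptr_t ref = reinterpret_cast<uintptr_t>(&obj) + kHeapObjectTag;
  size_t smi_offset = offsetof(FakeObject, smi_field);
  assert(LoadField(ref, smi_offset) == static_cast<intptr_t>(123) << 32);
  assert(LoadAndUntagField(ref, smi_offset) == 123);
  return 0;
}
```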
| 988 Node* CodeStubAssembler::LoadAndUntagObjectField(Node* object, int offset) { | 1005 Node* CodeStubAssembler::LoadAndUntagObjectField(Node* object, int offset) { |
| 989 if (Is64()) { | 1006 if (Is64()) { |
| 990 #if V8_TARGET_LITTLE_ENDIAN | 1007 #if V8_TARGET_LITTLE_ENDIAN |
| 991 offset += kPointerSize / 2; | 1008 offset += kPointerSize / 2; |
| 992 #endif | 1009 #endif |
| 993 return ChangeInt32ToInt64( | 1010 return ChangeInt32ToInt64( |
| 994 LoadObjectField(object, offset, MachineType::Int32())); | 1011 LoadObjectField(object, offset, MachineType::Int32())); |
| (...skipping 56 matching lines...) | |
| 1051 return StoreNoWriteBarrier(MachineRepresentation::kWord32, base, | 1068 return StoreNoWriteBarrier(MachineRepresentation::kWord32, base, |
| 1052 IntPtrConstant(payload_offset), | 1069 IntPtrConstant(payload_offset), |
| 1053 TruncateInt64ToInt32(value)); | 1070 TruncateInt64ToInt32(value)); |
| 1054 } else { | 1071 } else { |
| 1055 return StoreNoWriteBarrier(MachineRepresentation::kTaggedSigned, base, | 1072 return StoreNoWriteBarrier(MachineRepresentation::kTaggedSigned, base, |
| 1056 IntPtrConstant(offset), SmiTag(value)); | 1073 IntPtrConstant(offset), SmiTag(value)); |
| 1057 } | 1074 } |
| 1058 } | 1075 } |
| 1059 | 1076 |
| 1060 Node* CodeStubAssembler::LoadHeapNumberValue(Node* object) { | 1077 Node* CodeStubAssembler::LoadHeapNumberValue(Node* object) { |
| 1078 CSA_SLOW_ASSERT(this, IsHeapNumber(object)); | |
| 1061 return LoadObjectField(object, HeapNumber::kValueOffset, | 1079 return LoadObjectField(object, HeapNumber::kValueOffset, |
| 1062 MachineType::Float64()); | 1080 MachineType::Float64()); |
| 1063 } | 1081 } |
| 1064 | 1082 |
| 1065 Node* CodeStubAssembler::LoadMap(Node* object) { | 1083 Node* CodeStubAssembler::LoadMap(Node* object) { |
| 1084 CSA_SLOW_ASSERT(this, TaggedIsNotSmi(object)); | |
| 1066 return LoadObjectField(object, HeapObject::kMapOffset); | 1085 return LoadObjectField(object, HeapObject::kMapOffset); |
| 1067 } | 1086 } |
| 1068 | 1087 |
| 1069 Node* CodeStubAssembler::LoadInstanceType(Node* object) { | 1088 Node* CodeStubAssembler::LoadInstanceType(Node* object) { |
| 1070 return LoadMapInstanceType(LoadMap(object)); | 1089 return LoadMapInstanceType(LoadMap(object)); |
| 1071 } | 1090 } |
| 1072 | 1091 |
| 1073 Node* CodeStubAssembler::HasInstanceType(Node* object, | 1092 Node* CodeStubAssembler::HasInstanceType(Node* object, |
| 1074 InstanceType instance_type) { | 1093 InstanceType instance_type) { |
| 1075 return Word32Equal(LoadInstanceType(object), Int32Constant(instance_type)); | 1094 return Word32Equal(LoadInstanceType(object), Int32Constant(instance_type)); |
| (...skipping 34 matching lines...) | |
| 1110 CSA_SLOW_ASSERT(this, IsMap(map)); | 1129 CSA_SLOW_ASSERT(this, IsMap(map)); |
| 1111 return LoadObjectField(map, Map::kBitField2Offset, MachineType::Uint8()); | 1130 return LoadObjectField(map, Map::kBitField2Offset, MachineType::Uint8()); |
| 1112 } | 1131 } |
| 1113 | 1132 |
| 1114 Node* CodeStubAssembler::LoadMapBitField3(Node* map) { | 1133 Node* CodeStubAssembler::LoadMapBitField3(Node* map) { |
| 1115 CSA_SLOW_ASSERT(this, IsMap(map)); | 1134 CSA_SLOW_ASSERT(this, IsMap(map)); |
| 1116 return LoadObjectField(map, Map::kBitField3Offset, MachineType::Uint32()); | 1135 return LoadObjectField(map, Map::kBitField3Offset, MachineType::Uint32()); |
| 1117 } | 1136 } |
| 1118 | 1137 |
| 1119 Node* CodeStubAssembler::LoadMapInstanceType(Node* map) { | 1138 Node* CodeStubAssembler::LoadMapInstanceType(Node* map) { |
| 1139 CSA_SLOW_ASSERT(this, IsMap(map)); | |
| 1120 return LoadObjectField(map, Map::kInstanceTypeOffset, MachineType::Uint8()); | 1140 return LoadObjectField(map, Map::kInstanceTypeOffset, MachineType::Uint8()); |
| 1121 } | 1141 } |
| 1122 | 1142 |
| 1123 Node* CodeStubAssembler::LoadMapElementsKind(Node* map) { | 1143 Node* CodeStubAssembler::LoadMapElementsKind(Node* map) { |
| 1124 CSA_SLOW_ASSERT(this, IsMap(map)); | 1144 CSA_SLOW_ASSERT(this, IsMap(map)); |
| 1125 Node* bit_field2 = LoadMapBitField2(map); | 1145 Node* bit_field2 = LoadMapBitField2(map); |
| 1126 return DecodeWord32<Map::ElementsKindBits>(bit_field2); | 1146 return DecodeWord32<Map::ElementsKindBits>(bit_field2); |
| 1127 } | 1147 } |
| 1128 | 1148 |
| 1129 Node* CodeStubAssembler::LoadMapDescriptors(Node* map) { | 1149 Node* CodeStubAssembler::LoadMapDescriptors(Node* map) { |
| (...skipping 63 matching lines...) | |
| 1193 result.Bind( | 1213 result.Bind( |
| 1194 LoadObjectField(result.value(), Map::kConstructorOrBackPointerOffset)); | 1214 LoadObjectField(result.value(), Map::kConstructorOrBackPointerOffset)); |
| 1195 Goto(&loop); | 1215 Goto(&loop); |
| 1196 } | 1216 } |
| 1197 BIND(&done); | 1217 BIND(&done); |
| 1198 return result.value(); | 1218 return result.value(); |
| 1199 } | 1219 } |
| 1200 | 1220 |
| 1201 Node* CodeStubAssembler::LoadSharedFunctionInfoSpecialField( | 1221 Node* CodeStubAssembler::LoadSharedFunctionInfoSpecialField( |
| 1202 Node* shared, int offset, ParameterMode mode) { | 1222 Node* shared, int offset, ParameterMode mode) { |
| 1223 CSA_SLOW_ASSERT(this, HasInstanceType(shared, SHARED_FUNCTION_INFO_TYPE)); | |
| 1203 if (Is64()) { | 1224 if (Is64()) { |
| 1204 Node* result = LoadObjectField(shared, offset, MachineType::Int32()); | 1225 Node* result = LoadObjectField(shared, offset, MachineType::Int32()); |
| 1205 if (mode == SMI_PARAMETERS) { | 1226 if (mode == SMI_PARAMETERS) { |
| 1206 result = SmiTag(result); | 1227 result = SmiTag(result); |
| 1207 } else { | 1228 } else { |
| 1208 result = ChangeUint32ToWord(result); | 1229 result = ChangeUint32ToWord(result); |
| 1209 } | 1230 } |
| 1210 return result; | 1231 return result; |
| 1211 } else { | 1232 } else { |
| 1212 Node* result = LoadObjectField(shared, offset); | 1233 Node* result = LoadObjectField(shared, offset); |
| (...skipping 40 matching lines...) | |
| 1253 Node* value = LoadWeakCellValueUnchecked(weak_cell); | 1274 Node* value = LoadWeakCellValueUnchecked(weak_cell); |
| 1254 if (if_cleared != nullptr) { | 1275 if (if_cleared != nullptr) { |
| 1255 GotoIf(WordEqual(value, IntPtrConstant(0)), if_cleared); | 1276 GotoIf(WordEqual(value, IntPtrConstant(0)), if_cleared); |
| 1256 } | 1277 } |
| 1257 return value; | 1278 return value; |
| 1258 } | 1279 } |
| 1259 | 1280 |
| 1260 Node* CodeStubAssembler::LoadFixedArrayElement(Node* object, Node* index_node, | 1281 Node* CodeStubAssembler::LoadFixedArrayElement(Node* object, Node* index_node, |
| 1261 int additional_offset, | 1282 int additional_offset, |
| 1262 ParameterMode parameter_mode) { | 1283 ParameterMode parameter_mode) { |
| 1284 CSA_SLOW_ASSERT(this, IsFixedArray(object)); | |
| 1263 int32_t header_size = | 1285 int32_t header_size = |
| 1264 FixedArray::kHeaderSize + additional_offset - kHeapObjectTag; | 1286 FixedArray::kHeaderSize + additional_offset - kHeapObjectTag; |
| 1265 Node* offset = ElementOffsetFromIndex(index_node, FAST_HOLEY_ELEMENTS, | 1287 Node* offset = ElementOffsetFromIndex(index_node, FAST_HOLEY_ELEMENTS, |
| 1266 parameter_mode, header_size); | 1288 parameter_mode, header_size); |
| 1267 return Load(MachineType::AnyTagged(), object, offset); | 1289 return Load(MachineType::AnyTagged(), object, offset); |
| 1268 } | 1290 } |
| 1269 | 1291 |
| 1270 Node* CodeStubAssembler::LoadFixedTypedArrayElement( | 1292 Node* CodeStubAssembler::LoadFixedTypedArrayElement( |
| 1271 Node* data_pointer, Node* index_node, ElementsKind elements_kind, | 1293 Node* data_pointer, Node* index_node, ElementsKind elements_kind, |
| 1272 ParameterMode parameter_mode) { | 1294 ParameterMode parameter_mode) { |
| (...skipping 54 matching lines...) | |
| 1327 return AllocateHeapNumberWithValue(value); | 1349 return AllocateHeapNumberWithValue(value); |
| 1328 default: | 1350 default: |
| 1329 UNREACHABLE(); | 1351 UNREACHABLE(); |
| 1330 return nullptr; | 1352 return nullptr; |
| 1331 } | 1353 } |
| 1332 } | 1354 } |
| 1333 | 1355 |
| 1334 Node* CodeStubAssembler::LoadAndUntagToWord32FixedArrayElement( | 1356 Node* CodeStubAssembler::LoadAndUntagToWord32FixedArrayElement( |
| 1335 Node* object, Node* index_node, int additional_offset, | 1357 Node* object, Node* index_node, int additional_offset, |
| 1336 ParameterMode parameter_mode) { | 1358 ParameterMode parameter_mode) { |
| 1359 CSA_SLOW_ASSERT(this, IsFixedArray(object)); | |
| 1360 CSA_SLOW_ASSERT(this, MatchesParameterMode(index_node, parameter_mode)); | |
| 1337 int32_t header_size = | 1361 int32_t header_size = |
| 1338 FixedArray::kHeaderSize + additional_offset - kHeapObjectTag; | 1362 FixedArray::kHeaderSize + additional_offset - kHeapObjectTag; |
| 1339 #if V8_TARGET_LITTLE_ENDIAN | 1363 #if V8_TARGET_LITTLE_ENDIAN |
| 1340 if (Is64()) { | 1364 if (Is64()) { |
| 1341 header_size += kPointerSize / 2; | 1365 header_size += kPointerSize / 2; |
| 1342 } | 1366 } |
| 1343 #endif | 1367 #endif |
| 1344 Node* offset = ElementOffsetFromIndex(index_node, FAST_HOLEY_ELEMENTS, | 1368 Node* offset = ElementOffsetFromIndex(index_node, FAST_HOLEY_ELEMENTS, |
| 1345 parameter_mode, header_size); | 1369 parameter_mode, header_size); |
| 1346 if (Is64()) { | 1370 if (Is64()) { |
| 1347 return Load(MachineType::Int32(), object, offset); | 1371 return Load(MachineType::Int32(), object, offset); |
| 1348 } else { | 1372 } else { |
| 1349 return SmiToWord32(Load(MachineType::AnyTagged(), object, offset)); | 1373 return SmiToWord32(Load(MachineType::AnyTagged(), object, offset)); |
| 1350 } | 1374 } |
| 1351 } | 1375 } |
| 1352 | 1376 |
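The new MatchesParameterMode assertions document whether index_node is a raw intptr or a tagged Smi. The element offset itself is plain header-plus-scaled-index arithmetic, applied to the tagged array pointer. A sketch for the INTPTR_PARAMETERS case, with hypothetical constants for a 64-bit build:

```cpp
#include <cassert>
#include <cstdint>

// Hypothetical constants for a 64-bit build; illustration only.
constexpr int kPointerSize = 8;
constexpr int kPointerSizeLog2 = 3;
constexpr int kHeapObjectTag = 1;
constexpr int kFixedArrayHeaderSize = 2 * kPointerSize;  // map + length

// Mirrors the offset that ElementOffsetFromIndex produces for
// FAST_HOLEY_ELEMENTS with an intptr index; it is added to the *tagged*
// array pointer, hence the -kHeapObjectTag.
intptr_t FixedArrayElementOffset(intptr_t index, int additional_offset = 0) {
  return (index << kPointerSizeLog2) + kFixedArrayHeaderSize +
         additional_offset - kHeapObjectTag;
}

int main() {
  // Element 0 sits right after the two-word header.
  assert(FixedArrayElementOffset(0) == kFixedArrayHeaderSize - kHeapObjectTag);
  // Consecutive elements are one pointer apart.
  assert(FixedArrayElementOffset(3) - FixedArrayElementOffset(2) == kPointerSize);
  return 0;
}
```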
| 1353 Node* CodeStubAssembler::LoadFixedDoubleArrayElement( | 1377 Node* CodeStubAssembler::LoadFixedDoubleArrayElement( |
| 1354 Node* object, Node* index_node, MachineType machine_type, | 1378 Node* object, Node* index_node, MachineType machine_type, |
| 1355 int additional_offset, ParameterMode parameter_mode, Label* if_hole) { | 1379 int additional_offset, ParameterMode parameter_mode, Label* if_hole) { |
| 1380 CSA_SLOW_ASSERT(this, IsFixedDoubleArray(object)); | |
| 1381 CSA_SLOW_ASSERT(this, MatchesParameterMode(index_node, parameter_mode)); | |
| 1356 CSA_ASSERT(this, IsFixedDoubleArray(object)); | 1382 CSA_ASSERT(this, IsFixedDoubleArray(object)); |
| 1357 int32_t header_size = | 1383 int32_t header_size = |
| 1358 FixedDoubleArray::kHeaderSize + additional_offset - kHeapObjectTag; | 1384 FixedDoubleArray::kHeaderSize + additional_offset - kHeapObjectTag; |
| 1359 Node* offset = ElementOffsetFromIndex(index_node, FAST_HOLEY_DOUBLE_ELEMENTS, | 1385 Node* offset = ElementOffsetFromIndex(index_node, FAST_HOLEY_DOUBLE_ELEMENTS, |
| 1360 parameter_mode, header_size); | 1386 parameter_mode, header_size); |
| 1361 return LoadDoubleWithHoleCheck(object, offset, if_hole, machine_type); | 1387 return LoadDoubleWithHoleCheck(object, offset, if_hole, machine_type); |
| 1362 } | 1388 } |
| 1363 | 1389 |
| 1364 Node* CodeStubAssembler::LoadDoubleWithHoleCheck(Node* base, Node* offset, | 1390 Node* CodeStubAssembler::LoadDoubleWithHoleCheck(Node* base, Node* offset, |
| 1365 Label* if_hole, | 1391 Label* if_hole, |
| (...skipping 14 matching lines...) | |
| 1380 } | 1406 } |
| 1381 } | 1407 } |
| 1382 if (machine_type.IsNone()) { | 1408 if (machine_type.IsNone()) { |
| 1383 // This means the actual value is not needed. | 1409 // This means the actual value is not needed. |
| 1384 return nullptr; | 1410 return nullptr; |
| 1385 } | 1411 } |
| 1386 return Load(machine_type, base, offset); | 1412 return Load(machine_type, base, offset); |
| 1387 } | 1413 } |
| 1388 | 1414 |
| 1389 Node* CodeStubAssembler::LoadContextElement(Node* context, int slot_index) { | 1415 Node* CodeStubAssembler::LoadContextElement(Node* context, int slot_index) { |
| 1416 CSA_SLOW_ASSERT(this, IsFixedArray(context)); | |
| 1390 int offset = Context::SlotOffset(slot_index); | 1417 int offset = Context::SlotOffset(slot_index); |
| 1391 return Load(MachineType::AnyTagged(), context, IntPtrConstant(offset)); | 1418 return Load(MachineType::AnyTagged(), context, IntPtrConstant(offset)); |
| 1392 } | 1419 } |
| 1393 | 1420 |
| 1394 Node* CodeStubAssembler::LoadContextElement(Node* context, Node* slot_index) { | 1421 Node* CodeStubAssembler::LoadContextElement(Node* context, Node* slot_index) { |
| 1422 CSA_SLOW_ASSERT(this, IsFixedArray(context)); | |
| 1395 Node* offset = | 1423 Node* offset = |
| 1396 IntPtrAdd(WordShl(slot_index, kPointerSizeLog2), | 1424 IntPtrAdd(WordShl(slot_index, kPointerSizeLog2), |
| 1397 IntPtrConstant(Context::kHeaderSize - kHeapObjectTag)); | 1425 IntPtrConstant(Context::kHeaderSize - kHeapObjectTag)); |
| 1398 return Load(MachineType::AnyTagged(), context, offset); | 1426 return Load(MachineType::AnyTagged(), context, offset); |
| 1399 } | 1427 } |
| 1400 | 1428 |
| 1401 Node* CodeStubAssembler::StoreContextElement(Node* context, int slot_index, | 1429 Node* CodeStubAssembler::StoreContextElement(Node* context, int slot_index, |
| 1402 Node* value) { | 1430 Node* value) { |
| 1431 CSA_SLOW_ASSERT(this, IsFixedArray(context)); | |
| 1403 int offset = Context::SlotOffset(slot_index); | 1432 int offset = Context::SlotOffset(slot_index); |
| 1404 return Store(context, IntPtrConstant(offset), value); | 1433 return Store(context, IntPtrConstant(offset), value); |
| 1405 } | 1434 } |
| 1406 | 1435 |
| 1407 Node* CodeStubAssembler::StoreContextElement(Node* context, Node* slot_index, | 1436 Node* CodeStubAssembler::StoreContextElement(Node* context, Node* slot_index, |
| 1408 Node* value) { | 1437 Node* value) { |
| 1438 CSA_SLOW_ASSERT(this, IsFixedArray(context)); | |
| 1409 Node* offset = | 1439 Node* offset = |
| 1410 IntPtrAdd(WordShl(slot_index, kPointerSizeLog2), | 1440 IntPtrAdd(WordShl(slot_index, kPointerSizeLog2), |
| 1411 IntPtrConstant(Context::kHeaderSize - kHeapObjectTag)); | 1441 IntPtrConstant(Context::kHeaderSize - kHeapObjectTag)); |
| 1412 return Store(context, offset, value); | 1442 return Store(context, offset, value); |
| 1413 } | 1443 } |
| 1414 | 1444 |
| 1415 Node* CodeStubAssembler::StoreContextElementNoWriteBarrier(Node* context, | 1445 Node* CodeStubAssembler::StoreContextElementNoWriteBarrier(Node* context, |
| 1416 int slot_index, | 1446 int slot_index, |
| 1417 Node* value) { | 1447 Node* value) { |
| 1448 CSA_SLOW_ASSERT(this, IsFixedArray(context)); | |
| 1418 int offset = Context::SlotOffset(slot_index); | 1449 int offset = Context::SlotOffset(slot_index); |
| 1419 return StoreNoWriteBarrier(MachineRepresentation::kTagged, context, | 1450 return StoreNoWriteBarrier(MachineRepresentation::kTagged, context, |
| 1420 IntPtrConstant(offset), value); | 1451 IntPtrConstant(offset), value); |
| 1421 } | 1452 } |
| 1422 | 1453 |
| 1423 Node* CodeStubAssembler::LoadNativeContext(Node* context) { | 1454 Node* CodeStubAssembler::LoadNativeContext(Node* context) { |
| 1455 CSA_SLOW_ASSERT(this, IsFixedArray(context)); | |
| 1424 return LoadContextElement(context, Context::NATIVE_CONTEXT_INDEX); | 1456 return LoadContextElement(context, Context::NATIVE_CONTEXT_INDEX); |
| 1425 } | 1457 } |
| 1426 | 1458 |
| 1427 Node* CodeStubAssembler::LoadJSArrayElementsMap(ElementsKind kind, | 1459 Node* CodeStubAssembler::LoadJSArrayElementsMap(ElementsKind kind, |
| 1428 Node* native_context) { | 1460 Node* native_context) { |
| 1429 CSA_ASSERT(this, IsNativeContext(native_context)); | 1461 CSA_ASSERT(this, IsNativeContext(native_context)); |
| 1430 return LoadContextElement(native_context, Context::ArrayMapIndex(kind)); | 1462 return LoadContextElement(native_context, Context::ArrayMapIndex(kind)); |
| 1431 } | 1463 } |
| 1432 | 1464 |
| 1433 Node* CodeStubAssembler::LoadJSFunctionPrototype(Node* function, | 1465 Node* CodeStubAssembler::LoadJSFunctionPrototype(Node* function, |
| (...skipping 11 matching lines...) | |
| 1445 GotoIfNot(IsMap(proto_or_map), &done); | 1477 GotoIfNot(IsMap(proto_or_map), &done); |
| 1446 | 1478 |
| 1447 var_result.Bind(LoadMapPrototype(proto_or_map)); | 1479 var_result.Bind(LoadMapPrototype(proto_or_map)); |
| 1448 Goto(&done); | 1480 Goto(&done); |
| 1449 | 1481 |
| 1450 BIND(&done); | 1482 BIND(&done); |
| 1451 return var_result.value(); | 1483 return var_result.value(); |
| 1452 } | 1484 } |
| 1453 | 1485 |
| 1454 Node* CodeStubAssembler::StoreHeapNumberValue(Node* object, Node* value) { | 1486 Node* CodeStubAssembler::StoreHeapNumberValue(Node* object, Node* value) { |
| 1487 CSA_SLOW_ASSERT(this, IsHeapNumber(object)); | |
| 1455 return StoreObjectFieldNoWriteBarrier(object, HeapNumber::kValueOffset, value, | 1488 return StoreObjectFieldNoWriteBarrier(object, HeapNumber::kValueOffset, value, |
| 1456 MachineRepresentation::kFloat64); | 1489 MachineRepresentation::kFloat64); |
| 1457 } | 1490 } |
| 1458 | 1491 |
| 1459 Node* CodeStubAssembler::StoreObjectField( | 1492 Node* CodeStubAssembler::StoreObjectField( |
| 1460 Node* object, int offset, Node* value) { | 1493 Node* object, int offset, Node* value) { |
| 1494 CSA_SLOW_ASSERT(this, TaggedIsNotSmi(object)); | |
| 1461 DCHECK_NE(HeapObject::kMapOffset, offset); // Use StoreMap instead. | 1495 DCHECK_NE(HeapObject::kMapOffset, offset); // Use StoreMap instead. |
| 1462 return Store(object, IntPtrConstant(offset - kHeapObjectTag), value); | 1496 return Store(object, IntPtrConstant(offset - kHeapObjectTag), value); |
| 1463 } | 1497 } |
| 1464 | 1498 |
| 1465 Node* CodeStubAssembler::StoreObjectField(Node* object, Node* offset, | 1499 Node* CodeStubAssembler::StoreObjectField(Node* object, Node* offset, |
| 1466 Node* value) { | 1500 Node* value) { |
| 1501 CSA_SLOW_ASSERT(this, TaggedIsNotSmi(object)); | |
| 1467 int const_offset; | 1502 int const_offset; |
| 1468 if (ToInt32Constant(offset, const_offset)) { | 1503 if (ToInt32Constant(offset, const_offset)) { |
| 1469 return StoreObjectField(object, const_offset, value); | 1504 return StoreObjectField(object, const_offset, value); |
| 1470 } | 1505 } |
| 1471 return Store(object, IntPtrSub(offset, IntPtrConstant(kHeapObjectTag)), | 1506 return Store(object, IntPtrSub(offset, IntPtrConstant(kHeapObjectTag)), |
| 1472 value); | 1507 value); |
| 1473 } | 1508 } |
| 1474 | 1509 |
| 1475 Node* CodeStubAssembler::StoreObjectFieldNoWriteBarrier( | 1510 Node* CodeStubAssembler::StoreObjectFieldNoWriteBarrier( |
| 1476 Node* object, int offset, Node* value, MachineRepresentation rep) { | 1511 Node* object, int offset, Node* value, MachineRepresentation rep) { |
| 1512 CSA_SLOW_ASSERT(this, TaggedIsNotSmi(object)); | |
| 1477 return StoreNoWriteBarrier(rep, object, | 1513 return StoreNoWriteBarrier(rep, object, |
| 1478 IntPtrConstant(offset - kHeapObjectTag), value); | 1514 IntPtrConstant(offset - kHeapObjectTag), value); |
| 1479 } | 1515 } |
| 1480 | 1516 |
| 1481 Node* CodeStubAssembler::StoreObjectFieldNoWriteBarrier( | 1517 Node* CodeStubAssembler::StoreObjectFieldNoWriteBarrier( |
| 1482 Node* object, Node* offset, Node* value, MachineRepresentation rep) { | 1518 Node* object, Node* offset, Node* value, MachineRepresentation rep) { |
| 1519 CSA_SLOW_ASSERT(this, TaggedIsNotSmi(object)); | |
| 1483 int const_offset; | 1520 int const_offset; |
| 1484 if (ToInt32Constant(offset, const_offset)) { | 1521 if (ToInt32Constant(offset, const_offset)) { |
| 1485 return StoreObjectFieldNoWriteBarrier(object, const_offset, value, rep); | 1522 return StoreObjectFieldNoWriteBarrier(object, const_offset, value, rep); |
| 1486 } | 1523 } |
| 1487 return StoreNoWriteBarrier( | 1524 return StoreNoWriteBarrier( |
| 1488 rep, object, IntPtrSub(offset, IntPtrConstant(kHeapObjectTag)), value); | 1525 rep, object, IntPtrSub(offset, IntPtrConstant(kHeapObjectTag)), value); |
| 1489 } | 1526 } |
| 1490 | 1527 |
| 1491 Node* CodeStubAssembler::StoreMap(Node* object, Node* map) { | 1528 Node* CodeStubAssembler::StoreMap(Node* object, Node* map) { |
| 1529 CSA_SLOW_ASSERT(this, TaggedIsNotSmi(object)); | |
| 1492 CSA_SLOW_ASSERT(this, IsMap(map)); | 1530 CSA_SLOW_ASSERT(this, IsMap(map)); |
| 1493 return StoreWithMapWriteBarrier( | 1531 return StoreWithMapWriteBarrier( |
| 1494 object, IntPtrConstant(HeapObject::kMapOffset - kHeapObjectTag), map); | 1532 object, IntPtrConstant(HeapObject::kMapOffset - kHeapObjectTag), map); |
| 1495 } | 1533 } |
| 1496 | 1534 |
| 1497 Node* CodeStubAssembler::StoreMapNoWriteBarrier( | 1535 Node* CodeStubAssembler::StoreMapNoWriteBarrier( |
| 1498 Node* object, Heap::RootListIndex map_root_index) { | 1536 Node* object, Heap::RootListIndex map_root_index) { |
| 1499 return StoreMapNoWriteBarrier(object, LoadRoot(map_root_index)); | 1537 return StoreMapNoWriteBarrier(object, LoadRoot(map_root_index)); |
| 1500 } | 1538 } |
| 1501 | 1539 |
| 1502 Node* CodeStubAssembler::StoreMapNoWriteBarrier(Node* object, Node* map) { | 1540 Node* CodeStubAssembler::StoreMapNoWriteBarrier(Node* object, Node* map) { |
| 1541 CSA_SLOW_ASSERT(this, TaggedIsNotSmi(object)); | |
| 1503 CSA_SLOW_ASSERT(this, IsMap(map)); | 1542 CSA_SLOW_ASSERT(this, IsMap(map)); |
| 1504 return StoreNoWriteBarrier( | 1543 return StoreNoWriteBarrier( |
| 1505 MachineRepresentation::kTagged, object, | 1544 MachineRepresentation::kTagged, object, |
| 1506 IntPtrConstant(HeapObject::kMapOffset - kHeapObjectTag), map); | 1545 IntPtrConstant(HeapObject::kMapOffset - kHeapObjectTag), map); |
| 1507 } | 1546 } |
| 1508 | 1547 |
| 1509 Node* CodeStubAssembler::StoreObjectFieldRoot(Node* object, int offset, | 1548 Node* CodeStubAssembler::StoreObjectFieldRoot(Node* object, int offset, |
| 1510 Heap::RootListIndex root_index) { | 1549 Heap::RootListIndex root_index) { |
| 1511 if (Heap::RootIsImmortalImmovable(root_index)) { | 1550 if (Heap::RootIsImmortalImmovable(root_index)) { |
| 1512 return StoreObjectFieldNoWriteBarrier(object, offset, LoadRoot(root_index)); | 1551 return StoreObjectFieldNoWriteBarrier(object, offset, LoadRoot(root_index)); |
| 1513 } else { | 1552 } else { |
| 1514 return StoreObjectField(object, offset, LoadRoot(root_index)); | 1553 return StoreObjectField(object, offset, LoadRoot(root_index)); |
| 1515 } | 1554 } |
| 1516 } | 1555 } |
| 1517 | 1556 |
| 1518 Node* CodeStubAssembler::StoreFixedArrayElement(Node* object, Node* index_node, | 1557 Node* CodeStubAssembler::StoreFixedArrayElement(Node* object, Node* index_node, |
| 1519 Node* value, | 1558 Node* value, |
| 1520 WriteBarrierMode barrier_mode, | 1559 WriteBarrierMode barrier_mode, |
| 1521 int additional_offset, | 1560 int additional_offset, |
| 1522 ParameterMode parameter_mode) { | 1561 ParameterMode parameter_mode) { |
| 1562 CSA_SLOW_ASSERT(this, IsFixedArray(object)); | |
| 1563 CSA_SLOW_ASSERT(this, MatchesParameterMode(index_node, parameter_mode)); | |
| 1523 DCHECK(barrier_mode == SKIP_WRITE_BARRIER || | 1564 DCHECK(barrier_mode == SKIP_WRITE_BARRIER || |
| 1524 barrier_mode == UPDATE_WRITE_BARRIER); | 1565 barrier_mode == UPDATE_WRITE_BARRIER); |
| 1525 int header_size = | 1566 int header_size = |
| 1526 FixedArray::kHeaderSize + additional_offset - kHeapObjectTag; | 1567 FixedArray::kHeaderSize + additional_offset - kHeapObjectTag; |
| 1527 Node* offset = ElementOffsetFromIndex(index_node, FAST_HOLEY_ELEMENTS, | 1568 Node* offset = ElementOffsetFromIndex(index_node, FAST_HOLEY_ELEMENTS, |
| 1528 parameter_mode, header_size); | 1569 parameter_mode, header_size); |
| 1529 if (barrier_mode == SKIP_WRITE_BARRIER) { | 1570 if (barrier_mode == SKIP_WRITE_BARRIER) { |
| 1530 return StoreNoWriteBarrier(MachineRepresentation::kTagged, object, offset, | 1571 return StoreNoWriteBarrier(MachineRepresentation::kTagged, object, offset, |
| 1531 value); | 1572 value); |
| 1532 } else { | 1573 } else { |
| 1533 return Store(object, offset, value); | 1574 return Store(object, offset, value); |
| 1534 } | 1575 } |
| 1535 } | 1576 } |
| 1536 | 1577 |
| 1537 Node* CodeStubAssembler::StoreFixedDoubleArrayElement( | 1578 Node* CodeStubAssembler::StoreFixedDoubleArrayElement( |
| 1538 Node* object, Node* index_node, Node* value, ParameterMode parameter_mode) { | 1579 Node* object, Node* index_node, Node* value, ParameterMode parameter_mode) { |
| 1539 CSA_ASSERT(this, IsFixedDoubleArray(object)); | 1580 CSA_ASSERT(this, IsFixedDoubleArray(object)); |
| 1581 CSA_SLOW_ASSERT(this, MatchesParameterMode(index_node, parameter_mode)); | |
| 1540 Node* offset = | 1582 Node* offset = |
| 1541 ElementOffsetFromIndex(index_node, FAST_DOUBLE_ELEMENTS, parameter_mode, | 1583 ElementOffsetFromIndex(index_node, FAST_DOUBLE_ELEMENTS, parameter_mode, |
| 1542 FixedArray::kHeaderSize - kHeapObjectTag); | 1584 FixedArray::kHeaderSize - kHeapObjectTag); |
| 1543 MachineRepresentation rep = MachineRepresentation::kFloat64; | 1585 MachineRepresentation rep = MachineRepresentation::kFloat64; |
| 1544 return StoreNoWriteBarrier(rep, object, offset, value); | 1586 return StoreNoWriteBarrier(rep, object, offset, value); |
| 1545 } | 1587 } |
| 1546 | 1588 |
| 1547 Node* CodeStubAssembler::EnsureArrayPushable(Node* receiver, Label* bailout) { | 1589 Node* CodeStubAssembler::EnsureArrayPushable(Node* receiver, Label* bailout) { |
| 1548 // Disallow pushing onto prototypes. It might be the JSArray prototype. | 1590 // Disallow pushing onto prototypes. It might be the JSArray prototype. |
| 1549 // Disallow pushing onto non-extensible objects. | 1591 // Disallow pushing onto non-extensible objects. |
| (...skipping 37 matching lines...) | |
| 1587 kind, capacity, new_capacity, mode, | 1629 kind, capacity, new_capacity, mode, |
| 1588 bailout)); | 1630 bailout)); |
| 1589 Goto(&fits); | 1631 Goto(&fits); |
| 1590 BIND(&fits); | 1632 BIND(&fits); |
| 1591 } | 1633 } |
| 1592 | 1634 |
| 1593 Node* CodeStubAssembler::BuildAppendJSArray(ElementsKind kind, Node* array, | 1635 Node* CodeStubAssembler::BuildAppendJSArray(ElementsKind kind, Node* array, |
| 1594 CodeStubArguments& args, | 1636 CodeStubArguments& args, |
| 1595 Variable& arg_index, | 1637 Variable& arg_index, |
| 1596 Label* bailout) { | 1638 Label* bailout) { |
| 1639 CSA_SLOW_ASSERT(this, IsJSArray(array)); | |
| 1597 Comment("BuildAppendJSArray: %s", ElementsKindToString(kind)); | 1640 Comment("BuildAppendJSArray: %s", ElementsKindToString(kind)); |
| 1598 Label pre_bailout(this); | 1641 Label pre_bailout(this); |
| 1599 Label success(this); | 1642 Label success(this); |
| 1600 VARIABLE(var_tagged_length, MachineRepresentation::kTagged); | 1643 VARIABLE(var_tagged_length, MachineRepresentation::kTagged); |
| 1601 ParameterMode mode = OptimalParameterMode(); | 1644 ParameterMode mode = OptimalParameterMode(); |
| 1602 VARIABLE(var_length, OptimalParameterRepresentation(), | 1645 VARIABLE(var_length, OptimalParameterRepresentation(), |
| 1603 TaggedToParameter(LoadJSArrayLength(array), mode)); | 1646 TaggedToParameter(LoadJSArrayLength(array), mode)); |
| 1604 VARIABLE(var_elements, MachineRepresentation::kTagged, LoadElements(array)); | 1647 VARIABLE(var_elements, MachineRepresentation::kTagged, LoadElements(array)); |
| 1605 | 1648 |
| 1606 // Resize the capacity of the fixed array if it doesn't fit. | 1649 // Resize the capacity of the fixed array if it doesn't fit. |
| (...skipping 50 matching lines...) | |
| 1657 Float64SilenceNaN(double_value), mode); | 1700 Float64SilenceNaN(double_value), mode); |
| 1658 } else { | 1701 } else { |
| 1659 WriteBarrierMode barrier_mode = | 1702 WriteBarrierMode barrier_mode = |
| 1660 IsFastSmiElementsKind(kind) ? SKIP_WRITE_BARRIER : UPDATE_WRITE_BARRIER; | 1703 IsFastSmiElementsKind(kind) ? SKIP_WRITE_BARRIER : UPDATE_WRITE_BARRIER; |
| 1661 StoreFixedArrayElement(elements, index, value, barrier_mode, 0, mode); | 1704 StoreFixedArrayElement(elements, index, value, barrier_mode, 0, mode); |
| 1662 } | 1705 } |
| 1663 } | 1706 } |
| 1664 | 1707 |
| 1665 void CodeStubAssembler::BuildAppendJSArray(ElementsKind kind, Node* array, | 1708 void CodeStubAssembler::BuildAppendJSArray(ElementsKind kind, Node* array, |
| 1666 Node* value, Label* bailout) { | 1709 Node* value, Label* bailout) { |
| 1710 CSA_SLOW_ASSERT(this, IsJSArray(array)); | |
| 1667 Comment("BuildAppendJSArray: %s", ElementsKindToString(kind)); | 1711 Comment("BuildAppendJSArray: %s", ElementsKindToString(kind)); |
| 1668 ParameterMode mode = OptimalParameterMode(); | 1712 ParameterMode mode = OptimalParameterMode(); |
| 1669 VARIABLE(var_length, OptimalParameterRepresentation(), | 1713 VARIABLE(var_length, OptimalParameterRepresentation(), |
| 1670 TaggedToParameter(LoadJSArrayLength(array), mode)); | 1714 TaggedToParameter(LoadJSArrayLength(array), mode)); |
| 1671 VARIABLE(var_elements, MachineRepresentation::kTagged, LoadElements(array)); | 1715 VARIABLE(var_elements, MachineRepresentation::kTagged, LoadElements(array)); |
| 1672 | 1716 |
| 1673 // Resize the capacity of the fixed array if it doesn't fit. | 1717 // Resize the capacity of the fixed array if it doesn't fit. |
| 1674 Node* growth = IntPtrOrSmiConstant(1, mode); | 1718 Node* growth = IntPtrOrSmiConstant(1, mode); |
| 1675 PossiblyGrowElementsCapacity(mode, kind, array, var_length.value(), | 1719 PossiblyGrowElementsCapacity(mode, kind, array, var_length.value(), |
| 1676 &var_elements, growth, bailout); | 1720 &var_elements, growth, bailout); |
| (...skipping 39 matching lines...) | |
| 1716 StoreObjectFieldNoWriteBarrier(result, SeqOneByteString::kHashFieldSlot, | 1760 StoreObjectFieldNoWriteBarrier(result, SeqOneByteString::kHashFieldSlot, |
| 1717 IntPtrConstant(String::kEmptyHashField), | 1761 IntPtrConstant(String::kEmptyHashField), |
| 1718 MachineType::PointerRepresentation()); | 1762 MachineType::PointerRepresentation()); |
| 1719 return result; | 1763 return result; |
| 1720 } | 1764 } |
| 1721 | 1765 |
| 1722 Node* CodeStubAssembler::AllocateSeqOneByteString(Node* context, Node* length, | 1766 Node* CodeStubAssembler::AllocateSeqOneByteString(Node* context, Node* length, |
| 1723 ParameterMode mode, | 1767 ParameterMode mode, |
| 1724 AllocationFlags flags) { | 1768 AllocationFlags flags) { |
| 1725 Comment("AllocateSeqOneByteString"); | 1769 Comment("AllocateSeqOneByteString"); |
| 1770 CSA_SLOW_ASSERT(this, IsFixedArray(context)); | |
| 1771 CSA_SLOW_ASSERT(this, MatchesParameterMode(length, mode)); | |
| 1726 VARIABLE(var_result, MachineRepresentation::kTagged); | 1772 VARIABLE(var_result, MachineRepresentation::kTagged); |
| 1727 | 1773 |
| 1728 // Compute the SeqOneByteString size and check if it fits into new space. | 1774 // Compute the SeqOneByteString size and check if it fits into new space. |
| 1729 Label if_lengthiszero(this), if_sizeissmall(this), | 1775 Label if_lengthiszero(this), if_sizeissmall(this), |
| 1730 if_notsizeissmall(this, Label::kDeferred), if_join(this); | 1776 if_notsizeissmall(this, Label::kDeferred), if_join(this); |
| 1731 GotoIf(WordEqual(length, IntPtrOrSmiConstant(0, mode)), &if_lengthiszero); | 1777 GotoIf(WordEqual(length, IntPtrOrSmiConstant(0, mode)), &if_lengthiszero); |
| 1732 | 1778 |
| 1733 Node* raw_size = GetArrayAllocationSize( | 1779 Node* raw_size = GetArrayAllocationSize( |
| 1734 length, UINT8_ELEMENTS, mode, | 1780 length, UINT8_ELEMENTS, mode, |
| 1735 SeqOneByteString::kHeaderSize + kObjectAlignmentMask); | 1781 SeqOneByteString::kHeaderSize + kObjectAlignmentMask); |
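Folding kObjectAlignmentMask into the header constant is the usual round-up trick: masking the raw size down to object alignment (in the elided lines below) then yields RoundUp(header + length, kObjectAlignment). A worked sketch with hypothetical constants:

```cpp
#include <cassert>
#include <cstdint>

// Hypothetical constants for illustration (8-byte object alignment,
// a header of three tagged words as for a sequential string).
constexpr intptr_t kObjectAlignment = 8;
constexpr intptr_t kObjectAlignmentMask = kObjectAlignment - 1;
constexpr intptr_t kSeqStringHeaderSize = 3 * 8;

// raw_size = header + mask + length; masking the low bits off rounds the
// total allocation up to the next alignment boundary.
intptr_t SeqOneByteStringSize(intptr_t length) {
  intptr_t raw_size = kSeqStringHeaderSize + kObjectAlignmentMask + length;
  return raw_size & ~kObjectAlignmentMask;
}

int main() {
  assert(SeqOneByteStringSize(0) == kSeqStringHeaderSize);
  assert(SeqOneByteStringSize(1) == kSeqStringHeaderSize + kObjectAlignment);
  assert(SeqOneByteStringSize(8) == kSeqStringHeaderSize + 8);
  return 0;
}
```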
| (...skipping 50 matching lines...) | |
| 1786 // Initialize both used and unused parts of hash field slot at once. | 1832 // Initialize both used and unused parts of hash field slot at once. |
| 1787 StoreObjectFieldNoWriteBarrier(result, SeqTwoByteString::kHashFieldSlot, | 1833 StoreObjectFieldNoWriteBarrier(result, SeqTwoByteString::kHashFieldSlot, |
| 1788 IntPtrConstant(String::kEmptyHashField), | 1834 IntPtrConstant(String::kEmptyHashField), |
| 1789 MachineType::PointerRepresentation()); | 1835 MachineType::PointerRepresentation()); |
| 1790 return result; | 1836 return result; |
| 1791 } | 1837 } |
| 1792 | 1838 |
| 1793 Node* CodeStubAssembler::AllocateSeqTwoByteString(Node* context, Node* length, | 1839 Node* CodeStubAssembler::AllocateSeqTwoByteString(Node* context, Node* length, |
| 1794 ParameterMode mode, | 1840 ParameterMode mode, |
| 1795 AllocationFlags flags) { | 1841 AllocationFlags flags) { |
| 1842 CSA_SLOW_ASSERT(this, IsFixedArray(context)); | |
| 1843 CSA_SLOW_ASSERT(this, MatchesParameterMode(length, mode)); | |
| 1796 Comment("AllocateSeqTwoByteString"); | 1844 Comment("AllocateSeqTwoByteString"); |
| 1797 VARIABLE(var_result, MachineRepresentation::kTagged); | 1845 VARIABLE(var_result, MachineRepresentation::kTagged); |
| 1798 | 1846 |
| 1799 // Compute the SeqTwoByteString size and check if it fits into new space. | 1847 // Compute the SeqTwoByteString size and check if it fits into new space. |
| 1800 Label if_lengthiszero(this), if_sizeissmall(this), | 1848 Label if_lengthiszero(this), if_sizeissmall(this), |
| 1801 if_notsizeissmall(this, Label::kDeferred), if_join(this); | 1849 if_notsizeissmall(this, Label::kDeferred), if_join(this); |
| 1802 GotoIf(WordEqual(length, IntPtrOrSmiConstant(0, mode)), &if_lengthiszero); | 1850 GotoIf(WordEqual(length, IntPtrOrSmiConstant(0, mode)), &if_lengthiszero); |
| 1803 | 1851 |
| 1804 Node* raw_size = GetArrayAllocationSize( | 1852 Node* raw_size = GetArrayAllocationSize( |
| 1805 length, UINT16_ELEMENTS, mode, | 1853 length, UINT16_ELEMENTS, mode, |
| (...skipping 35 matching lines...) | |
| 1841 Goto(&if_join); | 1889 Goto(&if_join); |
| 1842 } | 1890 } |
| 1843 | 1891 |
| 1844 BIND(&if_join); | 1892 BIND(&if_join); |
| 1845 return var_result.value(); | 1893 return var_result.value(); |
| 1846 } | 1894 } |
| 1847 | 1895 |
| 1848 Node* CodeStubAssembler::AllocateSlicedString( | 1896 Node* CodeStubAssembler::AllocateSlicedString( |
| 1849 Heap::RootListIndex map_root_index, Node* length, Node* parent, | 1897 Heap::RootListIndex map_root_index, Node* length, Node* parent, |
| 1850 Node* offset) { | 1898 Node* offset) { |
| 1899 CSA_ASSERT(this, IsString(parent)); | |
| 1851 CSA_ASSERT(this, TaggedIsSmi(length)); | 1900 CSA_ASSERT(this, TaggedIsSmi(length)); |
| 1901 CSA_ASSERT(this, TaggedIsSmi(offset)); | |
| 1852 Node* result = Allocate(SlicedString::kSize); | 1902 Node* result = Allocate(SlicedString::kSize); |
| 1853 DCHECK(Heap::RootIsImmortalImmovable(map_root_index)); | 1903 DCHECK(Heap::RootIsImmortalImmovable(map_root_index)); |
| 1854 StoreMapNoWriteBarrier(result, map_root_index); | 1904 StoreMapNoWriteBarrier(result, map_root_index); |
| 1855 StoreObjectFieldNoWriteBarrier(result, SlicedString::kLengthOffset, length, | 1905 StoreObjectFieldNoWriteBarrier(result, SlicedString::kLengthOffset, length, |
| 1856 MachineRepresentation::kTagged); | 1906 MachineRepresentation::kTagged); |
| 1857 // Initialize both used and unused parts of hash field slot at once. | 1907 // Initialize both used and unused parts of hash field slot at once. |
| 1858 StoreObjectFieldNoWriteBarrier(result, SlicedString::kHashFieldSlot, | 1908 StoreObjectFieldNoWriteBarrier(result, SlicedString::kHashFieldSlot, |
| 1859 IntPtrConstant(String::kEmptyHashField), | 1909 IntPtrConstant(String::kEmptyHashField), |
| 1860 MachineType::PointerRepresentation()); | 1910 MachineType::PointerRepresentation()); |
| 1861 StoreObjectFieldNoWriteBarrier(result, SlicedString::kParentOffset, parent, | 1911 StoreObjectFieldNoWriteBarrier(result, SlicedString::kParentOffset, parent, |
| (...skipping 12 matching lines...) | |
| 1874 Node* CodeStubAssembler::AllocateSlicedTwoByteString(Node* length, Node* parent, | 1924 Node* CodeStubAssembler::AllocateSlicedTwoByteString(Node* length, Node* parent, |
| 1875 Node* offset) { | 1925 Node* offset) { |
| 1876 return AllocateSlicedString(Heap::kSlicedStringMapRootIndex, length, parent, | 1926 return AllocateSlicedString(Heap::kSlicedStringMapRootIndex, length, parent, |
| 1877 offset); | 1927 offset); |
| 1878 } | 1928 } |
| 1879 | 1929 |
| 1880 Node* CodeStubAssembler::AllocateConsString(Heap::RootListIndex map_root_index, | 1930 Node* CodeStubAssembler::AllocateConsString(Heap::RootListIndex map_root_index, |
| 1881 Node* length, Node* first, | 1931 Node* length, Node* first, |
| 1882 Node* second, | 1932 Node* second, |
| 1883 AllocationFlags flags) { | 1933 AllocationFlags flags) { |
| 1934 CSA_ASSERT(this, IsString(first)); | |
| 1935 CSA_ASSERT(this, IsString(second)); | |
| 1884 CSA_ASSERT(this, TaggedIsSmi(length)); | 1936 CSA_ASSERT(this, TaggedIsSmi(length)); |
| 1885 Node* result = Allocate(ConsString::kSize, flags); | 1937 Node* result = Allocate(ConsString::kSize, flags); |
| 1886 DCHECK(Heap::RootIsImmortalImmovable(map_root_index)); | 1938 DCHECK(Heap::RootIsImmortalImmovable(map_root_index)); |
| 1887 StoreMapNoWriteBarrier(result, map_root_index); | 1939 StoreMapNoWriteBarrier(result, map_root_index); |
| 1888 StoreObjectFieldNoWriteBarrier(result, ConsString::kLengthOffset, length, | 1940 StoreObjectFieldNoWriteBarrier(result, ConsString::kLengthOffset, length, |
| 1889 MachineRepresentation::kTagged); | 1941 MachineRepresentation::kTagged); |
| 1890 // Initialize both used and unused parts of hash field slot at once. | 1942 // Initialize both used and unused parts of hash field slot at once. |
| 1891 StoreObjectFieldNoWriteBarrier(result, ConsString::kHashFieldSlot, | 1943 StoreObjectFieldNoWriteBarrier(result, ConsString::kHashFieldSlot, |
| 1892 IntPtrConstant(String::kEmptyHashField), | 1944 IntPtrConstant(String::kEmptyHashField), |
| 1893 MachineType::PointerRepresentation()); | 1945 MachineType::PointerRepresentation()); |
| (...skipping 19 matching lines...) | |
| 1913 | 1965 |
| 1914 Node* CodeStubAssembler::AllocateTwoByteConsString(Node* length, Node* first, | 1966 Node* CodeStubAssembler::AllocateTwoByteConsString(Node* length, Node* first, |
| 1915 Node* second, | 1967 Node* second, |
| 1916 AllocationFlags flags) { | 1968 AllocationFlags flags) { |
| 1917 return AllocateConsString(Heap::kConsStringMapRootIndex, length, first, | 1969 return AllocateConsString(Heap::kConsStringMapRootIndex, length, first, |
| 1918 second, flags); | 1970 second, flags); |
| 1919 } | 1971 } |
| 1920 | 1972 |
| 1921 Node* CodeStubAssembler::NewConsString(Node* context, Node* length, Node* left, | 1973 Node* CodeStubAssembler::NewConsString(Node* context, Node* length, Node* left, |
| 1922 Node* right, AllocationFlags flags) { | 1974 Node* right, AllocationFlags flags) { |
| 1975 CSA_ASSERT(this, IsFixedArray(context)); | |
| 1976 CSA_ASSERT(this, IsString(left)); | |
| 1977 CSA_ASSERT(this, IsString(right)); | |
| 1923 CSA_ASSERT(this, TaggedIsSmi(length)); | 1978 CSA_ASSERT(this, TaggedIsSmi(length)); |
| 1924 // Added string can be a cons string. | 1979 // Added string can be a cons string. |
| 1925 Comment("Allocating ConsString"); | 1980 Comment("Allocating ConsString"); |
| 1926 Node* left_instance_type = LoadInstanceType(left); | 1981 Node* left_instance_type = LoadInstanceType(left); |
| 1927 Node* right_instance_type = LoadInstanceType(right); | 1982 Node* right_instance_type = LoadInstanceType(right); |
| 1928 | 1983 |
| 1929 // Compute intersection and difference of instance types. | 1984 // Compute intersection and difference of instance types. |
| 1930 Node* anded_instance_types = | 1985 Node* anded_instance_types = |
| 1931 Word32And(left_instance_type, right_instance_type); | 1986 Word32And(left_instance_type, right_instance_type); |
| 1932 Node* xored_instance_types = | 1987 Node* xored_instance_types = |
| (...skipping 35 matching lines...) | |
| 1968 result.Bind(AllocateTwoByteConsString(length, left, right, flags)); | 2023 result.Bind(AllocateTwoByteConsString(length, left, right, flags)); |
| 1969 Goto(&done); | 2024 Goto(&done); |
| 1970 | 2025 |
| 1971 BIND(&done); | 2026 BIND(&done); |
| 1972 | 2027 |
| 1973 return result.value(); | 2028 return result.value(); |
| 1974 } | 2029 } |
| 1975 | 2030 |
| 1976 Node* CodeStubAssembler::AllocateRegExpResult(Node* context, Node* length, | 2031 Node* CodeStubAssembler::AllocateRegExpResult(Node* context, Node* length, |
| 1977 Node* index, Node* input) { | 2032 Node* index, Node* input) { |
| 2033 CSA_ASSERT(this, IsFixedArray(context)); | |
| 2034 CSA_ASSERT(this, TaggedIsSmi(index)); | |
| 2035 CSA_ASSERT(this, TaggedIsSmi(length)); | |
| 2036 CSA_ASSERT(this, IsString(input)); | |
| 2037 | |
| 2038 #ifdef DEBUG | |
| 1978 Node* const max_length = | 2039 Node* const max_length = |
| 1979 SmiConstant(Smi::FromInt(JSArray::kInitialMaxFastElementArray)); | 2040 SmiConstant(Smi::FromInt(JSArray::kInitialMaxFastElementArray)); |
| 1980 CSA_ASSERT(this, SmiLessThanOrEqual(length, max_length)); | 2041 CSA_ASSERT(this, SmiLessThanOrEqual(length, max_length)); |
| 1981 USE(max_length); | 2042 #endif // DEBUG |
| 1982 | 2043 |
| 1983 // Allocate the JSRegExpResult. | 2044 // Allocate the JSRegExpResult. |
| 1984 // TODO(jgruber): Fold JSArray and FixedArray allocations, then remove | 2045 // TODO(jgruber): Fold JSArray and FixedArray allocations, then remove |
| 1985 // unneeded store of elements. | 2046 // unneeded store of elements. |
| 1986 Node* const result = Allocate(JSRegExpResult::kSize); | 2047 Node* const result = Allocate(JSRegExpResult::kSize); |
| 1987 | 2048 |
| 1988 // TODO(jgruber): Store map as Heap constant? | 2049 // TODO(jgruber): Store map as Heap constant? |
| 1989 Node* const native_context = LoadNativeContext(context); | 2050 Node* const native_context = LoadNativeContext(context); |
| 1990 Node* const map = | 2051 Node* const map = |
| 1991 LoadContextElement(native_context, Context::REGEXP_RESULT_MAP_INDEX); | 2052 LoadContextElement(native_context, Context::REGEXP_RESULT_MAP_INDEX); |
| (...skipping 72 matching lines...) | |
| 2064 kHeapObjectTag)); | 2125 kHeapObjectTag)); |
| 2065 Node* end_address = IntPtrAdd( | 2126 Node* end_address = IntPtrAdd( |
| 2066 result_word, IntPtrSub(store_size, IntPtrConstant(kHeapObjectTag))); | 2127 result_word, IntPtrSub(store_size, IntPtrConstant(kHeapObjectTag))); |
| 2067 StoreFieldsNoWriteBarrier(start_address, end_address, filler); | 2128 StoreFieldsNoWriteBarrier(start_address, end_address, filler); |
| 2068 return result; | 2129 return result; |
| 2069 } | 2130 } |
| 2070 | 2131 |
| 2071 Node* CodeStubAssembler::CopyNameDictionary(Node* dictionary, | 2132 Node* CodeStubAssembler::CopyNameDictionary(Node* dictionary, |
| 2072 Label* large_object_fallback) { | 2133 Label* large_object_fallback) { |
| 2073 Comment("Copy boilerplate property dict"); | 2134 Comment("Copy boilerplate property dict"); |
| 2135 CSA_SLOW_ASSERT(this, IsDictionary(dictionary)); | |
| 2074 Label done(this); | 2136 Label done(this); |
| 2075 Node* length = SmiUntag(LoadFixedArrayBaseLength(dictionary)); | 2137 Node* length = SmiUntag(LoadFixedArrayBaseLength(dictionary)); |
| 2076 GotoIf( | 2138 GotoIf( |
| 2077 IntPtrGreaterThan(length, IntPtrConstant(FixedArray::kMaxRegularLength)), | 2139 IntPtrGreaterThan(length, IntPtrConstant(FixedArray::kMaxRegularLength)), |
| 2078 large_object_fallback); | 2140 large_object_fallback); |
| 2079 Node* properties = | 2141 Node* properties = |
| 2080 AllocateNameDictionary(SmiUntag(GetCapacity<NameDictionary>(dictionary))); | 2142 AllocateNameDictionary(SmiUntag(GetCapacity<NameDictionary>(dictionary))); |
| 2081 CopyFixedArrayElements(FAST_ELEMENTS, dictionary, properties, length, | 2143 CopyFixedArrayElements(FAST_ELEMENTS, dictionary, properties, length, |
| 2082 SKIP_WRITE_BARRIER, INTPTR_PARAMETERS); | 2144 SKIP_WRITE_BARRIER, INTPTR_PARAMETERS); |
| 2083 return properties; | 2145 return properties; |
| 2084 } | 2146 } |
| 2085 | 2147 |
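For orientation, the copy-or-bailout shape of CopyNameDictionary above can be restated as a small host-side sketch; the std::vector stand-in and the max_regular_length parameter are illustrative only, not the V8 data structures:

```cpp
#include <vector>

// Hedged illustration of the control flow above: refuse to copy backing
// stores that would need a large-object allocation and let the caller's
// fallback handle them; otherwise make a same-capacity element-wise copy.
bool TryCopyDictionary(const std::vector<intptr_t>& dictionary,
                       size_t max_regular_length,
                       std::vector<intptr_t>* out) {
  if (dictionary.size() > max_regular_length) return false;  // large-object fallback
  *out = dictionary;  // element-wise copy (no write barrier needed host-side)
  return true;
}
```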
| 2086 Node* CodeStubAssembler::AllocateJSObjectFromMap(Node* map, Node* properties, | 2148 Node* CodeStubAssembler::AllocateJSObjectFromMap(Node* map, Node* properties, |
| 2087 Node* elements, | 2149 Node* elements, |
| 2088 AllocationFlags flags) { | 2150 AllocationFlags flags) { |
| 2089 CSA_ASSERT(this, IsMap(map)); | 2151 CSA_ASSERT(this, IsMap(map)); |
| 2090 Node* size = | 2152 Node* size = |
| 2091 IntPtrMul(LoadMapInstanceSize(map), IntPtrConstant(kPointerSize)); | 2153 IntPtrMul(LoadMapInstanceSize(map), IntPtrConstant(kPointerSize)); |
| 2092 Node* object = AllocateInNewSpace(size, flags); | 2154 Node* object = AllocateInNewSpace(size, flags); |
| 2093 StoreMapNoWriteBarrier(object, map); | 2155 StoreMapNoWriteBarrier(object, map); |
| 2094 InitializeJSObjectFromMap(object, map, size, properties, elements); | 2156 InitializeJSObjectFromMap(object, map, size, properties, elements); |
| 2095 return object; | 2157 return object; |
| 2096 } | 2158 } |
| 2097 | 2159 |
| 2098 void CodeStubAssembler::InitializeJSObjectFromMap(Node* object, Node* map, | 2160 void CodeStubAssembler::InitializeJSObjectFromMap(Node* object, Node* map, |
| 2099 Node* size, Node* properties, | 2161 Node* size, Node* properties, |
| 2100 Node* elements) { | 2162 Node* elements) { |
| 2163 CSA_SLOW_ASSERT(this, IsMap(map)); | |
| 2101 // This helper assumes that the object is in new-space, as guarded by the | 2164 // This helper assumes that the object is in new-space, as guarded by the |
| 2102 // check in AllocatedJSObjectFromMap. | 2165 // check in AllocatedJSObjectFromMap. |
| 2103 if (properties == nullptr) { | 2166 if (properties == nullptr) { |
| 2104 CSA_ASSERT(this, Word32BinaryNot(IsDictionaryMap((map)))); | 2167 CSA_ASSERT(this, Word32BinaryNot(IsDictionaryMap((map)))); |
| 2105 StoreObjectFieldRoot(object, JSObject::kPropertiesOffset, | 2168 StoreObjectFieldRoot(object, JSObject::kPropertiesOffset, |
| 2106 Heap::kEmptyFixedArrayRootIndex); | 2169 Heap::kEmptyFixedArrayRootIndex); |
| 2107 } else { | 2170 } else { |
| 2171 CSA_ASSERT(this, IsFixedArray(properties)); | |
| 2108 StoreObjectFieldNoWriteBarrier(object, JSObject::kPropertiesOffset, | 2172 StoreObjectFieldNoWriteBarrier(object, JSObject::kPropertiesOffset, |
| 2109 properties); | 2173 properties); |
| 2110 } | 2174 } |
| 2111 if (elements == nullptr) { | 2175 if (elements == nullptr) { |
| 2112 StoreObjectFieldRoot(object, JSObject::kElementsOffset, | 2176 StoreObjectFieldRoot(object, JSObject::kElementsOffset, |
| 2113 Heap::kEmptyFixedArrayRootIndex); | 2177 Heap::kEmptyFixedArrayRootIndex); |
| 2114 } else { | 2178 } else { |
| 2179 CSA_ASSERT(this, IsFixedArray(elements)); | |
| 2115 StoreObjectFieldNoWriteBarrier(object, JSObject::kElementsOffset, elements); | 2180 StoreObjectFieldNoWriteBarrier(object, JSObject::kElementsOffset, elements); |
| 2116 } | 2181 } |
| 2117 InitializeJSObjectBody(object, map, size, JSObject::kHeaderSize); | 2182 InitializeJSObjectBody(object, map, size, JSObject::kHeaderSize); |
| 2118 } | 2183 } |
| 2119 | 2184 |
| 2120 void CodeStubAssembler::InitializeJSObjectBody(Node* object, Node* map, | 2185 void CodeStubAssembler::InitializeJSObjectBody(Node* object, Node* map, |
| 2121 Node* size, int start_offset) { | 2186 Node* size, int start_offset) { |
| 2187 CSA_SLOW_ASSERT(this, IsMap(map)); | |
| 2122 // TODO(cbruni): activate in-object slack tracking machinery. | 2188 // TODO(cbruni): activate in-object slack tracking machinery. |
| 2123 Comment("InitializeJSObjectBody"); | 2189 Comment("InitializeJSObjectBody"); |
| 2124 Node* filler = LoadRoot(Heap::kUndefinedValueRootIndex); | 2190 Node* filler = LoadRoot(Heap::kUndefinedValueRootIndex); |
| 2125 // Calculate the untagged field addresses. | 2191 // Calculate the untagged field addresses. |
| 2126 object = BitcastTaggedToWord(object); | 2192 object = BitcastTaggedToWord(object); |
| 2127 Node* start_address = | 2193 Node* start_address = |
| 2128 IntPtrAdd(object, IntPtrConstant(start_offset - kHeapObjectTag)); | 2194 IntPtrAdd(object, IntPtrConstant(start_offset - kHeapObjectTag)); |
| 2129 Node* end_address = | 2195 Node* end_address = |
| 2130 IntPtrSub(IntPtrAdd(object, size), IntPtrConstant(kHeapObjectTag)); | 2196 IntPtrSub(IntPtrAdd(object, size), IntPtrConstant(kHeapObjectTag)); |
| 2131 StoreFieldsNoWriteBarrier(start_address, end_address, filler); | 2197 StoreFieldsNoWriteBarrier(start_address, end_address, filler); |
| 2132 } | 2198 } |
| 2133 | 2199 |
| 2134 void CodeStubAssembler::StoreFieldsNoWriteBarrier(Node* start_address, | 2200 void CodeStubAssembler::StoreFieldsNoWriteBarrier(Node* start_address, |
| 2135 Node* end_address, | 2201 Node* end_address, |
| 2136 Node* value) { | 2202 Node* value) { |
| 2137 Comment("StoreFieldsNoWriteBarrier"); | 2203 Comment("StoreFieldsNoWriteBarrier"); |
| 2138 CSA_ASSERT(this, WordIsWordAligned(start_address)); | 2204 CSA_ASSERT(this, WordIsWordAligned(start_address)); |
| 2139 CSA_ASSERT(this, WordIsWordAligned(end_address)); | 2205 CSA_ASSERT(this, WordIsWordAligned(end_address)); |
| 2140 BuildFastLoop(start_address, end_address, | 2206 BuildFastLoop(start_address, end_address, |
| 2141 [this, value](Node* current) { | 2207 [this, value](Node* current) { |
| 2142 StoreNoWriteBarrier(MachineRepresentation::kTagged, current, | 2208 StoreNoWriteBarrier(MachineRepresentation::kTagged, current, |
| 2143 value); | 2209 value); |
| 2144 }, | 2210 }, |
| 2145 kPointerSize, INTPTR_PARAMETERS, IndexAdvanceMode::kPost); | 2211 kPointerSize, INTPTR_PARAMETERS, IndexAdvanceMode::kPost); |
| 2146 } | 2212 } |
| 2147 | 2213 |
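The store loop above fills every pointer-sized slot in [start_address, end_address) with value; a minimal host-side equivalent, as an illustration only, with plain pointers in place of CSA nodes:

```cpp
#include <cstdint>

// Hedged illustration of StoreFieldsNoWriteBarrier: write `value` into every
// pointer-sized slot from start (inclusive) to end (exclusive), advancing one
// word per iteration, with no write barrier.
void StoreFields(uintptr_t* start, uintptr_t* end, uintptr_t value) {
  for (uintptr_t* current = start; current < end; ++current) {
    *current = value;
  }
}
```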
| 2148 Node* CodeStubAssembler::AllocateUninitializedJSArrayWithoutElements( | 2214 Node* CodeStubAssembler::AllocateUninitializedJSArrayWithoutElements( |
| 2149 ElementsKind kind, Node* array_map, Node* length, Node* allocation_site) { | 2215 ElementsKind kind, Node* array_map, Node* length, Node* allocation_site) { |
| 2150 Comment("begin allocation of JSArray without elements"); | 2216 Comment("begin allocation of JSArray without elements"); |
| 2217 CSA_SLOW_ASSERT(this, TaggedIsPositiveSmi(length)); | |
| 2218 CSA_SLOW_ASSERT(this, IsMap(array_map)); | |
| 2151 int base_size = JSArray::kSize; | 2219 int base_size = JSArray::kSize; |
| 2152 if (allocation_site != nullptr) { | 2220 if (allocation_site != nullptr) { |
| 2153 base_size += AllocationMemento::kSize; | 2221 base_size += AllocationMemento::kSize; |
| 2154 } | 2222 } |
| 2155 | 2223 |
| 2156 Node* size = IntPtrConstant(base_size); | 2224 Node* size = IntPtrConstant(base_size); |
| 2157 Node* array = AllocateUninitializedJSArray(kind, array_map, length, | 2225 Node* array = AllocateUninitializedJSArray(kind, array_map, length, |
| 2158 allocation_site, size); | 2226 allocation_site, size); |
| 2159 return array; | 2227 return array; |
| 2160 } | 2228 } |
| 2161 | 2229 |
| 2162 std::pair<Node*, Node*> | 2230 std::pair<Node*, Node*> |
| 2163 CodeStubAssembler::AllocateUninitializedJSArrayWithElements( | 2231 CodeStubAssembler::AllocateUninitializedJSArrayWithElements( |
| 2164 ElementsKind kind, Node* array_map, Node* length, Node* allocation_site, | 2232 ElementsKind kind, Node* array_map, Node* length, Node* allocation_site, |
| 2165 Node* capacity, ParameterMode capacity_mode) { | 2233 Node* capacity, ParameterMode capacity_mode) { |
| 2166 Comment("begin allocation of JSArray with elements"); | 2234 Comment("begin allocation of JSArray with elements"); |
| 2235 CSA_SLOW_ASSERT(this, TaggedIsPositiveSmi(length)); | |
| 2236 CSA_SLOW_ASSERT(this, IsMap(array_map)); | |
| 2167 int base_size = JSArray::kSize; | 2237 int base_size = JSArray::kSize; |
| 2168 | 2238 |
| 2169 if (allocation_site != nullptr) { | 2239 if (allocation_site != nullptr) { |
| 2170 base_size += AllocationMemento::kSize; | 2240 base_size += AllocationMemento::kSize; |
| 2171 } | 2241 } |
| 2172 | 2242 |
| 2173 int elements_offset = base_size; | 2243 int elements_offset = base_size; |
| 2174 | 2244 |
| 2175 // Compute space for elements | 2245 // Compute space for elements |
| 2176 base_size += FixedArray::kHeaderSize; | 2246 base_size += FixedArray::kHeaderSize; |
| 2177 Node* size = ElementOffsetFromIndex(capacity, kind, capacity_mode, base_size); | 2247 Node* size = ElementOffsetFromIndex(capacity, kind, capacity_mode, base_size); |
| 2178 | 2248 |
| 2179 Node* array = AllocateUninitializedJSArray(kind, array_map, length, | 2249 Node* array = AllocateUninitializedJSArray(kind, array_map, length, |
| 2180 allocation_site, size); | 2250 allocation_site, size); |
| 2181 | 2251 |
| 2182 Node* elements = InnerAllocate(array, elements_offset); | 2252 Node* elements = InnerAllocate(array, elements_offset); |
| 2183 StoreObjectFieldNoWriteBarrier(array, JSObject::kElementsOffset, elements); | 2253 StoreObjectFieldNoWriteBarrier(array, JSObject::kElementsOffset, elements); |
| 2184 | 2254 |
| 2185 return {array, elements}; | 2255 return {array, elements}; |
| 2186 } | 2256 } |
| 2187 | 2257 |
| 2188 Node* CodeStubAssembler::AllocateUninitializedJSArray(ElementsKind kind, | 2258 Node* CodeStubAssembler::AllocateUninitializedJSArray(ElementsKind kind, |
| 2189 Node* array_map, | 2259 Node* array_map, |
| 2190 Node* length, | 2260 Node* length, |
| 2191 Node* allocation_site, | 2261 Node* allocation_site, |
| 2192 Node* size_in_bytes) { | 2262 Node* size_in_bytes) { |
| 2263 CSA_SLOW_ASSERT(this, TaggedIsPositiveSmi(length)); | |
| 2264 CSA_SLOW_ASSERT(this, IsMap(array_map)); | |
| 2265 | |
| 2193 // Allocate space for the JSArray and the elements FixedArray in one go. | 2266 // Allocate space for the JSArray and the elements FixedArray in one go. |
| 2194 Node* array = AllocateInNewSpace(size_in_bytes); | 2267 Node* array = AllocateInNewSpace(size_in_bytes); |
| 2195 | 2268 |
| 2196 Comment("write JSArray headers"); | 2269 Comment("write JSArray headers"); |
| 2197 StoreMapNoWriteBarrier(array, array_map); | 2270 StoreMapNoWriteBarrier(array, array_map); |
| 2198 | 2271 |
| 2199 CSA_ASSERT(this, TaggedIsSmi(length)); | |
| 2200 StoreObjectFieldNoWriteBarrier(array, JSArray::kLengthOffset, length); | 2272 StoreObjectFieldNoWriteBarrier(array, JSArray::kLengthOffset, length); |
| 2201 | 2273 |
| 2202 StoreObjectFieldRoot(array, JSArray::kPropertiesOffset, | 2274 StoreObjectFieldRoot(array, JSArray::kPropertiesOffset, |
| 2203 Heap::kEmptyFixedArrayRootIndex); | 2275 Heap::kEmptyFixedArrayRootIndex); |
| 2204 | 2276 |
| 2205 if (allocation_site != nullptr) { | 2277 if (allocation_site != nullptr) { |
| 2206 InitializeAllocationMemento(array, JSArray::kSize, allocation_site); | 2278 InitializeAllocationMemento(array, JSArray::kSize, allocation_site); |
| 2207 } | 2279 } |
| 2208 return array; | 2280 return array; |
| 2209 } | 2281 } |
| 2210 | 2282 |
| 2211 Node* CodeStubAssembler::AllocateJSArray(ElementsKind kind, Node* array_map, | 2283 Node* CodeStubAssembler::AllocateJSArray(ElementsKind kind, Node* array_map, |
| 2212 Node* capacity, Node* length, | 2284 Node* capacity, Node* length, |
| 2213 Node* allocation_site, | 2285 Node* allocation_site, |
| 2214 ParameterMode capacity_mode) { | 2286 ParameterMode capacity_mode) { |
| 2287 CSA_SLOW_ASSERT(this, IsMap(array_map)); | |
| 2288 CSA_SLOW_ASSERT(this, TaggedIsPositiveSmi(length)); | |
| 2289 CSA_SLOW_ASSERT(this, MatchesParameterMode(capacity, capacity_mode)); | |
| 2290 | |
| 2215 Node *array = nullptr, *elements = nullptr; | 2291 Node *array = nullptr, *elements = nullptr; |
| 2216 if (IsIntPtrOrSmiConstantZero(capacity)) { | 2292 if (IsIntPtrOrSmiConstantZero(capacity)) { |
| 2217 // Array is empty. Use the shared empty fixed array instead of allocating a | 2293 // Array is empty. Use the shared empty fixed array instead of allocating a |
| 2218 // new one. | 2294 // new one. |
| 2219 array = AllocateUninitializedJSArrayWithoutElements(kind, array_map, length, | 2295 array = AllocateUninitializedJSArrayWithoutElements(kind, array_map, length, |
| 2220 nullptr); | 2296 nullptr); |
| 2221 StoreObjectFieldRoot(array, JSArray::kElementsOffset, | 2297 StoreObjectFieldRoot(array, JSArray::kElementsOffset, |
| 2222 Heap::kEmptyFixedArrayRootIndex); | 2298 Heap::kEmptyFixedArrayRootIndex); |
| 2223 } else { | 2299 } else { |
| 2224 // Allocate both array and elements object, and initialize the JSArray. | 2300 // Allocate both array and elements object, and initialize the JSArray. |
| (...skipping 13 matching lines...) | |
| 2238 Heap::kTheHoleValueRootIndex, capacity_mode); | 2314 Heap::kTheHoleValueRootIndex, capacity_mode); |
| 2239 } | 2315 } |
| 2240 | 2316 |
| 2241 return array; | 2317 return array; |
| 2242 } | 2318 } |
| 2243 | 2319 |
| 2244 Node* CodeStubAssembler::AllocateFixedArray(ElementsKind kind, | 2320 Node* CodeStubAssembler::AllocateFixedArray(ElementsKind kind, |
| 2245 Node* capacity_node, | 2321 Node* capacity_node, |
| 2246 ParameterMode mode, | 2322 ParameterMode mode, |
| 2247 AllocationFlags flags) { | 2323 AllocationFlags flags) { |
| 2324 CSA_SLOW_ASSERT(this, MatchesParameterMode(capacity_node, mode)); | |
| 2248 CSA_ASSERT(this, IntPtrOrSmiGreaterThan(capacity_node, | 2325 CSA_ASSERT(this, IntPtrOrSmiGreaterThan(capacity_node, |
| 2249 IntPtrOrSmiConstant(0, mode), mode)); | 2326 IntPtrOrSmiConstant(0, mode), mode)); |
| 2250 Node* total_size = GetFixedArrayAllocationSize(capacity_node, kind, mode); | 2327 Node* total_size = GetFixedArrayAllocationSize(capacity_node, kind, mode); |
| 2251 | 2328 |
| 2252 // Allocate both array and elements object, and initialize the JSArray. | 2329 // Allocate both array and elements object, and initialize the JSArray. |
| 2253 Node* array = Allocate(total_size, flags); | 2330 Node* array = Allocate(total_size, flags); |
| 2254 Heap::RootListIndex map_index = IsFastDoubleElementsKind(kind) | 2331 Heap::RootListIndex map_index = IsFastDoubleElementsKind(kind) |
| 2255 ? Heap::kFixedDoubleArrayMapRootIndex | 2332 ? Heap::kFixedDoubleArrayMapRootIndex |
| 2256 : Heap::kFixedArrayMapRootIndex; | 2333 : Heap::kFixedArrayMapRootIndex; |
| 2257 DCHECK(Heap::RootIsImmortalImmovable(map_index)); | 2334 DCHECK(Heap::RootIsImmortalImmovable(map_index)); |
| 2258 StoreMapNoWriteBarrier(array, map_index); | 2335 StoreMapNoWriteBarrier(array, map_index); |
| 2259 StoreObjectFieldNoWriteBarrier(array, FixedArray::kLengthOffset, | 2336 StoreObjectFieldNoWriteBarrier(array, FixedArray::kLengthOffset, |
| 2260 ParameterToTagged(capacity_node, mode)); | 2337 ParameterToTagged(capacity_node, mode)); |
| 2261 return array; | 2338 return array; |
| 2262 } | 2339 } |
| 2263 | 2340 |
| 2264 void CodeStubAssembler::FillFixedArrayWithValue( | 2341 void CodeStubAssembler::FillFixedArrayWithValue( |
| 2265 ElementsKind kind, Node* array, Node* from_node, Node* to_node, | 2342 ElementsKind kind, Node* array, Node* from_node, Node* to_node, |
| 2266 Heap::RootListIndex value_root_index, ParameterMode mode) { | 2343 Heap::RootListIndex value_root_index, ParameterMode mode) { |
| 2344 CSA_SLOW_ASSERT(this, MatchesParameterMode(from_node, mode)); | |
| 2345 CSA_SLOW_ASSERT(this, MatchesParameterMode(to_node, mode)); | |
| 2346 CSA_SLOW_ASSERT(this, IsFixedArrayWithKind(array, kind)); | |
| 2267 bool is_double = IsFastDoubleElementsKind(kind); | 2347 bool is_double = IsFastDoubleElementsKind(kind); |
| 2268 DCHECK(value_root_index == Heap::kTheHoleValueRootIndex || | 2348 DCHECK(value_root_index == Heap::kTheHoleValueRootIndex || |
| 2269 value_root_index == Heap::kUndefinedValueRootIndex); | 2349 value_root_index == Heap::kUndefinedValueRootIndex); |
| 2270 DCHECK_IMPLIES(is_double, value_root_index == Heap::kTheHoleValueRootIndex); | 2350 DCHECK_IMPLIES(is_double, value_root_index == Heap::kTheHoleValueRootIndex); |
| 2271 STATIC_ASSERT(kHoleNanLower32 == kHoleNanUpper32); | 2351 STATIC_ASSERT(kHoleNanLower32 == kHoleNanUpper32); |
| 2272 Node* double_hole = | 2352 Node* double_hole = |
| 2273 Is64() ? Int64Constant(kHoleNanInt64) : Int32Constant(kHoleNanLower32); | 2353 Is64() ? Int64Constant(kHoleNanInt64) : Int32Constant(kHoleNanLower32); |
| 2274 Node* value = LoadRoot(value_root_index); | 2354 Node* value = LoadRoot(value_root_index); |
| 2275 | 2355 |
| 2276 BuildFastFixedArrayForEach( | 2356 BuildFastFixedArrayForEach( |
| (...skipping 23 matching lines...) | |
| 2300 value); | 2380 value); |
| 2301 } | 2381 } |
| 2302 }, | 2382 }, |
| 2303 mode); | 2383 mode); |
| 2304 } | 2384 } |
| 2305 | 2385 |
| 2306 void CodeStubAssembler::CopyFixedArrayElements( | 2386 void CodeStubAssembler::CopyFixedArrayElements( |
| 2307 ElementsKind from_kind, Node* from_array, ElementsKind to_kind, | 2387 ElementsKind from_kind, Node* from_array, ElementsKind to_kind, |
| 2308 Node* to_array, Node* element_count, Node* capacity, | 2388 Node* to_array, Node* element_count, Node* capacity, |
| 2309 WriteBarrierMode barrier_mode, ParameterMode mode) { | 2389 WriteBarrierMode barrier_mode, ParameterMode mode) { |
| 2390 CSA_SLOW_ASSERT(this, MatchesParameterMode(element_count, mode)); | |
| 2391 CSA_SLOW_ASSERT(this, MatchesParameterMode(capacity, mode)); | |
| 2392 CSA_SLOW_ASSERT(this, IsFixedArrayWithKind(from_array, from_kind)); | |
| 2393 CSA_SLOW_ASSERT(this, IsFixedArrayWithKind(to_array, to_kind)); | |
| 2310 STATIC_ASSERT(FixedArray::kHeaderSize == FixedDoubleArray::kHeaderSize); | 2394 STATIC_ASSERT(FixedArray::kHeaderSize == FixedDoubleArray::kHeaderSize); |
| 2311 const int first_element_offset = FixedArray::kHeaderSize - kHeapObjectTag; | 2395 const int first_element_offset = FixedArray::kHeaderSize - kHeapObjectTag; |
| 2312 Comment("[ CopyFixedArrayElements"); | 2396 Comment("[ CopyFixedArrayElements"); |
| 2313 | 2397 |
| 2314 // Typed array elements are not supported. | 2398 // Typed array elements are not supported. |
| 2315 DCHECK(!IsFixedTypedArrayElementsKind(from_kind)); | 2399 DCHECK(!IsFixedTypedArrayElementsKind(from_kind)); |
| 2316 DCHECK(!IsFixedTypedArrayElementsKind(to_kind)); | 2400 DCHECK(!IsFixedTypedArrayElementsKind(to_kind)); |
| 2317 | 2401 |
| 2318 Label done(this); | 2402 Label done(this); |
| 2319 bool from_double_elements = IsFastDoubleElementsKind(from_kind); | 2403 bool from_double_elements = IsFastDoubleElementsKind(from_kind); |
| (...skipping 116 matching lines...) | |
| 2436 IncrementCounter(isolate()->counters()->inlined_copied_elements(), 1); | 2520 IncrementCounter(isolate()->counters()->inlined_copied_elements(), 1); |
| 2437 Comment("] CopyFixedArrayElements"); | 2521 Comment("] CopyFixedArrayElements"); |
| 2438 } | 2522 } |
| 2439 | 2523 |
| 2440 void CodeStubAssembler::CopyStringCharacters(Node* from_string, Node* to_string, | 2524 void CodeStubAssembler::CopyStringCharacters(Node* from_string, Node* to_string, |
| 2441 Node* from_index, Node* to_index, | 2525 Node* from_index, Node* to_index, |
| 2442 Node* character_count, | 2526 Node* character_count, |
| 2443 String::Encoding from_encoding, | 2527 String::Encoding from_encoding, |
| 2444 String::Encoding to_encoding, | 2528 String::Encoding to_encoding, |
| 2445 ParameterMode mode) { | 2529 ParameterMode mode) { |
| 2530 CSA_SLOW_ASSERT(this, IsString(from_string)); | |
| 2531 CSA_SLOW_ASSERT(this, IsString(to_string)); | |
| 2532 CSA_SLOW_ASSERT(this, MatchesParameterMode(character_count, mode)); | |
| 2533 CSA_SLOW_ASSERT(this, MatchesParameterMode(from_index, mode)); | |
| 2534 CSA_SLOW_ASSERT(this, MatchesParameterMode(to_index, mode)); | |
| 2446 bool from_one_byte = from_encoding == String::ONE_BYTE_ENCODING; | 2535 bool from_one_byte = from_encoding == String::ONE_BYTE_ENCODING; |
| 2447 bool to_one_byte = to_encoding == String::ONE_BYTE_ENCODING; | 2536 bool to_one_byte = to_encoding == String::ONE_BYTE_ENCODING; |
| 2448 DCHECK_IMPLIES(to_one_byte, from_one_byte); | 2537 DCHECK_IMPLIES(to_one_byte, from_one_byte); |
| 2449 Comment("CopyStringCharacters %s -> %s", | 2538 Comment("CopyStringCharacters %s -> %s", |
| 2450 from_one_byte ? "ONE_BYTE_ENCODING" : "TWO_BYTE_ENCODING", | 2539 from_one_byte ? "ONE_BYTE_ENCODING" : "TWO_BYTE_ENCODING", |
| 2451 to_one_byte ? "ONE_BYTE_ENCODING" : "TWO_BYTE_ENCODING"); | 2540 to_one_byte ? "ONE_BYTE_ENCODING" : "TWO_BYTE_ENCODING"); |
| 2452 | 2541 |
| 2453 ElementsKind from_kind = from_one_byte ? UINT8_ELEMENTS : UINT16_ELEMENTS; | 2542 ElementsKind from_kind = from_one_byte ? UINT8_ELEMENTS : UINT16_ELEMENTS; |
| 2454 ElementsKind to_kind = to_one_byte ? UINT8_ELEMENTS : UINT16_ELEMENTS; | 2543 ElementsKind to_kind = to_one_byte ? UINT8_ELEMENTS : UINT16_ELEMENTS; |
| 2455 STATIC_ASSERT(SeqOneByteString::kHeaderSize == SeqTwoByteString::kHeaderSize); | 2544 STATIC_ASSERT(SeqOneByteString::kHeaderSize == SeqTwoByteString::kHeaderSize); |
| (...skipping 38 matching lines...) | |
| 2494 } | 2583 } |
| 2495 }, | 2584 }, |
| 2496 from_increment, INTPTR_PARAMETERS, IndexAdvanceMode::kPost); | 2585 from_increment, INTPTR_PARAMETERS, IndexAdvanceMode::kPost); |
| 2497 } | 2586 } |
| 2498 | 2587 |
| 2499 Node* CodeStubAssembler::LoadElementAndPrepareForStore(Node* array, | 2588 Node* CodeStubAssembler::LoadElementAndPrepareForStore(Node* array, |
| 2500 Node* offset, | 2589 Node* offset, |
| 2501 ElementsKind from_kind, | 2590 ElementsKind from_kind, |
| 2502 ElementsKind to_kind, | 2591 ElementsKind to_kind, |
| 2503 Label* if_hole) { | 2592 Label* if_hole) { |
| 2593 CSA_SLOW_ASSERT(this, IsFixedArrayWithKind(array, from_kind)); | |
| 2504 if (IsFastDoubleElementsKind(from_kind)) { | 2594 if (IsFastDoubleElementsKind(from_kind)) { |
| 2505 Node* value = | 2595 Node* value = |
| 2506 LoadDoubleWithHoleCheck(array, offset, if_hole, MachineType::Float64()); | 2596 LoadDoubleWithHoleCheck(array, offset, if_hole, MachineType::Float64()); |
| 2507 if (!IsFastDoubleElementsKind(to_kind)) { | 2597 if (!IsFastDoubleElementsKind(to_kind)) { |
| 2508 value = AllocateHeapNumberWithValue(value); | 2598 value = AllocateHeapNumberWithValue(value); |
| 2509 } | 2599 } |
| 2510 return value; | 2600 return value; |
| 2511 | 2601 |
| 2512 } else { | 2602 } else { |
| 2513 Node* value = Load(MachineType::AnyTagged(), array, offset); | 2603 Node* value = Load(MachineType::AnyTagged(), array, offset); |
| 2514 if (if_hole) { | 2604 if (if_hole) { |
| 2515 GotoIf(WordEqual(value, TheHoleConstant()), if_hole); | 2605 GotoIf(WordEqual(value, TheHoleConstant()), if_hole); |
| 2516 } | 2606 } |
| 2517 if (IsFastDoubleElementsKind(to_kind)) { | 2607 if (IsFastDoubleElementsKind(to_kind)) { |
| 2518 if (IsFastSmiElementsKind(from_kind)) { | 2608 if (IsFastSmiElementsKind(from_kind)) { |
| 2519 value = SmiToFloat64(value); | 2609 value = SmiToFloat64(value); |
| 2520 } else { | 2610 } else { |
| 2521 value = LoadHeapNumberValue(value); | 2611 value = LoadHeapNumberValue(value); |
| 2522 } | 2612 } |
| 2523 } | 2613 } |
| 2524 return value; | 2614 return value; |
| 2525 } | 2615 } |
| 2526 } | 2616 } |
| 2527 | 2617 |
| 2528 Node* CodeStubAssembler::CalculateNewElementsCapacity(Node* old_capacity, | 2618 Node* CodeStubAssembler::CalculateNewElementsCapacity(Node* old_capacity, |
| 2529 ParameterMode mode) { | 2619 ParameterMode mode) { |
| 2620 CSA_SLOW_ASSERT(this, MatchesParameterMode(old_capacity, mode)); | |
| 2530 Node* half_old_capacity = WordOrSmiShr(old_capacity, 1, mode); | 2621 Node* half_old_capacity = WordOrSmiShr(old_capacity, 1, mode); |
| 2531 Node* new_capacity = IntPtrOrSmiAdd(half_old_capacity, old_capacity, mode); | 2622 Node* new_capacity = IntPtrOrSmiAdd(half_old_capacity, old_capacity, mode); |
| 2532 Node* padding = IntPtrOrSmiConstant(16, mode); | 2623 Node* padding = IntPtrOrSmiConstant(16, mode); |
| 2533 return IntPtrOrSmiAdd(new_capacity, padding, mode); | 2624 return IntPtrOrSmiAdd(new_capacity, padding, mode); |
| 2534 } | 2625 } |
| 2535 | 2626 |
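The growth policy above works out to new_capacity = old_capacity + old_capacity / 2 + 16; a minimal constexpr restatement with a few spot checks, as an illustration only:

```cpp
// Hedged illustration of CalculateNewElementsCapacity: grow by roughly 1.5x
// plus a fixed padding of 16 elements.
constexpr int NewElementsCapacity(int old_capacity) {
  return old_capacity + (old_capacity >> 1) + 16;
}
static_assert(NewElementsCapacity(0) == 16, "empty store grows to 16");
static_assert(NewElementsCapacity(16) == 40, "16 -> 16 + 8 + 16");
static_assert(NewElementsCapacity(100) == 166, "100 -> 100 + 50 + 16");
```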
| 2536 Node* CodeStubAssembler::TryGrowElementsCapacity(Node* object, Node* elements, | 2627 Node* CodeStubAssembler::TryGrowElementsCapacity(Node* object, Node* elements, |
| 2537 ElementsKind kind, Node* key, | 2628 ElementsKind kind, Node* key, |
| 2538 Label* bailout) { | 2629 Label* bailout) { |
| 2630 CSA_SLOW_ASSERT(this, TaggedIsNotSmi(object)); | |
| 2631 CSA_SLOW_ASSERT(this, IsFixedArrayWithKind(elements, kind)); | |
| 2632 CSA_SLOW_ASSERT(this, TaggedIsSmi(key)); | |
| 2539 Node* capacity = LoadFixedArrayBaseLength(elements); | 2633 Node* capacity = LoadFixedArrayBaseLength(elements); |
| 2540 | 2634 |
| 2541 ParameterMode mode = OptimalParameterMode(); | 2635 ParameterMode mode = OptimalParameterMode(); |
| 2542 capacity = TaggedToParameter(capacity, mode); | 2636 capacity = TaggedToParameter(capacity, mode); |
| 2543 key = TaggedToParameter(key, mode); | 2637 key = TaggedToParameter(key, mode); |
| 2544 | 2638 |
| 2545 return TryGrowElementsCapacity(object, elements, kind, key, capacity, mode, | 2639 return TryGrowElementsCapacity(object, elements, kind, key, capacity, mode, |
| 2546 bailout); | 2640 bailout); |
| 2547 } | 2641 } |
| 2548 | 2642 |
| 2549 Node* CodeStubAssembler::TryGrowElementsCapacity(Node* object, Node* elements, | 2643 Node* CodeStubAssembler::TryGrowElementsCapacity(Node* object, Node* elements, |
| 2550 ElementsKind kind, Node* key, | 2644 ElementsKind kind, Node* key, |
| 2551 Node* capacity, | 2645 Node* capacity, |
| 2552 ParameterMode mode, | 2646 ParameterMode mode, |
| 2553 Label* bailout) { | 2647 Label* bailout) { |
| 2554 Comment("TryGrowElementsCapacity"); | 2648 Comment("TryGrowElementsCapacity"); |
| 2649 CSA_SLOW_ASSERT(this, TaggedIsNotSmi(object)); | |
| 2650 CSA_SLOW_ASSERT(this, IsFixedArrayWithKind(elements, kind)); | |
| 2651 CSA_SLOW_ASSERT(this, MatchesParameterMode(capacity, mode)); | |
| 2652 CSA_SLOW_ASSERT(this, MatchesParameterMode(key, mode)); | |
| 2555 | 2653 |
| 2556 // If the gap growth is too big, fall back to the runtime. | 2654 // If the gap growth is too big, fall back to the runtime. |
| 2557 Node* max_gap = IntPtrOrSmiConstant(JSObject::kMaxGap, mode); | 2655 Node* max_gap = IntPtrOrSmiConstant(JSObject::kMaxGap, mode); |
| 2558 Node* max_capacity = IntPtrOrSmiAdd(capacity, max_gap, mode); | 2656 Node* max_capacity = IntPtrOrSmiAdd(capacity, max_gap, mode); |
| 2559 GotoIf(UintPtrOrSmiGreaterThanOrEqual(key, max_capacity, mode), bailout); | 2657 GotoIf(UintPtrOrSmiGreaterThanOrEqual(key, max_capacity, mode), bailout); |
| 2560 | 2658 |
| 2561 // Calculate the capacity of the new backing store. | 2659 // Calculate the capacity of the new backing store. |
| 2562 Node* new_capacity = CalculateNewElementsCapacity( | 2660 Node* new_capacity = CalculateNewElementsCapacity( |
| 2563 IntPtrOrSmiAdd(key, IntPtrOrSmiConstant(1, mode), mode), mode); | 2661 IntPtrOrSmiAdd(key, IntPtrOrSmiConstant(1, mode), mode), mode); |
| 2564 return GrowElementsCapacity(object, elements, kind, kind, capacity, | 2662 return GrowElementsCapacity(object, elements, kind, kind, capacity, |
| 2565 new_capacity, mode, bailout); | 2663 new_capacity, mode, bailout); |
| 2566 } | 2664 } |
| 2567 | 2665 |
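Combining the gap check with the growth formula, the policy of TryGrowElementsCapacity above amounts to the following sketch; this is a hypothetical host-side helper with plain ints standing in for the Smi/IntPtr parameters:

```cpp
// Hedged illustration: bail out (return false) when storing at `key` would
// leave a gap of at least `max_gap` past the current capacity; otherwise
// compute the grown capacity for a backing store that must hold index `key`.
bool TryComputeGrownCapacity(int key, int capacity, int max_gap,
                             int* new_capacity) {
  if (key >= capacity + max_gap) return false;  // too sparse -> runtime bailout
  int needed = key + 1;
  *new_capacity = needed + (needed >> 1) + 16;  // same growth policy as above
  return true;
}
```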
| 2568 Node* CodeStubAssembler::GrowElementsCapacity( | 2666 Node* CodeStubAssembler::GrowElementsCapacity( |
| 2569 Node* object, Node* elements, ElementsKind from_kind, ElementsKind to_kind, | 2667 Node* object, Node* elements, ElementsKind from_kind, ElementsKind to_kind, |
| 2570 Node* capacity, Node* new_capacity, ParameterMode mode, Label* bailout) { | 2668 Node* capacity, Node* new_capacity, ParameterMode mode, Label* bailout) { |
| 2571 Comment("[ GrowElementsCapacity"); | 2669 Comment("[ GrowElementsCapacity"); |
| 2670 CSA_SLOW_ASSERT(this, TaggedIsNotSmi(object)); | |
| 2671 CSA_SLOW_ASSERT(this, IsFixedArrayWithKind(elements, from_kind)); | |
| 2672 CSA_SLOW_ASSERT(this, MatchesParameterMode(capacity, mode)); | |
| 2673 CSA_SLOW_ASSERT(this, MatchesParameterMode(new_capacity, mode)); | |
| 2674 | |
| 2572 // If size of the allocation for the new capacity doesn't fit in a page | 2675 // If size of the allocation for the new capacity doesn't fit in a page |
| 2573 // that we can bump-pointer allocate from, fall back to the runtime. | 2676 // that we can bump-pointer allocate from, fall back to the runtime. |
| 2574 int max_size = FixedArrayBase::GetMaxLengthForNewSpaceAllocation(to_kind); | 2677 int max_size = FixedArrayBase::GetMaxLengthForNewSpaceAllocation(to_kind); |
| 2575 GotoIf(UintPtrOrSmiGreaterThanOrEqual( | 2678 GotoIf(UintPtrOrSmiGreaterThanOrEqual( |
| 2576 new_capacity, IntPtrOrSmiConstant(max_size, mode), mode), | 2679 new_capacity, IntPtrOrSmiConstant(max_size, mode), mode), |
| 2577 bailout); | 2680 bailout); |
| 2578 | 2681 |
| 2579 // Allocate the new backing store. | 2682 // Allocate the new backing store. |
| 2580 Node* new_elements = AllocateFixedArray(to_kind, new_capacity, mode); | 2683 Node* new_elements = AllocateFixedArray(to_kind, new_capacity, mode); |
| 2581 | 2684 |
| (...skipping 188 matching lines...) | |
| 2770 } | 2873 } |
| 2771 } | 2874 } |
| 2772 } | 2875 } |
| 2773 BIND(&if_valueisheapnumber); | 2876 BIND(&if_valueisheapnumber); |
| 2774 { | 2877 { |
| 2775 Node* result = AllocateHeapNumberWithValue(value); | 2878 Node* result = AllocateHeapNumberWithValue(value); |
| 2776 var_result.Bind(result); | 2879 var_result.Bind(result); |
| 2777 Goto(&if_join); | 2880 Goto(&if_join); |
| 2778 } | 2881 } |
| 2779 BIND(&if_join); | 2882 BIND(&if_join); |
| 2883 CSA_SLOW_ASSERT(this, IsNumber(var_result.value())); | |
| 2780 return var_result.value(); | 2884 return var_result.value(); |
| 2781 } | 2885 } |
| 2782 | 2886 |
| 2783 Node* CodeStubAssembler::ChangeInt32ToTagged(Node* value) { | 2887 Node* CodeStubAssembler::ChangeInt32ToTagged(Node* value) { |
| 2784 if (Is64()) { | 2888 if (Is64()) { |
| 2785 return SmiTag(ChangeInt32ToInt64(value)); | 2889 return SmiTag(ChangeInt32ToInt64(value)); |
| 2786 } | 2890 } |
| 2787 VARIABLE(var_result, MachineRepresentation::kTagged); | 2891 VARIABLE(var_result, MachineRepresentation::kTagged); |
| 2788 Node* pair = Int32AddWithOverflow(value, value); | 2892 Node* pair = Int32AddWithOverflow(value, value); |
| 2789 Node* overflow = Projection(1, pair); | 2893 Node* overflow = Projection(1, pair); |
| 2790 Label if_overflow(this, Label::kDeferred), if_notoverflow(this), | 2894 Label if_overflow(this, Label::kDeferred), if_notoverflow(this), |
| 2791 if_join(this); | 2895 if_join(this); |
| 2792 Branch(overflow, &if_overflow, &if_notoverflow); | 2896 Branch(overflow, &if_overflow, &if_notoverflow); |
| 2793 BIND(&if_overflow); | 2897 BIND(&if_overflow); |
| 2794 { | 2898 { |
| 2795 Node* value64 = ChangeInt32ToFloat64(value); | 2899 Node* value64 = ChangeInt32ToFloat64(value); |
| 2796 Node* result = AllocateHeapNumberWithValue(value64); | 2900 Node* result = AllocateHeapNumberWithValue(value64); |
| 2797 var_result.Bind(result); | 2901 var_result.Bind(result); |
| 2798 } | 2902 } |
| 2799 Goto(&if_join); | 2903 Goto(&if_join); |
| 2800 BIND(&if_notoverflow); | 2904 BIND(&if_notoverflow); |
| 2801 { | 2905 { |
| 2802 Node* result = BitcastWordToTaggedSigned(Projection(0, pair)); | 2906 Node* result = BitcastWordToTaggedSigned(Projection(0, pair)); |
| 2803 var_result.Bind(result); | 2907 var_result.Bind(result); |
| 2804 } | 2908 } |
| 2805 Goto(&if_join); | 2909 Goto(&if_join); |
| 2806 BIND(&if_join); | 2910 BIND(&if_join); |
| 2911 CSA_SLOW_ASSERT(this, IsNumber(var_result.value())); | |
| 2807 return var_result.value(); | 2912 return var_result.value(); |
| 2808 } | 2913 } |
| 2809 | 2914 |
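On 32-bit targets the code above relies on value + value producing the same bits as the Smi tag shift by one, so a signed overflow on the addition signals that the value must be boxed as a HeapNumber. A hedged sketch of that check using the GCC/Clang overflow builtin, assuming the usual 32-bit Smi layout with a 31-bit payload:

```cpp
#include <cstdint>

// Hedged illustration of the overflow trick above: on a 32-bit target a Smi
// is the value shifted left by one, so value + value is the tagged result,
// and signed overflow means the value lies outside the 31-bit Smi range.
bool TryTagSmi32(int32_t value, int32_t* tagged_smi) {
  return !__builtin_add_overflow(value, value, tagged_smi);  // GCC/Clang builtin
}
```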
| 2810 Node* CodeStubAssembler::ChangeUint32ToTagged(Node* value) { | 2915 Node* CodeStubAssembler::ChangeUint32ToTagged(Node* value) { |
| 2811 Label if_overflow(this, Label::kDeferred), if_not_overflow(this), | 2916 Label if_overflow(this, Label::kDeferred), if_not_overflow(this), |
| 2812 if_join(this); | 2917 if_join(this); |
| 2813 VARIABLE(var_result, MachineRepresentation::kTagged); | 2918 VARIABLE(var_result, MachineRepresentation::kTagged); |
| 2814 // If {value} > 2^31 - 1, we need to store it in a HeapNumber. | 2919 // If {value} > 2^31 - 1, we need to store it in a HeapNumber. |
| 2815 Branch(Uint32LessThan(Int32Constant(Smi::kMaxValue), value), &if_overflow, | 2920 Branch(Uint32LessThan(Int32Constant(Smi::kMaxValue), value), &if_overflow, |
| 2816 &if_not_overflow); | 2921 &if_not_overflow); |
| (...skipping 16 matching lines...) | |
| 2833 Goto(&if_join); | 2938 Goto(&if_join); |
| 2834 | 2939 |
| 2835 BIND(&if_overflow); | 2940 BIND(&if_overflow); |
| 2836 { | 2941 { |
| 2837 Node* float64_value = ChangeUint32ToFloat64(value); | 2942 Node* float64_value = ChangeUint32ToFloat64(value); |
| 2838 var_result.Bind(AllocateHeapNumberWithValue(float64_value)); | 2943 var_result.Bind(AllocateHeapNumberWithValue(float64_value)); |
| 2839 } | 2944 } |
| 2840 Goto(&if_join); | 2945 Goto(&if_join); |
| 2841 | 2946 |
| 2842 BIND(&if_join); | 2947 BIND(&if_join); |
| 2948 CSA_SLOW_ASSERT(this, IsNumber(var_result.value())); | |
| 2843 return var_result.value(); | 2949 return var_result.value(); |
| 2844 } | 2950 } |
| 2845 | 2951 |
| 2846 Node* CodeStubAssembler::ToThisString(Node* context, Node* value, | 2952 Node* CodeStubAssembler::ToThisString(Node* context, Node* value, |
| 2847 char const* method_name) { | 2953 char const* method_name) { |
| 2848 VARIABLE(var_value, MachineRepresentation::kTagged, value); | 2954 VARIABLE(var_value, MachineRepresentation::kTagged, value); |
| 2849 | 2955 |
| 2850 // Check if the {value} is a Smi or a HeapObject. | 2956 // Check if the {value} is a Smi or a HeapObject. |
| 2851 Label if_valueissmi(this, Label::kDeferred), if_valueisnotsmi(this), | 2957 Label if_valueissmi(this, Label::kDeferred), if_valueisnotsmi(this), |
| 2852 if_valueisstring(this); | 2958 if_valueisstring(this); |
| (...skipping 44 matching lines...) | |
| 2897 // The {value} is a Smi, convert it to a String. | 3003 // The {value} is a Smi, convert it to a String. |
| 2898 Callable callable = CodeFactory::NumberToString(isolate()); | 3004 Callable callable = CodeFactory::NumberToString(isolate()); |
| 2899 var_value.Bind(CallStub(callable, context, value)); | 3005 var_value.Bind(CallStub(callable, context, value)); |
| 2900 Goto(&if_valueisstring); | 3006 Goto(&if_valueisstring); |
| 2901 } | 3007 } |
| 2902 BIND(&if_valueisstring); | 3008 BIND(&if_valueisstring); |
| 2903 return var_value.value(); | 3009 return var_value.value(); |
| 2904 } | 3010 } |
| 2905 | 3011 |
| 2906 Node* CodeStubAssembler::ChangeNumberToFloat64(Node* value) { | 3012 Node* CodeStubAssembler::ChangeNumberToFloat64(Node* value) { |
| 3013 CSA_SLOW_ASSERT(this, IsNumber(value)); | |
| 2907 VARIABLE(result, MachineRepresentation::kFloat64); | 3014 VARIABLE(result, MachineRepresentation::kFloat64); |
| 2908 Label smi(this); | 3015 Label smi(this); |
| 2909 Label done(this, &result); | 3016 Label done(this, &result); |
| 2910 GotoIf(TaggedIsSmi(value), &smi); | 3017 GotoIf(TaggedIsSmi(value), &smi); |
| 2911 result.Bind( | 3018 result.Bind( |
| 2912 LoadObjectField(value, HeapNumber::kValueOffset, MachineType::Float64())); | 3019 LoadObjectField(value, HeapNumber::kValueOffset, MachineType::Float64())); |
| 2913 Goto(&done); | 3020 Goto(&done); |
| 2914 | 3021 |
| 2915 BIND(&smi); | 3022 BIND(&smi); |
| 2916 { | 3023 { |
| 2917 result.Bind(SmiToFloat64(value)); | 3024 result.Bind(SmiToFloat64(value)); |
| 2918 Goto(&done); | 3025 Goto(&done); |
| 2919 } | 3026 } |
| 2920 | 3027 |
| 2921 BIND(&done); | 3028 BIND(&done); |
| 2922 return result.value(); | 3029 return result.value(); |
| 2923 } | 3030 } |
| 2924 | 3031 |
| 2925 Node* CodeStubAssembler::ChangeNumberToIntPtr(Node* value) { | 3032 Node* CodeStubAssembler::ChangeNumberToIntPtr(Node* value) { |
| 3033 CSA_SLOW_ASSERT(this, IsNumber(value)); | |
| 2926 VARIABLE(result, MachineType::PointerRepresentation()); | 3034 VARIABLE(result, MachineType::PointerRepresentation()); |
| 2927 Label smi(this), done(this, &result); | 3035 Label smi(this), done(this, &result); |
| 2928 GotoIf(TaggedIsSmi(value), &smi); | 3036 GotoIf(TaggedIsSmi(value), &smi); |
| 2929 | 3037 |
| 2930 CSA_ASSERT(this, IsHeapNumber(value)); | 3038 CSA_ASSERT(this, IsHeapNumber(value)); |
| 2931 result.Bind(ChangeFloat64ToUintPtr(LoadHeapNumberValue(value))); | 3039 result.Bind(ChangeFloat64ToUintPtr(LoadHeapNumberValue(value))); |
| 2932 Goto(&done); | 3040 Goto(&done); |
| 2933 | 3041 |
| 2934 BIND(&smi); | 3042 BIND(&smi); |
| 2935 result.Bind(SmiToWord(value)); | 3043 result.Bind(SmiToWord(value)); |
| (...skipping 117 matching lines...) | |
| 3053 | 3161 |
| 3054 BIND(&out); | 3162 BIND(&out); |
| 3055 return var_value_map.value(); | 3163 return var_value_map.value(); |
| 3056 } | 3164 } |
| 3057 | 3165 |
| 3058 Node* CodeStubAssembler::InstanceTypeEqual(Node* instance_type, int type) { | 3166 Node* CodeStubAssembler::InstanceTypeEqual(Node* instance_type, int type) { |
| 3059 return Word32Equal(instance_type, Int32Constant(type)); | 3167 return Word32Equal(instance_type, Int32Constant(type)); |
| 3060 } | 3168 } |
| 3061 | 3169 |
| 3062 Node* CodeStubAssembler::IsSpecialReceiverMap(Node* map) { | 3170 Node* CodeStubAssembler::IsSpecialReceiverMap(Node* map) { |
| 3171 CSA_SLOW_ASSERT(this, IsMap(map)); | |
| 3063 Node* is_special = IsSpecialReceiverInstanceType(LoadMapInstanceType(map)); | 3172 Node* is_special = IsSpecialReceiverInstanceType(LoadMapInstanceType(map)); |
| 3064 uint32_t mask = | 3173 uint32_t mask = |
| 3065 1 << Map::kHasNamedInterceptor | 1 << Map::kIsAccessCheckNeeded; | 3174 1 << Map::kHasNamedInterceptor | 1 << Map::kIsAccessCheckNeeded; |
| 3066 USE(mask); | 3175 USE(mask); |
| 3067 // Interceptors or access checks imply special receiver. | 3176 // Interceptors or access checks imply special receiver. |
| 3068 CSA_ASSERT(this, | 3177 CSA_ASSERT(this, |
| 3069 SelectConstant(IsSetWord32(LoadMapBitField(map), mask), is_special, | 3178 SelectConstant(IsSetWord32(LoadMapBitField(map), mask), is_special, |
| 3070 Int32Constant(1), MachineRepresentation::kWord32)); | 3179 Int32Constant(1), MachineRepresentation::kWord32)); |
| 3071 return is_special; | 3180 return is_special; |
| 3072 } | 3181 } |
| (...skipping 97 matching lines...) | |
| 3170 return Int32GreaterThanOrEqual(LoadInstanceType(object), | 3279 return Int32GreaterThanOrEqual(LoadInstanceType(object), |
| 3171 Int32Constant(FIRST_JS_RECEIVER_TYPE)); | 3280 Int32Constant(FIRST_JS_RECEIVER_TYPE)); |
| 3172 } | 3281 } |
| 3173 | 3282 |
| 3174 Node* CodeStubAssembler::IsJSGlobalProxy(Node* object) { | 3283 Node* CodeStubAssembler::IsJSGlobalProxy(Node* object) { |
| 3175 return Word32Equal(LoadInstanceType(object), | 3284 return Word32Equal(LoadInstanceType(object), |
| 3176 Int32Constant(JS_GLOBAL_PROXY_TYPE)); | 3285 Int32Constant(JS_GLOBAL_PROXY_TYPE)); |
| 3177 } | 3286 } |
| 3178 | 3287 |
| 3179 Node* CodeStubAssembler::IsMap(Node* map) { | 3288 Node* CodeStubAssembler::IsMap(Node* map) { |
| 3180 return HasInstanceType(map, MAP_TYPE); | 3289 return WordEqual(LoadMap(map), MetaMapConstant()); |
|
Igor Sheludko
2017/05/03 14:03:05
return IsMetaMap(LoadMap(map));
jgruber
2017/05/18 14:33:35
Done.
| |
| 3181 } | 3290 } |
| 3182 | 3291 |
| 3183 Node* CodeStubAssembler::IsJSValueInstanceType(Node* instance_type) { | 3292 Node* CodeStubAssembler::IsJSValueInstanceType(Node* instance_type) { |
| 3184 return Word32Equal(instance_type, Int32Constant(JS_VALUE_TYPE)); | 3293 return Word32Equal(instance_type, Int32Constant(JS_VALUE_TYPE)); |
| 3185 } | 3294 } |
| 3186 | 3295 |
| 3187 Node* CodeStubAssembler::IsJSValue(Node* object) { | 3296 Node* CodeStubAssembler::IsJSValue(Node* object) { |
| 3188 return IsJSValueMap(LoadMap(object)); | 3297 return IsJSValueMap(LoadMap(object)); |
| 3189 } | 3298 } |
| 3190 | 3299 |
| 3191 Node* CodeStubAssembler::IsJSValueMap(Node* map) { | 3300 Node* CodeStubAssembler::IsJSValueMap(Node* map) { |
| 3192 return IsJSValueInstanceType(LoadMapInstanceType(map)); | 3301 return IsJSValueInstanceType(LoadMapInstanceType(map)); |
| 3193 } | 3302 } |
| 3194 | 3303 |
| 3195 Node* CodeStubAssembler::IsJSArrayInstanceType(Node* instance_type) { | 3304 Node* CodeStubAssembler::IsJSArrayInstanceType(Node* instance_type) { |
| 3196 return Word32Equal(instance_type, Int32Constant(JS_ARRAY_TYPE)); | 3305 return Word32Equal(instance_type, Int32Constant(JS_ARRAY_TYPE)); |
| 3197 } | 3306 } |
| 3198 | 3307 |
| 3199 Node* CodeStubAssembler::IsJSArray(Node* object) { | 3308 Node* CodeStubAssembler::IsJSArray(Node* object) { |
| 3200 return IsJSArrayMap(LoadMap(object)); | 3309 return IsJSArrayMap(LoadMap(object)); |
| 3201 } | 3310 } |
| 3202 | 3311 |
| 3203 Node* CodeStubAssembler::IsJSArrayMap(Node* map) { | 3312 Node* CodeStubAssembler::IsJSArrayMap(Node* map) { |
| 3204 return IsJSArrayInstanceType(LoadMapInstanceType(map)); | 3313 return IsJSArrayInstanceType(LoadMapInstanceType(map)); |
| 3205 } | 3314 } |
| 3206 | 3315 |
| 3316 Node* CodeStubAssembler::IsFixedArray(Node* object) { | |
| 3317 return HasInstanceType(object, FIXED_ARRAY_TYPE); | |
| 3318 } | |
| 3319 | |
| 3320 Node* CodeStubAssembler::IsFixedArrayWithKind(Node* object, ElementsKind kind) { | |
| 3321 if (IsFastDoubleElementsKind(kind)) { | |
| 3322 return IsFixedDoubleArray(object); | |
| 3323 } else { | |
| 3324 DCHECK(IsFastSmiOrObjectElementsKind(kind)); | |
| 3325 return IsFixedArray(object); | |
| 3326 } | |
| 3327 } | |
| 3328 | |
| 3207 Node* CodeStubAssembler::IsWeakCell(Node* object) { | 3329 Node* CodeStubAssembler::IsWeakCell(Node* object) { |
| 3208 return IsWeakCellMap(LoadMap(object)); | 3330 return IsWeakCellMap(LoadMap(object)); |
| 3209 } | 3331 } |
| 3210 | 3332 |
| 3211 Node* CodeStubAssembler::IsBoolean(Node* object) { | 3333 Node* CodeStubAssembler::IsBoolean(Node* object) { |
| 3212 return IsBooleanMap(LoadMap(object)); | 3334 return IsBooleanMap(LoadMap(object)); |
| 3213 } | 3335 } |
| 3214 | 3336 |
| 3215 Node* CodeStubAssembler::IsPropertyCell(Node* object) { | 3337 Node* CodeStubAssembler::IsPropertyCell(Node* object) { |
| 3216 return IsPropertyCellMap(LoadMap(object)); | 3338 return IsPropertyCellMap(LoadMap(object)); |
| (...skipping 115 matching lines...) | |
| 3332 | 3454 |
| 3333 var_result.Bind(Int32Constant(0)); | 3455 var_result.Bind(Int32Constant(0)); |
| 3334 Goto(&out); | 3456 Goto(&out); |
| 3335 | 3457 |
| 3336 BIND(&out); | 3458 BIND(&out); |
| 3337 return var_result.value(); | 3459 return var_result.value(); |
| 3338 } | 3460 } |
| 3339 | 3461 |
| 3340 Node* CodeStubAssembler::StringCharCodeAt(Node* string, Node* index, | 3462 Node* CodeStubAssembler::StringCharCodeAt(Node* string, Node* index, |
| 3341 ParameterMode parameter_mode) { | 3463 ParameterMode parameter_mode) { |
| 3342 if (parameter_mode == SMI_PARAMETERS) CSA_ASSERT(this, TaggedIsSmi(index)); | 3464 CSA_ASSERT(this, MatchesParameterMode(index, parameter_mode)); |
| 3343 CSA_ASSERT(this, IsString(string)); | 3465 CSA_ASSERT(this, IsString(string)); |
| 3344 | 3466 |
| 3345 // Translate the {index} into a Word. | 3467 // Translate the {index} into a Word. |
| 3346 Node* const int_index = ParameterToWord(index, parameter_mode); | 3468 Node* const int_index = ParameterToWord(index, parameter_mode); |
| 3347 CSA_ASSERT(this, IntPtrGreaterThanOrEqual(int_index, IntPtrConstant(0))); | 3469 CSA_ASSERT(this, IntPtrGreaterThanOrEqual(int_index, IntPtrConstant(0))); |
| 3348 | 3470 |
| 3349 VARIABLE(var_result, MachineRepresentation::kWord32); | 3471 VARIABLE(var_result, MachineRepresentation::kWord32); |
| 3350 | 3472 |
| 3351 Label out(this, &var_result), runtime_generic(this), runtime_external(this); | 3473 Label out(this, &var_result), runtime_generic(this), runtime_external(this); |
| 3352 | 3474 |
| (...skipping 91 matching lines...) | |
| 3444 // Allocate a new SeqTwoByteString for {code}. | 3566 // Allocate a new SeqTwoByteString for {code}. |
| 3445 Node* result = AllocateSeqTwoByteString(1); | 3567 Node* result = AllocateSeqTwoByteString(1); |
| 3446 StoreNoWriteBarrier( | 3568 StoreNoWriteBarrier( |
| 3447 MachineRepresentation::kWord16, result, | 3569 MachineRepresentation::kWord16, result, |
| 3448 IntPtrConstant(SeqTwoByteString::kHeaderSize - kHeapObjectTag), code); | 3570 IntPtrConstant(SeqTwoByteString::kHeaderSize - kHeapObjectTag), code); |
| 3449 var_result.Bind(result); | 3571 var_result.Bind(result); |
| 3450 Goto(&if_done); | 3572 Goto(&if_done); |
| 3451 } | 3573 } |
| 3452 | 3574 |
| 3453 BIND(&if_done); | 3575 BIND(&if_done); |
| 3576 CSA_ASSERT(this, IsString(var_result.value())); | |
| 3454 return var_result.value(); | 3577 return var_result.value(); |
| 3455 } | 3578 } |
| 3456 | 3579 |
| 3457 namespace { | 3580 namespace { |
| 3458 | 3581 |
| 3459 // A wrapper around CopyStringCharacters which determines the correct string | 3582 // A wrapper around CopyStringCharacters which determines the correct string |
| 3460 // encoding, allocates a corresponding sequential string, and then copies the | 3583 // encoding, allocates a corresponding sequential string, and then copies the |
| 3461 // given character range using CopyStringCharacters. | 3584 // given character range using CopyStringCharacters. |
| 3462 // |from_string| must be a sequential string. |from_index| and | 3585 // |from_string| must be a sequential string. |from_index| and |
| 3463 // |character_count| must be Smis s.t. | 3586 // |character_count| must be Smis s.t. |
| (...skipping 177 matching lines...) | |
| 3641 | 3764 |
| 3642 // Fall back to a runtime call. | 3765 // Fall back to a runtime call. |
| 3643 BIND(&runtime); | 3766 BIND(&runtime); |
| 3644 { | 3767 { |
| 3645 var_result.Bind( | 3768 var_result.Bind( |
| 3646 CallRuntime(Runtime::kSubString, context, string, from, to)); | 3769 CallRuntime(Runtime::kSubString, context, string, from, to)); |
| 3647 Goto(&end); | 3770 Goto(&end); |
| 3648 } | 3771 } |
| 3649 | 3772 |
| 3650 BIND(&end); | 3773 BIND(&end); |
| 3774 CSA_ASSERT(this, IsString(var_result.value())); | |
| 3651 return var_result.value(); | 3775 return var_result.value(); |
| 3652 } | 3776 } |
| 3653 | 3777 |
| 3654 ToDirectStringAssembler::ToDirectStringAssembler( | 3778 ToDirectStringAssembler::ToDirectStringAssembler( |
| 3655 compiler::CodeAssemblerState* state, Node* string) | 3779 compiler::CodeAssemblerState* state, Node* string) |
| 3656 : CodeStubAssembler(state), | 3780 : CodeStubAssembler(state), |
| 3657 var_string_(this, MachineRepresentation::kTagged, string), | 3781 var_string_(this, MachineRepresentation::kTagged, string), |
| 3658 var_instance_type_(this, MachineRepresentation::kWord32), | 3782 var_instance_type_(this, MachineRepresentation::kWord32), |
| 3659 var_offset_(this, MachineType::PointerRepresentation()), | 3783 var_offset_(this, MachineType::PointerRepresentation()), |
| 3660 var_is_external_(this, MachineRepresentation::kWord32) { | 3784 var_is_external_(this, MachineRepresentation::kWord32) { |
| (...skipping 122 matching lines...) | |
| 3783 kHeapObjectTag)); | 3907 kHeapObjectTag)); |
| 3784 } | 3908 } |
| 3785 var_result.Bind(result); | 3909 var_result.Bind(result); |
| 3786 Goto(&out); | 3910 Goto(&out); |
| 3787 } | 3911 } |
| 3788 | 3912 |
| 3789 BIND(&out); | 3913 BIND(&out); |
| 3790 return var_result.value(); | 3914 return var_result.value(); |
| 3791 } | 3915 } |
| 3792 | 3916 |
| 3793 Node* CodeStubAssembler::TryDerefExternalString(Node* const string, | |
| 3794 Node* const instance_type, | |
| 3795 Label* if_bailout) { | |
| 3796 Label out(this); | |
| 3797 | |
| 3798 CSA_ASSERT(this, IsExternalStringInstanceType(instance_type)); | |
| 3799 GotoIf(IsShortExternalStringInstanceType(instance_type), if_bailout); | |
| 3800 | |
| 3801 // Move the pointer so that offset-wise, it looks like a sequential string. | |
| 3802 STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize); | |
| 3803 | |
| 3804 Node* resource_data = LoadObjectField( | |
| 3805 string, ExternalString::kResourceDataOffset, MachineType::Pointer()); | |
| 3806 Node* const fake_sequential_string = | |
| 3807 IntPtrSub(resource_data, | |
| 3808 IntPtrConstant(SeqTwoByteString::kHeaderSize - kHeapObjectTag)); | |
| 3809 | |
| 3810 return fake_sequential_string; | |
| 3811 } | |
| 3812 | |
| 3813 void CodeStubAssembler::MaybeDerefIndirectString(Variable* var_string, | 3917 void CodeStubAssembler::MaybeDerefIndirectString(Variable* var_string, |
| 3814 Node* instance_type, | 3918 Node* instance_type, |
| 3815 Variable* var_did_something) { | 3919 Variable* var_did_something) { |
| 3816 Label deref(this), done(this, var_did_something); | 3920 Label deref(this), done(this, var_did_something); |
| 3817 Node* representation = | 3921 Node* representation = |
| 3818 Word32And(instance_type, Int32Constant(kStringRepresentationMask)); | 3922 Word32And(instance_type, Int32Constant(kStringRepresentationMask)); |
| 3819 GotoIf(Word32Equal(representation, Int32Constant(kThinStringTag)), &deref); | 3923 GotoIf(Word32Equal(representation, Int32Constant(kThinStringTag)), &deref); |
| 3820 GotoIf(Word32NotEqual(representation, Int32Constant(kConsStringTag)), &done); | 3924 GotoIf(Word32NotEqual(representation, Int32Constant(kConsStringTag)), &done); |
| 3821 // Cons string. | 3925 // Cons string. |
| 3822 Node* rhs = LoadObjectField(var_string->value(), ConsString::kSecondOffset); | 3926 Node* rhs = LoadObjectField(var_string->value(), ConsString::kSecondOffset); |
| (...skipping 185 matching lines...) | |
| 4008 Node* value = AllocateSeqTwoByteString(2); | 4112 Node* value = AllocateSeqTwoByteString(2); |
| 4009 StoreNoWriteBarrier( | 4113 StoreNoWriteBarrier( |
| 4010 MachineRepresentation::kWord32, value, | 4114 MachineRepresentation::kWord32, value, |
| 4011 IntPtrConstant(SeqTwoByteString::kHeaderSize - kHeapObjectTag), | 4115 IntPtrConstant(SeqTwoByteString::kHeaderSize - kHeapObjectTag), |
| 4012 codepoint); | 4116 codepoint); |
| 4013 var_result.Bind(value); | 4117 var_result.Bind(value); |
| 4014 Goto(&return_result); | 4118 Goto(&return_result); |
| 4015 } | 4119 } |
| 4016 | 4120 |
| 4017 BIND(&return_result); | 4121 BIND(&return_result); |
| 4122 CSA_ASSERT(this, IsString(var_result.value())); | |
| 4018 return var_result.value(); | 4123 return var_result.value(); |
| 4019 } | 4124 } |
| 4020 | 4125 |
| 4021 Node* CodeStubAssembler::StringToNumber(Node* context, Node* input) { | 4126 Node* CodeStubAssembler::StringToNumber(Node* context, Node* input) { |
| 4127 CSA_SLOW_ASSERT(this, IsString(input)); | |
| 4022 Label runtime(this, Label::kDeferred); | 4128 Label runtime(this, Label::kDeferred); |
| 4023 Label end(this); | 4129 Label end(this); |
| 4024 | 4130 |
| 4025 VARIABLE(var_result, MachineRepresentation::kTagged); | 4131 VARIABLE(var_result, MachineRepresentation::kTagged); |
| 4026 | 4132 |
| 4027 // Check if string has a cached array index. | 4133 // Check if string has a cached array index. |
| 4028 Node* hash = LoadNameHashField(input); | 4134 Node* hash = LoadNameHashField(input); |
| 4029 Node* bit = | 4135 Node* bit = |
| 4030 Word32And(hash, Int32Constant(String::kContainsCachedArrayIndexMask)); | 4136 Word32And(hash, Int32Constant(String::kContainsCachedArrayIndexMask)); |
| 4031 GotoIf(Word32NotEqual(bit, Int32Constant(0)), &runtime); | 4137 GotoIf(Word32NotEqual(bit, Int32Constant(0)), &runtime); |
| (...skipping 79 matching lines...) | |
| 4111 GotoIf(WordNotEqual(smi_key, argument), &runtime); | 4217 GotoIf(WordNotEqual(smi_key, argument), &runtime); |
| 4112 | 4218 |
| 4113 // Smi match, return value from cache entry. | 4219 // Smi match, return value from cache entry. |
| 4114 IncrementCounter(isolate()->counters()->number_to_string_native(), 1); | 4220 IncrementCounter(isolate()->counters()->number_to_string_native(), 1); |
| 4115 result.Bind(LoadFixedArrayElement(number_string_cache, smi_index, | 4221 result.Bind(LoadFixedArrayElement(number_string_cache, smi_index, |
| 4116 kPointerSize, SMI_PARAMETERS)); | 4222 kPointerSize, SMI_PARAMETERS)); |
| 4117 Goto(&done); | 4223 Goto(&done); |
| 4118 } | 4224 } |
| 4119 | 4225 |
| 4120 BIND(&done); | 4226 BIND(&done); |
| 4227 CSA_ASSERT(this, IsString(result.value())); | |
| 4121 return result.value(); | 4228 return result.value(); |
| 4122 } | 4229 } |
| 4123 | 4230 |
| 4124 Node* CodeStubAssembler::ToName(Node* context, Node* value) { | 4231 Node* CodeStubAssembler::ToName(Node* context, Node* value) { |
| 4125 Label end(this); | 4232 Label end(this); |
| 4126 VARIABLE(var_result, MachineRepresentation::kTagged); | 4233 VARIABLE(var_result, MachineRepresentation::kTagged); |
| 4127 | 4234 |
| 4128 Label is_number(this); | 4235 Label is_number(this); |
| 4129 GotoIf(TaggedIsSmi(value), &is_number); | 4236 GotoIf(TaggedIsSmi(value), &is_number); |
| 4130 | 4237 |
| (...skipping 26 matching lines...) | |
| 4157 Goto(&end); | 4264 Goto(&end); |
| 4158 | 4265 |
| 4159 BIND(¬_oddball); | 4266 BIND(¬_oddball); |
| 4160 { | 4267 { |
| 4161 var_result.Bind(CallRuntime(Runtime::kToName, context, value)); | 4268 var_result.Bind(CallRuntime(Runtime::kToName, context, value)); |
| 4162 Goto(&end); | 4269 Goto(&end); |
| 4163 } | 4270 } |
| 4164 } | 4271 } |
| 4165 | 4272 |
| 4166 BIND(&end); | 4273 BIND(&end); |
| 4274 CSA_ASSERT(this, IsName(var_result.value())); | |
| 4167 return var_result.value(); | 4275 return var_result.value(); |
| 4168 } | 4276 } |
| 4169 | 4277 |
| 4170 Node* CodeStubAssembler::NonNumberToNumber(Node* context, Node* input) { | 4278 Node* CodeStubAssembler::NonNumberToNumber(Node* context, Node* input) { |
| 4171 // Assert input is a HeapObject (not smi or heap number) | 4279 // Assert input is a HeapObject (not smi or heap number) |
| 4172 CSA_ASSERT(this, Word32BinaryNot(TaggedIsSmi(input))); | 4280 CSA_ASSERT(this, Word32BinaryNot(TaggedIsSmi(input))); |
| 4173 CSA_ASSERT(this, Word32BinaryNot(IsHeapNumberMap(LoadMap(input)))); | 4281 CSA_ASSERT(this, Word32BinaryNot(IsHeapNumberMap(LoadMap(input)))); |
| 4174 | 4282 |
| 4175 // We might need to loop once here due to ToPrimitive conversions. | 4283 // We might need to loop once here due to ToPrimitive conversions. |
| 4176 VARIABLE(var_input, MachineRepresentation::kTagged, input); | 4284 VARIABLE(var_input, MachineRepresentation::kTagged, input); |
| (...skipping 68 matching lines...) | |
| 4245 // Note: We cannot tail call to the runtime here, as js-to-wasm | 4353 // Note: We cannot tail call to the runtime here, as js-to-wasm |
| 4246 // trampolines also use this code currently, and they declare all | 4354 // trampolines also use this code currently, and they declare all |
| 4247 // outgoing parameters as untagged, while we would push a tagged | 4355 // outgoing parameters as untagged, while we would push a tagged |
| 4248 // object here. | 4356 // object here. |
| 4249 var_result.Bind(CallRuntime(Runtime::kToNumber, context, input)); | 4357 var_result.Bind(CallRuntime(Runtime::kToNumber, context, input)); |
| 4250 Goto(&end); | 4358 Goto(&end); |
| 4251 } | 4359 } |
| 4252 } | 4360 } |
| 4253 | 4361 |
| 4254 BIND(&end); | 4362 BIND(&end); |
| 4363 CSA_ASSERT(this, IsNumber(var_result.value())); | |
| 4255 return var_result.value(); | 4364 return var_result.value(); |
| 4256 } | 4365 } |
| 4257 | 4366 |
| 4258 Node* CodeStubAssembler::ToNumber(Node* context, Node* input) { | 4367 Node* CodeStubAssembler::ToNumber(Node* context, Node* input) { |
| 4259 VARIABLE(var_result, MachineRepresentation::kTagged); | 4368 VARIABLE(var_result, MachineRepresentation::kTagged); |
| 4260 Label end(this); | 4369 Label end(this); |
| 4261 | 4370 |
| 4262 Label not_smi(this, Label::kDeferred); | 4371 Label not_smi(this, Label::kDeferred); |
| 4263 GotoIfNot(TaggedIsSmi(input), ¬_smi); | 4372 GotoIfNot(TaggedIsSmi(input), ¬_smi); |
| 4264 var_result.Bind(input); | 4373 var_result.Bind(input); |
| 4265 Goto(&end); | 4374 Goto(&end); |
| 4266 | 4375 |
| 4267 BIND(¬_smi); | 4376 BIND(¬_smi); |
| 4268 { | 4377 { |
| 4269 Label not_heap_number(this, Label::kDeferred); | 4378 Label not_heap_number(this, Label::kDeferred); |
| 4270 Node* input_map = LoadMap(input); | 4379 Node* input_map = LoadMap(input); |
| 4271 GotoIfNot(IsHeapNumberMap(input_map), &not_heap_number); | 4380 GotoIfNot(IsHeapNumberMap(input_map), &not_heap_number); |
| 4272 | 4381 |
| 4273 var_result.Bind(input); | 4382 var_result.Bind(input); |
| 4274 Goto(&end); | 4383 Goto(&end); |
| 4275 | 4384 |
| 4276 BIND(&not_heap_number); | 4385 BIND(&not_heap_number); |
| 4277 { | 4386 { |
| 4278 var_result.Bind(NonNumberToNumber(context, input)); | 4387 var_result.Bind(NonNumberToNumber(context, input)); |
| 4279 Goto(&end); | 4388 Goto(&end); |
| 4280 } | 4389 } |
| 4281 } | 4390 } |
| 4282 | 4391 |
| 4283 BIND(&end); | 4392 BIND(&end); |
| 4393 CSA_ASSERT(this, IsNumber(var_result.value())); | |
| 4284 return var_result.value(); | 4394 return var_result.value(); |
| 4285 } | 4395 } |
| 4286 | 4396 |
| 4287 // ES#sec-touint32 | 4397 // ES#sec-touint32 |
| 4288 Node* CodeStubAssembler::ToUint32(Node* context, Node* input) { | 4398 Node* CodeStubAssembler::ToUint32(Node* context, Node* input) { |
| 4289 Node* const float_zero = Float64Constant(0.0); | 4399 Node* const float_zero = Float64Constant(0.0); |
| 4290 Node* const float_two_32 = Float64Constant(static_cast<double>(1ULL << 32)); | 4400 Node* const float_two_32 = Float64Constant(static_cast<double>(1ULL << 32)); |
| 4291 | 4401 |
| 4292 Label out(this); | 4402 Label out(this); |
| 4293 | 4403 |
| (...skipping 82 matching lines...) | |
| 4376 } | 4486 } |
| 4377 | 4487 |
| 4378 BIND(&return_zero); | 4488 BIND(&return_zero); |
| 4379 { | 4489 { |
| 4380 var_result.Bind(SmiConstant(Smi::kZero)); | 4490 var_result.Bind(SmiConstant(Smi::kZero)); |
| 4381 Goto(&out); | 4491 Goto(&out); |
| 4382 } | 4492 } |
| 4383 } | 4493 } |
| 4384 | 4494 |
| 4385 BIND(&out); | 4495 BIND(&out); |
| 4496 CSA_ASSERT(this, IsNumber(var_result.value())); | |
| 4386 return var_result.value(); | 4497 return var_result.value(); |
| 4387 } | 4498 } |
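
The function above implements ES#sec-touint32, whose core step is a truncation followed by a reduction modulo 2^32 (the float_two_32 constant near the top of the function). A minimal plain-C++ sketch of just that spec-level arithmetic, leaving out the Smi and HeapNumber fast paths handled by the CSA code; the helper name is made up for illustration:

    #include <cmath>
    #include <cstdint>

    // Spec-level ToUint32 on a value that is already a double: truncate
    // toward zero, then fold it into the range [0, 2^32).
    uint32_t ToUint32Sketch(double number) {
      if (std::isnan(number) || std::isinf(number)) return 0;
      const double two_32 = 4294967296.0;  // 2^32, cf. float_two_32 above
      double modulo = std::fmod(std::trunc(number), two_32);
      if (modulo < 0) modulo += two_32;
      return static_cast<uint32_t>(modulo);
    }

For example, ToUint32Sketch(-1.0) yields 4294967295 and ToUint32Sketch(4294967296.5) yields 0.
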
| 4388 | 4499 |
| 4389 Node* CodeStubAssembler::ToString(Node* context, Node* input) { | 4500 Node* CodeStubAssembler::ToString(Node* context, Node* input) { |
| 4390 Label is_number(this); | 4501 Label is_number(this); |
| 4391 Label runtime(this, Label::kDeferred); | 4502 Label runtime(this, Label::kDeferred); |
| 4392 VARIABLE(result, MachineRepresentation::kTagged); | 4503 VARIABLE(result, MachineRepresentation::kTagged); |
| 4393 Label done(this, &result); | 4504 Label done(this, &result); |
| 4394 | 4505 |
| 4395 GotoIf(TaggedIsSmi(input), &is_number); | 4506 GotoIf(TaggedIsSmi(input), &is_number); |
| (...skipping 19 matching lines...) | |
| 4415 Goto(&done); | 4526 Goto(&done); |
| 4416 } | 4527 } |
| 4417 | 4528 |
| 4418 BIND(&runtime); | 4529 BIND(&runtime); |
| 4419 { | 4530 { |
| 4420 result.Bind(CallRuntime(Runtime::kToString, context, input)); | 4531 result.Bind(CallRuntime(Runtime::kToString, context, input)); |
| 4421 Goto(&done); | 4532 Goto(&done); |
| 4422 } | 4533 } |
| 4423 | 4534 |
| 4424 BIND(&done); | 4535 BIND(&done); |
| 4536 CSA_ASSERT(this, IsString(result.value())); | |
| 4425 return result.value(); | 4537 return result.value(); |
| 4426 } | 4538 } |
| 4427 | 4539 |
| 4428 Node* CodeStubAssembler::JSReceiverToPrimitive(Node* context, Node* input) { | 4540 Node* CodeStubAssembler::JSReceiverToPrimitive(Node* context, Node* input) { |
| 4429 Label if_isreceiver(this, Label::kDeferred), if_isnotreceiver(this); | 4541 Label if_isreceiver(this, Label::kDeferred), if_isnotreceiver(this); |
| 4430 VARIABLE(result, MachineRepresentation::kTagged); | 4542 VARIABLE(result, MachineRepresentation::kTagged); |
| 4431 Label done(this, &result); | 4543 Label done(this, &result); |
| 4432 | 4544 |
| 4433 BranchIfJSReceiver(input, &if_isreceiver, &if_isnotreceiver); | 4545 BranchIfJSReceiver(input, &if_isreceiver, &if_isnotreceiver); |
| 4434 | 4546 |
| (...skipping 33 matching lines...) | |
| 4468 Goto(&negative_check); | 4580 Goto(&negative_check); |
| 4469 | 4581 |
| 4470 BIND(&negative_check); | 4582 BIND(&negative_check); |
| 4471 Branch(SmiLessThan(result.value(), SmiConstant(0)), range_error, &done); | 4583 Branch(SmiLessThan(result.value(), SmiConstant(0)), range_error, &done); |
| 4472 | 4584 |
| 4473 BIND(&return_zero); | 4585 BIND(&return_zero); |
| 4474 result.Bind(SmiConstant(0)); | 4586 result.Bind(SmiConstant(0)); |
| 4475 Goto(&done); | 4587 Goto(&done); |
| 4476 | 4588 |
| 4477 BIND(&done); | 4589 BIND(&done); |
| 4590 CSA_SLOW_ASSERT(this, TaggedIsSmi(result.value())); | |
| 4478 return result.value(); | 4591 return result.value(); |
| 4479 } | 4592 } |
| 4480 | 4593 |
| 4481 Node* CodeStubAssembler::ToSmiLength(Node* input, Node* const context, | 4594 Node* CodeStubAssembler::ToSmiLength(Node* input, Node* const context, |
| 4482 Label* range_error) { | 4595 Label* range_error) { |
| 4483 VARIABLE(result, MachineRepresentation::kTagged, input); | 4596 VARIABLE(result, MachineRepresentation::kTagged, input); |
| 4484 Label to_integer(this), negative_check(this), return_zero(this), done(this); | 4597 Label to_integer(this), negative_check(this), return_zero(this), done(this); |
| 4485 Branch(TaggedIsSmi(result.value()), &negative_check, &to_integer); | 4598 Branch(TaggedIsSmi(result.value()), &negative_check, &to_integer); |
| 4486 | 4599 |
| 4487 BIND(&to_integer); | 4600 BIND(&to_integer); |
| 4488 result.Bind(ToInteger(context, result.value(), | 4601 result.Bind(ToInteger(context, result.value(), |
| 4489 CodeStubAssembler::kTruncateMinusZero)); | 4602 CodeStubAssembler::kTruncateMinusZero)); |
| 4490 GotoIfNot(TaggedIsSmi(result.value()), range_error); | 4603 GotoIfNot(TaggedIsSmi(result.value()), range_error); |
| 4491 CSA_ASSERT(this, TaggedIsSmi(result.value())); | 4604 CSA_ASSERT(this, TaggedIsSmi(result.value())); |
| 4492 Goto(&negative_check); | 4605 Goto(&negative_check); |
| 4493 | 4606 |
| 4494 BIND(&negative_check); | 4607 BIND(&negative_check); |
| 4495 Branch(SmiLessThan(result.value(), SmiConstant(0)), &return_zero, &done); | 4608 Branch(SmiLessThan(result.value(), SmiConstant(0)), &return_zero, &done); |
| 4496 | 4609 |
| 4497 BIND(&return_zero); | 4610 BIND(&return_zero); |
| 4498 result.Bind(SmiConstant(0)); | 4611 result.Bind(SmiConstant(0)); |
| 4499 Goto(&done); | 4612 Goto(&done); |
| 4500 | 4613 |
| 4501 BIND(&done); | 4614 BIND(&done); |
| 4615 CSA_SLOW_ASSERT(this, TaggedIsSmi(result.value())); | |
| 4502 return result.value(); | 4616 return result.value(); |
| 4503 } | 4617 } |
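
ToSmiLength first runs the value through ToInteger (truncating -0), bails out to range_error when the result does not fit in a Smi, and clamps negative lengths to zero. A rough untagged sketch of that decision, where kSmiMinValue/kSmiMaxValue stand in for V8's Smi range and the bailout label is modeled as an exception:

    #include <cstdint>
    #include <stdexcept>

    constexpr int64_t kSmiMaxValue = (int64_t{1} << 30) - 1;  // assumption
    constexpr int64_t kSmiMinValue = -(int64_t{1} << 30);     // assumption

    // 'integer' is the value after ToInteger, with -0 already truncated.
    int64_t ToSmiLengthSketch(int64_t integer) {
      if (integer < kSmiMinValue || integer > kSmiMaxValue)
        throw std::range_error("length is not a Smi");  // cf. range_error
      if (integer < 0) return 0;  // negative lengths clamp to zero
      return integer;
    }
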
| 4504 | 4618 |
| 4505 Node* CodeStubAssembler::ToInteger(Node* context, Node* input, | 4619 Node* CodeStubAssembler::ToInteger(Node* context, Node* input, |
| 4506 ToIntegerTruncationMode mode) { | 4620 ToIntegerTruncationMode mode) { |
| 4507 // We might need to loop once for ToNumber conversion. | 4621 // We might need to loop once for ToNumber conversion. |
| 4508 VARIABLE(var_arg, MachineRepresentation::kTagged, input); | 4622 VARIABLE(var_arg, MachineRepresentation::kTagged, input); |
| 4509 Label loop(this, &var_arg), out(this); | 4623 Label loop(this, &var_arg), out(this); |
| 4510 Goto(&loop); | 4624 Goto(&loop); |
| 4511 BIND(&loop); | 4625 BIND(&loop); |
| (...skipping 40 matching lines...) | |
| 4552 var_arg.Bind(CallStub(callable, context, arg)); | 4666 var_arg.Bind(CallStub(callable, context, arg)); |
| 4553 Goto(&loop); | 4667 Goto(&loop); |
| 4554 } | 4668 } |
| 4555 | 4669 |
| 4556 BIND(&return_zero); | 4670 BIND(&return_zero); |
| 4557 var_arg.Bind(SmiConstant(Smi::kZero)); | 4671 var_arg.Bind(SmiConstant(Smi::kZero)); |
| 4558 Goto(&out); | 4672 Goto(&out); |
| 4559 } | 4673 } |
| 4560 | 4674 |
| 4561 BIND(&out); | 4675 BIND(&out); |
| 4676 CSA_SLOW_ASSERT(this, IsNumber(var_arg.value())); | |
| 4562 return var_arg.value(); | 4677 return var_arg.value(); |
| 4563 } | 4678 } |
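
After the ToNumber loop above, the numeric part of ToInteger is just truncation toward zero, with NaN mapped to zero (the return_zero label) and, in kTruncateMinusZero mode, -0 normalized to +0. A small double-only sketch of that arithmetic, with a hypothetical helper name:

    #include <cmath>

    double ToIntegerSketch(double number, bool truncate_minus_zero) {
      if (std::isnan(number)) return 0.0;  // cf. return_zero above
      double result = std::trunc(number);  // drop the fractional part
      if (truncate_minus_zero && result == 0.0) return 0.0;  // -0 -> +0
      return result;  // infinities pass through unchanged
    }
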
| 4564 | 4679 |
| 4565 Node* CodeStubAssembler::DecodeWord32(Node* word32, uint32_t shift, | 4680 Node* CodeStubAssembler::DecodeWord32(Node* word32, uint32_t shift, |
| 4566 uint32_t mask) { | 4681 uint32_t mask) { |
| 4567 return Word32Shr(Word32And(word32, Int32Constant(mask)), | 4682 return Word32Shr(Word32And(word32, Int32Constant(mask)), |
| 4568 static_cast<int>(shift)); | 4683 static_cast<int>(shift)); |
| 4569 } | 4684 } |
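
DecodeWord32 takes a mask that is already positioned in place and shifts the selected bits down, i.e. (word & mask) >> shift. A self-contained illustration of the same bit-field extraction (the function name is made up):

    #include <cstdint>

    uint32_t DecodeBitField(uint32_t word, uint32_t shift, uint32_t mask) {
      return (word & mask) >> shift;  // mask first, then shift down
    }

    // DecodeBitField(0xABCD, 4, 0x00F0) == 0xC: bits 4..7 of the word.
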
| 4570 | 4685 |
| 4571 Node* CodeStubAssembler::DecodeWord(Node* word, uint32_t shift, uint32_t mask) { | 4686 Node* CodeStubAssembler::DecodeWord(Node* word, uint32_t shift, uint32_t mask) { |
| (...skipping 96 matching lines...) | |
| 4668 BIND(&if_hascachedindex); | 4783 BIND(&if_hascachedindex); |
| 4669 var_index->Bind(DecodeWordFromWord32<Name::ArrayIndexValueBits>(hash)); | 4784 var_index->Bind(DecodeWordFromWord32<Name::ArrayIndexValueBits>(hash)); |
| 4670 Goto(if_keyisindex); | 4785 Goto(if_keyisindex); |
| 4671 } | 4786 } |
| 4672 | 4787 |
| 4673 void CodeStubAssembler::TryInternalizeString( | 4788 void CodeStubAssembler::TryInternalizeString( |
| 4674 Node* string, Label* if_index, Variable* var_index, Label* if_internalized, | 4789 Node* string, Label* if_index, Variable* var_index, Label* if_internalized, |
| 4675 Variable* var_internalized, Label* if_not_internalized, Label* if_bailout) { | 4790 Variable* var_internalized, Label* if_not_internalized, Label* if_bailout) { |
| 4676 DCHECK(var_index->rep() == MachineType::PointerRepresentation()); | 4791 DCHECK(var_index->rep() == MachineType::PointerRepresentation()); |
| 4677 DCHECK(var_internalized->rep() == MachineRepresentation::kTagged); | 4792 DCHECK(var_internalized->rep() == MachineRepresentation::kTagged); |
| 4793 CSA_SLOW_ASSERT(this, IsString(string)); | |
| 4678 Node* function = ExternalConstant( | 4794 Node* function = ExternalConstant( |
| 4679 ExternalReference::try_internalize_string_function(isolate())); | 4795 ExternalReference::try_internalize_string_function(isolate())); |
| 4680 Node* result = CallCFunction1(MachineType::AnyTagged(), | 4796 Node* result = CallCFunction1(MachineType::AnyTagged(), |
| 4681 MachineType::AnyTagged(), function, string); | 4797 MachineType::AnyTagged(), function, string); |
| 4682 Label internalized(this); | 4798 Label internalized(this); |
| 4683 GotoIf(TaggedIsNotSmi(result), &internalized); | 4799 GotoIf(TaggedIsNotSmi(result), &internalized); |
| 4684 Node* word_result = SmiUntag(result); | 4800 Node* word_result = SmiUntag(result); |
| 4685 GotoIf(WordEqual(word_result, IntPtrConstant(ResultSentinel::kNotFound)), | 4801 GotoIf(WordEqual(word_result, IntPtrConstant(ResultSentinel::kNotFound)), |
| 4686 if_not_internalized); | 4802 if_not_internalized); |
| 4687 GotoIf(WordEqual(word_result, IntPtrConstant(ResultSentinel::kUnsupported)), | 4803 GotoIf(WordEqual(word_result, IntPtrConstant(ResultSentinel::kUnsupported)), |
| (...skipping 227 matching lines...) | |
| 4915 void CodeStubAssembler::InsertEntry(Node* dictionary, Node* key, Node* value, | 5031 void CodeStubAssembler::InsertEntry(Node* dictionary, Node* key, Node* value, |
| 4916 Node* index, Node* enum_index) { | 5032 Node* index, Node* enum_index) { |
| 4917 UNREACHABLE(); // Use specializations instead. | 5033 UNREACHABLE(); // Use specializations instead. |
| 4918 } | 5034 } |
| 4919 | 5035 |
| 4920 template <> | 5036 template <> |
| 4921 void CodeStubAssembler::InsertEntry<NameDictionary>(Node* dictionary, | 5037 void CodeStubAssembler::InsertEntry<NameDictionary>(Node* dictionary, |
| 4922 Node* name, Node* value, | 5038 Node* name, Node* value, |
| 4923 Node* index, | 5039 Node* index, |
| 4924 Node* enum_index) { | 5040 Node* enum_index) { |
| 5041 CSA_SLOW_ASSERT(this, IsDictionary(dictionary)); | |
| 5042 | |
| 4925 // Store name and value. | 5043 // Store name and value. |
| 4926 StoreFixedArrayElement(dictionary, index, name); | 5044 StoreFixedArrayElement(dictionary, index, name); |
| 4927 StoreValueByKeyIndex<NameDictionary>(dictionary, index, value); | 5045 StoreValueByKeyIndex<NameDictionary>(dictionary, index, value); |
| 4928 | 5046 |
| 4929 // Prepare details of the new property. | 5047 // Prepare details of the new property. |
| 4930 const int kInitialIndex = 0; | 5048 const int kInitialIndex = 0; |
| 4931 PropertyDetails d(kData, NONE, kInitialIndex, PropertyCellType::kNoCell); | 5049 PropertyDetails d(kData, NONE, kInitialIndex, PropertyCellType::kNoCell); |
| 4932 enum_index = | 5050 enum_index = |
| 4933 SmiShl(enum_index, PropertyDetails::DictionaryStorageField::kShift); | 5051 SmiShl(enum_index, PropertyDetails::DictionaryStorageField::kShift); |
| 4934 STATIC_ASSERT(kInitialIndex == 0); | 5052 STATIC_ASSERT(kInitialIndex == 0); |
| (...skipping 21 matching lines...) | |
| 4956 void CodeStubAssembler::InsertEntry<GlobalDictionary>(Node* dictionary, | 5074 void CodeStubAssembler::InsertEntry<GlobalDictionary>(Node* dictionary, |
| 4957 Node* key, Node* value, | 5075 Node* key, Node* value, |
| 4958 Node* index, | 5076 Node* index, |
| 4959 Node* enum_index) { | 5077 Node* enum_index) { |
| 4960 UNIMPLEMENTED(); | 5078 UNIMPLEMENTED(); |
| 4961 } | 5079 } |
| 4962 | 5080 |
| 4963 template <class Dictionary> | 5081 template <class Dictionary> |
| 4964 void CodeStubAssembler::Add(Node* dictionary, Node* key, Node* value, | 5082 void CodeStubAssembler::Add(Node* dictionary, Node* key, Node* value, |
| 4965 Label* bailout) { | 5083 Label* bailout) { |
| 5084 CSA_SLOW_ASSERT(this, IsDictionary(dictionary)); | |
| 4966 Node* capacity = GetCapacity<Dictionary>(dictionary); | 5085 Node* capacity = GetCapacity<Dictionary>(dictionary); |
| 4967 Node* nof = GetNumberOfElements<Dictionary>(dictionary); | 5086 Node* nof = GetNumberOfElements<Dictionary>(dictionary); |
| 4968 Node* new_nof = SmiAdd(nof, SmiConstant(1)); | 5087 Node* new_nof = SmiAdd(nof, SmiConstant(1)); |
| 4969 // Require 33% to still be free after adding additional_elements. | 5088 // Require 33% to still be free after adding additional_elements. |
| 4970 // Computing "x + (x >> 1)" on a Smi x does not return a valid Smi! | 5089 // Computing "x + (x >> 1)" on a Smi x does not return a valid Smi! |
| 4971 // But that's OK here because it's only used for a comparison. | 5090 // But that's OK here because it's only used for a comparison. |
| 4972 Node* required_capacity_pseudo_smi = SmiAdd(new_nof, SmiShr(new_nof, 1)); | 5091 Node* required_capacity_pseudo_smi = SmiAdd(new_nof, SmiShr(new_nof, 1)); |
| 4973 GotoIf(SmiBelow(capacity, required_capacity_pseudo_smi), bailout); | 5092 GotoIf(SmiBelow(capacity, required_capacity_pseudo_smi), bailout); |
| 4974 // Require rehashing if more than 50% of free elements are deleted elements. | 5093 // Require rehashing if more than 50% of free elements are deleted elements. |
| 4975 Node* deleted = GetNumberOfDeletedElements<Dictionary>(dictionary); | 5094 Node* deleted = GetNumberOfDeletedElements<Dictionary>(dictionary); |
| (...skipping 1618 matching lines...) | |
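
The capacity check in the hunk above requires that a third of the dictionary's slots stay free after the insertion: it compares capacity against new_nof + (new_nof >> 1), and the comment notes that doing this on Smi-tagged values may leave the Smi range, which is harmless because the result only feeds a comparison. The same check on plain integers, as an illustration only (the helper name is hypothetical):

    #include <cstdint>

    // Capacity must be at least 1.5x the element count after adding one
    // entry, i.e. roughly 33% of the slots remain free.
    bool HasEnoughCapacity(int64_t capacity, int64_t number_of_elements) {
      int64_t new_nof = number_of_elements + 1;
      int64_t required_capacity = new_nof + (new_nof >> 1);  // new_nof * 1.5
      return capacity >= required_capacity;
    }
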
| 6594 // Store the WeakCell in the feedback vector. | 6713 // Store the WeakCell in the feedback vector. |
| 6595 StoreFixedArrayElement(feedback_vector, slot, cell, UPDATE_WRITE_BARRIER, 0, | 6714 StoreFixedArrayElement(feedback_vector, slot, cell, UPDATE_WRITE_BARRIER, 0, |
| 6596 CodeStubAssembler::SMI_PARAMETERS); | 6715 CodeStubAssembler::SMI_PARAMETERS); |
| 6597 return cell; | 6716 return cell; |
| 6598 } | 6717 } |
| 6599 | 6718 |
| 6600 Node* CodeStubAssembler::BuildFastLoop( | 6719 Node* CodeStubAssembler::BuildFastLoop( |
| 6601 const CodeStubAssembler::VariableList& vars, Node* start_index, | 6720 const CodeStubAssembler::VariableList& vars, Node* start_index, |
| 6602 Node* end_index, const FastLoopBody& body, int increment, | 6721 Node* end_index, const FastLoopBody& body, int increment, |
| 6603 ParameterMode parameter_mode, IndexAdvanceMode advance_mode) { | 6722 ParameterMode parameter_mode, IndexAdvanceMode advance_mode) { |
| 6723 CSA_SLOW_ASSERT(this, MatchesParameterMode(start_index, parameter_mode)); | |
| 6724 CSA_SLOW_ASSERT(this, MatchesParameterMode(end_index, parameter_mode)); | |
| 6604 MachineRepresentation index_rep = (parameter_mode == INTPTR_PARAMETERS) | 6725 MachineRepresentation index_rep = (parameter_mode == INTPTR_PARAMETERS) |
| 6605 ? MachineType::PointerRepresentation() | 6726 ? MachineType::PointerRepresentation() |
| 6606 : MachineRepresentation::kTaggedSigned; | 6727 : MachineRepresentation::kTaggedSigned; |
| 6607 VARIABLE(var, index_rep, start_index); | 6728 VARIABLE(var, index_rep, start_index); |
| 6608 VariableList vars_copy(vars, zone()); | 6729 VariableList vars_copy(vars, zone()); |
| 6609 vars_copy.Add(&var, zone()); | 6730 vars_copy.Add(&var, zone()); |
| 6610 Label loop(this, vars_copy); | 6731 Label loop(this, vars_copy); |
| 6611 Label after_loop(this); | 6732 Label after_loop(this); |
| 6612 // Introduce an explicit second check of the termination condition before the | 6733 // Introduce an explicit second check of the termination condition before the |
| 6613 // loop that helps turbofan generate better code. If there's only a single | 6734 // loop that helps turbofan generate better code. If there's only a single |
| (...skipping 17 matching lines...) | |
| 6631 BIND(&after_loop); | 6752 BIND(&after_loop); |
| 6632 return var.value(); | 6753 return var.value(); |
| 6633 } | 6754 } |
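
The comment inside BuildFastLoop about an explicit second check of the termination condition describes ordinary loop rotation: test the exit condition once before entering, then emit a bottom-tested loop with a single back edge. A plain-integer sketch of that shape, showing only the kPost advance mode; the helper is illustrative, not the CSA API:

    #include <functional>

    void FastLoopSketch(int start_index, int end_index, int increment,
                        const std::function<void(int)>& body) {
      int index = start_index;
      if (index == end_index) return;  // explicit up-front exit check
      do {
        body(index);
        index += increment;            // kPost-style advance after the body
      } while (index != end_index);
    }

As with the real helper, the caller is expected to choose start, end and increment so the index lands exactly on end_index.
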
| 6634 | 6755 |
| 6635 void CodeStubAssembler::BuildFastFixedArrayForEach( | 6756 void CodeStubAssembler::BuildFastFixedArrayForEach( |
| 6636 const CodeStubAssembler::VariableList& vars, Node* fixed_array, | 6757 const CodeStubAssembler::VariableList& vars, Node* fixed_array, |
| 6637 ElementsKind kind, Node* first_element_inclusive, | 6758 ElementsKind kind, Node* first_element_inclusive, |
| 6638 Node* last_element_exclusive, const FastFixedArrayForEachBody& body, | 6759 Node* last_element_exclusive, const FastFixedArrayForEachBody& body, |
| 6639 ParameterMode mode, ForEachDirection direction) { | 6760 ParameterMode mode, ForEachDirection direction) { |
| 6640 STATIC_ASSERT(FixedArray::kHeaderSize == FixedDoubleArray::kHeaderSize); | 6761 STATIC_ASSERT(FixedArray::kHeaderSize == FixedDoubleArray::kHeaderSize); |
| 6762 CSA_SLOW_ASSERT(this, MatchesParameterMode(first_element_inclusive, mode)); | |
| 6763 CSA_SLOW_ASSERT(this, MatchesParameterMode(last_element_exclusive, mode)); | |
| 6764 CSA_SLOW_ASSERT(this, IsFixedArrayWithKind(fixed_array, kind)); | |
| 6641 int32_t first_val; | 6765 int32_t first_val; |
| 6642 bool constant_first = ToInt32Constant(first_element_inclusive, first_val); | 6766 bool constant_first = ToInt32Constant(first_element_inclusive, first_val); |
| 6643 int32_t last_val; | 6767 int32_t last_val; |
| 6644 bool constant_last = ToInt32Constant(last_element_exclusive, last_val); | 6768 bool constant_last = ToInt32Constant(last_element_exclusive, last_val); |
| 6645 if (constant_first && constant_last) { | 6769 if (constant_first && constant_last) { |
| 6646 int delta = last_val - first_val; | 6770 int delta = last_val - first_val; |
| 6647 DCHECK(delta >= 0); | 6771 DCHECK(delta >= 0); |
| 6648 if (delta <= kElementLoopUnrollThreshold) { | 6772 if (delta <= kElementLoopUnrollThreshold) { |
| 6649 if (direction == ForEachDirection::kForward) { | 6773 if (direction == ForEachDirection::kForward) { |
| 6650 for (int i = first_val; i < last_val; ++i) { | 6774 for (int i = first_val; i < last_val; ++i) { |
| (...skipping 40 matching lines...) | |
| 6691 (kMaxRegularHeapObjectSize - base_size) / kPointerSize; | 6815 (kMaxRegularHeapObjectSize - base_size) / kPointerSize; |
| 6692 GotoIf(IntPtrOrSmiGreaterThan( | 6816 GotoIf(IntPtrOrSmiGreaterThan( |
| 6693 element_count, IntPtrOrSmiConstant(max_newspace_parameters, mode), | 6817 element_count, IntPtrOrSmiConstant(max_newspace_parameters, mode), |
| 6694 mode), | 6818 mode), |
| 6695 doesnt_fit); | 6819 doesnt_fit); |
| 6696 } | 6820 } |
| 6697 | 6821 |
| 6698 void CodeStubAssembler::InitializeFieldsWithRoot( | 6822 void CodeStubAssembler::InitializeFieldsWithRoot( |
| 6699 Node* object, Node* start_offset, Node* end_offset, | 6823 Node* object, Node* start_offset, Node* end_offset, |
| 6700 Heap::RootListIndex root_index) { | 6824 Heap::RootListIndex root_index) { |
| 6825 CSA_SLOW_ASSERT(this, TaggedIsNotSmi(object)); | |
| 6701 start_offset = IntPtrAdd(start_offset, IntPtrConstant(-kHeapObjectTag)); | 6826 start_offset = IntPtrAdd(start_offset, IntPtrConstant(-kHeapObjectTag)); |
| 6702 end_offset = IntPtrAdd(end_offset, IntPtrConstant(-kHeapObjectTag)); | 6827 end_offset = IntPtrAdd(end_offset, IntPtrConstant(-kHeapObjectTag)); |
| 6703 Node* root_value = LoadRoot(root_index); | 6828 Node* root_value = LoadRoot(root_index); |
| 6704 BuildFastLoop(end_offset, start_offset, | 6829 BuildFastLoop(end_offset, start_offset, |
| 6705 [this, object, root_value](Node* current) { | 6830 [this, object, root_value](Node* current) { |
| 6706 StoreNoWriteBarrier(MachineRepresentation::kTagged, object, | 6831 StoreNoWriteBarrier(MachineRepresentation::kTagged, object, |
| 6707 current, root_value); | 6832 current, root_value); |
| 6708 }, | 6833 }, |
| 6709 -kPointerSize, INTPTR_PARAMETERS, | 6834 -kPointerSize, INTPTR_PARAMETERS, |
| 6710 CodeStubAssembler::IndexAdvanceMode::kPre); | 6835 CodeStubAssembler::IndexAdvanceMode::kPre); |
| 6711 } | 6836 } |
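
InitializeFieldsWithRoot walks backwards from end_offset to start_offset, pre-decrementing by kPointerSize before each store, so exactly the slots in [start_offset, end_offset) receive the root value. A rough untagged analogue with no write barriers (names hypothetical, offsets assumed pointer-aligned):

    #include <cstddef>
    #include <cstdint>

    void FillFieldsSketch(uintptr_t* object_base, size_t start_offset,
                          size_t end_offset, uintptr_t root_value) {
      size_t offset = end_offset;
      while (offset != start_offset) {
        offset -= sizeof(uintptr_t);  // pre-decrement, cf. kPre advance mode
        object_base[offset / sizeof(uintptr_t)] = root_value;
      }
    }
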
| 6712 | 6837 |
| 6713 void CodeStubAssembler::BranchIfNumericRelationalComparison( | 6838 void CodeStubAssembler::BranchIfNumericRelationalComparison( |
| 6714 RelationalComparisonMode mode, Node* lhs, Node* rhs, Label* if_true, | 6839 RelationalComparisonMode mode, Node* lhs, Node* rhs, Label* if_true, |
| 6715 Label* if_false) { | 6840 Label* if_false) { |
| 6841 CSA_SLOW_ASSERT(this, IsNumber(lhs)); | |
| 6842 CSA_SLOW_ASSERT(this, IsNumber(rhs)); | |
| 6843 | |
| 6716 Label end(this); | 6844 Label end(this); |
| 6717 VARIABLE(result, MachineRepresentation::kTagged); | 6845 VARIABLE(result, MachineRepresentation::kTagged); |
| 6718 | 6846 |
| 6719 // Shared entry for floating point comparison. | 6847 // Shared entry for floating point comparison. |
| 6720 Label do_fcmp(this); | 6848 Label do_fcmp(this); |
| 6721 VARIABLE(var_fcmp_lhs, MachineRepresentation::kFloat64); | 6849 VARIABLE(var_fcmp_lhs, MachineRepresentation::kFloat64); |
| 6722 VARIABLE(var_fcmp_rhs, MachineRepresentation::kFloat64); | 6850 VARIABLE(var_fcmp_rhs, MachineRepresentation::kFloat64); |
| 6723 | 6851 |
| 6724 // Check if the {lhs} is a Smi or a HeapObject. | 6852 // Check if the {lhs} is a Smi or a HeapObject. |
| 6725 Label if_lhsissmi(this), if_lhsisnotsmi(this); | 6853 Label if_lhsissmi(this), if_lhsisnotsmi(this); |
| (...skipping 91 matching lines...) | |
| 6817 void CodeStubAssembler::GotoUnlessNumberLessThan(Node* lhs, Node* rhs, | 6945 void CodeStubAssembler::GotoUnlessNumberLessThan(Node* lhs, Node* rhs, |
| 6818 Label* if_false) { | 6946 Label* if_false) { |
| 6819 Label if_true(this); | 6947 Label if_true(this); |
| 6820 BranchIfNumericRelationalComparison(kLessThan, lhs, rhs, &if_true, if_false); | 6948 BranchIfNumericRelationalComparison(kLessThan, lhs, rhs, &if_true, if_false); |
| 6821 BIND(&if_true); | 6949 BIND(&if_true); |
| 6822 } | 6950 } |
| 6823 | 6951 |
| 6824 Node* CodeStubAssembler::RelationalComparison(RelationalComparisonMode mode, | 6952 Node* CodeStubAssembler::RelationalComparison(RelationalComparisonMode mode, |
| 6825 Node* lhs, Node* rhs, | 6953 Node* lhs, Node* rhs, |
| 6826 Node* context) { | 6954 Node* context) { |
| 6955 CSA_SLOW_ASSERT(this, IsNumber(lhs)); | |
| 6956 CSA_SLOW_ASSERT(this, IsNumber(rhs)); | |
| 6957 | |
| 6827 Label return_true(this), return_false(this), end(this); | 6958 Label return_true(this), return_false(this), end(this); |
| 6828 VARIABLE(result, MachineRepresentation::kTagged); | 6959 VARIABLE(result, MachineRepresentation::kTagged); |
| 6829 | 6960 |
| 6830 // Shared entry for floating point comparison. | 6961 // Shared entry for floating point comparison. |
| 6831 Label do_fcmp(this); | 6962 Label do_fcmp(this); |
| 6832 VARIABLE(var_fcmp_lhs, MachineRepresentation::kFloat64); | 6963 VARIABLE(var_fcmp_lhs, MachineRepresentation::kFloat64); |
| 6833 VARIABLE(var_fcmp_rhs, MachineRepresentation::kFloat64); | 6964 VARIABLE(var_fcmp_rhs, MachineRepresentation::kFloat64); |
| 6834 | 6965 |
| 6835 // We might need to loop several times due to ToPrimitive and/or ToNumber | 6966 // We might need to loop several times due to ToPrimitive and/or ToNumber |
| 6836 // conversions. | 6967 // conversions. |
| (...skipping 1481 matching lines...) | |
| 8318 | 8449 |
| 8319 BIND(&return_false); | 8450 BIND(&return_false); |
| 8320 var_result.Bind(FalseConstant()); | 8451 var_result.Bind(FalseConstant()); |
| 8321 Goto(&return_result); | 8452 Goto(&return_result); |
| 8322 | 8453 |
| 8323 BIND(&return_result); | 8454 BIND(&return_result); |
| 8324 return var_result.value(); | 8455 return var_result.value(); |
| 8325 } | 8456 } |
| 8326 | 8457 |
| 8327 Node* CodeStubAssembler::NumberInc(Node* value) { | 8458 Node* CodeStubAssembler::NumberInc(Node* value) { |
| 8459 CSA_SLOW_ASSERT(this, IsNumber(value)); | |
| 8460 | |
| 8328 VARIABLE(var_result, MachineRepresentation::kTagged); | 8461 VARIABLE(var_result, MachineRepresentation::kTagged); |
| 8329 VARIABLE(var_finc_value, MachineRepresentation::kFloat64); | 8462 VARIABLE(var_finc_value, MachineRepresentation::kFloat64); |
| 8330 Label if_issmi(this), if_isnotsmi(this), do_finc(this), end(this); | 8463 Label if_issmi(this), if_isnotsmi(this), do_finc(this), end(this); |
| 8331 Branch(TaggedIsSmi(value), &if_issmi, &if_isnotsmi); | 8464 Branch(TaggedIsSmi(value), &if_issmi, &if_isnotsmi); |
| 8332 | 8465 |
| 8333 BIND(&if_issmi); | 8466 BIND(&if_issmi); |
| 8334 { | 8467 { |
| 8335 // Try fast Smi addition first. | 8468 // Try fast Smi addition first. |
| 8336 Node* one = SmiConstant(Smi::FromInt(1)); | 8469 Node* one = SmiConstant(Smi::FromInt(1)); |
| 8337 Node* pair = IntPtrAddWithOverflow(BitcastTaggedToWord(value), | 8470 Node* pair = IntPtrAddWithOverflow(BitcastTaggedToWord(value), |
| (...skipping 32 matching lines...) | |
| 8370 Node* finc_result = Float64Add(finc_value, one); | 8503 Node* finc_result = Float64Add(finc_value, one); |
| 8371 var_result.Bind(AllocateHeapNumberWithValue(finc_result)); | 8504 var_result.Bind(AllocateHeapNumberWithValue(finc_result)); |
| 8372 Goto(&end); | 8505 Goto(&end); |
| 8373 } | 8506 } |
| 8374 | 8507 |
| 8375 BIND(&end); | 8508 BIND(&end); |
| 8376 return var_result.value(); | 8509 return var_result.value(); |
| 8377 } | 8510 } |
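
NumberInc first tries the increment directly on the word holding the Smi and only takes the deferred path, redoing the addition in float64 and allocating a HeapNumber, when that overflows. A minimal sketch of the overflow-checked fast path, assuming a GCC/Clang __builtin_add_overflow and ignoring Smi tagging:

    #include <cstdint>

    // Returns true and writes the incremented value when no overflow
    // occurred; callers would otherwise fall back to a heap-allocated
    // double, as the do_finc path above does.
    bool TrySmiIncrement(intptr_t value, intptr_t* result) {
      return !__builtin_add_overflow(value, intptr_t{1}, result);
    }
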
| 8378 | 8511 |
| 8379 Node* CodeStubAssembler::NumberDec(Node* value) { | 8512 Node* CodeStubAssembler::NumberDec(Node* value) { |
| 8513 CSA_SLOW_ASSERT(this, IsNumber(value)); | |
| 8514 | |
| 8380 VARIABLE(var_result, MachineRepresentation::kTagged); | 8515 VARIABLE(var_result, MachineRepresentation::kTagged); |
| 8381 VARIABLE(var_fdec_value, MachineRepresentation::kFloat64); | 8516 VARIABLE(var_fdec_value, MachineRepresentation::kFloat64); |
| 8382 Label if_issmi(this), if_isnotsmi(this), do_fdec(this), end(this); | 8517 Label if_issmi(this), if_isnotsmi(this), do_fdec(this), end(this); |
| 8383 Branch(TaggedIsSmi(value), &if_issmi, &if_isnotsmi); | 8518 Branch(TaggedIsSmi(value), &if_issmi, &if_isnotsmi); |
| 8384 | 8519 |
| 8385 BIND(&if_issmi); | 8520 BIND(&if_issmi); |
| 8386 { | 8521 { |
| 8387 // Try fast Smi subtraction first. | 8522 // Try fast Smi subtraction first. |
| 8388 Node* one = SmiConstant(Smi::FromInt(1)); | 8523 Node* one = SmiConstant(Smi::FromInt(1)); |
| 8389 Node* pair = IntPtrSubWithOverflow(BitcastTaggedToWord(value), | 8524 Node* pair = IntPtrSubWithOverflow(BitcastTaggedToWord(value), |
| (...skipping 406 matching lines...) | |
| 8796 Load(MachineType::Uint8(), | 8931 Load(MachineType::Uint8(), |
| 8797 ExternalConstant( | 8932 ExternalConstant( |
| 8798 ExternalReference::promise_hook_or_debug_is_active_address( | 8933 ExternalReference::promise_hook_or_debug_is_active_address( |
| 8799 isolate()))); | 8934 isolate()))); |
| 8800 return Word32NotEqual(promise_hook_or_debug_is_active, Int32Constant(0)); | 8935 return Word32NotEqual(promise_hook_or_debug_is_active, Int32Constant(0)); |
| 8801 } | 8936 } |
| 8802 | 8937 |
| 8803 Node* CodeStubAssembler::AllocateFunctionWithMapAndContext(Node* map, | 8938 Node* CodeStubAssembler::AllocateFunctionWithMapAndContext(Node* map, |
| 8804 Node* shared_info, | 8939 Node* shared_info, |
| 8805 Node* context) { | 8940 Node* context) { |
| 8941 CSA_SLOW_ASSERT(this, IsMap(map)); | |
| 8942 | |
| 8806 Node* const code = BitcastTaggedToWord( | 8943 Node* const code = BitcastTaggedToWord( |
| 8807 LoadObjectField(shared_info, SharedFunctionInfo::kCodeOffset)); | 8944 LoadObjectField(shared_info, SharedFunctionInfo::kCodeOffset)); |
| 8808 Node* const code_entry = | 8945 Node* const code_entry = |
| 8809 IntPtrAdd(code, IntPtrConstant(Code::kHeaderSize - kHeapObjectTag)); | 8946 IntPtrAdd(code, IntPtrConstant(Code::kHeaderSize - kHeapObjectTag)); |
| 8810 | 8947 |
| 8811 Node* const fun = Allocate(JSFunction::kSize); | 8948 Node* const fun = Allocate(JSFunction::kSize); |
| 8812 StoreMapNoWriteBarrier(fun, map); | 8949 StoreMapNoWriteBarrier(fun, map); |
| 8813 StoreObjectFieldRoot(fun, JSObject::kPropertiesOffset, | 8950 StoreObjectFieldRoot(fun, JSObject::kPropertiesOffset, |
| 8814 Heap::kEmptyFixedArrayRootIndex); | 8951 Heap::kEmptyFixedArrayRootIndex); |
| 8815 StoreObjectFieldRoot(fun, JSObject::kElementsOffset, | 8952 StoreObjectFieldRoot(fun, JSObject::kElementsOffset, |
| (...skipping 66 matching lines...) | |
| 8882 formatted.c_str(), TENURED); | 9019 formatted.c_str(), TENURED); |
| 8883 CallRuntime(Runtime::kGlobalPrint, NoContextConstant(), | 9020 CallRuntime(Runtime::kGlobalPrint, NoContextConstant(), |
| 8884 HeapConstant(string)); | 9021 HeapConstant(string)); |
| 8885 } | 9022 } |
| 8886 CallRuntime(Runtime::kDebugPrint, NoContextConstant(), tagged_value); | 9023 CallRuntime(Runtime::kDebugPrint, NoContextConstant(), tagged_value); |
| 8887 #endif | 9024 #endif |
| 8888 } | 9025 } |
| 8889 | 9026 |
| 8890 } // namespace internal | 9027 } // namespace internal |
| 8891 } // namespace v8 | 9028 } // namespace v8 |