OLD | NEW |
---|---|
1 // Copyright 2016 the V8 project authors. All rights reserved. | 1 // Copyright 2016 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 #include "src/code-stub-assembler.h" | 4 #include "src/code-stub-assembler.h" |
5 #include "src/code-factory.h" | 5 #include "src/code-factory.h" |
6 #include "src/frames-inl.h" | 6 #include "src/frames-inl.h" |
7 #include "src/frames.h" | 7 #include "src/frames.h" |
8 | 8 |
9 namespace v8 { | 9 namespace v8 { |
10 namespace internal { | 10 namespace internal { |
(...skipping 176 matching lines...) | |
187 Node* CodeStubAssembler::IntPtrRoundUpToPowerOfTwo32(Node* value) { | 187 Node* CodeStubAssembler::IntPtrRoundUpToPowerOfTwo32(Node* value) { |
188 Comment("IntPtrRoundUpToPowerOfTwo32"); | 188 Comment("IntPtrRoundUpToPowerOfTwo32"); |
189 CSA_ASSERT(this, UintPtrLessThanOrEqual(value, IntPtrConstant(0x80000000u))); | 189 CSA_ASSERT(this, UintPtrLessThanOrEqual(value, IntPtrConstant(0x80000000u))); |
190 value = IntPtrSub(value, IntPtrConstant(1)); | 190 value = IntPtrSub(value, IntPtrConstant(1)); |
191 for (int i = 1; i <= 16; i *= 2) { | 191 for (int i = 1; i <= 16; i *= 2) { |
192 value = WordOr(value, WordShr(value, IntPtrConstant(i))); | 192 value = WordOr(value, WordShr(value, IntPtrConstant(i))); |
193 } | 193 } |
194 return IntPtrAdd(value, IntPtrConstant(1)); | 194 return IntPtrAdd(value, IntPtrConstant(1)); |
195 } | 195 } |
196 | 196 |
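The loop above is the standard bit-smearing way to round up to the next power of two: subtract one, OR in progressively larger right shifts so every bit below the leading one becomes set, then add one. A standalone C++ sketch of the same arithmetic (plain C++ for illustration, not CSA code):

```cpp
// Plain C++ sketch of IntPtrRoundUpToPowerOfTwo32's bit smear;
// an illustration of the arithmetic only, not V8 code.
#include <cassert>
#include <cstdint>

uint32_t RoundUpToPowerOfTwo32(uint32_t value) {
  assert(value <= 0x80000000u);  // same precondition as the CSA_ASSERT above
  value -= 1;
  for (int shift = 1; shift <= 16; shift *= 2) {
    value |= value >> shift;  // smear the highest set bit downwards
  }
  return value + 1;
}
// RoundUpToPowerOfTwo32(3) == 4, RoundUpToPowerOfTwo32(8) == 8.
```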
197 Node* CodeStubAssembler::IsParameterMode(Node* value, ParameterMode mode) { | |
198 return (mode == SMI_PARAMETERS) ? TaggedIsSmi(value) : Int32Constant(1); | |
199 } | |
200 | |
197 Node* CodeStubAssembler::WordIsPowerOfTwo(Node* value) { | 201 Node* CodeStubAssembler::WordIsPowerOfTwo(Node* value) { |
198 // value && !(value & (value - 1)) | 202 // value && !(value & (value - 1)) |
199 return WordEqual( | 203 return WordEqual( |
200 Select( | 204 Select( |
201 WordEqual(value, IntPtrConstant(0)), | 205 WordEqual(value, IntPtrConstant(0)), |
202 [=] { return IntPtrConstant(1); }, | 206 [=] { return IntPtrConstant(1); }, |
203 [=] { return WordAnd(value, IntPtrSub(value, IntPtrConstant(1))); }, | 207 [=] { return WordAnd(value, IntPtrSub(value, IntPtrConstant(1))); }, |
204 MachineType::PointerRepresentation()), | 208 MachineType::PointerRepresentation()), |
205 IntPtrConstant(0)); | 209 IntPtrConstant(0)); |
206 } | 210 } |
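The Select only exists to make the zero case fail the outer comparison; the scalar form of the predicate, for reference (illustrative only, not V8 code):

```cpp
// A word is a power of two iff it is non-zero and clearing its lowest
// set bit (value & (value - 1)) leaves zero.
#include <cstdint>

bool WordIsPowerOfTwo(uintptr_t value) {
  return value != 0 && (value & (value - 1)) == 0;
}
```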
(...skipping 223 matching lines...) | |
430 | 434 |
431 Node* CodeStubAssembler::SmiTag(Node* value) { | 435 Node* CodeStubAssembler::SmiTag(Node* value) { |
432 int32_t constant_value; | 436 int32_t constant_value; |
433 if (ToInt32Constant(value, constant_value) && Smi::IsValid(constant_value)) { | 437 if (ToInt32Constant(value, constant_value) && Smi::IsValid(constant_value)) { |
434 return SmiConstant(Smi::FromInt(constant_value)); | 438 return SmiConstant(Smi::FromInt(constant_value)); |
435 } | 439 } |
436 return BitcastWordToTaggedSigned(WordShl(value, SmiShiftBitsConstant())); | 440 return BitcastWordToTaggedSigned(WordShl(value, SmiShiftBitsConstant())); |
437 } | 441 } |
438 | 442 |
439 Node* CodeStubAssembler::SmiUntag(Node* value) { | 443 Node* CodeStubAssembler::SmiUntag(Node* value) { |
444 CSA_SLOW_ASSERT(this, TaggedIsSmi(value)); | |
440 return WordSar(BitcastTaggedToWord(value), SmiShiftBitsConstant()); | 445 return WordSar(BitcastTaggedToWord(value), SmiShiftBitsConstant()); |
441 } | 446 } |
442 | 447 |
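SmiTag and SmiUntag are pure shifts: the payload is shifted up by SmiShiftBitsConstant() so the low tag bits stay zero, and an arithmetic shift right undoes it. A standalone sketch assuming the 64-bit layout, where the shift is 32 and the payload sits in the upper half of the tagged word (on 32-bit targets the shift would be 1):

```cpp
// Sketch of the shift-based Smi encoding; assumes the 64-bit layout.
// Not V8's actual code.
#include <cstdint>

constexpr int kSmiShiftBits = 32;  // 64-bit layout; 1 on 32-bit targets

int64_t SmiTagSketch(int32_t value) {
  // Shift through uint64_t to avoid signed-overflow concerns;
  // the low 32 bits stay zero, which is the Smi tag.
  return static_cast<int64_t>(static_cast<uint64_t>(value) << kSmiShiftBits);
}

int32_t SmiUntagSketch(int64_t tagged) {
  return static_cast<int32_t>(tagged >> kSmiShiftBits);  // arithmetic shift
}
// SmiUntagSketch(SmiTagSketch(-5)) == -5; the low bit of any tagged Smi
// is 0, which is what TaggedIsSmi-style checks test for.
```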
443 Node* CodeStubAssembler::SmiToWord32(Node* value) { | 448 Node* CodeStubAssembler::SmiToWord32(Node* value) { |
449 CSA_SLOW_ASSERT(this, TaggedIsSmi(value)); | |
444 Node* result = SmiUntag(value); | 450 Node* result = SmiUntag(value); |
445 return TruncateWordToWord32(result); | 451 return TruncateWordToWord32(result); |
446 } | 452 } |
447 | 453 |
448 Node* CodeStubAssembler::SmiToFloat64(Node* value) { | 454 Node* CodeStubAssembler::SmiToFloat64(Node* value) { |
455 CSA_SLOW_ASSERT(this, TaggedIsSmi(value)); | |
449 return ChangeInt32ToFloat64(SmiToWord32(value)); | 456 return ChangeInt32ToFloat64(SmiToWord32(value)); |
450 } | 457 } |
451 | 458 |
452 Node* CodeStubAssembler::SmiMax(Node* a, Node* b) { | 459 Node* CodeStubAssembler::SmiMax(Node* a, Node* b) { |
460 CSA_SLOW_ASSERT(this, TaggedIsSmi(a)); | |
461 CSA_SLOW_ASSERT(this, TaggedIsSmi(b)); | |
453 return SelectTaggedConstant(SmiLessThan(a, b), b, a); | 462 return SelectTaggedConstant(SmiLessThan(a, b), b, a); |
454 } | 463 } |
455 | 464 |
456 Node* CodeStubAssembler::SmiMin(Node* a, Node* b) { | 465 Node* CodeStubAssembler::SmiMin(Node* a, Node* b) { |
466 CSA_SLOW_ASSERT(this, TaggedIsSmi(a)); | |
467 CSA_SLOW_ASSERT(this, TaggedIsSmi(b)); | |
457 return SelectTaggedConstant(SmiLessThan(a, b), a, b); | 468 return SelectTaggedConstant(SmiLessThan(a, b), a, b); |
458 } | 469 } |
459 | 470 |
460 Node* CodeStubAssembler::SmiMod(Node* a, Node* b) { | 471 Node* CodeStubAssembler::SmiMod(Node* a, Node* b) { |
472 CSA_SLOW_ASSERT(this, TaggedIsSmi(a)); | |
473 CSA_SLOW_ASSERT(this, TaggedIsSmi(b)); | |
461 VARIABLE(var_result, MachineRepresentation::kTagged); | 474 VARIABLE(var_result, MachineRepresentation::kTagged); |
462 Label return_result(this, &var_result), | 475 Label return_result(this, &var_result), |
463 return_minuszero(this, Label::kDeferred), | 476 return_minuszero(this, Label::kDeferred), |
464 return_nan(this, Label::kDeferred); | 477 return_nan(this, Label::kDeferred); |
465 | 478 |
466 // Untag {a} and {b}. | 479 // Untag {a} and {b}. |
467 a = SmiToWord32(a); | 480 a = SmiToWord32(a); |
468 b = SmiToWord32(b); | 481 b = SmiToWord32(b); |
469 | 482 |
470 // Return NaN if {b} is zero. | 483 // Return NaN if {b} is zero. |
(...skipping 39 matching lines...) | |
510 | 523 |
511 BIND(&return_minuszero); | 524 BIND(&return_minuszero); |
512 var_result.Bind(MinusZeroConstant()); | 525 var_result.Bind(MinusZeroConstant()); |
513 Goto(&return_result); | 526 Goto(&return_result); |
514 | 527 |
515 BIND(&return_nan); | 528 BIND(&return_nan); |
516 var_result.Bind(NanConstant()); | 529 var_result.Bind(NanConstant()); |
517 Goto(&return_result); | 530 Goto(&return_result); |
518 | 531 |
519 BIND(&return_result); | 532 BIND(&return_result); |
533 CSA_SLOW_ASSERT(this, IsNumber(var_result.value())); | |
520 return var_result.value(); | 534 return var_result.value(); |
521 } | 535 } |
522 | 536 |
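For reference, the cases SmiMod handles match JavaScript's % on small integers: NaN for a zero divisor, a remainder that keeps the sign of the dividend, and -0 when a negative dividend divides evenly. A standalone sketch of those semantics (illustrative only, not V8 code):

```cpp
// Illustrative-only sketch of the % semantics handled above.
#include <cstdint>
#include <limits>

double JsModuloSketch(int32_t a, int32_t b) {
  if (b == 0) return std::numeric_limits<double>::quiet_NaN();
  // Compute in 64 bits so INT32_MIN % -1 cannot overflow.
  int64_t remainder = static_cast<int64_t>(a) % b;
  if (remainder == 0 && a < 0) return -0.0;  // the return_minuszero path
  return static_cast<double>(remainder);     // C++ % truncates like JS
}
```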
523 Node* CodeStubAssembler::SmiMul(Node* a, Node* b) { | 537 Node* CodeStubAssembler::SmiMul(Node* a, Node* b) { |
538 CSA_SLOW_ASSERT(this, TaggedIsSmi(a)); | |
539 CSA_SLOW_ASSERT(this, TaggedIsSmi(b)); | |
524 VARIABLE(var_result, MachineRepresentation::kTagged); | 540 VARIABLE(var_result, MachineRepresentation::kTagged); |
525 VARIABLE(var_lhs_float64, MachineRepresentation::kFloat64); | 541 VARIABLE(var_lhs_float64, MachineRepresentation::kFloat64); |
526 VARIABLE(var_rhs_float64, MachineRepresentation::kFloat64); | 542 VARIABLE(var_rhs_float64, MachineRepresentation::kFloat64); |
527 Label return_result(this, &var_result); | 543 Label return_result(this, &var_result); |
528 | 544 |
529 // Both {a} and {b} are Smis. Convert them to integers and multiply. | 545 // Both {a} and {b} are Smis. Convert them to integers and multiply. |
530 Node* lhs32 = SmiToWord32(a); | 546 Node* lhs32 = SmiToWord32(a); |
531 Node* rhs32 = SmiToWord32(b); | 547 Node* rhs32 = SmiToWord32(b); |
532 Node* pair = Int32MulWithOverflow(lhs32, rhs32); | 548 Node* pair = Int32MulWithOverflow(lhs32, rhs32); |
533 | 549 |
(...skipping 37 matching lines...) | |
571 { | 587 { |
572 var_lhs_float64.Bind(SmiToFloat64(a)); | 588 var_lhs_float64.Bind(SmiToFloat64(a)); |
573 var_rhs_float64.Bind(SmiToFloat64(b)); | 589 var_rhs_float64.Bind(SmiToFloat64(b)); |
574 Node* value = Float64Mul(var_lhs_float64.value(), var_rhs_float64.value()); | 590 Node* value = Float64Mul(var_lhs_float64.value(), var_rhs_float64.value()); |
575 Node* result = AllocateHeapNumberWithValue(value); | 591 Node* result = AllocateHeapNumberWithValue(value); |
576 var_result.Bind(result); | 592 var_result.Bind(result); |
577 Goto(&return_result); | 593 Goto(&return_result); |
578 } | 594 } |
579 | 595 |
580 BIND(&return_result); | 596 BIND(&return_result); |
597 CSA_SLOW_ASSERT(this, IsNumber(var_result.value())); | |
581 return var_result.value(); | 598 return var_result.value(); |
582 } | 599 } |
583 | 600 |
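The shape of SmiMul is: multiply with an overflow check, return a Smi when the 32-bit result fits and is not -0, otherwise redo the multiply in double and allocate a heap number. A standalone sketch of that decision, with the JS Number result modelled as a double (illustrative only, not V8 code):

```cpp
// Sketch of SmiMul's overflow and -0 handling. Illustrative only.
#include <cstdint>

double SmiMulSketch(int32_t a, int32_t b) {
  int64_t product = static_cast<int64_t>(a) * b;
  bool fits_int32 = product >= INT32_MIN && product <= INT32_MAX;
  bool is_minus_zero = product == 0 && (a < 0 || b < 0);  // e.g. -3 * 0
  if (fits_int32 && !is_minus_zero) {
    return static_cast<double>(product);  // would be returned as a Smi
  }
  return static_cast<double>(a) * static_cast<double>(b);  // heap-number path
}
```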
584 Node* CodeStubAssembler::TrySmiDiv(Node* dividend, Node* divisor, | 601 Node* CodeStubAssembler::TrySmiDiv(Node* dividend, Node* divisor, |
585 Label* bailout) { | 602 Label* bailout) { |
603 CSA_SLOW_ASSERT(this, TaggedIsSmi(dividend)); | |
604 CSA_SLOW_ASSERT(this, TaggedIsSmi(divisor)); | |
605 | |
586 // Both {a} and {b} are Smis. Bailout to floating point division if {divisor} | 606 // Both {a} and {b} are Smis. Bailout to floating point division if {divisor} |
587 // is zero. | 607 // is zero. |
588 GotoIf(WordEqual(divisor, SmiConstant(0)), bailout); | 608 GotoIf(WordEqual(divisor, SmiConstant(0)), bailout); |
589 | 609 |
590 // Do floating point division if {dividend} is zero and {divisor} is | 610 // Do floating point division if {dividend} is zero and {divisor} is |
591 // negative. | 611 // negative. |
592 Label dividend_is_zero(this), dividend_is_not_zero(this); | 612 Label dividend_is_zero(this), dividend_is_not_zero(this); |
593 Branch(WordEqual(dividend, SmiConstant(0)), ÷nd_is_zero, | 613 Branch(WordEqual(dividend, SmiConstant(0)), ÷nd_is_zero, |
594 ÷nd_is_not_zero); | 614 ÷nd_is_not_zero); |
595 | 615 |
(...skipping 57 matching lines...) | |
653 } | 673 } |
654 | 674 |
655 Node* CodeStubAssembler::WordIsWordAligned(Node* word) { | 675 Node* CodeStubAssembler::WordIsWordAligned(Node* word) { |
656 return WordEqual(IntPtrConstant(0), | 676 return WordEqual(IntPtrConstant(0), |
657 WordAnd(word, IntPtrConstant((1 << kPointerSizeLog2) - 1))); | 677 WordAnd(word, IntPtrConstant((1 << kPointerSizeLog2) - 1))); |
658 } | 678 } |
659 | 679 |
660 void CodeStubAssembler::BranchIfPrototypesHaveNoElements( | 680 void CodeStubAssembler::BranchIfPrototypesHaveNoElements( |
661 Node* receiver_map, Label* definitely_no_elements, | 681 Node* receiver_map, Label* definitely_no_elements, |
662 Label* possibly_elements) { | 682 Label* possibly_elements) { |
683 CSA_SLOW_ASSERT(this, IsMap(receiver_map)); | |
663 VARIABLE(var_map, MachineRepresentation::kTagged, receiver_map); | 684 VARIABLE(var_map, MachineRepresentation::kTagged, receiver_map); |
664 Label loop_body(this, &var_map); | 685 Label loop_body(this, &var_map); |
665 Node* empty_elements = LoadRoot(Heap::kEmptyFixedArrayRootIndex); | 686 Node* empty_elements = LoadRoot(Heap::kEmptyFixedArrayRootIndex); |
666 Goto(&loop_body); | 687 Goto(&loop_body); |
667 | 688 |
668 BIND(&loop_body); | 689 BIND(&loop_body); |
669 { | 690 { |
670 Node* map = var_map.value(); | 691 Node* map = var_map.value(); |
671 Node* prototype = LoadMapPrototype(map); | 692 Node* prototype = LoadMapPrototype(map); |
672 GotoIf(WordEqual(prototype, NullConstant()), definitely_no_elements); | 693 GotoIf(WordEqual(prototype, NullConstant()), definitely_no_elements); |
(...skipping 297 matching lines...) | |
970 return Load(rep, frame_pointer, IntPtrConstant(offset)); | 991 return Load(rep, frame_pointer, IntPtrConstant(offset)); |
971 } | 992 } |
972 | 993 |
973 Node* CodeStubAssembler::LoadBufferObject(Node* buffer, int offset, | 994 Node* CodeStubAssembler::LoadBufferObject(Node* buffer, int offset, |
974 MachineType rep) { | 995 MachineType rep) { |
975 return Load(rep, buffer, IntPtrConstant(offset)); | 996 return Load(rep, buffer, IntPtrConstant(offset)); |
976 } | 997 } |
977 | 998 |
978 Node* CodeStubAssembler::LoadObjectField(Node* object, int offset, | 999 Node* CodeStubAssembler::LoadObjectField(Node* object, int offset, |
979 MachineType rep) { | 1000 MachineType rep) { |
1001 CSA_SLOW_ASSERT(this, TaggedIsNotSmi(object)); | |
980 return Load(rep, object, IntPtrConstant(offset - kHeapObjectTag)); | 1002 return Load(rep, object, IntPtrConstant(offset - kHeapObjectTag)); |
981 } | 1003 } |
982 | 1004 |
983 Node* CodeStubAssembler::LoadObjectField(Node* object, Node* offset, | 1005 Node* CodeStubAssembler::LoadObjectField(Node* object, Node* offset, |
984 MachineType rep) { | 1006 MachineType rep) { |
1007 CSA_SLOW_ASSERT(this, TaggedIsNotSmi(object)); | |
985 return Load(rep, object, IntPtrSub(offset, IntPtrConstant(kHeapObjectTag))); | 1008 return Load(rep, object, IntPtrSub(offset, IntPtrConstant(kHeapObjectTag))); |
986 } | 1009 } |
987 | 1010 |
988 Node* CodeStubAssembler::LoadAndUntagObjectField(Node* object, int offset) { | 1011 Node* CodeStubAssembler::LoadAndUntagObjectField(Node* object, int offset) { |
989 if (Is64()) { | 1012 if (Is64()) { |
990 #if V8_TARGET_LITTLE_ENDIAN | 1013 #if V8_TARGET_LITTLE_ENDIAN |
991 offset += kPointerSize / 2; | 1014 offset += kPointerSize / 2; |
992 #endif | 1015 #endif |
993 return ChangeInt32ToInt64( | 1016 return ChangeInt32ToInt64( |
994 LoadObjectField(object, offset, MachineType::Int32())); | 1017 LoadObjectField(object, offset, MachineType::Int32())); |
(...skipping 56 matching lines...) | |
1051 return StoreNoWriteBarrier(MachineRepresentation::kWord32, base, | 1074 return StoreNoWriteBarrier(MachineRepresentation::kWord32, base, |
1052 IntPtrConstant(payload_offset), | 1075 IntPtrConstant(payload_offset), |
1053 TruncateInt64ToInt32(value)); | 1076 TruncateInt64ToInt32(value)); |
1054 } else { | 1077 } else { |
1055 return StoreNoWriteBarrier(MachineRepresentation::kTaggedSigned, base, | 1078 return StoreNoWriteBarrier(MachineRepresentation::kTaggedSigned, base, |
1056 IntPtrConstant(offset), SmiTag(value)); | 1079 IntPtrConstant(offset), SmiTag(value)); |
1057 } | 1080 } |
1058 } | 1081 } |
1059 | 1082 |
1060 Node* CodeStubAssembler::LoadHeapNumberValue(Node* object) { | 1083 Node* CodeStubAssembler::LoadHeapNumberValue(Node* object) { |
1084 CSA_SLOW_ASSERT(this, IsHeapNumber(object)); | |
1061 return LoadObjectField(object, HeapNumber::kValueOffset, | 1085 return LoadObjectField(object, HeapNumber::kValueOffset, |
1062 MachineType::Float64()); | 1086 MachineType::Float64()); |
1063 } | 1087 } |
1064 | 1088 |
1065 Node* CodeStubAssembler::LoadMap(Node* object) { | 1089 Node* CodeStubAssembler::LoadMap(Node* object) { |
1090 CSA_SLOW_ASSERT(this, TaggedIsNotSmi(object)); | |
1066 return LoadObjectField(object, HeapObject::kMapOffset); | 1091 return LoadObjectField(object, HeapObject::kMapOffset); |
1067 } | 1092 } |
1068 | 1093 |
1069 Node* CodeStubAssembler::LoadInstanceType(Node* object) { | 1094 Node* CodeStubAssembler::LoadInstanceType(Node* object) { |
1070 return LoadMapInstanceType(LoadMap(object)); | 1095 return LoadMapInstanceType(LoadMap(object)); |
1071 } | 1096 } |
1072 | 1097 |
1073 Node* CodeStubAssembler::HasInstanceType(Node* object, | 1098 Node* CodeStubAssembler::HasInstanceType(Node* object, |
1074 InstanceType instance_type) { | 1099 InstanceType instance_type) { |
1075 return Word32Equal(LoadInstanceType(object), Int32Constant(instance_type)); | 1100 return Word32Equal(LoadInstanceType(object), Int32Constant(instance_type)); |
(...skipping 34 matching lines...) | |
1110 CSA_SLOW_ASSERT(this, IsMap(map)); | 1135 CSA_SLOW_ASSERT(this, IsMap(map)); |
1111 return LoadObjectField(map, Map::kBitField2Offset, MachineType::Uint8()); | 1136 return LoadObjectField(map, Map::kBitField2Offset, MachineType::Uint8()); |
1112 } | 1137 } |
1113 | 1138 |
1114 Node* CodeStubAssembler::LoadMapBitField3(Node* map) { | 1139 Node* CodeStubAssembler::LoadMapBitField3(Node* map) { |
1115 CSA_SLOW_ASSERT(this, IsMap(map)); | 1140 CSA_SLOW_ASSERT(this, IsMap(map)); |
1116 return LoadObjectField(map, Map::kBitField3Offset, MachineType::Uint32()); | 1141 return LoadObjectField(map, Map::kBitField3Offset, MachineType::Uint32()); |
1117 } | 1142 } |
1118 | 1143 |
1119 Node* CodeStubAssembler::LoadMapInstanceType(Node* map) { | 1144 Node* CodeStubAssembler::LoadMapInstanceType(Node* map) { |
1145 CSA_SLOW_ASSERT(this, IsMap(map)); | |
1120 return LoadObjectField(map, Map::kInstanceTypeOffset, MachineType::Uint8()); | 1146 return LoadObjectField(map, Map::kInstanceTypeOffset, MachineType::Uint8()); |
1121 } | 1147 } |
1122 | 1148 |
1123 Node* CodeStubAssembler::LoadMapElementsKind(Node* map) { | 1149 Node* CodeStubAssembler::LoadMapElementsKind(Node* map) { |
1124 CSA_SLOW_ASSERT(this, IsMap(map)); | 1150 CSA_SLOW_ASSERT(this, IsMap(map)); |
1125 Node* bit_field2 = LoadMapBitField2(map); | 1151 Node* bit_field2 = LoadMapBitField2(map); |
1126 return DecodeWord32<Map::ElementsKindBits>(bit_field2); | 1152 return DecodeWord32<Map::ElementsKindBits>(bit_field2); |
1127 } | 1153 } |
1128 | 1154 |
1129 Node* CodeStubAssembler::LoadMapDescriptors(Node* map) { | 1155 Node* CodeStubAssembler::LoadMapDescriptors(Node* map) { |
(...skipping 63 matching lines...) | |
1193 result.Bind( | 1219 result.Bind( |
1194 LoadObjectField(result.value(), Map::kConstructorOrBackPointerOffset)); | 1220 LoadObjectField(result.value(), Map::kConstructorOrBackPointerOffset)); |
1195 Goto(&loop); | 1221 Goto(&loop); |
1196 } | 1222 } |
1197 BIND(&done); | 1223 BIND(&done); |
1198 return result.value(); | 1224 return result.value(); |
1199 } | 1225 } |
1200 | 1226 |
1201 Node* CodeStubAssembler::LoadSharedFunctionInfoSpecialField( | 1227 Node* CodeStubAssembler::LoadSharedFunctionInfoSpecialField( |
1202 Node* shared, int offset, ParameterMode mode) { | 1228 Node* shared, int offset, ParameterMode mode) { |
1229 CSA_SLOW_ASSERT(this, HasInstanceType(shared, SHARED_FUNCTION_INFO_TYPE)); | |
1203 if (Is64()) { | 1230 if (Is64()) { |
1204 Node* result = LoadObjectField(shared, offset, MachineType::Int32()); | 1231 Node* result = LoadObjectField(shared, offset, MachineType::Int32()); |
1205 if (mode == SMI_PARAMETERS) { | 1232 if (mode == SMI_PARAMETERS) { |
1206 result = SmiTag(result); | 1233 result = SmiTag(result); |
1207 } else { | 1234 } else { |
1208 result = ChangeUint32ToWord(result); | 1235 result = ChangeUint32ToWord(result); |
1209 } | 1236 } |
1210 return result; | 1237 return result; |
1211 } else { | 1238 } else { |
1212 Node* result = LoadObjectField(shared, offset); | 1239 Node* result = LoadObjectField(shared, offset); |
(...skipping 25 matching lines...) | |
1238 return LoadObjectField(object, String::kLengthOffset); | 1265 return LoadObjectField(object, String::kLengthOffset); |
1239 } | 1266 } |
1240 | 1267 |
1241 Node* CodeStubAssembler::LoadJSValueValue(Node* object) { | 1268 Node* CodeStubAssembler::LoadJSValueValue(Node* object) { |
1242 CSA_ASSERT(this, IsJSValue(object)); | 1269 CSA_ASSERT(this, IsJSValue(object)); |
1243 return LoadObjectField(object, JSValue::kValueOffset); | 1270 return LoadObjectField(object, JSValue::kValueOffset); |
1244 } | 1271 } |
1245 | 1272 |
1246 Node* CodeStubAssembler::LoadWeakCellValueUnchecked(Node* weak_cell) { | 1273 Node* CodeStubAssembler::LoadWeakCellValueUnchecked(Node* weak_cell) { |
1247 // TODO(ishell): fix callers. | 1274 // TODO(ishell): fix callers. |
1275 CSA_SLOW_ASSERT(this, IsWeakCell(weak_cell)); | |
1248 return LoadObjectField(weak_cell, WeakCell::kValueOffset); | 1276 return LoadObjectField(weak_cell, WeakCell::kValueOffset); |
1249 } | 1277 } |
1250 | 1278 |
1251 Node* CodeStubAssembler::LoadWeakCellValue(Node* weak_cell, Label* if_cleared) { | 1279 Node* CodeStubAssembler::LoadWeakCellValue(Node* weak_cell, Label* if_cleared) { |
1252 CSA_ASSERT(this, IsWeakCell(weak_cell)); | 1280 CSA_ASSERT(this, IsWeakCell(weak_cell)); |
1253 Node* value = LoadWeakCellValueUnchecked(weak_cell); | 1281 Node* value = LoadWeakCellValueUnchecked(weak_cell); |
1254 if (if_cleared != nullptr) { | 1282 if (if_cleared != nullptr) { |
1255 GotoIf(WordEqual(value, IntPtrConstant(0)), if_cleared); | 1283 GotoIf(WordEqual(value, IntPtrConstant(0)), if_cleared); |
1256 } | 1284 } |
1257 return value; | 1285 return value; |
1258 } | 1286 } |
1259 | 1287 |
1260 Node* CodeStubAssembler::LoadFixedArrayElement(Node* object, Node* index_node, | 1288 Node* CodeStubAssembler::LoadFixedArrayElement(Node* object, Node* index_node, |
1261 int additional_offset, | 1289 int additional_offset, |
1262 ParameterMode parameter_mode) { | 1290 ParameterMode parameter_mode) { |
1291 CSA_SLOW_ASSERT(this, IsFixedArray(object)); | |
1263 int32_t header_size = | 1292 int32_t header_size = |
1264 FixedArray::kHeaderSize + additional_offset - kHeapObjectTag; | 1293 FixedArray::kHeaderSize + additional_offset - kHeapObjectTag; |
1265 Node* offset = ElementOffsetFromIndex(index_node, FAST_HOLEY_ELEMENTS, | 1294 Node* offset = ElementOffsetFromIndex(index_node, FAST_HOLEY_ELEMENTS, |
1266 parameter_mode, header_size); | 1295 parameter_mode, header_size); |
1267 return Load(MachineType::AnyTagged(), object, offset); | 1296 return Load(MachineType::AnyTagged(), object, offset); |
1268 } | 1297 } |
1269 | 1298 |
1270 Node* CodeStubAssembler::LoadFixedTypedArrayElement( | 1299 Node* CodeStubAssembler::LoadFixedTypedArrayElement( |
1271 Node* data_pointer, Node* index_node, ElementsKind elements_kind, | 1300 Node* data_pointer, Node* index_node, ElementsKind elements_kind, |
1272 ParameterMode parameter_mode) { | 1301 ParameterMode parameter_mode) { |
(...skipping 54 matching lines...) | |
1327 return AllocateHeapNumberWithValue(value); | 1356 return AllocateHeapNumberWithValue(value); |
1328 default: | 1357 default: |
1329 UNREACHABLE(); | 1358 UNREACHABLE(); |
1330 return nullptr; | 1359 return nullptr; |
1331 } | 1360 } |
1332 } | 1361 } |
1333 | 1362 |
1334 Node* CodeStubAssembler::LoadAndUntagToWord32FixedArrayElement( | 1363 Node* CodeStubAssembler::LoadAndUntagToWord32FixedArrayElement( |
1335 Node* object, Node* index_node, int additional_offset, | 1364 Node* object, Node* index_node, int additional_offset, |
1336 ParameterMode parameter_mode) { | 1365 ParameterMode parameter_mode) { |
1366 CSA_SLOW_ASSERT(this, IsFixedArray(object)); | |
1367 CSA_SLOW_ASSERT(this, IsParameterMode(index_node, parameter_mode)); | |
1337 int32_t header_size = | 1368 int32_t header_size = |
1338 FixedArray::kHeaderSize + additional_offset - kHeapObjectTag; | 1369 FixedArray::kHeaderSize + additional_offset - kHeapObjectTag; |
1339 #if V8_TARGET_LITTLE_ENDIAN | 1370 #if V8_TARGET_LITTLE_ENDIAN |
1340 if (Is64()) { | 1371 if (Is64()) { |
1341 header_size += kPointerSize / 2; | 1372 header_size += kPointerSize / 2; |
1342 } | 1373 } |
1343 #endif | 1374 #endif |
1344 Node* offset = ElementOffsetFromIndex(index_node, FAST_HOLEY_ELEMENTS, | 1375 Node* offset = ElementOffsetFromIndex(index_node, FAST_HOLEY_ELEMENTS, |
1345 parameter_mode, header_size); | 1376 parameter_mode, header_size); |
1346 if (Is64()) { | 1377 if (Is64()) { |
1347 return Load(MachineType::Int32(), object, offset); | 1378 return Load(MachineType::Int32(), object, offset); |
1348 } else { | 1379 } else { |
1349 return SmiToWord32(Load(MachineType::AnyTagged(), object, offset)); | 1380 return SmiToWord32(Load(MachineType::AnyTagged(), object, offset)); |
1350 } | 1381 } |
1351 } | 1382 } |
1352 | 1383 |
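The +kPointerSize/2 adjustment above works because, with the 64-bit Smi layout, the 32-bit payload occupies the upper half of the tagged word, and a little-endian target stores that half at byte offset 4; the load can therefore read the already-untagged payload directly. A standalone sketch under that layout assumption (not V8 code):

```cpp
// Sketch of the half-word load above; assumes a 64-bit little-endian
// target with the Smi payload in the upper 32 bits.
#include <cstdint>
#include <cstring>

int32_t LoadUntaggedSmiPayload(const void* field_address) {
  int32_t payload;
  // +4 == kPointerSize / 2: skip the all-zero lower half of the Smi.
  std::memcpy(&payload, static_cast<const char*>(field_address) + 4,
              sizeof(payload));
  return payload;
}
```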
1353 Node* CodeStubAssembler::LoadFixedDoubleArrayElement( | 1384 Node* CodeStubAssembler::LoadFixedDoubleArrayElement( |
1354 Node* object, Node* index_node, MachineType machine_type, | 1385 Node* object, Node* index_node, MachineType machine_type, |
1355 int additional_offset, ParameterMode parameter_mode, Label* if_hole) { | 1386 int additional_offset, ParameterMode parameter_mode, Label* if_hole) { |
1387 CSA_SLOW_ASSERT(this, IsFixedArray(object)); | |
1388 CSA_SLOW_ASSERT(this, IsParameterMode(index_node, parameter_mode)); | |
1356 CSA_ASSERT(this, IsFixedDoubleArray(object)); | 1389 CSA_ASSERT(this, IsFixedDoubleArray(object)); |
1357 int32_t header_size = | 1390 int32_t header_size = |
1358 FixedDoubleArray::kHeaderSize + additional_offset - kHeapObjectTag; | 1391 FixedDoubleArray::kHeaderSize + additional_offset - kHeapObjectTag; |
1359 Node* offset = ElementOffsetFromIndex(index_node, FAST_HOLEY_DOUBLE_ELEMENTS, | 1392 Node* offset = ElementOffsetFromIndex(index_node, FAST_HOLEY_DOUBLE_ELEMENTS, |
1360 parameter_mode, header_size); | 1393 parameter_mode, header_size); |
1361 return LoadDoubleWithHoleCheck(object, offset, if_hole, machine_type); | 1394 return LoadDoubleWithHoleCheck(object, offset, if_hole, machine_type); |
1362 } | 1395 } |
1363 | 1396 |
1364 Node* CodeStubAssembler::LoadDoubleWithHoleCheck(Node* base, Node* offset, | 1397 Node* CodeStubAssembler::LoadDoubleWithHoleCheck(Node* base, Node* offset, |
1365 Label* if_hole, | 1398 Label* if_hole, |
(...skipping 14 matching lines...) | |
1380 } | 1413 } |
1381 } | 1414 } |
1382 if (machine_type.IsNone()) { | 1415 if (machine_type.IsNone()) { |
1383 // This means the actual value is not needed. | 1416 // This means the actual value is not needed. |
1384 return nullptr; | 1417 return nullptr; |
1385 } | 1418 } |
1386 return Load(machine_type, base, offset); | 1419 return Load(machine_type, base, offset); |
1387 } | 1420 } |
1388 | 1421 |
1389 Node* CodeStubAssembler::LoadContextElement(Node* context, int slot_index) { | 1422 Node* CodeStubAssembler::LoadContextElement(Node* context, int slot_index) { |
1423 CSA_SLOW_ASSERT(this, IsFixedArray(context)); | |
1390 int offset = Context::SlotOffset(slot_index); | 1424 int offset = Context::SlotOffset(slot_index); |
1391 return Load(MachineType::AnyTagged(), context, IntPtrConstant(offset)); | 1425 return Load(MachineType::AnyTagged(), context, IntPtrConstant(offset)); |
1392 } | 1426 } |
1393 | 1427 |
1394 Node* CodeStubAssembler::LoadContextElement(Node* context, Node* slot_index) { | 1428 Node* CodeStubAssembler::LoadContextElement(Node* context, Node* slot_index) { |
1429 CSA_SLOW_ASSERT(this, IsFixedArray(context)); | |
1395 Node* offset = | 1430 Node* offset = |
1396 IntPtrAdd(WordShl(slot_index, kPointerSizeLog2), | 1431 IntPtrAdd(WordShl(slot_index, kPointerSizeLog2), |
1397 IntPtrConstant(Context::kHeaderSize - kHeapObjectTag)); | 1432 IntPtrConstant(Context::kHeaderSize - kHeapObjectTag)); |
1398 return Load(MachineType::AnyTagged(), context, offset); | 1433 return Load(MachineType::AnyTagged(), context, offset); |
1399 } | 1434 } |
1400 | 1435 |
1401 Node* CodeStubAssembler::StoreContextElement(Node* context, int slot_index, | 1436 Node* CodeStubAssembler::StoreContextElement(Node* context, int slot_index, |
1402 Node* value) { | 1437 Node* value) { |
1438 CSA_SLOW_ASSERT(this, IsFixedArray(context)); | |
Camillo Bruni (2017/04/28 15:57:44): I've prepared an IsContext which will call HeapObj
jgruber (2017/05/03 11:11:50): Acknowledged.
| |
1403 int offset = Context::SlotOffset(slot_index); | 1439 int offset = Context::SlotOffset(slot_index); |
1404 return Store(context, IntPtrConstant(offset), value); | 1440 return Store(context, IntPtrConstant(offset), value); |
1405 } | 1441 } |
1406 | 1442 |
1407 Node* CodeStubAssembler::StoreContextElement(Node* context, Node* slot_index, | 1443 Node* CodeStubAssembler::StoreContextElement(Node* context, Node* slot_index, |
1408 Node* value) { | 1444 Node* value) { |
1445 CSA_SLOW_ASSERT(this, IsFixedArray(context)); | |
1409 Node* offset = | 1446 Node* offset = |
1410 IntPtrAdd(WordShl(slot_index, kPointerSizeLog2), | 1447 IntPtrAdd(WordShl(slot_index, kPointerSizeLog2), |
1411 IntPtrConstant(Context::kHeaderSize - kHeapObjectTag)); | 1448 IntPtrConstant(Context::kHeaderSize - kHeapObjectTag)); |
1412 return Store(context, offset, value); | 1449 return Store(context, offset, value); |
1413 } | 1450 } |
1414 | 1451 |
1415 Node* CodeStubAssembler::StoreContextElementNoWriteBarrier(Node* context, | 1452 Node* CodeStubAssembler::StoreContextElementNoWriteBarrier(Node* context, |
1416 int slot_index, | 1453 int slot_index, |
1417 Node* value) { | 1454 Node* value) { |
1455 CSA_SLOW_ASSERT(this, IsFixedArray(context)); | |
1418 int offset = Context::SlotOffset(slot_index); | 1456 int offset = Context::SlotOffset(slot_index); |
1419 return StoreNoWriteBarrier(MachineRepresentation::kTagged, context, | 1457 return StoreNoWriteBarrier(MachineRepresentation::kTagged, context, |
1420 IntPtrConstant(offset), value); | 1458 IntPtrConstant(offset), value); |
1421 } | 1459 } |
1422 | 1460 |
1423 Node* CodeStubAssembler::LoadNativeContext(Node* context) { | 1461 Node* CodeStubAssembler::LoadNativeContext(Node* context) { |
1462 CSA_SLOW_ASSERT(this, IsFixedArray(context)); | |
1424 return LoadContextElement(context, Context::NATIVE_CONTEXT_INDEX); | 1463 return LoadContextElement(context, Context::NATIVE_CONTEXT_INDEX); |
1425 } | 1464 } |
1426 | 1465 |
1427 Node* CodeStubAssembler::LoadJSArrayElementsMap(ElementsKind kind, | 1466 Node* CodeStubAssembler::LoadJSArrayElementsMap(ElementsKind kind, |
1428 Node* native_context) { | 1467 Node* native_context) { |
1429 CSA_ASSERT(this, IsNativeContext(native_context)); | 1468 CSA_ASSERT(this, IsNativeContext(native_context)); |
1430 return LoadContextElement(native_context, Context::ArrayMapIndex(kind)); | 1469 return LoadContextElement(native_context, Context::ArrayMapIndex(kind)); |
1431 } | 1470 } |
1432 | 1471 |
1433 Node* CodeStubAssembler::LoadJSFunctionPrototype(Node* function, | 1472 Node* CodeStubAssembler::LoadJSFunctionPrototype(Node* function, |
(...skipping 11 matching lines...) | |
1445 GotoIfNot(IsMap(proto_or_map), &done); | 1484 GotoIfNot(IsMap(proto_or_map), &done); |
1446 | 1485 |
1447 var_result.Bind(LoadMapPrototype(proto_or_map)); | 1486 var_result.Bind(LoadMapPrototype(proto_or_map)); |
1448 Goto(&done); | 1487 Goto(&done); |
1449 | 1488 |
1450 BIND(&done); | 1489 BIND(&done); |
1451 return var_result.value(); | 1490 return var_result.value(); |
1452 } | 1491 } |
1453 | 1492 |
1454 Node* CodeStubAssembler::StoreHeapNumberValue(Node* object, Node* value) { | 1493 Node* CodeStubAssembler::StoreHeapNumberValue(Node* object, Node* value) { |
1494 CSA_SLOW_ASSERT(this, IsHeapNumber(object)); | |
1455 return StoreObjectFieldNoWriteBarrier(object, HeapNumber::kValueOffset, value, | 1495 return StoreObjectFieldNoWriteBarrier(object, HeapNumber::kValueOffset, value, |
1456 MachineRepresentation::kFloat64); | 1496 MachineRepresentation::kFloat64); |
1457 } | 1497 } |
1458 | 1498 |
1459 Node* CodeStubAssembler::StoreObjectField( | 1499 Node* CodeStubAssembler::StoreObjectField( |
1460 Node* object, int offset, Node* value) { | 1500 Node* object, int offset, Node* value) { |
1501 CSA_SLOW_ASSERT(this, TaggedIsNotSmi(object)); | |
1461 DCHECK_NE(HeapObject::kMapOffset, offset); // Use StoreMap instead. | 1502 DCHECK_NE(HeapObject::kMapOffset, offset); // Use StoreMap instead. |
1462 return Store(object, IntPtrConstant(offset - kHeapObjectTag), value); | 1503 return Store(object, IntPtrConstant(offset - kHeapObjectTag), value); |
1463 } | 1504 } |
1464 | 1505 |
1465 Node* CodeStubAssembler::StoreObjectField(Node* object, Node* offset, | 1506 Node* CodeStubAssembler::StoreObjectField(Node* object, Node* offset, |
1466 Node* value) { | 1507 Node* value) { |
1508 CSA_SLOW_ASSERT(this, TaggedIsNotSmi(object)); | |
1467 int const_offset; | 1509 int const_offset; |
1468 if (ToInt32Constant(offset, const_offset)) { | 1510 if (ToInt32Constant(offset, const_offset)) { |
1469 return StoreObjectField(object, const_offset, value); | 1511 return StoreObjectField(object, const_offset, value); |
1470 } | 1512 } |
1471 return Store(object, IntPtrSub(offset, IntPtrConstant(kHeapObjectTag)), | 1513 return Store(object, IntPtrSub(offset, IntPtrConstant(kHeapObjectTag)), |
1472 value); | 1514 value); |
1473 } | 1515 } |
1474 | 1516 |
1475 Node* CodeStubAssembler::StoreObjectFieldNoWriteBarrier( | 1517 Node* CodeStubAssembler::StoreObjectFieldNoWriteBarrier( |
1476 Node* object, int offset, Node* value, MachineRepresentation rep) { | 1518 Node* object, int offset, Node* value, MachineRepresentation rep) { |
1519 CSA_SLOW_ASSERT(this, TaggedIsNotSmi(object)); | |
1477 return StoreNoWriteBarrier(rep, object, | 1520 return StoreNoWriteBarrier(rep, object, |
1478 IntPtrConstant(offset - kHeapObjectTag), value); | 1521 IntPtrConstant(offset - kHeapObjectTag), value); |
1479 } | 1522 } |
1480 | 1523 |
1481 Node* CodeStubAssembler::StoreObjectFieldNoWriteBarrier( | 1524 Node* CodeStubAssembler::StoreObjectFieldNoWriteBarrier( |
1482 Node* object, Node* offset, Node* value, MachineRepresentation rep) { | 1525 Node* object, Node* offset, Node* value, MachineRepresentation rep) { |
1526 CSA_SLOW_ASSERT(this, TaggedIsNotSmi(object)); | |
1483 int const_offset; | 1527 int const_offset; |
1484 if (ToInt32Constant(offset, const_offset)) { | 1528 if (ToInt32Constant(offset, const_offset)) { |
1485 return StoreObjectFieldNoWriteBarrier(object, const_offset, value, rep); | 1529 return StoreObjectFieldNoWriteBarrier(object, const_offset, value, rep); |
1486 } | 1530 } |
1487 return StoreNoWriteBarrier( | 1531 return StoreNoWriteBarrier( |
1488 rep, object, IntPtrSub(offset, IntPtrConstant(kHeapObjectTag)), value); | 1532 rep, object, IntPtrSub(offset, IntPtrConstant(kHeapObjectTag)), value); |
1489 } | 1533 } |
1490 | 1534 |
1491 Node* CodeStubAssembler::StoreMap(Node* object, Node* map) { | 1535 Node* CodeStubAssembler::StoreMap(Node* object, Node* map) { |
1536 CSA_SLOW_ASSERT(this, TaggedIsNotSmi(object)); | |
1492 CSA_SLOW_ASSERT(this, IsMap(map)); | 1537 CSA_SLOW_ASSERT(this, IsMap(map)); |
1493 return StoreWithMapWriteBarrier( | 1538 return StoreWithMapWriteBarrier( |
1494 object, IntPtrConstant(HeapObject::kMapOffset - kHeapObjectTag), map); | 1539 object, IntPtrConstant(HeapObject::kMapOffset - kHeapObjectTag), map); |
1495 } | 1540 } |
1496 | 1541 |
1497 Node* CodeStubAssembler::StoreMapNoWriteBarrier( | 1542 Node* CodeStubAssembler::StoreMapNoWriteBarrier( |
1498 Node* object, Heap::RootListIndex map_root_index) { | 1543 Node* object, Heap::RootListIndex map_root_index) { |
1499 return StoreMapNoWriteBarrier(object, LoadRoot(map_root_index)); | 1544 return StoreMapNoWriteBarrier(object, LoadRoot(map_root_index)); |
1500 } | 1545 } |
1501 | 1546 |
1502 Node* CodeStubAssembler::StoreMapNoWriteBarrier(Node* object, Node* map) { | 1547 Node* CodeStubAssembler::StoreMapNoWriteBarrier(Node* object, Node* map) { |
1548 CSA_SLOW_ASSERT(this, TaggedIsNotSmi(object)); | |
1503 CSA_SLOW_ASSERT(this, IsMap(map)); | 1549 CSA_SLOW_ASSERT(this, IsMap(map)); |
1504 return StoreNoWriteBarrier( | 1550 return StoreNoWriteBarrier( |
1505 MachineRepresentation::kTagged, object, | 1551 MachineRepresentation::kTagged, object, |
1506 IntPtrConstant(HeapObject::kMapOffset - kHeapObjectTag), map); | 1552 IntPtrConstant(HeapObject::kMapOffset - kHeapObjectTag), map); |
1507 } | 1553 } |
1508 | 1554 |
1509 Node* CodeStubAssembler::StoreObjectFieldRoot(Node* object, int offset, | 1555 Node* CodeStubAssembler::StoreObjectFieldRoot(Node* object, int offset, |
1510 Heap::RootListIndex root_index) { | 1556 Heap::RootListIndex root_index) { |
1511 if (Heap::RootIsImmortalImmovable(root_index)) { | 1557 if (Heap::RootIsImmortalImmovable(root_index)) { |
1512 return StoreObjectFieldNoWriteBarrier(object, offset, LoadRoot(root_index)); | 1558 return StoreObjectFieldNoWriteBarrier(object, offset, LoadRoot(root_index)); |
1513 } else { | 1559 } else { |
1514 return StoreObjectField(object, offset, LoadRoot(root_index)); | 1560 return StoreObjectField(object, offset, LoadRoot(root_index)); |
1515 } | 1561 } |
1516 } | 1562 } |
1517 | 1563 |
1518 Node* CodeStubAssembler::StoreFixedArrayElement(Node* object, Node* index_node, | 1564 Node* CodeStubAssembler::StoreFixedArrayElement(Node* object, Node* index_node, |
1519 Node* value, | 1565 Node* value, |
1520 WriteBarrierMode barrier_mode, | 1566 WriteBarrierMode barrier_mode, |
1521 int additional_offset, | 1567 int additional_offset, |
1522 ParameterMode parameter_mode) { | 1568 ParameterMode parameter_mode) { |
1569 CSA_SLOW_ASSERT(this, IsFixedArray(object)); | |
1570 CSA_SLOW_ASSERT(this, IsParameterMode(index_node, parameter_mode)); | |
1523 DCHECK(barrier_mode == SKIP_WRITE_BARRIER || | 1571 DCHECK(barrier_mode == SKIP_WRITE_BARRIER || |
1524 barrier_mode == UPDATE_WRITE_BARRIER); | 1572 barrier_mode == UPDATE_WRITE_BARRIER); |
1525 int header_size = | 1573 int header_size = |
1526 FixedArray::kHeaderSize + additional_offset - kHeapObjectTag; | 1574 FixedArray::kHeaderSize + additional_offset - kHeapObjectTag; |
1527 Node* offset = ElementOffsetFromIndex(index_node, FAST_HOLEY_ELEMENTS, | 1575 Node* offset = ElementOffsetFromIndex(index_node, FAST_HOLEY_ELEMENTS, |
1528 parameter_mode, header_size); | 1576 parameter_mode, header_size); |
1529 if (barrier_mode == SKIP_WRITE_BARRIER) { | 1577 if (barrier_mode == SKIP_WRITE_BARRIER) { |
1530 return StoreNoWriteBarrier(MachineRepresentation::kTagged, object, offset, | 1578 return StoreNoWriteBarrier(MachineRepresentation::kTagged, object, offset, |
1531 value); | 1579 value); |
1532 } else { | 1580 } else { |
1533 return Store(object, offset, value); | 1581 return Store(object, offset, value); |
1534 } | 1582 } |
1535 } | 1583 } |
1536 | 1584 |
1537 Node* CodeStubAssembler::StoreFixedDoubleArrayElement( | 1585 Node* CodeStubAssembler::StoreFixedDoubleArrayElement( |
1538 Node* object, Node* index_node, Node* value, ParameterMode parameter_mode) { | 1586 Node* object, Node* index_node, Node* value, ParameterMode parameter_mode) { |
1539 CSA_ASSERT(this, IsFixedDoubleArray(object)); | 1587 CSA_ASSERT(this, IsFixedDoubleArray(object)); |
1588 CSA_SLOW_ASSERT(this, IsParameterMode(index_node, parameter_mode)); | |
1540 Node* offset = | 1589 Node* offset = |
1541 ElementOffsetFromIndex(index_node, FAST_DOUBLE_ELEMENTS, parameter_mode, | 1590 ElementOffsetFromIndex(index_node, FAST_DOUBLE_ELEMENTS, parameter_mode, |
1542 FixedArray::kHeaderSize - kHeapObjectTag); | 1591 FixedArray::kHeaderSize - kHeapObjectTag); |
1543 MachineRepresentation rep = MachineRepresentation::kFloat64; | 1592 MachineRepresentation rep = MachineRepresentation::kFloat64; |
1544 return StoreNoWriteBarrier(rep, object, offset, value); | 1593 return StoreNoWriteBarrier(rep, object, offset, value); |
1545 } | 1594 } |
1546 | 1595 |
1547 Node* CodeStubAssembler::EnsureArrayPushable(Node* receiver, Label* bailout) { | 1596 Node* CodeStubAssembler::EnsureArrayPushable(Node* receiver, Label* bailout) { |
1548 // Disallow pushing onto prototypes. It might be the JSArray prototype. | 1597 // Disallow pushing onto prototypes. It might be the JSArray prototype. |
1549 // Disallow pushing onto non-extensible objects. | 1598 // Disallow pushing onto non-extensible objects. |
(...skipping 37 matching lines...) | |
1587 kind, capacity, new_capacity, mode, | 1636 kind, capacity, new_capacity, mode, |
1588 bailout)); | 1637 bailout)); |
1589 Goto(&fits); | 1638 Goto(&fits); |
1590 BIND(&fits); | 1639 BIND(&fits); |
1591 } | 1640 } |
1592 | 1641 |
1593 Node* CodeStubAssembler::BuildAppendJSArray(ElementsKind kind, Node* array, | 1642 Node* CodeStubAssembler::BuildAppendJSArray(ElementsKind kind, Node* array, |
1594 CodeStubArguments& args, | 1643 CodeStubArguments& args, |
1595 Variable& arg_index, | 1644 Variable& arg_index, |
1596 Label* bailout) { | 1645 Label* bailout) { |
1646 CSA_SLOW_ASSERT(this, IsJSArray(array)); | |
1597 Comment("BuildAppendJSArray: %s", ElementsKindToString(kind)); | 1647 Comment("BuildAppendJSArray: %s", ElementsKindToString(kind)); |
1598 Label pre_bailout(this); | 1648 Label pre_bailout(this); |
1599 Label success(this); | 1649 Label success(this); |
1600 VARIABLE(var_tagged_length, MachineRepresentation::kTagged); | 1650 VARIABLE(var_tagged_length, MachineRepresentation::kTagged); |
1601 ParameterMode mode = OptimalParameterMode(); | 1651 ParameterMode mode = OptimalParameterMode(); |
1602 VARIABLE(var_length, OptimalParameterRepresentation(), | 1652 VARIABLE(var_length, OptimalParameterRepresentation(), |
1603 TaggedToParameter(LoadJSArrayLength(array), mode)); | 1653 TaggedToParameter(LoadJSArrayLength(array), mode)); |
1604 VARIABLE(var_elements, MachineRepresentation::kTagged, LoadElements(array)); | 1654 VARIABLE(var_elements, MachineRepresentation::kTagged, LoadElements(array)); |
1605 | 1655 |
1606 // Resize the capacity of the fixed array if it doesn't fit. | 1656 // Resize the capacity of the fixed array if it doesn't fit. |
(...skipping 50 matching lines...) | |
1657 Float64SilenceNaN(double_value), mode); | 1707 Float64SilenceNaN(double_value), mode); |
1658 } else { | 1708 } else { |
1659 WriteBarrierMode barrier_mode = | 1709 WriteBarrierMode barrier_mode = |
1660 IsFastSmiElementsKind(kind) ? SKIP_WRITE_BARRIER : UPDATE_WRITE_BARRIER; | 1710 IsFastSmiElementsKind(kind) ? SKIP_WRITE_BARRIER : UPDATE_WRITE_BARRIER; |
1661 StoreFixedArrayElement(elements, index, value, barrier_mode, 0, mode); | 1711 StoreFixedArrayElement(elements, index, value, barrier_mode, 0, mode); |
1662 } | 1712 } |
1663 } | 1713 } |
1664 | 1714 |
1665 void CodeStubAssembler::BuildAppendJSArray(ElementsKind kind, Node* array, | 1715 void CodeStubAssembler::BuildAppendJSArray(ElementsKind kind, Node* array, |
1666 Node* value, Label* bailout) { | 1716 Node* value, Label* bailout) { |
1717 CSA_SLOW_ASSERT(this, IsJSArray(array)); | |
1667 Comment("BuildAppendJSArray: %s", ElementsKindToString(kind)); | 1718 Comment("BuildAppendJSArray: %s", ElementsKindToString(kind)); |
1668 ParameterMode mode = OptimalParameterMode(); | 1719 ParameterMode mode = OptimalParameterMode(); |
1669 VARIABLE(var_length, OptimalParameterRepresentation(), | 1720 VARIABLE(var_length, OptimalParameterRepresentation(), |
1670 TaggedToParameter(LoadJSArrayLength(array), mode)); | 1721 TaggedToParameter(LoadJSArrayLength(array), mode)); |
1671 VARIABLE(var_elements, MachineRepresentation::kTagged, LoadElements(array)); | 1722 VARIABLE(var_elements, MachineRepresentation::kTagged, LoadElements(array)); |
1672 | 1723 |
1673 // Resize the capacity of the fixed array if it doesn't fit. | 1724 // Resize the capacity of the fixed array if it doesn't fit. |
1674 Node* growth = IntPtrOrSmiConstant(1, mode); | 1725 Node* growth = IntPtrOrSmiConstant(1, mode); |
1675 PossiblyGrowElementsCapacity(mode, kind, array, var_length.value(), | 1726 PossiblyGrowElementsCapacity(mode, kind, array, var_length.value(), |
1676 &var_elements, growth, bailout); | 1727 &var_elements, growth, bailout); |
(...skipping 39 matching lines...) | |
1716 StoreObjectFieldNoWriteBarrier(result, SeqOneByteString::kHashFieldSlot, | 1767 StoreObjectFieldNoWriteBarrier(result, SeqOneByteString::kHashFieldSlot, |
1717 IntPtrConstant(String::kEmptyHashField), | 1768 IntPtrConstant(String::kEmptyHashField), |
1718 MachineType::PointerRepresentation()); | 1769 MachineType::PointerRepresentation()); |
1719 return result; | 1770 return result; |
1720 } | 1771 } |
1721 | 1772 |
1722 Node* CodeStubAssembler::AllocateSeqOneByteString(Node* context, Node* length, | 1773 Node* CodeStubAssembler::AllocateSeqOneByteString(Node* context, Node* length, |
1723 ParameterMode mode, | 1774 ParameterMode mode, |
1724 AllocationFlags flags) { | 1775 AllocationFlags flags) { |
1725 Comment("AllocateSeqOneByteString"); | 1776 Comment("AllocateSeqOneByteString"); |
1777 CSA_SLOW_ASSERT(this, IsFixedArray(context)); | |
1778 CSA_SLOW_ASSERT(this, IsParameterMode(length, mode)); | |
1726 VARIABLE(var_result, MachineRepresentation::kTagged); | 1779 VARIABLE(var_result, MachineRepresentation::kTagged); |
1727 | 1780 |
1728 // Compute the SeqOneByteString size and check if it fits into new space. | 1781 // Compute the SeqOneByteString size and check if it fits into new space. |
1729 Label if_lengthiszero(this), if_sizeissmall(this), | 1782 Label if_lengthiszero(this), if_sizeissmall(this), |
1730 if_notsizeissmall(this, Label::kDeferred), if_join(this); | 1783 if_notsizeissmall(this, Label::kDeferred), if_join(this); |
1731 GotoIf(WordEqual(length, IntPtrOrSmiConstant(0, mode)), &if_lengthiszero); | 1784 GotoIf(WordEqual(length, IntPtrOrSmiConstant(0, mode)), &if_lengthiszero); |
1732 | 1785 |
1733 Node* raw_size = GetArrayAllocationSize( | 1786 Node* raw_size = GetArrayAllocationSize( |
1734 length, UINT8_ELEMENTS, mode, | 1787 length, UINT8_ELEMENTS, mode, |
1735 SeqOneByteString::kHeaderSize + kObjectAlignmentMask); | 1788 SeqOneByteString::kHeaderSize + kObjectAlignmentMask); |
(...skipping 50 matching lines...) | |
1786 // Initialize both used and unused parts of hash field slot at once. | 1839 // Initialize both used and unused parts of hash field slot at once. |
1787 StoreObjectFieldNoWriteBarrier(result, SeqTwoByteString::kHashFieldSlot, | 1840 StoreObjectFieldNoWriteBarrier(result, SeqTwoByteString::kHashFieldSlot, |
1788 IntPtrConstant(String::kEmptyHashField), | 1841 IntPtrConstant(String::kEmptyHashField), |
1789 MachineType::PointerRepresentation()); | 1842 MachineType::PointerRepresentation()); |
1790 return result; | 1843 return result; |
1791 } | 1844 } |
1792 | 1845 |
1793 Node* CodeStubAssembler::AllocateSeqTwoByteString(Node* context, Node* length, | 1846 Node* CodeStubAssembler::AllocateSeqTwoByteString(Node* context, Node* length, |
1794 ParameterMode mode, | 1847 ParameterMode mode, |
1795 AllocationFlags flags) { | 1848 AllocationFlags flags) { |
1849 CSA_SLOW_ASSERT(this, IsFixedArray(context)); | |
1850 CSA_SLOW_ASSERT(this, IsParameterMode(length, mode)); | |
1796 Comment("AllocateSeqTwoByteString"); | 1851 Comment("AllocateSeqTwoByteString"); |
1797 VARIABLE(var_result, MachineRepresentation::kTagged); | 1852 VARIABLE(var_result, MachineRepresentation::kTagged); |
1798 | 1853 |
1799 // Compute the SeqTwoByteString size and check if it fits into new space. | 1854 // Compute the SeqTwoByteString size and check if it fits into new space. |
1800 Label if_lengthiszero(this), if_sizeissmall(this), | 1855 Label if_lengthiszero(this), if_sizeissmall(this), |
1801 if_notsizeissmall(this, Label::kDeferred), if_join(this); | 1856 if_notsizeissmall(this, Label::kDeferred), if_join(this); |
1802 GotoIf(WordEqual(length, IntPtrOrSmiConstant(0, mode)), &if_lengthiszero); | 1857 GotoIf(WordEqual(length, IntPtrOrSmiConstant(0, mode)), &if_lengthiszero); |
1803 | 1858 |
1804 Node* raw_size = GetArrayAllocationSize( | 1859 Node* raw_size = GetArrayAllocationSize( |
1805 length, UINT16_ELEMENTS, mode, | 1860 length, UINT16_ELEMENTS, mode, |
(...skipping 35 matching lines...) | |
1841 Goto(&if_join); | 1896 Goto(&if_join); |
1842 } | 1897 } |
1843 | 1898 |
1844 BIND(&if_join); | 1899 BIND(&if_join); |
1845 return var_result.value(); | 1900 return var_result.value(); |
1846 } | 1901 } |
1847 | 1902 |
1848 Node* CodeStubAssembler::AllocateSlicedString( | 1903 Node* CodeStubAssembler::AllocateSlicedString( |
1849 Heap::RootListIndex map_root_index, Node* length, Node* parent, | 1904 Heap::RootListIndex map_root_index, Node* length, Node* parent, |
1850 Node* offset) { | 1905 Node* offset) { |
1906 CSA_ASSERT(this, IsString(parent)); | |
1851 CSA_ASSERT(this, TaggedIsSmi(length)); | 1907 CSA_ASSERT(this, TaggedIsSmi(length)); |
1908 CSA_ASSERT(this, TaggedIsSmi(offset)); | |
1852 Node* result = Allocate(SlicedString::kSize); | 1909 Node* result = Allocate(SlicedString::kSize); |
1853 DCHECK(Heap::RootIsImmortalImmovable(map_root_index)); | 1910 DCHECK(Heap::RootIsImmortalImmovable(map_root_index)); |
1854 StoreMapNoWriteBarrier(result, map_root_index); | 1911 StoreMapNoWriteBarrier(result, map_root_index); |
1855 StoreObjectFieldNoWriteBarrier(result, SlicedString::kLengthOffset, length, | 1912 StoreObjectFieldNoWriteBarrier(result, SlicedString::kLengthOffset, length, |
1856 MachineRepresentation::kTagged); | 1913 MachineRepresentation::kTagged); |
1857 // Initialize both used and unused parts of hash field slot at once. | 1914 // Initialize both used and unused parts of hash field slot at once. |
1858 StoreObjectFieldNoWriteBarrier(result, SlicedString::kHashFieldSlot, | 1915 StoreObjectFieldNoWriteBarrier(result, SlicedString::kHashFieldSlot, |
1859 IntPtrConstant(String::kEmptyHashField), | 1916 IntPtrConstant(String::kEmptyHashField), |
1860 MachineType::PointerRepresentation()); | 1917 MachineType::PointerRepresentation()); |
1861 StoreObjectFieldNoWriteBarrier(result, SlicedString::kParentOffset, parent, | 1918 StoreObjectFieldNoWriteBarrier(result, SlicedString::kParentOffset, parent, |
(...skipping 12 matching lines...) | |
1874 Node* CodeStubAssembler::AllocateSlicedTwoByteString(Node* length, Node* parent, | 1931 Node* CodeStubAssembler::AllocateSlicedTwoByteString(Node* length, Node* parent, |
1875 Node* offset) { | 1932 Node* offset) { |
1876 return AllocateSlicedString(Heap::kSlicedStringMapRootIndex, length, parent, | 1933 return AllocateSlicedString(Heap::kSlicedStringMapRootIndex, length, parent, |
1877 offset); | 1934 offset); |
1878 } | 1935 } |
1879 | 1936 |
1880 Node* CodeStubAssembler::AllocateConsString(Heap::RootListIndex map_root_index, | 1937 Node* CodeStubAssembler::AllocateConsString(Heap::RootListIndex map_root_index, |
1881 Node* length, Node* first, | 1938 Node* length, Node* first, |
1882 Node* second, | 1939 Node* second, |
1883 AllocationFlags flags) { | 1940 AllocationFlags flags) { |
1941 CSA_ASSERT(this, IsString(first)); | |
1942 CSA_ASSERT(this, IsString(second)); | |
1884 CSA_ASSERT(this, TaggedIsSmi(length)); | 1943 CSA_ASSERT(this, TaggedIsSmi(length)); |
1885 Node* result = Allocate(ConsString::kSize, flags); | 1944 Node* result = Allocate(ConsString::kSize, flags); |
1886 DCHECK(Heap::RootIsImmortalImmovable(map_root_index)); | 1945 DCHECK(Heap::RootIsImmortalImmovable(map_root_index)); |
1887 StoreMapNoWriteBarrier(result, map_root_index); | 1946 StoreMapNoWriteBarrier(result, map_root_index); |
1888 StoreObjectFieldNoWriteBarrier(result, ConsString::kLengthOffset, length, | 1947 StoreObjectFieldNoWriteBarrier(result, ConsString::kLengthOffset, length, |
1889 MachineRepresentation::kTagged); | 1948 MachineRepresentation::kTagged); |
1890 // Initialize both used and unused parts of hash field slot at once. | 1949 // Initialize both used and unused parts of hash field slot at once. |
1891 StoreObjectFieldNoWriteBarrier(result, ConsString::kHashFieldSlot, | 1950 StoreObjectFieldNoWriteBarrier(result, ConsString::kHashFieldSlot, |
1892 IntPtrConstant(String::kEmptyHashField), | 1951 IntPtrConstant(String::kEmptyHashField), |
1893 MachineType::PointerRepresentation()); | 1952 MachineType::PointerRepresentation()); |
(...skipping 19 matching lines...) | |
1913 | 1972 |
1914 Node* CodeStubAssembler::AllocateTwoByteConsString(Node* length, Node* first, | 1973 Node* CodeStubAssembler::AllocateTwoByteConsString(Node* length, Node* first, |
1915 Node* second, | 1974 Node* second, |
1916 AllocationFlags flags) { | 1975 AllocationFlags flags) { |
1917 return AllocateConsString(Heap::kConsStringMapRootIndex, length, first, | 1976 return AllocateConsString(Heap::kConsStringMapRootIndex, length, first, |
1918 second, flags); | 1977 second, flags); |
1919 } | 1978 } |
1920 | 1979 |
1921 Node* CodeStubAssembler::NewConsString(Node* context, Node* length, Node* left, | 1980 Node* CodeStubAssembler::NewConsString(Node* context, Node* length, Node* left, |
1922 Node* right, AllocationFlags flags) { | 1981 Node* right, AllocationFlags flags) { |
1982 CSA_ASSERT(this, IsFixedArray(context)); | |
1983 CSA_ASSERT(this, IsString(left)); | |
1984 CSA_ASSERT(this, IsString(right)); | |
1923 CSA_ASSERT(this, TaggedIsSmi(length)); | 1985 CSA_ASSERT(this, TaggedIsSmi(length)); |
1924 // Added string can be a cons string. | 1986 // Added string can be a cons string. |
1925 Comment("Allocating ConsString"); | 1987 Comment("Allocating ConsString"); |
1926 Node* left_instance_type = LoadInstanceType(left); | 1988 Node* left_instance_type = LoadInstanceType(left); |
1927 Node* right_instance_type = LoadInstanceType(right); | 1989 Node* right_instance_type = LoadInstanceType(right); |
1928 | 1990 |
1929 // Compute intersection and difference of instance types. | 1991 // Compute intersection and difference of instance types. |
1930 Node* anded_instance_types = | 1992 Node* anded_instance_types = |
1931 Word32And(left_instance_type, right_instance_type); | 1993 Word32And(left_instance_type, right_instance_type); |
1932 Node* xored_instance_types = | 1994 Node* xored_instance_types = |
(...skipping 35 matching lines...) | |
1968 result.Bind(AllocateTwoByteConsString(length, left, right, flags)); | 2030 result.Bind(AllocateTwoByteConsString(length, left, right, flags)); |
1969 Goto(&done); | 2031 Goto(&done); |
1970 | 2032 |
1971 BIND(&done); | 2033 BIND(&done); |
1972 | 2034 |
1973 return result.value(); | 2035 return result.value(); |
1974 } | 2036 } |
1975 | 2037 |
1976 Node* CodeStubAssembler::AllocateRegExpResult(Node* context, Node* length, | 2038 Node* CodeStubAssembler::AllocateRegExpResult(Node* context, Node* length, |
1977 Node* index, Node* input) { | 2039 Node* index, Node* input) { |
2040 CSA_ASSERT(this, IsFixedArray(context)); | |
2041 CSA_ASSERT(this, TaggedIsSmi(index)); | |
2042 CSA_ASSERT(this, TaggedIsSmi(length)); | |
2043 CSA_ASSERT(this, IsString(input)); | |
2044 | |
2045 #ifdef DEBUG | |
1978 Node* const max_length = | 2046 Node* const max_length = |
1979 SmiConstant(Smi::FromInt(JSArray::kInitialMaxFastElementArray)); | 2047 SmiConstant(Smi::FromInt(JSArray::kInitialMaxFastElementArray)); |
1980 CSA_ASSERT(this, SmiLessThanOrEqual(length, max_length)); | 2048 CSA_ASSERT(this, SmiLessThanOrEqual(length, max_length)); |
1981 USE(max_length); | 2049 #endif // DEBUG |
1982 | 2050 |
1983 // Allocate the JSRegExpResult. | 2051 // Allocate the JSRegExpResult. |
1984 // TODO(jgruber): Fold JSArray and FixedArray allocations, then remove | 2052 // TODO(jgruber): Fold JSArray and FixedArray allocations, then remove |
1985 // unneeded store of elements. | 2053 // unneeded store of elements. |
1986 Node* const result = Allocate(JSRegExpResult::kSize); | 2054 Node* const result = Allocate(JSRegExpResult::kSize); |
1987 | 2055 |
1988 // TODO(jgruber): Store map as Heap constant? | 2056 // TODO(jgruber): Store map as Heap constant? |
1989 Node* const native_context = LoadNativeContext(context); | 2057 Node* const native_context = LoadNativeContext(context); |
1990 Node* const map = | 2058 Node* const map = |
1991 LoadContextElement(native_context, Context::REGEXP_RESULT_MAP_INDEX); | 2059 LoadContextElement(native_context, Context::REGEXP_RESULT_MAP_INDEX); |
(...skipping 72 matching lines...) | |
2064 kHeapObjectTag)); | 2132 kHeapObjectTag)); |
2065 Node* end_address = IntPtrAdd( | 2133 Node* end_address = IntPtrAdd( |
2066 result_word, IntPtrSub(store_size, IntPtrConstant(kHeapObjectTag))); | 2134 result_word, IntPtrSub(store_size, IntPtrConstant(kHeapObjectTag))); |
2067 StoreFieldsNoWriteBarrier(start_address, end_address, filler); | 2135 StoreFieldsNoWriteBarrier(start_address, end_address, filler); |
2068 return result; | 2136 return result; |
2069 } | 2137 } |
2070 | 2138 |
2071 Node* CodeStubAssembler::CopyNameDictionary(Node* dictionary, | 2139 Node* CodeStubAssembler::CopyNameDictionary(Node* dictionary, |
2072 Label* large_object_fallback) { | 2140 Label* large_object_fallback) { |
2073 Comment("Copy boilerplate property dict"); | 2141 Comment("Copy boilerplate property dict"); |
2142 CSA_SLOW_ASSERT(this, IsFixedArray(dictionary)); | |
2074 Label done(this); | 2143 Label done(this); |
2075 Node* length = SmiUntag(LoadFixedArrayBaseLength(dictionary)); | 2144 Node* length = SmiUntag(LoadFixedArrayBaseLength(dictionary)); |
2076 GotoIf( | 2145 GotoIf( |
2077 IntPtrGreaterThan(length, IntPtrConstant(FixedArray::kMaxRegularLength)), | 2146 IntPtrGreaterThan(length, IntPtrConstant(FixedArray::kMaxRegularLength)), |
2078 large_object_fallback); | 2147 large_object_fallback); |
2079 Node* properties = | 2148 Node* properties = |
2080 AllocateNameDictionary(SmiUntag(GetCapacity<NameDictionary>(dictionary))); | 2149 AllocateNameDictionary(SmiUntag(GetCapacity<NameDictionary>(dictionary))); |
2081 CopyFixedArrayElements(FAST_ELEMENTS, dictionary, properties, length, | 2150 CopyFixedArrayElements(FAST_ELEMENTS, dictionary, properties, length, |
2082 SKIP_WRITE_BARRIER, INTPTR_PARAMETERS); | 2151 SKIP_WRITE_BARRIER, INTPTR_PARAMETERS); |
2083 return properties; | 2152 return properties; |
2084 } | 2153 } |
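The copy path above has a simple shape: if the dictionary's backing FixedArray is longer than FixedArray::kMaxRegularLength it jumps to large_object_fallback, otherwise it allocates a dictionary of the same capacity and copies the entries with the write barrier skipped (the target is freshly allocated). A minimal plain-C++ sketch of that control flow, with std::vector standing in for the backing store and the helper name purely hypothetical:

#include <optional>
#include <vector>

// Returns the copied entries, or nullopt to model the large_object_fallback
// path (the runtime does the copy for over-sized backing stores).
std::optional<std::vector<intptr_t>> CopyDictionarySketch(
    const std::vector<intptr_t>& dictionary, std::size_t max_regular_length) {
  if (dictionary.size() > max_regular_length) {
    return std::nullopt;  // large_object_fallback
  }
  // Fresh allocation, so the element copy can skip the write barrier.
  return dictionary;
}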
2085 | 2154 |
2086 Node* CodeStubAssembler::AllocateJSObjectFromMap(Node* map, Node* properties, | 2155 Node* CodeStubAssembler::AllocateJSObjectFromMap(Node* map, Node* properties, |
2087 Node* elements, | 2156 Node* elements, |
2088 AllocationFlags flags) { | 2157 AllocationFlags flags) { |
2089 CSA_ASSERT(this, IsMap(map)); | 2158 CSA_ASSERT(this, IsMap(map)); |
2090 Node* size = | 2159 Node* size = |
2091 IntPtrMul(LoadMapInstanceSize(map), IntPtrConstant(kPointerSize)); | 2160 IntPtrMul(LoadMapInstanceSize(map), IntPtrConstant(kPointerSize)); |
2092 Node* object = AllocateInNewSpace(size, flags); | 2161 Node* object = AllocateInNewSpace(size, flags); |
2093 StoreMapNoWriteBarrier(object, map); | 2162 StoreMapNoWriteBarrier(object, map); |
2094 InitializeJSObjectFromMap(object, map, size, properties, elements); | 2163 InitializeJSObjectFromMap(object, map, size, properties, elements); |
2095 return object; | 2164 return object; |
2096 } | 2165 } |
2097 | 2166 |
2098 void CodeStubAssembler::InitializeJSObjectFromMap(Node* object, Node* map, | 2167 void CodeStubAssembler::InitializeJSObjectFromMap(Node* object, Node* map, |
2099 Node* size, Node* properties, | 2168 Node* size, Node* properties, |
2100 Node* elements) { | 2169 Node* elements) { |
2170 CSA_SLOW_ASSERT(this, IsMap(map)); | |
2101 // This helper assumes that the object is in new-space, as guarded by the | 2171 // This helper assumes that the object is in new-space, as guarded by the |
2102 // check in AllocateJSObjectFromMap. | 2172 // check in AllocateJSObjectFromMap. |
2103 if (properties == nullptr) { | 2173 if (properties == nullptr) { |
2104 CSA_ASSERT(this, Word32BinaryNot(IsDictionaryMap((map)))); | 2174 CSA_ASSERT(this, Word32BinaryNot(IsDictionaryMap((map)))); |
2105 StoreObjectFieldRoot(object, JSObject::kPropertiesOffset, | 2175 StoreObjectFieldRoot(object, JSObject::kPropertiesOffset, |
2106 Heap::kEmptyFixedArrayRootIndex); | 2176 Heap::kEmptyFixedArrayRootIndex); |
2107 } else { | 2177 } else { |
2178 CSA_ASSERT(this, IsFixedArray(properties)); | |
2108 StoreObjectFieldNoWriteBarrier(object, JSObject::kPropertiesOffset, | 2179 StoreObjectFieldNoWriteBarrier(object, JSObject::kPropertiesOffset, |
2109 properties); | 2180 properties); |
2110 } | 2181 } |
2111 if (elements == nullptr) { | 2182 if (elements == nullptr) { |
2112 StoreObjectFieldRoot(object, JSObject::kElementsOffset, | 2183 StoreObjectFieldRoot(object, JSObject::kElementsOffset, |
2113 Heap::kEmptyFixedArrayRootIndex); | 2184 Heap::kEmptyFixedArrayRootIndex); |
2114 } else { | 2185 } else { |
2186 CSA_ASSERT(this, IsFixedArray(elements)); | |
2115 StoreObjectFieldNoWriteBarrier(object, JSObject::kElementsOffset, elements); | 2187 StoreObjectFieldNoWriteBarrier(object, JSObject::kElementsOffset, elements); |
2116 } | 2188 } |
2117 InitializeJSObjectBody(object, map, size, JSObject::kHeaderSize); | 2189 InitializeJSObjectBody(object, map, size, JSObject::kHeaderSize); |
2118 } | 2190 } |
2119 | 2191 |
2120 void CodeStubAssembler::InitializeJSObjectBody(Node* object, Node* map, | 2192 void CodeStubAssembler::InitializeJSObjectBody(Node* object, Node* map, |
2121 Node* size, int start_offset) { | 2193 Node* size, int start_offset) { |
2194 CSA_SLOW_ASSERT(this, IsMap(map)); | |
2122 // TODO(cbruni): activate in-object slack tracking machinery. | 2195 // TODO(cbruni): activate in-object slack tracking machinery. |
2123 Comment("InitializeJSObjectBody"); | 2196 Comment("InitializeJSObjectBody"); |
2124 Node* filler = LoadRoot(Heap::kUndefinedValueRootIndex); | 2197 Node* filler = LoadRoot(Heap::kUndefinedValueRootIndex); |
2125 // Calculate the untagged field addresses. | 2198 // Calculate the untagged field addresses. |
2126 object = BitcastTaggedToWord(object); | 2199 object = BitcastTaggedToWord(object); |
2127 Node* start_address = | 2200 Node* start_address = |
2128 IntPtrAdd(object, IntPtrConstant(start_offset - kHeapObjectTag)); | 2201 IntPtrAdd(object, IntPtrConstant(start_offset - kHeapObjectTag)); |
2129 Node* end_address = | 2202 Node* end_address = |
2130 IntPtrSub(IntPtrAdd(object, size), IntPtrConstant(kHeapObjectTag)); | 2203 IntPtrSub(IntPtrAdd(object, size), IntPtrConstant(kHeapObjectTag)); |
2131 StoreFieldsNoWriteBarrier(start_address, end_address, filler); | 2204 StoreFieldsNoWriteBarrier(start_address, end_address, filler); |
2132 } | 2205 } |
2133 | 2206 |
2134 void CodeStubAssembler::StoreFieldsNoWriteBarrier(Node* start_address, | 2207 void CodeStubAssembler::StoreFieldsNoWriteBarrier(Node* start_address, |
2135 Node* end_address, | 2208 Node* end_address, |
2136 Node* value) { | 2209 Node* value) { |
2137 Comment("StoreFieldsNoWriteBarrier"); | 2210 Comment("StoreFieldsNoWriteBarrier"); |
2138 CSA_ASSERT(this, WordIsWordAligned(start_address)); | 2211 CSA_ASSERT(this, WordIsWordAligned(start_address)); |
2139 CSA_ASSERT(this, WordIsWordAligned(end_address)); | 2212 CSA_ASSERT(this, WordIsWordAligned(end_address)); |
2140 BuildFastLoop(start_address, end_address, | 2213 BuildFastLoop(start_address, end_address, |
2141 [this, value](Node* current) { | 2214 [this, value](Node* current) { |
2142 StoreNoWriteBarrier(MachineRepresentation::kTagged, current, | 2215 StoreNoWriteBarrier(MachineRepresentation::kTagged, current, |
2143 value); | 2216 value); |
2144 }, | 2217 }, |
2145 kPointerSize, INTPTR_PARAMETERS, IndexAdvanceMode::kPost); | 2218 kPointerSize, INTPTR_PARAMETERS, IndexAdvanceMode::kPost); |
2146 } | 2219 } |
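InitializeJSObjectBody and StoreFieldsNoWriteBarrier together untag the object pointer, compute the [start_offset, size) field range, and store the filler word into every pointer-sized slot. A rough standalone model of that address arithmetic, where kTag is an assumed one-byte heap-object tag rather than the real V8 constant:

#include <cstdint>
#include <cstring>

constexpr intptr_t kTag = 1;                    // assumed heap-object tag
constexpr int kPointerSize = sizeof(intptr_t);

// tagged_object points one byte past the real start of the object, so each
// field address is (tagged_object + offset - kTag); the loop stores the
// filler word into every pointer-sized slot of [start_offset, size).
void FillBody(intptr_t tagged_object, int start_offset, intptr_t size,
              intptr_t filler) {
  intptr_t start = tagged_object + start_offset - kTag;
  intptr_t end = tagged_object + size - kTag;
  for (intptr_t p = start; p < end; p += kPointerSize) {
    std::memcpy(reinterpret_cast<void*>(p), &filler, kPointerSize);
  }
}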
2147 | 2220 |
2148 Node* CodeStubAssembler::AllocateUninitializedJSArrayWithoutElements( | 2221 Node* CodeStubAssembler::AllocateUninitializedJSArrayWithoutElements( |
2149 ElementsKind kind, Node* array_map, Node* length, Node* allocation_site) { | 2222 ElementsKind kind, Node* array_map, Node* length, Node* allocation_site) { |
2150 Comment("begin allocation of JSArray without elements"); | 2223 Comment("begin allocation of JSArray without elements"); |
2224 CSA_SLOW_ASSERT(this, TaggedIsSmi(length)); | |
Camillo Bruni 2017/04/28 15:57:44: TaggedIsPositiveSmi(length)
jgruber 2017/05/03 11:11:50: Done.
2225 CSA_SLOW_ASSERT(this, IsMap(array_map)); | |
2151 int base_size = JSArray::kSize; | 2226 int base_size = JSArray::kSize; |
2152 if (allocation_site != nullptr) { | 2227 if (allocation_site != nullptr) { |
2153 base_size += AllocationMemento::kSize; | 2228 base_size += AllocationMemento::kSize; |
2154 } | 2229 } |
2155 | 2230 |
2156 Node* size = IntPtrConstant(base_size); | 2231 Node* size = IntPtrConstant(base_size); |
2157 Node* array = AllocateUninitializedJSArray(kind, array_map, length, | 2232 Node* array = AllocateUninitializedJSArray(kind, array_map, length, |
2158 allocation_site, size); | 2233 allocation_site, size); |
2159 return array; | 2234 return array; |
2160 } | 2235 } |
2161 | 2236 |
2162 std::pair<Node*, Node*> | 2237 std::pair<Node*, Node*> |
2163 CodeStubAssembler::AllocateUninitializedJSArrayWithElements( | 2238 CodeStubAssembler::AllocateUninitializedJSArrayWithElements( |
2164 ElementsKind kind, Node* array_map, Node* length, Node* allocation_site, | 2239 ElementsKind kind, Node* array_map, Node* length, Node* allocation_site, |
2165 Node* capacity, ParameterMode capacity_mode) { | 2240 Node* capacity, ParameterMode capacity_mode) { |
2166 Comment("begin allocation of JSArray with elements"); | 2241 Comment("begin allocation of JSArray with elements"); |
2242 CSA_SLOW_ASSERT(this, TaggedIsSmi(length)); | |
Camillo Bruni 2017/04/28 15:57:44: TaggedIsPositiveSmi(length)
jgruber 2017/05/03 11:11:50: Done.
2243 CSA_SLOW_ASSERT(this, IsMap(array_map)); | |
2167 int base_size = JSArray::kSize; | 2244 int base_size = JSArray::kSize; |
2168 | 2245 |
2169 if (allocation_site != nullptr) { | 2246 if (allocation_site != nullptr) { |
2170 base_size += AllocationMemento::kSize; | 2247 base_size += AllocationMemento::kSize; |
2171 } | 2248 } |
2172 | 2249 |
2173 int elements_offset = base_size; | 2250 int elements_offset = base_size; |
2174 | 2251 |
2175 // Compute space for elements | 2252 // Compute space for elements |
2176 base_size += FixedArray::kHeaderSize; | 2253 base_size += FixedArray::kHeaderSize; |
2177 Node* size = ElementOffsetFromIndex(capacity, kind, capacity_mode, base_size); | 2254 Node* size = ElementOffsetFromIndex(capacity, kind, capacity_mode, base_size); |
2178 | 2255 |
2179 Node* array = AllocateUninitializedJSArray(kind, array_map, length, | 2256 Node* array = AllocateUninitializedJSArray(kind, array_map, length, |
2180 allocation_site, size); | 2257 allocation_site, size); |
2181 | 2258 |
2182 Node* elements = InnerAllocate(array, elements_offset); | 2259 Node* elements = InnerAllocate(array, elements_offset); |
2183 StoreObjectFieldNoWriteBarrier(array, JSObject::kElementsOffset, elements); | 2260 StoreObjectFieldNoWriteBarrier(array, JSObject::kElementsOffset, elements); |
2184 | 2261 |
2185 return {array, elements}; | 2262 return {array, elements}; |
2186 } | 2263 } |
2187 | 2264 |
2188 Node* CodeStubAssembler::AllocateUninitializedJSArray(ElementsKind kind, | 2265 Node* CodeStubAssembler::AllocateUninitializedJSArray(ElementsKind kind, |
2189 Node* array_map, | 2266 Node* array_map, |
2190 Node* length, | 2267 Node* length, |
2191 Node* allocation_site, | 2268 Node* allocation_site, |
2192 Node* size_in_bytes) { | 2269 Node* size_in_bytes) { |
2270 CSA_SLOW_ASSERT(this, TaggedIsSmi(length)); | |
Camillo Bruni 2017/04/28 15:57:44: TaggedIsPositiveSmi(length)
jgruber 2017/05/03 11:11:50: Done.
2271 CSA_SLOW_ASSERT(this, IsMap(array_map)); | |
2272 | |
2193 // Allocate space for the JSArray and the elements FixedArray in one go. | 2273 // Allocate space for the JSArray and the elements FixedArray in one go. |
2194 Node* array = AllocateInNewSpace(size_in_bytes); | 2274 Node* array = AllocateInNewSpace(size_in_bytes); |
2195 | 2275 |
2196 Comment("write JSArray headers"); | 2276 Comment("write JSArray headers"); |
2197 StoreMapNoWriteBarrier(array, array_map); | 2277 StoreMapNoWriteBarrier(array, array_map); |
2198 | 2278 |
2199 CSA_ASSERT(this, TaggedIsSmi(length)); | 2279 CSA_ASSERT(this, TaggedIsSmi(length)); |
2200 StoreObjectFieldNoWriteBarrier(array, JSArray::kLengthOffset, length); | 2280 StoreObjectFieldNoWriteBarrier(array, JSArray::kLengthOffset, length); |
2201 | 2281 |
2202 StoreObjectFieldRoot(array, JSArray::kPropertiesOffset, | 2282 StoreObjectFieldRoot(array, JSArray::kPropertiesOffset, |
2203 Heap::kEmptyFixedArrayRootIndex); | 2283 Heap::kEmptyFixedArrayRootIndex); |
2204 | 2284 |
2205 if (allocation_site != nullptr) { | 2285 if (allocation_site != nullptr) { |
2206 InitializeAllocationMemento(array, JSArray::kSize, allocation_site); | 2286 InitializeAllocationMemento(array, JSArray::kSize, allocation_site); |
2207 } | 2287 } |
2208 return array; | 2288 return array; |
2209 } | 2289 } |
2210 | 2290 |
2211 Node* CodeStubAssembler::AllocateJSArray(ElementsKind kind, Node* array_map, | 2291 Node* CodeStubAssembler::AllocateJSArray(ElementsKind kind, Node* array_map, |
2212 Node* capacity, Node* length, | 2292 Node* capacity, Node* length, |
2213 Node* allocation_site, | 2293 Node* allocation_site, |
2214 ParameterMode capacity_mode) { | 2294 ParameterMode capacity_mode) { |
2295 CSA_SLOW_ASSERT(this, TaggedIsSmi(length)); | |
Camillo Bruni 2017/04/28 15:57:44: TaggedIsPositiveSmi(length)
jgruber 2017/05/03 11:11:50: Done.
2296 CSA_SLOW_ASSERT(this, IsMap(array_map)); | |
2297 CSA_SLOW_ASSERT(this, IsParameterMode(capacity, capacity_mode)); | |
2298 | |
2215 Node *array = nullptr, *elements = nullptr; | 2299 Node *array = nullptr, *elements = nullptr; |
2216 if (IsIntPtrOrSmiConstantZero(capacity)) { | 2300 if (IsIntPtrOrSmiConstantZero(capacity)) { |
2217 // Array is empty. Use the shared empty fixed array instead of allocating a | 2301 // Array is empty. Use the shared empty fixed array instead of allocating a |
2218 // new one. | 2302 // new one. |
2219 array = AllocateUninitializedJSArrayWithoutElements(kind, array_map, length, | 2303 array = AllocateUninitializedJSArrayWithoutElements(kind, array_map, length, |
2220 nullptr); | 2304 nullptr); |
2221 StoreObjectFieldRoot(array, JSArray::kElementsOffset, | 2305 StoreObjectFieldRoot(array, JSArray::kElementsOffset, |
2222 Heap::kEmptyFixedArrayRootIndex); | 2306 Heap::kEmptyFixedArrayRootIndex); |
2223 } else { | 2307 } else { |
2224 // Allocate both array and elements object, and initialize the JSArray. | 2308 // Allocate both array and elements object, and initialize the JSArray. |
(...skipping 13 matching lines...)
2238 Heap::kTheHoleValueRootIndex, capacity_mode); | 2322 Heap::kTheHoleValueRootIndex, capacity_mode); |
2239 } | 2323 } |
2240 | 2324 |
2241 return array; | 2325 return array; |
2242 } | 2326 } |
2243 | 2327 |
2244 Node* CodeStubAssembler::AllocateFixedArray(ElementsKind kind, | 2328 Node* CodeStubAssembler::AllocateFixedArray(ElementsKind kind, |
2245 Node* capacity_node, | 2329 Node* capacity_node, |
2246 ParameterMode mode, | 2330 ParameterMode mode, |
2247 AllocationFlags flags) { | 2331 AllocationFlags flags) { |
2332 CSA_SLOW_ASSERT(this, IsParameterMode(capacity_node, mode)); | |
2248 CSA_ASSERT(this, IntPtrOrSmiGreaterThan(capacity_node, | 2333 CSA_ASSERT(this, IntPtrOrSmiGreaterThan(capacity_node, |
2249 IntPtrOrSmiConstant(0, mode), mode)); | 2334 IntPtrOrSmiConstant(0, mode), mode)); |
2250 Node* total_size = GetFixedArrayAllocationSize(capacity_node, kind, mode); | 2335 Node* total_size = GetFixedArrayAllocationSize(capacity_node, kind, mode); |
2251 | 2336 |
2252 // Allocate both array and elements object, and initialize the JSArray. | 2337 // Allocate both array and elements object, and initialize the JSArray. |
2253 Node* array = Allocate(total_size, flags); | 2338 Node* array = Allocate(total_size, flags); |
2254 Heap::RootListIndex map_index = IsFastDoubleElementsKind(kind) | 2339 Heap::RootListIndex map_index = IsFastDoubleElementsKind(kind) |
2255 ? Heap::kFixedDoubleArrayMapRootIndex | 2340 ? Heap::kFixedDoubleArrayMapRootIndex |
2256 : Heap::kFixedArrayMapRootIndex; | 2341 : Heap::kFixedArrayMapRootIndex; |
2257 DCHECK(Heap::RootIsImmortalImmovable(map_index)); | 2342 DCHECK(Heap::RootIsImmortalImmovable(map_index)); |
2258 StoreMapNoWriteBarrier(array, map_index); | 2343 StoreMapNoWriteBarrier(array, map_index); |
2259 StoreObjectFieldNoWriteBarrier(array, FixedArray::kLengthOffset, | 2344 StoreObjectFieldNoWriteBarrier(array, FixedArray::kLengthOffset, |
2260 ParameterToTagged(capacity_node, mode)); | 2345 ParameterToTagged(capacity_node, mode)); |
2261 return array; | 2346 return array; |
2262 } | 2347 } |
2263 | 2348 |
2264 void CodeStubAssembler::FillFixedArrayWithValue( | 2349 void CodeStubAssembler::FillFixedArrayWithValue( |
2265 ElementsKind kind, Node* array, Node* from_node, Node* to_node, | 2350 ElementsKind kind, Node* array, Node* from_node, Node* to_node, |
2266 Heap::RootListIndex value_root_index, ParameterMode mode) { | 2351 Heap::RootListIndex value_root_index, ParameterMode mode) { |
2352 CSA_SLOW_ASSERT(this, IsParameterMode(from_node, mode)); | |
2353 CSA_SLOW_ASSERT(this, IsParameterMode(to_node, mode)); | |
2354 CSA_SLOW_ASSERT(this, IsFixedArray(array)); | |
2267 bool is_double = IsFastDoubleElementsKind(kind); | 2355 bool is_double = IsFastDoubleElementsKind(kind); |
2268 DCHECK(value_root_index == Heap::kTheHoleValueRootIndex || | 2356 DCHECK(value_root_index == Heap::kTheHoleValueRootIndex || |
2269 value_root_index == Heap::kUndefinedValueRootIndex); | 2357 value_root_index == Heap::kUndefinedValueRootIndex); |
2270 DCHECK_IMPLIES(is_double, value_root_index == Heap::kTheHoleValueRootIndex); | 2358 DCHECK_IMPLIES(is_double, value_root_index == Heap::kTheHoleValueRootIndex); |
2271 STATIC_ASSERT(kHoleNanLower32 == kHoleNanUpper32); | 2359 STATIC_ASSERT(kHoleNanLower32 == kHoleNanUpper32); |
2272 Node* double_hole = | 2360 Node* double_hole = |
2273 Is64() ? Int64Constant(kHoleNanInt64) : Int32Constant(kHoleNanLower32); | 2361 Is64() ? Int64Constant(kHoleNanInt64) : Int32Constant(kHoleNanLower32); |
2274 Node* value = LoadRoot(value_root_index); | 2362 Node* value = LoadRoot(value_root_index); |
2275 | 2363 |
2276 BuildFastFixedArrayForEach( | 2364 BuildFastFixedArrayForEach( |
(...skipping 23 matching lines...)
2300 value); | 2388 value); |
2301 } | 2389 } |
2302 }, | 2390 }, |
2303 mode); | 2391 mode); |
2304 } | 2392 } |
2305 | 2393 |
2306 void CodeStubAssembler::CopyFixedArrayElements( | 2394 void CodeStubAssembler::CopyFixedArrayElements( |
2307 ElementsKind from_kind, Node* from_array, ElementsKind to_kind, | 2395 ElementsKind from_kind, Node* from_array, ElementsKind to_kind, |
2308 Node* to_array, Node* element_count, Node* capacity, | 2396 Node* to_array, Node* element_count, Node* capacity, |
2309 WriteBarrierMode barrier_mode, ParameterMode mode) { | 2397 WriteBarrierMode barrier_mode, ParameterMode mode) { |
2398 CSA_SLOW_ASSERT(this, IsParameterMode(element_count, mode)); | |
2399 CSA_SLOW_ASSERT(this, IsParameterMode(capacity, mode)); | |
2400 CSA_SLOW_ASSERT(this, IsFixedArray(from_array)); | |
2401 CSA_SLOW_ASSERT(this, IsFixedArray(to_array)); | |
2310 STATIC_ASSERT(FixedArray::kHeaderSize == FixedDoubleArray::kHeaderSize); | 2402 STATIC_ASSERT(FixedArray::kHeaderSize == FixedDoubleArray::kHeaderSize); |
2311 const int first_element_offset = FixedArray::kHeaderSize - kHeapObjectTag; | 2403 const int first_element_offset = FixedArray::kHeaderSize - kHeapObjectTag; |
2312 Comment("[ CopyFixedArrayElements"); | 2404 Comment("[ CopyFixedArrayElements"); |
2313 | 2405 |
2314 // Typed array elements are not supported. | 2406 // Typed array elements are not supported. |
2315 DCHECK(!IsFixedTypedArrayElementsKind(from_kind)); | 2407 DCHECK(!IsFixedTypedArrayElementsKind(from_kind)); |
2316 DCHECK(!IsFixedTypedArrayElementsKind(to_kind)); | 2408 DCHECK(!IsFixedTypedArrayElementsKind(to_kind)); |
2317 | 2409 |
2318 Label done(this); | 2410 Label done(this); |
2319 bool from_double_elements = IsFastDoubleElementsKind(from_kind); | 2411 bool from_double_elements = IsFastDoubleElementsKind(from_kind); |
(...skipping 116 matching lines...)
2436 IncrementCounter(isolate()->counters()->inlined_copied_elements(), 1); | 2528 IncrementCounter(isolate()->counters()->inlined_copied_elements(), 1); |
2437 Comment("] CopyFixedArrayElements"); | 2529 Comment("] CopyFixedArrayElements"); |
2438 } | 2530 } |
2439 | 2531 |
2440 void CodeStubAssembler::CopyStringCharacters(Node* from_string, Node* to_string, | 2532 void CodeStubAssembler::CopyStringCharacters(Node* from_string, Node* to_string, |
2441 Node* from_index, Node* to_index, | 2533 Node* from_index, Node* to_index, |
2442 Node* character_count, | 2534 Node* character_count, |
2443 String::Encoding from_encoding, | 2535 String::Encoding from_encoding, |
2444 String::Encoding to_encoding, | 2536 String::Encoding to_encoding, |
2445 ParameterMode mode) { | 2537 ParameterMode mode) { |
2538 CSA_SLOW_ASSERT(this, IsString(from_string)); | |
2539 CSA_SLOW_ASSERT(this, IsString(to_string)); | |
2540 CSA_SLOW_ASSERT(this, IsParameterMode(character_count, mode)); | |
2541 CSA_SLOW_ASSERT(this, IsParameterMode(from_index, mode)); | |
2542 CSA_SLOW_ASSERT(this, IsParameterMode(to_index, mode)); | |
2446 bool from_one_byte = from_encoding == String::ONE_BYTE_ENCODING; | 2543 bool from_one_byte = from_encoding == String::ONE_BYTE_ENCODING; |
2447 bool to_one_byte = to_encoding == String::ONE_BYTE_ENCODING; | 2544 bool to_one_byte = to_encoding == String::ONE_BYTE_ENCODING; |
2448 DCHECK_IMPLIES(to_one_byte, from_one_byte); | 2545 DCHECK_IMPLIES(to_one_byte, from_one_byte); |
2449 Comment("CopyStringCharacters %s -> %s", | 2546 Comment("CopyStringCharacters %s -> %s", |
2450 from_one_byte ? "ONE_BYTE_ENCODING" : "TWO_BYTE_ENCODING", | 2547 from_one_byte ? "ONE_BYTE_ENCODING" : "TWO_BYTE_ENCODING", |
2451 to_one_byte ? "ONE_BYTE_ENCODING" : "TWO_BYTE_ENCODING"); | 2548 to_one_byte ? "ONE_BYTE_ENCODING" : "TWO_BYTE_ENCODING"); |
2452 | 2549 |
2453 ElementsKind from_kind = from_one_byte ? UINT8_ELEMENTS : UINT16_ELEMENTS; | 2550 ElementsKind from_kind = from_one_byte ? UINT8_ELEMENTS : UINT16_ELEMENTS; |
2454 ElementsKind to_kind = to_one_byte ? UINT8_ELEMENTS : UINT16_ELEMENTS; | 2551 ElementsKind to_kind = to_one_byte ? UINT8_ELEMENTS : UINT16_ELEMENTS; |
2455 STATIC_ASSERT(SeqOneByteString::kHeaderSize == SeqTwoByteString::kHeaderSize); | 2552 STATIC_ASSERT(SeqOneByteString::kHeaderSize == SeqTwoByteString::kHeaderSize); |
(...skipping 38 matching lines...)
2494 } | 2591 } |
2495 }, | 2592 }, |
2496 from_increment, INTPTR_PARAMETERS, IndexAdvanceMode::kPost); | 2593 from_increment, INTPTR_PARAMETERS, IndexAdvanceMode::kPost); |
2497 } | 2594 } |
2498 | 2595 |
2499 Node* CodeStubAssembler::LoadElementAndPrepareForStore(Node* array, | 2596 Node* CodeStubAssembler::LoadElementAndPrepareForStore(Node* array, |
2500 Node* offset, | 2597 Node* offset, |
2501 ElementsKind from_kind, | 2598 ElementsKind from_kind, |
2502 ElementsKind to_kind, | 2599 ElementsKind to_kind, |
2503 Label* if_hole) { | 2600 Label* if_hole) { |
2601 CSA_SLOW_ASSERT(this, IsFixedArray(array)); | |
2504 if (IsFastDoubleElementsKind(from_kind)) { | 2602 if (IsFastDoubleElementsKind(from_kind)) { |
2505 Node* value = | 2603 Node* value = |
2506 LoadDoubleWithHoleCheck(array, offset, if_hole, MachineType::Float64()); | 2604 LoadDoubleWithHoleCheck(array, offset, if_hole, MachineType::Float64()); |
2507 if (!IsFastDoubleElementsKind(to_kind)) { | 2605 if (!IsFastDoubleElementsKind(to_kind)) { |
2508 value = AllocateHeapNumberWithValue(value); | 2606 value = AllocateHeapNumberWithValue(value); |
2509 } | 2607 } |
2510 return value; | 2608 return value; |
2511 | 2609 |
2512 } else { | 2610 } else { |
2513 Node* value = Load(MachineType::AnyTagged(), array, offset); | 2611 Node* value = Load(MachineType::AnyTagged(), array, offset); |
2514 if (if_hole) { | 2612 if (if_hole) { |
2515 GotoIf(WordEqual(value, TheHoleConstant()), if_hole); | 2613 GotoIf(WordEqual(value, TheHoleConstant()), if_hole); |
2516 } | 2614 } |
2517 if (IsFastDoubleElementsKind(to_kind)) { | 2615 if (IsFastDoubleElementsKind(to_kind)) { |
2518 if (IsFastSmiElementsKind(from_kind)) { | 2616 if (IsFastSmiElementsKind(from_kind)) { |
2519 value = SmiToFloat64(value); | 2617 value = SmiToFloat64(value); |
2520 } else { | 2618 } else { |
2521 value = LoadHeapNumberValue(value); | 2619 value = LoadHeapNumberValue(value); |
2522 } | 2620 } |
2523 } | 2621 } |
2524 return value; | 2622 return value; |
2525 } | 2623 } |
2526 } | 2624 } |
2527 | 2625 |
2528 Node* CodeStubAssembler::CalculateNewElementsCapacity(Node* old_capacity, | 2626 Node* CodeStubAssembler::CalculateNewElementsCapacity(Node* old_capacity, |
2529 ParameterMode mode) { | 2627 ParameterMode mode) { |
2628 CSA_SLOW_ASSERT(this, IsParameterMode(old_capacity, mode)); | |
2530 Node* half_old_capacity = WordOrSmiShr(old_capacity, 1, mode); | 2629 Node* half_old_capacity = WordOrSmiShr(old_capacity, 1, mode); |
2531 Node* new_capacity = IntPtrOrSmiAdd(half_old_capacity, old_capacity, mode); | 2630 Node* new_capacity = IntPtrOrSmiAdd(half_old_capacity, old_capacity, mode); |
2532 Node* padding = IntPtrOrSmiConstant(16, mode); | 2631 Node* padding = IntPtrOrSmiConstant(16, mode); |
2533 return IntPtrOrSmiAdd(new_capacity, padding, mode); | 2632 return IntPtrOrSmiAdd(new_capacity, padding, mode); |
2534 } | 2633 } |
2535 | 2634 |
2536 Node* CodeStubAssembler::TryGrowElementsCapacity(Node* object, Node* elements, | 2635 Node* CodeStubAssembler::TryGrowElementsCapacity(Node* object, Node* elements, |
2537 ElementsKind kind, Node* key, | 2636 ElementsKind kind, Node* key, |
2538 Label* bailout) { | 2637 Label* bailout) { |
2638 CSA_SLOW_ASSERT(this, TaggedIsNotSmi(object)); | |
2639 CSA_SLOW_ASSERT(this, IsFixedArray(elements)); | |
2640 CSA_SLOW_ASSERT(this, TaggedIsSmi(key)); | |
2539 Node* capacity = LoadFixedArrayBaseLength(elements); | 2641 Node* capacity = LoadFixedArrayBaseLength(elements); |
2540 | 2642 |
2541 ParameterMode mode = OptimalParameterMode(); | 2643 ParameterMode mode = OptimalParameterMode(); |
2542 capacity = TaggedToParameter(capacity, mode); | 2644 capacity = TaggedToParameter(capacity, mode); |
2543 key = TaggedToParameter(key, mode); | 2645 key = TaggedToParameter(key, mode); |
2544 | 2646 |
2545 return TryGrowElementsCapacity(object, elements, kind, key, capacity, mode, | 2647 return TryGrowElementsCapacity(object, elements, kind, key, capacity, mode, |
2546 bailout); | 2648 bailout); |
2547 } | 2649 } |
2548 | 2650 |
2549 Node* CodeStubAssembler::TryGrowElementsCapacity(Node* object, Node* elements, | 2651 Node* CodeStubAssembler::TryGrowElementsCapacity(Node* object, Node* elements, |
2550 ElementsKind kind, Node* key, | 2652 ElementsKind kind, Node* key, |
2551 Node* capacity, | 2653 Node* capacity, |
2552 ParameterMode mode, | 2654 ParameterMode mode, |
2553 Label* bailout) { | 2655 Label* bailout) { |
2554 Comment("TryGrowElementsCapacity"); | 2656 Comment("TryGrowElementsCapacity"); |
2657 CSA_SLOW_ASSERT(this, TaggedIsNotSmi(object)); | |
2658 CSA_SLOW_ASSERT(this, IsFixedArray(elements)); | |
2659 CSA_SLOW_ASSERT(this, IsParameterMode(capacity, mode)); | |
2660 CSA_SLOW_ASSERT(this, IsParameterMode(key, mode)); | |
2555 | 2661 |
2556 // If the gap growth is too big, fall back to the runtime. | 2662 // If the gap growth is too big, fall back to the runtime. |
2557 Node* max_gap = IntPtrOrSmiConstant(JSObject::kMaxGap, mode); | 2663 Node* max_gap = IntPtrOrSmiConstant(JSObject::kMaxGap, mode); |
2558 Node* max_capacity = IntPtrOrSmiAdd(capacity, max_gap, mode); | 2664 Node* max_capacity = IntPtrOrSmiAdd(capacity, max_gap, mode); |
2559 GotoIf(UintPtrOrSmiGreaterThanOrEqual(key, max_capacity, mode), bailout); | 2665 GotoIf(UintPtrOrSmiGreaterThanOrEqual(key, max_capacity, mode), bailout); |
2560 | 2666 |
2561 // Calculate the capacity of the new backing store. | 2667 // Calculate the capacity of the new backing store. |
2562 Node* new_capacity = CalculateNewElementsCapacity( | 2668 Node* new_capacity = CalculateNewElementsCapacity( |
2563 IntPtrOrSmiAdd(key, IntPtrOrSmiConstant(1, mode), mode), mode); | 2669 IntPtrOrSmiAdd(key, IntPtrOrSmiConstant(1, mode), mode), mode); |
2564 return GrowElementsCapacity(object, elements, kind, kind, capacity, | 2670 return GrowElementsCapacity(object, elements, kind, kind, capacity, |
2565 new_capacity, mode, bailout); | 2671 new_capacity, mode, bailout); |
2566 } | 2672 } |
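This overload bails out to the runtime when a store at key would leave a gap of JSObject::kMaxGap or more past the current capacity, and otherwise grows to the capacity needed for key + 1 using the formula above. A hypothetical scalar sketch of that decision, where -1 models the bailout label:

// Illustrative only; key and capacity are the untagged values the CSA nodes
// operate on, and max_gap stands in for JSObject::kMaxGap.
int TryGrowSketch(int key, int capacity, int max_gap) {
  if (key >= capacity + max_gap) return -1;  // gap too large: bail to runtime
  int needed = key + 1;
  return needed + (needed >> 1) + 16;        // CalculateNewElementsCapacity
}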
2567 | 2673 |
2568 Node* CodeStubAssembler::GrowElementsCapacity( | 2674 Node* CodeStubAssembler::GrowElementsCapacity( |
2569 Node* object, Node* elements, ElementsKind from_kind, ElementsKind to_kind, | 2675 Node* object, Node* elements, ElementsKind from_kind, ElementsKind to_kind, |
2570 Node* capacity, Node* new_capacity, ParameterMode mode, Label* bailout) { | 2676 Node* capacity, Node* new_capacity, ParameterMode mode, Label* bailout) { |
2571 Comment("[ GrowElementsCapacity"); | 2677 Comment("[ GrowElementsCapacity"); |
2678 CSA_SLOW_ASSERT(this, TaggedIsNotSmi(object)); | |
2679 CSA_SLOW_ASSERT(this, IsFixedArray(elements)); | |
2680 CSA_SLOW_ASSERT(this, IsParameterMode(capacity, mode)); | |
2681 CSA_SLOW_ASSERT(this, IsParameterMode(new_capacity, mode)); | |
2682 | |
2572 // If size of the allocation for the new capacity doesn't fit in a page | 2683 // If size of the allocation for the new capacity doesn't fit in a page |
2573 // that we can bump-pointer allocate from, fall back to the runtime. | 2684 // that we can bump-pointer allocate from, fall back to the runtime. |
2574 int max_size = FixedArrayBase::GetMaxLengthForNewSpaceAllocation(to_kind); | 2685 int max_size = FixedArrayBase::GetMaxLengthForNewSpaceAllocation(to_kind); |
2575 GotoIf(UintPtrOrSmiGreaterThanOrEqual( | 2686 GotoIf(UintPtrOrSmiGreaterThanOrEqual( |
2576 new_capacity, IntPtrOrSmiConstant(max_size, mode), mode), | 2687 new_capacity, IntPtrOrSmiConstant(max_size, mode), mode), |
2577 bailout); | 2688 bailout); |
2578 | 2689 |
2579 // Allocate the new backing store. | 2690 // Allocate the new backing store. |
2580 Node* new_elements = AllocateFixedArray(to_kind, new_capacity, mode); | 2691 Node* new_elements = AllocateFixedArray(to_kind, new_capacity, mode); |
2581 | 2692 |
(...skipping 188 matching lines...)
2770 } | 2881 } |
2771 } | 2882 } |
2772 } | 2883 } |
2773 BIND(&if_valueisheapnumber); | 2884 BIND(&if_valueisheapnumber); |
2774 { | 2885 { |
2775 Node* result = AllocateHeapNumberWithValue(value); | 2886 Node* result = AllocateHeapNumberWithValue(value); |
2776 var_result.Bind(result); | 2887 var_result.Bind(result); |
2777 Goto(&if_join); | 2888 Goto(&if_join); |
2778 } | 2889 } |
2779 BIND(&if_join); | 2890 BIND(&if_join); |
2891 CSA_SLOW_ASSERT(this, IsNumber(var_result.value())); | |
2780 return var_result.value(); | 2892 return var_result.value(); |
2781 } | 2893 } |
2782 | 2894 |
2783 Node* CodeStubAssembler::ChangeInt32ToTagged(Node* value) { | 2895 Node* CodeStubAssembler::ChangeInt32ToTagged(Node* value) { |
2784 if (Is64()) { | 2896 if (Is64()) { |
2785 return SmiTag(ChangeInt32ToInt64(value)); | 2897 return SmiTag(ChangeInt32ToInt64(value)); |
2786 } | 2898 } |
2787 VARIABLE(var_result, MachineRepresentation::kTagged); | 2899 VARIABLE(var_result, MachineRepresentation::kTagged); |
2788 Node* pair = Int32AddWithOverflow(value, value); | 2900 Node* pair = Int32AddWithOverflow(value, value); |
2789 Node* overflow = Projection(1, pair); | 2901 Node* overflow = Projection(1, pair); |
2790 Label if_overflow(this, Label::kDeferred), if_notoverflow(this), | 2902 Label if_overflow(this, Label::kDeferred), if_notoverflow(this), |
2791 if_join(this); | 2903 if_join(this); |
2792 Branch(overflow, &if_overflow, &if_notoverflow); | 2904 Branch(overflow, &if_overflow, &if_notoverflow); |
2793 BIND(&if_overflow); | 2905 BIND(&if_overflow); |
2794 { | 2906 { |
2795 Node* value64 = ChangeInt32ToFloat64(value); | 2907 Node* value64 = ChangeInt32ToFloat64(value); |
2796 Node* result = AllocateHeapNumberWithValue(value64); | 2908 Node* result = AllocateHeapNumberWithValue(value64); |
2797 var_result.Bind(result); | 2909 var_result.Bind(result); |
2798 } | 2910 } |
2799 Goto(&if_join); | 2911 Goto(&if_join); |
2800 BIND(&if_notoverflow); | 2912 BIND(&if_notoverflow); |
2801 { | 2913 { |
2802 Node* result = BitcastWordToTaggedSigned(Projection(0, pair)); | 2914 Node* result = BitcastWordToTaggedSigned(Projection(0, pair)); |
2803 var_result.Bind(result); | 2915 var_result.Bind(result); |
2804 } | 2916 } |
2805 Goto(&if_join); | 2917 Goto(&if_join); |
2806 BIND(&if_join); | 2918 BIND(&if_join); |
2919 CSA_SLOW_ASSERT(this, IsNumber(var_result.value())); | |
2807 return var_result.value(); | 2920 return var_result.value(); |
2808 } | 2921 } |
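On 32-bit targets a Smi is the integer shifted left by one bit, so the code above tags by adding the value to itself and uses the overflow projection to detect values that need a HeapNumber instead. A rough model of that path, using the GCC/Clang __builtin_add_overflow intrinsic purely for illustration:

#include <cstdint>
#include <optional>

// Returns the tagged word if the value fits a 31-bit Smi, otherwise nullopt
// to model the HeapNumber allocation path.
std::optional<int32_t> TrySmiTag32(int32_t value) {
  int32_t tagged;
  if (__builtin_add_overflow(value, value, &tagged)) {
    return std::nullopt;  // overflow: box as a HeapNumber
  }
  return tagged;  // value << 1; the low bit 0 marks a Smi
}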
2809 | 2922 |
2810 Node* CodeStubAssembler::ChangeUint32ToTagged(Node* value) { | 2923 Node* CodeStubAssembler::ChangeUint32ToTagged(Node* value) { |
2811 Label if_overflow(this, Label::kDeferred), if_not_overflow(this), | 2924 Label if_overflow(this, Label::kDeferred), if_not_overflow(this), |
2812 if_join(this); | 2925 if_join(this); |
2813 VARIABLE(var_result, MachineRepresentation::kTagged); | 2926 VARIABLE(var_result, MachineRepresentation::kTagged); |
2814 // If {value} > 2^31 - 1, we need to store it in a HeapNumber. | 2927 // If {value} > 2^31 - 1, we need to store it in a HeapNumber. |
2815 Branch(Uint32LessThan(Int32Constant(Smi::kMaxValue), value), &if_overflow, | 2928 Branch(Uint32LessThan(Int32Constant(Smi::kMaxValue), value), &if_overflow, |
2816 &if_not_overflow); | 2929 &if_not_overflow); |
(...skipping 16 matching lines...) Expand all Loading... | |
2833 Goto(&if_join); | 2946 Goto(&if_join); |
2834 | 2947 |
2835 BIND(&if_overflow); | 2948 BIND(&if_overflow); |
2836 { | 2949 { |
2837 Node* float64_value = ChangeUint32ToFloat64(value); | 2950 Node* float64_value = ChangeUint32ToFloat64(value); |
2838 var_result.Bind(AllocateHeapNumberWithValue(float64_value)); | 2951 var_result.Bind(AllocateHeapNumberWithValue(float64_value)); |
2839 } | 2952 } |
2840 Goto(&if_join); | 2953 Goto(&if_join); |
2841 | 2954 |
2842 BIND(&if_join); | 2955 BIND(&if_join); |
2956 CSA_SLOW_ASSERT(this, IsNumber(var_result.value())); | |
2843 return var_result.value(); | 2957 return var_result.value(); |
2844 } | 2958 } |
2845 | 2959 |
2846 Node* CodeStubAssembler::ToThisString(Node* context, Node* value, | 2960 Node* CodeStubAssembler::ToThisString(Node* context, Node* value, |
2847 char const* method_name) { | 2961 char const* method_name) { |
2848 VARIABLE(var_value, MachineRepresentation::kTagged, value); | 2962 VARIABLE(var_value, MachineRepresentation::kTagged, value); |
2849 | 2963 |
2850 // Check if the {value} is a Smi or a HeapObject. | 2964 // Check if the {value} is a Smi or a HeapObject. |
2851 Label if_valueissmi(this, Label::kDeferred), if_valueisnotsmi(this), | 2965 Label if_valueissmi(this, Label::kDeferred), if_valueisnotsmi(this), |
2852 if_valueisstring(this); | 2966 if_valueisstring(this); |
(...skipping 44 matching lines...)
2897 // The {value} is a Smi, convert it to a String. | 3011 // The {value} is a Smi, convert it to a String. |
2898 Callable callable = CodeFactory::NumberToString(isolate()); | 3012 Callable callable = CodeFactory::NumberToString(isolate()); |
2899 var_value.Bind(CallStub(callable, context, value)); | 3013 var_value.Bind(CallStub(callable, context, value)); |
2900 Goto(&if_valueisstring); | 3014 Goto(&if_valueisstring); |
2901 } | 3015 } |
2902 BIND(&if_valueisstring); | 3016 BIND(&if_valueisstring); |
2903 return var_value.value(); | 3017 return var_value.value(); |
2904 } | 3018 } |
2905 | 3019 |
2906 Node* CodeStubAssembler::ChangeNumberToFloat64(Node* value) { | 3020 Node* CodeStubAssembler::ChangeNumberToFloat64(Node* value) { |
3021 CSA_SLOW_ASSERT(this, IsNumber(value)); | |
2907 VARIABLE(result, MachineRepresentation::kFloat64); | 3022 VARIABLE(result, MachineRepresentation::kFloat64); |
2908 Label smi(this); | 3023 Label smi(this); |
2909 Label done(this, &result); | 3024 Label done(this, &result); |
2910 GotoIf(TaggedIsSmi(value), &smi); | 3025 GotoIf(TaggedIsSmi(value), &smi); |
2911 result.Bind( | 3026 result.Bind( |
2912 LoadObjectField(value, HeapNumber::kValueOffset, MachineType::Float64())); | 3027 LoadObjectField(value, HeapNumber::kValueOffset, MachineType::Float64())); |
2913 Goto(&done); | 3028 Goto(&done); |
2914 | 3029 |
2915 BIND(&smi); | 3030 BIND(&smi); |
2916 { | 3031 { |
2917 result.Bind(SmiToFloat64(value)); | 3032 result.Bind(SmiToFloat64(value)); |
2918 Goto(&done); | 3033 Goto(&done); |
2919 } | 3034 } |
2920 | 3035 |
2921 BIND(&done); | 3036 BIND(&done); |
2922 return result.value(); | 3037 return result.value(); |
2923 } | 3038 } |
2924 | 3039 |
2925 Node* CodeStubAssembler::ChangeNumberToIntPtr(Node* value) { | 3040 Node* CodeStubAssembler::ChangeNumberToIntPtr(Node* value) { |
3041 CSA_SLOW_ASSERT(this, IsNumber(value)); | |
2926 VARIABLE(result, MachineType::PointerRepresentation()); | 3042 VARIABLE(result, MachineType::PointerRepresentation()); |
2927 Label smi(this), done(this, &result); | 3043 Label smi(this), done(this, &result); |
2928 GotoIf(TaggedIsSmi(value), &smi); | 3044 GotoIf(TaggedIsSmi(value), &smi); |
2929 | 3045 |
2930 CSA_ASSERT(this, IsHeapNumber(value)); | 3046 CSA_ASSERT(this, IsHeapNumber(value)); |
2931 result.Bind(ChangeFloat64ToUintPtr(LoadHeapNumberValue(value))); | 3047 result.Bind(ChangeFloat64ToUintPtr(LoadHeapNumberValue(value))); |
2932 Goto(&done); | 3048 Goto(&done); |
2933 | 3049 |
2934 BIND(&smi); | 3050 BIND(&smi); |
2935 result.Bind(SmiToWord(value)); | 3051 result.Bind(SmiToWord(value)); |
(...skipping 117 matching lines...)
3053 | 3169 |
3054 BIND(&out); | 3170 BIND(&out); |
3055 return var_value_map.value(); | 3171 return var_value_map.value(); |
3056 } | 3172 } |
3057 | 3173 |
3058 Node* CodeStubAssembler::InstanceTypeEqual(Node* instance_type, int type) { | 3174 Node* CodeStubAssembler::InstanceTypeEqual(Node* instance_type, int type) { |
3059 return Word32Equal(instance_type, Int32Constant(type)); | 3175 return Word32Equal(instance_type, Int32Constant(type)); |
3060 } | 3176 } |
3061 | 3177 |
3062 Node* CodeStubAssembler::IsSpecialReceiverMap(Node* map) { | 3178 Node* CodeStubAssembler::IsSpecialReceiverMap(Node* map) { |
3179 CSA_SLOW_ASSERT(this, IsMap(map)); | |
3063 Node* is_special = IsSpecialReceiverInstanceType(LoadMapInstanceType(map)); | 3180 Node* is_special = IsSpecialReceiverInstanceType(LoadMapInstanceType(map)); |
3064 uint32_t mask = | 3181 uint32_t mask = |
3065 1 << Map::kHasNamedInterceptor | 1 << Map::kIsAccessCheckNeeded; | 3182 1 << Map::kHasNamedInterceptor | 1 << Map::kIsAccessCheckNeeded; |
3066 USE(mask); | 3183 USE(mask); |
3067 // Interceptors or access checks imply special receiver. | 3184 // Interceptors or access checks imply special receiver. |
3068 CSA_ASSERT(this, | 3185 CSA_ASSERT(this, |
3069 SelectConstant(IsSetWord32(LoadMapBitField(map), mask), is_special, | 3186 SelectConstant(IsSetWord32(LoadMapBitField(map), mask), is_special, |
3070 Int32Constant(1), MachineRepresentation::kWord32)); | 3187 Int32Constant(1), MachineRepresentation::kWord32)); |
3071 return is_special; | 3188 return is_special; |
3072 } | 3189 } |
(...skipping 124 matching lines...)
3197 } | 3314 } |
3198 | 3315 |
3199 Node* CodeStubAssembler::IsJSArray(Node* object) { | 3316 Node* CodeStubAssembler::IsJSArray(Node* object) { |
3200 return IsJSArrayMap(LoadMap(object)); | 3317 return IsJSArrayMap(LoadMap(object)); |
3201 } | 3318 } |
3202 | 3319 |
3203 Node* CodeStubAssembler::IsJSArrayMap(Node* map) { | 3320 Node* CodeStubAssembler::IsJSArrayMap(Node* map) { |
3204 return IsJSArrayInstanceType(LoadMapInstanceType(map)); | 3321 return IsJSArrayInstanceType(LoadMapInstanceType(map)); |
3205 } | 3322 } |
3206 | 3323 |
3324 Node* CodeStubAssembler::IsFixedArray(Node* object) { | |
3325 return HasInstanceType(object, FIXED_ARRAY_TYPE); | |
3326 } | |
3327 | |
3207 Node* CodeStubAssembler::IsWeakCell(Node* object) { | 3328 Node* CodeStubAssembler::IsWeakCell(Node* object) { |
3208 return IsWeakCellMap(LoadMap(object)); | 3329 return IsWeakCellMap(LoadMap(object)); |
3209 } | 3330 } |
3210 | 3331 |
3211 Node* CodeStubAssembler::IsBoolean(Node* object) { | 3332 Node* CodeStubAssembler::IsBoolean(Node* object) { |
3212 return IsBooleanMap(LoadMap(object)); | 3333 return IsBooleanMap(LoadMap(object)); |
3213 } | 3334 } |
3214 | 3335 |
3215 Node* CodeStubAssembler::IsPropertyCell(Node* object) { | 3336 Node* CodeStubAssembler::IsPropertyCell(Node* object) { |
3216 return IsPropertyCellMap(LoadMap(object)); | 3337 return IsPropertyCellMap(LoadMap(object)); |
(...skipping 115 matching lines...)
3332 | 3453 |
3333 var_result.Bind(Int32Constant(0)); | 3454 var_result.Bind(Int32Constant(0)); |
3334 Goto(&out); | 3455 Goto(&out); |
3335 | 3456 |
3336 BIND(&out); | 3457 BIND(&out); |
3337 return var_result.value(); | 3458 return var_result.value(); |
3338 } | 3459 } |
3339 | 3460 |
3340 Node* CodeStubAssembler::StringCharCodeAt(Node* string, Node* index, | 3461 Node* CodeStubAssembler::StringCharCodeAt(Node* string, Node* index, |
3341 ParameterMode parameter_mode) { | 3462 ParameterMode parameter_mode) { |
3342 if (parameter_mode == SMI_PARAMETERS) CSA_ASSERT(this, TaggedIsSmi(index)); | 3463 CSA_ASSERT(this, IsParameterMode(index, parameter_mode)); |
3343 CSA_ASSERT(this, IsString(string)); | 3464 CSA_ASSERT(this, IsString(string)); |
3344 | 3465 |
3345 // Translate the {index} into a Word. | 3466 // Translate the {index} into a Word. |
3346 Node* const int_index = ParameterToWord(index, parameter_mode); | 3467 Node* const int_index = ParameterToWord(index, parameter_mode); |
3347 CSA_ASSERT(this, IntPtrGreaterThanOrEqual(int_index, IntPtrConstant(0))); | 3468 CSA_ASSERT(this, IntPtrGreaterThanOrEqual(int_index, IntPtrConstant(0))); |
3348 | 3469 |
3349 VARIABLE(var_result, MachineRepresentation::kWord32); | 3470 VARIABLE(var_result, MachineRepresentation::kWord32); |
3350 | 3471 |
3351 Label out(this, &var_result), runtime_generic(this), runtime_external(this); | 3472 Label out(this, &var_result), runtime_generic(this), runtime_external(this); |
3352 | 3473 |
(...skipping 91 matching lines...)
3444 // Allocate a new SeqTwoByteString for {code}. | 3565 // Allocate a new SeqTwoByteString for {code}. |
3445 Node* result = AllocateSeqTwoByteString(1); | 3566 Node* result = AllocateSeqTwoByteString(1); |
3446 StoreNoWriteBarrier( | 3567 StoreNoWriteBarrier( |
3447 MachineRepresentation::kWord16, result, | 3568 MachineRepresentation::kWord16, result, |
3448 IntPtrConstant(SeqTwoByteString::kHeaderSize - kHeapObjectTag), code); | 3569 IntPtrConstant(SeqTwoByteString::kHeaderSize - kHeapObjectTag), code); |
3449 var_result.Bind(result); | 3570 var_result.Bind(result); |
3450 Goto(&if_done); | 3571 Goto(&if_done); |
3451 } | 3572 } |
3452 | 3573 |
3453 BIND(&if_done); | 3574 BIND(&if_done); |
3575 CSA_ASSERT(this, IsString(var_result.value())); | |
3454 return var_result.value(); | 3576 return var_result.value(); |
3455 } | 3577 } |
3456 | 3578 |
3457 namespace { | 3579 namespace { |
3458 | 3580 |
3459 // A wrapper around CopyStringCharacters which determines the correct string | 3581 // A wrapper around CopyStringCharacters which determines the correct string |
3460 // encoding, allocates a corresponding sequential string, and then copies the | 3582 // encoding, allocates a corresponding sequential string, and then copies the |
3461 // given character range using CopyStringCharacters. | 3583 // given character range using CopyStringCharacters. |
3462 // |from_string| must be a sequential string. |from_index| and | 3584 // |from_string| must be a sequential string. |from_index| and |
3463 // |character_count| must be Smis s.t. | 3585 // |character_count| must be Smis s.t. |
(...skipping 177 matching lines...)
3641 | 3763 |
3642 // Fall back to a runtime call. | 3764 // Fall back to a runtime call. |
3643 BIND(&runtime); | 3765 BIND(&runtime); |
3644 { | 3766 { |
3645 var_result.Bind( | 3767 var_result.Bind( |
3646 CallRuntime(Runtime::kSubString, context, string, from, to)); | 3768 CallRuntime(Runtime::kSubString, context, string, from, to)); |
3647 Goto(&end); | 3769 Goto(&end); |
3648 } | 3770 } |
3649 | 3771 |
3650 BIND(&end); | 3772 BIND(&end); |
3773 CSA_ASSERT(this, IsString(var_result.value())); | |
3651 return var_result.value(); | 3774 return var_result.value(); |
3652 } | 3775 } |
3653 | 3776 |
3654 ToDirectStringAssembler::ToDirectStringAssembler( | 3777 ToDirectStringAssembler::ToDirectStringAssembler( |
3655 compiler::CodeAssemblerState* state, Node* string) | 3778 compiler::CodeAssemblerState* state, Node* string) |
3656 : CodeStubAssembler(state), | 3779 : CodeStubAssembler(state), |
3657 var_string_(this, MachineRepresentation::kTagged, string), | 3780 var_string_(this, MachineRepresentation::kTagged, string), |
3658 var_instance_type_(this, MachineRepresentation::kWord32), | 3781 var_instance_type_(this, MachineRepresentation::kWord32), |
3659 var_offset_(this, MachineType::PointerRepresentation()), | 3782 var_offset_(this, MachineType::PointerRepresentation()), |
3660 var_is_external_(this, MachineRepresentation::kWord32) { | 3783 var_is_external_(this, MachineRepresentation::kWord32) { |
(...skipping 122 matching lines...)
3783 kHeapObjectTag)); | 3906 kHeapObjectTag)); |
3784 } | 3907 } |
3785 var_result.Bind(result); | 3908 var_result.Bind(result); |
3786 Goto(&out); | 3909 Goto(&out); |
3787 } | 3910 } |
3788 | 3911 |
3789 BIND(&out); | 3912 BIND(&out); |
3790 return var_result.value(); | 3913 return var_result.value(); |
3791 } | 3914 } |
3792 | 3915 |
3793 Node* CodeStubAssembler::TryDerefExternalString(Node* const string, | |
3794 Node* const instance_type, | |
3795 Label* if_bailout) { | |
3796 Label out(this); | |
3797 | |
3798 CSA_ASSERT(this, IsExternalStringInstanceType(instance_type)); | |
3799 GotoIf(IsShortExternalStringInstanceType(instance_type), if_bailout); | |
3800 | |
3801 // Move the pointer so that offset-wise, it looks like a sequential string. | |
3802 STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize); | |
3803 | |
3804 Node* resource_data = LoadObjectField( | |
3805 string, ExternalString::kResourceDataOffset, MachineType::Pointer()); | |
3806 Node* const fake_sequential_string = | |
3807 IntPtrSub(resource_data, | |
3808 IntPtrConstant(SeqTwoByteString::kHeaderSize - kHeapObjectTag)); | |
3809 | |
3810 return fake_sequential_string; | |
3811 } | |
3812 | |
3813 void CodeStubAssembler::MaybeDerefIndirectString(Variable* var_string, | 3916 void CodeStubAssembler::MaybeDerefIndirectString(Variable* var_string, |
3814 Node* instance_type, | 3917 Node* instance_type, |
3815 Variable* var_did_something) { | 3918 Variable* var_did_something) { |
3816 Label deref(this), done(this, var_did_something); | 3919 Label deref(this), done(this, var_did_something); |
3817 Node* representation = | 3920 Node* representation = |
3818 Word32And(instance_type, Int32Constant(kStringRepresentationMask)); | 3921 Word32And(instance_type, Int32Constant(kStringRepresentationMask)); |
3819 GotoIf(Word32Equal(representation, Int32Constant(kThinStringTag)), &deref); | 3922 GotoIf(Word32Equal(representation, Int32Constant(kThinStringTag)), &deref); |
3820 GotoIf(Word32NotEqual(representation, Int32Constant(kConsStringTag)), &done); | 3923 GotoIf(Word32NotEqual(representation, Int32Constant(kConsStringTag)), &done); |
3821 // Cons string. | 3924 // Cons string. |
3822 Node* rhs = LoadObjectField(var_string->value(), ConsString::kSecondOffset); | 3925 Node* rhs = LoadObjectField(var_string->value(), ConsString::kSecondOffset); |
(...skipping 185 matching lines...)
4008 Node* value = AllocateSeqTwoByteString(2); | 4111 Node* value = AllocateSeqTwoByteString(2); |
4009 StoreNoWriteBarrier( | 4112 StoreNoWriteBarrier( |
4010 MachineRepresentation::kWord32, value, | 4113 MachineRepresentation::kWord32, value, |
4011 IntPtrConstant(SeqTwoByteString::kHeaderSize - kHeapObjectTag), | 4114 IntPtrConstant(SeqTwoByteString::kHeaderSize - kHeapObjectTag), |
4012 codepoint); | 4115 codepoint); |
4013 var_result.Bind(value); | 4116 var_result.Bind(value); |
4014 Goto(&return_result); | 4117 Goto(&return_result); |
4015 } | 4118 } |
4016 | 4119 |
4017 BIND(&return_result); | 4120 BIND(&return_result); |
4121 CSA_ASSERT(this, IsString(var_result.value())); | |
4018 return var_result.value(); | 4122 return var_result.value(); |
4019 } | 4123 } |
4020 | 4124 |
4021 Node* CodeStubAssembler::StringToNumber(Node* context, Node* input) { | 4125 Node* CodeStubAssembler::StringToNumber(Node* context, Node* input) { |
4126 CSA_SLOW_ASSERT(this, IsString(input)); | |
4022 Label runtime(this, Label::kDeferred); | 4127 Label runtime(this, Label::kDeferred); |
4023 Label end(this); | 4128 Label end(this); |
4024 | 4129 |
4025 VARIABLE(var_result, MachineRepresentation::kTagged); | 4130 VARIABLE(var_result, MachineRepresentation::kTagged); |
4026 | 4131 |
4027 // Check if string has a cached array index. | 4132 // Check if string has a cached array index. |
4028 Node* hash = LoadNameHashField(input); | 4133 Node* hash = LoadNameHashField(input); |
4029 Node* bit = | 4134 Node* bit = |
4030 Word32And(hash, Int32Constant(String::kContainsCachedArrayIndexMask)); | 4135 Word32And(hash, Int32Constant(String::kContainsCachedArrayIndexMask)); |
4031 GotoIf(Word32NotEqual(bit, Int32Constant(0)), &runtime); | 4136 GotoIf(Word32NotEqual(bit, Int32Constant(0)), &runtime); |
(...skipping 79 matching lines...)
4111 GotoIf(WordNotEqual(smi_key, argument), &runtime); | 4216 GotoIf(WordNotEqual(smi_key, argument), &runtime); |
4112 | 4217 |
4113 // Smi match, return value from cache entry. | 4218 // Smi match, return value from cache entry. |
4114 IncrementCounter(isolate()->counters()->number_to_string_native(), 1); | 4219 IncrementCounter(isolate()->counters()->number_to_string_native(), 1); |
4115 result.Bind(LoadFixedArrayElement(number_string_cache, smi_index, | 4220 result.Bind(LoadFixedArrayElement(number_string_cache, smi_index, |
4116 kPointerSize, SMI_PARAMETERS)); | 4221 kPointerSize, SMI_PARAMETERS)); |
4117 Goto(&done); | 4222 Goto(&done); |
4118 } | 4223 } |
4119 | 4224 |
4120 BIND(&done); | 4225 BIND(&done); |
4226 CSA_ASSERT(this, IsString(result.value())); | |
4121 return result.value(); | 4227 return result.value(); |
4122 } | 4228 } |
4123 | 4229 |
4124 Node* CodeStubAssembler::ToName(Node* context, Node* value) { | 4230 Node* CodeStubAssembler::ToName(Node* context, Node* value) { |
4125 Label end(this); | 4231 Label end(this); |
4126 VARIABLE(var_result, MachineRepresentation::kTagged); | 4232 VARIABLE(var_result, MachineRepresentation::kTagged); |
4127 | 4233 |
4128 Label is_number(this); | 4234 Label is_number(this); |
4129 GotoIf(TaggedIsSmi(value), &is_number); | 4235 GotoIf(TaggedIsSmi(value), &is_number); |
4130 | 4236 |
(...skipping 26 matching lines...)
4157 Goto(&end); | 4263 Goto(&end); |
4158 | 4264 |
4159 BIND(¬_oddball); | 4265 BIND(¬_oddball); |
4160 { | 4266 { |
4161 var_result.Bind(CallRuntime(Runtime::kToName, context, value)); | 4267 var_result.Bind(CallRuntime(Runtime::kToName, context, value)); |
4162 Goto(&end); | 4268 Goto(&end); |
4163 } | 4269 } |
4164 } | 4270 } |
4165 | 4271 |
4166 BIND(&end); | 4272 BIND(&end); |
4273 CSA_ASSERT(this, IsName(var_result.value())); | |
4167 return var_result.value(); | 4274 return var_result.value(); |
4168 } | 4275 } |
4169 | 4276 |
4170 Node* CodeStubAssembler::NonNumberToNumber(Node* context, Node* input) { | 4277 Node* CodeStubAssembler::NonNumberToNumber(Node* context, Node* input) { |
4171 // Assert input is a HeapObject (not smi or heap number) | 4278 // Assert input is a HeapObject (not smi or heap number) |
4172 CSA_ASSERT(this, Word32BinaryNot(TaggedIsSmi(input))); | 4279 CSA_ASSERT(this, Word32BinaryNot(TaggedIsSmi(input))); |
4173 CSA_ASSERT(this, Word32BinaryNot(IsHeapNumberMap(LoadMap(input)))); | 4280 CSA_ASSERT(this, Word32BinaryNot(IsHeapNumberMap(LoadMap(input)))); |
4174 | 4281 |
4175 // We might need to loop once here due to ToPrimitive conversions. | 4282 // We might need to loop once here due to ToPrimitive conversions. |
4176 VARIABLE(var_input, MachineRepresentation::kTagged, input); | 4283 VARIABLE(var_input, MachineRepresentation::kTagged, input); |
(...skipping 68 matching lines...)
4245 // Note: We cannot tail call to the runtime here, as js-to-wasm | 4352 // Note: We cannot tail call to the runtime here, as js-to-wasm |
4246 // trampolines also use this code currently, and they declare all | 4353 // trampolines also use this code currently, and they declare all |
4247 // outgoing parameters as untagged, while we would push a tagged | 4354 // outgoing parameters as untagged, while we would push a tagged |
4248 // object here. | 4355 // object here. |
4249 var_result.Bind(CallRuntime(Runtime::kToNumber, context, input)); | 4356 var_result.Bind(CallRuntime(Runtime::kToNumber, context, input)); |
4250 Goto(&end); | 4357 Goto(&end); |
4251 } | 4358 } |
4252 } | 4359 } |
4253 | 4360 |
4254 BIND(&end); | 4361 BIND(&end); |
4362 CSA_ASSERT(this, IsNumber(var_result.value())); | |
4255 return var_result.value(); | 4363 return var_result.value(); |
4256 } | 4364 } |
4257 | 4365 |
4258 Node* CodeStubAssembler::ToNumber(Node* context, Node* input) { | 4366 Node* CodeStubAssembler::ToNumber(Node* context, Node* input) { |
4259 VARIABLE(var_result, MachineRepresentation::kTagged); | 4367 VARIABLE(var_result, MachineRepresentation::kTagged); |
4260 Label end(this); | 4368 Label end(this); |
4261 | 4369 |
4262 Label not_smi(this, Label::kDeferred); | 4370 Label not_smi(this, Label::kDeferred); |
4263 GotoIfNot(TaggedIsSmi(input), ¬_smi); | 4371 GotoIfNot(TaggedIsSmi(input), ¬_smi); |
4264 var_result.Bind(input); | 4372 var_result.Bind(input); |
4265 Goto(&end); | 4373 Goto(&end); |
4266 | 4374 |
4267 BIND(¬_smi); | 4375 BIND(¬_smi); |
4268 { | 4376 { |
4269 Label not_heap_number(this, Label::kDeferred); | 4377 Label not_heap_number(this, Label::kDeferred); |
4270 Node* input_map = LoadMap(input); | 4378 Node* input_map = LoadMap(input); |
4271 GotoIfNot(IsHeapNumberMap(input_map), ¬_heap_number); | 4379 GotoIfNot(IsHeapNumberMap(input_map), ¬_heap_number); |
4272 | 4380 |
4273 var_result.Bind(input); | 4381 var_result.Bind(input); |
4274 Goto(&end); | 4382 Goto(&end); |
4275 | 4383 |
4276 BIND(¬_heap_number); | 4384 BIND(¬_heap_number); |
4277 { | 4385 { |
4278 var_result.Bind(NonNumberToNumber(context, input)); | 4386 var_result.Bind(NonNumberToNumber(context, input)); |
4279 Goto(&end); | 4387 Goto(&end); |
4280 } | 4388 } |
4281 } | 4389 } |
4282 | 4390 |
4283 BIND(&end); | 4391 BIND(&end); |
4392 CSA_ASSERT(this, IsNumber(var_result.value())); | |
4284 return var_result.value(); | 4393 return var_result.value(); |
4285 } | 4394 } |
4286 | 4395 |
4287 // ES#sec-touint32 | 4396 // ES#sec-touint32 |
4288 Node* CodeStubAssembler::ToUint32(Node* context, Node* input) { | 4397 Node* CodeStubAssembler::ToUint32(Node* context, Node* input) { |
4289 Node* const float_zero = Float64Constant(0.0); | 4398 Node* const float_zero = Float64Constant(0.0); |
4290 Node* const float_two_32 = Float64Constant(static_cast<double>(1ULL << 32)); | 4399 Node* const float_two_32 = Float64Constant(static_cast<double>(1ULL << 32)); |
4291 | 4400 |
4292 Label out(this); | 4401 Label out(this); |
4293 | 4402 |
(...skipping 82 matching lines...)
4376 } | 4485 } |
4377 | 4486 |
4378 BIND(&return_zero); | 4487 BIND(&return_zero); |
4379 { | 4488 { |
4380 var_result.Bind(SmiConstant(Smi::kZero)); | 4489 var_result.Bind(SmiConstant(Smi::kZero)); |
4381 Goto(&out); | 4490 Goto(&out); |
4382 } | 4491 } |
4383 } | 4492 } |
4384 | 4493 |
4385 BIND(&out); | 4494 BIND(&out); |
4495 CSA_ASSERT(this, IsNumber(var_result.value())); | |
4386 return var_result.value(); | 4496 return var_result.value(); |
4387 } | 4497 } |
4388 | 4498 |
4389 Node* CodeStubAssembler::ToString(Node* context, Node* input) { | 4499 Node* CodeStubAssembler::ToString(Node* context, Node* input) { |
4390 Label is_number(this); | 4500 Label is_number(this); |
4391 Label runtime(this, Label::kDeferred); | 4501 Label runtime(this, Label::kDeferred); |
4392 VARIABLE(result, MachineRepresentation::kTagged); | 4502 VARIABLE(result, MachineRepresentation::kTagged); |
4393 Label done(this, &result); | 4503 Label done(this, &result); |
4394 | 4504 |
4395 GotoIf(TaggedIsSmi(input), &is_number); | 4505 GotoIf(TaggedIsSmi(input), &is_number); |
(...skipping 19 matching lines...)
4415 Goto(&done); | 4525 Goto(&done); |
4416 } | 4526 } |
4417 | 4527 |
4418 BIND(&runtime); | 4528 BIND(&runtime); |
4419 { | 4529 { |
4420 result.Bind(CallRuntime(Runtime::kToString, context, input)); | 4530 result.Bind(CallRuntime(Runtime::kToString, context, input)); |
4421 Goto(&done); | 4531 Goto(&done); |
4422 } | 4532 } |
4423 | 4533 |
4424 BIND(&done); | 4534 BIND(&done); |
4535 CSA_ASSERT(this, IsString(result.value())); | |
4425 return result.value(); | 4536 return result.value(); |
4426 } | 4537 } |
4427 | 4538 |
4428 Node* CodeStubAssembler::JSReceiverToPrimitive(Node* context, Node* input) { | 4539 Node* CodeStubAssembler::JSReceiverToPrimitive(Node* context, Node* input) { |
4429 Label if_isreceiver(this, Label::kDeferred), if_isnotreceiver(this); | 4540 Label if_isreceiver(this, Label::kDeferred), if_isnotreceiver(this); |
4430 VARIABLE(result, MachineRepresentation::kTagged); | 4541 VARIABLE(result, MachineRepresentation::kTagged); |
4431 Label done(this, &result); | 4542 Label done(this, &result); |
4432 | 4543 |
4433 BranchIfJSReceiver(input, &if_isreceiver, &if_isnotreceiver); | 4544 BranchIfJSReceiver(input, &if_isreceiver, &if_isnotreceiver); |
4434 | 4545 |
(...skipping 33 matching lines...) |
4468 Goto(&negative_check); | 4579 Goto(&negative_check); |
4469 | 4580 |
4470 BIND(&negative_check); | 4581 BIND(&negative_check); |
4471 Branch(SmiLessThan(result.value(), SmiConstant(0)), range_error, &done); | 4582 Branch(SmiLessThan(result.value(), SmiConstant(0)), range_error, &done); |
4472 | 4583 |
4473 BIND(&return_zero); | 4584 BIND(&return_zero); |
4474 result.Bind(SmiConstant(0)); | 4585 result.Bind(SmiConstant(0)); |
4475 Goto(&done); | 4586 Goto(&done); |
4476 | 4587 |
4477 BIND(&done); | 4588 BIND(&done); |
4589 CSA_SLOW_ASSERT(this, TaggedIsSmi(result.value())); | |
4478 return result.value(); | 4590 return result.value(); |
4479 } | 4591 } |
4480 | 4592 |
4481 Node* CodeStubAssembler::ToSmiLength(Node* input, Node* const context, | 4593 Node* CodeStubAssembler::ToSmiLength(Node* input, Node* const context, |
4482 Label* range_error) { | 4594 Label* range_error) { |
4483 VARIABLE(result, MachineRepresentation::kTagged, input); | 4595 VARIABLE(result, MachineRepresentation::kTagged, input); |
4484 Label to_integer(this), negative_check(this), return_zero(this), done(this); | 4596 Label to_integer(this), negative_check(this), return_zero(this), done(this); |
4485 Branch(TaggedIsSmi(result.value()), &negative_check, &to_integer); | 4597 Branch(TaggedIsSmi(result.value()), &negative_check, &to_integer); |
4486 | 4598 |
4487 BIND(&to_integer); | 4599 BIND(&to_integer); |
4488 result.Bind(ToInteger(context, result.value(), | 4600 result.Bind(ToInteger(context, result.value(), |
4489 CodeStubAssembler::kTruncateMinusZero)); | 4601 CodeStubAssembler::kTruncateMinusZero)); |
4490 GotoIfNot(TaggedIsSmi(result.value()), range_error); | 4602 GotoIfNot(TaggedIsSmi(result.value()), range_error); |
4491 CSA_ASSERT(this, TaggedIsSmi(result.value())); | 4603 CSA_ASSERT(this, TaggedIsSmi(result.value())); |
4492 Goto(&negative_check); | 4604 Goto(&negative_check); |
4493 | 4605 |
4494 BIND(&negative_check); | 4606 BIND(&negative_check); |
4495 Branch(SmiLessThan(result.value(), SmiConstant(0)), &return_zero, &done); | 4607 Branch(SmiLessThan(result.value(), SmiConstant(0)), &return_zero, &done); |
4496 | 4608 |
4497 BIND(&return_zero); | 4609 BIND(&return_zero); |
4498 result.Bind(SmiConstant(0)); | 4610 result.Bind(SmiConstant(0)); |
4499 Goto(&done); | 4611 Goto(&done); |
4500 | 4612 |
4501 BIND(&done); | 4613 BIND(&done); |
4614 CSA_SLOW_ASSERT(this, TaggedIsSmi(result.value())); | |
4502 return result.value(); | 4615 return result.value(); |
4503 } | 4616 } |
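A hedged scalar sketch of the clamping ToSmiIndex/ToSmiLength perform: run the value through ToInteger semantics, clamp negatives to zero, and report values that no longer fit a Smi as a range error. ToSmiLengthSketch and kSmiMax are illustrative only; the real Smi limit depends on the target word size.

#include <cmath>
#include <cstdint>
#include <optional>

// Assumed Smi limit (2^30 - 1, the 32-bit layout); larger with 64-bit Smis.
constexpr double kSmiMax = 1073741823.0;

std::optional<int32_t> ToSmiLengthSketch(double number) {
  if (std::isnan(number)) return 0;            // ToInteger maps NaN to +0
  number = std::trunc(number);                 // kTruncateMinusZero behaviour
  if (number < 0.0) return 0;                  // negative_check -> zero
  if (number > kSmiMax) return std::nullopt;   // not a Smi -> range_error
  return static_cast<int32_t>(number);
}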
4504 | 4617 |
4505 Node* CodeStubAssembler::ToInteger(Node* context, Node* input, | 4618 Node* CodeStubAssembler::ToInteger(Node* context, Node* input, |
4506 ToIntegerTruncationMode mode) { | 4619 ToIntegerTruncationMode mode) { |
4507 // We might need to loop once for ToNumber conversion. | 4620 // We might need to loop once for ToNumber conversion. |
4508 VARIABLE(var_arg, MachineRepresentation::kTagged, input); | 4621 VARIABLE(var_arg, MachineRepresentation::kTagged, input); |
4509 Label loop(this, &var_arg), out(this); | 4622 Label loop(this, &var_arg), out(this); |
4510 Goto(&loop); | 4623 Goto(&loop); |
4511 BIND(&loop); | 4624 BIND(&loop); |
(...skipping 40 matching lines...) |
4552 var_arg.Bind(CallStub(callable, context, arg)); | 4665 var_arg.Bind(CallStub(callable, context, arg)); |
4553 Goto(&loop); | 4666 Goto(&loop); |
4554 } | 4667 } |
4555 | 4668 |
4556 BIND(&return_zero); | 4669 BIND(&return_zero); |
4557 var_arg.Bind(SmiConstant(Smi::kZero)); | 4670 var_arg.Bind(SmiConstant(Smi::kZero)); |
4558 Goto(&out); | 4671 Goto(&out); |
4559 } | 4672 } |
4560 | 4673 |
4561 BIND(&out); | 4674 BIND(&out); |
4675 CSA_SLOW_ASSERT(this, IsNumber(var_arg.value())); | |
4562 return var_arg.value(); | 4676 return var_arg.value(); |
4563 } | 4677 } |
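A rough reference (plain C++, not CSA) for the value mapping the conversion loop above converges to; the truncate_minus_zero flag stands in for kTruncateMinusZero, and the name ToIntegerReference is illustrative only.

#include <cmath>

double ToIntegerReference(double number, bool truncate_minus_zero) {
  if (std::isnan(number)) return 0.0;           // NaN -> +0
  if (std::isinf(number)) return number;        // +/-Infinity pass through
  double result = std::trunc(number);           // round toward zero
  if (truncate_minus_zero && result == 0.0) return 0.0;  // -0 -> +0
  return result;
}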
4564 | 4678 |
4565 Node* CodeStubAssembler::DecodeWord32(Node* word32, uint32_t shift, | 4679 Node* CodeStubAssembler::DecodeWord32(Node* word32, uint32_t shift, |
4566 uint32_t mask) { | 4680 uint32_t mask) { |
4567 return Word32Shr(Word32And(word32, Int32Constant(mask)), | 4681 return Word32Shr(Word32And(word32, Int32Constant(mask)), |
4568 static_cast<int>(shift)); | 4682 static_cast<int>(shift)); |
4569 } | 4683 } |
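The two decoders here implement the usual "(word & mask) >> shift" bit-field extraction, where the mask is already positioned at the field. A small stand-alone illustration with a made-up field layout:

#include <cstdint>

// Hypothetical layout: a five-bit field starting at bit 3.
constexpr uint32_t kFieldShift = 3;
constexpr uint32_t kFieldMask = 0x1Fu << kFieldShift;

uint32_t DecodeField(uint32_t word) {
  return (word & kFieldMask) >> kFieldShift;    // mask in place, shift down
}
// Example: DecodeField(0xD8) == 0x1B (binary 11011, the field bits of 11011000).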
4570 | 4684 |
4571 Node* CodeStubAssembler::DecodeWord(Node* word, uint32_t shift, uint32_t mask) { | 4685 Node* CodeStubAssembler::DecodeWord(Node* word, uint32_t shift, uint32_t mask) { |
(...skipping 96 matching lines...) |
4668 BIND(&if_hascachedindex); | 4782 BIND(&if_hascachedindex); |
4669 var_index->Bind(DecodeWordFromWord32<Name::ArrayIndexValueBits>(hash)); | 4783 var_index->Bind(DecodeWordFromWord32<Name::ArrayIndexValueBits>(hash)); |
4670 Goto(if_keyisindex); | 4784 Goto(if_keyisindex); |
4671 } | 4785 } |
4672 | 4786 |
4673 void CodeStubAssembler::TryInternalizeString( | 4787 void CodeStubAssembler::TryInternalizeString( |
4674 Node* string, Label* if_index, Variable* var_index, Label* if_internalized, | 4788 Node* string, Label* if_index, Variable* var_index, Label* if_internalized, |
4675 Variable* var_internalized, Label* if_not_internalized, Label* if_bailout) { | 4789 Variable* var_internalized, Label* if_not_internalized, Label* if_bailout) { |
4676 DCHECK(var_index->rep() == MachineType::PointerRepresentation()); | 4790 DCHECK(var_index->rep() == MachineType::PointerRepresentation()); |
4677 DCHECK(var_internalized->rep() == MachineRepresentation::kTagged); | 4791 DCHECK(var_internalized->rep() == MachineRepresentation::kTagged); |
4792 CSA_SLOW_ASSERT(this, IsString(string)); | |
4678 Node* function = ExternalConstant( | 4793 Node* function = ExternalConstant( |
4679 ExternalReference::try_internalize_string_function(isolate())); | 4794 ExternalReference::try_internalize_string_function(isolate())); |
4680 Node* result = CallCFunction1(MachineType::AnyTagged(), | 4795 Node* result = CallCFunction1(MachineType::AnyTagged(), |
4681 MachineType::AnyTagged(), function, string); | 4796 MachineType::AnyTagged(), function, string); |
4682 Label internalized(this); | 4797 Label internalized(this); |
4683 GotoIf(TaggedIsNotSmi(result), &internalized); | 4798 GotoIf(TaggedIsNotSmi(result), &internalized); |
4684 Node* word_result = SmiUntag(result); | 4799 Node* word_result = SmiUntag(result); |
4685 GotoIf(WordEqual(word_result, IntPtrConstant(ResultSentinel::kNotFound)), | 4800 GotoIf(WordEqual(word_result, IntPtrConstant(ResultSentinel::kNotFound)), |
4686 if_not_internalized); | 4801 if_not_internalized); |
4687 GotoIf(WordEqual(word_result, IntPtrConstant(ResultSentinel::kUnsupported)), | 4802 GotoIf(WordEqual(word_result, IntPtrConstant(ResultSentinel::kUnsupported)), |
(...skipping 227 matching lines...) |
4915 void CodeStubAssembler::InsertEntry(Node* dictionary, Node* key, Node* value, | 5030 void CodeStubAssembler::InsertEntry(Node* dictionary, Node* key, Node* value, |
4916 Node* index, Node* enum_index) { | 5031 Node* index, Node* enum_index) { |
4917 UNREACHABLE(); // Use specializations instead. | 5032 UNREACHABLE(); // Use specializations instead. |
4918 } | 5033 } |
4919 | 5034 |
4920 template <> | 5035 template <> |
4921 void CodeStubAssembler::InsertEntry<NameDictionary>(Node* dictionary, | 5036 void CodeStubAssembler::InsertEntry<NameDictionary>(Node* dictionary, |
4922 Node* name, Node* value, | 5037 Node* name, Node* value, |
4923 Node* index, | 5038 Node* index, |
4924 Node* enum_index) { | 5039 Node* enum_index) { |
5040 CSA_SLOW_ASSERT(this, IsDictionary(dictionary)); | |
5041 | |
4925 // Store name and value. | 5042 // Store name and value. |
4926 StoreFixedArrayElement(dictionary, index, name); | 5043 StoreFixedArrayElement(dictionary, index, name); |
4927 StoreValueByKeyIndex<NameDictionary>(dictionary, index, value); | 5044 StoreValueByKeyIndex<NameDictionary>(dictionary, index, value); |
4928 | 5045 |
4929 // Prepare details of the new property. | 5046 // Prepare details of the new property. |
4930 const int kInitialIndex = 0; | 5047 const int kInitialIndex = 0; |
4931 PropertyDetails d(kData, NONE, kInitialIndex, PropertyCellType::kNoCell); | 5048 PropertyDetails d(kData, NONE, kInitialIndex, PropertyCellType::kNoCell); |
4932 enum_index = | 5049 enum_index = |
4933 SmiShl(enum_index, PropertyDetails::DictionaryStorageField::kShift); | 5050 SmiShl(enum_index, PropertyDetails::DictionaryStorageField::kShift); |
4934 STATIC_ASSERT(kInitialIndex == 0); | 5051 STATIC_ASSERT(kInitialIndex == 0); |
(...skipping 21 matching lines...) |
4956 void CodeStubAssembler::InsertEntry<GlobalDictionary>(Node* dictionary, | 5073 void CodeStubAssembler::InsertEntry<GlobalDictionary>(Node* dictionary, |
4957 Node* key, Node* value, | 5074 Node* key, Node* value, |
4958 Node* index, | 5075 Node* index, |
4959 Node* enum_index) { | 5076 Node* enum_index) { |
4960 UNIMPLEMENTED(); | 5077 UNIMPLEMENTED(); |
4961 } | 5078 } |
4962 | 5079 |
4963 template <class Dictionary> | 5080 template <class Dictionary> |
4964 void CodeStubAssembler::Add(Node* dictionary, Node* key, Node* value, | 5081 void CodeStubAssembler::Add(Node* dictionary, Node* key, Node* value, |
4965 Label* bailout) { | 5082 Label* bailout) { |
5083 CSA_SLOW_ASSERT(this, IsDictionary(dictionary)); | |
4966 Node* capacity = GetCapacity<Dictionary>(dictionary); | 5084 Node* capacity = GetCapacity<Dictionary>(dictionary); |
4967 Node* nof = GetNumberOfElements<Dictionary>(dictionary); | 5085 Node* nof = GetNumberOfElements<Dictionary>(dictionary); |
4968 Node* new_nof = SmiAdd(nof, SmiConstant(1)); | 5086 Node* new_nof = SmiAdd(nof, SmiConstant(1)); |
4969 // Require 33% to still be free after adding additional_elements. | 5087 // Require 33% to still be free after adding additional_elements. |
4970 // Computing "x + (x >> 1)" on a Smi x does not return a valid Smi! | 5088 // Computing "x + (x >> 1)" on a Smi x does not return a valid Smi! |
4971 // But that's OK here because it's only used for a comparison. | 5089 // But that's OK here because it's only used for a comparison. |
4972 Node* required_capacity_pseudo_smi = SmiAdd(new_nof, SmiShr(new_nof, 1)); | 5090 Node* required_capacity_pseudo_smi = SmiAdd(new_nof, SmiShr(new_nof, 1)); |
4973 GotoIf(SmiBelow(capacity, required_capacity_pseudo_smi), bailout); | 5091 GotoIf(SmiBelow(capacity, required_capacity_pseudo_smi), bailout); |
4974 // Require rehashing if more than 50% of free elements are deleted elements. | 5092 // Require rehashing if more than 50% of free elements are deleted elements. |
4975 Node* deleted = GetNumberOfDeletedElements<Dictionary>(dictionary); | 5093 Node* deleted = GetNumberOfDeletedElements<Dictionary>(dictionary); |
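A worked plain-integer illustration (not the stub) of the occupancy check above: after adding one element, capacity must still cover new_nof + new_nof/2, i.e. at least a third of the slots stay free. On real Smis the intermediate sum may leave the Smi range, which the comment above notes is tolerable because the value is only compared; with ordinary ints here that caveat does not arise. HasRoomForOneMore is a hypothetical helper.

#include <cstdio>

bool HasRoomForOneMore(int capacity, int nof) {
  int new_nof = nof + 1;
  int required_capacity = new_nof + (new_nof >> 1);  // new_nof * 1.5
  return capacity >= required_capacity;              // otherwise bail out
}

int main() {
  std::printf("%d\n", HasRoomForOneMore(16, 10));  // 1: needs 16, has 16
  std::printf("%d\n", HasRoomForOneMore(16, 11));  // 0: needs 18, has 16
  return 0;
}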
(...skipping 1617 matching lines...) |
6593 // Store the WeakCell in the feedback vector. | 6711 // Store the WeakCell in the feedback vector. |
6594 StoreFixedArrayElement(feedback_vector, slot, cell, UPDATE_WRITE_BARRIER, 0, | 6712 StoreFixedArrayElement(feedback_vector, slot, cell, UPDATE_WRITE_BARRIER, 0, |
6595 CodeStubAssembler::SMI_PARAMETERS); | 6713 CodeStubAssembler::SMI_PARAMETERS); |
6596 return cell; | 6714 return cell; |
6597 } | 6715 } |
6598 | 6716 |
6599 Node* CodeStubAssembler::BuildFastLoop( | 6717 Node* CodeStubAssembler::BuildFastLoop( |
6600 const CodeStubAssembler::VariableList& vars, Node* start_index, | 6718 const CodeStubAssembler::VariableList& vars, Node* start_index, |
6601 Node* end_index, const FastLoopBody& body, int increment, | 6719 Node* end_index, const FastLoopBody& body, int increment, |
6602 ParameterMode parameter_mode, IndexAdvanceMode advance_mode) { | 6720 ParameterMode parameter_mode, IndexAdvanceMode advance_mode) { |
6721 CSA_SLOW_ASSERT(this, IsParameterMode(start_index, parameter_mode)); | |
6722 CSA_SLOW_ASSERT(this, IsParameterMode(end_index, parameter_mode)); | |
6603 MachineRepresentation index_rep = (parameter_mode == INTPTR_PARAMETERS) | 6723 MachineRepresentation index_rep = (parameter_mode == INTPTR_PARAMETERS) |
6604 ? MachineType::PointerRepresentation() | 6724 ? MachineType::PointerRepresentation() |
6605 : MachineRepresentation::kTaggedSigned; | 6725 : MachineRepresentation::kTaggedSigned; |
6606 VARIABLE(var, index_rep, start_index); | 6726 VARIABLE(var, index_rep, start_index); |
6607 VariableList vars_copy(vars, zone()); | 6727 VariableList vars_copy(vars, zone()); |
6608 vars_copy.Add(&var, zone()); | 6728 vars_copy.Add(&var, zone()); |
6609 Label loop(this, vars_copy); | 6729 Label loop(this, vars_copy); |
6610 Label after_loop(this); | 6730 Label after_loop(this); |
6611 // Introduce an explicit second check of the termination condition before the | 6731 // Introduce an explicit second check of the termination condition before the |
6612 // loop that helps turbofan generate better code. If there's only a single | 6732 // loop that helps turbofan generate better code. If there's only a single |
(...skipping 17 matching lines...) |
6630 BIND(&after_loop); | 6750 BIND(&after_loop); |
6631 return var.value(); | 6751 return var.value(); |
6632 } | 6752 } |
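A rough scalar sketch (illustration only, assuming end_index is reached exactly) of the control-flow shape BuildFastLoop emits: one explicit termination check before entering the loop, then a do/while-style body so the back edge re-tests the condition, which is the "explicit second check" the comment above refers to.

#include <cstdio>

void FastLoopShape(int start_index, int end_index, int increment) {
  int index = start_index;
  if (index == end_index) return;         // explicit up-front termination check
  do {
    std::printf("body(%d)\n", index);     // stand-in for the FastLoopBody
    index += increment;                   // IndexAdvanceMode::kPost
  } while (index != end_index);
}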
6633 | 6753 |
6634 void CodeStubAssembler::BuildFastFixedArrayForEach( | 6754 void CodeStubAssembler::BuildFastFixedArrayForEach( |
6635 const CodeStubAssembler::VariableList& vars, Node* fixed_array, | 6755 const CodeStubAssembler::VariableList& vars, Node* fixed_array, |
6636 ElementsKind kind, Node* first_element_inclusive, | 6756 ElementsKind kind, Node* first_element_inclusive, |
6637 Node* last_element_exclusive, const FastFixedArrayForEachBody& body, | 6757 Node* last_element_exclusive, const FastFixedArrayForEachBody& body, |
6638 ParameterMode mode, ForEachDirection direction) { | 6758 ParameterMode mode, ForEachDirection direction) { |
6639 STATIC_ASSERT(FixedArray::kHeaderSize == FixedDoubleArray::kHeaderSize); | 6759 STATIC_ASSERT(FixedArray::kHeaderSize == FixedDoubleArray::kHeaderSize); |
6760 CSA_SLOW_ASSERT(this, IsParameterMode(first_element_inclusive, mode)); | |
6761 CSA_SLOW_ASSERT(this, IsParameterMode(last_element_exclusive, mode)); | |
6762 CSA_SLOW_ASSERT(this, IsFixedArray(fixed_array)); | |
6640 int32_t first_val; | 6763 int32_t first_val; |
6641 bool constant_first = ToInt32Constant(first_element_inclusive, first_val); | 6764 bool constant_first = ToInt32Constant(first_element_inclusive, first_val); |
6642 int32_t last_val; | 6765 int32_t last_val; |
6643 bool constant_last = ToInt32Constant(last_element_exclusive, last_val); | 6766 bool constant_last = ToInt32Constant(last_element_exclusive, last_val); |
6644 if (constant_first && constant_last) { | 6767 if (constant_first && constant_last) { |
6645 int delta = last_val - first_val; | 6768 int delta = last_val - first_val; |
6646 DCHECK(delta >= 0); | 6769 DCHECK(delta >= 0); |
6647 if (delta <= kElementLoopUnrollThreshold) { | 6770 if (delta <= kElementLoopUnrollThreshold) { |
6648 if (direction == ForEachDirection::kForward) { | 6771 if (direction == ForEachDirection::kForward) { |
6649 for (int i = first_val; i < last_val; ++i) { | 6772 for (int i = first_val; i < last_val; ++i) { |
(...skipping 40 matching lines...) |
6690 (kMaxRegularHeapObjectSize - base_size) / kPointerSize; | 6813 (kMaxRegularHeapObjectSize - base_size) / kPointerSize; |
6691 GotoIf(IntPtrOrSmiGreaterThan( | 6814 GotoIf(IntPtrOrSmiGreaterThan( |
6692 element_count, IntPtrOrSmiConstant(max_newspace_parameters, mode), | 6815 element_count, IntPtrOrSmiConstant(max_newspace_parameters, mode), |
6693 mode), | 6816 mode), |
6694 doesnt_fit); | 6817 doesnt_fit); |
6695 } | 6818 } |
6696 | 6819 |
6697 void CodeStubAssembler::InitializeFieldsWithRoot( | 6820 void CodeStubAssembler::InitializeFieldsWithRoot( |
6698 Node* object, Node* start_offset, Node* end_offset, | 6821 Node* object, Node* start_offset, Node* end_offset, |
6699 Heap::RootListIndex root_index) { | 6822 Heap::RootListIndex root_index) { |
6823 CSA_SLOW_ASSERT(this, TaggedIsNotSmi(object)); | |
6700 start_offset = IntPtrAdd(start_offset, IntPtrConstant(-kHeapObjectTag)); | 6824 start_offset = IntPtrAdd(start_offset, IntPtrConstant(-kHeapObjectTag)); |
6701 end_offset = IntPtrAdd(end_offset, IntPtrConstant(-kHeapObjectTag)); | 6825 end_offset = IntPtrAdd(end_offset, IntPtrConstant(-kHeapObjectTag)); |
6702 Node* root_value = LoadRoot(root_index); | 6826 Node* root_value = LoadRoot(root_index); |
6703 BuildFastLoop(end_offset, start_offset, | 6827 BuildFastLoop(end_offset, start_offset, |
6704 [this, object, root_value](Node* current) { | 6828 [this, object, root_value](Node* current) { |
6705 StoreNoWriteBarrier(MachineRepresentation::kTagged, object, | 6829 StoreNoWriteBarrier(MachineRepresentation::kTagged, object, |
6706 current, root_value); | 6830 current, root_value); |
6707 }, | 6831 }, |
6708 -kPointerSize, INTPTR_PARAMETERS, | 6832 -kPointerSize, INTPTR_PARAMETERS, |
6709 CodeStubAssembler::IndexAdvanceMode::kPre); | 6833 CodeStubAssembler::IndexAdvanceMode::kPre); |
6710 } | 6834 } |
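An illustration of the same fill pattern on a plain array: the stub subtracts kHeapObjectTag to turn tagged field offsets into raw addresses and then fills from the end towards the start with a pre-decrement loop; FillWithValue below only shows the backwards fill, with hypothetical parameters.

#include <cstddef>

void FillWithValue(void** fields, size_t start_index, size_t end_index,
                   void* value) {
  // Walk backwards from end_index to start_index, decrementing before each
  // store, mirroring the -kPointerSize / IndexAdvanceMode::kPre loop above.
  for (size_t i = end_index; i > start_index; --i) {
    fields[i - 1] = value;
  }
}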
6711 | 6835 |
6712 void CodeStubAssembler::BranchIfNumericRelationalComparison( | 6836 void CodeStubAssembler::BranchIfNumericRelationalComparison( |
6713 RelationalComparisonMode mode, Node* lhs, Node* rhs, Label* if_true, | 6837 RelationalComparisonMode mode, Node* lhs, Node* rhs, Label* if_true, |
6714 Label* if_false) { | 6838 Label* if_false) { |
6839 CSA_SLOW_ASSERT(this, IsNumber(lhs)); | |
6840 CSA_SLOW_ASSERT(this, IsNumber(rhs)); | |
6841 | |
6715 Label end(this); | 6842 Label end(this); |
6716 VARIABLE(result, MachineRepresentation::kTagged); | 6843 VARIABLE(result, MachineRepresentation::kTagged); |
6717 | 6844 |
6718 // Shared entry for floating point comparison. | 6845 // Shared entry for floating point comparison. |
6719 Label do_fcmp(this); | 6846 Label do_fcmp(this); |
6720 VARIABLE(var_fcmp_lhs, MachineRepresentation::kFloat64); | 6847 VARIABLE(var_fcmp_lhs, MachineRepresentation::kFloat64); |
6721 VARIABLE(var_fcmp_rhs, MachineRepresentation::kFloat64); | 6848 VARIABLE(var_fcmp_rhs, MachineRepresentation::kFloat64); |
6722 | 6849 |
6723 // Check if the {lhs} is a Smi or a HeapObject. | 6850 // Check if the {lhs} is a Smi or a HeapObject. |
6724 Label if_lhsissmi(this), if_lhsisnotsmi(this); | 6851 Label if_lhsissmi(this), if_lhsisnotsmi(this); |
(...skipping 91 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
6816 void CodeStubAssembler::GotoUnlessNumberLessThan(Node* lhs, Node* rhs, | 6943 void CodeStubAssembler::GotoUnlessNumberLessThan(Node* lhs, Node* rhs, |
6817 Label* if_false) { | 6944 Label* if_false) { |
6818 Label if_true(this); | 6945 Label if_true(this); |
6819 BranchIfNumericRelationalComparison(kLessThan, lhs, rhs, &if_true, if_false); | 6946 BranchIfNumericRelationalComparison(kLessThan, lhs, rhs, &if_true, if_false); |
6820 BIND(&if_true); | 6947 BIND(&if_true); |
6821 } | 6948 } |
6822 | 6949 |
6823 Node* CodeStubAssembler::RelationalComparison(RelationalComparisonMode mode, | 6950 Node* CodeStubAssembler::RelationalComparison(RelationalComparisonMode mode, |
6824 Node* lhs, Node* rhs, | 6951 Node* lhs, Node* rhs, |
6825 Node* context) { | 6952 Node* context) { |
6953 CSA_SLOW_ASSERT(this, IsNumber(lhs)); | |
6954 CSA_SLOW_ASSERT(this, IsNumber(rhs)); | |
6955 | |
6826 Label return_true(this), return_false(this), end(this); | 6956 Label return_true(this), return_false(this), end(this); |
6827 VARIABLE(result, MachineRepresentation::kTagged); | 6957 VARIABLE(result, MachineRepresentation::kTagged); |
6828 | 6958 |
6829 // Shared entry for floating point comparison. | 6959 // Shared entry for floating point comparison. |
6830 Label do_fcmp(this); | 6960 Label do_fcmp(this); |
6831 VARIABLE(var_fcmp_lhs, MachineRepresentation::kFloat64); | 6961 VARIABLE(var_fcmp_lhs, MachineRepresentation::kFloat64); |
6832 VARIABLE(var_fcmp_rhs, MachineRepresentation::kFloat64); | 6962 VARIABLE(var_fcmp_rhs, MachineRepresentation::kFloat64); |
6833 | 6963 |
6834 // We might need to loop several times due to ToPrimitive and/or ToNumber | 6964 // We might need to loop several times due to ToPrimitive and/or ToNumber |
6835 // conversions. | 6965 // conversions. |
(...skipping 1481 matching lines...) |
8317 | 8447 |
8318 BIND(&return_false); | 8448 BIND(&return_false); |
8319 var_result.Bind(FalseConstant()); | 8449 var_result.Bind(FalseConstant()); |
8320 Goto(&return_result); | 8450 Goto(&return_result); |
8321 | 8451 |
8322 BIND(&return_result); | 8452 BIND(&return_result); |
8323 return var_result.value(); | 8453 return var_result.value(); |
8324 } | 8454 } |
8325 | 8455 |
8326 Node* CodeStubAssembler::NumberInc(Node* value) { | 8456 Node* CodeStubAssembler::NumberInc(Node* value) { |
8457 CSA_SLOW_ASSERT(this, IsNumber(value)); | |
8458 | |
8327 VARIABLE(var_result, MachineRepresentation::kTagged); | 8459 VARIABLE(var_result, MachineRepresentation::kTagged); |
8328 VARIABLE(var_finc_value, MachineRepresentation::kFloat64); | 8460 VARIABLE(var_finc_value, MachineRepresentation::kFloat64); |
8329 Label if_issmi(this), if_isnotsmi(this), do_finc(this), end(this); | 8461 Label if_issmi(this), if_isnotsmi(this), do_finc(this), end(this); |
8330 Branch(TaggedIsSmi(value), &if_issmi, &if_isnotsmi); | 8462 Branch(TaggedIsSmi(value), &if_issmi, &if_isnotsmi); |
8331 | 8463 |
8332 BIND(&if_issmi); | 8464 BIND(&if_issmi); |
8333 { | 8465 { |
8334 // Try fast Smi addition first. | 8466 // Try fast Smi addition first. |
8335 Node* one = SmiConstant(Smi::FromInt(1)); | 8467 Node* one = SmiConstant(Smi::FromInt(1)); |
8336 Node* pair = IntPtrAddWithOverflow(BitcastTaggedToWord(value), | 8468 Node* pair = IntPtrAddWithOverflow(BitcastTaggedToWord(value), |
(...skipping 32 matching lines...) |
8369 Node* finc_result = Float64Add(finc_value, one); | 8501 Node* finc_result = Float64Add(finc_value, one); |
8370 var_result.Bind(AllocateHeapNumberWithValue(finc_result)); | 8502 var_result.Bind(AllocateHeapNumberWithValue(finc_result)); |
8371 Goto(&end); | 8503 Goto(&end); |
8372 } | 8504 } |
8373 | 8505 |
8374 BIND(&end); | 8506 BIND(&end); |
8375 return var_result.value(); | 8507 return var_result.value(); |
8376 } | 8508 } |
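A hedged sketch (ordinary C++, not CSA) of the strategy NumberInc follows: attempt the increment in the small-integer domain first and fall back to a double, the analogue of allocating a heap number, only when that would overflow. Real Smis are tagged machine words and the stub uses IntPtrAddWithOverflow; NumberIncSketch just shows the shape.

#include <cstdint>
#include <limits>

double NumberIncSketch(int32_t smi_value) {
  if (smi_value < std::numeric_limits<int32_t>::max()) {
    return smi_value + 1;                        // fast path: stays an integer
  }
  return static_cast<double>(smi_value) + 1.0;   // overflow: promote to double
}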
8377 | 8509 |
8378 Node* CodeStubAssembler::NumberDec(Node* value) { | 8510 Node* CodeStubAssembler::NumberDec(Node* value) { |
8511 CSA_SLOW_ASSERT(this, IsNumber(value)); | |
8512 | |
8379 VARIABLE(var_result, MachineRepresentation::kTagged); | 8513 VARIABLE(var_result, MachineRepresentation::kTagged); |
8380 VARIABLE(var_fdec_value, MachineRepresentation::kFloat64); | 8514 VARIABLE(var_fdec_value, MachineRepresentation::kFloat64); |
8381 Label if_issmi(this), if_isnotsmi(this), do_fdec(this), end(this); | 8515 Label if_issmi(this), if_isnotsmi(this), do_fdec(this), end(this); |
8382 Branch(TaggedIsSmi(value), &if_issmi, &if_isnotsmi); | 8516 Branch(TaggedIsSmi(value), &if_issmi, &if_isnotsmi); |
8383 | 8517 |
8384 BIND(&if_issmi); | 8518 BIND(&if_issmi); |
8385 { | 8519 { |
8386 // Try fast Smi subtraction first. | 8520 // Try fast Smi subtraction first. |
8387 Node* one = SmiConstant(Smi::FromInt(1)); | 8521 Node* one = SmiConstant(Smi::FromInt(1)); |
8388 Node* pair = IntPtrSubWithOverflow(BitcastTaggedToWord(value), | 8522 Node* pair = IntPtrSubWithOverflow(BitcastTaggedToWord(value), |
(...skipping 384 matching lines...) |
8773 Load(MachineType::Uint8(), | 8907 Load(MachineType::Uint8(), |
8774 ExternalConstant( | 8908 ExternalConstant( |
8775 ExternalReference::promise_hook_or_debug_is_active_address( | 8909 ExternalReference::promise_hook_or_debug_is_active_address( |
8776 isolate()))); | 8910 isolate()))); |
8777 return Word32NotEqual(promise_hook_or_debug_is_active, Int32Constant(0)); | 8911 return Word32NotEqual(promise_hook_or_debug_is_active, Int32Constant(0)); |
8778 } | 8912 } |
8779 | 8913 |
8780 Node* CodeStubAssembler::AllocateFunctionWithMapAndContext(Node* map, | 8914 Node* CodeStubAssembler::AllocateFunctionWithMapAndContext(Node* map, |
8781 Node* shared_info, | 8915 Node* shared_info, |
8782 Node* context) { | 8916 Node* context) { |
8917 CSA_SLOW_ASSERT(this, IsMap(map)); | |
8918 | |
8783 Node* const code = BitcastTaggedToWord( | 8919 Node* const code = BitcastTaggedToWord( |
8784 LoadObjectField(shared_info, SharedFunctionInfo::kCodeOffset)); | 8920 LoadObjectField(shared_info, SharedFunctionInfo::kCodeOffset)); |
8785 Node* const code_entry = | 8921 Node* const code_entry = |
8786 IntPtrAdd(code, IntPtrConstant(Code::kHeaderSize - kHeapObjectTag)); | 8922 IntPtrAdd(code, IntPtrConstant(Code::kHeaderSize - kHeapObjectTag)); |
8787 | 8923 |
8788 Node* const fun = Allocate(JSFunction::kSize); | 8924 Node* const fun = Allocate(JSFunction::kSize); |
8789 StoreMapNoWriteBarrier(fun, map); | 8925 StoreMapNoWriteBarrier(fun, map); |
8790 StoreObjectFieldRoot(fun, JSObject::kPropertiesOffset, | 8926 StoreObjectFieldRoot(fun, JSObject::kPropertiesOffset, |
8791 Heap::kEmptyFixedArrayRootIndex); | 8927 Heap::kEmptyFixedArrayRootIndex); |
8792 StoreObjectFieldRoot(fun, JSObject::kElementsOffset, | 8928 StoreObjectFieldRoot(fun, JSObject::kElementsOffset, |
(...skipping 66 matching lines...) |
8859 formatted.c_str(), TENURED); | 8995 formatted.c_str(), TENURED); |
8860 CallRuntime(Runtime::kGlobalPrint, NoContextConstant(), | 8996 CallRuntime(Runtime::kGlobalPrint, NoContextConstant(), |
8861 HeapConstant(string)); | 8997 HeapConstant(string)); |
8862 } | 8998 } |
8863 CallRuntime(Runtime::kDebugPrint, NoContextConstant(), tagged_value); | 8999 CallRuntime(Runtime::kDebugPrint, NoContextConstant(), tagged_value); |
8864 #endif | 9000 #endif |
8865 } | 9001 } |
8866 | 9002 |
8867 } // namespace internal | 9003 } // namespace internal |
8868 } // namespace v8 | 9004 } // namespace v8 |