Chromium Code Reviews

Diff: src/code-stub-assembler.cc

Issue 2419433008: Improve CodeStubAssembler assert functionality (Closed)
Patch Set: Fix windows Created 4 years, 2 months ago
// Copyright 2016 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/code-stub-assembler.h"
#include "src/code-factory.h"
#include "src/frames-inl.h"
#include "src/frames.h"
#include "src/ic/handler-configuration.h"
#include "src/ic/stub-cache.h"

namespace v8 {
namespace internal {

using compiler::Node;

CodeStubAssembler::CodeStubAssembler(Isolate* isolate, Zone* zone,
                                     const CallInterfaceDescriptor& descriptor,
                                     Code::Flags flags, const char* name,
                                     size_t result_size)
    : compiler::CodeAssembler(isolate, zone, descriptor, flags, name,
                              result_size) {}

CodeStubAssembler::CodeStubAssembler(Isolate* isolate, Zone* zone,
                                     int parameter_count, Code::Flags flags,
                                     const char* name)
    : compiler::CodeAssembler(isolate, zone, parameter_count, flags, name) {}
-void CodeStubAssembler::Assert(Node* condition) {
+void CodeStubAssembler::Assert(Node* condition, const char* message,
+                               const char* file, int line) {
#if defined(DEBUG)
  Label ok(this);
-  Comment("[ Assert");
+  Vector<char> buffer(Vector<char>::New(1024));
+  if (message != nullptr && FLAG_code_comments) {
+    SNPrintF(buffer, "[ Assert: %s", message);
+    printf("%d\n", buffer.length());

epertoso 2016/10/14 09:40:14 Spurious printf.
danno 2016/10/14 10:53:25 Done.

+    char* comment = new char[strlen(&(buffer[0])) + 1];
+    memcpy(comment, &(buffer[0]), strlen(&(buffer[0])) + 1);
+    Comment(comment);

epertoso 2016/10/14 09:40:14 Just Comment("[ Assert: %s", message) should be sufficient.
danno 2016/10/14 10:53:25 Done.

+  } else {
+    Comment("[ Assert ");
+  }
+
  GotoIf(condition, &ok);
+  if (message != nullptr) {
+    if (file != nullptr) {
+      SNPrintF(buffer, "CSA_ASSERT failed: %s [%s:%d]", message, file, line);
+    } else {
+      SNPrintF(buffer, "CSA_ASSERT failed: %s", message);
+    }
+    CallRuntime(
+        Runtime::kDebugPrint, SmiConstant(Smi::kZero),

Igor Sheludko 2016/10/14 09:48:59 You can probably call kGlobalPrint which will just …
danno 2016/10/14 10:53:25 Done.

+        HeapConstant(factory()->NewStringFromAsciiChecked(&(buffer[0]))));
+  }
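Following up on the exchange just above: a minimal sketch, assuming Runtime::kGlobalPrint is the replacement the reviewer has in mind, of how the print call could look after the suggested change (illustrative only, not the code in this patch set):

    // Sketch only: kGlobalPrint prints its string argument directly, so the
    // formatted failure message can be passed through unchanged. The
    // SmiConstant(Smi::kZero) stands in for "no context", exactly as in the
    // kDebugPrint call above.
    CallRuntime(
        Runtime::kGlobalPrint, SmiConstant(Smi::kZero),
        HeapConstant(factory()->NewStringFromAsciiChecked(&(buffer[0]))));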
  DebugBreak();
  Goto(&ok);
  Bind(&ok);
  Comment("] Assert");
#endif
}
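The CSA_ASSERT calls introduced throughout the rest of this file presumably come from a macro defined in src/code-stub-assembler.h (the other file in this CL, not shown here). A minimal sketch of what such a macro could look like, assuming it stringifies the condition and captures the expansion site with __FILE__ and __LINE__, forwarding everything to the four-argument Assert() above:

    // Hypothetical sketch; the real definition lives in code-stub-assembler.h.
    #define CSA_ASSERT(x) Assert((x), #x, __FILE__, __LINE__)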

Node* CodeStubAssembler::NoContextConstant() { return NumberConstant(0); }

#define HEAP_CONSTANT_ACCESSOR(rootName, name) \

(...skipping 852 matching lines...)

Node* CodeStubAssembler::LoadMap(Node* object) {
  return LoadObjectField(object, HeapObject::kMapOffset);
}

Node* CodeStubAssembler::LoadInstanceType(Node* object) {
  return LoadMapInstanceType(LoadMap(object));
}

void CodeStubAssembler::AssertInstanceType(Node* object,
                                           InstanceType instance_type) {
-  Assert(Word32Equal(LoadInstanceType(object), Int32Constant(instance_type)));
+  CSA_ASSERT(
+      Word32Equal(LoadInstanceType(object), Int32Constant(instance_type)));

epertoso 2016/10/14 09:40:14 Doesn't this always print out code-stub-assembler.cc …
danno 2016/10/14 10:53:25 Is that a problem? I think that's fine, it gives you …

}
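To make the point of that exchange concrete: if CSA_ASSERT captures __FILE__ and __LINE__ at its expansion site, as in the sketch above, then assertions issued from shared helpers such as AssertInstanceType always report this file rather than the stub that called the helper. A failure would print something along these lines (illustrative only; the exact path and line depend on the build):

    CSA_ASSERT failed: Word32Equal(LoadInstanceType(object), Int32Constant(instance_type)) [src/code-stub-assembler.cc:<line>]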

Node* CodeStubAssembler::LoadProperties(Node* object) {
  return LoadObjectField(object, JSObject::kPropertiesOffset);
}

Node* CodeStubAssembler::LoadElements(Node* object) {
  return LoadObjectField(object, JSObject::kElementsOffset);
}

(...skipping 39 matching lines...)

}

Node* CodeStubAssembler::LoadMapInstanceSize(Node* map) {
  return ChangeUint32ToWord(
      LoadObjectField(map, Map::kInstanceSizeOffset, MachineType::Uint8()));
}

Node* CodeStubAssembler::LoadMapInobjectProperties(Node* map) {
  // See Map::GetInObjectProperties() for details.
  STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE);
-  Assert(Int32GreaterThanOrEqual(LoadMapInstanceType(map),
-                                 Int32Constant(FIRST_JS_OBJECT_TYPE)));
+  CSA_ASSERT(Int32GreaterThanOrEqual(LoadMapInstanceType(map),
+                                     Int32Constant(FIRST_JS_OBJECT_TYPE)));
  return ChangeUint32ToWord(LoadObjectField(
      map, Map::kInObjectPropertiesOrConstructorFunctionIndexOffset,
      MachineType::Uint8()));
}

Node* CodeStubAssembler::LoadMapConstructorFunctionIndex(Node* map) {
  // See Map::GetConstructorFunctionIndex() for details.
  STATIC_ASSERT(FIRST_PRIMITIVE_TYPE == FIRST_TYPE);
-  Assert(Int32LessThanOrEqual(LoadMapInstanceType(map),
-                              Int32Constant(LAST_PRIMITIVE_TYPE)));
+  CSA_ASSERT(Int32LessThanOrEqual(LoadMapInstanceType(map),
+                                  Int32Constant(LAST_PRIMITIVE_TYPE)));
  return ChangeUint32ToWord(LoadObjectField(
      map, Map::kInObjectPropertiesOrConstructorFunctionIndexOffset,
      MachineType::Uint8()));
}

Node* CodeStubAssembler::LoadMapConstructor(Node* map) {
  Variable result(this, MachineRepresentation::kTagged);
  result.Bind(LoadObjectField(map, Map::kConstructorOrBackPointerOffset));

  Label done(this), loop(this, &result);

(...skipping 393 matching lines...)
                                 MachineRepresentation::kTagged);
  StoreObjectFieldNoWriteBarrier(result, ConsString::kSecondOffset, second,
                                 MachineRepresentation::kTagged);
  return result;
}

Node* CodeStubAssembler::AllocateRegExpResult(Node* context, Node* length,
                                              Node* index, Node* input) {
  Node* const max_length =
      SmiConstant(Smi::FromInt(JSArray::kInitialMaxFastElementArray));
-  Assert(SmiLessThanOrEqual(length, max_length));
+  CSA_ASSERT(SmiLessThanOrEqual(length, max_length));

  // Allocate the JSRegExpResult.
  // TODO(jgruber): Fold JSArray and FixedArray allocations, then remove
  // unneeded store of elements.
  Node* const result = Allocate(JSRegExpResult::kSize);

  // TODO(jgruber): Store map as Heap constant?
  Node* const native_context = LoadNativeContext(context);
  Node* const map =
      LoadContextElement(native_context, Context::REGEXP_RESULT_MAP_INDEX);

(...skipping 1749 matching lines...)
      Goto(&end);
    }
  }

  Bind(&end);
  return var_result.value();
}

Node* CodeStubAssembler::NonNumberToNumber(Node* context, Node* input) {
  // Assert input is a HeapObject (not smi or heap number)
-  Assert(Word32BinaryNot(TaggedIsSmi(input)));
-  Assert(Word32NotEqual(LoadMap(input), HeapNumberMapConstant()));
+  CSA_ASSERT(Word32BinaryNot(TaggedIsSmi(input)));
+  CSA_ASSERT(Word32NotEqual(LoadMap(input), HeapNumberMapConstant()));

  // We might need to loop once here due to ToPrimitive conversions.
  Variable var_input(this, MachineRepresentation::kTagged);
  Variable var_result(this, MachineRepresentation::kTagged);
  Label loop(this, &var_input);
  Label end(this);
  var_input.Bind(input);
  Goto(&loop);
  Bind(&loop);
  {

(...skipping 441 matching lines...)

  Label if_objectisspecial(this);
  STATIC_ASSERT(JS_GLOBAL_OBJECT_TYPE <= LAST_SPECIAL_RECEIVER_TYPE);
  GotoIf(Int32LessThanOrEqual(instance_type,
                              Int32Constant(LAST_SPECIAL_RECEIVER_TYPE)),
         &if_objectisspecial);

  Node* bit_field = LoadMapBitField(map);
  Node* mask = Int32Constant(1 << Map::kHasNamedInterceptor |
                             1 << Map::kIsAccessCheckNeeded);
-  Assert(Word32Equal(Word32And(bit_field, mask), Int32Constant(0)));
+  CSA_ASSERT(Word32Equal(Word32And(bit_field, mask), Int32Constant(0)));

  Node* bit_field3 = LoadMapBitField3(map);
  Node* bit = BitFieldDecode<Map::DictionaryMap>(bit_field3);
  Label if_isfastmap(this), if_isslowmap(this);
  Branch(Word32Equal(bit, Int32Constant(0)), &if_isfastmap, &if_isslowmap);
  Bind(&if_isfastmap);
  {
    Comment("DescriptorArrayLookup");
    Node* nof = BitFieldDecodeWord<Map::NumberOfOwnDescriptorsBits>(bit_field3);
    // Bail out to the runtime for large numbers of own descriptors. The stub

(...skipping 394 matching lines...)
  {
    Variable var_entry(this, MachineType::PointerRepresentation());
    Node* elements = LoadElements(object);
    NumberDictionaryLookup<SeededNumberDictionary>(
        elements, intptr_index, if_found, &var_entry, if_not_found);
  }
  Bind(&if_isfaststringwrapper);
  {
    AssertInstanceType(object, JS_VALUE_TYPE);
    Node* string = LoadJSValueValue(object);
-    Assert(IsStringInstanceType(LoadInstanceType(string)));
+    CSA_ASSERT(IsStringInstanceType(LoadInstanceType(string)));
    Node* length = LoadStringLength(string);
    GotoIf(UintPtrLessThan(intptr_index, SmiUntag(length)), if_found);
    Goto(&if_isobjectorsmi);
  }
  Bind(&if_isslowstringwrapper);
  {
    AssertInstanceType(object, JS_VALUE_TYPE);
    Node* string = LoadJSValueValue(object);
-    Assert(IsStringInstanceType(LoadInstanceType(string)));
+    CSA_ASSERT(IsStringInstanceType(LoadInstanceType(string)));
    Node* length = LoadStringLength(string);
    GotoIf(UintPtrLessThan(intptr_index, SmiUntag(length)), if_found);
    Goto(&if_isdictionary);
  }
  Bind(&if_oob);
  {
    // Positive OOB indices mean "not found", negative indices must be
    // converted to property names.
    GotoIf(IntPtrLessThan(intptr_index, IntPtrConstant(0)), if_bailout);
    Goto(if_not_found);

(...skipping 410 matching lines...)
    Goto(&loop);
  }
}

compiler::Node* CodeStubAssembler::StubCachePrimaryOffset(compiler::Node* name,
                                                          compiler::Node* map) {
  // See v8::internal::StubCache::PrimaryOffset().
  STATIC_ASSERT(StubCache::kCacheIndexShift == Name::kHashShift);
  // Compute the hash of the name (use entire hash field).
  Node* hash_field = LoadNameHashField(name);
-  Assert(Word32Equal(
+  CSA_ASSERT(Word32Equal(
      Word32And(hash_field, Int32Constant(Name::kHashNotComputedMask)),
      Int32Constant(0)));

  // Using only the low bits in 64-bit mode is unlikely to increase the
  // risk of collision even if the heap is spread over an area larger than
  // 4Gb (and not at all if it isn't).
  Node* hash = Int32Add(hash_field, map);
  // Base the offset on a simple combination of name and map.
  hash = Word32Xor(hash, Int32Constant(StubCache::kPrimaryMagic));
  uint32_t mask = (StubCache::kPrimaryTableSize - 1)

(...skipping 460 matching lines...)
    Node* validity_cell = LoadObjectField(handler, Tuple3::kValue1Offset);
    Node* cell_value = LoadObjectField(validity_cell, Cell::kValueOffset);
    GotoIf(WordNotEqual(cell_value,
                        SmiConstant(Smi::FromInt(Map::kPrototypeChainValid))),
           miss);

    Node* holder =
        LoadWeakCellValue(LoadObjectField(handler, Tuple3::kValue2Offset));
    // The |holder| is guaranteed to be alive at this point since we passed
    // both the receiver map check and the validity cell check.
-    Assert(WordNotEqual(holder, IntPtrConstant(0)));
+    CSA_ASSERT(WordNotEqual(holder, IntPtrConstant(0)));

    Node* smi_handler = LoadObjectField(handler, Tuple3::kValue3Offset);
-    Assert(TaggedIsSmi(smi_handler));
+    CSA_ASSERT(TaggedIsSmi(smi_handler));

    var_holder.Bind(holder);
    var_smi_handler.Bind(smi_handler);
    Goto(&if_smi_handler);
  }

  // |handler| is a heap object. Must be code, call it.
  Bind(&call_handler);
  {
    typedef LoadWithVectorDescriptor Descriptor;

(...skipping 363 matching lines...)

  Node* delta = IntPtrOrSmiConstant(JSObject::kFieldsAdded, mode);
  Node* new_capacity = IntPtrAdd(length, delta);

  // Grow properties array.
  ElementsKind kind = FAST_ELEMENTS;
  DCHECK(kMaxNumberOfDescriptors + JSObject::kFieldsAdded <
         FixedArrayBase::GetMaxLengthForNewSpaceAllocation(kind));
  // The size of a new properties backing store is guaranteed to be small
  // enough that the new backing store will be allocated in new space.
-  Assert(UintPtrLessThan(new_capacity, IntPtrConstant(kMaxNumberOfDescriptors +
-                                                      JSObject::kFieldsAdded)));
+  CSA_ASSERT(UintPtrLessThan(
+      new_capacity,
+      IntPtrConstant(kMaxNumberOfDescriptors + JSObject::kFieldsAdded)));

  Node* new_properties = AllocateFixedArray(kind, new_capacity, mode);

  FillFixedArrayWithValue(kind, new_properties, length, new_capacity,
                          Heap::kUndefinedValueRootIndex, mode);

  // |new_properties| is guaranteed to be in new space, so we can skip
  // the write barrier.
  CopyFixedArrayElements(kind, properties, new_properties, length,
                         SKIP_WRITE_BARRIER, mode);

(...skipping 126 matching lines...)
  Node* adjusted_length = IntPtrSub(elements_length, intptr_two);

  GotoIf(UintPtrGreaterThanOrEqual(key, adjusted_length), &if_unmapped);

  Node* mapped_index = LoadFixedArrayElement(
      elements, IntPtrAdd(key, intptr_two), 0, INTPTR_PARAMETERS);
  Branch(WordEqual(mapped_index, TheHoleConstant()), &if_unmapped, &if_mapped);

  Bind(&if_mapped);
  {
-    Assert(TaggedIsSmi(mapped_index));
+    CSA_ASSERT(TaggedIsSmi(mapped_index));
    mapped_index = SmiUntag(mapped_index);
    Node* the_context = LoadFixedArrayElement(elements, IntPtrConstant(0), 0,
                                              INTPTR_PARAMETERS);
    // Assert that we can use LoadFixedArrayElement/StoreFixedArrayElement
    // methods for accessing Context.
    STATIC_ASSERT(Context::kHeaderSize == FixedArray::kHeaderSize);
    DCHECK_EQ(Context::SlotOffset(0) + kHeapObjectTag,
              FixedArray::OffsetOfElementAt(0));
    if (is_load) {
      Node* result = LoadFixedArrayElement(the_context, mapped_index, 0,
                                           INTPTR_PARAMETERS);
-      Assert(WordNotEqual(result, TheHoleConstant()));
+      CSA_ASSERT(WordNotEqual(result, TheHoleConstant()));
      var_result.Bind(result);
    } else {
      StoreFixedArrayElement(the_context, mapped_index, value,
                             UPDATE_WRITE_BARRIER, INTPTR_PARAMETERS);
    }
    Goto(&end);
  }

  Bind(&if_unmapped);
  {

(...skipping 1843 matching lines...)

  GotoIf(IsStringInstanceType(instance_type), &return_string);

#define SIMD128_BRANCH(TYPE, Type, type, lane_count, lane_type) \
  Label return_##type(this);                                    \
  Node* type##_map = HeapConstant(factory()->type##_map());     \
  GotoIf(WordEqual(map, type##_map), &return_##type);
  SIMD128_TYPES(SIMD128_BRANCH)
#undef SIMD128_BRANCH

-  Assert(Word32Equal(instance_type, Int32Constant(SYMBOL_TYPE)));
+  CSA_ASSERT(Word32Equal(instance_type, Int32Constant(SYMBOL_TYPE)));
  result_var.Bind(HeapConstant(isolate()->factory()->symbol_string()));
  Goto(&return_result);

  Bind(&return_number);
  {
    result_var.Bind(HeapConstant(isolate()->factory()->number_string()));
    Goto(&return_result);
  }

  Bind(&if_oddball);

(...skipping 70 matching lines...)

    result.Bind(CallRuntime(Runtime::kInstanceOf, context, object, callable));
    Goto(&end);
  }

  Bind(&end);
  return result.value();
}

}  // namespace internal
}  // namespace v8