| OLD | NEW |
| 1 // Copyright 2016 the V8 project authors. All rights reserved. | 1 // Copyright 2016 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/code-stub-assembler.h" | 5 #include "src/code-stub-assembler.h" |
| 6 #include "src/code-factory.h" | 6 #include "src/code-factory.h" |
| 7 #include "src/frames-inl.h" | 7 #include "src/frames-inl.h" |
| 8 #include "src/frames.h" | 8 #include "src/frames.h" |
| 9 #include "src/ic/handler-configuration.h" | 9 #include "src/ic/handler-configuration.h" |
| 10 #include "src/ic/stub-cache.h" | 10 #include "src/ic/stub-cache.h" |
| (...skipping 68 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 79 } | 79 } |
| 80 | 80 |
| 81 Node* CodeStubAssembler::StaleRegisterConstant() { | 81 Node* CodeStubAssembler::StaleRegisterConstant() { |
| 82 return LoadRoot(Heap::kStaleRegisterRootIndex); | 82 return LoadRoot(Heap::kStaleRegisterRootIndex); |
| 83 } | 83 } |
| 84 | 84 |
| 85 Node* CodeStubAssembler::IntPtrOrSmiConstant(int value, ParameterMode mode) { | 85 Node* CodeStubAssembler::IntPtrOrSmiConstant(int value, ParameterMode mode) { |
| 86 if (mode == SMI_PARAMETERS) { | 86 if (mode == SMI_PARAMETERS) { |
| 87 return SmiConstant(Smi::FromInt(value)); | 87 return SmiConstant(Smi::FromInt(value)); |
| 88 } else { | 88 } else { |
| 89 DCHECK_EQ(INTEGER_PARAMETERS, mode); | 89 DCHECK(mode == INTEGER_PARAMETERS || mode == INTPTR_PARAMETERS); |
| 90 return IntPtrConstant(value); | 90 return IntPtrConstant(value); |
| 91 } | 91 } |
| 92 } | 92 } |
| 93 | 93 |
| 94 Node* CodeStubAssembler::Float64Round(Node* x) { | 94 Node* CodeStubAssembler::Float64Round(Node* x) { |
| 95 Node* one = Float64Constant(1.0); | 95 Node* one = Float64Constant(1.0); |
| 96 Node* one_half = Float64Constant(0.5); | 96 Node* one_half = Float64Constant(0.5); |
| 97 | 97 |
| 98 Variable var_x(this, MachineRepresentation::kFloat64); | 98 Variable var_x(this, MachineRepresentation::kFloat64); |
| 99 Label return_x(this); | 99 Label return_x(this); |
| (...skipping 990 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1090 } | 1090 } |
| 1091 | 1091 |
| 1092 Node* CodeStubAssembler::LoadWeakCellValue(Node* weak_cell, Label* if_cleared) { | 1092 Node* CodeStubAssembler::LoadWeakCellValue(Node* weak_cell, Label* if_cleared) { |
| 1093 Node* value = LoadObjectField(weak_cell, WeakCell::kValueOffset); | 1093 Node* value = LoadObjectField(weak_cell, WeakCell::kValueOffset); |
| 1094 if (if_cleared != nullptr) { | 1094 if (if_cleared != nullptr) { |
| 1095 GotoIf(WordEqual(value, IntPtrConstant(0)), if_cleared); | 1095 GotoIf(WordEqual(value, IntPtrConstant(0)), if_cleared); |
| 1096 } | 1096 } |
| 1097 return value; | 1097 return value; |
| 1098 } | 1098 } |
| 1099 | 1099 |
| 1100 Node* CodeStubAssembler::AllocateUninitializedFixedArray(Node* length) { | |
| 1101 Node* header_size = IntPtrConstant(FixedArray::kHeaderSize); | |
| 1102 Node* data_size = WordShl(length, IntPtrConstant(kPointerSizeLog2)); | |
| 1103 Node* total_size = IntPtrAdd(data_size, header_size); | |
| 1104 | |
| 1105 Node* result = Allocate(total_size, kNone); | |
| 1106 StoreMapNoWriteBarrier(result, LoadRoot(Heap::kFixedArrayMapRootIndex)); | |
| 1107 StoreObjectFieldNoWriteBarrier(result, FixedArray::kLengthOffset, | |
| 1108 SmiTag(length)); | |
| 1109 | |
| 1110 return result; | |
| 1111 } | |
| 1112 | |
| 1113 Node* CodeStubAssembler::LoadFixedArrayElement(Node* object, Node* index_node, | 1100 Node* CodeStubAssembler::LoadFixedArrayElement(Node* object, Node* index_node, |
| 1114 int additional_offset, | 1101 int additional_offset, |
| 1115 ParameterMode parameter_mode) { | 1102 ParameterMode parameter_mode) { |
| 1116 int32_t header_size = | 1103 int32_t header_size = |
| 1117 FixedArray::kHeaderSize + additional_offset - kHeapObjectTag; | 1104 FixedArray::kHeaderSize + additional_offset - kHeapObjectTag; |
| 1118 Node* offset = ElementOffsetFromIndex(index_node, FAST_HOLEY_ELEMENTS, | 1105 Node* offset = ElementOffsetFromIndex(index_node, FAST_HOLEY_ELEMENTS, |
| 1119 parameter_mode, header_size); | 1106 parameter_mode, header_size); |
| 1120 return Load(MachineType::AnyTagged(), object, offset); | 1107 return Load(MachineType::AnyTagged(), object, offset); |
| 1121 } | 1108 } |
| 1122 | 1109 |
| (...skipping 31 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1154 Int32Constant(Context::NATIVE_CONTEXT_INDEX)); | 1141 Int32Constant(Context::NATIVE_CONTEXT_INDEX)); |
| 1155 } | 1142 } |
| 1156 | 1143 |
| 1157 Node* CodeStubAssembler::LoadJSArrayElementsMap(ElementsKind kind, | 1144 Node* CodeStubAssembler::LoadJSArrayElementsMap(ElementsKind kind, |
| 1158 Node* native_context) { | 1145 Node* native_context) { |
| 1159 return LoadFixedArrayElement(native_context, | 1146 return LoadFixedArrayElement(native_context, |
| 1160 Int32Constant(Context::ArrayMapIndex(kind))); | 1147 Int32Constant(Context::ArrayMapIndex(kind))); |
| 1161 } | 1148 } |
| 1162 | 1149 |
| 1163 Node* CodeStubAssembler::StoreHeapNumberValue(Node* object, Node* value) { | 1150 Node* CodeStubAssembler::StoreHeapNumberValue(Node* object, Node* value) { |
| 1164 return StoreNoWriteBarrier( | 1151 return StoreObjectFieldNoWriteBarrier(object, HeapNumber::kValueOffset, value, |
| 1165 MachineRepresentation::kFloat64, object, | 1152 MachineRepresentation::kFloat64); |
| 1166 IntPtrConstant(HeapNumber::kValueOffset - kHeapObjectTag), value); | |
| 1167 } | 1153 } |
| 1168 | 1154 |
| 1169 Node* CodeStubAssembler::StoreObjectField( | 1155 Node* CodeStubAssembler::StoreObjectField( |
| 1170 Node* object, int offset, Node* value) { | 1156 Node* object, int offset, Node* value) { |
| 1171 return Store(MachineRepresentation::kTagged, object, | 1157 return Store(MachineRepresentation::kTagged, object, |
| 1172 IntPtrConstant(offset - kHeapObjectTag), value); | 1158 IntPtrConstant(offset - kHeapObjectTag), value); |
| 1173 } | 1159 } |
| 1174 | 1160 |
| 1175 Node* CodeStubAssembler::StoreObjectFieldNoWriteBarrier( | 1161 Node* CodeStubAssembler::StoreObjectFieldNoWriteBarrier( |
| 1176 Node* object, int offset, Node* value, MachineRepresentation rep) { | 1162 Node* object, int offset, Node* value, MachineRepresentation rep) { |
| (...skipping 176 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1353 Node* total_size = | 1339 Node* total_size = |
| 1354 ElementOffsetFromIndex(capacity_node, kind, mode, base_size); | 1340 ElementOffsetFromIndex(capacity_node, kind, mode, base_size); |
| 1355 | 1341 |
| 1356 // Allocate both array and elements object, and initialize the JSArray. | 1342 // Allocate both array and elements object, and initialize the JSArray. |
| 1357 Heap* heap = isolate()->heap(); | 1343 Heap* heap = isolate()->heap(); |
| 1358 Node* array = Allocate(total_size); | 1344 Node* array = Allocate(total_size); |
| 1359 StoreMapNoWriteBarrier(array, array_map); | 1345 StoreMapNoWriteBarrier(array, array_map); |
| 1360 Node* empty_properties = LoadRoot(Heap::kEmptyFixedArrayRootIndex); | 1346 Node* empty_properties = LoadRoot(Heap::kEmptyFixedArrayRootIndex); |
| 1361 StoreObjectFieldNoWriteBarrier(array, JSArray::kPropertiesOffset, | 1347 StoreObjectFieldNoWriteBarrier(array, JSArray::kPropertiesOffset, |
| 1362 empty_properties); | 1348 empty_properties); |
| 1363 StoreObjectFieldNoWriteBarrier( | 1349 StoreObjectFieldNoWriteBarrier(array, JSArray::kLengthOffset, |
| 1364 array, JSArray::kLengthOffset, | 1350 TagParameter(length_node, mode)); |
| 1365 mode == SMI_PARAMETERS ? length_node : SmiTag(length_node)); | |
| 1366 | 1351 |
| 1367 if (allocation_site != nullptr) { | 1352 if (allocation_site != nullptr) { |
| 1368 InitializeAllocationMemento(array, JSArray::kSize, allocation_site); | 1353 InitializeAllocationMemento(array, JSArray::kSize, allocation_site); |
| 1369 } | 1354 } |
| 1370 | 1355 |
| 1371 // Setup elements object. | 1356 // Setup elements object. |
| 1372 Node* elements = InnerAllocate(array, elements_offset); | 1357 Node* elements = InnerAllocate(array, elements_offset); |
| 1373 StoreObjectFieldNoWriteBarrier(array, JSArray::kElementsOffset, elements); | 1358 StoreObjectFieldNoWriteBarrier(array, JSArray::kElementsOffset, elements); |
| 1374 Handle<Map> elements_map(is_double ? heap->fixed_double_array_map() | 1359 Handle<Map> elements_map(is_double ? heap->fixed_double_array_map() |
| 1375 : heap->fixed_array_map()); | 1360 : heap->fixed_array_map()); |
| 1376 StoreMapNoWriteBarrier(elements, HeapConstant(elements_map)); | 1361 StoreMapNoWriteBarrier(elements, HeapConstant(elements_map)); |
| 1377 StoreObjectFieldNoWriteBarrier( | 1362 StoreObjectFieldNoWriteBarrier(elements, FixedArray::kLengthOffset, |
| 1378 elements, FixedArray::kLengthOffset, | 1363 TagParameter(capacity_node, mode)); |
| 1379 mode == SMI_PARAMETERS ? capacity_node : SmiTag(capacity_node)); | |
| 1380 | 1364 |
| 1381 // Fill in the elements with holes. | 1365 // Fill in the elements with holes. |
| 1382 FillFixedArrayWithValue(kind, elements, IntPtrConstant(0), capacity_node, | 1366 FillFixedArrayWithValue(kind, elements, IntPtrConstant(0), capacity_node, |
| 1383 Heap::kTheHoleValueRootIndex, mode); | 1367 Heap::kTheHoleValueRootIndex, mode); |
| 1384 | 1368 |
| 1385 return array; | 1369 return array; |
| 1386 } | 1370 } |
| 1387 | 1371 |
| 1388 Node* CodeStubAssembler::AllocateFixedArray(ElementsKind kind, | 1372 Node* CodeStubAssembler::AllocateFixedArray(ElementsKind kind, |
| 1389 Node* capacity_node, | 1373 Node* capacity_node, |
| 1390 ParameterMode mode, | 1374 ParameterMode mode, |
| 1391 AllocationFlags flags) { | 1375 AllocationFlags flags) { |
| 1392 Node* total_size = GetFixedAarrayAllocationSize(capacity_node, kind, mode); | 1376 Node* total_size = GetFixedAarrayAllocationSize(capacity_node, kind, mode); |
| 1393 | 1377 |
| 1394 // Allocate both array and elements object, and initialize the JSArray. | 1378 // Allocate both array and elements object, and initialize the JSArray. |
| 1395 Node* array = Allocate(total_size, flags); | 1379 Node* array = Allocate(total_size, flags); |
| 1396 Heap* heap = isolate()->heap(); | 1380 Heap* heap = isolate()->heap(); |
| 1397 Handle<Map> map(IsFastDoubleElementsKind(kind) | 1381 Handle<Map> map(IsFastDoubleElementsKind(kind) |
| 1398 ? heap->fixed_double_array_map() | 1382 ? heap->fixed_double_array_map() |
| 1399 : heap->fixed_array_map()); | 1383 : heap->fixed_array_map()); |
| 1400 if (flags & kPretenured) { | 1384 if (flags & kPretenured) { |
| 1401 StoreObjectField(array, JSObject::kMapOffset, HeapConstant(map)); | 1385 StoreObjectField(array, JSObject::kMapOffset, HeapConstant(map)); |
| 1402 } else { | 1386 } else { |
| 1403 StoreMapNoWriteBarrier(array, HeapConstant(map)); | 1387 StoreMapNoWriteBarrier(array, HeapConstant(map)); |
| 1404 } | 1388 } |
| 1405 StoreObjectFieldNoWriteBarrier( | 1389 StoreObjectFieldNoWriteBarrier(array, FixedArray::kLengthOffset, |
| 1406 array, FixedArray::kLengthOffset, | 1390 TagParameter(capacity_node, mode)); |
| 1407 mode == INTEGER_PARAMETERS ? SmiTag(capacity_node) : capacity_node); | |
| 1408 return array; | 1391 return array; |
| 1409 } | 1392 } |
| 1410 | 1393 |
| 1411 void CodeStubAssembler::FillFixedArrayWithValue( | 1394 void CodeStubAssembler::FillFixedArrayWithValue( |
| 1412 ElementsKind kind, Node* array, Node* from_node, Node* to_node, | 1395 ElementsKind kind, Node* array, Node* from_node, Node* to_node, |
| 1413 Heap::RootListIndex value_root_index, ParameterMode mode) { | 1396 Heap::RootListIndex value_root_index, ParameterMode mode) { |
| 1414 bool is_double = IsFastDoubleElementsKind(kind); | 1397 bool is_double = IsFastDoubleElementsKind(kind); |
| 1415 DCHECK(value_root_index == Heap::kTheHoleValueRootIndex || | 1398 DCHECK(value_root_index == Heap::kTheHoleValueRootIndex || |
| 1416 value_root_index == Heap::kUndefinedValueRootIndex); | 1399 value_root_index == Heap::kUndefinedValueRootIndex); |
| 1417 DCHECK_IMPLIES(is_double, value_root_index == Heap::kTheHoleValueRootIndex); | 1400 DCHECK_IMPLIES(is_double, value_root_index == Heap::kTheHoleValueRootIndex); |
| (...skipping 128 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1546 | 1529 |
| 1547 Bind(&done); | 1530 Bind(&done); |
| 1548 } | 1531 } |
| 1549 | 1532 |
| 1550 Node* CodeStubAssembler::CalculateNewElementsCapacity(Node* old_capacity, | 1533 Node* CodeStubAssembler::CalculateNewElementsCapacity(Node* old_capacity, |
| 1551 ParameterMode mode) { | 1534 ParameterMode mode) { |
| 1552 Node* half_old_capacity = WordShr(old_capacity, IntPtrConstant(1)); | 1535 Node* half_old_capacity = WordShr(old_capacity, IntPtrConstant(1)); |
| 1553 Node* new_capacity = IntPtrAdd(half_old_capacity, old_capacity); | 1536 Node* new_capacity = IntPtrAdd(half_old_capacity, old_capacity); |
| 1554 Node* unconditioned_result = | 1537 Node* unconditioned_result = |
| 1555 IntPtrAdd(new_capacity, IntPtrOrSmiConstant(16, mode)); | 1538 IntPtrAdd(new_capacity, IntPtrOrSmiConstant(16, mode)); |
| 1556 if (mode == INTEGER_PARAMETERS) { | 1539 if (mode == INTEGER_PARAMETERS || mode == INTPTR_PARAMETERS) { |
| 1557 return unconditioned_result; | 1540 return unconditioned_result; |
| 1558 } else { | 1541 } else { |
| 1559 int const kSmiShiftBits = kSmiShiftSize + kSmiTagSize; | 1542 int const kSmiShiftBits = kSmiShiftSize + kSmiTagSize; |
| 1560 return WordAnd(unconditioned_result, | 1543 return WordAnd(unconditioned_result, |
| 1561 IntPtrConstant(static_cast<size_t>(-1) << kSmiShiftBits)); | 1544 IntPtrConstant(static_cast<size_t>(-1) << kSmiShiftBits)); |
| 1562 } | 1545 } |
| 1563 } | 1546 } |
| 1564 | 1547 |
| 1565 Node* CodeStubAssembler::CheckAndGrowElementsCapacity(Node* context, | 1548 Node* CodeStubAssembler::CheckAndGrowElementsCapacity(Node* context, |
| 1566 Node* elements, | 1549 Node* elements, |
| 1567 ElementsKind kind, | 1550 ElementsKind kind, |
| 1568 Node* key, Label* fail) { | 1551 Node* key, Label* fail) { |
| 1569 Node* capacity = LoadFixedArrayBaseLength(elements); | 1552 Node* capacity = LoadFixedArrayBaseLength(elements); |
| 1570 | 1553 |
| 1571 // On 32-bit platforms, there is a slight performance advantage to doing all | 1554 ParameterMode mode = OptimalParameterMode(); |
| 1572 // of the arithmetic for the new backing store with SMIs, since it's possible | 1555 capacity = UntagParameter(capacity, mode); |
| 1573 // to save a few tag/untag operations without paying an extra expense when | 1556 key = UntagParameter(key, mode); |
| 1574 // calculating array offset (the smi math can be folded away) and there are | |
| 1575 // fewer live ranges. Thus only convert |capacity| and |key| to untagged value | |
| 1576 // on 64-bit platforms. | |
| 1577 ParameterMode mode = Is64() ? INTEGER_PARAMETERS : SMI_PARAMETERS; | |
| 1578 if (mode == INTEGER_PARAMETERS) { | |
| 1579 capacity = SmiUntag(capacity); | |
| 1580 key = SmiUntag(key); | |
| 1581 } | |
| 1582 | 1557 |
| 1583 // If the gap growth is too big, fall back to the runtime. | 1558 // If the gap growth is too big, fall back to the runtime. |
| 1584 Node* max_gap = IntPtrOrSmiConstant(JSObject::kMaxGap, mode); | 1559 Node* max_gap = IntPtrOrSmiConstant(JSObject::kMaxGap, mode); |
| 1585 Node* max_capacity = IntPtrAdd(capacity, max_gap); | 1560 Node* max_capacity = IntPtrAdd(capacity, max_gap); |
| 1586 GotoIf(UintPtrGreaterThanOrEqual(key, max_capacity), fail); | 1561 GotoIf(UintPtrGreaterThanOrEqual(key, max_capacity), fail); |
| 1587 | 1562 |
| 1588 // Calculate the capacity of the new backing store | 1563 // Calculate the capacity of the new backing store |
| 1589 Node* new_capacity = CalculateNewElementsCapacity( | 1564 Node* new_capacity = CalculateNewElementsCapacity( |
| 1590 IntPtrAdd(key, IntPtrOrSmiConstant(1, mode)), mode); | 1565 IntPtrAdd(key, IntPtrOrSmiConstant(1, mode)), mode); |
| 1591 | 1566 |
| (...skipping 2618 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 4210 Heap::kTheHoleValueRootIndex); | 4185 Heap::kTheHoleValueRootIndex); |
| 4211 | 4186 |
| 4212 // Store the WeakCell in the feedback vector. | 4187 // Store the WeakCell in the feedback vector. |
| 4213 StoreFixedArrayElement(feedback_vector, slot, cell, UPDATE_WRITE_BARRIER, | 4188 StoreFixedArrayElement(feedback_vector, slot, cell, UPDATE_WRITE_BARRIER, |
| 4214 CodeStubAssembler::SMI_PARAMETERS); | 4189 CodeStubAssembler::SMI_PARAMETERS); |
| 4215 return cell; | 4190 return cell; |
| 4216 } | 4191 } |
| 4217 | 4192 |
| 4218 } // namespace internal | 4193 } // namespace internal |
| 4219 } // namespace v8 | 4194 } // namespace v8 |
| OLD | NEW |