| OLD | NEW |
| 1 // Copyright 2016 the V8 project authors. All rights reserved. | 1 // Copyright 2016 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 #include "src/code-stub-assembler.h" | 4 #include "src/code-stub-assembler.h" |
| 5 #include "src/code-factory.h" | 5 #include "src/code-factory.h" |
| 6 #include "src/frames-inl.h" | 6 #include "src/frames-inl.h" |
| 7 #include "src/frames.h" | 7 #include "src/frames.h" |
| 8 | 8 |
| 9 namespace v8 { | 9 namespace v8 { |
| 10 namespace internal { | 10 namespace internal { |
| (...skipping 455 matching lines...) |
| 466 } | 466 } |
| 467 | 467 |
| 468 Node* CodeStubAssembler::SmiLessThan(Node* a, Node* b) { | 468 Node* CodeStubAssembler::SmiLessThan(Node* a, Node* b) { |
| 469 return IntPtrLessThan(BitcastTaggedToWord(a), BitcastTaggedToWord(b)); | 469 return IntPtrLessThan(BitcastTaggedToWord(a), BitcastTaggedToWord(b)); |
| 470 } | 470 } |
| 471 | 471 |
| 472 Node* CodeStubAssembler::SmiLessThanOrEqual(Node* a, Node* b) { | 472 Node* CodeStubAssembler::SmiLessThanOrEqual(Node* a, Node* b) { |
| 473 return IntPtrLessThanOrEqual(BitcastTaggedToWord(a), BitcastTaggedToWord(b)); | 473 return IntPtrLessThanOrEqual(BitcastTaggedToWord(a), BitcastTaggedToWord(b)); |
| 474 } | 474 } |
| 475 | 475 |
| 476 Node* CodeStubAssembler::SmiGreaterThan(Node* a, Node* b) { |
| 477 return IntPtrGreaterThan(BitcastTaggedToWord(a), BitcastTaggedToWord(b)); |
| 478 } |
| 479 |
| 476 Node* CodeStubAssembler::SmiMax(Node* a, Node* b) { | 480 Node* CodeStubAssembler::SmiMax(Node* a, Node* b) { |
| 477 return SelectTaggedConstant(SmiLessThan(a, b), b, a); | 481 return SelectTaggedConstant(SmiLessThan(a, b), b, a); |
| 478 } | 482 } |
| 479 | 483 |
| 480 Node* CodeStubAssembler::SmiMin(Node* a, Node* b) { | 484 Node* CodeStubAssembler::SmiMin(Node* a, Node* b) { |
| 481 return SelectTaggedConstant(SmiLessThan(a, b), a, b); | 485 return SelectTaggedConstant(SmiLessThan(a, b), a, b); |
| 482 } | 486 } |
| 483 | 487 |
| 484 Node* CodeStubAssembler::SmiMod(Node* a, Node* b) { | 488 Node* CodeStubAssembler::SmiMod(Node* a, Node* b) { |
| 485 Variable var_result(this, MachineRepresentation::kTagged); | 489 Variable var_result(this, MachineRepresentation::kTagged); |
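Note on the new SmiGreaterThan (new lines 476-478): it follows the same pattern as the SmiLessThan/SmiLessThanOrEqual helpers above, comparing the bitcast tagged words directly. That works because Smi tagging is a pure left shift with a zero tag bit, so signed comparison of the raw words preserves the ordering of the untagged values. A minimal standalone sketch of that invariant (plain C++, not CSA; TagSmi is a hypothetical stand-in for the real tagging scheme):

  #include <cassert>
  #include <cstdint>

  // Hypothetical helper mimicking 32-bit-style Smi tagging: the payload is
  // shifted up by one bit, leaving the tag bit clear. Multiplication by 2 is
  // used instead of << so the sketch stays well-defined for negative payloads.
  int32_t TagSmi(int32_t value) { return value * 2; }

  int main() {
    // Ordering of the tagged words matches ordering of the untagged payloads.
    assert((TagSmi(-5) < TagSmi(3)) == (-5 < 3));
    assert((TagSmi(7) > TagSmi(2)) == (7 > 2));
    return 0;
  }

The CSA helpers rely on the same invariant via BitcastTaggedToWord plus the word-sized comparison nodes.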
| (...skipping 1564 matching lines...) |
| 2050 // Setup elements object. | 2054 // Setup elements object. |
| 2051 Heap::RootListIndex elements_map_index = | 2055 Heap::RootListIndex elements_map_index = |
| 2052 IsFastDoubleElementsKind(kind) ? Heap::kFixedDoubleArrayMapRootIndex | 2056 IsFastDoubleElementsKind(kind) ? Heap::kFixedDoubleArrayMapRootIndex |
| 2053 : Heap::kFixedArrayMapRootIndex; | 2057 : Heap::kFixedArrayMapRootIndex; |
| 2054 DCHECK(Heap::RootIsImmortalImmovable(elements_map_index)); | 2058 DCHECK(Heap::RootIsImmortalImmovable(elements_map_index)); |
| 2055 StoreMapNoWriteBarrier(elements, elements_map_index); | 2059 StoreMapNoWriteBarrier(elements, elements_map_index); |
| 2056 StoreObjectFieldNoWriteBarrier(elements, FixedArray::kLengthOffset, | 2060 StoreObjectFieldNoWriteBarrier(elements, FixedArray::kLengthOffset, |
| 2057 TagParameter(capacity, capacity_mode)); | 2061 TagParameter(capacity, capacity_mode)); |
| 2058 | 2062 |
| 2059 // Fill in the elements with holes. | 2063 // Fill in the elements with holes. |
| 2060 FillFixedArrayWithValue( | 2064 FillFixedArrayWithValue(kind, elements, IntPtrOrSmiConstant(0, capacity_mode), |
| 2061 kind, elements, capacity_mode == SMI_PARAMETERS ? SmiConstant(Smi::kZero) | 2065 capacity, Heap::kTheHoleValueRootIndex, |
| 2062 : IntPtrConstant(0), | 2066 capacity_mode); |
| 2063 capacity, Heap::kTheHoleValueRootIndex, capacity_mode); | |
| 2064 | 2067 |
| 2065 return array; | 2068 return array; |
| 2066 } | 2069 } |
| 2067 | 2070 |
| 2068 Node* CodeStubAssembler::AllocateFixedArray(ElementsKind kind, | 2071 Node* CodeStubAssembler::AllocateFixedArray(ElementsKind kind, |
| 2069 Node* capacity_node, | 2072 Node* capacity_node, |
| 2070 ParameterMode mode, | 2073 ParameterMode mode, |
| 2071 AllocationFlags flags) { | 2074 AllocationFlags flags) { |
| 2072 CSA_ASSERT(this, | 2075 CSA_ASSERT(this, IntPtrOrSmiGreaterThan(capacity_node, |
| 2073 IntPtrGreaterThan(capacity_node, IntPtrOrSmiConstant(0, mode))); | 2076 IntPtrOrSmiConstant(0, mode), mode)); |
| 2074 Node* total_size = GetFixedArrayAllocationSize(capacity_node, kind, mode); | 2077 Node* total_size = GetFixedArrayAllocationSize(capacity_node, kind, mode); |
| 2075 | 2078 |
| 2076 // Allocate both array and elements object, and initialize the JSArray. | 2079 // Allocate both array and elements object, and initialize the JSArray. |
| 2077 Node* array = Allocate(total_size, flags); | 2080 Node* array = Allocate(total_size, flags); |
| 2078 Heap::RootListIndex map_index = IsFastDoubleElementsKind(kind) | 2081 Heap::RootListIndex map_index = IsFastDoubleElementsKind(kind) |
| 2079 ? Heap::kFixedDoubleArrayMapRootIndex | 2082 ? Heap::kFixedDoubleArrayMapRootIndex |
| 2080 : Heap::kFixedArrayMapRootIndex; | 2083 : Heap::kFixedArrayMapRootIndex; |
| 2081 DCHECK(Heap::RootIsImmortalImmovable(map_index)); | 2084 DCHECK(Heap::RootIsImmortalImmovable(map_index)); |
| 2082 StoreMapNoWriteBarrier(array, map_index); | 2085 StoreMapNoWriteBarrier(array, map_index); |
| 2083 StoreObjectFieldNoWriteBarrier(array, FixedArray::kLengthOffset, | 2086 StoreObjectFieldNoWriteBarrier(array, FixedArray::kLengthOffset, |
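Note on the two hunks above: the FillFixedArrayWithValue call site (new lines 2064-2066) and the CSA_ASSERT in AllocateFixedArray (new lines 2075-2076) both switch from hand-rolled mode handling to the mode-aware helpers IntPtrOrSmiConstant and IntPtrOrSmiGreaterThan, so the ParameterMode is passed through instead of being branched on at each call site. A rough sketch of the dispatch pattern these helpers follow, in CSA style (the bodies are assumptions inferred from the call sites, not the actual V8 implementation, and are not meant to compile outside the V8 tree):

  Node* CodeStubAssembler::IntPtrOrSmiConstant(int value, ParameterMode mode) {
    // Produce the constant in whichever representation the caller works in.
    if (mode == SMI_PARAMETERS) return SmiConstant(Smi::FromInt(value));
    return IntPtrConstant(value);
  }

  Node* CodeStubAssembler::IntPtrOrSmiGreaterThan(Node* a, Node* b,
                                                  ParameterMode mode) {
    // Compare as tagged Smis or as raw word-sized integers, per the mode.
    if (mode == SMI_PARAMETERS) return SmiGreaterThan(a, b);
    return IntPtrGreaterThan(a, b);
  }

The SMI_PARAMETERS branch of IntPtrOrSmiGreaterThan is presumably why SmiGreaterThan is introduced in the first hunk of this change.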
| (...skipping 3413 matching lines...) |
| 5497 } | 5500 } |
| 5498 | 5501 |
| 5499 void CodeStubAssembler::ExtendPropertiesBackingStore(Node* object) { | 5502 void CodeStubAssembler::ExtendPropertiesBackingStore(Node* object) { |
| 5500 Node* properties = LoadProperties(object); | 5503 Node* properties = LoadProperties(object); |
| 5501 Node* length = LoadFixedArrayBaseLength(properties); | 5504 Node* length = LoadFixedArrayBaseLength(properties); |
| 5502 | 5505 |
| 5503 ParameterMode mode = OptimalParameterMode(); | 5506 ParameterMode mode = OptimalParameterMode(); |
| 5504 length = UntagParameter(length, mode); | 5507 length = UntagParameter(length, mode); |
| 5505 | 5508 |
| 5506 Node* delta = IntPtrOrSmiConstant(JSObject::kFieldsAdded, mode); | 5509 Node* delta = IntPtrOrSmiConstant(JSObject::kFieldsAdded, mode); |
| 5507 Node* new_capacity = IntPtrAdd(length, delta); | 5510 Node* new_capacity = IntPtrOrSmiAdd(length, delta, mode); |
| 5508 | 5511 |
| 5509 // Grow properties array. | 5512 // Grow properties array. |
| 5510 ElementsKind kind = FAST_ELEMENTS; | 5513 ElementsKind kind = FAST_ELEMENTS; |
| 5511 DCHECK(kMaxNumberOfDescriptors + JSObject::kFieldsAdded < | 5514 DCHECK(kMaxNumberOfDescriptors + JSObject::kFieldsAdded < |
| 5512 FixedArrayBase::GetMaxLengthForNewSpaceAllocation(kind)); | 5515 FixedArrayBase::GetMaxLengthForNewSpaceAllocation(kind)); |
| 5513 // The size of a new properties backing store is guaranteed to be small | 5516 // The size of a new properties backing store is guaranteed to be small |
| 5514 // enough that the new backing store will be allocated in new space. | 5517 // enough that the new backing store will be allocated in new space. |
| 5515 CSA_ASSERT(this, UintPtrLessThan(new_capacity, | 5518 CSA_ASSERT(this, |
| 5516 IntPtrConstant(kMaxNumberOfDescriptors + | 5519 UintPtrOrSmiLessThan( |
| 5517 JSObject::kFieldsAdded))); | 5520 new_capacity, |
| 5521 IntPtrOrSmiConstant( |
| 5522 kMaxNumberOfDescriptors + JSObject::kFieldsAdded, mode), |
| 5523 mode)); |
| 5518 | 5524 |
| 5519 Node* new_properties = AllocateFixedArray(kind, new_capacity, mode); | 5525 Node* new_properties = AllocateFixedArray(kind, new_capacity, mode); |
| 5520 | 5526 |
| 5521 FillFixedArrayWithValue(kind, new_properties, length, new_capacity, | 5527 FillFixedArrayWithValue(kind, new_properties, length, new_capacity, |
| 5522 Heap::kUndefinedValueRootIndex, mode); | 5528 Heap::kUndefinedValueRootIndex, mode); |
| 5523 | 5529 |
| 5524 // |new_properties| is guaranteed to be in new space, so we can skip | 5530 // |new_properties| is guaranteed to be in new space, so we can skip |
| 5525 // the write barrier. | 5531 // the write barrier. |
| 5526 CopyFixedArrayElements(kind, properties, new_properties, length, | 5532 CopyFixedArrayElements(kind, properties, new_properties, length, |
| 5527 SKIP_WRITE_BARRIER, mode); | 5533 SKIP_WRITE_BARRIER, mode); |
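Note on ExtendPropertiesBackingStore: the capacity arithmetic and the new-space bound check now run in the OptimalParameterMode() representation via IntPtrOrSmiAdd and UintPtrOrSmiLessThan (new lines 5510 and 5518-5523), instead of assuming raw IntPtr values for what may be a Smi-tagged length. A sketch of the corresponding mode-aware helpers, hedged the same way as above (bodies are illustrative assumptions, not the V8 source):

  Node* CodeStubAssembler::IntPtrOrSmiAdd(Node* a, Node* b,
                                          ParameterMode mode) {
    // Add either as tagged Smis or as raw word-sized integers.
    if (mode == SMI_PARAMETERS) return SmiAdd(a, b);
    return IntPtrAdd(a, b);
  }

  Node* CodeStubAssembler::UintPtrOrSmiLessThan(Node* a, Node* b,
                                                ParameterMode mode) {
    // Unsigned comparison; for Smis, comparing the bitcast tagged words is
    // equivalent to comparing the untagged payloads unsigned.
    if (mode == SMI_PARAMETERS)
      return UintPtrLessThan(BitcastTaggedToWord(a), BitcastTaggedToWord(b));
    return UintPtrLessThan(a, b);
  }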
| (...skipping 2698 matching lines...) |
| 8226 | 8232 |
| 8227 Node* CodeStubAssembler::IsDebugActive() { | 8233 Node* CodeStubAssembler::IsDebugActive() { |
| 8228 Node* is_debug_active = Load( | 8234 Node* is_debug_active = Load( |
| 8229 MachineType::Uint8(), | 8235 MachineType::Uint8(), |
| 8230 ExternalConstant(ExternalReference::debug_is_active_address(isolate()))); | 8236 ExternalConstant(ExternalReference::debug_is_active_address(isolate()))); |
| 8231 return WordNotEqual(is_debug_active, Int32Constant(0)); | 8237 return WordNotEqual(is_debug_active, Int32Constant(0)); |
| 8232 } | 8238 } |
| 8233 | 8239 |
| 8234 } // namespace internal | 8240 } // namespace internal |
| 8235 } // namespace v8 | 8241 } // namespace v8 |