OLD | NEW |
1 // Copyright 2016 the V8 project authors. All rights reserved. | 1 // Copyright 2016 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/code-stub-assembler.h" | 5 #include "src/code-stub-assembler.h" |
6 #include "src/code-factory.h" | 6 #include "src/code-factory.h" |
7 | 7 |
8 namespace v8 { | 8 namespace v8 { |
9 namespace internal { | 9 namespace internal { |
10 | 10 |
(...skipping 387 matching lines...) |
398 Bind(&doesnt_need_filler); | 398 Bind(&doesnt_need_filler); |
399 Goto(&merge_address); | 399 Goto(&merge_address); |
400 | 400 |
401 Bind(&merge_address); | 401 Bind(&merge_address); |
402 // Update the top. | 402 // Update the top. |
403 StoreNoWriteBarrier(MachineType::PointerRepresentation(), top_address, | 403 StoreNoWriteBarrier(MachineType::PointerRepresentation(), top_address, |
404 IntPtrAdd(top, adjusted_size.value())); | 404 IntPtrAdd(top, adjusted_size.value())); |
405 return address.value(); | 405 return address.value(); |
406 } | 406 } |
407 | 407 |
408 Node* CodeStubAssembler::Allocate(int size_in_bytes, AllocationFlags flags) { | 408 Node* CodeStubAssembler::Allocate(Node* size_in_bytes, AllocationFlags flags) { |
409 bool const new_space = !(flags & kPretenured); | 409 bool const new_space = !(flags & kPretenured); |
410 Node* top_address = ExternalConstant( | 410 Node* top_address = ExternalConstant( |
411 new_space | 411 new_space |
412 ? ExternalReference::new_space_allocation_top_address(isolate()) | 412 ? ExternalReference::new_space_allocation_top_address(isolate()) |
413 : ExternalReference::old_space_allocation_top_address(isolate())); | 413 : ExternalReference::old_space_allocation_top_address(isolate())); |
414 Node* limit_address = ExternalConstant( | 414 Node* limit_address = ExternalConstant( |
415 new_space | 415 new_space |
416 ? ExternalReference::new_space_allocation_limit_address(isolate()) | 416 ? ExternalReference::new_space_allocation_limit_address(isolate()) |
417 : ExternalReference::old_space_allocation_limit_address(isolate())); | 417 : ExternalReference::old_space_allocation_limit_address(isolate())); |
418 | 418 |
419 #ifdef V8_HOST_ARCH_32_BIT | 419 #ifdef V8_HOST_ARCH_32_BIT |
420 if (flags & kDoubleAlignment) { | 420 if (flags & kDoubleAlignment) { |
421 return AllocateRawAligned(IntPtrConstant(size_in_bytes), flags, top_address, | 421 return AllocateRawAligned(size_in_bytes, flags, top_address, limit_address); |
422 limit_address); | |
423 } | 422 } |
424 #endif | 423 #endif |
425 | 424 |
426 return AllocateRawUnaligned(IntPtrConstant(size_in_bytes), flags, top_address, | 425 return AllocateRawUnaligned(size_in_bytes, flags, top_address, limit_address); |
427 limit_address); | 426 } |
| 427 |
| 428 Node* CodeStubAssembler::Allocate(int size_in_bytes, AllocationFlags flags) { |
| 429 return CodeStubAssembler::Allocate(IntPtrConstant(size_in_bytes), flags); |
428 } | 430 } |
429 | 431 |
430 Node* CodeStubAssembler::InnerAllocate(Node* previous, int offset) { | 432 Node* CodeStubAssembler::InnerAllocate(Node* previous, int offset) { |
431 return IntPtrAdd(previous, IntPtrConstant(offset)); | 433 return IntPtrAdd(previous, IntPtrConstant(offset)); |
432 } | 434 } |
433 | 435 |
434 Node* CodeStubAssembler::LoadBufferObject(Node* buffer, int offset, | 436 Node* CodeStubAssembler::LoadBufferObject(Node* buffer, int offset, |
435 MachineType rep) { | 437 MachineType rep) { |
436 return Load(rep, buffer, IntPtrConstant(offset)); | 438 return Load(rep, buffer, IntPtrConstant(offset)); |
437 } | 439 } |
(...skipping 46 matching lines...) |
484 | 486 |
485 Node* CodeStubAssembler::LoadMapDescriptors(Node* map) { | 487 Node* CodeStubAssembler::LoadMapDescriptors(Node* map) { |
486 return LoadObjectField(map, Map::kDescriptorsOffset); | 488 return LoadObjectField(map, Map::kDescriptorsOffset); |
487 } | 489 } |
488 | 490 |
489 Node* CodeStubAssembler::LoadNameHash(Node* name) { | 491 Node* CodeStubAssembler::LoadNameHash(Node* name) { |
490 return Load(MachineType::Uint32(), name, | 492 return Load(MachineType::Uint32(), name, |
491 IntPtrConstant(Name::kHashFieldOffset - kHeapObjectTag)); | 493 IntPtrConstant(Name::kHashFieldOffset - kHeapObjectTag)); |
492 } | 494 } |
493 | 495 |
| 496 Node* CodeStubAssembler::AllocateUninitializedFixedArray(Node* length) { |
| 497 Node* header_size = IntPtrConstant(FixedArray::kHeaderSize); |
| 498 Node* data_size = WordShl(length, IntPtrConstant(kPointerSizeLog2)); |
| 499 Node* total_size = IntPtrAdd(data_size, header_size); |
| 500 |
| 501 Node* result = Allocate(total_size, kNone); |
| 502 StoreMapNoWriteBarrier(result, LoadRoot(Heap::kFixedArrayMapRootIndex)); |
| 503 StoreObjectFieldNoWriteBarrier(result, FixedArray::kLengthOffset, |
| 504 SmiTag(length)); |
| 505 |
| 506 return result; |
| 507 } |
| 508 |
494 Node* CodeStubAssembler::LoadFixedArrayElementInt32Index( | 509 Node* CodeStubAssembler::LoadFixedArrayElementInt32Index( |
495 Node* object, Node* index, int additional_offset) { | 510 Node* object, Node* index, int additional_offset) { |
496 Node* header_size = IntPtrConstant(additional_offset + | 511 Node* header_size = IntPtrConstant(additional_offset + |
497 FixedArray::kHeaderSize - kHeapObjectTag); | 512 FixedArray::kHeaderSize - kHeapObjectTag); |
498 if (Is64()) { | 513 if (Is64()) { |
499 index = ChangeInt32ToInt64(index); | 514 index = ChangeInt32ToInt64(index); |
500 } | 515 } |
501 Node* scaled_index = WordShl(index, IntPtrConstant(kPointerSizeLog2)); | 516 Node* scaled_index = WordShl(index, IntPtrConstant(kPointerSizeLog2)); |
502 Node* offset = IntPtrAdd(scaled_index, header_size); | 517 Node* offset = IntPtrAdd(scaled_index, header_size); |
503 return Load(MachineType::AnyTagged(), object, offset); | 518 return Load(MachineType::AnyTagged(), object, offset); |
(...skipping 619 matching lines...) |
1123 } | 1138 } |
1124 | 1139 |
1125 Node* CodeStubAssembler::BitFieldDecode(Node* word32, uint32_t shift, | 1140 Node* CodeStubAssembler::BitFieldDecode(Node* word32, uint32_t shift, |
1126 uint32_t mask) { | 1141 uint32_t mask) { |
1127 return Word32Shr(Word32And(word32, Int32Constant(mask)), | 1142 return Word32Shr(Word32And(word32, Int32Constant(mask)), |
1128 Int32Constant(shift)); | 1143 Int32Constant(shift)); |
1129 } | 1144 } |
1130 | 1145 |
1131 } // namespace internal | 1146 } // namespace internal |
1132 } // namespace v8 | 1147 } // namespace v8 |
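
A minimal caller sketch for the new AllocateUninitializedFixedArray helper, hypothetical and not part of this CL: it assumes the same "src/code-stub-assembler.h" include and the v8::internal namespace as the file above. The wrapper name is invented for illustration, and the offset-taking StoreNoWriteBarrier overload and MachineRepresentation::kTagged are assumed to be available at this revision; the remaining operations all appear in the diff itself.

// Hypothetical sketch, not part of this CL: allocate a FixedArray of
// |length| (untagged IntPtr) elements and fill slot 0 with the Smi zero.
compiler::Node* AllocateFixedArrayWithSmiZeroAtSlot0(CodeStubAssembler* a,
                                                     compiler::Node* length) {
  typedef compiler::Node Node;
  // The helper writes the map and the Smi-tagged length, but leaves the
  // element slots uninitialized, so the caller must fill every slot before
  // the next point at which the GC could inspect the array.
  Node* array = a->AllocateUninitializedFixedArray(length);
  // Same offset arithmetic as LoadFixedArrayElementInt32Index above:
  // offset = index * kPointerSize + FixedArray::kHeaderSize - kHeapObjectTag.
  Node* header = a->IntPtrConstant(FixedArray::kHeaderSize - kHeapObjectTag);
  Node* offset = a->IntPtrAdd(
      a->WordShl(a->IntPtrConstant(0), a->IntPtrConstant(kPointerSizeLog2)),
      header);
  // Smis are immediates, so no write barrier is needed for this store.
  a->StoreNoWriteBarrier(MachineRepresentation::kTagged, array, offset,
                         a->SmiTag(a->IntPtrConstant(0)));
  return array;
}

The new Allocate(int) overload simply wraps the Node* version via IntPtrConstant, so compile-time and runtime allocation sizes share the same AllocateRawAligned/AllocateRawUnaligned path.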