OLD | NEW |
1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 35 matching lines...)
46 MacroAssembler::MacroAssembler(Isolate* arg_isolate, | 46 MacroAssembler::MacroAssembler(Isolate* arg_isolate, |
47 byte * buffer, | 47 byte * buffer, |
48 unsigned buffer_size) | 48 unsigned buffer_size) |
49 : Assembler(arg_isolate, buffer, buffer_size), | 49 : Assembler(arg_isolate, buffer, buffer_size), |
50 generating_stub_(false), | 50 generating_stub_(false), |
51 #if DEBUG | 51 #if DEBUG |
52 allow_macro_instructions_(true), | 52 allow_macro_instructions_(true), |
53 #endif | 53 #endif |
54 has_frame_(false), | 54 has_frame_(false), |
55 use_real_aborts_(true), | 55 use_real_aborts_(true), |
56 sp_(jssp), tmp_list_(ip0, ip1), fptmp_list_(fp_scratch) { | 56 sp_(jssp), tmp_list_(ip0, ip1), fptmp_list_(fp_scratch1, fp_scratch2) { |
57 if (isolate() != NULL) { | 57 if (isolate() != NULL) { |
58 code_object_ = Handle<Object>(isolate()->heap()->undefined_value(), | 58 code_object_ = Handle<Object>(isolate()->heap()->undefined_value(), |
59 isolate()); | 59 isolate()); |
60 } | 60 } |
61 } | 61 } |
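Note on the member initializers above: the NEW side seeds fptmp_list_ with two FP scratch registers (fp_scratch1, fp_scratch2) instead of one, so macros can borrow a D register through UseScratchRegisterScope. A minimal hedged sketch of that pattern, assuming the usual helper API of this port (the surrounding function is hypothetical):

    void EmitSomething(MacroAssembler* masm) {
      UseScratchRegisterScope temps(masm);
      DoubleRegister d_tmp = temps.AcquireD();  // drawn from masm's FP tmp list
      // ... use d_tmp as a temporary ...
    }  // d_tmp is handed back when the scope is destroyed

AllocateHeapNumber further down relies on exactly this to acquire an FP register for the heap number map.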
62 | 62 |
63 | 63 |
64 void MacroAssembler::LogicalMacro(const Register& rd, | 64 void MacroAssembler::LogicalMacro(const Register& rd, |
65 const Register& rn, | 65 const Register& rn, |
66 const Operand& operand, | 66 const Operand& operand, |
(...skipping 1122 matching lines...)
1189 Abort(kTheCurrentStackPointerIsBelowCsp); | 1189 Abort(kTheCurrentStackPointerIsBelowCsp); |
1190 | 1190 |
1191 bind(&ok); | 1191 bind(&ok); |
1192 // Restore StackPointer(). | 1192 // Restore StackPointer(). |
1193 sub(StackPointer(), csp, StackPointer()); | 1193 sub(StackPointer(), csp, StackPointer()); |
1194 } | 1194 } |
1195 } | 1195 } |
1196 } | 1196 } |
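For readers of the excerpt above: the final sub only makes sense given the setup in the skipped lines. A hedged sketch of the arithmetic, assuming the earlier code clobbered StackPointer() with the difference it used for the comparison against csp:

    // Presumed setup (in the skipped lines):  StackPointer() = csp - original_sp
    // Restore:  csp - StackPointer() = csp - (csp - original_sp) = original_sp
    sub(StackPointer(), csp, StackPointer());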
1197 | 1197 |
1198 | 1198 |
1199 void MacroAssembler::LoadRoot(Register destination, | 1199 void MacroAssembler::LoadRoot(CPURegister destination, |
1200 Heap::RootListIndex index) { | 1200 Heap::RootListIndex index) { |
1201 // TODO(jbramley): Most root values are constants, and can be synthesized | 1201 // TODO(jbramley): Most root values are constants, and can be synthesized |
1202 // without a load. Refer to the ARM back end for details. | 1202 // without a load. Refer to the ARM back end for details. |
1203 Ldr(destination, MemOperand(root, index << kPointerSizeLog2)); | 1203 Ldr(destination, MemOperand(root, index << kPointerSizeLog2)); |
1204 } | 1204 } |
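The widened LoadRoot signature (Register to CPURegister on the NEW side) lets a root value be loaded into either register file; the underlying Ldr accepts both. A hedged sketch of what this enables (call sites are hypothetical):

    __ LoadRoot(x10, Heap::kHeapNumberMapRootIndex);  // integer register, as before
    __ LoadRoot(d10, Heap::kHeapNumberMapRootIndex);  // FP register, now also accepted,
                                                      // so the map can later be paired
                                                      // with a double value in an Stp

AllocateHeapNumber below uses the FP variant so the heap number map and the value can be written with a single store-pair instruction.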
1205 | 1205 |
1206 | 1206 |
1207 void MacroAssembler::StoreRoot(Register source, | 1207 void MacroAssembler::StoreRoot(Register source, |
1208 Heap::RootListIndex index) { | 1208 Heap::RootListIndex index) { |
1209 Str(source, MemOperand(root, index << kPointerSizeLog2)); | 1209 Str(source, MemOperand(root, index << kPointerSizeLog2)); |
(...skipping 2050 matching lines...)
3260 | 3260 |
3261 // Calculate new top and bail out if new space is exhausted. | 3261 // Calculate new top and bail out if new space is exhausted. |
3262 Adds(scratch3, result, object_size); | 3262 Adds(scratch3, result, object_size); |
3263 B(vs, gc_required); | 3263 B(vs, gc_required); |
3264 Cmp(scratch3, allocation_limit); | 3264 Cmp(scratch3, allocation_limit); |
3265 B(hi, gc_required); | 3265 B(hi, gc_required); |
3266 Str(scratch3, MemOperand(top_address)); | 3266 Str(scratch3, MemOperand(top_address)); |
3267 | 3267 |
3268 // Tag the object if requested. | 3268 // Tag the object if requested. |
3269 if ((flags & TAG_OBJECT) != 0) { | 3269 if ((flags & TAG_OBJECT) != 0) { |
3270 Orr(result, result, kHeapObjectTag); | 3270 ObjectTag(result, result); |
3271 } | 3271 } |
3272 } | 3272 } |
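Both Allocate overloads now finish with ObjectTag instead of the inline Orr. The helper's body is not part of this excerpt; presumably it centralizes tagging (and in debug builds may verify the pointer is untagged first). A hedged sketch of the minimal equivalent, matching the Orr it replaces:

    // Minimal sketch only; the real helper may add debug checks.
    void MacroAssembler::ObjectTag(Register tagged_obj, Register obj) {
      STATIC_ASSERT(kHeapObjectTag == 1);
      Orr(tagged_obj, obj, kHeapObjectTag);
    }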
3273 | 3273 |
3274 | 3274 |
3275 void MacroAssembler::Allocate(Register object_size, | 3275 void MacroAssembler::Allocate(Register object_size, |
3276 Register result, | 3276 Register result, |
3277 Register scratch1, | 3277 Register scratch1, |
3278 Register scratch2, | 3278 Register scratch2, |
3279 Label* gc_required, | 3279 Label* gc_required, |
3280 AllocationFlags flags) { | 3280 AllocationFlags flags) { |
(...skipping 61 matching lines...)
3342 Check(eq, kUnalignedAllocationInNewSpace); | 3342 Check(eq, kUnalignedAllocationInNewSpace); |
3343 } | 3343 } |
3344 | 3344 |
3345 B(vs, gc_required); | 3345 B(vs, gc_required); |
3346 Cmp(scratch3, allocation_limit); | 3346 Cmp(scratch3, allocation_limit); |
3347 B(hi, gc_required); | 3347 B(hi, gc_required); |
3348 Str(scratch3, MemOperand(top_address)); | 3348 Str(scratch3, MemOperand(top_address)); |
3349 | 3349 |
3350 // Tag the object if requested. | 3350 // Tag the object if requested. |
3351 if ((flags & TAG_OBJECT) != 0) { | 3351 if ((flags & TAG_OBJECT) != 0) { |
3352 Orr(result, result, kHeapObjectTag); | 3352 ObjectTag(result, result); |
3353 } | 3353 } |
3354 } | 3354 } |
3355 | 3355 |
3356 | 3356 |
3357 void MacroAssembler::UndoAllocationInNewSpace(Register object, | 3357 void MacroAssembler::UndoAllocationInNewSpace(Register object, |
3358 Register scratch) { | 3358 Register scratch) { |
3359 ExternalReference new_space_allocation_top = | 3359 ExternalReference new_space_allocation_top = |
3360 ExternalReference::new_space_allocation_top_address(isolate()); | 3360 ExternalReference::new_space_allocation_top_address(isolate()); |
3361 | 3361 |
3362 // Make sure the object has no tag before resetting top. | 3362 // Make sure the object has no tag before resetting top. |
(...skipping 163 matching lines...)
3526 scratch2); | 3526 scratch2); |
3527 } | 3527 } |
3528 | 3528 |
3529 | 3529 |
3530 // Allocates a heap number or jumps to the need_gc label if the young space | 3530 // Allocates a heap number or jumps to the need_gc label if the young space |
3531 // is full and a scavenge is needed. | 3531 // is full and a scavenge is needed. |
3532 void MacroAssembler::AllocateHeapNumber(Register result, | 3532 void MacroAssembler::AllocateHeapNumber(Register result, |
3533 Label* gc_required, | 3533 Label* gc_required, |
3534 Register scratch1, | 3534 Register scratch1, |
3535 Register scratch2, | 3535 Register scratch2, |
3536 Register heap_number_map) { | 3536 CPURegister value, |
| 3537 CPURegister heap_number_map) { |
| 3538 ASSERT(!value.IsValid() || value.Is64Bits()); |
| 3539 UseScratchRegisterScope temps(this); |
| 3540 |
3537 // Allocate an object in the heap for the heap number and tag it as a heap | 3541 // Allocate an object in the heap for the heap number and tag it as a heap |
3538 // object. | 3542 // object. |
3539 Allocate(HeapNumber::kSize, result, scratch1, scratch2, gc_required, | 3543 Allocate(HeapNumber::kSize, result, scratch1, scratch2, gc_required, |
3540 TAG_OBJECT); | 3544 NO_ALLOCATION_FLAGS); |
3541 | 3545 |
3542 // Store heap number map in the allocated object. | 3546 // Prepare the heap number map. |
3543 if (heap_number_map.Is(NoReg)) { | 3547 if (!heap_number_map.IsValid()) { |
3544 heap_number_map = scratch1; | 3548 // If we have a valid value register, use the same type of register to store |
| 3549 // the map so we can use STP to store both in one instruction. |
| 3550 if (value.IsValid() && value.IsFPRegister()) { |
| 3551 heap_number_map = temps.AcquireD(); |
| 3552 } else { |
| 3553 heap_number_map = scratch1; |
| 3554 } |
3545 LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex); | 3555 LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex); |
3546 } | 3556 } |
3547 AssertRegisterIsRoot(heap_number_map, Heap::kHeapNumberMapRootIndex); | 3557 if (emit_debug_code()) { |
3548 Str(heap_number_map, FieldMemOperand(result, HeapObject::kMapOffset)); | 3558 Register map; |
| 3559 if (heap_number_map.IsFPRegister()) { |
| 3560 map = scratch1; |
| 3561 Fmov(map, DoubleRegister(heap_number_map)); |
| 3562 } else { |
| 3563 map = Register(heap_number_map); |
| 3564 } |
| 3565 AssertRegisterIsRoot(map, Heap::kHeapNumberMapRootIndex); |
| 3566 } |
| 3567 |
| 3568 // Store the heap number map and the value in the allocated object. |
| 3569 if (value.IsSameSizeAndType(heap_number_map)) { |
| 3570 STATIC_ASSERT(HeapObject::kMapOffset + kPointerSize == |
| 3571 HeapNumber::kValueOffset); |
| 3572 Stp(heap_number_map, value, MemOperand(result, HeapObject::kMapOffset)); |
| 3573 } else { |
| 3574 Str(heap_number_map, MemOperand(result, HeapObject::kMapOffset)); |
| 3575 if (value.IsValid()) { |
| 3576 Str(value, MemOperand(result, HeapNumber::kValueOffset)); |
| 3577 } |
| 3578 } |
| 3579 ObjectTag(result, result); |
3549 } | 3580 } |
3550 | 3581 |
3551 | 3582 |
3552 void MacroAssembler::AllocateHeapNumberWithValue(Register result, | |
3553 DoubleRegister value, | |
3554 Label* gc_required, | |
3555 Register scratch1, | |
3556 Register scratch2, | |
3557 Register heap_number_map) { | |
3558 // TODO(all): Check if it would be more efficient to use STP to store both | |
3559 // the map and the value. | |
3560 AllocateHeapNumber(result, gc_required, scratch1, scratch2, heap_number_map); | |
3561 Str(value, FieldMemOperand(result, HeapNumber::kValueOffset)); | |
3562 } | |
3563 | |
3564 | |
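With AllocateHeapNumberWithValue removed, call sites presumably pass the value (and optionally the map) straight to AllocateHeapNumber. A hedged before/after sketch (registers and labels are hypothetical, and the new call assumes default arguments for the trailing parameters):

    // Old:
    __ AllocateHeapNumberWithValue(x0, d0, &gc_required, x1, x2);
    // New: the CPURegister value parameter takes the double directly.
    __ AllocateHeapNumber(x0, &gc_required, x1, x2, d0);

When the value is a D register, the map is also loaded into a D register so map and value land in one Stp; otherwise the two separate Str fallbacks in the else-branch above are used.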
3565 void MacroAssembler::JumpIfObjectType(Register object, | 3583 void MacroAssembler::JumpIfObjectType(Register object, |
3566 Register map, | 3584 Register map, |
3567 Register type_reg, | 3585 Register type_reg, |
3568 InstanceType type, | 3586 InstanceType type, |
3569 Label* if_cond_pass, | 3587 Label* if_cond_pass, |
3570 Condition cond) { | 3588 Condition cond) { |
3571 CompareObjectType(object, map, type_reg, type); | 3589 CompareObjectType(object, map, type_reg, type); |
3572 B(cond, if_cond_pass); | 3590 B(cond, if_cond_pass); |
3573 } | 3591 } |
3574 | 3592 |
(...skipping 1600 matching lines...)
5175 } | 5193 } |
5176 } | 5194 } |
5177 | 5195 |
5178 | 5196 |
5179 #undef __ | 5197 #undef __ |
5180 | 5198 |
5181 | 5199 |
5182 } } // namespace v8::internal | 5200 } } // namespace v8::internal |
5183 | 5201 |
5184 #endif // V8_TARGET_ARCH_ARM64 | 5202 #endif // V8_TARGET_ARCH_ARM64 |