OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #if V8_TARGET_ARCH_ARM | 5 #if V8_TARGET_ARCH_ARM |
6 | 6 |
7 #include "src/code-stubs.h" | 7 #include "src/code-stubs.h" |
8 #include "src/api-arguments.h" | 8 #include "src/api-arguments.h" |
9 #include "src/base/bits.h" | 9 #include "src/base/bits.h" |
10 #include "src/bootstrapper.h" | 10 #include "src/bootstrapper.h" |
(...skipping 5530 matching lines...) |
5541 ExternalReference thunk_ref = | 5541 ExternalReference thunk_ref = |
5542 ExternalReference::invoke_accessor_getter_callback(isolate()); | 5542 ExternalReference::invoke_accessor_getter_callback(isolate()); |
5543 | 5543 |
5544 // +3 is to skip prolog, return address and name handle. | 5544 // +3 is to skip prolog, return address and name handle. |
5545 MemOperand return_value_operand( | 5545 MemOperand return_value_operand( |
5546 fp, (PropertyCallbackArguments::kReturnValueOffset + 3) * kPointerSize); | 5546 fp, (PropertyCallbackArguments::kReturnValueOffset + 3) * kPointerSize); |
5547 CallApiFunctionAndReturn(masm, api_function_address, thunk_ref, | 5547 CallApiFunctionAndReturn(masm, api_function_address, thunk_ref, |
5548 kStackUnwindSpace, NULL, return_value_operand, NULL); | 5548 kStackUnwindSpace, NULL, return_value_operand, NULL); |
5549 } | 5549 } |
5550 | 5550 |
| 5551 namespace { |
| 5552 |
| 5553 void GetTypedArrayBackingStore(MacroAssembler* masm, Register backing_store, |
| 5554 Register object, Register scratch, |
| 5555 LowDwVfpRegister double_scratch) { |
| 5556 Label offset_is_not_smi, done; |
| 5557 __ ldr(scratch, FieldMemOperand(object, JSTypedArray::kBufferOffset)); |
| 5558 __ ldr(backing_store, |
| 5559 FieldMemOperand(scratch, JSArrayBuffer::kBackingStoreOffset)); |
| 5560 __ ldr(scratch, |
| 5561 FieldMemOperand(object, JSArrayBufferView::kByteOffsetOffset)); |
| 5562 __ JumpIfNotSmi(scratch, &offset_is_not_smi); |
| 5563 // The byte offset is a smi: untag it and add it to the backing store. |
| 5564 __ add(backing_store, backing_store, Operand::SmiUntag(scratch)); |
| 5565 __ jmp(&done); |
| 5566 |
| 5567 // The byte offset is a heap number: convert it to an untagged uint32. |
| 5568 __ bind(&offset_is_not_smi); |
| 5569 __ vldr(double_scratch, scratch, HeapNumber::kValueOffset - kHeapObjectTag); |
| 5570 __ vcvt_u32_f64(double_scratch.low(), double_scratch); |
| 5571 __ vmov(scratch, double_scratch.low()); |
| 5572 __ add(backing_store, backing_store, scratch); |
| 5573 __ bind(&done); |
| 5574 } |
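| // A C-level sketch of the computation above (accessors illustrative, not |
| // real V8 API): |
| //   byte* backing_store = object->buffer()->backing_store(); |
| //   backing_store += object->byte_offset();  // byte offset: smi or double |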
| 5575 |
| 5576 void TypedArrayJumpTable(MacroAssembler* masm, Register object, |
| 5577 Register scratch, Label* i8, Label* u8, Label* i16, |
| 5578 Label* u16, Label* i32, Label* u32, Label* u8c) { |
| 5579 STATIC_ASSERT(FIXED_UINT8_ARRAY_TYPE == FIXED_INT8_ARRAY_TYPE + 1); |
| 5580 STATIC_ASSERT(FIXED_INT16_ARRAY_TYPE == FIXED_INT8_ARRAY_TYPE + 2); |
| 5581 STATIC_ASSERT(FIXED_UINT16_ARRAY_TYPE == FIXED_INT8_ARRAY_TYPE + 3); |
| 5582 STATIC_ASSERT(FIXED_INT32_ARRAY_TYPE == FIXED_INT8_ARRAY_TYPE + 4); |
| 5583 STATIC_ASSERT(FIXED_UINT32_ARRAY_TYPE == FIXED_INT8_ARRAY_TYPE + 5); |
| 5584 STATIC_ASSERT(FIXED_FLOAT32_ARRAY_TYPE == FIXED_INT8_ARRAY_TYPE + 6); |
| 5585 STATIC_ASSERT(FIXED_FLOAT64_ARRAY_TYPE == FIXED_INT8_ARRAY_TYPE + 7); |
| 5586 STATIC_ASSERT(FIXED_UINT8_CLAMPED_ARRAY_TYPE == FIXED_INT8_ARRAY_TYPE + 8); |
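| // The asserts above guarantee the fixed typed array instance types are |
| // consecutive, so 'instance_type - FIXED_INT8_ARRAY_TYPE' below indexes |
| // straight into the branch table. |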
| 5587 |
| 5588 __ ldr(scratch, FieldMemOperand(object, JSObject::kElementsOffset)); |
| 5589 __ ldr(scratch, FieldMemOperand(scratch, HeapObject::kMapOffset)); |
| 5590 __ ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset)); |
| 5591 __ sub(scratch, scratch, Operand(static_cast<uint8_t>(FIXED_INT8_ARRAY_TYPE)), |
| 5592 SetCC); |
| 5593 __ Assert(ge, kOffsetOutOfRange); |
| 5594 |
| 5595 Label abort; |
| 5596 |
| 5597 { |
| 5598 Assembler::BlockConstPoolScope scope(masm); |
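| // On ARM, reading pc yields the current instruction's address plus 8, so |
| // the add below lands on 'start_of_table + 4 * scratch'; the table begins |
| // two instructions after the add, and the nop pads the skipped slot. |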
| 5599 __ add(pc, pc, Operand(scratch, LSL, 2)); |
| 5600 __ nop(); |
| 5601 __ b(i8); // Int8Array |
| 5602 __ b(u8); // Uint8Array |
| 5603 __ b(i16); // Int16Array |
| 5604 __ b(u16); // Uint16Array |
| 5605 __ b(i32); // Int32Array |
| 5606 __ b(u32); // Uint32Array |
| 5607 __ b(&abort); // Float32Array |
| 5608 __ b(&abort); // Float64Array |
| 5609 __ b(u8c); // Uint8ClampedArray |
| 5610 } |
| 5611 |
| 5612 __ bind(&abort); |
| 5613 __ Abort(kNoReason); |
| 5614 } |
| 5615 |
| 5616 void ReturnInteger32(MacroAssembler* masm, DwVfpRegister dst, Register value, |
| 5617 SwVfpRegister single_scratch, Label* use_heap_number) { |
| 5618 Label not_smi; |
| 5619 __ TrySmiTag(r0, value, &not_smi);  // Smi-tag into r0; branch out on overflow. |
| 5620 __ Ret(); |
| 5621 |
| 5622 __ bind(¬_smi); |
| 5623 __ vmov(single_scratch, value); |
| 5624 __ vcvt_f64_s32(dst, single_scratch); |
| 5625 __ jmp(use_heap_number); |
| 5626 } |
| 5627 |
| 5628 void ReturnUnsignedInteger32(MacroAssembler* masm, DwVfpRegister dst, |
| 5629 Register value, SwVfpRegister single_scratch, |
| 5630 Label* use_heap_number) { |
| 5631 Label not_smi; |
| 5632 __ cmp(value, Operand(0x40000000U));  // Non-negative smis are < 2^30. |
| 5633 __ b(cs, ¬_smi); |
| 5634 __ SmiTag(r0, value); |
| 5635 __ Ret(); |
| 5636 |
| 5637 __ bind(¬_smi); |
| 5638 __ vmov(single_scratch, value); |
| 5639 __ vcvt_f64_u32(dst, single_scratch); |
| 5640 __ jmp(use_heap_number); |
| 5641 } |
| 5642 |
| 5643 void ReturnAllocatedHeapNumber(MacroAssembler* masm, DwVfpRegister value, |
| 5644 Register scratch, Register scratch2, |
| 5645 Register scratch3) { |
| 5646 Label call_runtime; |
| 5647 __ LoadRoot(scratch3, Heap::kHeapNumberMapRootIndex); |
| 5648 __ AllocateHeapNumber(r0, scratch, scratch2, scratch3, &call_runtime); |
| 5649 __ vstr(value, FieldMemOperand(r0, HeapNumber::kValueOffset)); |
| 5650 __ Ret(); |
| 5651 |
| 5652 __ bind(&call_runtime); |
| 5653 { |
| 5654 FrameScope scope(masm, StackFrame::INTERNAL); |
| 5655 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber); |
| 5656 __ vstr(value, FieldMemOperand(r0, HeapNumber::kValueOffset)); |
| 5657 } |
| 5658 __ Ret(); |
| 5659 } |
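| // Fast path above: inline-allocate a HeapNumber and store the double into |
| // it. On allocation failure, call Runtime::kAllocateHeapNumber inside an |
| // internal frame; CallRuntimeSaveDoubles preserves 'value' across the call. |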
| 5660 |
| 5661 } // anonymous namespace |
| 5662 |
| 5663 void AtomicsLoadStub::Generate(MacroAssembler* masm) { |
| 5664 Register object = r1; |
| 5665 Register index = r0; // Index is an untagged word32. |
| 5666 Register backing_store = r2; |
| 5667 Label i8, u8, i16, u16, i32, u32; |
| 5668 |
| 5669 GetTypedArrayBackingStore(masm, backing_store, object, r3, d0); |
| 5670 TypedArrayJumpTable(masm, object, r3, &i8, &u8, &i16, &u16, &i32, &u32, &u8);  // u8c is handled as u8. |
| 5671 |
| 5672 __ bind(&i8); |
| 5673 __ ldrsb(r0, MemOperand(backing_store, index)); |
| 5674 __ dmb(ISH);  // Atomics.load is sequentially consistent: full barrier after the load. |
| 5675 __ SmiTag(r0); |
| 5676 __ Ret(); |
| 5677 |
| 5678 __ bind(&u8); |
| 5679 __ ldrb(r0, MemOperand(backing_store, index)); |
| 5680 __ dmb(ISH); |
| 5681 __ SmiTag(r0); |
| 5682 __ Ret(); |
| 5683 |
| 5684 __ bind(&i16); |
| 5685 __ ldrsh(r0, MemOperand(backing_store, index, LSL, 1)); |
| 5686 __ dmb(ISH); |
| 5687 __ SmiTag(r0); |
| 5688 __ Ret(); |
| 5689 |
| 5690 __ bind(&u16); |
| 5691 __ ldrh(r0, MemOperand(backing_store, index, LSL, 1)); |
| 5692 __ dmb(ISH); |
| 5693 __ SmiTag(r0); |
| 5694 __ Ret(); |
| 5695 |
| 5696 Label use_heap_number; |
| 5697 |
| 5698 __ bind(&i32); |
| 5699 __ ldr(r0, MemOperand(backing_store, index, LSL, 2)); |
| 5700 __ dmb(ISH); |
| 5701 ReturnInteger32(masm, d0, r0, s2, &use_heap_number); |
| 5702 |
| 5703 __ bind(&u32); |
| 5704 __ ldr(r0, MemOperand(backing_store, index, LSL, 2)); |
| 5705 __ dmb(ISH); |
| 5706 ReturnUnsignedInteger32(masm, d0, r0, s2, &use_heap_number); |
| 5707 |
| 5708 __ bind(&use_heap_number); |
| 5709 ReturnAllocatedHeapNumber(masm, d0, r1, r2, r3); |
| 5710 } |
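| // Illustrative JS serviced by this stub (orientation only, not test code): |
| //   var i32 = new Int32Array(new SharedArrayBuffer(16)); |
| //   Atomics.load(i32, 0);  // integer arrays only; float arrays abort above |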
| 5711 |
5551 #undef __ | 5712 #undef __ |
5552 | 5713 |
5553 } // namespace internal | 5714 } // namespace internal |
5554 } // namespace v8 | 5715 } // namespace v8 |
5555 | 5716 |
5556 #endif // V8_TARGET_ARCH_ARM | 5717 #endif // V8_TARGET_ARCH_ARM |