OLD | NEW |
1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #if V8_TARGET_ARCH_X64 | 5 #if V8_TARGET_ARCH_X64 |
6 | 6 |
7 #include "src/bootstrapper.h" | 7 #include "src/bootstrapper.h" |
8 #include "src/code-stubs.h" | 8 #include "src/code-stubs.h" |
9 #include "src/codegen.h" | 9 #include "src/codegen.h" |
10 #include "src/ic/handler-compiler.h" | 10 #include "src/ic/handler-compiler.h" |
(...skipping 5444 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
5455 | 5455 |
5456 // +3 is to skip prolog, return address and name handle. | 5456 // +3 is to skip prolog, return address and name handle. |
5457 Operand return_value_operand( | 5457 Operand return_value_operand( |
5458 rbp, (PropertyCallbackArguments::kReturnValueOffset + 3) * kPointerSize); | 5458 rbp, (PropertyCallbackArguments::kReturnValueOffset + 3) * kPointerSize); |
5459 CallApiFunctionAndReturn(masm, api_function_address, thunk_ref, getter_arg, | 5459 CallApiFunctionAndReturn(masm, api_function_address, thunk_ref, getter_arg, |
5460 kStackUnwindSpace, nullptr, return_value_operand, | 5460 kStackUnwindSpace, nullptr, return_value_operand, |
5461 NULL); | 5461 NULL); |
5462 } | 5462 } |
5463 | 5463 |
5464 | 5464 |
| 5465 static void GetTypedArrayBackingStore(MacroAssembler* masm, |
| 5466 Register backing_store, Register object, |
| 5467 Register scratch) { |
| 5468 Label offset_is_not_smi, done; |
| 5469 __ movp(scratch, FieldOperand(object, JSTypedArray::kBufferOffset)); |
| 5470 __ movp(backing_store, |
| 5471 FieldOperand(scratch, JSArrayBuffer::kBackingStoreOffset)); |
| 5472 |
| 5473 __ movp(scratch, FieldOperand(object, JSArrayBufferView::kByteOffsetOffset)); |
| 5474 __ JumpIfNotSmi(scratch, &offset_is_not_smi, Label::kNear); |
| 5475 // offset is smi |
| 5476 __ SmiToInteger32(scratch, scratch); |
| 5477 __ addp(backing_store, scratch); |
| 5478 __ jmp(&done, Label::kNear); |
| 5479 |
| 5480 // offset is a heap number |
| 5481 __ bind(&offset_is_not_smi); |
| 5482 __ Movsd(xmm0, FieldOperand(scratch, HeapNumber::kValueOffset)); |
| 5483 __ Cvttsd2siq(scratch, xmm0); |
| 5484 __ addp(backing_store, scratch); |
| 5485 __ bind(&done); |
| 5486 } |
| 5487 |
| 5488 |
| 5489 static void TaggedToInteger32(MacroAssembler* masm, Register value) { |
| 5490 Label not_smi, done; |
| 5491 __ JumpIfNotSmi(value, ¬_smi, Label::kNear); |
| 5492 __ SmiToInteger32(value, value); |
| 5493 __ jmp(&done, Label::kNear); |
| 5494 |
| 5495 __ bind(¬_smi); |
| 5496 __ Movsd(xmm0, FieldOperand(value, HeapNumber::kValueOffset)); |
| 5497 __ Cvttsd2siq(value, xmm0); |
| 5498 __ bind(&done); |
| 5499 } |
| 5500 |
// Dispatches on the elements kind of |object|'s typed-array elements via
// an indirect jump through the 8-byte-entry table emitted at |table| by
// TypedArrayJumpTableEpilogue. Clobbers |scratch| and |scratch2|.
static void TypedArrayJumpTablePrologue(MacroAssembler* masm, Register object,
                                        Register scratch, Register scratch2,
                                        Label* table) {
  // scratch = instance type of the elements' map (zero-extended byte).
  __ movp(scratch, FieldOperand(object, JSObject::kElementsOffset));
  __ movp(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
  __ movzxbl(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
  // Rebase so FIXED_INT8_ARRAY_TYPE maps to table slot 0. The subl also
  // sets the flags the Assert below checks: above_equal (no borrow)
  // means the instance type was >= FIXED_INT8_ARRAY_TYPE.
  __ subl(scratch, Immediate(static_cast<uint8_t>(FIXED_INT8_ARRAY_TYPE)));
  // NOTE(review): kOffsetOutOfRange reads oddly for an instance-type
  // range check -- confirm this is the intended abort reason.
  __ Assert(above_equal, kOffsetOutOfRange);
  // Indirect jump through table[scratch]; entries are dq'd label
  // addresses, hence the times_8 scale.
  __ leaq(scratch2, Operand(table));
  __ jmp(Operand(scratch2, scratch, times_8, 0));
}
| 5512 |
// Emits the jump table targeted by TypedArrayJumpTablePrologue. The
// entry order must mirror the FIXED_*_ARRAY_TYPE enum values relative
// to FIXED_INT8_ARRAY_TYPE; the STATIC_ASSERTs below pin that layout so
// an enum reshuffle fails at compile time. The Float32Array and
// Float64Array slots abort -- floating-point element kinds are not
// handled by the stubs using this table.
static void TypedArrayJumpTableEpilogue(MacroAssembler* masm, Label* table,
                                        Label* i8, Label* u8, Label* i16,
                                        Label* u16, Label* i32, Label* u32,
                                        Label* u8c) {
  STATIC_ASSERT(FIXED_UINT8_ARRAY_TYPE == FIXED_INT8_ARRAY_TYPE + 1);
  STATIC_ASSERT(FIXED_INT16_ARRAY_TYPE == FIXED_INT8_ARRAY_TYPE + 2);
  STATIC_ASSERT(FIXED_UINT16_ARRAY_TYPE == FIXED_INT8_ARRAY_TYPE + 3);
  STATIC_ASSERT(FIXED_INT32_ARRAY_TYPE == FIXED_INT8_ARRAY_TYPE + 4);
  STATIC_ASSERT(FIXED_UINT32_ARRAY_TYPE == FIXED_INT8_ARRAY_TYPE + 5);
  STATIC_ASSERT(FIXED_FLOAT32_ARRAY_TYPE == FIXED_INT8_ARRAY_TYPE + 6);
  STATIC_ASSERT(FIXED_FLOAT64_ARRAY_TYPE == FIXED_INT8_ARRAY_TYPE + 7);
  STATIC_ASSERT(FIXED_UINT8_CLAMPED_ARRAY_TYPE == FIXED_INT8_ARRAY_TYPE + 8);

  Label abort;
  __ bind(table);
  __ dq(i8);      // Int8Array
  __ dq(u8);      // Uint8Array
  __ dq(i16);     // Int16Array
  __ dq(u16);     // Uint16Array
  __ dq(i32);     // Int32Array
  __ dq(u32);     // Uint32Array
  __ dq(&abort);  // Float32Array
  __ dq(&abort);  // Float64Array
  __ dq(u8c);     // Uint8ClampedArray

  __ bind(&abort);
  __ Abort(kNoReason);
}
| 5541 |
| 5542 |
| 5543 static void ReturnInteger32(MacroAssembler* masm, XMMRegister dst, |
| 5544 Register value, Label* use_heap_number) { |
| 5545 Label not_smi; |
| 5546 if (!value.is(rax)) { |
| 5547 __ movp(rax, value); |
| 5548 } |
| 5549 __ JumpIfNotValidSmiValue(rax, ¬_smi, Label::kNear); |
| 5550 __ Integer32ToSmi(rax, rax); |
| 5551 __ Ret(); |
| 5552 |
| 5553 __ bind(¬_smi); |
| 5554 __ Cvtlsi2sd(dst, rax); |
| 5555 __ jmp(use_heap_number); |
| 5556 } |
| 5557 |
| 5558 |
| 5559 static void ReturnUnsignedInteger32(MacroAssembler* masm, XMMRegister dst, |
| 5560 Register value, Label* use_heap_number) { |
| 5561 Label not_smi; |
| 5562 if (!value.is(rax)) { |
| 5563 __ movp(rax, value); |
| 5564 } |
| 5565 __ JumpIfUIntNotValidSmiValue(rax, ¬_smi, Label::kNear); |
| 5566 __ Integer32ToSmi(rax, rax); |
| 5567 __ Ret(); |
| 5568 |
| 5569 __ bind(¬_smi); |
| 5570 __ Cvtqsi2sd(dst, rax); |
| 5571 __ jmp(use_heap_number); |
| 5572 } |
| 5573 |
| 5574 |
| 5575 static void ReturnAllocatedHeapNumber(MacroAssembler* masm, XMMRegister value, |
| 5576 Register scratch) { |
| 5577 Label call_runtime; |
| 5578 __ AllocateHeapNumber(rax, scratch, &call_runtime); |
| 5579 __ Movsd(FieldOperand(rax, HeapNumber::kValueOffset), value); |
| 5580 __ Ret(); |
| 5581 |
| 5582 __ bind(&call_runtime); |
| 5583 { |
| 5584 FrameScope scope(masm, StackFrame::INTERNAL); |
| 5585 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber); |
| 5586 __ Movsd(FieldOperand(rax, HeapNumber::kValueOffset), value); |
| 5587 } |
| 5588 __ Ret(); |
| 5589 } |
| 5590 |
| 5591 |
// Generates Atomics.load for integer typed arrays: computes the backing
// store address, untags the index, then dispatches on the elements kind
// to a width-appropriate load that returns a tagged number in rax.
// NOTE(review): assumes the stub's calling convention passes the typed
// array in rdx and the (tagged) index in rax -- confirm against the
// AtomicsLoadStub interface descriptor.
void AtomicsLoadStub::Generate(MacroAssembler* masm) {
  Register object = rdx;
  Register backing_store = rbx;
  Register index = rax;
  Label table;

  GetTypedArrayBackingStore(masm, backing_store, object, kScratchRegister);
  TaggedToInteger32(masm, index);
  TypedArrayJumpTablePrologue(masm, object, rcx, kScratchRegister, &table);

  Label i8, u8, i16, u16, i32, u32;

  // Int8Array: sign-extend the loaded byte; always fits in a Smi.
  __ bind(&i8);
  __ movb(rax, Operand(backing_store, index, times_1, 0));
  __ movsxbl(rax, rax);
  __ Integer32ToSmi(rax, rax);
  __ Ret();

  // Uint8Array (Uint8ClampedArray reuses this entry, see below):
  // zero-extend the loaded byte.
  __ bind(&u8);
  __ movb(rax, Operand(backing_store, index, times_1, 0));
  __ movzxbl(rax, rax);
  __ Integer32ToSmi(rax, rax);
  __ Ret();

  // Int16Array: sign-extend the loaded 16-bit value.
  __ bind(&i16);
  __ movw(rax, Operand(backing_store, index, times_2, 0));
  __ movsxwl(rax, rax);
  __ Integer32ToSmi(rax, rax);
  __ Ret();

  // Uint16Array: zero-extend the loaded 16-bit value.
  __ bind(&u16);
  __ movw(rax, Operand(backing_store, index, times_2, 0));
  __ movzxwl(rax, rax);
  __ Integer32ToSmi(rax, rax);
  __ Ret();

  Label use_heap_number;

  // Int32Array: the value may not fit in a Smi; the helper boxes it in
  // a HeapNumber via use_heap_number when needed.
  __ bind(&i32);
  __ movl(rax, Operand(backing_store, index, times_4, 0));
  ReturnInteger32(masm, xmm0, rax, &use_heap_number);

  // Uint32Array: likewise; movl zero-extends into the full register.
  __ bind(&u32);
  __ movl(rax, Operand(backing_store, index, times_4, 0));
  ReturnUnsignedInteger32(masm, xmm0, rax, &use_heap_number);

  // Shared slow path: xmm0 holds the result as a double.
  __ bind(&use_heap_number);
  ReturnAllocatedHeapNumber(masm, xmm0, rcx);

  // Uint8ClampedArray loads exactly like Uint8Array, so &u8 is passed
  // for the table's u8c slot.
  TypedArrayJumpTableEpilogue(masm, &table, &i8, &u8, &i16, &u16, &i32, &u32,
                              &u8);
}
| 5644 |
| 5645 |
5465 #undef __ | 5646 #undef __ |
5466 | 5647 |
5467 } // namespace internal | 5648 } // namespace internal |
5468 } // namespace v8 | 5649 } // namespace v8 |
5469 | 5650 |
5470 #endif // V8_TARGET_ARCH_X64 | 5651 #endif // V8_TARGET_ARCH_X64 |
OLD | NEW |