OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 2638 matching lines...)
2649 if (scratch.is(kSmiConstantRegister)) { | 2649 if (scratch.is(kSmiConstantRegister)) { |
2650 // Restore kSmiConstantRegister. | 2650 // Restore kSmiConstantRegister. |
2651 movp(kSmiConstantRegister, | 2651 movp(kSmiConstantRegister, |
2652 reinterpret_cast<void*>(Smi::FromInt(kSmiConstantRegisterValue)), | 2652 reinterpret_cast<void*>(Smi::FromInt(kSmiConstantRegisterValue)), |
2653 Assembler::RelocInfoNone()); | 2653 Assembler::RelocInfoNone()); |
2654 } | 2654 } |
2655 } | 2655 } |
2656 } | 2656 } |
2657 | 2657 |
2658 | 2658 |
2659 void MacroAssembler::TestBit(const Operand& src, int bits) { | 2659 void MacroAssembler::LoadSharedFunctionInfoSpecialField(Register dst, |
2660 int byte_offset = bits / kBitsPerByte; | 2660 Register base, |
2661 int bit_in_byte = bits & (kBitsPerByte - 1); | 2661 int offset) { |
2662 testb(Operand(src, byte_offset), Immediate(1 << bit_in_byte)); | 2662 ASSERT(offset > SharedFunctionInfo::kLengthOffset && |
| 2663 offset <= SharedFunctionInfo::kSize && |
| 2664 (((offset - SharedFunctionInfo::kLengthOffset) / kIntSize) % 2 == 1)); |
| 2665 if (kPointerSize == kInt64Size) { |
| 2666 movsxlq(dst, FieldOperand(base, offset)); |
| 2667 } else { |
| 2668 movp(dst, FieldOperand(base, offset)); |
| 2669 SmiToInteger32(dst, dst); |
| 2670 } |
2663 } | 2671 } |
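Note: the ASSERT above is dense in diff form. The sketch below restates it in plain C++ (stand-in constants; kLengthOffset, kSize and kIntSize are not taken from V8's headers): the offset must point past kLengthOffset, stay inside the object, and sit an odd number of int slots after kLengthOffset, i.e. in the upper half of an 8-byte pair. That matches the existing comment at the TestBit call site below ("stored in the top half of a smi-tagged 8-byte field") and is what lets the 64-bit path read the raw int32 with movsxlq.

    // Sketch only: mirrors the ASSERT shared by the two new helpers,
    // using stand-in constants rather than V8's real headers.
    #include <cassert>

    const int kIntSize = 4;  // assumed size of one int field

    bool IsSpecialFieldOffset(int offset, int length_offset, int object_size) {
      return offset > length_offset &&                        // past kLengthOffset
             offset <= object_size &&                         // inside the object
             ((offset - length_offset) / kIntSize) % 2 == 1;  // high half of a pair
    }

    int main() {
      // Hypothetical offsets: three int slots past the length field passes
      // (3 is odd); two slots past does not.
      assert(IsSpecialFieldOffset(12 + 3 * kIntSize, 12, 64));
      assert(!IsSpecialFieldOffset(12 + 2 * kIntSize, 12, 64));
      return 0;
    }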
2664 | 2672 |
2665 | 2673 |
| 2674 void MacroAssembler::TestBitSharedFunctionInfoSpecialField(Register base, |
| 2675 int offset, |
| 2676 int bits) { |
| 2677 ASSERT(offset > SharedFunctionInfo::kLengthOffset && |
| 2678 offset <= SharedFunctionInfo::kSize && |
| 2679 (((offset - SharedFunctionInfo::kLengthOffset) / kIntSize) % 2 == 1)); |
| 2680 if (kPointerSize == kInt32Size) { |
| 2681 // On x32, this field is represented by SMI. |
| 2682 bits += kSmiShift; |
| 2683 } |
| 2684 int byte_offset = bits / kBitsPerByte; |
| 2685 int bit_in_byte = bits & (kBitsPerByte - 1); |
| 2686 testb(FieldOperand(base, offset + byte_offset), Immediate(1 << bit_in_byte)); |
| 2687 } |
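Note: a quick restatement of the index arithmetic above in plain C++ (assumed constants and a hypothetical bit index; the x32 value of kSmiShift is assumed to be 1, which this CL does not state). The bit index is shifted past the smi tag on x32, then split into the byte that the single-byte testb reads and the mask it tests, so the rest of the field is never touched.

    // Sketch only: the byte/bit split behind the testb above.
    #include <cstdio>

    const int kBitsPerByte = 8;
    const int kSmiShiftX32 = 1;  // assumed x32 smi shift, not from this CL

    void LocateFlagBit(int bits, bool is_x32, int* byte_offset, int* bit_mask) {
      if (is_x32) bits += kSmiShiftX32;              // field is a smi on x32
      *byte_offset = bits / kBitsPerByte;            // byte testb will read
      *bit_mask = 1 << (bits & (kBitsPerByte - 1));  // bit inside that byte
    }

    int main() {
      int off = 0, mask = 0;
      LocateFlagBit(9, false, &off, &mask);            // hypothetical bit index 9
      std::printf("byte %d, mask 0x%x\n", off, mask);  // prints: byte 1, mask 0x2
      return 0;
    }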
| 2688 |
| 2689 |
2666 void MacroAssembler::Jump(ExternalReference ext) { | 2690 void MacroAssembler::Jump(ExternalReference ext) { |
2667 LoadAddress(kScratchRegister, ext); | 2691 LoadAddress(kScratchRegister, ext); |
2668 jmp(kScratchRegister); | 2692 jmp(kScratchRegister); |
2669 } | 2693 } |
2670 | 2694 |
2671 | 2695 |
2672 void MacroAssembler::Jump(const Operand& op) { | 2696 void MacroAssembler::Jump(const Operand& op) { |
2673 if (kPointerSize == kInt64Size) { | 2697 if (kPointerSize == kInt64Size) { |
2674 jmp(op); | 2698 jmp(op); |
2675 } else { | 2699 } else { |
(...skipping 815 matching lines...)
3491 | 3515 |
3492 // Check that the function really is a function. | 3516 // Check that the function really is a function. |
3493 CmpObjectType(function, JS_FUNCTION_TYPE, result); | 3517 CmpObjectType(function, JS_FUNCTION_TYPE, result); |
3494 j(not_equal, miss); | 3518 j(not_equal, miss); |
3495 | 3519 |
3496 if (miss_on_bound_function) { | 3520 if (miss_on_bound_function) { |
3497 movp(kScratchRegister, | 3521 movp(kScratchRegister, |
3498 FieldOperand(function, JSFunction::kSharedFunctionInfoOffset)); | 3522 FieldOperand(function, JSFunction::kSharedFunctionInfoOffset)); |
3499 // It's not smi-tagged (stored in the top half of a smi-tagged 8-byte | 3523 // It's not smi-tagged (stored in the top half of a smi-tagged 8-byte |
3500 // field). | 3524 // field). |
3501 TestBit(FieldOperand(kScratchRegister, | 3525 TestBitSharedFunctionInfoSpecialField(kScratchRegister, |
3502 SharedFunctionInfo::kCompilerHintsOffset), | 3526 SharedFunctionInfo::kCompilerHintsOffset, |
3503 SharedFunctionInfo::kBoundFunction); | 3527 SharedFunctionInfo::kBoundFunction); |
3504 j(not_zero, miss); | 3528 j(not_zero, miss); |
3505 } | 3529 } |
3506 | 3530 |
3507 // Make sure that the function has an instance prototype. | 3531 // Make sure that the function has an instance prototype. |
3508 Label non_instance; | 3532 Label non_instance; |
3509 testb(FieldOperand(result, Map::kBitFieldOffset), | 3533 testb(FieldOperand(result, Map::kBitFieldOffset), |
3510 Immediate(1 << Map::kHasNonInstancePrototype)); | 3534 Immediate(1 << Map::kHasNonInstancePrototype)); |
3511 j(not_zero, &non_instance, Label::kNear); | 3535 j(not_zero, &non_instance, Label::kNear); |
3512 | 3536 |
3513 // Get the prototype or initial map from the function. | 3537 // Get the prototype or initial map from the function. |
(...skipping 106 matching lines...)
3620 void MacroAssembler::InvokeFunction(Register function, | 3644 void MacroAssembler::InvokeFunction(Register function, |
3621 const ParameterCount& actual, | 3645 const ParameterCount& actual, |
3622 InvokeFlag flag, | 3646 InvokeFlag flag, |
3623 const CallWrapper& call_wrapper) { | 3647 const CallWrapper& call_wrapper) { |
3624 // You can't call a function without a valid frame. | 3648 // You can't call a function without a valid frame. |
3625 ASSERT(flag == JUMP_FUNCTION || has_frame()); | 3649 ASSERT(flag == JUMP_FUNCTION || has_frame()); |
3626 | 3650 |
3627 ASSERT(function.is(rdi)); | 3651 ASSERT(function.is(rdi)); |
3628 movp(rdx, FieldOperand(function, JSFunction::kSharedFunctionInfoOffset)); | 3652 movp(rdx, FieldOperand(function, JSFunction::kSharedFunctionInfoOffset)); |
3629 movp(rsi, FieldOperand(function, JSFunction::kContextOffset)); | 3653 movp(rsi, FieldOperand(function, JSFunction::kContextOffset)); |
3630 movsxlq(rbx, | 3654 LoadSharedFunctionInfoSpecialField(rbx, rdx, |
3631 FieldOperand(rdx, SharedFunctionInfo::kFormalParameterCountOffset)); | 3655 SharedFunctionInfo::kFormalParameterCountOffset); |
3632 // Advances rdx to the end of the Code object header, to the start of | 3656 // Advances rdx to the end of the Code object header, to the start of |
3633 // the executable code. | 3657 // the executable code. |
3634 movp(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset)); | 3658 movp(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset)); |
3635 | 3659 |
3636 ParameterCount expected(rbx); | 3660 ParameterCount expected(rbx); |
3637 InvokeCode(rdx, expected, actual, flag, call_wrapper); | 3661 InvokeCode(rdx, expected, actual, flag, call_wrapper); |
3638 } | 3662 } |
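Note on the call-site change above: the old code read the formal parameter count with an unconditional movsxlq, which matches the raw-int32 layout but not the smi representation that the x32 branch of the new helper handles. A minimal sketch of the two read paths (plain C++, hypothetical encoding; a 1-bit smi tag shift is assumed for x32, which this CL does not spell out):

    // Sketch only: what LoadSharedFunctionInfoSpecialField reads per target.
    #include <cstdint>
    #include <cstring>

    int32_t ReadSpecialField(const uint8_t* field_addr, bool pointers_are_64bit) {
      int32_t raw;
      std::memcpy(&raw, field_addr, sizeof(raw));
      if (pointers_are_64bit) {
        return raw;     // movsxlq path: the field holds the raw int32
      }
      return raw >> 1;  // x32 path: the field is a smi; untag it
                        // (SmiToInteger32), assuming a 1-bit smi shift
    }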
3639 | 3663 |
3640 | 3664 |
3641 void MacroAssembler::InvokeFunction(Register function, | 3665 void MacroAssembler::InvokeFunction(Register function, |
(...skipping 1443 matching lines...)
5085 if (ms.shift() > 0) sarl(rdx, Immediate(ms.shift())); | 5109 if (ms.shift() > 0) sarl(rdx, Immediate(ms.shift())); |
5086 movl(rax, dividend); | 5110 movl(rax, dividend); |
5087 shrl(rax, Immediate(31)); | 5111 shrl(rax, Immediate(31)); |
5088 addl(rdx, rax); | 5112 addl(rdx, rax); |
5089 } | 5113 } |
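Note: only the tail of this division-by-constant sequence is visible here; the lines above are skipped. A hedged sketch of the visible part in plain C++ (hypothetical names; it assumes the skipped code left the adjusted high 32 bits of dividend * magic in the register playing rdx): the arithmetic shift produces the provisional quotient, and the shrl/addl pair adds the dividend's sign bit, the usual correction that makes negative dividends round toward zero.

    // Sketch only: the visible rounding tail, not the full division helper.
    #include <cstdint>

    int32_t DivideByConstantTail(int32_t dividend, int32_t high, int shift) {
      // 'high' stands in for rdx after the skipped multiply/adjust steps.
      int32_t q = (shift > 0) ? (high >> shift) : high;  // sarl rdx, shift
      q += static_cast<uint32_t>(dividend) >> 31;        // shrl rax, 31; addl rdx, rax
      return q;
    }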
5090 | 5114 |
5091 | 5115 |
5092 } } // namespace v8::internal | 5116 } } // namespace v8::internal |
5093 | 5117 |
5094 #endif // V8_TARGET_ARCH_X64 | 5118 #endif // V8_TARGET_ARCH_X64 |