Chromium Code Reviews

Side by Side Diff: src/x64/macro-assembler-x64.cc

Issue 22715004: Version 3.20.15 (Closed) Base URL: https://v8.googlecode.com/svn/trunk
Patch Set: Add TypedArray API and correctness patches r16033 and r16084 Created 7 years, 4 months ago
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 137 matching lines...)
148 // Operand is lea(scratch, Operand(kRootRegister, delta)); 148 // Operand is lea(scratch, Operand(kRootRegister, delta));
149 // Opcodes : REX.W 8D ModRM Disp8/Disp32 - 4 or 7. 149 // Opcodes : REX.W 8D ModRM Disp8/Disp32 - 4 or 7.
150 int size = 4; 150 int size = 4;
151 if (!is_int8(static_cast<int32_t>(delta))) { 151 if (!is_int8(static_cast<int32_t>(delta))) {
152 size += 3; // Need full four-byte displacement in lea. 152 size += 3; // Need full four-byte displacement in lea.
153 } 153 }
154 return size; 154 return size;
155 } 155 }
156 } 156 }
157 // Size of movq(destination, src); 157 // Size of movq(destination, src);
158 return Assembler::kMoveAddressIntoScratchRegisterInstructionLength; 158 return 10;
159 } 159 }
160 160
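The 4-, 7-, and 10-byte figures in this function come straight from the x64 encodings named in the comments. A minimal standalone sketch of the arithmetic (the constant names below are illustrative, not V8's; the 10-byte figure is what the literal 10 and Assembler::kMoveAddressIntoScratchRegisterInstructionLength in the two columns both denote):

    // lea scratch, [kRootRegister + delta]: REX.W, 8D, ModRM, then disp8 or disp32.
    const int kLeaWithDisp8Size  = 1 + 1 + 1 + 1;  // 4 bytes when delta fits in int8
    const int kLeaWithDisp32Size = 1 + 1 + 1 + 4;  // 7 bytes otherwise
    // movq scratch, imm64: REX.W, opcode B8+r, then an 8-byte immediate.
    const int kMovqImm64Size     = 1 + 1 + 8;      // 10 bytes
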
161 161
162 void MacroAssembler::PushAddress(ExternalReference source) { 162 void MacroAssembler::PushAddress(ExternalReference source) {
163 int64_t address = reinterpret_cast<int64_t>(source.address()); 163 int64_t address = reinterpret_cast<int64_t>(source.address());
164 if (is_int32(address) && !Serializer::enabled()) { 164 if (is_int32(address) && !Serializer::enabled()) {
165 if (emit_debug_code()) { 165 if (emit_debug_code()) {
166 movq(kScratchRegister, BitCast<int64_t>(kZapValue), RelocInfo::NONE64); 166 movq(kScratchRegister, BitCast<int64_t>(kZapValue), RelocInfo::NONE64);
167 } 167 }
168 push(Immediate(static_cast<int32_t>(address))); 168 push(Immediate(static_cast<int32_t>(address)));
(...skipping 273 matching lines...)
442 442
443 // Clobber clobbered registers when running with the debug-code flag 443 // Clobber clobbered registers when running with the debug-code flag
444 // turned on to provoke errors. 444 // turned on to provoke errors.
445 if (emit_debug_code()) { 445 if (emit_debug_code()) {
446 movq(address, BitCast<int64_t>(kZapValue), RelocInfo::NONE64); 446 movq(address, BitCast<int64_t>(kZapValue), RelocInfo::NONE64);
447 movq(value, BitCast<int64_t>(kZapValue), RelocInfo::NONE64); 447 movq(value, BitCast<int64_t>(kZapValue), RelocInfo::NONE64);
448 } 448 }
449 } 449 }
450 450
451 451
452 void MacroAssembler::Assert(Condition cc, BailoutReason reason) { 452 void MacroAssembler::Assert(Condition cc, const char* msg) {
453 if (emit_debug_code()) Check(cc, reason); 453 if (emit_debug_code()) Check(cc, msg);
454 } 454 }
455 455
456 456
457 void MacroAssembler::AssertFastElements(Register elements) { 457 void MacroAssembler::AssertFastElements(Register elements) {
458 if (emit_debug_code()) { 458 if (emit_debug_code()) {
459 Label ok; 459 Label ok;
460 CompareRoot(FieldOperand(elements, HeapObject::kMapOffset), 460 CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
461 Heap::kFixedArrayMapRootIndex); 461 Heap::kFixedArrayMapRootIndex);
462 j(equal, &ok, Label::kNear); 462 j(equal, &ok, Label::kNear);
463 CompareRoot(FieldOperand(elements, HeapObject::kMapOffset), 463 CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
464 Heap::kFixedDoubleArrayMapRootIndex); 464 Heap::kFixedDoubleArrayMapRootIndex);
465 j(equal, &ok, Label::kNear); 465 j(equal, &ok, Label::kNear);
466 CompareRoot(FieldOperand(elements, HeapObject::kMapOffset), 466 CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
467 Heap::kFixedCOWArrayMapRootIndex); 467 Heap::kFixedCOWArrayMapRootIndex);
468 j(equal, &ok, Label::kNear); 468 j(equal, &ok, Label::kNear);
469 Abort(kJSObjectWithFastElementsMapHasSlowElements); 469 Abort("JSObject with fast elements map has slow elements");
470 bind(&ok); 470 bind(&ok);
471 } 471 }
472 } 472 }
473 473
474 474
475 void MacroAssembler::Check(Condition cc, BailoutReason reason) { 475 void MacroAssembler::Check(Condition cc, const char* msg) {
476 Label L; 476 Label L;
477 j(cc, &L, Label::kNear); 477 j(cc, &L, Label::kNear);
478 Abort(reason); 478 Abort(msg);
479 // Control will not return here. 479 // Control will not return here.
480 bind(&L); 480 bind(&L);
481 } 481 }
482 482
483 483
484 void MacroAssembler::CheckStackAlignment() { 484 void MacroAssembler::CheckStackAlignment() {
485 int frame_alignment = OS::ActivationFrameAlignment(); 485 int frame_alignment = OS::ActivationFrameAlignment();
486 int frame_alignment_mask = frame_alignment - 1; 486 int frame_alignment_mask = frame_alignment - 1;
487 if (frame_alignment > kPointerSize) { 487 if (frame_alignment > kPointerSize) {
488 ASSERT(IsPowerOf2(frame_alignment)); 488 ASSERT(IsPowerOf2(frame_alignment));
(...skipping 12 matching lines...)
501 Label* then_label) { 501 Label* then_label) {
502 Label ok; 502 Label ok;
503 testl(result, result); 503 testl(result, result);
504 j(not_zero, &ok, Label::kNear); 504 j(not_zero, &ok, Label::kNear);
505 testl(op, op); 505 testl(op, op);
506 j(sign, then_label); 506 j(sign, then_label);
507 bind(&ok); 507 bind(&ok);
508 } 508 }
509 509
510 510
511 void MacroAssembler::Abort(BailoutReason reason) { 511 void MacroAssembler::Abort(const char* msg) {
512 // We want to pass the msg string like a smi to avoid GC 512 // We want to pass the msg string like a smi to avoid GC
513 // problems, however msg is not guaranteed to be aligned 513 // problems, however msg is not guaranteed to be aligned
514 // properly. Instead, we pass an aligned pointer that is 514 // properly. Instead, we pass an aligned pointer that is
515 // a proper v8 smi, but also pass the alignment difference 515 // a proper v8 smi, but also pass the alignment difference
516 // from the real pointer as a smi. 516 // from the real pointer as a smi.
517 const char* msg = GetBailoutReason(reason);
518 intptr_t p1 = reinterpret_cast<intptr_t>(msg); 517 intptr_t p1 = reinterpret_cast<intptr_t>(msg);
519 intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag; 518 intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
520 // Note: p0 might not be a valid Smi _value_, but it has a valid Smi tag. 519 // Note: p0 might not be a valid Smi _value_, but it has a valid Smi tag.
521 ASSERT(reinterpret_cast<Object*>(p0)->IsSmi()); 520 ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
522 #ifdef DEBUG 521 #ifdef DEBUG
523 if (msg != NULL) { 522 if (msg != NULL) {
524 RecordComment("Abort message: "); 523 RecordComment("Abort message: ");
525 RecordComment(msg); 524 RecordComment(msg);
526 } 525 }
527 #endif 526 #endif
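The pointer-as-smi trick above is plain pointer arithmetic: clear the tag bit so the value looks like a smi, and pass the 0-or-1 difference alongside it. A minimal sketch of the round trip, assuming the x64 smi constants of this era (kSmiTag == 0, kSmiTagMask == 1); the function name is illustrative, not V8 API:

    #include <cstdint>

    // Split an arbitrary char* into an aligned, smi-tagged value p0 plus a small
    // delta (0 or 1); both can be passed through code that may trigger GC.
    void SplitMessagePointer(const char* msg, intptr_t* p0, intptr_t* delta) {
      const intptr_t kSmiTagMask = 1;
      const intptr_t kSmiTag = 0;
      intptr_t p1 = reinterpret_cast<intptr_t>(msg);  // may have its low bit set
      *p0 = (p1 & ~kSmiTagMask) + kSmiTag;            // low bit cleared: valid smi tag
      *delta = p1 - *p0;                              // 0 or 1, itself a valid smi value
      // The receiver recovers the original pointer as *p0 + *delta.
    }
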
(...skipping 304 matching lines...)
832 831
833 CompareRoot(return_value, Heap::kTrueValueRootIndex); 832 CompareRoot(return_value, Heap::kTrueValueRootIndex);
834 j(equal, &ok, Label::kNear); 833 j(equal, &ok, Label::kNear);
835 834
836 CompareRoot(return_value, Heap::kFalseValueRootIndex); 835 CompareRoot(return_value, Heap::kFalseValueRootIndex);
837 j(equal, &ok, Label::kNear); 836 j(equal, &ok, Label::kNear);
838 837
839 CompareRoot(return_value, Heap::kNullValueRootIndex); 838 CompareRoot(return_value, Heap::kNullValueRootIndex);
840 j(equal, &ok, Label::kNear); 839 j(equal, &ok, Label::kNear);
841 840
842 Abort(kAPICallReturnedInvalidObject); 841 Abort("API call returned invalid object");
843 842
844 bind(&ok); 843 bind(&ok);
845 #endif 844 #endif
846 845
847 LeaveApiExitFrame(); 846 LeaveApiExitFrame();
848 ret(stack_space * kPointerSize); 847 ret(stack_space * kPointerSize);
849 848
850 bind(&promote_scheduled_exception); 849 bind(&promote_scheduled_exception);
851 TailCallRuntime(Runtime::kPromoteScheduledException, 0, 1); 850 TailCallRuntime(Runtime::kPromoteScheduledException, 0, 1);
852 851
(...skipping 179 matching lines...)
1032 } 1031 }
1033 1032
1034 1033
1035 void MacroAssembler::LoadSmiConstant(Register dst, Smi* source) { 1034 void MacroAssembler::LoadSmiConstant(Register dst, Smi* source) {
1036 if (emit_debug_code()) { 1035 if (emit_debug_code()) {
1037 movq(dst, 1036 movq(dst,
1038 reinterpret_cast<uint64_t>(Smi::FromInt(kSmiConstantRegisterValue)), 1037 reinterpret_cast<uint64_t>(Smi::FromInt(kSmiConstantRegisterValue)),
1039 RelocInfo::NONE64); 1038 RelocInfo::NONE64);
1040 cmpq(dst, kSmiConstantRegister); 1039 cmpq(dst, kSmiConstantRegister);
1041 if (allow_stub_calls()) { 1040 if (allow_stub_calls()) {
1042 Assert(equal, kUninitializedKSmiConstantRegister); 1041 Assert(equal, "Uninitialized kSmiConstantRegister");
1043 } else { 1042 } else {
1044 Label ok; 1043 Label ok;
1045 j(equal, &ok, Label::kNear); 1044 j(equal, &ok, Label::kNear);
1046 int3(); 1045 int3();
1047 bind(&ok); 1046 bind(&ok);
1048 } 1047 }
1049 } 1048 }
1050 int value = source->value(); 1049 int value = source->value();
1051 if (value == 0) { 1050 if (value == 0) {
1052 xorl(dst, dst); 1051 xorl(dst, dst);
(...skipping 47 matching lines...)
1100 shl(dst, Immediate(kSmiShift)); 1099 shl(dst, Immediate(kSmiShift));
1101 } 1100 }
1102 1101
1103 1102
1104 void MacroAssembler::Integer32ToSmiField(const Operand& dst, Register src) { 1103 void MacroAssembler::Integer32ToSmiField(const Operand& dst, Register src) {
1105 if (emit_debug_code()) { 1104 if (emit_debug_code()) {
1106 testb(dst, Immediate(0x01)); 1105 testb(dst, Immediate(0x01));
1107 Label ok; 1106 Label ok;
1108 j(zero, &ok, Label::kNear); 1107 j(zero, &ok, Label::kNear);
1109 if (allow_stub_calls()) { 1108 if (allow_stub_calls()) {
1110 Abort(kInteger32ToSmiFieldWritingToNonSmiLocation); 1109 Abort("Integer32ToSmiField writing to non-smi location");
1111 } else { 1110 } else {
1112 int3(); 1111 int3();
1113 } 1112 }
1114 bind(&ok); 1113 bind(&ok);
1115 } 1114 }
1116 ASSERT(kSmiShift % kBitsPerByte == 0); 1115 ASSERT(kSmiShift % kBitsPerByte == 0);
1117 movl(Operand(dst, kSmiShift / kBitsPerByte), src); 1116 movl(Operand(dst, kSmiShift / kBitsPerByte), src);
1118 } 1117 }
1119 1118
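Context for the offset arithmetic in Integer32ToSmiField: on x64 in this V8 era a smi keeps its 32-bit payload in the upper half of a 64-bit word (kSmiShift == 32) with the tag bit clear, so storing the int32 at byte offset kSmiShift / kBitsPerByte == 4 rewrites the payload of an existing smi in place, which is what the debug test of the tag bit guards. A minimal sketch of the layout, not V8 API:

    #include <cstdint>

    // Build the 64-bit smi representation used on x64: payload in the upper
    // 32 bits, all low bits (including tag bit 0) zero.
    uint64_t MakeSmi64(int32_t value) {
      const int kSmiShift = 32;
      return static_cast<uint64_t>(static_cast<uint32_t>(value)) << kSmiShift;
    }
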
1120 1119
(...skipping 562 matching lines...)
1683 1682
1684 void MacroAssembler::SmiAdd(Register dst, 1683 void MacroAssembler::SmiAdd(Register dst,
1685 Register src1, 1684 Register src1,
1686 Register src2) { 1685 Register src2) {
1687 // No overflow checking. Use only when it's known that 1686 // No overflow checking. Use only when it's known that
1688 // overflowing is impossible. 1687 // overflowing is impossible.
1689 if (!dst.is(src1)) { 1688 if (!dst.is(src1)) {
1690 if (emit_debug_code()) { 1689 if (emit_debug_code()) {
1691 movq(kScratchRegister, src1); 1690 movq(kScratchRegister, src1);
1692 addq(kScratchRegister, src2); 1691 addq(kScratchRegister, src2);
1693 Check(no_overflow, kSmiAdditionOverflow); 1692 Check(no_overflow, "Smi addition overflow");
1694 } 1693 }
1695 lea(dst, Operand(src1, src2, times_1, 0)); 1694 lea(dst, Operand(src1, src2, times_1, 0));
1696 } else { 1695 } else {
1697 addq(dst, src2); 1696 addq(dst, src2);
1698 Assert(no_overflow, kSmiAdditionOverflow); 1697 Assert(no_overflow, "Smi addition overflow");
1699 } 1698 }
1700 } 1699 }
1701 1700
1702 1701
1703 void MacroAssembler::SmiSub(Register dst, 1702 void MacroAssembler::SmiSub(Register dst,
1704 Register src1, 1703 Register src1,
1705 Register src2, 1704 Register src2,
1706 Label* on_not_smi_result, 1705 Label* on_not_smi_result,
1707 Label::Distance near_jump) { 1706 Label::Distance near_jump) {
1708 ASSERT_NOT_NULL(on_not_smi_result); 1707 ASSERT_NOT_NULL(on_not_smi_result);
(...skipping 11 matching lines...)
1720 1719
1721 1720
1722 void MacroAssembler::SmiSub(Register dst, Register src1, Register src2) { 1721 void MacroAssembler::SmiSub(Register dst, Register src1, Register src2) {
1723 // No overflow checking. Use only when it's known that 1722 // No overflow checking. Use only when it's known that
1724 // overflowing is impossible (e.g., subtracting two positive smis). 1723 // overflowing is impossible (e.g., subtracting two positive smis).
1725 ASSERT(!dst.is(src2)); 1724 ASSERT(!dst.is(src2));
1726 if (!dst.is(src1)) { 1725 if (!dst.is(src1)) {
1727 movq(dst, src1); 1726 movq(dst, src1);
1728 } 1727 }
1729 subq(dst, src2); 1728 subq(dst, src2);
1730 Assert(no_overflow, kSmiSubtractionOverflow); 1729 Assert(no_overflow, "Smi subtraction overflow");
1731 } 1730 }
1732 1731
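The "subtracting two positive smis" remark above is what makes omitting the overflow check safe: with payloads confined to [0, 2^31) and shifted into the upper word half, the 64-bit subtraction of two such smis can neither overflow nor produce a mis-tagged result. A small standalone illustration under that assumption (not V8 API):

    #include <cstdint>

    // lhs_smi and rhs_smi are x64 smis (payload << 32) with non-negative payloads.
    // Their difference has a payload in (-2^31, 2^31), so the subtraction stays in
    // int64 range and the low 32 bits (including the tag bit) remain zero.
    int64_t SmiSubNoOverflow(int64_t lhs_smi, int64_t rhs_smi) {
      return lhs_smi - rhs_smi;
    }
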
1733 1732
1734 void MacroAssembler::SmiSub(Register dst, 1733 void MacroAssembler::SmiSub(Register dst,
1735 Register src1, 1734 Register src1,
1736 const Operand& src2, 1735 const Operand& src2,
1737 Label* on_not_smi_result, 1736 Label* on_not_smi_result,
1738 Label::Distance near_jump) { 1737 Label::Distance near_jump) {
1739 ASSERT_NOT_NULL(on_not_smi_result); 1738 ASSERT_NOT_NULL(on_not_smi_result);
1740 if (dst.is(src1)) { 1739 if (dst.is(src1)) {
(...skipping 11 matching lines...)
1752 1751
1753 void MacroAssembler::SmiSub(Register dst, 1752 void MacroAssembler::SmiSub(Register dst,
1754 Register src1, 1753 Register src1,
1755 const Operand& src2) { 1754 const Operand& src2) {
1756 // No overflow checking. Use only when it's known that 1755 // No overflow checking. Use only when it's known that
1757 // overflowing is impossible (e.g., subtracting two positive smis). 1756 // overflowing is impossible (e.g., subtracting two positive smis).
1758 if (!dst.is(src1)) { 1757 if (!dst.is(src1)) {
1759 movq(dst, src1); 1758 movq(dst, src1);
1760 } 1759 }
1761 subq(dst, src2); 1760 subq(dst, src2);
1762 Assert(no_overflow, kSmiSubtractionOverflow); 1761 Assert(no_overflow, "Smi subtraction overflow");
1763 } 1762 }
1764 1763
1765 1764
1766 void MacroAssembler::SmiMul(Register dst, 1765 void MacroAssembler::SmiMul(Register dst,
1767 Register src1, 1766 Register src1,
1768 Register src2, 1767 Register src2,
1769 Label* on_not_smi_result, 1768 Label* on_not_smi_result,
1770 Label::Distance near_jump) { 1769 Label::Distance near_jump) {
1771 ASSERT(!dst.is(src2)); 1770 ASSERT(!dst.is(src2));
1772 ASSERT(!dst.is(kScratchRegister)); 1771 ASSERT(!dst.is(kScratchRegister));
(...skipping 376 matching lines...)
2149 Label::Distance near_jump) { 2148 Label::Distance near_jump) {
2150 ASSERT(!dst.is(kScratchRegister)); 2149 ASSERT(!dst.is(kScratchRegister));
2151 ASSERT(!src1.is(kScratchRegister)); 2150 ASSERT(!src1.is(kScratchRegister));
2152 ASSERT(!src2.is(kScratchRegister)); 2151 ASSERT(!src2.is(kScratchRegister));
2153 ASSERT(!dst.is(src1)); 2152 ASSERT(!dst.is(src1));
2154 ASSERT(!dst.is(src2)); 2153 ASSERT(!dst.is(src2));
2155 // Both operands must not be smis. 2154 // Both operands must not be smis.
2156 #ifdef DEBUG 2155 #ifdef DEBUG
2157 if (allow_stub_calls()) { // Check contains a stub call. 2156 if (allow_stub_calls()) { // Check contains a stub call.
2158 Condition not_both_smis = NegateCondition(CheckBothSmi(src1, src2)); 2157 Condition not_both_smis = NegateCondition(CheckBothSmi(src1, src2));
2159 Check(not_both_smis, kBothRegistersWereSmisInSelectNonSmi); 2158 Check(not_both_smis, "Both registers were smis in SelectNonSmi.");
2160 } 2159 }
2161 #endif 2160 #endif
2162 STATIC_ASSERT(kSmiTag == 0); 2161 STATIC_ASSERT(kSmiTag == 0);
2163 ASSERT_EQ(0, Smi::FromInt(0)); 2162 ASSERT_EQ(0, Smi::FromInt(0));
2164 movl(kScratchRegister, Immediate(kSmiTagMask)); 2163 movl(kScratchRegister, Immediate(kSmiTagMask));
2165 and_(kScratchRegister, src1); 2164 and_(kScratchRegister, src1);
2166 testl(kScratchRegister, src2); 2165 testl(kScratchRegister, src2);
2167 // If non-zero then both are smis. 2166 // If non-zero then both are smis.
2168 j(not_zero, on_not_smis, near_jump); 2167 j(not_zero, on_not_smis, near_jump);
2169 2168
(...skipping 334 matching lines...)
2504 2503
2505 2504
2506 void MacroAssembler::Jump(Handle<Code> code_object, RelocInfo::Mode rmode) { 2505 void MacroAssembler::Jump(Handle<Code> code_object, RelocInfo::Mode rmode) {
2507 // TODO(X64): Inline this 2506 // TODO(X64): Inline this
2508 jmp(code_object, rmode); 2507 jmp(code_object, rmode);
2509 } 2508 }
2510 2509
2511 2510
2512 int MacroAssembler::CallSize(ExternalReference ext) { 2511 int MacroAssembler::CallSize(ExternalReference ext) {
2513 // Opcode for call kScratchRegister is: Rex.B FF D4 (three bytes). 2512 // Opcode for call kScratchRegister is: Rex.B FF D4 (three bytes).
2514 return LoadAddressSize(ext) + 2513 const int kCallInstructionSize = 3;
2515 Assembler::kCallScratchRegisterInstructionLength; 2514 return LoadAddressSize(ext) + kCallInstructionSize;
2516 } 2515 }
2517 2516
2518 2517
2519 void MacroAssembler::Call(ExternalReference ext) { 2518 void MacroAssembler::Call(ExternalReference ext) {
2520 #ifdef DEBUG 2519 #ifdef DEBUG
2521 int end_position = pc_offset() + CallSize(ext); 2520 int end_position = pc_offset() + CallSize(ext);
2522 #endif 2521 #endif
2523 LoadAddress(kScratchRegister, ext); 2522 LoadAddress(kScratchRegister, ext);
2524 call(kScratchRegister); 2523 call(kScratchRegister);
2525 #ifdef DEBUG 2524 #ifdef DEBUG
(...skipping 266 matching lines...)
2792 2791
2793 void MacroAssembler::Ret() { 2792 void MacroAssembler::Ret() {
2794 ret(0); 2793 ret(0);
2795 } 2794 }
2796 2795
2797 2796
2798 void MacroAssembler::Ret(int bytes_dropped, Register scratch) { 2797 void MacroAssembler::Ret(int bytes_dropped, Register scratch) {
2799 if (is_uint16(bytes_dropped)) { 2798 if (is_uint16(bytes_dropped)) {
2800 ret(bytes_dropped); 2799 ret(bytes_dropped);
2801 } else { 2800 } else {
2802 PopReturnAddressTo(scratch); 2801 pop(scratch);
2803 addq(rsp, Immediate(bytes_dropped)); 2802 addq(rsp, Immediate(bytes_dropped));
2804 PushReturnAddressFrom(scratch); 2803 push(scratch);
2805 ret(0); 2804 ret(0);
2806 } 2805 }
2807 } 2806 }
2808 2807
2809 2808
2810 void MacroAssembler::FCmp() { 2809 void MacroAssembler::FCmp() {
2811 fucomip(); 2810 fucomip();
2812 fstp(0); 2811 fstp(0);
2813 } 2812 }
2814 2813
(...skipping 163 matching lines...)
2978 Set(result_reg, 255); 2977 Set(result_reg, 255);
2979 bind(&done); 2978 bind(&done);
2980 } 2979 }
2981 2980
2982 2981
2983 void MacroAssembler::LoadUint32(XMMRegister dst, 2982 void MacroAssembler::LoadUint32(XMMRegister dst,
2984 Register src, 2983 Register src,
2985 XMMRegister scratch) { 2984 XMMRegister scratch) {
2986 if (FLAG_debug_code) { 2985 if (FLAG_debug_code) {
2987 cmpq(src, Immediate(0xffffffff)); 2986 cmpq(src, Immediate(0xffffffff));
2988 Assert(below_equal, kInputGPRIsExpectedToHaveUpper32Cleared); 2987 Assert(below_equal, "input GPR is expected to have upper32 cleared");
2989 } 2988 }
2990 cvtqsi2sd(dst, src); 2989 cvtqsi2sd(dst, src);
2991 } 2990 }
2992 2991
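The debug check in LoadUint32 (upper 32 bits of the GPR must be clear) is what makes a signed 64-bit-to-double conversion sufficient here: zero-extended, any uint32 is a non-negative int64, so cvtqsi2sd converts it exactly. A plain C++ equivalent for illustration (not the V8 API):

    #include <cstdint>

    double Uint32ToDouble(uint32_t value) {
      // Zero-extend to 64 bits, then convert as a signed 64-bit integer; every
      // uint32 is representable and non-negative, so the conversion is exact.
      return static_cast<double>(static_cast<int64_t>(static_cast<uint64_t>(value)));
    }
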
2993 2992
2994 void MacroAssembler::LoadInstanceDescriptors(Register map, 2993 void MacroAssembler::LoadInstanceDescriptors(Register map,
2995 Register descriptors) { 2994 Register descriptors) {
2996 movq(descriptors, FieldOperand(map, Map::kDescriptorsOffset)); 2995 movq(descriptors, FieldOperand(map, Map::kDescriptorsOffset));
2997 } 2996 }
2998 2997
(...skipping 28 matching lines...)
3027 } 3026 }
3028 3027
3029 3028
3030 void MacroAssembler::AssertNumber(Register object) { 3029 void MacroAssembler::AssertNumber(Register object) {
3031 if (emit_debug_code()) { 3030 if (emit_debug_code()) {
3032 Label ok; 3031 Label ok;
3033 Condition is_smi = CheckSmi(object); 3032 Condition is_smi = CheckSmi(object);
3034 j(is_smi, &ok, Label::kNear); 3033 j(is_smi, &ok, Label::kNear);
3035 Cmp(FieldOperand(object, HeapObject::kMapOffset), 3034 Cmp(FieldOperand(object, HeapObject::kMapOffset),
3036 isolate()->factory()->heap_number_map()); 3035 isolate()->factory()->heap_number_map());
3037 Check(equal, kOperandIsNotANumber); 3036 Check(equal, "Operand is not a number");
3038 bind(&ok); 3037 bind(&ok);
3039 } 3038 }
3040 } 3039 }
3041 3040
3042 3041
3043 void MacroAssembler::AssertNotSmi(Register object) { 3042 void MacroAssembler::AssertNotSmi(Register object) {
3044 if (emit_debug_code()) { 3043 if (emit_debug_code()) {
3045 Condition is_smi = CheckSmi(object); 3044 Condition is_smi = CheckSmi(object);
3046 Check(NegateCondition(is_smi), kOperandIsASmi); 3045 Check(NegateCondition(is_smi), "Operand is a smi");
3047 } 3046 }
3048 } 3047 }
3049 3048
3050 3049
3051 void MacroAssembler::AssertSmi(Register object) { 3050 void MacroAssembler::AssertSmi(Register object) {
3052 if (emit_debug_code()) { 3051 if (emit_debug_code()) {
3053 Condition is_smi = CheckSmi(object); 3052 Condition is_smi = CheckSmi(object);
3054 Check(is_smi, kOperandIsNotASmi); 3053 Check(is_smi, "Operand is not a smi");
3055 } 3054 }
3056 } 3055 }
3057 3056
3058 3057
3059 void MacroAssembler::AssertSmi(const Operand& object) { 3058 void MacroAssembler::AssertSmi(const Operand& object) {
3060 if (emit_debug_code()) { 3059 if (emit_debug_code()) {
3061 Condition is_smi = CheckSmi(object); 3060 Condition is_smi = CheckSmi(object);
3062 Check(is_smi, kOperandIsNotASmi); 3061 Check(is_smi, "Operand is not a smi");
3063 } 3062 }
3064 } 3063 }
3065 3064
3066 3065
3067 void MacroAssembler::AssertZeroExtended(Register int32_register) { 3066 void MacroAssembler::AssertZeroExtended(Register int32_register) {
3068 if (emit_debug_code()) { 3067 if (emit_debug_code()) {
3069 ASSERT(!int32_register.is(kScratchRegister)); 3068 ASSERT(!int32_register.is(kScratchRegister));
3070 movq(kScratchRegister, 0x100000000l, RelocInfo::NONE64); 3069 movq(kScratchRegister, 0x100000000l, RelocInfo::NONE64);
3071 cmpq(kScratchRegister, int32_register); 3070 cmpq(kScratchRegister, int32_register);
3072 Check(above_equal, k32BitValueInRegisterIsNotZeroExtended); 3071 Check(above_equal, "32 bit value in register is not zero-extended");
3073 } 3072 }
3074 } 3073 }
3075 3074
3076 3075
3077 void MacroAssembler::AssertString(Register object) { 3076 void MacroAssembler::AssertString(Register object) {
3078 if (emit_debug_code()) { 3077 if (emit_debug_code()) {
3079 testb(object, Immediate(kSmiTagMask)); 3078 testb(object, Immediate(kSmiTagMask));
3080 Check(not_equal, kOperandIsASmiAndNotAString); 3079 Check(not_equal, "Operand is a smi and not a string");
3081 push(object); 3080 push(object);
3082 movq(object, FieldOperand(object, HeapObject::kMapOffset)); 3081 movq(object, FieldOperand(object, HeapObject::kMapOffset));
3083 CmpInstanceType(object, FIRST_NONSTRING_TYPE); 3082 CmpInstanceType(object, FIRST_NONSTRING_TYPE);
3084 pop(object); 3083 pop(object);
3085 Check(below, kOperandIsNotAString); 3084 Check(below, "Operand is not a string");
3086 } 3085 }
3087 } 3086 }
3088 3087
3089 3088
3090 void MacroAssembler::AssertName(Register object) { 3089 void MacroAssembler::AssertName(Register object) {
3091 if (emit_debug_code()) { 3090 if (emit_debug_code()) {
3092 testb(object, Immediate(kSmiTagMask)); 3091 testb(object, Immediate(kSmiTagMask));
3093 Check(not_equal, kOperandIsASmiAndNotAName); 3092 Check(not_equal, "Operand is a smi and not a name");
3094 push(object); 3093 push(object);
3095 movq(object, FieldOperand(object, HeapObject::kMapOffset)); 3094 movq(object, FieldOperand(object, HeapObject::kMapOffset));
3096 CmpInstanceType(object, LAST_NAME_TYPE); 3095 CmpInstanceType(object, LAST_NAME_TYPE);
3097 pop(object); 3096 pop(object);
3098 Check(below_equal, kOperandIsNotAName); 3097 Check(below_equal, "Operand is not a name");
3099 } 3098 }
3100 } 3099 }
3101 3100
3102 3101
3103 void MacroAssembler::AssertRootValue(Register src, 3102 void MacroAssembler::AssertRootValue(Register src,
3104 Heap::RootListIndex root_value_index, 3103 Heap::RootListIndex root_value_index,
3105 BailoutReason reason) { 3104 const char* message) {
3106 if (emit_debug_code()) { 3105 if (emit_debug_code()) {
3107 ASSERT(!src.is(kScratchRegister)); 3106 ASSERT(!src.is(kScratchRegister));
3108 LoadRoot(kScratchRegister, root_value_index); 3107 LoadRoot(kScratchRegister, root_value_index);
3109 cmpq(src, kScratchRegister); 3108 cmpq(src, kScratchRegister);
3110 Check(equal, reason); 3109 Check(equal, message);
3111 } 3110 }
3112 } 3111 }
3113 3112
3114 3113
3115 3114
3116 Condition MacroAssembler::IsObjectStringType(Register heap_object, 3115 Condition MacroAssembler::IsObjectStringType(Register heap_object,
3117 Register map, 3116 Register map,
3118 Register instance_type) { 3117 Register instance_type) {
3119 movq(map, FieldOperand(heap_object, HeapObject::kMapOffset)); 3118 movq(map, FieldOperand(heap_object, HeapObject::kMapOffset));
3120 movzxbl(instance_type, FieldOperand(map, Map::kInstanceTypeOffset)); 3119 movzxbl(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
(...skipping 330 matching lines...)
3451 movq(rbp, rsp); 3450 movq(rbp, rsp);
3452 push(rsi); // Context. 3451 push(rsi); // Context.
3453 Push(Smi::FromInt(type)); 3452 Push(Smi::FromInt(type));
3454 movq(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT); 3453 movq(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT);
3455 push(kScratchRegister); 3454 push(kScratchRegister);
3456 if (emit_debug_code()) { 3455 if (emit_debug_code()) {
3457 movq(kScratchRegister, 3456 movq(kScratchRegister,
3458 isolate()->factory()->undefined_value(), 3457 isolate()->factory()->undefined_value(),
3459 RelocInfo::EMBEDDED_OBJECT); 3458 RelocInfo::EMBEDDED_OBJECT);
3460 cmpq(Operand(rsp, 0), kScratchRegister); 3459 cmpq(Operand(rsp, 0), kScratchRegister);
3461 Check(not_equal, kCodeObjectNotProperlyPatched); 3460 Check(not_equal, "code object not properly patched");
3462 } 3461 }
3463 } 3462 }
3464 3463
3465 3464
3466 void MacroAssembler::LeaveFrame(StackFrame::Type type) { 3465 void MacroAssembler::LeaveFrame(StackFrame::Type type) {
3467 if (emit_debug_code()) { 3466 if (emit_debug_code()) {
3468 Move(kScratchRegister, Smi::FromInt(type)); 3467 Move(kScratchRegister, Smi::FromInt(type));
3469 cmpq(Operand(rbp, StandardFrameConstants::kMarkerOffset), kScratchRegister); 3468 cmpq(Operand(rbp, StandardFrameConstants::kMarkerOffset), kScratchRegister);
3470 Check(equal, kStackFrameTypesMustMatch); 3469 Check(equal, "stack frame types must match");
3471 } 3470 }
3472 movq(rsp, rbp); 3471 movq(rsp, rbp);
3473 pop(rbp); 3472 pop(rbp);
3474 } 3473 }
3475 3474
3476 3475
3477 void MacroAssembler::EnterExitFramePrologue(bool save_rax) { 3476 void MacroAssembler::EnterExitFramePrologue(bool save_rax) {
3478 // Set up the frame structure on the stack. 3477 // Set up the frame structure on the stack.
3479 // All constants are relative to the frame pointer of the exit frame. 3478 // All constants are relative to the frame pointer of the exit frame.
3480 ASSERT(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize); 3479 ASSERT(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize);
(...skipping 80 matching lines...)
3561 } 3560 }
3562 } 3561 }
3563 // Get the return address from the stack and restore the frame pointer. 3562 // Get the return address from the stack and restore the frame pointer.
3564 movq(rcx, Operand(rbp, 1 * kPointerSize)); 3563 movq(rcx, Operand(rbp, 1 * kPointerSize));
3565 movq(rbp, Operand(rbp, 0 * kPointerSize)); 3564 movq(rbp, Operand(rbp, 0 * kPointerSize));
3566 3565
3567 // Drop everything up to and including the arguments and the receiver 3566 // Drop everything up to and including the arguments and the receiver
3568 // from the caller stack. 3567 // from the caller stack.
3569 lea(rsp, Operand(r15, 1 * kPointerSize)); 3568 lea(rsp, Operand(r15, 1 * kPointerSize));
3570 3569
3571 PushReturnAddressFrom(rcx); 3570 // Push the return address to get ready to return.
3571 push(rcx);
3572 3572
3573 LeaveExitFrameEpilogue(); 3573 LeaveExitFrameEpilogue();
3574 } 3574 }
3575 3575
3576 3576
3577 void MacroAssembler::LeaveApiExitFrame() { 3577 void MacroAssembler::LeaveApiExitFrame() {
3578 movq(rsp, rbp); 3578 movq(rsp, rbp);
3579 pop(rbp); 3579 pop(rbp);
3580 3580
3581 LeaveExitFrameEpilogue(); 3581 LeaveExitFrameEpilogue();
(...skipping 23 matching lines...)
3605 Label same_contexts; 3605 Label same_contexts;
3606 3606
3607 ASSERT(!holder_reg.is(scratch)); 3607 ASSERT(!holder_reg.is(scratch));
3608 ASSERT(!scratch.is(kScratchRegister)); 3608 ASSERT(!scratch.is(kScratchRegister));
3609 // Load current lexical context from the stack frame. 3609 // Load current lexical context from the stack frame.
3610 movq(scratch, Operand(rbp, StandardFrameConstants::kContextOffset)); 3610 movq(scratch, Operand(rbp, StandardFrameConstants::kContextOffset));
3611 3611
3612 // When generating debug code, make sure the lexical context is set. 3612 // When generating debug code, make sure the lexical context is set.
3613 if (emit_debug_code()) { 3613 if (emit_debug_code()) {
3614 cmpq(scratch, Immediate(0)); 3614 cmpq(scratch, Immediate(0));
3615 Check(not_equal, kWeShouldNotHaveAnEmptyLexicalContext); 3615 Check(not_equal, "we should not have an empty lexical context");
3616 } 3616 }
3617 // Load the native context of the current context. 3617 // Load the native context of the current context.
3618 int offset = 3618 int offset =
3619 Context::kHeaderSize + Context::GLOBAL_OBJECT_INDEX * kPointerSize; 3619 Context::kHeaderSize + Context::GLOBAL_OBJECT_INDEX * kPointerSize;
3620 movq(scratch, FieldOperand(scratch, offset)); 3620 movq(scratch, FieldOperand(scratch, offset));
3621 movq(scratch, FieldOperand(scratch, GlobalObject::kNativeContextOffset)); 3621 movq(scratch, FieldOperand(scratch, GlobalObject::kNativeContextOffset));
3622 3622
3623 // Check the context is a native context. 3623 // Check the context is a native context.
3624 if (emit_debug_code()) { 3624 if (emit_debug_code()) {
3625 Cmp(FieldOperand(scratch, HeapObject::kMapOffset), 3625 Cmp(FieldOperand(scratch, HeapObject::kMapOffset),
3626 isolate()->factory()->native_context_map()); 3626 isolate()->factory()->native_context_map());
3627 Check(equal, kJSGlobalObjectNativeContextShouldBeANativeContext); 3627 Check(equal, "JSGlobalObject::native_context should be a native context.");
3628 } 3628 }
3629 3629
3630 // Check if both contexts are the same. 3630 // Check if both contexts are the same.
3631 cmpq(scratch, FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset)); 3631 cmpq(scratch, FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
3632 j(equal, &same_contexts); 3632 j(equal, &same_contexts);
3633 3633
3634 // Compare security tokens. 3634 // Compare security tokens.
3635 // Check that the security token in the calling global object is 3635 // Check that the security token in the calling global object is
3636 // compatible with the security token in the receiving global 3636 // compatible with the security token in the receiving global
3637 // object. 3637 // object.
3638 3638
3639 // Check the context is a native context. 3639 // Check the context is a native context.
3640 if (emit_debug_code()) { 3640 if (emit_debug_code()) {
3641 // Preserve original value of holder_reg. 3641 // Preserve original value of holder_reg.
3642 push(holder_reg); 3642 push(holder_reg);
3643 movq(holder_reg, 3643 movq(holder_reg,
3644 FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset)); 3644 FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
3645 CompareRoot(holder_reg, Heap::kNullValueRootIndex); 3645 CompareRoot(holder_reg, Heap::kNullValueRootIndex);
3646 Check(not_equal, kJSGlobalProxyContextShouldNotBeNull); 3646 Check(not_equal, "JSGlobalProxy::context() should not be null.");
3647 3647
3648 // Read the first word and compare to native_context_map(), 3648 // Read the first word and compare to native_context_map(),
3649 movq(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset)); 3649 movq(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset));
3650 CompareRoot(holder_reg, Heap::kNativeContextMapRootIndex); 3650 CompareRoot(holder_reg, Heap::kNativeContextMapRootIndex);
3651 Check(equal, kJSGlobalObjectNativeContextShouldBeANativeContext); 3651 Check(equal, "JSGlobalObject::native_context should be a native context.");
3652 pop(holder_reg); 3652 pop(holder_reg);
3653 } 3653 }
3654 3654
3655 movq(kScratchRegister, 3655 movq(kScratchRegister,
3656 FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset)); 3656 FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
3657 int token_offset = 3657 int token_offset =
3658 Context::kHeaderSize + Context::SECURITY_TOKEN_INDEX * kPointerSize; 3658 Context::kHeaderSize + Context::SECURITY_TOKEN_INDEX * kPointerSize;
3659 movq(scratch, FieldOperand(scratch, token_offset)); 3659 movq(scratch, FieldOperand(scratch, token_offset));
3660 cmpq(scratch, FieldOperand(kScratchRegister, token_offset)); 3660 cmpq(scratch, FieldOperand(kScratchRegister, token_offset));
3661 j(not_equal, miss); 3661 j(not_equal, miss);
(...skipping 125 matching lines...)
3787 AllocationUtils::GetAllocationTopReference(isolate(), flags); 3787 AllocationUtils::GetAllocationTopReference(isolate(), flags);
3788 3788
3789 // Just return if allocation top is already known. 3789 // Just return if allocation top is already known.
3790 if ((flags & RESULT_CONTAINS_TOP) != 0) { 3790 if ((flags & RESULT_CONTAINS_TOP) != 0) {
3791 // No use of scratch if allocation top is provided. 3791 // No use of scratch if allocation top is provided.
3792 ASSERT(!scratch.is_valid()); 3792 ASSERT(!scratch.is_valid());
3793 #ifdef DEBUG 3793 #ifdef DEBUG
3794 // Assert that result actually contains top on entry. 3794 // Assert that result actually contains top on entry.
3795 Operand top_operand = ExternalOperand(allocation_top); 3795 Operand top_operand = ExternalOperand(allocation_top);
3796 cmpq(result, top_operand); 3796 cmpq(result, top_operand);
3797 Check(equal, kUnexpectedAllocationTop); 3797 Check(equal, "Unexpected allocation top");
3798 #endif 3798 #endif
3799 return; 3799 return;
3800 } 3800 }
3801 3801
3802 // Move address of new object to result. Use scratch register if available, 3802 // Move address of new object to result. Use scratch register if available,
3803 // and keep address in scratch until call to UpdateAllocationTopHelper. 3803 // and keep address in scratch until call to UpdateAllocationTopHelper.
3804 if (scratch.is_valid()) { 3804 if (scratch.is_valid()) {
3805 LoadAddress(scratch, allocation_top); 3805 LoadAddress(scratch, allocation_top);
3806 movq(result, Operand(scratch, 0)); 3806 movq(result, Operand(scratch, 0));
3807 } else { 3807 } else {
3808 Load(result, allocation_top); 3808 Load(result, allocation_top);
3809 } 3809 }
3810 } 3810 }
3811 3811
3812 3812
3813 void MacroAssembler::UpdateAllocationTopHelper(Register result_end, 3813 void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
3814 Register scratch, 3814 Register scratch,
3815 AllocationFlags flags) { 3815 AllocationFlags flags) {
3816 if (emit_debug_code()) { 3816 if (emit_debug_code()) {
3817 testq(result_end, Immediate(kObjectAlignmentMask)); 3817 testq(result_end, Immediate(kObjectAlignmentMask));
3818 Check(zero, kUnalignedAllocationInNewSpace); 3818 Check(zero, "Unaligned allocation in new space");
3819 } 3819 }
3820 3820
3821 ExternalReference allocation_top = 3821 ExternalReference allocation_top =
3822 AllocationUtils::GetAllocationTopReference(isolate(), flags); 3822 AllocationUtils::GetAllocationTopReference(isolate(), flags);
3823 3823
3824 // Update new top. 3824 // Update new top.
3825 if (scratch.is_valid()) { 3825 if (scratch.is_valid()) {
3826 // Scratch already contains address of allocation top. 3826 // Scratch already contains address of allocation top.
3827 movq(Operand(scratch, 0), result_end); 3827 movq(Operand(scratch, 0), result_end);
3828 } else { 3828 } else {
(...skipping 26 matching lines...)
3855 } 3855 }
3856 ASSERT(!result.is(result_end)); 3856 ASSERT(!result.is(result_end));
3857 3857
3858 // Load address of new object into result. 3858 // Load address of new object into result.
3859 LoadAllocationTopHelper(result, scratch, flags); 3859 LoadAllocationTopHelper(result, scratch, flags);
3860 3860
3861 // Align the next allocation. Storing the filler map without checking top is 3861 // Align the next allocation. Storing the filler map without checking top is
3862 // always safe because the limit of the heap is always aligned. 3862 // always safe because the limit of the heap is always aligned.
3863 if (((flags & DOUBLE_ALIGNMENT) != 0) && FLAG_debug_code) { 3863 if (((flags & DOUBLE_ALIGNMENT) != 0) && FLAG_debug_code) {
3864 testq(result, Immediate(kDoubleAlignmentMask)); 3864 testq(result, Immediate(kDoubleAlignmentMask));
3865 Check(zero, kAllocationIsNotDoubleAligned); 3865 Check(zero, "Allocation is not double aligned");
3866 } 3866 }
3867 3867
3868 // Calculate new top and bail out if new space is exhausted. 3868 // Calculate new top and bail out if new space is exhausted.
3869 ExternalReference allocation_limit = 3869 ExternalReference allocation_limit =
3870 AllocationUtils::GetAllocationLimitReference(isolate(), flags); 3870 AllocationUtils::GetAllocationLimitReference(isolate(), flags);
3871 3871
3872 Register top_reg = result_end.is_valid() ? result_end : result; 3872 Register top_reg = result_end.is_valid() ? result_end : result;
3873 3873
3874 if (!top_reg.is(result)) { 3874 if (!top_reg.is(result)) {
3875 movq(top_reg, result); 3875 movq(top_reg, result);
(...skipping 58 matching lines...)
3934 } 3934 }
3935 ASSERT(!result.is(result_end)); 3935 ASSERT(!result.is(result_end));
3936 3936
3937 // Load address of new object into result. 3937 // Load address of new object into result.
3938 LoadAllocationTopHelper(result, scratch, flags); 3938 LoadAllocationTopHelper(result, scratch, flags);
3939 3939
3940 // Align the next allocation. Storing the filler map without checking top is 3940 // Align the next allocation. Storing the filler map without checking top is
3941 // always safe because the limit of the heap is always aligned. 3941 // always safe because the limit of the heap is always aligned.
3942 if (((flags & DOUBLE_ALIGNMENT) != 0) && FLAG_debug_code) { 3942 if (((flags & DOUBLE_ALIGNMENT) != 0) && FLAG_debug_code) {
3943 testq(result, Immediate(kDoubleAlignmentMask)); 3943 testq(result, Immediate(kDoubleAlignmentMask));
3944 Check(zero, kAllocationIsNotDoubleAligned); 3944 Check(zero, "Allocation is not double aligned");
3945 } 3945 }
3946 3946
3947 // Calculate new top and bail out if new space is exhausted. 3947 // Calculate new top and bail out if new space is exhausted.
3948 ExternalReference allocation_limit = 3948 ExternalReference allocation_limit =
3949 AllocationUtils::GetAllocationLimitReference(isolate(), flags); 3949 AllocationUtils::GetAllocationLimitReference(isolate(), flags);
3950 if (!object_size.is(result_end)) { 3950 if (!object_size.is(result_end)) {
3951 movq(result_end, object_size); 3951 movq(result_end, object_size);
3952 } 3952 }
3953 addq(result_end, result); 3953 addq(result_end, result);
3954 j(carry, gc_required); 3954 j(carry, gc_required);
(...skipping 13 matching lines...)
3968 3968
3969 void MacroAssembler::UndoAllocationInNewSpace(Register object) { 3969 void MacroAssembler::UndoAllocationInNewSpace(Register object) {
3970 ExternalReference new_space_allocation_top = 3970 ExternalReference new_space_allocation_top =
3971 ExternalReference::new_space_allocation_top_address(isolate()); 3971 ExternalReference::new_space_allocation_top_address(isolate());
3972 3972
3973 // Make sure the object has no tag before resetting top. 3973 // Make sure the object has no tag before resetting top.
3974 and_(object, Immediate(~kHeapObjectTagMask)); 3974 and_(object, Immediate(~kHeapObjectTagMask));
3975 Operand top_operand = ExternalOperand(new_space_allocation_top); 3975 Operand top_operand = ExternalOperand(new_space_allocation_top);
3976 #ifdef DEBUG 3976 #ifdef DEBUG
3977 cmpq(object, top_operand); 3977 cmpq(object, top_operand);
3978 Check(below, kUndoAllocationOfNonAllocatedMemory); 3978 Check(below, "Undo allocation of non allocated memory");
3979 #endif 3979 #endif
3980 movq(top_operand, object); 3980 movq(top_operand, object);
3981 } 3981 }
3982 3982
3983 3983
3984 void MacroAssembler::AllocateHeapNumber(Register result, 3984 void MacroAssembler::AllocateHeapNumber(Register result,
3985 Register scratch, 3985 Register scratch,
3986 Label* gc_required) { 3986 Label* gc_required) {
3987 // Allocate heap number in new space. 3987 // Allocate heap number in new space.
3988 Allocate(HeapNumber::kSize, result, scratch, no_reg, gc_required, TAG_OBJECT); 3988 Allocate(HeapNumber::kSize, result, scratch, no_reg, gc_required, TAG_OBJECT);
(...skipping 169 matching lines...)
4158 // The cld() instruction must have been emitted, to set the direction flag(), 4158 // The cld() instruction must have been emitted, to set the direction flag(),
4159 // before calling this function. 4159 // before calling this function.
4160 void MacroAssembler::CopyBytes(Register destination, 4160 void MacroAssembler::CopyBytes(Register destination,
4161 Register source, 4161 Register source,
4162 Register length, 4162 Register length,
4163 int min_length, 4163 int min_length,
4164 Register scratch) { 4164 Register scratch) {
4165 ASSERT(min_length >= 0); 4165 ASSERT(min_length >= 0);
4166 if (emit_debug_code()) { 4166 if (emit_debug_code()) {
4167 cmpl(length, Immediate(min_length)); 4167 cmpl(length, Immediate(min_length));
4168 Assert(greater_equal, kInvalidMinLength); 4168 Assert(greater_equal, "Invalid min_length");
4169 } 4169 }
4170 Label loop, done, short_string, short_loop; 4170 Label loop, done, short_string, short_loop;
4171 4171
4172 const int kLongStringLimit = 20; 4172 const int kLongStringLimit = 20;
4173 if (min_length <= kLongStringLimit) { 4173 if (min_length <= kLongStringLimit) {
4174 cmpl(length, Immediate(kLongStringLimit)); 4174 cmpl(length, Immediate(kLongStringLimit));
4175 j(less_equal, &short_string); 4175 j(less_equal, &short_string);
4176 } 4176 }
4177 4177
4178 ASSERT(source.is(rsi)); 4178 ASSERT(source.is(rsi));
(...skipping 63 matching lines...)
4242 movq(dst, rsi); 4242 movq(dst, rsi);
4243 } 4243 }
4244 4244
4245 // We should not have found a with context by walking the context 4245 // We should not have found a with context by walking the context
4246 // chain (i.e., the static scope chain and runtime context chain do 4246 // chain (i.e., the static scope chain and runtime context chain do
4247 // not agree). A variable occurring in such a scope should have 4247 // not agree). A variable occurring in such a scope should have
4248 // slot type LOOKUP and not CONTEXT. 4248 // slot type LOOKUP and not CONTEXT.
4249 if (emit_debug_code()) { 4249 if (emit_debug_code()) {
4250 CompareRoot(FieldOperand(dst, HeapObject::kMapOffset), 4250 CompareRoot(FieldOperand(dst, HeapObject::kMapOffset),
4251 Heap::kWithContextMapRootIndex); 4251 Heap::kWithContextMapRootIndex);
4252 Check(not_equal, kVariableResolvedToWithContext); 4252 Check(not_equal, "Variable resolved to with context.");
4253 } 4253 }
4254 } 4254 }
4255 4255
4256 4256
4257 void MacroAssembler::LoadTransitionedArrayMapConditional( 4257 void MacroAssembler::LoadTransitionedArrayMapConditional(
4258 ElementsKind expected_kind, 4258 ElementsKind expected_kind,
4259 ElementsKind transitioned_kind, 4259 ElementsKind transitioned_kind,
4260 Register map_in_out, 4260 Register map_in_out,
4261 Register scratch, 4261 Register scratch,
4262 Label* no_map_match) { 4262 Label* no_map_match) {
(...skipping 70 matching lines...)
4333 4333
4334 void MacroAssembler::LoadGlobalFunctionInitialMap(Register function, 4334 void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
4335 Register map) { 4335 Register map) {
4336 // Load the initial map. The global functions all have initial maps. 4336 // Load the initial map. The global functions all have initial maps.
4337 movq(map, FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset)); 4337 movq(map, FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
4338 if (emit_debug_code()) { 4338 if (emit_debug_code()) {
4339 Label ok, fail; 4339 Label ok, fail;
4340 CheckMap(map, isolate()->factory()->meta_map(), &fail, DO_SMI_CHECK); 4340 CheckMap(map, isolate()->factory()->meta_map(), &fail, DO_SMI_CHECK);
4341 jmp(&ok); 4341 jmp(&ok);
4342 bind(&fail); 4342 bind(&fail);
4343 Abort(kGlobalFunctionsMustHaveInitialMap); 4343 Abort("Global functions must have initial map");
4344 bind(&ok); 4344 bind(&ok);
4345 } 4345 }
4346 } 4346 }
4347 4347
4348 4348
4349 int MacroAssembler::ArgumentStackSlotsForCFunctionCall(int num_arguments) { 4349 int MacroAssembler::ArgumentStackSlotsForCFunctionCall(int num_arguments) {
4350 // On Windows 64 stack slots are reserved by the caller for all arguments 4350 // On Windows 64 stack slots are reserved by the caller for all arguments
4351 // including the ones passed in registers, and space is always allocated for 4351 // including the ones passed in registers, and space is always allocated for
4352 // the four register arguments even if the function takes fewer than four 4352 // the four register arguments even if the function takes fewer than four
4353 // arguments. 4353 // arguments.
(...skipping 340 matching lines...)
4694 j(greater, &no_memento_available); 4694 j(greater, &no_memento_available);
4695 CompareRoot(MemOperand(scratch_reg, -AllocationMemento::kSize), 4695 CompareRoot(MemOperand(scratch_reg, -AllocationMemento::kSize),
4696 Heap::kAllocationMementoMapRootIndex); 4696 Heap::kAllocationMementoMapRootIndex);
4697 bind(&no_memento_available); 4697 bind(&no_memento_available);
4698 } 4698 }
4699 4699
4700 4700
4701 } } // namespace v8::internal 4701 } } // namespace v8::internal
4702 4702
4703 #endif // V8_TARGET_ARCH_X64 4703 #endif // V8_TARGET_ARCH_X64