Chromium Code Reviews

Side by Side Diff: src/x64/macro-assembler-x64.cc

Issue 20843012: Extract hardcoded error strings into a single place and replace them with enum. (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: styles fixed | Created 7 years, 4 months ago
OLD | NEW
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 431 matching lines...)
442 442
443 // Clobber clobbered registers when running with the debug-code flag 443 // Clobber clobbered registers when running with the debug-code flag
444 // turned on to provoke errors. 444 // turned on to provoke errors.
445 if (emit_debug_code()) { 445 if (emit_debug_code()) {
446 movq(address, BitCast<int64_t>(kZapValue), RelocInfo::NONE64); 446 movq(address, BitCast<int64_t>(kZapValue), RelocInfo::NONE64);
447 movq(value, BitCast<int64_t>(kZapValue), RelocInfo::NONE64); 447 movq(value, BitCast<int64_t>(kZapValue), RelocInfo::NONE64);
448 } 448 }
449 } 449 }
450 450
451 451
452 void MacroAssembler::Assert(Condition cc, const char* msg) { 452 void MacroAssembler::Assert(Condition cc, BailoutReason reason) {
453 if (emit_debug_code()) Check(cc, msg); 453 if (emit_debug_code()) Check(cc, reason);
454 } 454 }
455 455
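The signature change above, const char* msg becoming BailoutReason reason, is the heart of this CL: every hardcoded abort string moves into one enum plus a lookup table. Below is a minimal sketch of how such an enum and its GetBailoutReason() helper can be generated from a single list macro; the reason names are taken from this diff, but the macro shape and file layout are assumptions, not necessarily V8's exact definition.

#define ERROR_MESSAGES_LIST(V)                                          \
  V(kSmiAdditionOverflow, "Smi addition overflow")                      \
  V(kOperandIsNotASmi, "Operand is not a smi")                          \
  V(kCodeObjectNotProperlyPatched, "code object not properly patched")

#define ERROR_MESSAGES_CONSTANTS(C, T) C,
enum BailoutReason {
  ERROR_MESSAGES_LIST(ERROR_MESSAGES_CONSTANTS)
  kLastErrorMessage
};
#undef ERROR_MESSAGES_CONSTANTS

// One string per enumerator, generated from the same list so the enum and
// the message table cannot drift apart.
inline const char* GetBailoutReason(BailoutReason reason) {
#define ERROR_MESSAGES_TEXTS(C, T) T,
  static const char* const messages[] = {
    ERROR_MESSAGES_LIST(ERROR_MESSAGES_TEXTS)
  };
#undef ERROR_MESSAGES_TEXTS
  return messages[reason];
}

With this in place, call sites only pass the enumerator; the single place that actually prints the message (Abort(), below) pays for the string lookup.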
456 456
457 void MacroAssembler::AssertFastElements(Register elements) { 457 void MacroAssembler::AssertFastElements(Register elements) {
458 if (emit_debug_code()) { 458 if (emit_debug_code()) {
459 Label ok; 459 Label ok;
460 CompareRoot(FieldOperand(elements, HeapObject::kMapOffset), 460 CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
461 Heap::kFixedArrayMapRootIndex); 461 Heap::kFixedArrayMapRootIndex);
462 j(equal, &ok, Label::kNear); 462 j(equal, &ok, Label::kNear);
463 CompareRoot(FieldOperand(elements, HeapObject::kMapOffset), 463 CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
464 Heap::kFixedDoubleArrayMapRootIndex); 464 Heap::kFixedDoubleArrayMapRootIndex);
465 j(equal, &ok, Label::kNear); 465 j(equal, &ok, Label::kNear);
466 CompareRoot(FieldOperand(elements, HeapObject::kMapOffset), 466 CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
467 Heap::kFixedCOWArrayMapRootIndex); 467 Heap::kFixedCOWArrayMapRootIndex);
468 j(equal, &ok, Label::kNear); 468 j(equal, &ok, Label::kNear);
469 Abort("JSObject with fast elements map has slow elements"); 469 Abort(kJSObjectWithFastElementsMapHasSlowElements);
470 bind(&ok); 470 bind(&ok);
471 } 471 }
472 } 472 }
473 473
474 474
475 void MacroAssembler::Check(Condition cc, const char* msg) { 475 void MacroAssembler::Check(Condition cc, BailoutReason reason) {
476 Label L; 476 Label L;
477 j(cc, &L, Label::kNear); 477 j(cc, &L, Label::kNear);
478 Abort(msg); 478 Abort(reason);
479 // Control will not return here. 479 // Control will not return here.
480 bind(&L); 480 bind(&L);
481 } 481 }
482 482
483 483
484 void MacroAssembler::CheckStackAlignment() { 484 void MacroAssembler::CheckStackAlignment() {
485 int frame_alignment = OS::ActivationFrameAlignment(); 485 int frame_alignment = OS::ActivationFrameAlignment();
486 int frame_alignment_mask = frame_alignment - 1; 486 int frame_alignment_mask = frame_alignment - 1;
487 if (frame_alignment > kPointerSize) { 487 if (frame_alignment > kPointerSize) {
488 ASSERT(IsPowerOf2(frame_alignment)); 488 ASSERT(IsPowerOf2(frame_alignment));
(...skipping 12 matching lines...)
501 Label* then_label) { 501 Label* then_label) {
502 Label ok; 502 Label ok;
503 testl(result, result); 503 testl(result, result);
504 j(not_zero, &ok, Label::kNear); 504 j(not_zero, &ok, Label::kNear);
505 testl(op, op); 505 testl(op, op);
506 j(sign, then_label); 506 j(sign, then_label);
507 bind(&ok); 507 bind(&ok);
508 } 508 }
509 509
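CheckStackAlignment() above (its body is folded) leans on the usual power-of-two trick: for an alignment A that is a power of two, an address is aligned exactly when (addr & (A - 1)) == 0, which is what frame_alignment_mask is for. A small standalone illustration, with example addresses that are assumptions rather than anything this file defines:

#include <cstdint>
#include <cstdio>

// Valid only when alignment is a power of two, which the ASSERT above checks.
static bool IsAligned(uintptr_t addr, uintptr_t alignment) {
  return (addr & (alignment - 1)) == 0;
}

int main() {
  std::printf("%d\n", IsAligned(0x7fff000, 16));  // 1: 16-byte aligned
  std::printf("%d\n", IsAligned(0x7fff008, 16));  // 0: misaligned by 8
  return 0;
}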
510 510
511 void MacroAssembler::Abort(const char* msg) { 511 void MacroAssembler::Abort(BailoutReason reason) {
512 // We want to pass the msg string like a smi to avoid GC 512 // We want to pass the msg string like a smi to avoid GC
513 // problems, however msg is not guaranteed to be aligned 513 // problems, however msg is not guaranteed to be aligned
514 // properly. Instead, we pass an aligned pointer that is 514 // properly. Instead, we pass an aligned pointer that is
515 // a proper v8 smi, but also pass the alignment difference 515 // a proper v8 smi, but also pass the alignment difference
516 // from the real pointer as a smi. 516 // from the real pointer as a smi.
517 const char* msg = GetBailoutReason(reason);
517 intptr_t p1 = reinterpret_cast<intptr_t>(msg); 518 intptr_t p1 = reinterpret_cast<intptr_t>(msg);
518 intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag; 519 intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
519 // Note: p0 might not be a valid Smi _value_, but it has a valid Smi tag. 520 // Note: p0 might not be a valid Smi _value_, but it has a valid Smi tag.
520 ASSERT(reinterpret_cast<Object*>(p0)->IsSmi()); 521 ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
521 #ifdef DEBUG 522 #ifdef DEBUG
522 if (msg != NULL) { 523 if (msg != NULL) {
523 RecordComment("Abort message: "); 524 RecordComment("Abort message: ");
524 RecordComment(msg); 525 RecordComment(msg);
525 } 526 }
526 #endif 527 #endif
(...skipping 304 matching lines...)
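Abort() above smuggles the message pointer past the GC by dressing it up as smi data: p0 is the pointer rounded down to a value with a valid smi tag, and (in the folded remainder of the function) the small difference p1 - p0 travels alongside as a genuine smi so the runtime can reconstruct the original char*. A hypothetical host-side illustration of that arithmetic, assuming the x64 tag constants kSmiTag == 0 and kSmiTagMask == 1:

#include <cassert>
#include <cstdint>
#include <cstdio>

const intptr_t kSmiTag = 0;      // assumed x64 values
const intptr_t kSmiTagMask = 1;

int main() {
  const char* msg = "Smi addition overflow";
  intptr_t p1 = reinterpret_cast<intptr_t>(msg);
  intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;  // rounded down, smi-tagged
  intptr_t delta = p1 - p0;                     // 0 or 1: the alignment slack
  assert((p0 & kSmiTagMask) == kSmiTag);        // p0 now carries a valid smi tag
  const char* recovered = reinterpret_cast<const char*>(p0 + delta);
  std::printf("%s\n", recovered);               // prints the original message
  return 0;
}

After this CL the string travels the same way; it is simply looked up from the enum first via GetBailoutReason(reason).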
831 832
832 CompareRoot(return_value, Heap::kTrueValueRootIndex); 833 CompareRoot(return_value, Heap::kTrueValueRootIndex);
833 j(equal, &ok, Label::kNear); 834 j(equal, &ok, Label::kNear);
834 835
835 CompareRoot(return_value, Heap::kFalseValueRootIndex); 836 CompareRoot(return_value, Heap::kFalseValueRootIndex);
836 j(equal, &ok, Label::kNear); 837 j(equal, &ok, Label::kNear);
837 838
838 CompareRoot(return_value, Heap::kNullValueRootIndex); 839 CompareRoot(return_value, Heap::kNullValueRootIndex);
839 j(equal, &ok, Label::kNear); 840 j(equal, &ok, Label::kNear);
840 841
841 Abort("API call returned invalid object"); 842 Abort(kAPICallReturnedInvalidObject);
842 843
843 bind(&ok); 844 bind(&ok);
844 #endif 845 #endif
845 846
846 LeaveApiExitFrame(); 847 LeaveApiExitFrame();
847 ret(stack_space * kPointerSize); 848 ret(stack_space * kPointerSize);
848 849
849 bind(&promote_scheduled_exception); 850 bind(&promote_scheduled_exception);
850 TailCallRuntime(Runtime::kPromoteScheduledException, 0, 1); 851 TailCallRuntime(Runtime::kPromoteScheduledException, 0, 1);
851 852
(...skipping 179 matching lines...)
1031 } 1032 }
1032 1033
1033 1034
1034 void MacroAssembler::LoadSmiConstant(Register dst, Smi* source) { 1035 void MacroAssembler::LoadSmiConstant(Register dst, Smi* source) {
1035 if (emit_debug_code()) { 1036 if (emit_debug_code()) {
1036 movq(dst, 1037 movq(dst,
1037 reinterpret_cast<uint64_t>(Smi::FromInt(kSmiConstantRegisterValue)), 1038 reinterpret_cast<uint64_t>(Smi::FromInt(kSmiConstantRegisterValue)),
1038 RelocInfo::NONE64); 1039 RelocInfo::NONE64);
1039 cmpq(dst, kSmiConstantRegister); 1040 cmpq(dst, kSmiConstantRegister);
1040 if (allow_stub_calls()) { 1041 if (allow_stub_calls()) {
1041 Assert(equal, "Uninitialized kSmiConstantRegister"); 1042 Assert(equal, kUninitializedKSmiConstantRegister);
1042 } else { 1043 } else {
1043 Label ok; 1044 Label ok;
1044 j(equal, &ok, Label::kNear); 1045 j(equal, &ok, Label::kNear);
1045 int3(); 1046 int3();
1046 bind(&ok); 1047 bind(&ok);
1047 } 1048 }
1048 } 1049 }
1049 int value = source->value(); 1050 int value = source->value();
1050 if (value == 0) { 1051 if (value == 0) {
1051 xorl(dst, dst); 1052 xorl(dst, dst);
(...skipping 47 matching lines...)
1099 shl(dst, Immediate(kSmiShift)); 1100 shl(dst, Immediate(kSmiShift));
1100 } 1101 }
1101 1102
1102 1103
1103 void MacroAssembler::Integer32ToSmiField(const Operand& dst, Register src) { 1104 void MacroAssembler::Integer32ToSmiField(const Operand& dst, Register src) {
1104 if (emit_debug_code()) { 1105 if (emit_debug_code()) {
1105 testb(dst, Immediate(0x01)); 1106 testb(dst, Immediate(0x01));
1106 Label ok; 1107 Label ok;
1107 j(zero, &ok, Label::kNear); 1108 j(zero, &ok, Label::kNear);
1108 if (allow_stub_calls()) { 1109 if (allow_stub_calls()) {
1109 Abort("Integer32ToSmiField writing to non-smi location"); 1110 Abort(kInteger32ToSmiFieldWritingToNonSmiLocation);
1110 } else { 1111 } else {
1111 int3(); 1112 int3();
1112 } 1113 }
1113 bind(&ok); 1114 bind(&ok);
1114 } 1115 }
1115 ASSERT(kSmiShift % kBitsPerByte == 0); 1116 ASSERT(kSmiShift % kBitsPerByte == 0);
1116 movl(Operand(dst, kSmiShift / kBitsPerByte), src); 1117 movl(Operand(dst, kSmiShift / kBitsPerByte), src);
1117 } 1118 }
1118 1119
1119 1120
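Integer32ToSmiField() above exploits the 64-bit smi layout: the payload lives in the upper 32 bits (kSmiShift is 32), so a 32-bit store at byte offset kSmiShift / kBitsPerByte writes the value while leaving the low tag half untouched, and the debug check first verifies the destination really is a smi slot (low bit clear). A little-endian host sketch of the same layout; the constants are assumptions mirroring the x64 configuration:

#include <cstdint>
#include <cstdio>
#include <cstring>

const int kSmiShift = 32;       // assumed x64 smi layout: payload in the high half
const int kBitsPerByte = 8;

int main() {
  int64_t field = 0;            // a smi slot: tag bits in the low half stay zero
  int32_t value = 1234;
  // Equivalent of movl(Operand(dst, kSmiShift / kBitsPerByte), src):
  std::memcpy(reinterpret_cast<char*>(&field) + kSmiShift / kBitsPerByte,
              &value, sizeof(value));
  std::printf("%lld\n", static_cast<long long>(field >> kSmiShift));  // 1234
  return 0;
}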
(...skipping 562 matching lines...)
1682 1683
1683 void MacroAssembler::SmiAdd(Register dst, 1684 void MacroAssembler::SmiAdd(Register dst,
1684 Register src1, 1685 Register src1,
1685 Register src2) { 1686 Register src2) {
1686 // No overflow checking. Use only when it's known that 1687 // No overflow checking. Use only when it's known that
1687 // overflowing is impossible. 1688 // overflowing is impossible.
1688 if (!dst.is(src1)) { 1689 if (!dst.is(src1)) {
1689 if (emit_debug_code()) { 1690 if (emit_debug_code()) {
1690 movq(kScratchRegister, src1); 1691 movq(kScratchRegister, src1);
1691 addq(kScratchRegister, src2); 1692 addq(kScratchRegister, src2);
1692 Check(no_overflow, "Smi addition overflow"); 1693 Check(no_overflow, kSmiAdditionOverflow);
1693 } 1694 }
1694 lea(dst, Operand(src1, src2, times_1, 0)); 1695 lea(dst, Operand(src1, src2, times_1, 0));
1695 } else { 1696 } else {
1696 addq(dst, src2); 1697 addq(dst, src2);
1697 Assert(no_overflow, "Smi addition overflow"); 1698 Assert(no_overflow, kSmiAdditionOverflow);
1698 } 1699 }
1699 } 1700 }
1700 1701
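SmiAdd() above shows the debug-only overflow check pattern: when dst and src1 differ, the sum is first formed in kScratchRegister purely to set the overflow flag, and the real result is then produced with lea, which does not touch the flags; when dst aliases src1, a plain addq is followed by the Assert. A rough portable analogue, using the GCC/Clang checked-add builtin (a toolchain assumption, not something V8 does here):

#include <cstdint>
#include <cstdio>
#include <cstdlib>

int64_t CheckedSmiAdd(int64_t a, int64_t b) {
  int64_t sum;
  if (__builtin_add_overflow(a, b, &sum)) {
    // Corresponds to Check(no_overflow, kSmiAdditionOverflow) failing.
    std::fprintf(stderr, "Abort: Smi addition overflow\n");
    std::abort();
  }
  return sum;  // known not to have overflowed
}

int main() {
  std::printf("%lld\n", static_cast<long long>(CheckedSmiAdd(40, 2)));  // 42
  return 0;
}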
1701 1702
1702 void MacroAssembler::SmiSub(Register dst, 1703 void MacroAssembler::SmiSub(Register dst,
1703 Register src1, 1704 Register src1,
1704 Register src2, 1705 Register src2,
1705 Label* on_not_smi_result, 1706 Label* on_not_smi_result,
1706 Label::Distance near_jump) { 1707 Label::Distance near_jump) {
1707 ASSERT_NOT_NULL(on_not_smi_result); 1708 ASSERT_NOT_NULL(on_not_smi_result);
(...skipping 11 matching lines...)
1719 1720
1720 1721
1721 void MacroAssembler::SmiSub(Register dst, Register src1, Register src2) { 1722 void MacroAssembler::SmiSub(Register dst, Register src1, Register src2) {
1722 // No overflow checking. Use only when it's known that 1723 // No overflow checking. Use only when it's known that
1723 // overflowing is impossible (e.g., subtracting two positive smis). 1724 // overflowing is impossible (e.g., subtracting two positive smis).
1724 ASSERT(!dst.is(src2)); 1725 ASSERT(!dst.is(src2));
1725 if (!dst.is(src1)) { 1726 if (!dst.is(src1)) {
1726 movq(dst, src1); 1727 movq(dst, src1);
1727 } 1728 }
1728 subq(dst, src2); 1729 subq(dst, src2);
1729 Assert(no_overflow, "Smi subtraction overflow"); 1730 Assert(no_overflow, kSmiSubtractionOverflow);
1730 } 1731 }
1731 1732
1732 1733
1733 void MacroAssembler::SmiSub(Register dst, 1734 void MacroAssembler::SmiSub(Register dst,
1734 Register src1, 1735 Register src1,
1735 const Operand& src2, 1736 const Operand& src2,
1736 Label* on_not_smi_result, 1737 Label* on_not_smi_result,
1737 Label::Distance near_jump) { 1738 Label::Distance near_jump) {
1738 ASSERT_NOT_NULL(on_not_smi_result); 1739 ASSERT_NOT_NULL(on_not_smi_result);
1739 if (dst.is(src1)) { 1740 if (dst.is(src1)) {
(...skipping 11 matching lines...)
1751 1752
1752 void MacroAssembler::SmiSub(Register dst, 1753 void MacroAssembler::SmiSub(Register dst,
1753 Register src1, 1754 Register src1,
1754 const Operand& src2) { 1755 const Operand& src2) {
1755 // No overflow checking. Use only when it's known that 1756 // No overflow checking. Use only when it's known that
1756 // overflowing is impossible (e.g., subtracting two positive smis). 1757 // overflowing is impossible (e.g., subtracting two positive smis).
1757 if (!dst.is(src1)) { 1758 if (!dst.is(src1)) {
1758 movq(dst, src1); 1759 movq(dst, src1);
1759 } 1760 }
1760 subq(dst, src2); 1761 subq(dst, src2);
1761 Assert(no_overflow, "Smi subtraction overflow"); 1762 Assert(no_overflow, kSmiSubtractionOverflow);
1762 } 1763 }
1763 1764
1764 1765
1765 void MacroAssembler::SmiMul(Register dst, 1766 void MacroAssembler::SmiMul(Register dst,
1766 Register src1, 1767 Register src1,
1767 Register src2, 1768 Register src2,
1768 Label* on_not_smi_result, 1769 Label* on_not_smi_result,
1769 Label::Distance near_jump) { 1770 Label::Distance near_jump) {
1770 ASSERT(!dst.is(src2)); 1771 ASSERT(!dst.is(src2));
1771 ASSERT(!dst.is(kScratchRegister)); 1772 ASSERT(!dst.is(kScratchRegister));
(...skipping 376 matching lines...)
2148 Label::Distance near_jump) { 2149 Label::Distance near_jump) {
2149 ASSERT(!dst.is(kScratchRegister)); 2150 ASSERT(!dst.is(kScratchRegister));
2150 ASSERT(!src1.is(kScratchRegister)); 2151 ASSERT(!src1.is(kScratchRegister));
2151 ASSERT(!src2.is(kScratchRegister)); 2152 ASSERT(!src2.is(kScratchRegister));
2152 ASSERT(!dst.is(src1)); 2153 ASSERT(!dst.is(src1));
2153 ASSERT(!dst.is(src2)); 2154 ASSERT(!dst.is(src2));
2154 // Both operands must not be smis. 2155 // Both operands must not be smis.
2155 #ifdef DEBUG 2156 #ifdef DEBUG
2156 if (allow_stub_calls()) { // Check contains a stub call. 2157 if (allow_stub_calls()) { // Check contains a stub call.
2157 Condition not_both_smis = NegateCondition(CheckBothSmi(src1, src2)); 2158 Condition not_both_smis = NegateCondition(CheckBothSmi(src1, src2));
2158 Check(not_both_smis, "Both registers were smis in SelectNonSmi."); 2159 Check(not_both_smis, kBothRegistersWereSmisInSelectNonSmi);
2159 } 2160 }
2160 #endif 2161 #endif
2161 STATIC_ASSERT(kSmiTag == 0); 2162 STATIC_ASSERT(kSmiTag == 0);
2162 ASSERT_EQ(0, Smi::FromInt(0)); 2163 ASSERT_EQ(0, Smi::FromInt(0));
2163 movl(kScratchRegister, Immediate(kSmiTagMask)); 2164 movl(kScratchRegister, Immediate(kSmiTagMask));
2164 and_(kScratchRegister, src1); 2165 and_(kScratchRegister, src1);
2165 testl(kScratchRegister, src2); 2166 testl(kScratchRegister, src2);
2166 // If non-zero then both are smis. 2167 // If non-zero then both are smis.
2167 j(not_zero, on_not_smis, near_jump); 2168 j(not_zero, on_not_smis, near_jump);
2168 2169
(...skipping 808 matching lines...)
2977 Set(result_reg, 255); 2978 Set(result_reg, 255);
2978 bind(&done); 2979 bind(&done);
2979 } 2980 }
2980 2981
2981 2982
2982 void MacroAssembler::LoadUint32(XMMRegister dst, 2983 void MacroAssembler::LoadUint32(XMMRegister dst,
2983 Register src, 2984 Register src,
2984 XMMRegister scratch) { 2985 XMMRegister scratch) {
2985 if (FLAG_debug_code) { 2986 if (FLAG_debug_code) {
2986 cmpq(src, Immediate(0xffffffff)); 2987 cmpq(src, Immediate(0xffffffff));
2987 Assert(below_equal, "input GPR is expected to have upper32 cleared"); 2988 Assert(below_equal, kInputGPRIsExpectedToHaveUpper32Cleared);
2988 } 2989 }
2989 cvtqsi2sd(dst, src); 2990 cvtqsi2sd(dst, src);
2990 } 2991 }
2991 2992
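LoadUint32() above converts with cvtqsi2sd, which interprets the full 64-bit register as a signed integer; that is why the debug check insists the upper 32 bits of src are already clear. A zero-extended uint32 then converts to the correct non-negative double, whereas a stale or sign-extended upper half would not. A hypothetical host demonstration of the difference:

#include <cstdint>
#include <cstdio>

int main() {
  uint32_t u = 0xFFFFFFFFu;                          // 4294967295
  uint64_t zero_extended = u;                        // what the Assert guarantees
  double good = static_cast<double>(static_cast<int64_t>(zero_extended));
  int64_t sign_extended = static_cast<int32_t>(u);   // the broken case
  double bad = static_cast<double>(sign_extended);
  std::printf("%.1f vs %.1f\n", good, bad);          // 4294967295.0 vs -1.0
  return 0;
}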
2992 2993
2993 void MacroAssembler::LoadInstanceDescriptors(Register map, 2994 void MacroAssembler::LoadInstanceDescriptors(Register map,
2994 Register descriptors) { 2995 Register descriptors) {
2995 movq(descriptors, FieldOperand(map, Map::kDescriptorsOffset)); 2996 movq(descriptors, FieldOperand(map, Map::kDescriptorsOffset));
2996 } 2997 }
2997 2998
(...skipping 28 matching lines...)
3026 } 3027 }
3027 3028
3028 3029
3029 void MacroAssembler::AssertNumber(Register object) { 3030 void MacroAssembler::AssertNumber(Register object) {
3030 if (emit_debug_code()) { 3031 if (emit_debug_code()) {
3031 Label ok; 3032 Label ok;
3032 Condition is_smi = CheckSmi(object); 3033 Condition is_smi = CheckSmi(object);
3033 j(is_smi, &ok, Label::kNear); 3034 j(is_smi, &ok, Label::kNear);
3034 Cmp(FieldOperand(object, HeapObject::kMapOffset), 3035 Cmp(FieldOperand(object, HeapObject::kMapOffset),
3035 isolate()->factory()->heap_number_map()); 3036 isolate()->factory()->heap_number_map());
3036 Check(equal, "Operand is not a number"); 3037 Check(equal, kOperandIsNotANumber);
3037 bind(&ok); 3038 bind(&ok);
3038 } 3039 }
3039 } 3040 }
3040 3041
3041 3042
3042 void MacroAssembler::AssertNotSmi(Register object) { 3043 void MacroAssembler::AssertNotSmi(Register object) {
3043 if (emit_debug_code()) { 3044 if (emit_debug_code()) {
3044 Condition is_smi = CheckSmi(object); 3045 Condition is_smi = CheckSmi(object);
3045 Check(NegateCondition(is_smi), "Operand is a smi"); 3046 Check(NegateCondition(is_smi), kOperandIsASmi);
3046 } 3047 }
3047 } 3048 }
3048 3049
3049 3050
3050 void MacroAssembler::AssertSmi(Register object) { 3051 void MacroAssembler::AssertSmi(Register object) {
3051 if (emit_debug_code()) { 3052 if (emit_debug_code()) {
3052 Condition is_smi = CheckSmi(object); 3053 Condition is_smi = CheckSmi(object);
3053 Check(is_smi, "Operand is not a smi"); 3054 Check(is_smi, kOperandIsNotASmi);
3054 } 3055 }
3055 } 3056 }
3056 3057
3057 3058
3058 void MacroAssembler::AssertSmi(const Operand& object) { 3059 void MacroAssembler::AssertSmi(const Operand& object) {
3059 if (emit_debug_code()) { 3060 if (emit_debug_code()) {
3060 Condition is_smi = CheckSmi(object); 3061 Condition is_smi = CheckSmi(object);
3061 Check(is_smi, "Operand is not a smi"); 3062 Check(is_smi, kOperandIsNotASmi);
3062 } 3063 }
3063 } 3064 }
3064 3065
3065 3066
3066 void MacroAssembler::AssertZeroExtended(Register int32_register) { 3067 void MacroAssembler::AssertZeroExtended(Register int32_register) {
3067 if (emit_debug_code()) { 3068 if (emit_debug_code()) {
3068 ASSERT(!int32_register.is(kScratchRegister)); 3069 ASSERT(!int32_register.is(kScratchRegister));
3069 movq(kScratchRegister, 0x100000000l, RelocInfo::NONE64); 3070 movq(kScratchRegister, 0x100000000l, RelocInfo::NONE64);
3070 cmpq(kScratchRegister, int32_register); 3071 cmpq(kScratchRegister, int32_register);
3071 Check(above_equal, "32 bit value in register is not zero-extended"); 3072 Check(above_equal, k32BitValueInRegisterIsNotZeroExtended);
3072 } 3073 }
3073 } 3074 }
3074 3075
3075 3076
3076 void MacroAssembler::AssertString(Register object) { 3077 void MacroAssembler::AssertString(Register object) {
3077 if (emit_debug_code()) { 3078 if (emit_debug_code()) {
3078 testb(object, Immediate(kSmiTagMask)); 3079 testb(object, Immediate(kSmiTagMask));
3079 Check(not_equal, "Operand is a smi and not a string"); 3080 Check(not_equal, kOperandIsASmiAndNotAString);
3080 push(object); 3081 push(object);
3081 movq(object, FieldOperand(object, HeapObject::kMapOffset)); 3082 movq(object, FieldOperand(object, HeapObject::kMapOffset));
3082 CmpInstanceType(object, FIRST_NONSTRING_TYPE); 3083 CmpInstanceType(object, FIRST_NONSTRING_TYPE);
3083 pop(object); 3084 pop(object);
3084 Check(below, "Operand is not a string"); 3085 Check(below, kOperandIsNotAString);
3085 } 3086 }
3086 } 3087 }
3087 3088
3088 3089
3089 void MacroAssembler::AssertName(Register object) { 3090 void MacroAssembler::AssertName(Register object) {
3090 if (emit_debug_code()) { 3091 if (emit_debug_code()) {
3091 testb(object, Immediate(kSmiTagMask)); 3092 testb(object, Immediate(kSmiTagMask));
3092 Check(not_equal, "Operand is a smi and not a name"); 3093 Check(not_equal, kOperandIsASmiAndNotAName);
3093 push(object); 3094 push(object);
3094 movq(object, FieldOperand(object, HeapObject::kMapOffset)); 3095 movq(object, FieldOperand(object, HeapObject::kMapOffset));
3095 CmpInstanceType(object, LAST_NAME_TYPE); 3096 CmpInstanceType(object, LAST_NAME_TYPE);
3096 pop(object); 3097 pop(object);
3097 Check(below_equal, "Operand is not a name"); 3098 Check(below_equal, kOperandIsNotAName);
3098 } 3099 }
3099 } 3100 }
3100 3101
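AssertString() and AssertName() above share the same two-step shape: a testb against kSmiTagMask rules out smis (whose low bit is clear), then the instance type read from the object's map is compared against the relevant boundary (FIRST_NONSTRING_TYPE, LAST_NAME_TYPE). A host-side caricature of the tag test only, with the tag values assumed:

#include <cstdint>
#include <cstdio>

const uintptr_t kSmiTagMask = 1;   // assumed: low bit clear means smi

bool IsSmi(uintptr_t tagged) { return (tagged & kSmiTagMask) == 0; }

int main() {
  uintptr_t smi_tagged = 42;             // low bit clear: treated as a smi
  uintptr_t heap_tagged = 0x10000 | 1;   // low bit set: a heap object pointer
  // AssertString's Check(not_equal, kOperandIsASmiAndNotAString) trips on smis.
  std::printf("%d %d\n", IsSmi(smi_tagged), IsSmi(heap_tagged));  // 1 0
  return 0;
}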
3101 3102
3102 void MacroAssembler::AssertRootValue(Register src, 3103 void MacroAssembler::AssertRootValue(Register src,
3103 Heap::RootListIndex root_value_index, 3104 Heap::RootListIndex root_value_index,
3104 const char* message) { 3105 BailoutReason reason) {
3105 if (emit_debug_code()) { 3106 if (emit_debug_code()) {
3106 ASSERT(!src.is(kScratchRegister)); 3107 ASSERT(!src.is(kScratchRegister));
3107 LoadRoot(kScratchRegister, root_value_index); 3108 LoadRoot(kScratchRegister, root_value_index);
3108 cmpq(src, kScratchRegister); 3109 cmpq(src, kScratchRegister);
3109 Check(equal, message); 3110 Check(equal, reason);
3110 } 3111 }
3111 } 3112 }
3112 3113
3113 3114
3114 3115
3115 Condition MacroAssembler::IsObjectStringType(Register heap_object, 3116 Condition MacroAssembler::IsObjectStringType(Register heap_object,
3116 Register map, 3117 Register map,
3117 Register instance_type) { 3118 Register instance_type) {
3118 movq(map, FieldOperand(heap_object, HeapObject::kMapOffset)); 3119 movq(map, FieldOperand(heap_object, HeapObject::kMapOffset));
3119 movzxbl(instance_type, FieldOperand(map, Map::kInstanceTypeOffset)); 3120 movzxbl(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
(...skipping 330 matching lines...)
3450 movq(rbp, rsp); 3451 movq(rbp, rsp);
3451 push(rsi); // Context. 3452 push(rsi); // Context.
3452 Push(Smi::FromInt(type)); 3453 Push(Smi::FromInt(type));
3453 movq(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT); 3454 movq(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT);
3454 push(kScratchRegister); 3455 push(kScratchRegister);
3455 if (emit_debug_code()) { 3456 if (emit_debug_code()) {
3456 movq(kScratchRegister, 3457 movq(kScratchRegister,
3457 isolate()->factory()->undefined_value(), 3458 isolate()->factory()->undefined_value(),
3458 RelocInfo::EMBEDDED_OBJECT); 3459 RelocInfo::EMBEDDED_OBJECT);
3459 cmpq(Operand(rsp, 0), kScratchRegister); 3460 cmpq(Operand(rsp, 0), kScratchRegister);
3460 Check(not_equal, "code object not properly patched"); 3461 Check(not_equal, kCodeObjectNotProperlyPatched);
3461 } 3462 }
3462 } 3463 }
3463 3464
3464 3465
3465 void MacroAssembler::LeaveFrame(StackFrame::Type type) { 3466 void MacroAssembler::LeaveFrame(StackFrame::Type type) {
3466 if (emit_debug_code()) { 3467 if (emit_debug_code()) {
3467 Move(kScratchRegister, Smi::FromInt(type)); 3468 Move(kScratchRegister, Smi::FromInt(type));
3468 cmpq(Operand(rbp, StandardFrameConstants::kMarkerOffset), kScratchRegister); 3469 cmpq(Operand(rbp, StandardFrameConstants::kMarkerOffset), kScratchRegister);
3469 Check(equal, "stack frame types must match"); 3470 Check(equal, kStackFrameTypesMustMatch);
3470 } 3471 }
3471 movq(rsp, rbp); 3472 movq(rsp, rbp);
3472 pop(rbp); 3473 pop(rbp);
3473 } 3474 }
3474 3475
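EnterFrame() (above, partly folded) pushes a smi-encoded frame type marker along with the code object, and LeaveFrame() in debug builds re-reads the marker at kMarkerOffset to verify that the enter/leave pair matches. The same bookkeeping idea in a hypothetical portable sketch; the type names are illustrative only:

#include <cassert>
#include <vector>

enum class FrameType { INTERNAL, EXIT, CONSTRUCT };  // illustrative names

struct FrameStack {
  std::vector<FrameType> markers;
  void Enter(FrameType type) { markers.push_back(type); }  // push the marker
  void Leave(FrameType type) {
    // Corresponds to Check(equal, kStackFrameTypesMustMatch) in LeaveFrame().
    assert(!markers.empty() && markers.back() == type);
    markers.pop_back();
  }
};

int main() {
  FrameStack frames;
  frames.Enter(FrameType::INTERNAL);
  frames.Leave(FrameType::INTERNAL);  // leaving with EXIT here would assert
  return 0;
}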
3475 3476
3476 void MacroAssembler::EnterExitFramePrologue(bool save_rax) { 3477 void MacroAssembler::EnterExitFramePrologue(bool save_rax) {
3477 // Set up the frame structure on the stack. 3478 // Set up the frame structure on the stack.
3478 // All constants are relative to the frame pointer of the exit frame. 3479 // All constants are relative to the frame pointer of the exit frame.
3479 ASSERT(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize); 3480 ASSERT(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize);
(...skipping 125 matching lines...)
3605 Label same_contexts; 3606 Label same_contexts;
3606 3607
3607 ASSERT(!holder_reg.is(scratch)); 3608 ASSERT(!holder_reg.is(scratch));
3608 ASSERT(!scratch.is(kScratchRegister)); 3609 ASSERT(!scratch.is(kScratchRegister));
3609 // Load current lexical context from the stack frame. 3610 // Load current lexical context from the stack frame.
3610 movq(scratch, Operand(rbp, StandardFrameConstants::kContextOffset)); 3611 movq(scratch, Operand(rbp, StandardFrameConstants::kContextOffset));
3611 3612
3612 // When generating debug code, make sure the lexical context is set. 3613 // When generating debug code, make sure the lexical context is set.
3613 if (emit_debug_code()) { 3614 if (emit_debug_code()) {
3614 cmpq(scratch, Immediate(0)); 3615 cmpq(scratch, Immediate(0));
3615 Check(not_equal, "we should not have an empty lexical context"); 3616 Check(not_equal, kWeShouldNotHaveAnEmptyLexicalContext);
3616 } 3617 }
3617 // Load the native context of the current context. 3618 // Load the native context of the current context.
3618 int offset = 3619 int offset =
3619 Context::kHeaderSize + Context::GLOBAL_OBJECT_INDEX * kPointerSize; 3620 Context::kHeaderSize + Context::GLOBAL_OBJECT_INDEX * kPointerSize;
3620 movq(scratch, FieldOperand(scratch, offset)); 3621 movq(scratch, FieldOperand(scratch, offset));
3621 movq(scratch, FieldOperand(scratch, GlobalObject::kNativeContextOffset)); 3622 movq(scratch, FieldOperand(scratch, GlobalObject::kNativeContextOffset));
3622 3623
3623 // Check the context is a native context. 3624 // Check the context is a native context.
3624 if (emit_debug_code()) { 3625 if (emit_debug_code()) {
3625 Cmp(FieldOperand(scratch, HeapObject::kMapOffset), 3626 Cmp(FieldOperand(scratch, HeapObject::kMapOffset),
3626 isolate()->factory()->native_context_map()); 3627 isolate()->factory()->native_context_map());
3627 Check(equal, "JSGlobalObject::native_context should be a native context."); 3628 Check(equal, kJSGlobalObjectNativeContextShouldBeANativeContext);
3628 } 3629 }
3629 3630
3630 // Check if both contexts are the same. 3631 // Check if both contexts are the same.
3631 cmpq(scratch, FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset)); 3632 cmpq(scratch, FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
3632 j(equal, &same_contexts); 3633 j(equal, &same_contexts);
3633 3634
3634 // Compare security tokens. 3635 // Compare security tokens.
3635 // Check that the security token in the calling global object is 3636 // Check that the security token in the calling global object is
3636 // compatible with the security token in the receiving global 3637 // compatible with the security token in the receiving global
3637 // object. 3638 // object.
3638 3639
3639 // Check the context is a native context. 3640 // Check the context is a native context.
3640 if (emit_debug_code()) { 3641 if (emit_debug_code()) {
3641 // Preserve original value of holder_reg. 3642 // Preserve original value of holder_reg.
3642 push(holder_reg); 3643 push(holder_reg);
3643 movq(holder_reg, 3644 movq(holder_reg,
3644 FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset)); 3645 FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
3645 CompareRoot(holder_reg, Heap::kNullValueRootIndex); 3646 CompareRoot(holder_reg, Heap::kNullValueRootIndex);
3646 Check(not_equal, "JSGlobalProxy::context() should not be null."); 3647 Check(not_equal, kJSGlobalProxyContextShouldNotBeNull);
3647 3648
3648 // Read the first word and compare to native_context_map(), 3649 // Read the first word and compare to native_context_map(),
3649 movq(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset)); 3650 movq(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset));
3650 CompareRoot(holder_reg, Heap::kNativeContextMapRootIndex); 3651 CompareRoot(holder_reg, Heap::kNativeContextMapRootIndex);
3651 Check(equal, "JSGlobalObject::native_context should be a native context."); 3652 Check(equal, kJSGlobalObjectNativeContextShouldBeANativeContext);
3652 pop(holder_reg); 3653 pop(holder_reg);
3653 } 3654 }
3654 3655
3655 movq(kScratchRegister, 3656 movq(kScratchRegister,
3656 FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset)); 3657 FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
3657 int token_offset = 3658 int token_offset =
3658 Context::kHeaderSize + Context::SECURITY_TOKEN_INDEX * kPointerSize; 3659 Context::kHeaderSize + Context::SECURITY_TOKEN_INDEX * kPointerSize;
3659 movq(scratch, FieldOperand(scratch, token_offset)); 3660 movq(scratch, FieldOperand(scratch, token_offset));
3660 cmpq(scratch, FieldOperand(kScratchRegister, token_offset)); 3661 cmpq(scratch, FieldOperand(kScratchRegister, token_offset));
3661 j(not_equal, miss); 3662 j(not_equal, miss);
(...skipping 125 matching lines...)
3787 AllocationUtils::GetAllocationTopReference(isolate(), flags); 3788 AllocationUtils::GetAllocationTopReference(isolate(), flags);
3788 3789
3789 // Just return if allocation top is already known. 3790 // Just return if allocation top is already known.
3790 if ((flags & RESULT_CONTAINS_TOP) != 0) { 3791 if ((flags & RESULT_CONTAINS_TOP) != 0) {
3791 // No use of scratch if allocation top is provided. 3792 // No use of scratch if allocation top is provided.
3792 ASSERT(!scratch.is_valid()); 3793 ASSERT(!scratch.is_valid());
3793 #ifdef DEBUG 3794 #ifdef DEBUG
3794 // Assert that result actually contains top on entry. 3795 // Assert that result actually contains top on entry.
3795 Operand top_operand = ExternalOperand(allocation_top); 3796 Operand top_operand = ExternalOperand(allocation_top);
3796 cmpq(result, top_operand); 3797 cmpq(result, top_operand);
3797 Check(equal, "Unexpected allocation top"); 3798 Check(equal, kUnexpectedAllocationTop);
3798 #endif 3799 #endif
3799 return; 3800 return;
3800 } 3801 }
3801 3802
3802 // Move address of new object to result. Use scratch register if available, 3803 // Move address of new object to result. Use scratch register if available,
3803 // and keep address in scratch until call to UpdateAllocationTopHelper. 3804 // and keep address in scratch until call to UpdateAllocationTopHelper.
3804 if (scratch.is_valid()) { 3805 if (scratch.is_valid()) {
3805 LoadAddress(scratch, allocation_top); 3806 LoadAddress(scratch, allocation_top);
3806 movq(result, Operand(scratch, 0)); 3807 movq(result, Operand(scratch, 0));
3807 } else { 3808 } else {
3808 Load(result, allocation_top); 3809 Load(result, allocation_top);
3809 } 3810 }
3810 } 3811 }
3811 3812
3812 3813
3813 void MacroAssembler::UpdateAllocationTopHelper(Register result_end, 3814 void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
3814 Register scratch, 3815 Register scratch,
3815 AllocationFlags flags) { 3816 AllocationFlags flags) {
3816 if (emit_debug_code()) { 3817 if (emit_debug_code()) {
3817 testq(result_end, Immediate(kObjectAlignmentMask)); 3818 testq(result_end, Immediate(kObjectAlignmentMask));
3818 Check(zero, "Unaligned allocation in new space"); 3819 Check(zero, kUnalignedAllocationInNewSpace);
3819 } 3820 }
3820 3821
3821 ExternalReference allocation_top = 3822 ExternalReference allocation_top =
3822 AllocationUtils::GetAllocationTopReference(isolate(), flags); 3823 AllocationUtils::GetAllocationTopReference(isolate(), flags);
3823 3824
3824 // Update new top. 3825 // Update new top.
3825 if (scratch.is_valid()) { 3826 if (scratch.is_valid()) {
3826 // Scratch already contains address of allocation top. 3827 // Scratch already contains address of allocation top.
3827 movq(Operand(scratch, 0), result_end); 3828 movq(Operand(scratch, 0), result_end);
3828 } else { 3829 } else {
(...skipping 26 matching lines...)
3855 } 3856 }
3856 ASSERT(!result.is(result_end)); 3857 ASSERT(!result.is(result_end));
3857 3858
3858 // Load address of new object into result. 3859 // Load address of new object into result.
3859 LoadAllocationTopHelper(result, scratch, flags); 3860 LoadAllocationTopHelper(result, scratch, flags);
3860 3861
3861 // Align the next allocation. Storing the filler map without checking top is 3862 // Align the next allocation. Storing the filler map without checking top is
3862 // always safe because the limit of the heap is always aligned. 3863 // always safe because the limit of the heap is always aligned.
3863 if (((flags & DOUBLE_ALIGNMENT) != 0) && FLAG_debug_code) { 3864 if (((flags & DOUBLE_ALIGNMENT) != 0) && FLAG_debug_code) {
3864 testq(result, Immediate(kDoubleAlignmentMask)); 3865 testq(result, Immediate(kDoubleAlignmentMask));
3865 Check(zero, "Allocation is not double aligned"); 3866 Check(zero, kAllocationIsNotDoubleAligned);
3866 } 3867 }
3867 3868
3868 // Calculate new top and bail out if new space is exhausted. 3869 // Calculate new top and bail out if new space is exhausted.
3869 ExternalReference allocation_limit = 3870 ExternalReference allocation_limit =
3870 AllocationUtils::GetAllocationLimitReference(isolate(), flags); 3871 AllocationUtils::GetAllocationLimitReference(isolate(), flags);
3871 3872
3872 Register top_reg = result_end.is_valid() ? result_end : result; 3873 Register top_reg = result_end.is_valid() ? result_end : result;
3873 3874
3874 if (!top_reg.is(result)) { 3875 if (!top_reg.is(result)) {
3875 movq(top_reg, result); 3876 movq(top_reg, result);
(...skipping 58 matching lines...)
3934 } 3935 }
3935 ASSERT(!result.is(result_end)); 3936 ASSERT(!result.is(result_end));
3936 3937
3937 // Load address of new object into result. 3938 // Load address of new object into result.
3938 LoadAllocationTopHelper(result, scratch, flags); 3939 LoadAllocationTopHelper(result, scratch, flags);
3939 3940
3940 // Align the next allocation. Storing the filler map without checking top is 3941 // Align the next allocation. Storing the filler map without checking top is
3941 // always safe because the limit of the heap is always aligned. 3942 // always safe because the limit of the heap is always aligned.
3942 if (((flags & DOUBLE_ALIGNMENT) != 0) && FLAG_debug_code) { 3943 if (((flags & DOUBLE_ALIGNMENT) != 0) && FLAG_debug_code) {
3943 testq(result, Immediate(kDoubleAlignmentMask)); 3944 testq(result, Immediate(kDoubleAlignmentMask));
3944 Check(zero, "Allocation is not double aligned"); 3945 Check(zero, kAllocationIsNotDoubleAligned);
3945 } 3946 }
3946 3947
3947 // Calculate new top and bail out if new space is exhausted. 3948 // Calculate new top and bail out if new space is exhausted.
3948 ExternalReference allocation_limit = 3949 ExternalReference allocation_limit =
3949 AllocationUtils::GetAllocationLimitReference(isolate(), flags); 3950 AllocationUtils::GetAllocationLimitReference(isolate(), flags);
3950 if (!object_size.is(result_end)) { 3951 if (!object_size.is(result_end)) {
3951 movq(result_end, object_size); 3952 movq(result_end, object_size);
3952 } 3953 }
3953 addq(result_end, result); 3954 addq(result_end, result);
3954 j(carry, gc_required); 3955 j(carry, gc_required);
(...skipping 13 matching lines...)
3968 3969
3969 void MacroAssembler::UndoAllocationInNewSpace(Register object) { 3970 void MacroAssembler::UndoAllocationInNewSpace(Register object) {
3970 ExternalReference new_space_allocation_top = 3971 ExternalReference new_space_allocation_top =
3971 ExternalReference::new_space_allocation_top_address(isolate()); 3972 ExternalReference::new_space_allocation_top_address(isolate());
3972 3973
3973 // Make sure the object has no tag before resetting top. 3974 // Make sure the object has no tag before resetting top.
3974 and_(object, Immediate(~kHeapObjectTagMask)); 3975 and_(object, Immediate(~kHeapObjectTagMask));
3975 Operand top_operand = ExternalOperand(new_space_allocation_top); 3976 Operand top_operand = ExternalOperand(new_space_allocation_top);
3976 #ifdef DEBUG 3977 #ifdef DEBUG
3977 cmpq(object, top_operand); 3978 cmpq(object, top_operand);
3978 Check(below, "Undo allocation of non allocated memory"); 3979 Check(below, kUndoAllocationOfNonAllocatedMemory);
3979 #endif 3980 #endif
3980 movq(top_operand, object); 3981 movq(top_operand, object);
3981 } 3982 }
3982 3983
3983 3984
3984 void MacroAssembler::AllocateHeapNumber(Register result, 3985 void MacroAssembler::AllocateHeapNumber(Register result,
3985 Register scratch, 3986 Register scratch,
3986 Label* gc_required) { 3987 Label* gc_required) {
3987 // Allocate heap number in new space. 3988 // Allocate heap number in new space.
3988 Allocate(HeapNumber::kSize, result, scratch, no_reg, gc_required, TAG_OBJECT); 3989 Allocate(HeapNumber::kSize, result, scratch, no_reg, gc_required, TAG_OBJECT);
(...skipping 169 matching lines...)
4158 // The cld() instruction must have been emitted, to set the direction flag(), 4159 // The cld() instruction must have been emitted, to set the direction flag(),
4159 // before calling this function. 4160 // before calling this function.
4160 void MacroAssembler::CopyBytes(Register destination, 4161 void MacroAssembler::CopyBytes(Register destination,
4161 Register source, 4162 Register source,
4162 Register length, 4163 Register length,
4163 int min_length, 4164 int min_length,
4164 Register scratch) { 4165 Register scratch) {
4165 ASSERT(min_length >= 0); 4166 ASSERT(min_length >= 0);
4166 if (emit_debug_code()) { 4167 if (emit_debug_code()) {
4167 cmpl(length, Immediate(min_length)); 4168 cmpl(length, Immediate(min_length));
4168 Assert(greater_equal, "Invalid min_length"); 4169 Assert(greater_equal, kInvalidMinLength);
4169 } 4170 }
4170 Label loop, done, short_string, short_loop; 4171 Label loop, done, short_string, short_loop;
4171 4172
4172 const int kLongStringLimit = 20; 4173 const int kLongStringLimit = 20;
4173 if (min_length <= kLongStringLimit) { 4174 if (min_length <= kLongStringLimit) {
4174 cmpl(length, Immediate(kLongStringLimit)); 4175 cmpl(length, Immediate(kLongStringLimit));
4175 j(less_equal, &short_string); 4176 j(less_equal, &short_string);
4176 } 4177 }
4177 4178
4178 ASSERT(source.is(rsi)); 4179 ASSERT(source.is(rsi));
(...skipping 63 matching lines...)
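CopyBytes() above branches on the 20-byte kLongStringLimit: copies at or below the limit fall through to a simple byte loop (in the folded part), while longer ones take the wide rep-movs-style fast path that requires the source in rsi. A rough portable sketch of the same split; the helper name and the memcpy stand-in are assumptions:

#include <cassert>
#include <cstddef>
#include <cstring>

void CopyBytesSketch(char* dst, const char* src, size_t length,
                     size_t min_length) {
  assert(length >= min_length);     // mirrors Assert(greater_equal, kInvalidMinLength)
  const size_t kLongStringLimit = 20;
  if (length <= kLongStringLimit) {
    for (size_t i = 0; i < length; ++i) dst[i] = src[i];  // short-copy loop
  } else {
    std::memcpy(dst, src, length);  // stands in for the rep movsq fast path
  }
}

int main() {
  char buf[8] = {0};
  CopyBytesSketch(buf, "hello", 6, 0);  // copies the five chars plus the NUL
  return 0;
}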
4242 movq(dst, rsi); 4243 movq(dst, rsi);
4243 } 4244 }
4244 4245
4245 // We should not have found a with context by walking the context 4246 // We should not have found a with context by walking the context
4246 // chain (i.e., the static scope chain and runtime context chain do 4247 // chain (i.e., the static scope chain and runtime context chain do
4247 // not agree). A variable occurring in such a scope should have 4248 // not agree). A variable occurring in such a scope should have
4248 // slot type LOOKUP and not CONTEXT. 4249 // slot type LOOKUP and not CONTEXT.
4249 if (emit_debug_code()) { 4250 if (emit_debug_code()) {
4250 CompareRoot(FieldOperand(dst, HeapObject::kMapOffset), 4251 CompareRoot(FieldOperand(dst, HeapObject::kMapOffset),
4251 Heap::kWithContextMapRootIndex); 4252 Heap::kWithContextMapRootIndex);
4252 Check(not_equal, "Variable resolved to with context."); 4253 Check(not_equal, kVariableResolvedToWithContext);
4253 } 4254 }
4254 } 4255 }
4255 4256
4256 4257
4257 void MacroAssembler::LoadTransitionedArrayMapConditional( 4258 void MacroAssembler::LoadTransitionedArrayMapConditional(
4258 ElementsKind expected_kind, 4259 ElementsKind expected_kind,
4259 ElementsKind transitioned_kind, 4260 ElementsKind transitioned_kind,
4260 Register map_in_out, 4261 Register map_in_out,
4261 Register scratch, 4262 Register scratch,
4262 Label* no_map_match) { 4263 Label* no_map_match) {
(...skipping 70 matching lines...)
4333 4334
4334 void MacroAssembler::LoadGlobalFunctionInitialMap(Register function, 4335 void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
4335 Register map) { 4336 Register map) {
4336 // Load the initial map. The global functions all have initial maps. 4337 // Load the initial map. The global functions all have initial maps.
4337 movq(map, FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset)); 4338 movq(map, FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
4338 if (emit_debug_code()) { 4339 if (emit_debug_code()) {
4339 Label ok, fail; 4340 Label ok, fail;
4340 CheckMap(map, isolate()->factory()->meta_map(), &fail, DO_SMI_CHECK); 4341 CheckMap(map, isolate()->factory()->meta_map(), &fail, DO_SMI_CHECK);
4341 jmp(&ok); 4342 jmp(&ok);
4342 bind(&fail); 4343 bind(&fail);
4343 Abort("Global functions must have initial map"); 4344 Abort(kGlobalFunctionsMustHaveInitialMap);
4344 bind(&ok); 4345 bind(&ok);
4345 } 4346 }
4346 } 4347 }
4347 4348
4348 4349
4349 int MacroAssembler::ArgumentStackSlotsForCFunctionCall(int num_arguments) { 4350 int MacroAssembler::ArgumentStackSlotsForCFunctionCall(int num_arguments) {
4350 // On Windows 64 stack slots are reserved by the caller for all arguments 4351 // On Windows 64 stack slots are reserved by the caller for all arguments
4351 // including the ones passed in registers, and space is always allocated for 4352 // including the ones passed in registers, and space is always allocated for
4352 // the four register arguments even if the function takes fewer than four 4353 // the four register arguments even if the function takes fewer than four
4353 // arguments. 4354 // arguments.
(...skipping 340 matching lines...)
4694 j(greater, &no_memento_available); 4695 j(greater, &no_memento_available);
4695 CompareRoot(MemOperand(scratch_reg, -AllocationMemento::kSize), 4696 CompareRoot(MemOperand(scratch_reg, -AllocationMemento::kSize),
4696 Heap::kAllocationMementoMapRootIndex); 4697 Heap::kAllocationMementoMapRootIndex);
4697 bind(&no_memento_available); 4698 bind(&no_memento_available);
4698 } 4699 }
4699 4700
4700 4701
4701 } } // namespace v8::internal 4702 } } // namespace v8::internal
4702 4703
4703 #endif // V8_TARGET_ARCH_X64 4704 #endif // V8_TARGET_ARCH_X64