OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 27 matching lines...)
38 #include "debug.h" | 38 #include "debug.h" |
39 #include "heap.h" | 39 #include "heap.h" |
40 #include "isolate-inl.h" | 40 #include "isolate-inl.h" |
41 | 41 |
42 namespace v8 { | 42 namespace v8 { |
43 namespace internal { | 43 namespace internal { |
44 | 44 |
45 MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size) | 45 MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size) |
46 : Assembler(arg_isolate, buffer, size), | 46 : Assembler(arg_isolate, buffer, size), |
47 generating_stub_(false), | 47 generating_stub_(false), |
48 allow_stub_calls_(true), | |
49 has_frame_(false), | 48 has_frame_(false), |
50 root_array_available_(true) { | 49 root_array_available_(true) { |
51 if (isolate() != NULL) { | 50 if (isolate() != NULL) { |
52 code_object_ = Handle<Object>(isolate()->heap()->undefined_value(), | 51 code_object_ = Handle<Object>(isolate()->heap()->undefined_value(), |
53 isolate()); | 52 isolate()); |
54 } | 53 } |
55 } | 54 } |
56 | 55 |
57 | 56 |
58 static const int kInvalidRootRegisterDelta = -1; | 57 static const int kInvalidRootRegisterDelta = -1; |
(...skipping 489 matching lines...)
548 } | 547 } |
549 | 548 |
550 | 549 |
551 void MacroAssembler::CallStub(CodeStub* stub, TypeFeedbackId ast_id) { | 550 void MacroAssembler::CallStub(CodeStub* stub, TypeFeedbackId ast_id) { |
552 ASSERT(AllowThisStubCall(stub)); // Calls are not allowed in some stubs | 551 ASSERT(AllowThisStubCall(stub)); // Calls are not allowed in some stubs |
553 Call(stub->GetCode(isolate()), RelocInfo::CODE_TARGET, ast_id); | 552 Call(stub->GetCode(isolate()), RelocInfo::CODE_TARGET, ast_id); |
554 } | 553 } |
555 | 554 |
556 | 555 |
557 void MacroAssembler::TailCallStub(CodeStub* stub) { | 556 void MacroAssembler::TailCallStub(CodeStub* stub) { |
558 ASSERT(allow_stub_calls_ || | |
559 stub->CompilingCallsToThisStubIsGCSafe(isolate())); | |
560 Jump(stub->GetCode(isolate()), RelocInfo::CODE_TARGET); | 557 Jump(stub->GetCode(isolate()), RelocInfo::CODE_TARGET); |
561 } | 558 } |
562 | 559 |
563 | 560 |
564 void MacroAssembler::StubReturn(int argc) { | 561 void MacroAssembler::StubReturn(int argc) { |
565 ASSERT(argc >= 1 && generating_stub()); | 562 ASSERT(argc >= 1 && generating_stub()); |
566 ret((argc - 1) * kPointerSize); | 563 ret((argc - 1) * kPointerSize); |
567 } | 564 } |
568 | 565 |
569 | 566 |
570 bool MacroAssembler::AllowThisStubCall(CodeStub* stub) { | 567 bool MacroAssembler::AllowThisStubCall(CodeStub* stub) { |
571 if (!has_frame_ && stub->SometimesSetsUpAFrame()) return false; | 568 return has_frame_ || !stub->SometimesSetsUpAFrame(); |
572 return allow_stub_calls_ || stub->CompilingCallsToThisStubIsGCSafe(isolate()); | |
573 } | 569 } |
574 | 570 |
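A note on the stub-call hunks above: allow_stub_calls_ was initialized to true and is no longer consulted anywhere in this patch, so the old two-part AllowThisStubCall check collapses to a single frame predicate, and TailCallStub loses its assertion entirely. A minimal standalone sketch (plain C++ with a hypothetical ToyStub stand-in, not the real CodeStub) checking that the old and new predicates agree once allow_stub_calls_ is taken as always true:

    // Standalone sketch, not V8 code: ToyStub is a hypothetical stand-in
    // used only to compare the two predicates.
    #include <cassert>

    struct ToyStub {
      bool sometimes_sets_up_a_frame;  // models stub->SometimesSetsUpAFrame()
      bool gc_safe;                    // models CompilingCallsToThisStubIsGCSafe()
    };

    // Old predicate, with allow_stub_calls_ fixed to true (its only remaining value).
    static bool AllowThisStubCallOld(bool has_frame, const ToyStub& stub) {
      if (!has_frame && stub.sometimes_sets_up_a_frame) return false;
      return true /* allow_stub_calls_ */ || stub.gc_safe;
    }

    // New predicate from this change.
    static bool AllowThisStubCallNew(bool has_frame, const ToyStub& stub) {
      return has_frame || !stub.sometimes_sets_up_a_frame;
    }

    int main() {
      for (int has_frame = 0; has_frame <= 1; ++has_frame) {
        for (int frame = 0; frame <= 1; ++frame) {
          for (int safe = 0; safe <= 1; ++safe) {
            ToyStub stub{frame != 0, safe != 0};
            assert(AllowThisStubCallOld(has_frame != 0, stub) ==
                   AllowThisStubCallNew(has_frame != 0, stub));
          }
        }
      }
      return 0;
    }

Every combination of has_frame_ and SometimesSetsUpAFrame() gives the same answer under that assumption, which is presumably why CompilingCallsToThisStubIsGCSafe() drops out of the check.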
575 | 571 |
576 void MacroAssembler::IllegalOperation(int num_arguments) { | 572 void MacroAssembler::IllegalOperation(int num_arguments) { |
577 if (num_arguments > 0) { | 573 if (num_arguments > 0) { |
578 addq(rsp, Immediate(num_arguments * kPointerSize)); | 574 addq(rsp, Immediate(num_arguments * kPointerSize)); |
579 } | 575 } |
580 LoadRoot(rax, Heap::kUndefinedValueRootIndex); | 576 LoadRoot(rax, Heap::kUndefinedValueRootIndex); |
581 } | 577 } |
582 | 578 |
(...skipping 459 matching lines...)
1042 } | 1038 } |
1043 LoadSmiConstant(kScratchRegister, source); | 1039 LoadSmiConstant(kScratchRegister, source); |
1044 return kScratchRegister; | 1040 return kScratchRegister; |
1045 } | 1041 } |
1046 | 1042 |
1047 | 1043 |
1048 void MacroAssembler::LoadSmiConstant(Register dst, Smi* source) { | 1044 void MacroAssembler::LoadSmiConstant(Register dst, Smi* source) { |
1049 if (emit_debug_code()) { | 1045 if (emit_debug_code()) { |
1050 movq(dst, Smi::FromInt(kSmiConstantRegisterValue), RelocInfo::NONE64); | 1046 movq(dst, Smi::FromInt(kSmiConstantRegisterValue), RelocInfo::NONE64); |
1051 cmpq(dst, kSmiConstantRegister); | 1047 cmpq(dst, kSmiConstantRegister); |
1052 if (allow_stub_calls()) { | 1048 Assert(equal, kUninitializedKSmiConstantRegister); |
1053 Assert(equal, kUninitializedKSmiConstantRegister); | |
1054 } else { | |
1055 Label ok; | |
1056 j(equal, &ok, Label::kNear); | |
1057 int3(); | |
1058 bind(&ok); | |
1059 } | |
1060 } | 1049 } |
1061 int value = source->value(); | 1050 int value = source->value(); |
1062 if (value == 0) { | 1051 if (value == 0) { |
1063 xorl(dst, dst); | 1052 xorl(dst, dst); |
1064 return; | 1053 return; |
1065 } | 1054 } |
1066 bool negative = value < 0; | 1055 bool negative = value < 0; |
1067 unsigned int uvalue = negative ? -value : value; | 1056 unsigned int uvalue = negative ? -value : value; |
1068 | 1057 |
1069 switch (uvalue) { | 1058 switch (uvalue) { |
(...skipping 40 matching lines...)
1110 } | 1099 } |
1111 shl(dst, Immediate(kSmiShift)); | 1100 shl(dst, Immediate(kSmiShift)); |
1112 } | 1101 } |
1113 | 1102 |
1114 | 1103 |
1115 void MacroAssembler::Integer32ToSmiField(const Operand& dst, Register src) { | 1104 void MacroAssembler::Integer32ToSmiField(const Operand& dst, Register src) { |
1116 if (emit_debug_code()) { | 1105 if (emit_debug_code()) { |
1117 testb(dst, Immediate(0x01)); | 1106 testb(dst, Immediate(0x01)); |
1118 Label ok; | 1107 Label ok; |
1119 j(zero, &ok, Label::kNear); | 1108 j(zero, &ok, Label::kNear); |
1120 if (allow_stub_calls()) { | 1109 Abort(kInteger32ToSmiFieldWritingToNonSmiLocation); |
1121 Abort(kInteger32ToSmiFieldWritingToNonSmiLocation); | |
1122 } else { | |
1123 int3(); | |
1124 } | |
1125 bind(&ok); | 1110 bind(&ok); |
1126 } | 1111 } |
1127 ASSERT(kSmiShift % kBitsPerByte == 0); | 1112 ASSERT(kSmiShift % kBitsPerByte == 0); |
1128 movl(Operand(dst, kSmiShift / kBitsPerByte), src); | 1113 movl(Operand(dst, kSmiShift / kBitsPerByte), src); |
1129 } | 1114 } |
1130 | 1115 |
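Two notes on the debug paths above: with allow_stub_calls_ gone, emit_debug_code() now always emits the Assert/Abort form rather than falling back to a raw int3 breakpoint, and the store itself is unchanged. For the store, here is a standalone sketch (plain C++, not V8 code) of what Integer32ToSmiField's movl amounts to at run time, assuming the x64 layout where kSmiShift == 32 and kBitsPerByte == 8, so the 32-bit payload lands in the upper half of the 64-bit smi slot:

    // Standalone sketch, not V8 code; assumes little-endian x64 and kSmiShift == 32.
    #include <cassert>
    #include <cstdint>
    #include <cstring>

    int main() {
      const int kSmiShift = 32;    // x64 smi layout assumed here
      int64_t field = 0;           // the smi slot, initially holding Smi::FromInt(0)
      int32_t src = -42;

      // The debug check: the slot must currently hold a smi,
      // i.e. its low tag bit must be clear.
      assert((field & 1) == 0);

      // movl(Operand(dst, kSmiShift / kBitsPerByte), src): write the 32-bit
      // payload directly into the upper half of the 64-bit slot.
      std::memcpy(reinterpret_cast<char*>(&field) + kSmiShift / 8,
                  &src, sizeof(src));

      // The slot now holds the smi encoding of src.
      assert(field == static_cast<int64_t>(src) * (int64_t{1} << kSmiShift));
      return 0;
    }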
1131 | 1116 |
1132 void MacroAssembler::Integer64PlusConstantToSmi(Register dst, | 1117 void MacroAssembler::Integer64PlusConstantToSmi(Register dst, |
1133 Register src, | 1118 Register src, |
1134 int constant) { | 1119 int constant) { |
(...skipping 1040 matching lines...)
2175 Register src2, | 2160 Register src2, |
2176 Label* on_not_smis, | 2161 Label* on_not_smis, |
2177 Label::Distance near_jump) { | 2162 Label::Distance near_jump) { |
2178 ASSERT(!dst.is(kScratchRegister)); | 2163 ASSERT(!dst.is(kScratchRegister)); |
2179 ASSERT(!src1.is(kScratchRegister)); | 2164 ASSERT(!src1.is(kScratchRegister)); |
2180 ASSERT(!src2.is(kScratchRegister)); | 2165 ASSERT(!src2.is(kScratchRegister)); |
2181 ASSERT(!dst.is(src1)); | 2166 ASSERT(!dst.is(src1)); |
2182 ASSERT(!dst.is(src2)); | 2167 ASSERT(!dst.is(src2)); |
2183 // Both operands must not be smis. | 2168 // Both operands must not be smis. |
2184 #ifdef DEBUG | 2169 #ifdef DEBUG |
2185 if (allow_stub_calls()) { // Check contains a stub call. | 2170 Condition not_both_smis = NegateCondition(CheckBothSmi(src1, src2)); |
2186 Condition not_both_smis = NegateCondition(CheckBothSmi(src1, src2)); | 2171 Check(not_both_smis, kBothRegistersWereSmisInSelectNonSmi); |
2187 Check(not_both_smis, kBothRegistersWereSmisInSelectNonSmi); | |
2188 } | |
2189 #endif | 2172 #endif |
2190 STATIC_ASSERT(kSmiTag == 0); | 2173 STATIC_ASSERT(kSmiTag == 0); |
2191 ASSERT_EQ(0, Smi::FromInt(0)); | 2174 ASSERT_EQ(0, Smi::FromInt(0)); |
2192 movl(kScratchRegister, Immediate(kSmiTagMask)); | 2175 movl(kScratchRegister, Immediate(kSmiTagMask)); |
2193 and_(kScratchRegister, src1); | 2176 and_(kScratchRegister, src1); |
2194 testl(kScratchRegister, src2); | 2177 testl(kScratchRegister, src2); |
2195 // If non-zero then both are smis. | 2178 // If non-zero then both are smis. |
2196 j(not_zero, on_not_smis, near_jump); | 2179 j(not_zero, on_not_smis, near_jump); |
2197 | 2180 |
2198 // Exactly one operand is a smi. | 2181 // Exactly one operand is a smi. |
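Since the DEBUG-only Check in SelectNonSmi now runs unconditionally, it may help to spell out what the masked test it guards actually computes. A standalone sketch of the tag-bit arithmetic (plain C++ with hypothetical helper names, not V8 code), assuming kSmiTag == 0 and kSmiTagMask == 1 as the STATIC_ASSERT above implies, so a smi has a clear low bit and a heap object a set one:

    // Standalone sketch, not V8 code.
    #include <cassert>
    #include <cstdint>

    // Returns the value the testl above sets its flags from.
    static uint64_t SelectNonSmiTest(uint64_t src1, uint64_t src2) {
      uint64_t scratch = 1;    // movl(kScratchRegister, Immediate(kSmiTagMask))
      scratch &= src1;         // and_(kScratchRegister, src1)
      return scratch & src2;   // testl(kScratchRegister, src2)
    }

    int main() {
      const uint64_t smi = 7ull << 32;      // x64 smi: payload in the upper half, low bit 0
      const uint64_t heap_object = 0x1001;  // tagged pointer: low bit 1

      // Non-zero only when neither register holds a smi -- the case that
      // branches to on_not_smis.
      assert(SelectNonSmiTest(heap_object, heap_object) != 0);
      assert(SelectNonSmiTest(smi, heap_object) == 0);
      assert(SelectNonSmiTest(heap_object, smi) == 0);
      return 0;
    }

Combined with the debug Check that both operands are not smis, a zero result leaves exactly one smi operand, which is what the code that follows relies on.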
(...skipping 2856 matching lines...)
5055 j(equal, found); | 5038 j(equal, found); |
5056 movq(current, FieldOperand(current, Map::kPrototypeOffset)); | 5039 movq(current, FieldOperand(current, Map::kPrototypeOffset)); |
5057 CompareRoot(current, Heap::kNullValueRootIndex); | 5040 CompareRoot(current, Heap::kNullValueRootIndex); |
5058 j(not_equal, &loop_again); | 5041 j(not_equal, &loop_again); |
5059 } | 5042 } |
5060 | 5043 |
5061 | 5044 |
5062 } } // namespace v8::internal | 5045 } } // namespace v8::internal |
5063 | 5046 |
5064 #endif // V8_TARGET_ARCH_X64 | 5047 #endif // V8_TARGET_ARCH_X64 |