| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 145 matching lines...) | |
| 156 } | 156 } |
| 157 // Size of movq(destination, src); | 157 // Size of movq(destination, src); |
| 158 return Assembler::kMoveAddressIntoScratchRegisterInstructionLength; | 158 return Assembler::kMoveAddressIntoScratchRegisterInstructionLength; |
| 159 } | 159 } |
| 160 | 160 |
| 161 | 161 |
| 162 void MacroAssembler::PushAddress(ExternalReference source) { | 162 void MacroAssembler::PushAddress(ExternalReference source) { |
| 163 int64_t address = reinterpret_cast<int64_t>(source.address()); | 163 int64_t address = reinterpret_cast<int64_t>(source.address()); |
| 164 if (is_int32(address) && !Serializer::enabled()) { | 164 if (is_int32(address) && !Serializer::enabled()) { |
| 165 if (emit_debug_code()) { | 165 if (emit_debug_code()) { |
| 166 movq(kScratchRegister, BitCast<int64_t>(kZapValue), RelocInfo::NONE64); | 166 movq(kScratchRegister, kZapValue, RelocInfo::NONE64); |
| 167 } | 167 } |
| 168 push(Immediate(static_cast<int32_t>(address))); | 168 push(Immediate(static_cast<int32_t>(address))); |
| 169 return; | 169 return; |
| 170 } | 170 } |
| 171 LoadAddress(kScratchRegister, source); | 171 LoadAddress(kScratchRegister, source); |
| 172 push(kScratchRegister); | 172 push(kScratchRegister); |
| 173 } | 173 } |
| 174 | 174 |
| 175 | 175 |
| 176 void MacroAssembler::LoadRoot(Register destination, Heap::RootListIndex index) { | 176 void MacroAssembler::LoadRoot(Register destination, Heap::RootListIndex index) { |
| (...skipping 104 matching lines...) | |
| 281 movq(scratch, ExternalReference::new_space_mask(isolate())); | 281 movq(scratch, ExternalReference::new_space_mask(isolate())); |
| 282 and_(scratch, object); | 282 and_(scratch, object); |
| 283 } | 283 } |
| 284 movq(kScratchRegister, ExternalReference::new_space_start(isolate())); | 284 movq(kScratchRegister, ExternalReference::new_space_start(isolate())); |
| 285 cmpq(scratch, kScratchRegister); | 285 cmpq(scratch, kScratchRegister); |
| 286 j(cc, branch, distance); | 286 j(cc, branch, distance); |
| 287 } else { | 287 } else { |
| 288 ASSERT(is_int32(static_cast<int64_t>(isolate()->heap()->NewSpaceMask()))); | 288 ASSERT(is_int32(static_cast<int64_t>(isolate()->heap()->NewSpaceMask()))); |
| 289 intptr_t new_space_start = | 289 intptr_t new_space_start = |
| 290 reinterpret_cast<intptr_t>(isolate()->heap()->NewSpaceStart()); | 290 reinterpret_cast<intptr_t>(isolate()->heap()->NewSpaceStart()); |
| 291 movq(kScratchRegister, -new_space_start, RelocInfo::NONE64); | 291 movq(kScratchRegister, reinterpret_cast<Address>(-new_space_start), |
| | 292 RelocInfo::NONE64); |
| 292 if (scratch.is(object)) { | 293 if (scratch.is(object)) { |
| 293 addq(scratch, kScratchRegister); | 294 addq(scratch, kScratchRegister); |
| 294 } else { | 295 } else { |
| 295 lea(scratch, Operand(object, kScratchRegister, times_1, 0)); | 296 lea(scratch, Operand(object, kScratchRegister, times_1, 0)); |
| 296 } | 297 } |
| 297 and_(scratch, | 298 and_(scratch, |
| 298 Immediate(static_cast<int32_t>(isolate()->heap()->NewSpaceMask()))); | 299 Immediate(static_cast<int32_t>(isolate()->heap()->NewSpaceMask()))); |
| 299 j(cc, branch, distance); | 300 j(cc, branch, distance); |
| 300 } | 301 } |
| 301 } | 302 } |
| (...skipping 35 matching lines...) | |
| 337 } | 338 } |
| 338 | 339 |
| 339 RecordWrite( | 340 RecordWrite( |
| 340 object, dst, value, save_fp, remembered_set_action, OMIT_SMI_CHECK); | 341 object, dst, value, save_fp, remembered_set_action, OMIT_SMI_CHECK); |
| 341 | 342 |
| 342 bind(&done); | 343 bind(&done); |
| 343 | 344 |
| 344 // Clobber clobbered input registers when running with the debug-code flag | 345 // Clobber clobbered input registers when running with the debug-code flag |
| 345 // turned on to provoke errors. | 346 // turned on to provoke errors. |
| 346 if (emit_debug_code()) { | 347 if (emit_debug_code()) { |
| 347 movq(value, BitCast<int64_t>(kZapValue), RelocInfo::NONE64); | 348 movq(value, kZapValue, RelocInfo::NONE64); |
| 348 movq(dst, BitCast<int64_t>(kZapValue), RelocInfo::NONE64); | 349 movq(dst, kZapValue, RelocInfo::NONE64); |
| 349 } | 350 } |
| 350 } | 351 } |
| 351 | 352 |
| 352 | 353 |
| 353 void MacroAssembler::RecordWriteArray(Register object, | 354 void MacroAssembler::RecordWriteArray(Register object, |
| 354 Register value, | 355 Register value, |
| 355 Register index, | 356 Register index, |
| 356 SaveFPRegsMode save_fp, | 357 SaveFPRegsMode save_fp, |
| 357 RememberedSetAction remembered_set_action, | 358 RememberedSetAction remembered_set_action, |
| 358 SmiCheck smi_check) { | 359 SmiCheck smi_check) { |
| (...skipping 12 matching lines...) | |
| 371 FixedArray::kHeaderSize - kHeapObjectTag)); | 372 FixedArray::kHeaderSize - kHeapObjectTag)); |
| 372 | 373 |
| 373 RecordWrite( | 374 RecordWrite( |
| 374 object, dst, value, save_fp, remembered_set_action, OMIT_SMI_CHECK); | 375 object, dst, value, save_fp, remembered_set_action, OMIT_SMI_CHECK); |
| 375 | 376 |
| 376 bind(&done); | 377 bind(&done); |
| 377 | 378 |
| 378 // Clobber clobbered input registers when running with the debug-code flag | 379 // Clobber clobbered input registers when running with the debug-code flag |
| 379 // turned on to provoke errors. | 380 // turned on to provoke errors. |
| 380 if (emit_debug_code()) { | 381 if (emit_debug_code()) { |
| 381 movq(value, BitCast<int64_t>(kZapValue), RelocInfo::NONE64); | 382 movq(value, kZapValue, RelocInfo::NONE64); |
| 382 movq(index, BitCast<int64_t>(kZapValue), RelocInfo::NONE64); | 383 movq(index, kZapValue, RelocInfo::NONE64); |
| 383 } | 384 } |
| 384 } | 385 } |
| 385 | 386 |
| 386 | 387 |
| 387 void MacroAssembler::RecordWrite(Register object, | 388 void MacroAssembler::RecordWrite(Register object, |
| 388 Register address, | 389 Register address, |
| 389 Register value, | 390 Register value, |
| 390 SaveFPRegsMode fp_mode, | 391 SaveFPRegsMode fp_mode, |
| 391 RememberedSetAction remembered_set_action, | 392 RememberedSetAction remembered_set_action, |
| 392 SmiCheck smi_check) { | 393 SmiCheck smi_check) { |
| (...skipping 44 matching lines...) | |
| 437 Label::kNear); | 438 Label::kNear); |
| 438 | 439 |
| 439 RecordWriteStub stub(object, value, address, remembered_set_action, fp_mode); | 440 RecordWriteStub stub(object, value, address, remembered_set_action, fp_mode); |
| 440 CallStub(&stub); | 441 CallStub(&stub); |
| 441 | 442 |
| 442 bind(&done); | 443 bind(&done); |
| 443 | 444 |
| 444 // Clobber clobbered registers when running with the debug-code flag | 445 // Clobber clobbered registers when running with the debug-code flag |
| 445 // turned on to provoke errors. | 446 // turned on to provoke errors. |
| 446 if (emit_debug_code()) { | 447 if (emit_debug_code()) { |
| 447 movq(address, BitCast<int64_t>(kZapValue), RelocInfo::NONE64); | 448 movq(address, kZapValue, RelocInfo::NONE64); |
| 448 movq(value, BitCast<int64_t>(kZapValue), RelocInfo::NONE64); | 449 movq(value, kZapValue, RelocInfo::NONE64); |
| 449 } | 450 } |
| 450 } | 451 } |
| 451 | 452 |
| 452 | 453 |
| 453 void MacroAssembler::Assert(Condition cc, BailoutReason reason) { | 454 void MacroAssembler::Assert(Condition cc, BailoutReason reason) { |
| 454 if (emit_debug_code()) Check(cc, reason); | 455 if (emit_debug_code()) Check(cc, reason); |
| 455 } | 456 } |
| 456 | 457 |
| 457 | 458 |
| 458 void MacroAssembler::AssertFastElements(Register elements) { | 459 void MacroAssembler::AssertFastElements(Register elements) { |
| (...skipping 67 matching lines...) | |
| 526 RecordComment(msg); | 527 RecordComment(msg); |
| 527 } | 528 } |
| 528 | 529 |
| 529 if (FLAG_trap_on_abort) { | 530 if (FLAG_trap_on_abort) { |
| 530 int3(); | 531 int3(); |
| 531 return; | 532 return; |
| 532 } | 533 } |
| 533 #endif | 534 #endif |
| 534 | 535 |
| 535 push(rax); | 536 push(rax); |
| 536 movq(kScratchRegister, p0, RelocInfo::NONE64); | 537 movq(kScratchRegister, reinterpret_cast<Smi*>(p0), RelocInfo::NONE64); |
| 537 push(kScratchRegister); | 538 push(kScratchRegister); |
| 538 movq(kScratchRegister, | 539 movq(kScratchRegister, Smi::FromInt(static_cast<int>(p1 - p0)), |
| 539 reinterpret_cast<intptr_t>(Smi::FromInt(static_cast<int>(p1 - p0))), | |
| 540 RelocInfo::NONE64); | 540 RelocInfo::NONE64); |
| 541 push(kScratchRegister); | 541 push(kScratchRegister); |
| 542 | 542 |
| 543 if (!has_frame_) { | 543 if (!has_frame_) { |
| 544 // We don't actually want to generate a pile of code for this, so just | 544 // We don't actually want to generate a pile of code for this, so just |
| 545 // claim there is a stack frame, without generating one. | 545 // claim there is a stack frame, without generating one. |
| 546 FrameScope scope(this, StackFrame::NONE); | 546 FrameScope scope(this, StackFrame::NONE); |
| 547 CallRuntime(Runtime::kAbort, 2); | 547 CallRuntime(Runtime::kAbort, 2); |
| 548 } else { | 548 } else { |
| 549 CallRuntime(Runtime::kAbort, 2); | 549 CallRuntime(Runtime::kAbort, 2); |
| (...skipping 422 matching lines...) | |
| 972 | 972 |
| 973 | 973 |
| 974 void MacroAssembler::Set(Register dst, int64_t x) { | 974 void MacroAssembler::Set(Register dst, int64_t x) { |
| 975 if (x == 0) { | 975 if (x == 0) { |
| 976 xorl(dst, dst); | 976 xorl(dst, dst); |
| 977 } else if (is_uint32(x)) { | 977 } else if (is_uint32(x)) { |
| 978 movl(dst, Immediate(static_cast<uint32_t>(x))); | 978 movl(dst, Immediate(static_cast<uint32_t>(x))); |
| 979 } else if (is_int32(x)) { | 979 } else if (is_int32(x)) { |
| 980 movq(dst, Immediate(static_cast<int32_t>(x))); | 980 movq(dst, Immediate(static_cast<int32_t>(x))); |
| 981 } else { | 981 } else { |
| 982 movq(dst, x, RelocInfo::NONE64); | 982 movq(dst, x); |
| 983 } | 983 } |
| 984 } | 984 } |
| 985 | 985 |
| 986 | 986 |
| 987 void MacroAssembler::Set(const Operand& dst, int64_t x) { | 987 void MacroAssembler::Set(const Operand& dst, int64_t x) { |
| 988 if (is_int32(x)) { | 988 if (is_int32(x)) { |
| 989 movq(dst, Immediate(static_cast<int32_t>(x))); | 989 movq(dst, Immediate(static_cast<int32_t>(x))); |
| 990 } else { | 990 } else { |
| 991 Set(kScratchRegister, x); | 991 Set(kScratchRegister, x); |
| 992 movq(dst, kScratchRegister); | 992 movq(dst, kScratchRegister); |
| (...skipping 44 matching lines...) | |
| 1037 if (value == 1) { | 1037 if (value == 1) { |
| 1038 return kSmiConstantRegister; | 1038 return kSmiConstantRegister; |
| 1039 } | 1039 } |
| 1040 LoadSmiConstant(kScratchRegister, source); | 1040 LoadSmiConstant(kScratchRegister, source); |
| 1041 return kScratchRegister; | 1041 return kScratchRegister; |
| 1042 } | 1042 } |
| 1043 | 1043 |
| 1044 | 1044 |
| 1045 void MacroAssembler::LoadSmiConstant(Register dst, Smi* source) { | 1045 void MacroAssembler::LoadSmiConstant(Register dst, Smi* source) { |
| 1046 if (emit_debug_code()) { | 1046 if (emit_debug_code()) { |
| 1047 movq(dst, | 1047 movq(dst, Smi::FromInt(kSmiConstantRegisterValue), RelocInfo::NONE64); |
| 1048 reinterpret_cast<uint64_t>(Smi::FromInt(kSmiConstantRegisterValue)), | |
| 1049 RelocInfo::NONE64); | |
| 1050 cmpq(dst, kSmiConstantRegister); | 1048 cmpq(dst, kSmiConstantRegister); |
| 1051 if (allow_stub_calls()) { | 1049 if (allow_stub_calls()) { |
| 1052 Assert(equal, kUninitializedKSmiConstantRegister); | 1050 Assert(equal, kUninitializedKSmiConstantRegister); |
| 1053 } else { | 1051 } else { |
| 1054 Label ok; | 1052 Label ok; |
| 1055 j(equal, &ok, Label::kNear); | 1053 j(equal, &ok, Label::kNear); |
| 1056 int3(); | 1054 int3(); |
| 1057 bind(&ok); | 1055 bind(&ok); |
| 1058 } | 1056 } |
| 1059 } | 1057 } |
| (...skipping 26 matching lines...) | |
| 1086 case 2: | 1084 case 2: |
| 1087 lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_1, 0)); | 1085 lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_1, 0)); |
| 1088 break; | 1086 break; |
| 1089 case 1: | 1087 case 1: |
| 1090 movq(dst, kSmiConstantRegister); | 1088 movq(dst, kSmiConstantRegister); |
| 1091 break; | 1089 break; |
| 1092 case 0: | 1090 case 0: |
| 1093 UNREACHABLE(); | 1091 UNREACHABLE(); |
| 1094 return; | 1092 return; |
| 1095 default: | 1093 default: |
| 1096 movq(dst, reinterpret_cast<uint64_t>(source), RelocInfo::NONE64); | 1094 movq(dst, source, RelocInfo::NONE64); |
| 1097 return; | 1095 return; |
| 1098 } | 1096 } |
| 1099 if (negative) { | 1097 if (negative) { |
| 1100 neg(dst); | 1098 neg(dst); |
| 1101 } | 1099 } |
| 1102 } | 1100 } |
| 1103 | 1101 |
| 1104 | 1102 |
| 1105 void MacroAssembler::Integer32ToSmi(Register dst, Register src) { | 1103 void MacroAssembler::Integer32ToSmi(Register dst, Register src) { |
| 1106 STATIC_ASSERT(kSmiTag == 0); | 1104 STATIC_ASSERT(kSmiTag == 0); |
| (...skipping 2005 matching lines...) | |
| 3112 } | 3110 } |
| 3113 | 3111 |
| 3114 bind(&done); | 3112 bind(&done); |
| 3115 } | 3113 } |
| 3116 | 3114 |
| 3117 | 3115 |
| 3118 void MacroAssembler::TruncateDoubleToI(Register result_reg, | 3116 void MacroAssembler::TruncateDoubleToI(Register result_reg, |
| 3119 XMMRegister input_reg) { | 3117 XMMRegister input_reg) { |
| 3120 Label done; | 3118 Label done; |
| 3121 cvttsd2siq(result_reg, input_reg); | 3119 cvttsd2siq(result_reg, input_reg); |
| 3122 movq(kScratchRegister, | 3120 movq(kScratchRegister, V8_INT64_C(0x8000000000000000)); |
| 3123 V8_INT64_C(0x8000000000000000), | |
| 3124 RelocInfo::NONE64); | |
| 3125 cmpq(result_reg, kScratchRegister); | 3121 cmpq(result_reg, kScratchRegister); |
| 3126 j(not_equal, &done, Label::kNear); | 3122 j(not_equal, &done, Label::kNear); |
| 3127 | 3123 |
| 3128 subq(rsp, Immediate(kDoubleSize)); | 3124 subq(rsp, Immediate(kDoubleSize)); |
| 3129 movsd(MemOperand(rsp, 0), input_reg); | 3125 movsd(MemOperand(rsp, 0), input_reg); |
| 3130 SlowTruncateToI(result_reg, rsp, 0); | 3126 SlowTruncateToI(result_reg, rsp, 0); |
| 3131 addq(rsp, Immediate(kDoubleSize)); | 3127 addq(rsp, Immediate(kDoubleSize)); |
| 3132 | 3128 |
| 3133 bind(&done); | 3129 bind(&done); |
| 3134 } | 3130 } |
| (...skipping 129 matching lines...) | |
| 3264 if (emit_debug_code()) { | 3260 if (emit_debug_code()) { |
| 3265 Condition is_smi = CheckSmi(object); | 3261 Condition is_smi = CheckSmi(object); |
| 3266 Check(is_smi, kOperandIsNotASmi); | 3262 Check(is_smi, kOperandIsNotASmi); |
| 3267 } | 3263 } |
| 3268 } | 3264 } |
| 3269 | 3265 |
| 3270 | 3266 |
| 3271 void MacroAssembler::AssertZeroExtended(Register int32_register) { | 3267 void MacroAssembler::AssertZeroExtended(Register int32_register) { |
| 3272 if (emit_debug_code()) { | 3268 if (emit_debug_code()) { |
| 3273 ASSERT(!int32_register.is(kScratchRegister)); | 3269 ASSERT(!int32_register.is(kScratchRegister)); |
| 3274 movq(kScratchRegister, 0x100000000l, RelocInfo::NONE64); | 3270 movq(kScratchRegister, V8_INT64_C(0x0000000100000000)); |
| 3275 cmpq(kScratchRegister, int32_register); | 3271 cmpq(kScratchRegister, int32_register); |
| 3276 Check(above_equal, k32BitValueInRegisterIsNotZeroExtended); | 3272 Check(above_equal, k32BitValueInRegisterIsNotZeroExtended); |
| 3277 } | 3273 } |
| 3278 } | 3274 } |
| 3279 | 3275 |
| 3280 | 3276 |
| 3281 void MacroAssembler::AssertString(Register object) { | 3277 void MacroAssembler::AssertString(Register object) { |
| 3282 if (emit_debug_code()) { | 3278 if (emit_debug_code()) { |
| 3283 testb(object, Immediate(kSmiTagMask)); | 3279 testb(object, Immediate(kSmiTagMask)); |
| 3284 Check(not_equal, kOperandIsASmiAndNotAString); | 3280 Check(not_equal, kOperandIsASmiAndNotAString); |
| (...skipping 1679 matching lines...) | |
| 4964 movq(arg_reg_1, isolate, RelocInfo::EXTERNAL_REFERENCE); | 4960 movq(arg_reg_1, isolate, RelocInfo::EXTERNAL_REFERENCE); |
| 4965 CallCFunction( | 4961 CallCFunction( |
| 4966 ExternalReference::record_object_allocation_function(isolate), 3); | 4962 ExternalReference::record_object_allocation_function(isolate), 3); |
| 4967 PopSafepointRegisters(); | 4963 PopSafepointRegisters(); |
| 4968 } | 4964 } |
| 4969 | 4965 |
| 4970 | 4966 |
| 4971 } } // namespace v8::internal | 4967 } } // namespace v8::internal |
| 4972 | 4968 |
| 4973 #endif // V8_TARGET_ARCH_X64 | 4969 #endif // V8_TARGET_ARCH_X64 |
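
Taken together, the hunks above replace ad-hoc `BitCast`/`reinterpret_cast` arguments and explicit `RelocInfo::NONE64` modes with `movq` calls that accept typed values (`Smi*`, `Address`, plain `int64_t`) directly. The overloads that make those call sites compile are not part of the hunks shown here; the sketch below is only an assumed illustration of the forwarding pattern, with `Register`, `Address`, `Smi`, and `RelocInfo` reduced to stand-ins rather than taken from the real V8 headers.

```cpp
// Illustrative sketch only -- not the real V8 Assembler API. All types here are
// minimal stand-ins so the forwarding pattern compiles on its own.
#include <cstdint>
#include <cstdio>

struct Register { int code; };
using Address = uint8_t*;
struct Smi {};                                 // stand-in for V8's tagged small integer
namespace RelocInfo { enum Mode { NONE64 }; }

// Single underlying emitter: every 64-bit immediate load funnels through here.
void movq(Register dst, int64_t value, RelocInfo::Mode /*rmode*/) {
  std::printf("movq r%d, %lld\n", dst.code, static_cast<long long>(value));
}

// Convenience overloads assumed by the rewritten call sites: the cast to a
// 64-bit integer happens once, centrally, instead of at every caller.
void movq(Register dst, Address value, RelocInfo::Mode rmode) {
  movq(dst, reinterpret_cast<int64_t>(value), rmode);
}

void movq(Register dst, Smi* value, RelocInfo::Mode rmode) {
  movq(dst, reinterpret_cast<int64_t>(value), rmode);
}

void movq(Register dst, int64_t value) {       // no relocation info recorded
  movq(dst, value, RelocInfo::NONE64);
}

int main() {
  Register rax{0};
  movq(rax, INT64_C(0x0000000100000000));               // cf. AssertZeroExtended above
  movq(rax, static_cast<Smi*>(nullptr), RelocInfo::NONE64);
  return 0;
}
```

The point of the pattern, as far as these call sites show it, is that the pointer-to-integer cast and the default relocation mode live in one place instead of being repeated at every `movq` caller.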