| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #if V8_TARGET_ARCH_X64 | 5 #if V8_TARGET_ARCH_X64 |
| 6 | 6 |
| 7 #include "src/base/bits.h" | 7 #include "src/base/bits.h" |
| 8 #include "src/base/division-by-constant.h" | 8 #include "src/base/division-by-constant.h" |
| 9 #include "src/base/utils/random-number-generator.h" | 9 #include "src/base/utils/random-number-generator.h" |
| 10 #include "src/bootstrapper.h" | 10 #include "src/bootstrapper.h" |
| (...skipping 294 matching lines...) |
| 305 | 305 |
| 306 // Clobber clobbered input registers when running with the debug-code flag | 306 // Clobber clobbered input registers when running with the debug-code flag |
| 307 // turned on to provoke errors. | 307 // turned on to provoke errors. |
| 308 if (emit_debug_code()) { | 308 if (emit_debug_code()) { |
| 309 Move(value, kZapValue, Assembler::RelocInfoNone()); | 309 Move(value, kZapValue, Assembler::RelocInfoNone()); |
| 310 Move(dst, kZapValue, Assembler::RelocInfoNone()); | 310 Move(dst, kZapValue, Assembler::RelocInfoNone()); |
| 311 } | 311 } |
| 312 } | 312 } |
| 313 | 313 |
| 314 | 314 |
| 315 void MacroAssembler::RecordWriteArray( | |
| 316 Register object, | |
| 317 Register value, | |
| 318 Register index, | |
| 319 SaveFPRegsMode save_fp, | |
| 320 RememberedSetAction remembered_set_action, | |
| 321 SmiCheck smi_check, | |
| 322 PointersToHereCheck pointers_to_here_check_for_value) { | |
| 323 // First, check if a write barrier is even needed. The tests below | |
| 324 // catch stores of Smis. | |
| 325 Label done; | |
| 326 | |
| 327 // Skip barrier if writing a smi. | |
| 328 if (smi_check == INLINE_SMI_CHECK) { | |
| 329 JumpIfSmi(value, &done); | |
| 330 } | |
| 331 | |
| 332 // Array access: calculate the destination address. Index is not a smi. | |
| 333 Register dst = index; | |
| 334 leap(dst, Operand(object, index, times_pointer_size, | |
| 335 FixedArray::kHeaderSize - kHeapObjectTag)); | |
| 336 | |
| 337 RecordWrite(object, dst, value, save_fp, remembered_set_action, | |
| 338 OMIT_SMI_CHECK, pointers_to_here_check_for_value); | |
| 339 | |
| 340 bind(&done); | |
| 341 | |
| 342 // Clobber clobbered input registers when running with the debug-code flag | |
| 343 // turned on to provoke errors. | |
| 344 if (emit_debug_code()) { | |
| 345 Move(value, kZapValue, Assembler::RelocInfoNone()); | |
| 346 Move(index, kZapValue, Assembler::RelocInfoNone()); | |
| 347 } | |
| 348 } | |
| 349 | |
| 350 | |
| 351 void MacroAssembler::RecordWriteForMap(Register object, | 315 void MacroAssembler::RecordWriteForMap(Register object, |
| 352 Register map, | 316 Register map, |
| 353 Register dst, | 317 Register dst, |
| 354 SaveFPRegsMode fp_mode) { | 318 SaveFPRegsMode fp_mode) { |
| 355 DCHECK(!object.is(kScratchRegister)); | 319 DCHECK(!object.is(kScratchRegister)); |
| 356 DCHECK(!object.is(map)); | 320 DCHECK(!object.is(map)); |
| 357 DCHECK(!object.is(dst)); | 321 DCHECK(!object.is(dst)); |
| 358 DCHECK(!map.is(dst)); | 322 DCHECK(!map.is(dst)); |
| 359 AssertNotSmi(object); | 323 AssertNotSmi(object); |
| 360 | 324 |
| (...skipping 210 matching lines...) |
| 571 Pop(js_function); | 535 Pop(js_function); |
| 572 | 536 |
| 573 bind(&done); | 537 bind(&done); |
| 574 } | 538 } |
| 575 | 539 |
| 576 void MacroAssembler::Assert(Condition cc, BailoutReason reason) { | 540 void MacroAssembler::Assert(Condition cc, BailoutReason reason) { |
| 577 if (emit_debug_code()) Check(cc, reason); | 541 if (emit_debug_code()) Check(cc, reason); |
| 578 } | 542 } |
| 579 | 543 |
| 580 | 544 |
| 581 void MacroAssembler::AssertFastElements(Register elements) { | |
| 582 if (emit_debug_code()) { | |
| 583 Label ok; | |
| 584 CompareRoot(FieldOperand(elements, HeapObject::kMapOffset), | |
| 585 Heap::kFixedArrayMapRootIndex); | |
| 586 j(equal, &ok, Label::kNear); | |
| 587 CompareRoot(FieldOperand(elements, HeapObject::kMapOffset), | |
| 588 Heap::kFixedDoubleArrayMapRootIndex); | |
| 589 j(equal, &ok, Label::kNear); | |
| 590 CompareRoot(FieldOperand(elements, HeapObject::kMapOffset), | |
| 591 Heap::kFixedCOWArrayMapRootIndex); | |
| 592 j(equal, &ok, Label::kNear); | |
| 593 Abort(kJSObjectWithFastElementsMapHasSlowElements); | |
| 594 bind(&ok); | |
| 595 } | |
| 596 } | |
| 597 | |
| 598 | |
| 599 void MacroAssembler::Check(Condition cc, BailoutReason reason) { | 545 void MacroAssembler::Check(Condition cc, BailoutReason reason) { |
| 600 Label L; | 546 Label L; |
| 601 j(cc, &L, Label::kNear); | 547 j(cc, &L, Label::kNear); |
| 602 Abort(reason); | 548 Abort(reason); |
| 603 // Control will not return here. | 549 // Control will not return here. |
| 604 bind(&L); | 550 bind(&L); |
| 605 } | 551 } |
| 606 | 552 |
| 607 | 553 |
| 608 void MacroAssembler::CheckStackAlignment() { | 554 void MacroAssembler::CheckStackAlignment() { |
| 609 int frame_alignment = base::OS::ActivationFrameAlignment(); | 555 int frame_alignment = base::OS::ActivationFrameAlignment(); |
| 610 int frame_alignment_mask = frame_alignment - 1; | 556 int frame_alignment_mask = frame_alignment - 1; |
| 611 if (frame_alignment > kPointerSize) { | 557 if (frame_alignment > kPointerSize) { |
| 612 DCHECK(base::bits::IsPowerOfTwo32(frame_alignment)); | 558 DCHECK(base::bits::IsPowerOfTwo32(frame_alignment)); |
| 613 Label alignment_as_expected; | 559 Label alignment_as_expected; |
| 614 testp(rsp, Immediate(frame_alignment_mask)); | 560 testp(rsp, Immediate(frame_alignment_mask)); |
| 615 j(zero, &alignment_as_expected, Label::kNear); | 561 j(zero, &alignment_as_expected, Label::kNear); |
| 616 // Abort if stack is not aligned. | 562 // Abort if stack is not aligned. |
| 617 int3(); | 563 int3(); |
| 618 bind(&alignment_as_expected); | 564 bind(&alignment_as_expected); |
| 619 } | 565 } |
| 620 } | 566 } |
| 621 | 567 |
| 622 | 568 |
| 623 void MacroAssembler::NegativeZeroTest(Register result, | |
| 624 Register op, | |
| 625 Label* then_label) { | |
| 626 Label ok; | |
| 627 testl(result, result); | |
| 628 j(not_zero, &ok, Label::kNear); | |
| 629 testl(op, op); | |
| 630 j(sign, then_label); | |
| 631 bind(&ok); | |
| 632 } | |
| 633 | |
| 634 | |
| 635 void MacroAssembler::Abort(BailoutReason reason) { | 569 void MacroAssembler::Abort(BailoutReason reason) { |
| 636 #ifdef DEBUG | 570 #ifdef DEBUG |
| 637 const char* msg = GetBailoutReason(reason); | 571 const char* msg = GetBailoutReason(reason); |
| 638 if (msg != NULL) { | 572 if (msg != NULL) { |
| 639 RecordComment("Abort message: "); | 573 RecordComment("Abort message: "); |
| 640 RecordComment(msg); | 574 RecordComment(msg); |
| 641 } | 575 } |
| 642 | 576 |
| 643 if (FLAG_trap_on_abort) { | 577 if (FLAG_trap_on_abort) { |
| 644 int3(); | 578 int3(); |
| (...skipping 23 matching lines...) |
| 668 DCHECK(AllowThisStubCall(stub)); // Calls are not allowed in some stubs | 602 DCHECK(AllowThisStubCall(stub)); // Calls are not allowed in some stubs |
| 669 Call(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id); | 603 Call(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id); |
| 670 } | 604 } |
| 671 | 605 |
| 672 | 606 |
| 673 void MacroAssembler::TailCallStub(CodeStub* stub) { | 607 void MacroAssembler::TailCallStub(CodeStub* stub) { |
| 674 Jump(stub->GetCode(), RelocInfo::CODE_TARGET); | 608 Jump(stub->GetCode(), RelocInfo::CODE_TARGET); |
| 675 } | 609 } |
| 676 | 610 |
| 677 | 611 |
| 678 void MacroAssembler::StubReturn(int argc) { | |
| 679 DCHECK(argc >= 1 && generating_stub()); | |
| 680 ret((argc - 1) * kPointerSize); | |
| 681 } | |
| 682 | |
| 683 | |
| 684 bool MacroAssembler::AllowThisStubCall(CodeStub* stub) { | 612 bool MacroAssembler::AllowThisStubCall(CodeStub* stub) { |
| 685 return has_frame_ || !stub->SometimesSetsUpAFrame(); | 613 return has_frame_ || !stub->SometimesSetsUpAFrame(); |
| 686 } | 614 } |
| 687 | 615 |
| 688 void MacroAssembler::CallRuntime(const Runtime::Function* f, | 616 void MacroAssembler::CallRuntime(const Runtime::Function* f, |
| 689 int num_arguments, | 617 int num_arguments, |
| 690 SaveFPRegsMode save_doubles) { | 618 SaveFPRegsMode save_doubles) { |
| 691 // If the expected number of arguments of the runtime function is | 619 // If the expected number of arguments of the runtime function is |
| 692 // constant, we check that the actual number of arguments match the | 620 // constant, we check that the actual number of arguments match the |
| 693 // expectation. | 621 // expectation. |
| (...skipping 649 matching lines...) |
| 1343 | 1271 |
| 1344 | 1272 |
| 1345 void MacroAssembler::Cmp(const Operand& dst, Smi* src) { | 1273 void MacroAssembler::Cmp(const Operand& dst, Smi* src) { |
| 1346 // The Operand cannot use the smi register. | 1274 // The Operand cannot use the smi register. |
| 1347 Register smi_reg = GetSmiConstant(src); | 1275 Register smi_reg = GetSmiConstant(src); |
| 1348 DCHECK(!dst.AddressUsesRegister(smi_reg)); | 1276 DCHECK(!dst.AddressUsesRegister(smi_reg)); |
| 1349 cmpp(dst, smi_reg); | 1277 cmpp(dst, smi_reg); |
| 1350 } | 1278 } |
| 1351 | 1279 |
| 1352 | 1280 |
| 1353 void MacroAssembler::SmiCompareInteger32(const Operand& dst, Register src) { | |
| 1354 if (SmiValuesAre32Bits()) { | |
| 1355 cmpl(Operand(dst, kSmiShift / kBitsPerByte), src); | |
| 1356 } else { | |
| 1357 DCHECK(SmiValuesAre31Bits()); | |
| 1358 SmiToInteger32(kScratchRegister, dst); | |
| 1359 cmpl(kScratchRegister, src); | |
| 1360 } | |
| 1361 } | |
| 1362 | |
| 1363 | |
| 1364 void MacroAssembler::PositiveSmiTimesPowerOfTwoToInteger64(Register dst, | 1281 void MacroAssembler::PositiveSmiTimesPowerOfTwoToInteger64(Register dst, |
| 1365 Register src, | 1282 Register src, |
| 1366 int power) { | 1283 int power) { |
| 1367 DCHECK(power >= 0); | 1284 DCHECK(power >= 0); |
| 1368 DCHECK(power < 64); | 1285 DCHECK(power < 64); |
| 1369 if (power == 0) { | 1286 if (power == 0) { |
| 1370 SmiToInteger64(dst, src); | 1287 SmiToInteger64(dst, src); |
| 1371 return; | 1288 return; |
| 1372 } | 1289 } |
| 1373 if (!dst.is(src)) { | 1290 if (!dst.is(src)) { |
| 1374 movp(dst, src); | 1291 movp(dst, src); |
| 1375 } | 1292 } |
| 1376 if (power < kSmiShift) { | 1293 if (power < kSmiShift) { |
| 1377 sarp(dst, Immediate(kSmiShift - power)); | 1294 sarp(dst, Immediate(kSmiShift - power)); |
| 1378 } else if (power > kSmiShift) { | 1295 } else if (power > kSmiShift) { |
| 1379 shlp(dst, Immediate(power - kSmiShift)); | 1296 shlp(dst, Immediate(power - kSmiShift)); |
| 1380 } | 1297 } |
| 1381 } | 1298 } |
| 1382 | 1299 |
| 1383 | 1300 |
| 1384 void MacroAssembler::PositiveSmiDivPowerOfTwoToInteger32(Register dst, | |
| 1385 Register src, | |
| 1386 int power) { | |
| 1387 DCHECK((0 <= power) && (power < 32)); | |
| 1388 if (dst.is(src)) { | |
| 1389 shrp(dst, Immediate(power + kSmiShift)); | |
| 1390 } else { | |
| 1391 UNIMPLEMENTED(); // Not used. | |
| 1392 } | |
| 1393 } | |
| 1394 | |
| 1395 | |
| 1396 void MacroAssembler::SmiOrIfSmis(Register dst, Register src1, Register src2, | |
| 1397 Label* on_not_smis, | |
| 1398 Label::Distance near_jump) { | |
| 1399 if (dst.is(src1) || dst.is(src2)) { | |
| 1400 DCHECK(!src1.is(kScratchRegister)); | |
| 1401 DCHECK(!src2.is(kScratchRegister)); | |
| 1402 movp(kScratchRegister, src1); | |
| 1403 orp(kScratchRegister, src2); | |
| 1404 JumpIfNotSmi(kScratchRegister, on_not_smis, near_jump); | |
| 1405 movp(dst, kScratchRegister); | |
| 1406 } else { | |
| 1407 movp(dst, src1); | |
| 1408 orp(dst, src2); | |
| 1409 JumpIfNotSmi(dst, on_not_smis, near_jump); | |
| 1410 } | |
| 1411 } | |
| 1412 | |
| 1413 | |
| 1414 Condition MacroAssembler::CheckSmi(Register src) { | 1301 Condition MacroAssembler::CheckSmi(Register src) { |
| 1415 STATIC_ASSERT(kSmiTag == 0); | 1302 STATIC_ASSERT(kSmiTag == 0); |
| 1416 testb(src, Immediate(kSmiTagMask)); | 1303 testb(src, Immediate(kSmiTagMask)); |
| 1417 return zero; | 1304 return zero; |
| 1418 } | 1305 } |
| 1419 | 1306 |
| 1420 | 1307 |
| 1421 Condition MacroAssembler::CheckSmi(const Operand& src) { | 1308 Condition MacroAssembler::CheckSmi(const Operand& src) { |
| 1422 STATIC_ASSERT(kSmiTag == 0); | 1309 STATIC_ASSERT(kSmiTag == 0); |
| 1423 testb(src, Immediate(kSmiTagMask)); | 1310 testb(src, Immediate(kSmiTagMask)); |
| (...skipping 80 matching lines...) |
| 1504 testl(src, src); | 1391 testl(src, src); |
| 1505 return positive; | 1392 return positive; |
| 1506 } else { | 1393 } else { |
| 1507 DCHECK(SmiValuesAre31Bits()); | 1394 DCHECK(SmiValuesAre31Bits()); |
| 1508 testl(src, Immediate(0xc0000000)); | 1395 testl(src, Immediate(0xc0000000)); |
| 1509 return zero; | 1396 return zero; |
| 1510 } | 1397 } |
| 1511 } | 1398 } |
| 1512 | 1399 |
| 1513 | 1400 |
| 1514 void MacroAssembler::CheckSmiToIndicator(Register dst, Register src) { | |
| 1515 if (dst.is(src)) { | |
| 1516 andl(dst, Immediate(kSmiTagMask)); | |
| 1517 } else { | |
| 1518 movl(dst, Immediate(kSmiTagMask)); | |
| 1519 andl(dst, src); | |
| 1520 } | |
| 1521 } | |
| 1522 | |
| 1523 | |
| 1524 void MacroAssembler::CheckSmiToIndicator(Register dst, const Operand& src) { | |
| 1525 if (!(src.AddressUsesRegister(dst))) { | |
| 1526 movl(dst, Immediate(kSmiTagMask)); | |
| 1527 andl(dst, src); | |
| 1528 } else { | |
| 1529 movl(dst, src); | |
| 1530 andl(dst, Immediate(kSmiTagMask)); | |
| 1531 } | |
| 1532 } | |
| 1533 | |
| 1534 | |
| 1535 void MacroAssembler::JumpIfValidSmiValue(Register src, | 1401 void MacroAssembler::JumpIfValidSmiValue(Register src, |
| 1536 Label* on_valid, | 1402 Label* on_valid, |
| 1537 Label::Distance near_jump) { | 1403 Label::Distance near_jump) { |
| 1538 Condition is_valid = CheckInteger32ValidSmiValue(src); | 1404 Condition is_valid = CheckInteger32ValidSmiValue(src); |
| 1539 j(is_valid, on_valid, near_jump); | 1405 j(is_valid, on_valid, near_jump); |
| 1540 } | 1406 } |
| 1541 | 1407 |
| 1542 | 1408 |
| 1543 void MacroAssembler::JumpIfNotValidSmiValue(Register src, | 1409 void MacroAssembler::JumpIfNotValidSmiValue(Register src, |
| 1544 Label* on_invalid, | 1410 Label* on_invalid, |
| (...skipping 1502 matching lines...) |
| 3047 } | 2913 } |
| 3048 | 2914 |
| 3049 | 2915 |
| 3050 void MacroAssembler::MoveHeapObject(Register result, | 2916 void MacroAssembler::MoveHeapObject(Register result, |
| 3051 Handle<Object> object) { | 2917 Handle<Object> object) { |
| 3052 DCHECK(object->IsHeapObject()); | 2918 DCHECK(object->IsHeapObject()); |
| 3053 Move(result, object, RelocInfo::EMBEDDED_OBJECT); | 2919 Move(result, object, RelocInfo::EMBEDDED_OBJECT); |
| 3054 } | 2920 } |
| 3055 | 2921 |
| 3056 | 2922 |
| 3057 void MacroAssembler::LoadGlobalCell(Register dst, Handle<Cell> cell) { | |
| 3058 if (dst.is(rax)) { | |
| 3059 AllowDeferredHandleDereference embedding_raw_address; | |
| 3060 load_rax(cell.location(), RelocInfo::CELL); | |
| 3061 } else { | |
| 3062 Move(dst, cell, RelocInfo::CELL); | |
| 3063 movp(dst, Operand(dst, 0)); | |
| 3064 } | |
| 3065 } | |
| 3066 | |
| 3067 | |
| 3068 void MacroAssembler::CmpWeakValue(Register value, Handle<WeakCell> cell, | |
| 3069 Register scratch) { | |
| 3070 Move(scratch, cell, RelocInfo::EMBEDDED_OBJECT); | |
| 3071 cmpp(value, FieldOperand(scratch, WeakCell::kValueOffset)); | |
| 3072 } | |
| 3073 | |
| 3074 | |
| 3075 void MacroAssembler::GetWeakValue(Register value, Handle<WeakCell> cell) { | 2923 void MacroAssembler::GetWeakValue(Register value, Handle<WeakCell> cell) { |
| 3076 Move(value, cell, RelocInfo::EMBEDDED_OBJECT); | 2924 Move(value, cell, RelocInfo::EMBEDDED_OBJECT); |
| 3077 movp(value, FieldOperand(value, WeakCell::kValueOffset)); | 2925 movp(value, FieldOperand(value, WeakCell::kValueOffset)); |
| 3078 } | 2926 } |
| 3079 | 2927 |
| 3080 | 2928 |
| 3081 void MacroAssembler::LoadWeakValue(Register value, Handle<WeakCell> cell, | 2929 void MacroAssembler::LoadWeakValue(Register value, Handle<WeakCell> cell, |
| 3082 Label* miss) { | 2930 Label* miss) { |
| 3083 GetWeakValue(value, cell); | 2931 GetWeakValue(value, cell); |
| 3084 JumpIfSmi(value, miss); | 2932 JumpIfSmi(value, miss); |
| (...skipping 121 matching lines...) |
| 3206 (((offset - SharedFunctionInfo::kLengthOffset) / kIntSize) % 2 == 1)); | 3054 (((offset - SharedFunctionInfo::kLengthOffset) / kIntSize) % 2 == 1)); |
| 3207 if (kPointerSize == kInt64Size) { | 3055 if (kPointerSize == kInt64Size) { |
| 3208 movsxlq(dst, FieldOperand(base, offset)); | 3056 movsxlq(dst, FieldOperand(base, offset)); |
| 3209 } else { | 3057 } else { |
| 3210 movp(dst, FieldOperand(base, offset)); | 3058 movp(dst, FieldOperand(base, offset)); |
| 3211 SmiToInteger32(dst, dst); | 3059 SmiToInteger32(dst, dst); |
| 3212 } | 3060 } |
| 3213 } | 3061 } |
| 3214 | 3062 |
| 3215 | 3063 |
| 3216 void MacroAssembler::TestBitSharedFunctionInfoSpecialField(Register base, | |
| 3217 int offset, | |
| 3218 int bits) { | |
| 3219 DCHECK(offset > SharedFunctionInfo::kLengthOffset && | |
| 3220 offset <= SharedFunctionInfo::kSize && | |
| 3221 (((offset - SharedFunctionInfo::kLengthOffset) / kIntSize) % 2 == 1)); | |
| 3222 if (kPointerSize == kInt32Size) { | |
| 3223 // On x32, this field is represented by SMI. | |
| 3224 bits += kSmiShift; | |
| 3225 } | |
| 3226 int byte_offset = bits / kBitsPerByte; | |
| 3227 int bit_in_byte = bits & (kBitsPerByte - 1); | |
| 3228 testb(FieldOperand(base, offset + byte_offset), Immediate(1 << bit_in_byte)); | |
| 3229 } | |
| 3230 | |
| 3231 | |
| 3232 void MacroAssembler::Jump(ExternalReference ext) { | 3064 void MacroAssembler::Jump(ExternalReference ext) { |
| 3233 LoadAddress(kScratchRegister, ext); | 3065 LoadAddress(kScratchRegister, ext); |
| 3234 jmp(kScratchRegister); | 3066 jmp(kScratchRegister); |
| 3235 } | 3067 } |
| 3236 | 3068 |
| 3237 | 3069 |
| 3238 void MacroAssembler::Jump(const Operand& op) { | 3070 void MacroAssembler::Jump(const Operand& op) { |
| 3239 if (kPointerSize == kInt64Size) { | 3071 if (kPointerSize == kInt64Size) { |
| 3240 jmp(op); | 3072 jmp(op); |
| 3241 } else { | 3073 } else { |
| (...skipping 315 matching lines...) |
| 3557 Pop(r8); | 3389 Pop(r8); |
| 3558 Pop(rdi); | 3390 Pop(rdi); |
| 3559 Pop(rsi); | 3391 Pop(rsi); |
| 3560 Pop(rbx); | 3392 Pop(rbx); |
| 3561 Pop(rdx); | 3393 Pop(rdx); |
| 3562 Pop(rcx); | 3394 Pop(rcx); |
| 3563 Pop(rax); | 3395 Pop(rax); |
| 3564 } | 3396 } |
| 3565 | 3397 |
| 3566 | 3398 |
| 3567 void MacroAssembler::Dropad() { | |
| 3568 addp(rsp, Immediate(kNumSafepointRegisters * kPointerSize)); | |
| 3569 } | |
| 3570 | |
| 3571 | |
| 3572 // Order general registers are pushed by Pushad: | 3399 // Order general registers are pushed by Pushad: |
| 3573 // rax, rcx, rdx, rbx, rsi, rdi, r8, r9, r11, r14, r15. | 3400 // rax, rcx, rdx, rbx, rsi, rdi, r8, r9, r11, r14, r15. |
| 3574 const int | 3401 const int |
| 3575 MacroAssembler::kSafepointPushRegisterIndices[Register::kNumRegisters] = { | 3402 MacroAssembler::kSafepointPushRegisterIndices[Register::kNumRegisters] = { |
| 3576 0, | 3403 0, |
| 3577 1, | 3404 1, |
| 3578 2, | 3405 2, |
| 3579 3, | 3406 3, |
| 3580 -1, | 3407 -1, |
| 3581 -1, | 3408 -1, |
| (...skipping 254 matching lines...) |
| 3836 AccessorComponent accessor) { | 3663 AccessorComponent accessor) { |
| 3837 movp(dst, FieldOperand(holder, HeapObject::kMapOffset)); | 3664 movp(dst, FieldOperand(holder, HeapObject::kMapOffset)); |
| 3838 LoadInstanceDescriptors(dst, dst); | 3665 LoadInstanceDescriptors(dst, dst); |
| 3839 movp(dst, FieldOperand(dst, DescriptorArray::GetValueOffset(accessor_index))); | 3666 movp(dst, FieldOperand(dst, DescriptorArray::GetValueOffset(accessor_index))); |
| 3840 int offset = accessor == ACCESSOR_GETTER ? AccessorPair::kGetterOffset | 3667 int offset = accessor == ACCESSOR_GETTER ? AccessorPair::kGetterOffset |
| 3841 : AccessorPair::kSetterOffset; | 3668 : AccessorPair::kSetterOffset; |
| 3842 movp(dst, FieldOperand(dst, offset)); | 3669 movp(dst, FieldOperand(dst, offset)); |
| 3843 } | 3670 } |
| 3844 | 3671 |
| 3845 | 3672 |
| 3846 void MacroAssembler::DispatchWeakMap(Register obj, Register scratch1, | |
| 3847 Register scratch2, Handle<WeakCell> cell, | |
| 3848 Handle<Code> success, | |
| 3849 SmiCheckType smi_check_type) { | |
| 3850 Label fail; | |
| 3851 if (smi_check_type == DO_SMI_CHECK) { | |
| 3852 JumpIfSmi(obj, &fail); | |
| 3853 } | |
| 3854 movq(scratch1, FieldOperand(obj, HeapObject::kMapOffset)); | |
| 3855 CmpWeakValue(scratch1, cell, scratch2); | |
| 3856 j(equal, success, RelocInfo::CODE_TARGET); | |
| 3857 bind(&fail); | |
| 3858 } | |
| 3859 | |
| 3860 | |
| 3861 void MacroAssembler::AssertNumber(Register object) { | |
| 3862 if (emit_debug_code()) { | |
| 3863 Label ok; | |
| 3864 Condition is_smi = CheckSmi(object); | |
| 3865 j(is_smi, &ok, Label::kNear); | |
| 3866 Cmp(FieldOperand(object, HeapObject::kMapOffset), | |
| 3867 isolate()->factory()->heap_number_map()); | |
| 3868 Check(equal, kOperandIsNotANumber); | |
| 3869 bind(&ok); | |
| 3870 } | |
| 3871 } | |
| 3872 | |
| 3873 void MacroAssembler::AssertNotNumber(Register object) { | |
| 3874 if (emit_debug_code()) { | |
| 3875 Condition is_smi = CheckSmi(object); | |
| 3876 Check(NegateCondition(is_smi), kOperandIsANumber); | |
| 3877 Cmp(FieldOperand(object, HeapObject::kMapOffset), | |
| 3878 isolate()->factory()->heap_number_map()); | |
| 3879 Check(not_equal, kOperandIsANumber); | |
| 3880 } | |
| 3881 } | |
| 3882 | |
| 3883 void MacroAssembler::AssertNotSmi(Register object) { | 3673 void MacroAssembler::AssertNotSmi(Register object) { |
| 3884 if (emit_debug_code()) { | 3674 if (emit_debug_code()) { |
| 3885 Condition is_smi = CheckSmi(object); | 3675 Condition is_smi = CheckSmi(object); |
| 3886 Check(NegateCondition(is_smi), kOperandIsASmi); | 3676 Check(NegateCondition(is_smi), kOperandIsASmi); |
| 3887 } | 3677 } |
| 3888 } | 3678 } |
| 3889 | 3679 |
| 3890 | 3680 |
| 3891 void MacroAssembler::AssertSmi(Register object) { | 3681 void MacroAssembler::AssertSmi(Register object) { |
| 3892 if (emit_debug_code()) { | 3682 if (emit_debug_code()) { |
| (...skipping 14 matching lines...) |
| 3907 void MacroAssembler::AssertZeroExtended(Register int32_register) { | 3697 void MacroAssembler::AssertZeroExtended(Register int32_register) { |
| 3908 if (emit_debug_code()) { | 3698 if (emit_debug_code()) { |
| 3909 DCHECK(!int32_register.is(kScratchRegister)); | 3699 DCHECK(!int32_register.is(kScratchRegister)); |
| 3910 movq(kScratchRegister, V8_INT64_C(0x0000000100000000)); | 3700 movq(kScratchRegister, V8_INT64_C(0x0000000100000000)); |
| 3911 cmpq(kScratchRegister, int32_register); | 3701 cmpq(kScratchRegister, int32_register); |
| 3912 Check(above_equal, k32BitValueInRegisterIsNotZeroExtended); | 3702 Check(above_equal, k32BitValueInRegisterIsNotZeroExtended); |
| 3913 } | 3703 } |
| 3914 } | 3704 } |
| 3915 | 3705 |
| 3916 | 3706 |
| 3917 void MacroAssembler::AssertString(Register object) { | |
| 3918 if (emit_debug_code()) { | |
| 3919 testb(object, Immediate(kSmiTagMask)); | |
| 3920 Check(not_equal, kOperandIsASmiAndNotAString); | |
| 3921 Push(object); | |
| 3922 movp(object, FieldOperand(object, HeapObject::kMapOffset)); | |
| 3923 CmpInstanceType(object, FIRST_NONSTRING_TYPE); | |
| 3924 Pop(object); | |
| 3925 Check(below, kOperandIsNotAString); | |
| 3926 } | |
| 3927 } | |
| 3928 | |
| 3929 | |
| 3930 void MacroAssembler::AssertName(Register object) { | |
| 3931 if (emit_debug_code()) { | |
| 3932 testb(object, Immediate(kSmiTagMask)); | |
| 3933 Check(not_equal, kOperandIsASmiAndNotAName); | |
| 3934 Push(object); | |
| 3935 movp(object, FieldOperand(object, HeapObject::kMapOffset)); | |
| 3936 CmpInstanceType(object, LAST_NAME_TYPE); | |
| 3937 Pop(object); | |
| 3938 Check(below_equal, kOperandIsNotAName); | |
| 3939 } | |
| 3940 } | |
| 3941 | |
| 3942 | |
| 3943 void MacroAssembler::AssertFunction(Register object) { | 3707 void MacroAssembler::AssertFunction(Register object) { |
| 3944 if (emit_debug_code()) { | 3708 if (emit_debug_code()) { |
| 3945 testb(object, Immediate(kSmiTagMask)); | 3709 testb(object, Immediate(kSmiTagMask)); |
| 3946 Check(not_equal, kOperandIsASmiAndNotAFunction); | 3710 Check(not_equal, kOperandIsASmiAndNotAFunction); |
| 3947 Push(object); | 3711 Push(object); |
| 3948 CmpObjectType(object, JS_FUNCTION_TYPE, object); | 3712 CmpObjectType(object, JS_FUNCTION_TYPE, object); |
| 3949 Pop(object); | 3713 Pop(object); |
| 3950 Check(equal, kOperandIsNotAFunction); | 3714 Check(equal, kOperandIsNotAFunction); |
| 3951 } | 3715 } |
| 3952 } | 3716 } |
| (...skipping 14 matching lines...) |
| 3967 if (emit_debug_code()) { | 3731 if (emit_debug_code()) { |
| 3968 testb(object, Immediate(kSmiTagMask)); | 3732 testb(object, Immediate(kSmiTagMask)); |
| 3969 Check(not_equal, kOperandIsASmiAndNotAGeneratorObject); | 3733 Check(not_equal, kOperandIsASmiAndNotAGeneratorObject); |
| 3970 Push(object); | 3734 Push(object); |
| 3971 CmpObjectType(object, JS_GENERATOR_OBJECT_TYPE, object); | 3735 CmpObjectType(object, JS_GENERATOR_OBJECT_TYPE, object); |
| 3972 Pop(object); | 3736 Pop(object); |
| 3973 Check(equal, kOperandIsNotAGeneratorObject); | 3737 Check(equal, kOperandIsNotAGeneratorObject); |
| 3974 } | 3738 } |
| 3975 } | 3739 } |
| 3976 | 3740 |
| 3977 void MacroAssembler::AssertReceiver(Register object) { | |
| 3978 if (emit_debug_code()) { | |
| 3979 testb(object, Immediate(kSmiTagMask)); | |
| 3980 Check(not_equal, kOperandIsASmiAndNotAReceiver); | |
| 3981 Push(object); | |
| 3982 STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE); | |
| 3983 CmpObjectType(object, FIRST_JS_RECEIVER_TYPE, object); | |
| 3984 Pop(object); | |
| 3985 Check(above_equal, kOperandIsNotAReceiver); | |
| 3986 } | |
| 3987 } | |
| 3988 | |
| 3989 | |
| 3990 void MacroAssembler::AssertUndefinedOrAllocationSite(Register object) { | 3741 void MacroAssembler::AssertUndefinedOrAllocationSite(Register object) { |
| 3991 if (emit_debug_code()) { | 3742 if (emit_debug_code()) { |
| 3992 Label done_checking; | 3743 Label done_checking; |
| 3993 AssertNotSmi(object); | 3744 AssertNotSmi(object); |
| 3994 Cmp(object, isolate()->factory()->undefined_value()); | 3745 Cmp(object, isolate()->factory()->undefined_value()); |
| 3995 j(equal, &done_checking); | 3746 j(equal, &done_checking); |
| 3996 Cmp(FieldOperand(object, 0), isolate()->factory()->allocation_site_map()); | 3747 Cmp(FieldOperand(object, 0), isolate()->factory()->allocation_site_map()); |
| 3997 Assert(equal, kExpectedUndefinedOrCell); | 3748 Assert(equal, kExpectedUndefinedOrCell); |
| 3998 bind(&done_checking); | 3749 bind(&done_checking); |
| 3999 } | 3750 } |
| 4000 } | 3751 } |
| 4001 | 3752 |
| 4002 | 3753 |
| 4003 void MacroAssembler::AssertRootValue(Register src, | |
| 4004 Heap::RootListIndex root_value_index, | |
| 4005 BailoutReason reason) { | |
| 4006 if (emit_debug_code()) { | |
| 4007 DCHECK(!src.is(kScratchRegister)); | |
| 4008 LoadRoot(kScratchRegister, root_value_index); | |
| 4009 cmpp(src, kScratchRegister); | |
| 4010 Check(equal, reason); | |
| 4011 } | |
| 4012 } | |
| 4013 | |
| 4014 | |
| 4015 | |
| 4016 Condition MacroAssembler::IsObjectStringType(Register heap_object, | 3754 Condition MacroAssembler::IsObjectStringType(Register heap_object, |
| 4017 Register map, | 3755 Register map, |
| 4018 Register instance_type) { | 3756 Register instance_type) { |
| 4019 movp(map, FieldOperand(heap_object, HeapObject::kMapOffset)); | 3757 movp(map, FieldOperand(heap_object, HeapObject::kMapOffset)); |
| 4020 movzxbl(instance_type, FieldOperand(map, Map::kInstanceTypeOffset)); | 3758 movzxbl(instance_type, FieldOperand(map, Map::kInstanceTypeOffset)); |
| 4021 STATIC_ASSERT(kNotStringTag != 0); | 3759 STATIC_ASSERT(kNotStringTag != 0); |
| 4022 testb(instance_type, Immediate(kIsNotStringMask)); | 3760 testb(instance_type, Immediate(kIsNotStringMask)); |
| 4023 return zero; | 3761 return zero; |
| 4024 } | 3762 } |
| 4025 | 3763 |
| 4026 | 3764 |
| 4027 Condition MacroAssembler::IsObjectNameType(Register heap_object, | |
| 4028 Register map, | |
| 4029 Register instance_type) { | |
| 4030 movp(map, FieldOperand(heap_object, HeapObject::kMapOffset)); | |
| 4031 movzxbl(instance_type, FieldOperand(map, Map::kInstanceTypeOffset)); | |
| 4032 cmpb(instance_type, Immediate(static_cast<uint8_t>(LAST_NAME_TYPE))); | |
| 4033 return below_equal; | |
| 4034 } | |
| 4035 | |
| 4036 | |
| 4037 void MacroAssembler::SetCounter(StatsCounter* counter, int value) { | 3765 void MacroAssembler::SetCounter(StatsCounter* counter, int value) { |
| 4038 if (FLAG_native_code_counters && counter->Enabled()) { | 3766 if (FLAG_native_code_counters && counter->Enabled()) { |
| 4039 Operand counter_operand = ExternalOperand(ExternalReference(counter)); | 3767 Operand counter_operand = ExternalOperand(ExternalReference(counter)); |
| 4040 movl(counter_operand, Immediate(value)); | 3768 movl(counter_operand, Immediate(value)); |
| 4041 } | 3769 } |
| 4042 } | 3770 } |
| 4043 | 3771 |
| 4044 | 3772 |
| 4045 void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) { | 3773 void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) { |
| 4046 DCHECK(value > 0); | 3774 DCHECK(value > 0); |
| (...skipping 1233 matching lines...) |
| 5280 movl(rax, dividend); | 5008 movl(rax, dividend); |
| 5281 shrl(rax, Immediate(31)); | 5009 shrl(rax, Immediate(31)); |
| 5282 addl(rdx, rax); | 5010 addl(rdx, rax); |
| 5283 } | 5011 } |
| 5284 | 5012 |
| 5285 | 5013 |
| 5286 } // namespace internal | 5014 } // namespace internal |
| 5287 } // namespace v8 | 5015 } // namespace v8 |
| 5288 | 5016 |
| 5289 #endif // V8_TARGET_ARCH_X64 | 5017 #endif // V8_TARGET_ARCH_X64 |
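Note for reviewers: several of the helpers deleted or kept in this diff (CheckSmi, JumpIfSmi, SmiToInteger64, SmiCompareInteger32, PositiveSmiTimesPowerOfTwoToInteger64) depend on V8's Smi tagging convention on x64. The sketch below is illustrative only, not V8 source; it assumes kSmiTag == 0, a one-bit tag mask, and, when SmiValuesAre32Bits() holds, a 32-bit payload stored in the upper half of the word (kSmiShift == 32), which is what the assembler sequences above imply. The names IsSmi, SmiToInt, and IntToSmi are hypothetical helpers introduced here for illustration.

```cpp
// Illustrative sketch only -- not V8 source. Models the Smi conventions
// assumed by this file: kSmiTag == 0 (a word is a Smi iff its tag bits are
// clear, matching the testb(kSmiTagMask)/zero pattern in CheckSmi), and an
// assumed 32-bit payload kept above kSmiShift, which is why untagging is an
// arithmetic right shift.
#include <cstdint>

constexpr intptr_t kSmiTag = 0;      // tag value for small integers
constexpr intptr_t kSmiTagMask = 1;  // assumption: a single tag bit
constexpr int kSmiShift = 32;        // assumption: SmiValuesAre32Bits() layout

// Counterpart of the emitted `testb(src, Immediate(kSmiTagMask))` + `zero`.
bool IsSmi(intptr_t word) { return (word & kSmiTagMask) == kSmiTag; }

// Counterpart of SmiToInteger64: an arithmetic shift discards the tag bits.
int64_t SmiToInt(intptr_t word) {
  return static_cast<int64_t>(word) >> kSmiShift;
}

// Tagging: place the 32-bit payload above the shift; the low bits stay zero,
// so IsSmi() holds by construction. Shift through uint64_t to keep the left
// shift well defined for negative values.
intptr_t IntToSmi(int32_t value) {
  return static_cast<intptr_t>(
      static_cast<uint64_t>(static_cast<int64_t>(value)) << kSmiShift);
}
```

On this layout a byte-wide test of the low bits is sufficient for the Smi check, which is why the macro assembler can use testb rather than a full-width test.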