| OLD | NEW |
| 1 // Copyright (c) 1994-2006 Sun Microsystems Inc. | 1 // Copyright (c) 1994-2006 Sun Microsystems Inc. |
| 2 // All Rights Reserved. | 2 // All Rights Reserved. |
| 3 // | 3 // |
| 4 // Redistribution and use in source and binary forms, with or without | 4 // Redistribution and use in source and binary forms, with or without |
| 5 // modification, are permitted provided that the following conditions | 5 // modification, are permitted provided that the following conditions |
| 6 // are met: | 6 // are met: |
| 7 // | 7 // |
| 8 // - Redistributions of source code must retain the above copyright notice, | 8 // - Redistributions of source code must retain the above copyright notice, |
| 9 // this list of conditions and the following disclaimer. | 9 // this list of conditions and the following disclaimer. |
| 10 // | 10 // |
| (...skipping 405 matching lines...) |
| 416 void Assembler::emit(Instr x) { | 416 void Assembler::emit(Instr x) { |
| 417 CheckBuffer(); | 417 CheckBuffer(); |
| 418 *reinterpret_cast<Instr*>(pc_) = x; | 418 *reinterpret_cast<Instr*>(pc_) = x; |
| 419 pc_ += kInstrSize; | 419 pc_ += kInstrSize; |
| 420 } | 420 } |
| 421 | 421 |
| 422 | 422 |
| 423 Address Assembler::target_address_from_return_address(Address pc) { | 423 Address Assembler::target_address_from_return_address(Address pc) { |
| 424 // Returns the address of the call target from the return address that will | 424 // Returns the address of the call target from the return address that will |
| 425 // be returned to after a call. | 425 // be returned to after a call. |
| 426 // Call sequence on V7 or later is : | 426 // Call sequence on V7 or later is: |
| 427 // movw ip, #... @ call address low 16 | 427 // movw ip, #... @ call address low 16 |
| 428 // movt ip, #... @ call address high 16 | 428 // movt ip, #... @ call address high 16 |
| 429 // blx ip | 429 // blx ip |
| 430 // @ return address | 430 // @ return address |
| 431 // Or pre-V7 or cases that need frequent patching, the address is in the | 431 // For V6 when the constant pool is unavailable, it is: |
| 432 // mov ip, #... @ call address low 8 |
| 433 // orr ip, ip, #... @ call address 2nd 8 |
| 434 // orr ip, ip, #... @ call address 3rd 8 |
| 435 // orr ip, ip, #... @ call address high 8 |
| 436 // blx ip |
| 437 // @ return address |
| 438 // In cases that need frequent patching, the address is in the |
| 432 // constant pool. It could be a small constant pool load: | 439 // constant pool. It could be a small constant pool load: |
| 433 // ldr ip, [pc / pp, #...] @ call address | 440 // ldr ip, [pc / pp, #...] @ call address |
| 434 // blx ip | 441 // blx ip |
| 435 // @ return address | 442 // @ return address |
| 436 // Or an extended constant pool load: | 443 // Or an extended constant pool load (ARMv7): |
| 437 // movw ip, #... | 444 // movw ip, #... |
| 438 // movt ip, #... | 445 // movt ip, #... |
| 439 // ldr ip, [pc, ip] @ call address | 446 // ldr ip, [pc, ip] @ call address |
| 440 // blx ip | 447 // blx ip |
| 441 // @ return address | 448 // @ return address |
| 449 // Or an extended constant pool load (ARMv6): |
| 450 // mov ip, #... |
| 451 // orr ip, ip, #... |
| 452 // orr ip, ip, #... |
| 453 // orr ip, ip, #... |
| 454 // ldr ip, [pc, ip] @ call address |
| 455 // blx ip |
| 456 // @ return address |
| 442 Address candidate = pc - 2 * Assembler::kInstrSize; | 457 Address candidate = pc - 2 * Assembler::kInstrSize; |
| 443 Instr candidate_instr(Memory::int32_at(candidate)); | 458 Instr candidate_instr(Memory::int32_at(candidate)); |
| 444 if (IsLdrPcImmediateOffset(candidate_instr) | | 459 if (IsLdrPcImmediateOffset(candidate_instr) | |
| 445 IsLdrPpImmediateOffset(candidate_instr)) { | 460 IsLdrPpImmediateOffset(candidate_instr)) { |
| 446 return candidate; | 461 return candidate; |
| 447 } else if (IsLdrPpRegOffset(candidate_instr)) { | |
| 448 candidate = pc - 4 * Assembler::kInstrSize; | |
| 449 DCHECK(IsMovW(Memory::int32_at(candidate)) && | |
| 450 IsMovT(Memory::int32_at(candidate + Assembler::kInstrSize))); | |
| 451 return candidate; | |
| 452 } else { | 462 } else { |
| 453 candidate = pc - 3 * Assembler::kInstrSize; | 463 if (IsLdrPpRegOffset(candidate_instr)) { |
| 454 DCHECK(IsMovW(Memory::int32_at(candidate)) && | 464 candidate -= Assembler::kInstrSize; |
| 455 IsMovT(Memory::int32_at(candidate + kInstrSize))); | 465 } |
| 466 if (CpuFeatures::IsSupported(ARMv7)) { |
| 467 candidate -= 1 * Assembler::kInstrSize; |
| 468 DCHECK(IsMovW(Memory::int32_at(candidate)) && |
| 469 IsMovT(Memory::int32_at(candidate + Assembler::kInstrSize))); |
| 470 } else { |
| 471 candidate -= 3 * Assembler::kInstrSize; |
| 472 DCHECK( |
| 473 IsMovImmed(Memory::int32_at(candidate)) && |
| 474 IsOrrImmed(Memory::int32_at(candidate + Assembler::kInstrSize)) && |
| 475 IsOrrImmed(Memory::int32_at(candidate + 2 * Assembler::kInstrSize)) && |
| 476 IsOrrImmed(Memory::int32_at(candidate + 3 * Assembler::kInstrSize))); |
| 477 } |
| 456 return candidate; | 478 return candidate; |
| 457 } | 479 } |
| 458 } | 480 } |
| 459 | 481 |
| 460 | 482 |
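Reviewer note (not part of the patch): the candidate adjustments above simply walk back over whichever of the five call sequences was emitted. Below is a minimal standalone sketch of that arithmetic, assuming kInstrSize is 4 as on ARM; CallSequenceLength and its parameters are hypothetical names, not V8 helpers. The same lengths drive return_address_from_call_start further down.

    // Length, in instructions, of each call sequence described in the
    // comment block above; all of them end in "blx ip".
    constexpr int kInstrSize = 4;

    int CallSequenceLength(bool armv7, bool loads_from_constant_pool,
                           bool extended_section) {
      if (!loads_from_constant_pool) {
        return armv7 ? 3   // movw, movt, blx
                     : 5;  // mov, orr, orr, orr, blx
      }
      if (!extended_section) {
        return 2;          // ldr ip, [pc/pp, #...]; blx ip
      }
      return armv7 ? 4     // movw, movt, ldr, blx
                   : 6;    // mov, orr, orr, orr, ldr, blx
    }

    // The load of the call target therefore starts at
    //   return_address - CallSequenceLength(...) * kInstrSize,
    // which matches the candidate adjustments above.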
| 461 Address Assembler::break_address_from_return_address(Address pc) { | 483 Address Assembler::break_address_from_return_address(Address pc) { |
| 462 return pc - Assembler::kPatchDebugBreakSlotReturnOffset; | 484 return pc - Assembler::kPatchDebugBreakSlotReturnOffset; |
| 463 } | 485 } |
| 464 | 486 |
| 465 | 487 |
| 466 Address Assembler::return_address_from_call_start(Address pc) { | 488 Address Assembler::return_address_from_call_start(Address pc) { |
| 467 if (IsLdrPcImmediateOffset(Memory::int32_at(pc)) | | 489 if (IsLdrPcImmediateOffset(Memory::int32_at(pc)) | |
| 468 IsLdrPpImmediateOffset(Memory::int32_at(pc))) { | 490 IsLdrPpImmediateOffset(Memory::int32_at(pc))) { |
| 469 // Load from constant pool, small section. | 491 // Load from constant pool, small section. |
| 470 return pc + kInstrSize * 2; | 492 return pc + kInstrSize * 2; |
| 471 } else { | 493 } else { |
| 472 DCHECK(IsMovW(Memory::int32_at(pc))); | 494 if (CpuFeatures::IsSupported(ARMv7)) { |
| 473 DCHECK(IsMovT(Memory::int32_at(pc + kInstrSize))); | 495 DCHECK(IsMovW(Memory::int32_at(pc))); |
| 474 if (IsLdrPpRegOffset(Memory::int32_at(pc + kInstrSize))) { | 496 DCHECK(IsMovT(Memory::int32_at(pc + kInstrSize))); |
| 475 // Load from constant pool, extended section. | 497 if (IsLdrPpRegOffset(Memory::int32_at(pc + 2 * kInstrSize))) { |
| 476 return pc + kInstrSize * 4; | 498 // Load from constant pool, extended section. |
| 499 return pc + kInstrSize * 4; |
| 500 } else { |
| 501 // A movw / movt load immediate. |
| 502 return pc + kInstrSize * 3; |
| 503 } |
| 477 } else { | 504 } else { |
| 478 // A movw / movt load immediate. | 505 DCHECK(IsMovImmed(Memory::int32_at(pc))); |
| 479 return pc + kInstrSize * 3; | 506 DCHECK(IsOrrImmed(Memory::int32_at(pc + kInstrSize))); |
| 507 DCHECK(IsOrrImmed(Memory::int32_at(pc + 2 * kInstrSize))); |
| 508 DCHECK(IsOrrImmed(Memory::int32_at(pc + 3 * kInstrSize))); |
| 509 if (IsLdrPpRegOffset(Memory::int32_at(pc + 4 * kInstrSize))) { |
| 510 // Load from constant pool, extended section. |
| 511 return pc + kInstrSize * 6; |
| 512 } else { |
| 513 // A mov / orr load immediate. |
| 514 return pc + kInstrSize * 5; |
| 515 } |
| 480 } | 516 } |
| 481 } | 517 } |
| 482 } | 518 } |
| 483 | 519 |
| 484 | 520 |
| 485 void Assembler::deserialization_set_special_target_at( | 521 void Assembler::deserialization_set_special_target_at( |
| 486 Address constant_pool_entry, Code* code, Address target) { | 522 Address constant_pool_entry, Code* code, Address target) { |
| 487 if (FLAG_enable_ool_constant_pool) { | 523 if (FLAG_enable_ool_constant_pool) { |
| 488 set_target_address_at(constant_pool_entry, code, target); | 524 set_target_address_at(constant_pool_entry, code, target); |
| 489 } else { | 525 } else { |
| 490 Memory::Address_at(constant_pool_entry) = target; | 526 Memory::Address_at(constant_pool_entry) = target; |
| 491 } | 527 } |
| 492 } | 528 } |
| 493 | 529 |
| 494 | 530 |
| 495 bool Assembler::is_constant_pool_load(Address pc) { | 531 bool Assembler::is_constant_pool_load(Address pc) { |
| 496 return !Assembler::IsMovW(Memory::int32_at(pc)) || | 532 if (CpuFeatures::IsSupported(ARMv7)) { |
| 497 (FLAG_enable_ool_constant_pool && | 533 return !Assembler::IsMovW(Memory::int32_at(pc)) || |
| 498 Assembler::IsLdrPpRegOffset( | 534 (FLAG_enable_ool_constant_pool && |
| 499 Memory::int32_at(pc + 2 * Assembler::kInstrSize))); | 535 Assembler::IsLdrPpRegOffset( |
| 536 Memory::int32_at(pc + 2 * Assembler::kInstrSize))); |
| 537 } else { |
| 538 return !Assembler::IsMovImmed(Memory::int32_at(pc)) || |
| 539 (FLAG_enable_ool_constant_pool && |
| 540 Assembler::IsLdrPpRegOffset( |
| 541 Memory::int32_at(pc + 4 * Assembler::kInstrSize))); |
| 542 } |
| 500 } | 543 } |
| 501 | 544 |
| 502 | 545 |
| 503 Address Assembler::constant_pool_entry_address( | 546 Address Assembler::constant_pool_entry_address( |
| 504 Address pc, ConstantPoolArray* constant_pool) { | 547 Address pc, ConstantPoolArray* constant_pool) { |
| 505 if (FLAG_enable_ool_constant_pool) { | 548 if (FLAG_enable_ool_constant_pool) { |
| 506 DCHECK(constant_pool != NULL); | 549 DCHECK(constant_pool != NULL); |
| 507 int cp_offset; | 550 int cp_offset; |
| 508 if (IsMovW(Memory::int32_at(pc))) { | 551 if (!CpuFeatures::IsSupported(ARMv7) && IsMovImmed(Memory::int32_at(pc))) { |
| 552 DCHECK(IsOrrImmed(Memory::int32_at(pc + kInstrSize)) && |
| 553 IsOrrImmed(Memory::int32_at(pc + 2 * kInstrSize)) && |
| 554 IsOrrImmed(Memory::int32_at(pc + 3 * kInstrSize)) && |
| 555 IsLdrPpRegOffset(Memory::int32_at(pc + 4 * kInstrSize))); |
| 556 // This is an extended constant pool lookup (ARMv6). |
| 557 Instr mov_instr = instr_at(pc); |
| 558 Instr orr_instr_1 = instr_at(pc + kInstrSize); |
| 559 Instr orr_instr_2 = instr_at(pc + 2 * kInstrSize); |
| 560 Instr orr_instr_3 = instr_at(pc + 3 * kInstrSize); |
| 561 cp_offset = DecodeShiftImm(mov_instr) | DecodeShiftImm(orr_instr_1) | |
| 562 DecodeShiftImm(orr_instr_2) | DecodeShiftImm(orr_instr_3); |
| 563 } else if (IsMovW(Memory::int32_at(pc))) { |
| 509 DCHECK(IsMovT(Memory::int32_at(pc + kInstrSize)) && | 564 DCHECK(IsMovT(Memory::int32_at(pc + kInstrSize)) && |
| 510 IsLdrPpRegOffset(Memory::int32_at(pc + 2 * kInstrSize))); | 565 IsLdrPpRegOffset(Memory::int32_at(pc + 2 * kInstrSize))); |
| 511 // This is an extended constant pool lookup. | 566 // This is an extended constant pool lookup (ARMv7). |
| 512 Instruction* movw_instr = Instruction::At(pc); | 567 Instruction* movw_instr = Instruction::At(pc); |
| 513 Instruction* movt_instr = Instruction::At(pc + kInstrSize); | 568 Instruction* movt_instr = Instruction::At(pc + kInstrSize); |
| 514 cp_offset = (movt_instr->ImmedMovwMovtValue() << 16) | | 569 cp_offset = (movt_instr->ImmedMovwMovtValue() << 16) | |
| 515 movw_instr->ImmedMovwMovtValue(); | 570 movw_instr->ImmedMovwMovtValue(); |
| 516 } else { | 571 } else { |
| 517 // This is a small constant pool lookup. | 572 // This is a small constant pool lookup. |
| 518 DCHECK(Assembler::IsLdrPpImmediateOffset(Memory::int32_at(pc))); | 573 DCHECK(Assembler::IsLdrPpImmediateOffset(Memory::int32_at(pc))); |
| 519 cp_offset = GetLdrRegisterImmediateOffset(Memory::int32_at(pc)); | 574 cp_offset = GetLdrRegisterImmediateOffset(Memory::int32_at(pc)); |
| 520 } | 575 } |
| 521 return reinterpret_cast<Address>(constant_pool) + cp_offset; | 576 return reinterpret_cast<Address>(constant_pool) + cp_offset; |
| 522 } else { | 577 } else { |
| 523 DCHECK(Assembler::IsLdrPcImmediateOffset(Memory::int32_at(pc))); | 578 DCHECK(Assembler::IsLdrPcImmediateOffset(Memory::int32_at(pc))); |
| 524 Instr instr = Memory::int32_at(pc); | 579 Instr instr = Memory::int32_at(pc); |
| 525 return pc + GetLdrRegisterImmediateOffset(instr) + kPcLoadDelta; | 580 return pc + GetLdrRegisterImmediateOffset(instr) + kPcLoadDelta; |
| 526 } | 581 } |
| 527 } | 582 } |
| 528 | 583 |
| 529 | 584 |
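Reviewer sketch of the two offset reconstructions: constant_pool_entry_address above and target_address_at below both rebuild a 32-bit value either from a movw/movt pair (ARMv7) or from four already-decoded 8-bit immediates (ARMv6). The helpers below are hypothetical stand-ins for ImmedMovwMovtValue() and the OR of DecodeShiftImm() results; only the arithmetic is shown.

    #include <cstdint>

    // ARMv7: movw carries the low 16 bits, movt the high 16 bits.
    // FromMovwMovt(0xBEEF, 0xDEAD) == 0xDEADBEEF.
    uint32_t FromMovwMovt(uint32_t movw_imm16, uint32_t movt_imm16) {
      return (movt_imm16 << 16) | movw_imm16;
    }

    // ARMv6: mov and each orr contribute one byte that decodes into its final
    // position (e.g. 0x000000EF, 0x0000BE00, 0x00AD0000, 0xDE000000), so a
    // plain OR of the four decoded immediates rebuilds the word.
    uint32_t FromMovOrrOrrOrr(uint32_t imm0, uint32_t imm1,
                              uint32_t imm2, uint32_t imm3) {
      return imm0 | imm1 | imm2 | imm3;
    }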
| 530 Address Assembler::target_address_at(Address pc, | 585 Address Assembler::target_address_at(Address pc, |
| 531 ConstantPoolArray* constant_pool) { | 586 ConstantPoolArray* constant_pool) { |
| 532 if (is_constant_pool_load(pc)) { | 587 if (is_constant_pool_load(pc)) { |
| 533 // This is a constant pool lookup. Return the value in the constant pool. | 588 // This is a constant pool lookup. Return the value in the constant pool. |
| 534 return Memory::Address_at(constant_pool_entry_address(pc, constant_pool)); | 589 return Memory::Address_at(constant_pool_entry_address(pc, constant_pool)); |
| 535 } else { | 590 } else if (CpuFeatures::IsSupported(ARMv7)) { |
| 536 // This is an movw_movt immediate load. Return the immediate. | 591 // This is an movw / movt immediate load. Return the immediate. |
| 537 DCHECK(IsMovW(Memory::int32_at(pc)) && | 592 DCHECK(IsMovW(Memory::int32_at(pc)) && |
| 538 IsMovT(Memory::int32_at(pc + kInstrSize))); | 593 IsMovT(Memory::int32_at(pc + kInstrSize))); |
| 539 Instruction* movw_instr = Instruction::At(pc); | 594 Instruction* movw_instr = Instruction::At(pc); |
| 540 Instruction* movt_instr = Instruction::At(pc + kInstrSize); | 595 Instruction* movt_instr = Instruction::At(pc + kInstrSize); |
| 541 return reinterpret_cast<Address>( | 596 return reinterpret_cast<Address>( |
| 542 (movt_instr->ImmedMovwMovtValue() << 16) | | 597 (movt_instr->ImmedMovwMovtValue() << 16) | |
| 543 movw_instr->ImmedMovwMovtValue()); | 598 movw_instr->ImmedMovwMovtValue()); |
| 599 } else { |
| 600 // This is an mov / orr immediate load. Return the immediate. |
| 601 DCHECK(IsMovImmed(Memory::int32_at(pc)) && |
| 602 IsOrrImmed(Memory::int32_at(pc + kInstrSize)) && |
| 603 IsOrrImmed(Memory::int32_at(pc + 2 * kInstrSize)) && |
| 604 IsOrrImmed(Memory::int32_at(pc + 3 * kInstrSize))); |
| 605 Instr mov_instr = instr_at(pc); |
| 606 Instr orr_instr_1 = instr_at(pc + kInstrSize); |
| 607 Instr orr_instr_2 = instr_at(pc + 2 * kInstrSize); |
| 608 Instr orr_instr_3 = instr_at(pc + 3 * kInstrSize); |
| 609 Address ret = reinterpret_cast<Address>( |
| 610 DecodeShiftImm(mov_instr) | DecodeShiftImm(orr_instr_1) | |
| 611 DecodeShiftImm(orr_instr_2) | DecodeShiftImm(orr_instr_3)); |
| 612 return ret; |
| 544 } | 613 } |
| 545 } | 614 } |
| 546 | 615 |
| 547 | 616 |
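Background sketch, not from the patch: the mov/orr scheme relies on ARM's data-processing immediate encoding, in which an operand is an 8-bit value rotated right by an even amount, so every byte-aligned chunk such as target & (kImm8Mask << 16) is directly encodable. The checker below is a hypothetical illustration of that rule, not a V8 helper.

    #include <cstdint>

    // True if 'value' fits ARM's rotated 8-bit immediate encoding.
    bool FitsArmImmediate(uint32_t value) {
      for (int rot = 0; rot < 32; rot += 2) {
        // Undo a right-rotation by 'rot' with a left rotation, then see
        // whether what remains fits in 8 bits.
        uint32_t rotated = (value << rot) | (value >> ((32 - rot) & 31));
        if (rotated <= 0xFF) return true;
      }
      return false;
    }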
| 548 void Assembler::set_target_address_at(Address pc, | 617 void Assembler::set_target_address_at(Address pc, |
| 549 ConstantPoolArray* constant_pool, | 618 ConstantPoolArray* constant_pool, |
| 550 Address target, | 619 Address target, |
| 551 ICacheFlushMode icache_flush_mode) { | 620 ICacheFlushMode icache_flush_mode) { |
| 552 if (is_constant_pool_load(pc)) { | 621 if (is_constant_pool_load(pc)) { |
| 553 // This is a constant pool lookup. Update the entry in the constant pool. | 622 // This is a constant pool lookup. Update the entry in the constant pool. |
| 554 Memory::Address_at(constant_pool_entry_address(pc, constant_pool)) = target; | 623 Memory::Address_at(constant_pool_entry_address(pc, constant_pool)) = target; |
| 555 // Intuitively, we would think it is necessary to always flush the | 624 // Intuitively, we would think it is necessary to always flush the |
| 556 // instruction cache after patching a target address in the code as follows: | 625 // instruction cache after patching a target address in the code as follows: |
| 557 // CpuFeatures::FlushICache(pc, sizeof(target)); | 626 // CpuFeatures::FlushICache(pc, sizeof(target)); |
| 558 // However, on ARM, no instruction is actually patched in the case | 627 // However, on ARM, no instruction is actually patched in the case |
| 559 // of embedded constants of the form: | 628 // of embedded constants of the form: |
| 560 // ldr ip, [pp, #...] | 629 // ldr ip, [pp, #...] |
| 561 // since the instruction accessing this address in the constant pool remains | 630 // since the instruction accessing this address in the constant pool remains |
| 562 // unchanged. | 631 // unchanged. |
| 563 } else { | 632 } else if (CpuFeatures::IsSupported(ARMv7)) { |
| 564 // This is an movw_movt immediate load. Patch the immediate embedded in the | 633 // This is an movw / movt immediate load. Patch the immediate embedded in |
| 565 // instructions. | 634 // the instructions. |
| 566 DCHECK(IsMovW(Memory::int32_at(pc))); | 635 DCHECK(IsMovW(Memory::int32_at(pc))); |
| 567 DCHECK(IsMovT(Memory::int32_at(pc + kInstrSize))); | 636 DCHECK(IsMovT(Memory::int32_at(pc + kInstrSize))); |
| 568 uint32_t* instr_ptr = reinterpret_cast<uint32_t*>(pc); | 637 uint32_t* instr_ptr = reinterpret_cast<uint32_t*>(pc); |
| 569 uint32_t immediate = reinterpret_cast<uint32_t>(target); | 638 uint32_t immediate = reinterpret_cast<uint32_t>(target); |
| 570 instr_ptr[0] = PatchMovwImmediate(instr_ptr[0], immediate & 0xFFFF); | 639 instr_ptr[0] = PatchMovwImmediate(instr_ptr[0], immediate & 0xFFFF); |
| 571 instr_ptr[1] = PatchMovwImmediate(instr_ptr[1], immediate >> 16); | 640 instr_ptr[1] = PatchMovwImmediate(instr_ptr[1], immediate >> 16); |
| 572 DCHECK(IsMovW(Memory::int32_at(pc))); | 641 DCHECK(IsMovW(Memory::int32_at(pc))); |
| 573 DCHECK(IsMovT(Memory::int32_at(pc + kInstrSize))); | 642 DCHECK(IsMovT(Memory::int32_at(pc + kInstrSize))); |
| 574 if (icache_flush_mode != SKIP_ICACHE_FLUSH) { | 643 if (icache_flush_mode != SKIP_ICACHE_FLUSH) { |
| 575 CpuFeatures::FlushICache(pc, 2 * kInstrSize); | 644 CpuFeatures::FlushICache(pc, 2 * kInstrSize); |
| 576 } | 645 } |
| 646 } else { |
| 647 // This is an mov / orr immediate load. Patch the immediate embedded in |
| 648 // the instructions. |
| 649 DCHECK(IsMovImmed(Memory::int32_at(pc)) && |
| 650 IsOrrImmed(Memory::int32_at(pc + kInstrSize)) && |
| 651 IsOrrImmed(Memory::int32_at(pc + 2 * kInstrSize)) && |
| 652 IsOrrImmed(Memory::int32_at(pc + 3 * kInstrSize))); |
| 653 uint32_t* instr_ptr = reinterpret_cast<uint32_t*>(pc); |
| 654 uint32_t immediate = reinterpret_cast<uint32_t>(target); |
| 655 instr_ptr[0] = PatchShiftImm(instr_ptr[0], immediate & kImm8Mask); |
| 656 instr_ptr[1] = PatchShiftImm(instr_ptr[1], immediate & (kImm8Mask << 8)); |
| 657 instr_ptr[2] = PatchShiftImm(instr_ptr[2], immediate & (kImm8Mask << 16)); |
| 658 instr_ptr[3] = PatchShiftImm(instr_ptr[3], immediate & (kImm8Mask << 24)); |
| 659 DCHECK(IsMovImmed(Memory::int32_at(pc)) && |
| 660 IsOrrImmed(Memory::int32_at(pc + kInstrSize)) && |
| 661 IsOrrImmed(Memory::int32_at(pc + 2 * kInstrSize)) && |
| 662 IsOrrImmed(Memory::int32_at(pc + 3 * kInstrSize))); |
| 663 if (icache_flush_mode != SKIP_ICACHE_FLUSH) { |
| 664 CpuFeatures::FlushICache(pc, 4 * kInstrSize); |
| 665 } |
| 577 } | 666 } |
| 578 } | 667 } |
| 579 | 668 |
| 580 | 669 |
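Reviewer sketch of the V6 patching path in set_target_address_at above, assuming kImm8Mask is 0xFF as its use suggests; SplitForMovOrr is a hypothetical helper, and the real PatchShiftImm additionally re-encodes the rotate field of each instruction.

    #include <array>
    #include <cstdint>

    constexpr uint32_t kImm8Mask = 0xFF;  // assumed, matching its use above
    constexpr int kInstrSize = 4;

    // The four immediates written into mov / orr / orr / orr: each chunk is a
    // single byte left in its final position.
    std::array<uint32_t, 4> SplitForMovOrr(uint32_t target) {
      return {target & kImm8Mask,
              target & (kImm8Mask << 8),
              target & (kImm8Mask << 16),
              target & (kImm8Mask << 24)};
    }

    // Four instructions are rewritten instead of two, so the icache flush has
    // to cover 4 * kInstrSize bytes starting at pc.
    constexpr int kV6PatchFlushSize = 4 * kInstrSize;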
| 581 } } // namespace v8::internal | 670 } } // namespace v8::internal |
| 582 | 671 |
| 583 #endif // V8_ARM_ASSEMBLER_ARM_INL_H_ | 672 #endif // V8_ARM_ASSEMBLER_ARM_INL_H_ |