OLD | NEW |
1 // Copyright (c) 1994-2006 Sun Microsystems Inc. | 1 // Copyright (c) 1994-2006 Sun Microsystems Inc. |
2 // All Rights Reserved. | 2 // All Rights Reserved. |
3 // | 3 // |
4 // Redistribution and use in source and binary forms, with or without | 4 // Redistribution and use in source and binary forms, with or without |
5 // modification, are permitted provided that the following conditions | 5 // modification, are permitted provided that the following conditions |
6 // are met: | 6 // are met: |
7 // | 7 // |
8 // - Redistributions of source code must retain the above copyright notice, | 8 // - Redistributions of source code must retain the above copyright notice, |
9 // this list of conditions and the following disclaimer. | 9 // this list of conditions and the following disclaimer. |
10 // | 10 // |
(...skipping 124 matching lines...)
135 "d1", "d2", "d3", "d4", "d5", "d6", "d7", "d8", "d9", "d10", | 135 "d1", "d2", "d3", "d4", "d5", "d6", "d7", "d8", "d9", "d10", |
136 "d11", "d12", "d15", "d16", "d17", "d18", "d19", "d20", "d21", "d22", | 136 "d11", "d12", "d15", "d16", "d17", "d18", "d19", "d20", "d21", "d22", |
137 "d23", "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31"}; | 137 "d23", "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31"}; |
138 return names[index]; | 138 return names[index]; |
139 } | 139 } |
140 | 140 |
141 | 141 |
142 // ----------------------------------------------------------------------------- | 142 // ----------------------------------------------------------------------------- |
143 // Implementation of RelocInfo | 143 // Implementation of RelocInfo |
144 | 144 |
145 const int RelocInfo::kApplyMask = 1 << RelocInfo::INTERNAL_REFERENCE; | 145 const int RelocInfo::kApplyMask = 1 << RelocInfo::INTERNAL_REFERENCE | |
| 146 1 << RelocInfo::INTERNAL_REFERENCE_ENCODED; |
146 | 147 |
147 | 148 |
148 bool RelocInfo::IsCodedSpecially() { | 149 bool RelocInfo::IsCodedSpecially() { |
149 // The deserializer needs to know whether a pointer is specially | 150 // The deserializer needs to know whether a pointer is specially |
150 // coded. Being specially coded on PPC means that it is a lis/ori | 151 // coded. Being specially coded on PPC means that it is a lis/ori |
151 // instruction sequence or is an out of line constant pool entry, | 152 // instruction sequence or is an out of line constant pool entry, |
152 // and this is always the case inside code objects. | 153 // and this is always the case inside code objects. |
153 return true; | 154 return true; |
154 } | 155 } |
155 | 156 |
(...skipping 238 matching lines...)
394 // | 395 // |
395 // Linked labels refer to unknown positions in the code | 396 // Linked labels refer to unknown positions in the code |
396 // to be generated; pos() is the position of the last | 397 // to be generated; pos() is the position of the last |
397 // instruction using the label. | 398 // instruction using the label. |
398 | 399 |
399 | 400 |
400 // The link chain is terminated by a negative code position (must be aligned) | 401 // The link chain is terminated by a negative code position (must be aligned) |
401 const int kEndOfChain = -4; | 402 const int kEndOfChain = -4; |
402 | 403 |
403 | 404 |
| 405 // Dummy opcodes for unbound label mov instructions or jump table entries. |
| 406 enum { |
| 407 kUnboundMovLabelOffsetOpcode = 0 << 26, |
| 408 kUnboundMovLabelAddrOpcode = 1 << 26, |
| 409 kUnboundJumpTableEntryOpcode = 2 << 26 |
| 410 }; |
| 411 |
| 412 |
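The three dummy opcodes above sit in the primary (top six) opcode bits, leaving a signed 26-bit field for the link. Positions are word aligned, so the emitters later in this file store the link as a word offset (link >> 2) and target_at() restores it by sign-extending and shifting back. A minimal standalone sketch of that round trip, with mask values assumed to match src/ppc/constants-ppc.h:

#include <cassert>
#include <cstdint>
#include <cstdio>

const uint32_t kImm26Mask = (1u << 26) - 1;              // assumed mask value
const uint32_t kUnboundMovLabelOffsetOpcode = 0u << 26;  // as defined above

// Pack a byte offset into the 26-bit field, as mov_label_offset() does.
uint32_t EncodeUnbound(uint32_t dummy_opcode, int link_in_bytes) {
  assert((link_in_bytes & 3) == 0);  // positions are word aligned
  return dummy_opcode |
         (static_cast<uint32_t>(link_in_bytes >> 2) & kImm26Mask);
}

// Unpack it again, mirroring the kUnbound* cases in target_at().
int DecodeUnbound(uint32_t instr) {
  int link = static_cast<int32_t>((instr & kImm26Mask) << 6) >> 6;  // SIGN_EXT_IMM26
  return link << 2;
}

int main() {
  uint32_t instr = EncodeUnbound(kUnboundMovLabelOffsetOpcode, -64);
  printf("%d\n", DecodeUnbound(instr));  // prints -64
}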
404 int Assembler::target_at(int pos) { | 413 int Assembler::target_at(int pos) { |
405 Instr instr = instr_at(pos); | 414 Instr instr = instr_at(pos); |
406 // check which type of branch this is: 16 or 26 bit offset | 415 // check which type of branch this is: 16 or 26 bit offset |
407 int opcode = instr & kOpcodeMask; | 416 int opcode = instr & kOpcodeMask; |
408 if (BX == opcode) { | 417 int link; |
409 int imm26 = ((instr & kImm26Mask) << 6) >> 6; | 418 switch (opcode) { |
410 imm26 &= ~(kAAMask | kLKMask); // discard AA|LK bits if present | 419 case BX: |
411 if (imm26 == 0) return kEndOfChain; | 420 link = SIGN_EXT_IMM26(instr & kImm26Mask); |
412 return pos + imm26; | 421 link &= ~(kAAMask | kLKMask); // discard AA|LK bits if present |
413 } else if (BCX == opcode) { | 422 break; |
414 int imm16 = SIGN_EXT_IMM16((instr & kImm16Mask)); | 423 case BCX: |
415 imm16 &= ~(kAAMask | kLKMask); // discard AA|LK bits if present | 424 link = SIGN_EXT_IMM16((instr & kImm16Mask)); |
416 if (imm16 == 0) return kEndOfChain; | 425 link &= ~(kAAMask | kLKMask); // discard AA|LK bits if present |
417 return pos + imm16; | 426 break; |
418 } else if ((instr & ~kImm26Mask) == 0) { | 427 case kUnboundMovLabelOffsetOpcode: |
419 // Emitted link to a label, not part of a branch (regexp PushBacktrack). | 428 case kUnboundMovLabelAddrOpcode: |
420 if (instr == 0) { | 429 case kUnboundJumpTableEntryOpcode: |
421 return kEndOfChain; | 430 link = SIGN_EXT_IMM26(instr & kImm26Mask); |
422 } else { | 431 link <<= 2; |
423 int32_t imm26 = SIGN_EXT_IMM26(instr); | 432 break; |
424 return (imm26 + pos); | 433 default: |
425 } | 434 DCHECK(false); |
| 435 return -1; |
426 } | 436 } |
427 | 437 |
428 DCHECK(false); | 438 if (link == 0) return kEndOfChain; |
429 return -1; | 439 return pos + link; |
430 } | 440 } |
431 | 441 |
432 | 442 |
433 void Assembler::target_at_put(int pos, int target_pos) { | 443 void Assembler::target_at_put(int pos, int target_pos) { |
434 Instr instr = instr_at(pos); | 444 Instr instr = instr_at(pos); |
435 int opcode = instr & kOpcodeMask; | 445 int opcode = instr & kOpcodeMask; |
436 | 446 |
437 // check which type of branch this is 16 or 26 bit offset | 447 switch (opcode) { |
438 if (BX == opcode) { | 448 case BX: { |
439 int imm26 = target_pos - pos; | 449 int imm26 = target_pos - pos; |
440 DCHECK(is_int26(imm26) && (imm26 & (kAAMask | kLKMask)) == 0); | 450 DCHECK(is_int26(imm26) && (imm26 & (kAAMask | kLKMask)) == 0); |
441 if (imm26 == kInstrSize && !(instr & kLKMask)) { | 451 if (imm26 == kInstrSize && !(instr & kLKMask)) { |
442 // Branch to next instr without link. | 452 // Branch to next instr without link. |
443 instr = ORI; // nop: ori, 0,0,0 | 453 instr = ORI; // nop: ori, 0,0,0 |
444 } else { | 454 } else { |
445 instr &= ((~kImm26Mask) | kAAMask | kLKMask); | 455 instr &= ((~kImm26Mask) | kAAMask | kLKMask); |
446 instr |= (imm26 & kImm26Mask); | 456 instr |= (imm26 & kImm26Mask); |
| 457 } |
| 458 instr_at_put(pos, instr); |
| 459 break; |
447 } | 460 } |
448 instr_at_put(pos, instr); | 461 case BCX: { |
449 return; | 462 int imm16 = target_pos - pos; |
450 } else if (BCX == opcode) { | 463 DCHECK(is_int16(imm16) && (imm16 & (kAAMask | kLKMask)) == 0); |
451 int imm16 = target_pos - pos; | 464 if (imm16 == kInstrSize && !(instr & kLKMask)) { |
452 DCHECK(is_int16(imm16) && (imm16 & (kAAMask | kLKMask)) == 0); | 465 // Branch to next instr without link. |
453 if (imm16 == kInstrSize && !(instr & kLKMask)) { | 466 instr = ORI; // nop: ori, 0,0,0 |
454 // Branch to next instr without link. | 467 } else { |
455 instr = ORI; // nop: ori, 0,0,0 | 468 instr &= ((~kImm16Mask) | kAAMask | kLKMask); |
456 } else { | 469 instr |= (imm16 & kImm16Mask); |
457 instr &= ((~kImm16Mask) | kAAMask | kLKMask); | 470 } |
458 instr |= (imm16 & kImm16Mask); | 471 instr_at_put(pos, instr); |
| 472 break; |
459 } | 473 } |
460 instr_at_put(pos, instr); | 474 case kUnboundMovLabelOffsetOpcode: { |
461 return; | 475 // Load the position of the label relative to the generated code object |
462 } else if ((instr & ~kImm26Mask) == 0) { | 476 // pointer in a register. |
463 DCHECK(target_pos == kEndOfChain || target_pos >= 0); | 477 Register dst = Register::from_code(instr_at(pos + kInstrSize)); |
464 // Emitted link to a label, not part of a branch (regexp PushBacktrack). | 478 int32_t offset = target_pos + (Code::kHeaderSize - kHeapObjectTag); |
465 // Load the position of the label relative to the generated code object | 479 CodePatcher patcher(reinterpret_cast<byte*>(buffer_ + pos), 2, |
466 // pointer in a register. | 480 CodePatcher::DONT_FLUSH); |
467 | 481 patcher.masm()->bitwise_mov32(dst, offset); |
468 Register dst = r3; // we assume r3 for now | 482 break; |
469 DCHECK(IsNop(instr_at(pos + kInstrSize))); | 483 } |
470 uint32_t target = target_pos + (Code::kHeaderSize - kHeapObjectTag); | 484 case kUnboundMovLabelAddrOpcode: { |
471 CodePatcher patcher(reinterpret_cast<byte*>(buffer_ + pos), 2, | 485 // Load the address of the label in a register. |
472 CodePatcher::DONT_FLUSH); | 486 Register dst = Register::from_code(instr_at(pos + kInstrSize)); |
473 int target_hi = static_cast<int>(target) >> 16; | 487 intptr_t addr = reinterpret_cast<uintptr_t>(buffer_ + target_pos); |
474 int target_lo = static_cast<int>(target) & 0XFFFF; | 488 CodePatcher patcher(reinterpret_cast<byte*>(buffer_ + pos), |
475 | 489 kMovInstructionsNoConstantPool, |
476 patcher.masm()->lis(dst, Operand(SIGN_EXT_IMM16(target_hi))); | 490 CodePatcher::DONT_FLUSH); |
477 patcher.masm()->ori(dst, dst, Operand(target_lo)); | 491 AddBoundInternalReferenceLoad(pos); |
478 return; | 492 patcher.masm()->bitwise_mov(dst, addr); |
| 493 break; |
| 494 } |
| 495 case kUnboundJumpTableEntryOpcode: { |
| 496 intptr_t addr = reinterpret_cast<uintptr_t>(buffer_ + target_pos); |
| 497 CodePatcher patcher(reinterpret_cast<byte*>(buffer_ + pos), |
| 498 kPointerSize / kInstrSize, CodePatcher::DONT_FLUSH); |
| 499 AddBoundInternalReference(pos); |
| 500 patcher.masm()->emit_ptr(addr); |
| 501 break; |
| 502 } |
| 503 default: |
| 504 DCHECK(false); |
| 505 break; |
479 } | 506 } |
480 | |
481 DCHECK(false); | |
482 } | 507 } |
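In the bound case, the BX and BCX arms above splice a freshly computed PC-relative displacement into the instruction while preserving the opcode and the AA/LK bits. A minimal sketch of the BX arm under assumed mask values (AA and LK are the low two bits of a PPC I-form branch):

#include <cassert>
#include <cstdint>

const uint32_t kImm26Mask = (1u << 26) - 1;
const uint32_t kAAMask = 1u << 1;  // absolute-address bit
const uint32_t kLKMask = 1u << 0;  // link (save return address) bit

uint32_t PatchBX(uint32_t instr, int pos, int target_pos) {
  int imm26 = target_pos - pos;                // PC-relative displacement
  assert((imm26 & 3) == 0);                    // word aligned
  instr &= (~kImm26Mask) | kAAMask | kLKMask;  // keep opcode, AA, LK
  return instr | (static_cast<uint32_t>(imm26) & kImm26Mask);
}

int main() {
  uint32_t b = 18u << 26;  // 'b' uses primary opcode 18 (assumed)
  assert((PatchBX(b, 0, 8) & kImm26Mask) == 8);
}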
483 | 508 |
484 | 509 |
485 int Assembler::max_reach_from(int pos) { | 510 int Assembler::max_reach_from(int pos) { |
486 Instr instr = instr_at(pos); | 511 Instr instr = instr_at(pos); |
487 int opcode = instr & kOpcodeMask; | 512 int opcode = instr & kOpcodeMask; |
488 | 513 |
489 // check which type of branch this is: 16 or 26 bit offset | 514 // check which type of branch this is: 16 or 26 bit offset |
490 if (BX == opcode) { | 515 switch (opcode) { |
491 return 26; | 516 case BX: |
492 } else if (BCX == opcode) { | 517 return 26; |
493 return 16; | 518 case BCX: |
494 } else if ((instr & ~kImm26Mask) == 0) { | 519 return 16; |
495 // Emitted label constant, not part of a branch (regexp PushBacktrack). | 520 case kUnboundMovLabelOffsetOpcode: |
496 return 26; | 521 case kUnboundMovLabelAddrOpcode: |
| 522 case kUnboundJumpTableEntryOpcode: |
| 523 return 0; // no limit on reach |
497 } | 524 } |
498 | 525 |
499 DCHECK(false); | 526 DCHECK(false); |
500 return 0; | 527 return 0; |
501 } | 528 } |
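bind_to() feeds the returned reach into is_intn(): offsets that fit the field are patched directly, anything wider is routed through a trampoline slot, and the kUnbound* placeholders report 0 (no limit) because they are rewritten as full mov sequences. A quick standalone check of what those limits mean, using the same predicate shape as V8's is_intn (assumed):

#include <cstdio>

bool is_intn(long long x, int n) {
  return -(1LL << (n - 1)) <= x && x < (1LL << (n - 1));
}

int main() {
  printf("%d\n", is_intn(0x1FFFFFF, 26));  // 1: within +/-32MB, BX reaches it
  printf("%d\n", is_intn(0x2000000, 26));  // 0: needs a trampoline slot
  printf("%d\n", is_intn(-32768, 16));     // 1: within +/-32KB, BCX reaches it
}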
502 | 529 |
503 | 530 |
504 void Assembler::bind_to(Label* L, int pos) { | 531 void Assembler::bind_to(Label* L, int pos) { |
505 DCHECK(0 <= pos && pos <= pc_offset()); // must have a valid binding position | 532 DCHECK(0 <= pos && pos <= pc_offset()); // must have a valid binding position |
506 int32_t trampoline_pos = kInvalidSlotPos; | 533 int32_t trampoline_pos = kInvalidSlotPos; |
507 if (L->is_linked() && !trampoline_emitted_) { | 534 if (L->is_linked() && !trampoline_emitted_) { |
508 unbound_labels_count_--; | 535 unbound_labels_count_--; |
509 next_buffer_check_ += kTrampolineSlotsSize; | 536 next_buffer_check_ += kTrampolineSlotsSize; |
510 } | 537 } |
511 | 538 |
512 while (L->is_linked()) { | 539 while (L->is_linked()) { |
513 int fixup_pos = L->pos(); | 540 int fixup_pos = L->pos(); |
514 int32_t offset = pos - fixup_pos; | 541 int32_t offset = pos - fixup_pos; |
515 int maxReach = max_reach_from(fixup_pos); | 542 int maxReach = max_reach_from(fixup_pos); |
516 next(L); // call next before overwriting link with target at fixup_pos | 543 next(L); // call next before overwriting link with target at fixup_pos |
517 if (is_intn(offset, maxReach) == false) { | 544 if (maxReach && is_intn(offset, maxReach) == false) { |
518 if (trampoline_pos == kInvalidSlotPos) { | 545 if (trampoline_pos == kInvalidSlotPos) { |
519 trampoline_pos = get_trampoline_entry(); | 546 trampoline_pos = get_trampoline_entry(); |
520 CHECK(trampoline_pos != kInvalidSlotPos); | 547 CHECK(trampoline_pos != kInvalidSlotPos); |
521 target_at_put(trampoline_pos, pos); | 548 target_at_put(trampoline_pos, pos); |
522 } | 549 } |
523 target_at_put(fixup_pos, trampoline_pos); | 550 target_at_put(fixup_pos, trampoline_pos); |
524 } else { | 551 } else { |
525 target_at_put(fixup_pos, pos); | 552 target_at_put(fixup_pos, pos); |
526 } | 553 } |
527 } | 554 } |
(...skipping 101 matching lines...)
629 trampoline_entry = trampoline_.take_slot(); | 656 trampoline_entry = trampoline_.take_slot(); |
630 | 657 |
631 if (kInvalidSlotPos == trampoline_entry) { | 658 if (kInvalidSlotPos == trampoline_entry) { |
632 internal_trampoline_exception_ = true; | 659 internal_trampoline_exception_ = true; |
633 } | 660 } |
634 } | 661 } |
635 return trampoline_entry; | 662 return trampoline_entry; |
636 } | 663 } |
637 | 664 |
638 | 665 |
639 int Assembler::branch_offset(Label* L, bool jump_elimination_allowed) { | 666 int Assembler::link(Label* L) { |
640 int target_pos; | 667 int position; |
641 if (L->is_bound()) { | 668 if (L->is_bound()) { |
642 target_pos = L->pos(); | 669 position = L->pos(); |
643 } else { | 670 } else { |
644 if (L->is_linked()) { | 671 if (L->is_linked()) { |
645 target_pos = L->pos(); // L's link | 672 position = L->pos(); // L's link |
646 } else { | 673 } else { |
647 // was: target_pos = kEndOfChain; | 674 // was: target_pos = kEndOfChain; |
648 // However, using branch to self to mark the first reference | 675 // However, using a self-link to mark the first reference |
649 // should avoid most instances of branch offset overflow. See | 676 // should avoid most instances of branch offset overflow. See |
650 // target_at() for where this is converted back to kEndOfChain. | 677 // target_at() for where this is converted back to kEndOfChain. |
651 target_pos = pc_offset(); | 678 position = pc_offset(); |
652 if (!trampoline_emitted_) { | 679 if (!trampoline_emitted_) { |
653 unbound_labels_count_++; | 680 unbound_labels_count_++; |
654 next_buffer_check_ -= kTrampolineSlotsSize; | 681 next_buffer_check_ -= kTrampolineSlotsSize; |
655 } | 682 } |
656 } | 683 } |
657 L->link_to(pc_offset()); | 684 L->link_to(pc_offset()); |
658 } | 685 } |
659 | 686 |
660 return target_pos - pc_offset(); | 687 return position; |
661 } | 688 } |
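link() maintains the chain invariant used throughout this file: each unresolved reference stores the position of the previous one, and the first reference links to itself so that target_at() sees a zero delta and reports kEndOfChain. A simplified standalone model of walking that chain the way bind_to() does:

#include <cstdio>

const int kEndOfChain = -4;

// Each word-indexed slot holds the byte delta back to the previous fixup;
// a zero delta (a self-link) terminates the chain.
int TargetAtModel(const int* links, int pos) {
  int delta = links[pos / 4];
  return (delta == 0) ? kEndOfChain : pos + delta;
}

int main() {
  // Fixup sites at byte positions 0, 8 and 20 all reference one label.
  int links[6] = {0, 0, -8, 0, 0, -12};
  for (int pos = 20; pos != kEndOfChain; pos = TargetAtModel(links, pos))
    printf("fixup at %d\n", pos);  // prints 20, 8, 0
}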
662 | 689 |
663 | 690 |
664 // Branch instructions. | 691 // Branch instructions. |
665 | 692 |
666 | 693 |
667 void Assembler::bclr(BOfield bo, LKBit lk) { | 694 void Assembler::bclr(BOfield bo, LKBit lk) { |
668 positions_recorder()->WriteRecordedPositions(); | 695 positions_recorder()->WriteRecordedPositions(); |
669 emit(EXT1 | bo | BCLRX | lk); | 696 emit(EXT1 | bo | BCLRX | lk); |
670 } | 697 } |
(...skipping 800 matching lines...)
1471 xo_form(EXT2 | DIVDU, dst, src1, src2, o, r); | 1498 xo_form(EXT2 | DIVDU, dst, src1, src2, o, r); |
1472 } | 1499 } |
1473 #endif | 1500 #endif |
1474 | 1501 |
1475 | 1502 |
1476 // Function descriptor for AIX. | 1503 // Function descriptor for AIX. |
1477 // Code address skips the function descriptor "header". | 1504 // Code address skips the function descriptor "header". |
1478 // TOC and static chain are ignored and set to 0. | 1505 // TOC and static chain are ignored and set to 0. |
1479 void Assembler::function_descriptor() { | 1506 void Assembler::function_descriptor() { |
1480 #if ABI_USES_FUNCTION_DESCRIPTORS | 1507 #if ABI_USES_FUNCTION_DESCRIPTORS |
| 1508 Label instructions; |
1481 DCHECK(pc_offset() == 0); | 1509 DCHECK(pc_offset() == 0); |
1482 RecordRelocInfo(RelocInfo::INTERNAL_REFERENCE); | 1510 emit_label_addr(&instructions); |
1483 emit_ptr(reinterpret_cast<uintptr_t>(pc_) + 3 * kPointerSize); | |
1484 emit_ptr(0); | 1511 emit_ptr(0); |
1485 emit_ptr(0); | 1512 emit_ptr(0); |
| 1513 bind(&instructions); |
1486 #endif | 1514 #endif |
1487 } | 1515 } |
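On AIX-style (ELFv1) ABIs a function symbol points at a three-word descriptor rather than at code, and the rewrite above routes the entry address through emit_label_addr() so it participates in internal-reference relocation like any other jump table entry. The emitted layout, as an illustrative struct (field names are mine, not V8's):

#include <cstdint>

struct FunctionDescriptor {
  uintptr_t entry_point;   // address of the first real instruction
  uintptr_t toc;           // TOC base; ignored by V8 and emitted as 0
  uintptr_t static_chain;  // environment pointer; also emitted as 0
};

static_assert(sizeof(FunctionDescriptor) == 3 * sizeof(uintptr_t),
              "three pointer-sized words, matching the three emits above");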
1488 | 1516 |
1489 | 1517 |
1490 #if ABI_USES_FUNCTION_DESCRIPTORS || V8_OOL_CONSTANT_POOL | |
1491 void Assembler::RelocateInternalReference(Address pc, intptr_t delta, | 1518 void Assembler::RelocateInternalReference(Address pc, intptr_t delta, |
1492 Address code_start, | 1519 Address code_start, |
| 1520 RelocInfo::Mode rmode, |
1493 ICacheFlushMode icache_flush_mode) { | 1521 ICacheFlushMode icache_flush_mode) { |
1494 DCHECK(delta || code_start); | 1522 if (RelocInfo::IsInternalReference(rmode)) { |
1495 #if ABI_USES_FUNCTION_DESCRIPTORS | 1523 // Jump table entry |
1496 uintptr_t* fd = reinterpret_cast<uintptr_t*>(pc); | 1524 DCHECK(delta || code_start); |
1497 if (fd[1] == 0 && fd[2] == 0) { | 1525 uintptr_t* entry = reinterpret_cast<uintptr_t*>(pc); |
1498 // Function descriptor | |
1499 if (delta) { | 1526 if (delta) { |
1500 fd[0] += delta; | 1527 *entry += delta; |
1501 } else { | 1528 } else { |
1502 fd[0] = reinterpret_cast<uintptr_t>(code_start) + 3 * kPointerSize; | 1529 // remove when serializer properly supports internal references |
| 1530 *entry = reinterpret_cast<uintptr_t>(code_start) + 3 * kPointerSize; |
1503 } | 1531 } |
1504 return; | 1532 } else { |
| 1533 // mov sequence |
| 1534 DCHECK(delta || code_start); |
| 1535 DCHECK(RelocInfo::IsInternalReferenceEncoded(rmode)); |
| 1536 ConstantPoolArray* constant_pool = NULL; |
| 1537 Address addr; |
| 1538 if (delta) { |
| 1539 addr = target_address_at(pc, constant_pool) + delta; |
| 1540 } else { |
| 1541 // remove when serializer properly supports internal references |
| 1542 addr = code_start; |
| 1543 } |
| 1544 set_target_address_at(pc, constant_pool, addr, icache_flush_mode); |
1505 } | 1545 } |
1506 #endif | |
1507 #if V8_OOL_CONSTANT_POOL | |
1508 // mov for LoadConstantPoolPointerRegister | |
1509 ConstantPoolArray* constant_pool = NULL; | |
1510 if (delta) { | |
1511 code_start = target_address_at(pc, constant_pool) + delta; | |
1512 } | |
1513 set_target_address_at(pc, constant_pool, code_start, icache_flush_mode); | |
1514 #endif | |
1515 } | 1546 } |
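The function now dispatches on the relocation mode: a plain INTERNAL_REFERENCE is an absolute pointer sitting in a jump table entry and is rebased in place, while INTERNAL_REFERENCE_ENCODED is an address baked into a mov sequence and must be rewritten through target_address_at / set_target_address_at. A toy model of that dispatch (the mov path is stood in for by a plain variable, since the real patching needs the instruction stream):

#include <cassert>
#include <cstdint>

enum class Mode { kInternalReference, kInternalReferenceEncoded };

void RelocateModel(Mode mode, uintptr_t* entry, uintptr_t* mov_target,
                   intptr_t delta) {
  if (mode == Mode::kInternalReference) {
    *entry += delta;       // rebase the absolute jump table entry
  } else {
    *mov_target += delta;  // stands in for the target_address_at() read
  }                        // and set_target_address_at() rewrite
}

int main() {
  uintptr_t entry = 0x10000, mov_target = 0x20000;
  RelocateModel(Mode::kInternalReference, &entry, &mov_target, 0x4000);
  RelocateModel(Mode::kInternalReferenceEncoded, &entry, &mov_target, 0x4000);
  assert(entry == 0x14000 && mov_target == 0x24000);
}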
1516 | 1547 |
1517 | 1548 |
1518 int Assembler::DecodeInternalReference(Vector<char> buffer, Address pc) { | |
1519 #if ABI_USES_FUNCTION_DESCRIPTORS | |
1520 uintptr_t* fd = reinterpret_cast<uintptr_t*>(pc); | |
1521 if (fd[1] == 0 && fd[2] == 0) { | |
1522 // Function descriptor | |
1523 SNPrintF(buffer, "[%08" V8PRIxPTR ", %08" V8PRIxPTR ", %08" V8PRIxPTR | |
1524 "]" | |
1525 " function descriptor", | |
1526 fd[0], fd[1], fd[2]); | |
1527 return kPointerSize * 3; | |
1528 } | |
1529 #endif | |
1530 return 0; | |
1531 } | |
1532 #endif | |
1533 | |
1534 | |
1535 int Assembler::instructions_required_for_mov(const Operand& x) const { | 1549 int Assembler::instructions_required_for_mov(const Operand& x) const { |
1536 #if V8_OOL_CONSTANT_POOL || DEBUG | 1550 #if V8_OOL_CONSTANT_POOL || DEBUG |
1537 bool canOptimize = | 1551 bool canOptimize = |
1538 !(x.must_output_reloc_info(this) || is_trampoline_pool_blocked()); | 1552 !(x.must_output_reloc_info(this) || is_trampoline_pool_blocked()); |
1539 #endif | 1553 #endif |
1540 #if V8_OOL_CONSTANT_POOL | 1554 #if V8_OOL_CONSTANT_POOL |
1541 if (use_constant_pool_for_mov(x, canOptimize)) { | 1555 if (use_constant_pool_for_mov(x, canOptimize)) { |
1542 // Current usage guarantees that all constant pool references can | 1556 // Current usage guarantees that all constant pool references can |
1543 // use the same sequence. | 1557 // use the same sequence. |
1544 return kMovInstructionsConstantPool; | 1558 return kMovInstructionsConstantPool; |
(...skipping 106 matching lines...)
1651 #endif | 1665 #endif |
1652 u16 = (value & 0xffff); | 1666 u16 = (value & 0xffff); |
1653 if (u16) { | 1667 if (u16) { |
1654 ori(dst, dst, Operand(u16)); | 1668 ori(dst, dst, Operand(u16)); |
1655 } | 1669 } |
1656 } | 1670 } |
1657 return; | 1671 return; |
1658 } | 1672 } |
1659 | 1673 |
1660 DCHECK(!canOptimize); | 1674 DCHECK(!canOptimize); |
| 1675 bitwise_mov(dst, value); |
| 1676 } |
1661 | 1677 |
1662 { | 1678 |
| 1679 void Assembler::bitwise_mov(Register dst, intptr_t value) { |
1663 BlockTrampolinePoolScope block_trampoline_pool(this); | 1680 BlockTrampolinePoolScope block_trampoline_pool(this); |
1664 #if V8_TARGET_ARCH_PPC64 | 1681 #if V8_TARGET_ARCH_PPC64 |
1665 int32_t hi_32 = static_cast<int32_t>(value >> 32); | 1682 int32_t hi_32 = static_cast<int32_t>(value >> 32); |
1666 int32_t lo_32 = static_cast<int32_t>(value); | 1683 int32_t lo_32 = static_cast<int32_t>(value); |
1667 int hi_word = static_cast<int>(hi_32 >> 16); | 1684 int hi_word = static_cast<int>(hi_32 >> 16); |
1668 int lo_word = static_cast<int>(hi_32 & 0xffff); | 1685 int lo_word = static_cast<int>(hi_32 & 0xffff); |
1669 lis(dst, Operand(SIGN_EXT_IMM16(hi_word))); | 1686 lis(dst, Operand(SIGN_EXT_IMM16(hi_word))); |
1670 ori(dst, dst, Operand(lo_word)); | 1687 ori(dst, dst, Operand(lo_word)); |
1671 sldi(dst, dst, Operand(32)); | 1688 sldi(dst, dst, Operand(32)); |
1672 hi_word = static_cast<int>(((lo_32 >> 16) & 0xffff)); | 1689 hi_word = static_cast<int>(((lo_32 >> 16) & 0xffff)); |
1673 lo_word = static_cast<int>(lo_32 & 0xffff); | 1690 lo_word = static_cast<int>(lo_32 & 0xffff); |
1674 oris(dst, dst, Operand(hi_word)); | 1691 oris(dst, dst, Operand(hi_word)); |
1675 ori(dst, dst, Operand(lo_word)); | 1692 ori(dst, dst, Operand(lo_word)); |
1676 #else | 1693 #else |
1677 int hi_word = static_cast<int>(value >> 16); | 1694 int hi_word = static_cast<int>(value >> 16); |
1678 int lo_word = static_cast<int>(value & 0xffff); | 1695 int lo_word = static_cast<int>(value & 0xffff); |
1679 lis(dst, Operand(SIGN_EXT_IMM16(hi_word))); | 1696 lis(dst, Operand(SIGN_EXT_IMM16(hi_word))); |
1680 ori(dst, dst, Operand(lo_word)); | 1697 ori(dst, dst, Operand(lo_word)); |
1681 #endif | 1698 #endif |
1682 } | 1699 } |
| 1700 |
| 1701 |
| 1702 void Assembler::bitwise_mov32(Register dst, int32_t value) { |
| 1703 BlockTrampolinePoolScope block_trampoline_pool(this); |
| 1704 int hi_word = static_cast<int>(value >> 16); |
| 1705 int lo_word = static_cast<int>(value & 0xffff); |
| 1706 lis(dst, Operand(SIGN_EXT_IMM16(hi_word))); |
| 1707 ori(dst, dst, Operand(lo_word)); |
1683 } | 1708 } |
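bitwise_mov() always materializes a 64-bit constant with the same five-instruction pattern so the sequence can be patched in place later: lis/ori build the high 32 bits, sldi shifts them up, and oris/ori fill in the low half. A standalone check of that arithmetic:

#include <cassert>
#include <cstdint>

int main() {
  int64_t value = 0x1122334455667788LL;
  int32_t hi_32 = static_cast<int32_t>(value >> 32);
  int32_t lo_32 = static_cast<int32_t>(value);

  int64_t reg =
      static_cast<int64_t>(static_cast<int16_t>(hi_32 >> 16)) << 16;  // lis
  reg |= hi_32 & 0xFFFF;                                              // ori
  reg <<= 32;                                                         // sldi
  reg |= static_cast<int64_t>((lo_32 >> 16) & 0xFFFF) << 16;          // oris
  reg |= lo_32 & 0xFFFF;                                              // ori
  assert(reg == value);
}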
1684 | 1709 |
1685 | 1710 |
1686 void Assembler::mov_label_offset(Register dst, Label* label) { | 1711 void Assembler::mov_label_offset(Register dst, Label* label) { |
| 1712 int position = link(label); |
1687 if (label->is_bound()) { | 1713 if (label->is_bound()) { |
1688 int target = label->pos(); | 1714 // Load the position of the label relative to the generated code object. |
1689 mov(dst, Operand(target + Code::kHeaderSize - kHeapObjectTag)); | 1715 mov(dst, Operand(position + Code::kHeaderSize - kHeapObjectTag)); |
1690 } else { | 1716 } else { |
1691 bool is_linked = label->is_linked(); | 1717 // Encode internal reference to unbound label. We use a dummy opcode |
1692 // Emit the link to the label in the code stream followed by extra | 1718 // such that it won't collide with any opcode that might appear in the |
1693 // nop instructions. | 1719 // label's chain. Encode the destination register in the 2nd instruction. |
1694 DCHECK(dst.is(r3)); // target_at_put assumes r3 for now | 1720 int link = position - pc_offset(); |
1695 int link = is_linked ? label->pos() - pc_offset() : 0; | 1721 DCHECK_EQ(0, link & 3); |
1696 label->link_to(pc_offset()); | 1722 link >>= 2; |
1697 | 1723 DCHECK(is_int26(link)); |
1698 if (!is_linked && !trampoline_emitted_) { | |
1699 unbound_labels_count_++; | |
1700 next_buffer_check_ -= kTrampolineSlotsSize; | |
1701 } | |
1702 | 1724 |
1703 // When the label is bound, these instructions will be patched | 1725 // When the label is bound, these instructions will be patched |
1704 // with a 2 instruction mov sequence that will load the | 1726 // with a 2 instruction mov sequence that will load the |
1705 // destination register with the position of the label from the | 1727 // destination register with the position of the label from the |
1706 // beginning of the code. | 1728 // beginning of the code. |
1707 // | 1729 // |
1708 // When the label gets bound: target_at extracts the link and | 1730 // target_at extracts the link and target_at_put patches the instructions. |
1709 // target_at_put patches the instructions. | |
1710 BlockTrampolinePoolScope block_trampoline_pool(this); | 1731 BlockTrampolinePoolScope block_trampoline_pool(this); |
1711 emit(link); | 1732 emit(kUnboundMovLabelOffsetOpcode | (link & kImm26Mask)); |
1712 nop(); | 1733 emit(dst.code()); |
1713 } | 1734 } |
1714 } | 1735 } |
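The unbound branch above emits a two-word placeholder: word 0 carries the dummy opcode plus the 26-bit link, and word 1 carries the destination register's code where a nop used to sit, which is what lets target_at_put() recover dst instead of hard-coding r3. A sketch of that layout, with assumed mask values:

#include <cstdint>
#include <cstdio>

const uint32_t kImm26Mask = (1u << 26) - 1;
const uint32_t kUnboundMovLabelOffsetOpcode = 0u << 26;

int main() {
  uint32_t code[2];
  int link_words = -4;  // word offset back to the previous fixup site
  code[0] = kUnboundMovLabelOffsetOpcode |
            (static_cast<uint32_t>(link_words) & kImm26Mask);
  code[1] = 3;  // e.g. r3; read back via Register::from_code(instr_at(pos + 4))
  printf("opcode bits: %u, dst: r%u\n", code[0] >> 26, code[1]);
}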
1715 | 1736 |
| 1737 |
| 1738 // TODO(mbrandy): allow loading internal reference from constant pool |
| 1739 void Assembler::mov_label_addr(Register dst, Label* label) { |
| 1740 CheckBuffer(); |
| 1741 RecordRelocInfo(RelocInfo::INTERNAL_REFERENCE_ENCODED); |
| 1742 int position = link(label); |
| 1743 if (label->is_bound()) { |
| 1744 // CheckBuffer() is called too frequently. This will pre-grow |
| 1745 // the buffer if needed to avoid splitting the relocation and instructions. |
| 1746 #if V8_OOL_CONSTANT_POOL |
| 1747 EnsureSpaceFor(kMovInstructionsNoConstantPool * kInstrSize); |
| 1748 #endif |
| 1749 |
| 1750 intptr_t addr = reinterpret_cast<uintptr_t>(buffer_ + position); |
| 1751 AddBoundInternalReferenceLoad(pc_offset()); |
| 1752 bitwise_mov(dst, addr); |
| 1753 } else { |
| 1754 // Encode internal reference to unbound label. We use a dummy opcode |
| 1755 // such that it won't collide with any opcode that might appear in the |
| 1756 // label's chain. Encode the destination register in the 2nd instruction. |
| 1757 int link = position - pc_offset(); |
| 1758 DCHECK_EQ(0, link & 3); |
| 1759 link >>= 2; |
| 1760 DCHECK(is_int26(link)); |
| 1761 |
| 1762 // When the label is bound, these instructions will be patched |
| 1763 // with a multi-instruction mov sequence that will load the |
| 1764 // destination register with the address of the label. |
| 1765 // |
| 1766 // target_at extracts the link and target_at_put patches the instructions. |
| 1767 BlockTrampolinePoolScope block_trampoline_pool(this); |
| 1768 emit(kUnboundMovLabelAddrOpcode | (link & kImm26Mask)); |
| 1769 emit(dst.code()); |
| 1770 DCHECK(kMovInstructionsNoConstantPool >= 2); |
| 1771 for (int i = 0; i < kMovInstructionsNoConstantPool - 2; i++) nop(); |
| 1772 } |
| 1773 } |
| 1774 |
| 1775 |
| 1776 void Assembler::emit_label_addr(Label* label) { |
| 1777 CheckBuffer(); |
| 1778 RecordRelocInfo(RelocInfo::INTERNAL_REFERENCE); |
| 1779 int position = link(label); |
| 1780 if (label->is_bound()) { |
| 1781 // CheckBuffer() is called too frequently. This will pre-grow |
| 1782 // the buffer if needed to avoid splitting the relocation and entry. |
| 1783 #if V8_OOL_CONSTANT_POOL |
| 1784 EnsureSpaceFor(kPointerSize); |
| 1785 #endif |
| 1786 |
| 1787 intptr_t addr = reinterpret_cast<uintptr_t>(buffer_ + position); |
| 1788 AddBoundInternalReference(pc_offset()); |
| 1789 emit_ptr(addr); |
| 1790 } else { |
| 1791 // Encode internal reference to unbound label. We use a dummy opcode |
| 1792 // such that it won't collide with any opcode that might appear in the |
| 1793 // label's chain. |
| 1794 int link = position - pc_offset(); |
| 1795 DCHECK_EQ(0, link & 3); |
| 1796 link >>= 2; |
| 1797 DCHECK(is_int26(link)); |
| 1798 |
| 1799 // When the label is bound, the instruction(s) will be patched |
| 1800 // as a jump table entry containing the label address. target_at extracts |
| 1801 // the link and target_at_put patches the instruction(s). |
| 1802 BlockTrampolinePoolScope block_trampoline_pool(this); |
| 1803 emit(kUnboundJumpTableEntryOpcode | (link & kImm26Mask)); |
| 1804 #if V8_TARGET_ARCH_PPC64 |
| 1805 nop(); |
| 1806 #endif |
| 1807 } |
| 1808 } |
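emit_label_addr() reserves exactly kPointerSize bytes for the future jump table entry: one placeholder word, plus a trailing nop on 64-bit targets so the slot spans two instruction words. When the label is bound, target_at_put() overwrites the whole slot via emit_ptr(addr). A size sanity check under those assumptions:

#include <cassert>

int main() {
  const int kInstrSize = 4;                // every PPC instruction is 4 bytes
  const int kPointerSize = sizeof(void*);  // 8 on PPC64, 4 on PPC32
  const int placeholder_words = kPointerSize / kInstrSize;  // 1 (+1 nop on 64-bit)
  assert(placeholder_words * kInstrSize == kPointerSize);
}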
| 1809 |
1716 | 1810 |
1717 // Special register instructions | 1811 // Special register instructions |
1718 void Assembler::crxor(int bt, int ba, int bb) { | 1812 void Assembler::crxor(int bt, int ba, int bb) { |
1719 emit(EXT1 | CRXOR | bt * B21 | ba * B16 | bb * B11); | 1813 emit(EXT1 | CRXOR | bt * B21 | ba * B16 | bb * B11); |
1720 } | 1814 } |
1721 | 1815 |
1722 | 1816 |
1723 void Assembler::creqv(int bt, int ba, int bb) { | 1817 void Assembler::creqv(int bt, int ba, int bb) { |
1724 emit(EXT1 | CREQV | bt * B21 | ba * B16 | bb * B11); | 1818 emit(EXT1 | CREQV | bt * B21 | ba * B16 | bb * B11); |
1725 } | 1819 } |
(...skipping 476 matching lines...)
2202 desc.reloc_size); | 2296 desc.reloc_size); |
2203 | 2297 |
2204 // Switch buffers. | 2298 // Switch buffers. |
2205 DeleteArray(buffer_); | 2299 DeleteArray(buffer_); |
2206 buffer_ = desc.buffer; | 2300 buffer_ = desc.buffer; |
2207 buffer_size_ = desc.buffer_size; | 2301 buffer_size_ = desc.buffer_size; |
2208 pc_ += pc_delta; | 2302 pc_ += pc_delta; |
2209 reloc_info_writer.Reposition(reloc_info_writer.pos() + rc_delta, | 2303 reloc_info_writer.Reposition(reloc_info_writer.pos() + rc_delta, |
2210 reloc_info_writer.last_pc() + pc_delta); | 2304 reloc_info_writer.last_pc() + pc_delta); |
2211 | 2305 |
2212 // None of our relocation types are pc relative pointing outside the code | 2306 // Relocate internal references |
2213 // buffer nor pc absolute pointing inside the code buffer, so there is no need | 2307 for (int pos : internal_reference_positions_) { |
2214 // to relocate any emitted relocation entries. | 2308 RelocateInternalReference(buffer_ + pos, pc_delta, 0, |
2215 | 2309 RelocInfo::INTERNAL_REFERENCE); |
2216 #if ABI_USES_FUNCTION_DESCRIPTORS || V8_OOL_CONSTANT_POOL | 2310 } |
2217 // Relocate runtime entries. | 2311 for (int pos : internal_reference_load_positions_) { |
2218 for (RelocIterator it(desc); !it.done(); it.next()) { | 2312 RelocateInternalReference(buffer_ + pos, pc_delta, 0, |
2219 RelocInfo::Mode rmode = it.rinfo()->rmode(); | 2313 RelocInfo::INTERNAL_REFERENCE_ENCODED); |
2220 if (rmode == RelocInfo::INTERNAL_REFERENCE) { | |
2221 RelocateInternalReference(it.rinfo()->pc(), pc_delta, 0); | |
2222 } | |
2223 } | 2314 } |
2224 #if V8_OOL_CONSTANT_POOL | 2315 #if V8_OOL_CONSTANT_POOL |
2225 constant_pool_builder_.Relocate(pc_delta); | 2316 constant_pool_builder_.Relocate(pc_delta); |
2226 #endif | 2317 #endif |
2227 #endif | |
2228 } | 2318 } |
2229 | 2319 |
2230 | 2320 |
2231 void Assembler::db(uint8_t data) { | 2321 void Assembler::db(uint8_t data) { |
2232 CheckBuffer(); | 2322 CheckBuffer(); |
2233 *reinterpret_cast<uint8_t*>(pc_) = data; | 2323 *reinterpret_cast<uint8_t*>(pc_) = data; |
2234 pc_ += sizeof(uint8_t); | 2324 pc_ += sizeof(uint8_t); |
2235 } | 2325 } |
2236 | 2326 |
2237 | 2327 |
2238 void Assembler::dd(uint32_t data) { | 2328 void Assembler::dd(uint32_t data) { |
2239 CheckBuffer(); | 2329 CheckBuffer(); |
2240 *reinterpret_cast<uint32_t*>(pc_) = data; | 2330 *reinterpret_cast<uint32_t*>(pc_) = data; |
2241 pc_ += sizeof(uint32_t); | 2331 pc_ += sizeof(uint32_t); |
2242 } | 2332 } |
2243 | 2333 |
2244 | 2334 |
2245 void Assembler::emit_ptr(uintptr_t data) { | 2335 void Assembler::emit_ptr(intptr_t data) { |
2246 CheckBuffer(); | 2336 CheckBuffer(); |
2247 *reinterpret_cast<uintptr_t*>(pc_) = data; | 2337 *reinterpret_cast<uintptr_t*>(pc_) = data; |
2248 pc_ += sizeof(uintptr_t); | 2338 pc_ += sizeof(uintptr_t); |
2249 } | 2339 } |
2250 | 2340 |
2251 | 2341 |
2252 void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data) { | 2342 void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data) { |
2253 RelocInfo rinfo(pc_, rmode, data, NULL); | 2343 RelocInfo rinfo(pc_, rmode, data, NULL); |
2254 RecordRelocInfo(rinfo); | 2344 RecordRelocInfo(rinfo); |
2255 } | 2345 } |
(...skipping 267 matching lines...)
2523 | 2613 |
2524 // Patch load instruction with correct offset. | 2614 // Patch load instruction with correct offset. |
2525 Assembler::SetConstantPoolOffset(rinfo.pc(), offset); | 2615 Assembler::SetConstantPoolOffset(rinfo.pc(), offset); |
2526 } | 2616 } |
2527 } | 2617 } |
2528 #endif | 2618 #endif |
2529 } | 2619 } |
2530 } // namespace v8::internal | 2620 } // namespace v8::internal |
2531 | 2621 |
2532 #endif // V8_TARGET_ARCH_PPC | 2622 #endif // V8_TARGET_ARCH_PPC |