OLD | NEW |
1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
2 // | 2 // |
3 // Redistribution and use in source and binary forms, with or without | 3 // Redistribution and use in source and binary forms, with or without |
4 // modification, are permitted provided that the following conditions are | 4 // modification, are permitted provided that the following conditions are |
5 // met: | 5 // met: |
6 // | 6 // |
7 // * Redistributions of source code must retain the above copyright | 7 // * Redistributions of source code must retain the above copyright |
8 // notice, this list of conditions and the following disclaimer. | 8 // notice, this list of conditions and the following disclaimer. |
9 // * Redistributions in binary form must reproduce the above | 9 // * Redistributions in binary form must reproduce the above |
10 // copyright notice, this list of conditions and the following | 10 // copyright notice, this list of conditions and the following |
(...skipping 315 matching lines...)
326 bool Operand::NeedsRelocation(const Assembler* assembler) const { | 326 bool Operand::NeedsRelocation(const Assembler* assembler) const { |
327 RelocInfo::Mode rmode = immediate_.rmode(); | 327 RelocInfo::Mode rmode = immediate_.rmode(); |
328 | 328 |
329 if (rmode == RelocInfo::EXTERNAL_REFERENCE) { | 329 if (rmode == RelocInfo::EXTERNAL_REFERENCE) { |
330 return assembler->serializer_enabled(); | 330 return assembler->serializer_enabled(); |
331 } | 331 } |
332 | 332 |
333 return !RelocInfo::IsNone(rmode); | 333 return !RelocInfo::IsNone(rmode); |
334 } | 334 } |
335 | 335 |
| 336 bool ConstPool::AddSharedEntry(SharedEntryMap& entry_map, uint64_t data, |
| 337 int offset) { |
| 338 auto existing = entry_map.find(data); |
| 339 if (existing == entry_map.end()) { |
| 340 entry_map[data] = static_cast<int>(entries_.size()); |
| 341 entries_.push_back(std::make_pair(data, std::vector<int>(1, offset))); |
| 342 return true; |
| 343 } |
| 344 int index = existing->second; |
| 345 entries_[index].second.push_back(offset); |
| 346 return false; |
| 347 } |
336 | 348 |
337 // Constant Pool. | 349 // Constant Pool. |
338 void ConstPool::RecordEntry(intptr_t data, | 350 bool ConstPool::RecordEntry(intptr_t data, RelocInfo::Mode mode) { |
339 RelocInfo::Mode mode) { | |
340 DCHECK(mode != RelocInfo::COMMENT && mode != RelocInfo::CONST_POOL && | 351 DCHECK(mode != RelocInfo::COMMENT && mode != RelocInfo::CONST_POOL && |
341 mode != RelocInfo::VENEER_POOL && | 352 mode != RelocInfo::VENEER_POOL && |
342 mode != RelocInfo::CODE_AGE_SEQUENCE && | 353 mode != RelocInfo::CODE_AGE_SEQUENCE && |
343 mode != RelocInfo::DEOPT_SCRIPT_OFFSET && | 354 mode != RelocInfo::DEOPT_SCRIPT_OFFSET && |
344 mode != RelocInfo::DEOPT_INLINING_ID && | 355 mode != RelocInfo::DEOPT_INLINING_ID && |
345 mode != RelocInfo::DEOPT_REASON && mode != RelocInfo::DEOPT_ID); | 356 mode != RelocInfo::DEOPT_REASON && mode != RelocInfo::DEOPT_ID); |
| 357 |
| 358 bool write_reloc_info = true; |
| 359 |
346 uint64_t raw_data = static_cast<uint64_t>(data); | 360 uint64_t raw_data = static_cast<uint64_t>(data); |
347 int offset = assm_->pc_offset(); | 361 int offset = assm_->pc_offset(); |
348 if (IsEmpty()) { | 362 if (IsEmpty()) { |
349 first_use_ = offset; | 363 first_use_ = offset; |
350 } | 364 } |
351 | 365 |
352 std::pair<uint64_t, int> entry = std::make_pair(raw_data, offset); | |
353 if (CanBeShared(mode)) { | 366 if (CanBeShared(mode)) { |
354 shared_entries_.insert(entry); | 367 write_reloc_info = AddSharedEntry(shared_entries_, raw_data, offset); |
355 if (shared_entries_.count(entry.first) == 1) { | 368 } else if (mode == RelocInfo::CODE_TARGET && |
356 shared_entries_count++; | 369 assm_->IsCodeTargetSharingAllowed()) { |
357 } | 370 write_reloc_info = AddSharedEntry(handle_to_index_map_, raw_data, offset); |
358 } else { | 371 } else { |
359 unique_entries_.push_back(entry); | 372 entries_.push_back(std::make_pair(raw_data, std::vector<int>(1, offset))); |
360 } | 373 } |
361 | 374 |
362 if (EntryCount() > Assembler::kApproxMaxPoolEntryCount) { | 375 if (EntryCount() > Assembler::kApproxMaxPoolEntryCount) { |
363 // Request constant pool emission after the next instruction. | 376 // Request constant pool emission after the next instruction. |
364 assm_->SetNextConstPoolCheckIn(1); | 377 assm_->SetNextConstPoolCheckIn(1); |
365 } | 378 } |
| 379 |
| 380 return write_reloc_info; |
366 } | 381 } |
367 | 382 |
368 | 383 |
369 int ConstPool::DistanceToFirstUse() { | 384 int ConstPool::DistanceToFirstUse() { |
370 DCHECK(first_use_ >= 0); | 385 DCHECK(first_use_ >= 0); |
371 return assm_->pc_offset() - first_use_; | 386 return assm_->pc_offset() - first_use_; |
372 } | 387 } |
373 | 388 |
374 | 389 |
375 int ConstPool::MaxPcOffset() { | 390 int ConstPool::MaxPcOffset() { |
(...skipping 88 matching lines...)
464 assm_->bind(&after_pool); | 479 assm_->bind(&after_pool); |
465 } | 480 } |
466 | 481 |
467 DCHECK(assm_->SizeOfCodeGeneratedSince(&size_check) == | 482 DCHECK(assm_->SizeOfCodeGeneratedSince(&size_check) == |
468 static_cast<unsigned>(size)); | 483 static_cast<unsigned>(size)); |
469 } | 484 } |
470 | 485 |
471 | 486 |
472 void ConstPool::Clear() { | 487 void ConstPool::Clear() { |
473 shared_entries_.clear(); | 488 shared_entries_.clear(); |
474 shared_entries_count = 0; | 489 handle_to_index_map_.clear(); |
475 unique_entries_.clear(); | 490 entries_.clear(); |
476 first_use_ = -1; | 491 first_use_ = -1; |
477 } | 492 } |
478 | 493 |
479 | 494 |
480 bool ConstPool::CanBeShared(RelocInfo::Mode mode) { | 495 bool ConstPool::CanBeShared(RelocInfo::Mode mode) { |
481 // Constant pool currently does not support 32-bit entries. | 496 // Constant pool currently does not support 32-bit entries. |
482 DCHECK(mode != RelocInfo::NONE32); | 497 DCHECK(mode != RelocInfo::NONE32); |
483 | 498 |
484 return RelocInfo::IsNone(mode) || | 499 return RelocInfo::IsNone(mode) || |
485 (!assm_->serializer_enabled() && | 500 (mode >= RelocInfo::FIRST_SHAREABLE_RELOC_MODE); |
486 (mode >= RelocInfo::FIRST_SHAREABLE_RELOC_MODE)); | |
487 } | 501 } |
488 | 502 |
489 | 503 |
490 void ConstPool::EmitMarker() { | 504 void ConstPool::EmitMarker() { |
491 // A constant pool size is expressed in number of 32-bits words. | 505 // A constant pool size is expressed in number of 32-bits words. |
492 // Currently all entries are 64-bit. | 506 // Currently all entries are 64-bit. |
493 // + 1 is for the crash guard. | 507 // + 1 is for the crash guard. |
494 // + 0/1 for alignment. | 508 // + 0/1 for alignment. |
495 int word_count = EntryCount() * 2 + 1 + | 509 int word_count = EntryCount() * 2 + 1 + |
496 (IsAligned(assm_->pc_offset(), 8) ? 0 : 1); | 510 (IsAligned(assm_->pc_offset(), 8) ? 0 : 1); |
(...skipping 37 matching lines...)
534 DCHECK(instr->preceding()->IsLdrLiteralX() && | 548 DCHECK(instr->preceding()->IsLdrLiteralX() && |
535 instr->preceding()->Rt() == xzr.code()); | 549 instr->preceding()->Rt() == xzr.code()); |
536 #endif | 550 #endif |
537 assm_->EmitPoolGuard(); | 551 assm_->EmitPoolGuard(); |
538 } | 552 } |
539 | 553 |
540 | 554 |
541 void ConstPool::EmitEntries() { | 555 void ConstPool::EmitEntries() { |
542 DCHECK(IsAligned(assm_->pc_offset(), 8)); | 556 DCHECK(IsAligned(assm_->pc_offset(), 8)); |
543 | 557 |
544 typedef std::multimap<uint64_t, int>::const_iterator SharedEntriesIterator; | 558 // Emit entries. |
545 SharedEntriesIterator value_it; | 559 for (const auto& entry : entries_) { |
546 // Iterate through the keys (constant pool values). | 560 for (const auto& pc : entry.second) { |
547 for (value_it = shared_entries_.begin(); | 561 Instruction* instr = assm_->InstructionAt(pc); |
548 value_it != shared_entries_.end(); | |
549 value_it = shared_entries_.upper_bound(value_it->first)) { | |
550 std::pair<SharedEntriesIterator, SharedEntriesIterator> range; | |
551 uint64_t data = value_it->first; | |
552 range = shared_entries_.equal_range(data); | |
553 SharedEntriesIterator offset_it; | |
554 // Iterate through the offsets of a given key. | |
555 for (offset_it = range.first; offset_it != range.second; offset_it++) { | |
556 Instruction* instr = assm_->InstructionAt(offset_it->second); | |
557 | 562 |
558 // Instruction to patch must be 'ldr rd, [pc, #offset]' with offset == 0. | 563 // Instruction to patch must be 'ldr rd, [pc, #offset]' with offset == 0. |
559 DCHECK(instr->IsLdrLiteral() && instr->ImmLLiteral() == 0); | 564 DCHECK(instr->IsLdrLiteral() && instr->ImmLLiteral() == 0); |
560 instr->SetImmPCOffsetTarget(assm_->isolate_data(), assm_->pc()); | 565 instr->SetImmPCOffsetTarget(assm_->isolate_data(), assm_->pc()); |
561 } | 566 } |
562 assm_->dc64(data); | 567 |
| 568 assm_->dc64(entry.first); |
563 } | 569 } |
564 shared_entries_.clear(); | 570 Clear(); |
565 shared_entries_count = 0; | |
566 | |
567 // Emit unique entries. | |
568 std::vector<std::pair<uint64_t, int> >::const_iterator unique_it; | |
569 for (unique_it = unique_entries_.begin(); | |
570 unique_it != unique_entries_.end(); | |
571 unique_it++) { | |
572 Instruction* instr = assm_->InstructionAt(unique_it->second); | |
573 | |
574 // Instruction to patch must be 'ldr rd, [pc, #offset]' with offset == 0. | |
575 DCHECK(instr->IsLdrLiteral() && instr->ImmLLiteral() == 0); | |
576 instr->SetImmPCOffsetTarget(assm_->isolate_data(), assm_->pc()); | |
577 assm_->dc64(unique_it->first); | |
578 } | |
579 unique_entries_.clear(); | |
580 first_use_ = -1; | |
581 } | 571 } |
582 | 572 |
583 | 573 |
584 // Assembler | 574 // Assembler |
585 Assembler::Assembler(IsolateData isolate_data, void* buffer, int buffer_size) | 575 Assembler::Assembler(IsolateData isolate_data, void* buffer, int buffer_size) |
586 : AssemblerBase(isolate_data, buffer, buffer_size), | 576 : AssemblerBase(isolate_data, buffer, buffer_size), |
587 constpool_(this), | 577 constpool_(this), |
588 recorded_ast_id_(TypeFeedbackId::None()), | 578 recorded_ast_id_(TypeFeedbackId::None()), |
589 unresolved_branches_() { | 579 unresolved_branches_() { |
590 const_pool_blocked_nesting_ = 0; | 580 const_pool_blocked_nesting_ = 0; |
591 veneer_pool_blocked_nesting_ = 0; | 581 veneer_pool_blocked_nesting_ = 0; |
| 582 code_target_sharing_blocked_nesting_ = 0; |
592 Reset(); | 583 Reset(); |
593 } | 584 } |
594 | 585 |
595 | 586 |
596 Assembler::~Assembler() { | 587 Assembler::~Assembler() { |
597 DCHECK(constpool_.IsEmpty()); | 588 DCHECK(constpool_.IsEmpty()); |
598 DCHECK(const_pool_blocked_nesting_ == 0); | 589 DCHECK_EQ(const_pool_blocked_nesting_, 0); |
599 DCHECK(veneer_pool_blocked_nesting_ == 0); | 590 DCHECK_EQ(veneer_pool_blocked_nesting_, 0); |
| 591 DCHECK_EQ(code_target_sharing_blocked_nesting_, 0); |
600 } | 592 } |
601 | 593 |
602 | 594 |
603 void Assembler::Reset() { | 595 void Assembler::Reset() { |
604 #ifdef DEBUG | 596 #ifdef DEBUG |
605 DCHECK((pc_ >= buffer_) && (pc_ < buffer_ + buffer_size_)); | 597 DCHECK((pc_ >= buffer_) && (pc_ < buffer_ + buffer_size_)); |
606 DCHECK(const_pool_blocked_nesting_ == 0); | 598 DCHECK_EQ(const_pool_blocked_nesting_, 0); |
607 DCHECK(veneer_pool_blocked_nesting_ == 0); | 599 DCHECK_EQ(veneer_pool_blocked_nesting_, 0); |
| 600 DCHECK_EQ(code_target_sharing_blocked_nesting_, 0); |
608 DCHECK(unresolved_branches_.empty()); | 601 DCHECK(unresolved_branches_.empty()); |
609 memset(buffer_, 0, pc_ - buffer_); | 602 memset(buffer_, 0, pc_ - buffer_); |
610 #endif | 603 #endif |
611 pc_ = buffer_; | 604 pc_ = buffer_; |
612 reloc_info_writer.Reposition(reinterpret_cast<byte*>(buffer_ + buffer_size_), | 605 reloc_info_writer.Reposition(reinterpret_cast<byte*>(buffer_ + buffer_size_), |
613 reinterpret_cast<byte*>(pc_)); | 606 reinterpret_cast<byte*>(pc_)); |
614 constpool_.Clear(); | 607 constpool_.Clear(); |
615 next_constant_pool_check_ = 0; | 608 next_constant_pool_check_ = 0; |
616 next_veneer_pool_check_ = kMaxInt; | 609 next_veneer_pool_check_ = kMaxInt; |
617 no_const_pool_before_ = 0; | 610 no_const_pool_before_ = 0; |
(...skipping 4135 matching lines...)
4753 *p += pc_delta; | 4746 *p += pc_delta; |
4754 } | 4747 } |
4755 | 4748 |
4756 // Pending relocation entries are also relative, no need to relocate. | 4749 // Pending relocation entries are also relative, no need to relocate. |
4757 } | 4750 } |
4758 | 4751 |
4759 | 4752 |
4760 void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data) { | 4753 void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data) { |
4761 // We do not try to reuse pool constants. | 4754 // We do not try to reuse pool constants. |
4762 RelocInfo rinfo(reinterpret_cast<byte*>(pc_), rmode, data, NULL); | 4755 RelocInfo rinfo(reinterpret_cast<byte*>(pc_), rmode, data, NULL); |
| 4756 bool write_reloc_info = true; |
| 4757 |
4763 if (((rmode >= RelocInfo::COMMENT) && | 4758 if (((rmode >= RelocInfo::COMMENT) && |
4764 (rmode <= RelocInfo::DEBUG_BREAK_SLOT_AT_TAIL_CALL)) || | 4759 (rmode <= RelocInfo::DEBUG_BREAK_SLOT_AT_TAIL_CALL)) || |
4765 (rmode == RelocInfo::INTERNAL_REFERENCE) || | 4760 (rmode == RelocInfo::INTERNAL_REFERENCE) || |
4766 (rmode == RelocInfo::CONST_POOL) || (rmode == RelocInfo::VENEER_POOL) || | 4761 (rmode == RelocInfo::CONST_POOL) || (rmode == RelocInfo::VENEER_POOL) || |
4767 (rmode == RelocInfo::DEOPT_SCRIPT_OFFSET) || | 4762 (rmode == RelocInfo::DEOPT_SCRIPT_OFFSET) || |
4768 (rmode == RelocInfo::DEOPT_INLINING_ID) || | 4763 (rmode == RelocInfo::DEOPT_INLINING_ID) || |
4769 (rmode == RelocInfo::DEOPT_REASON) || (rmode == RelocInfo::DEOPT_ID)) { | 4764 (rmode == RelocInfo::DEOPT_REASON) || (rmode == RelocInfo::DEOPT_ID)) { |
4770 // Adjust code for new modes. | 4765 // Adjust code for new modes. |
4771 DCHECK(RelocInfo::IsDebugBreakSlot(rmode) || RelocInfo::IsComment(rmode) || | 4766 DCHECK(RelocInfo::IsDebugBreakSlot(rmode) || RelocInfo::IsComment(rmode) || |
4772 RelocInfo::IsDeoptReason(rmode) || RelocInfo::IsDeoptId(rmode) || | 4767 RelocInfo::IsDeoptReason(rmode) || RelocInfo::IsDeoptId(rmode) || |
4773 RelocInfo::IsDeoptPosition(rmode) || | 4768 RelocInfo::IsDeoptPosition(rmode) || |
4774 RelocInfo::IsInternalReference(rmode) || | 4769 RelocInfo::IsInternalReference(rmode) || |
4775 RelocInfo::IsConstPool(rmode) || RelocInfo::IsVeneerPool(rmode)); | 4770 RelocInfo::IsConstPool(rmode) || RelocInfo::IsVeneerPool(rmode)); |
4776 // These modes do not need an entry in the constant pool. | 4771 // These modes do not need an entry in the constant pool. |
4777 } else { | 4772 } else { |
4778 constpool_.RecordEntry(data, rmode); | 4773 write_reloc_info = constpool_.RecordEntry(data, rmode); |
4779 // Make sure the constant pool is not emitted in place of the next | 4774 // Make sure the constant pool is not emitted in place of the next |
4780 // instruction for which we just recorded relocation info. | 4775 // instruction for which we just recorded relocation info. |
4781 BlockConstPoolFor(1); | 4776 BlockConstPoolFor(1); |
4782 } | 4777 } |
4783 | 4778 |
4784 if (!RelocInfo::IsNone(rmode)) { | 4779 if (!RelocInfo::IsNone(rmode) && write_reloc_info) { |
4785 // Don't record external references unless the heap will be serialized. | 4780 // Don't record external references unless the heap will be serialized. |
4786 if (rmode == RelocInfo::EXTERNAL_REFERENCE && | 4781 if (rmode == RelocInfo::EXTERNAL_REFERENCE && |
4787 !serializer_enabled() && !emit_debug_code()) { | 4782 !serializer_enabled() && !emit_debug_code()) { |
4788 return; | 4783 return; |
4789 } | 4784 } |
4790 DCHECK(buffer_space() >= kMaxRelocSize); // too late to grow buffer here | 4785 DCHECK(buffer_space() >= kMaxRelocSize); // too late to grow buffer here |
4791 if (rmode == RelocInfo::CODE_TARGET_WITH_ID) { | 4786 if (rmode == RelocInfo::CODE_TARGET_WITH_ID) { |
4792 RelocInfo reloc_info_with_ast_id(reinterpret_cast<byte*>(pc_), rmode, | 4787 RelocInfo reloc_info_with_ast_id(reinterpret_cast<byte*>(pc_), rmode, |
4793 RecordedAstId().ToInt(), NULL); | 4788 RecordedAstId().ToInt(), NULL); |
4794 ClearRecordedAstId(); | 4789 ClearRecordedAstId(); |
(...skipping 233 matching lines...)
5028 movk(scratch, (target_offset >> 32) & 0xFFFF, 32); | 5023 movk(scratch, (target_offset >> 32) & 0xFFFF, 32); |
5029 DCHECK((target_offset >> 48) == 0); | 5024 DCHECK((target_offset >> 48) == 0); |
5030 add(rd, rd, scratch); | 5025 add(rd, rd, scratch); |
5031 } | 5026 } |
5032 | 5027 |
5033 | 5028 |
5034 } // namespace internal | 5029 } // namespace internal |
5035 } // namespace v8 | 5030 } // namespace v8 |
5036 | 5031 |
5037 #endif // V8_TARGET_ARCH_ARM64 | 5032 #endif // V8_TARGET_ARCH_ARM64 |
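
Note on the new sharing scheme in this patch: each distinct constant is stored once in entries_, with a map from the constant's raw bits to its index, so repeated uses only append another pc offset; AddSharedEntry returns true only for the first use, which is how RecordEntry/RecordRelocInfo decide whether to write relocation info. Below is a minimal standalone sketch of that pattern, with simplified stand-ins for the V8 types (SharedEntryMap is shown as a plain std::map here; the actual typedef lives in the assembler header and may differ).

#include <cstdint>
#include <map>
#include <utility>
#include <vector>

// Simplified stand-ins for the patch's SharedEntryMap and entries_ layout:
// each distinct constant gets one slot in `entries`, and every pc offset
// that loads it is appended to that slot's offset list.
using SharedEntryMap = std::map<uint64_t, int>;
using Entry = std::pair<uint64_t, std::vector<int>>;

// Returns true only for the first occurrence of `data`, mirroring the bool
// that RecordEntry propagates so RecordRelocInfo can skip writing duplicate
// relocation entries for shared constants.
bool AddSharedEntry(SharedEntryMap& entry_map, std::vector<Entry>& entries,
                    uint64_t data, int offset) {
  auto existing = entry_map.find(data);
  if (existing == entry_map.end()) {
    entry_map[data] = static_cast<int>(entries.size());
    entries.push_back({data, std::vector<int>(1, offset)});
    return true;   // first use: caller should emit reloc info
  }
  entries[existing->second].second.push_back(offset);
  return false;    // shared with an earlier use: no new reloc info
}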