Chromium Code Reviews

Side by Side Diff: src/a64/assembler-a64.cc

Issue 181873002: A64: Move veneer emission checking in the Assembler. (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Remove the force_emit parameter in CheckVeneerPool (created 6 years, 9 months ago)
1 // Copyright 2013 the V8 project authors. All rights reserved. 1 // Copyright 2013 the V8 project authors. All rights reserved.
2 // 2 //
3 // Redistribution and use in source and binary forms, with or without 3 // Redistribution and use in source and binary forms, with or without
4 // modification, are permitted provided that the following conditions are 4 // modification, are permitted provided that the following conditions are
5 // met: 5 // met:
6 // 6 //
7 // * Redistributions of source code must retain the above copyright 7 // * Redistributions of source code must retain the above copyright
8 // notice, this list of conditions and the following disclaimer. 8 // notice, this list of conditions and the following disclaimer.
9 // * Redistributions in binary form must reproduce the above 9 // * Redistributions in binary form must reproduce the above
10 // copyright notice, this list of conditions and the following 10 // copyright notice, this list of conditions and the following
(...skipping 268 matching lines...)
279 279
280 280
281 // Assembler 281 // Assembler
282 282
283 Assembler::Assembler(Isolate* isolate, void* buffer, int buffer_size) 283 Assembler::Assembler(Isolate* isolate, void* buffer, int buffer_size)
284 : AssemblerBase(isolate, buffer, buffer_size), 284 : AssemblerBase(isolate, buffer, buffer_size),
285 recorded_ast_id_(TypeFeedbackId::None()), 285 recorded_ast_id_(TypeFeedbackId::None()),
286 unresolved_branches_(), 286 unresolved_branches_(),
287 positions_recorder_(this) { 287 positions_recorder_(this) {
288 const_pool_blocked_nesting_ = 0; 288 const_pool_blocked_nesting_ = 0;
289 veneer_pool_blocked_nesting_ = 0;
289 Reset(); 290 Reset();
290 } 291 }
291 292
292 293
293 Assembler::~Assembler() { 294 Assembler::~Assembler() {
294 ASSERT(num_pending_reloc_info_ == 0); 295 ASSERT(num_pending_reloc_info_ == 0);
295 ASSERT(const_pool_blocked_nesting_ == 0); 296 ASSERT(const_pool_blocked_nesting_ == 0);
297 ASSERT(veneer_pool_blocked_nesting_ == 0);
296 } 298 }
297 299
298 300
299 void Assembler::Reset() { 301 void Assembler::Reset() {
300 #ifdef DEBUG 302 #ifdef DEBUG
301 ASSERT((pc_ >= buffer_) && (pc_ < buffer_ + buffer_size_)); 303 ASSERT((pc_ >= buffer_) && (pc_ < buffer_ + buffer_size_));
302 ASSERT(const_pool_blocked_nesting_ == 0); 304 ASSERT(const_pool_blocked_nesting_ == 0);
305 ASSERT(veneer_pool_blocked_nesting_ == 0);
306 ASSERT(unresolved_branches_.empty());
303 memset(buffer_, 0, pc_ - buffer_); 307 memset(buffer_, 0, pc_ - buffer_);
304 #endif 308 #endif
305 pc_ = buffer_; 309 pc_ = buffer_;
306 reloc_info_writer.Reposition(reinterpret_cast<byte*>(buffer_ + buffer_size_), 310 reloc_info_writer.Reposition(reinterpret_cast<byte*>(buffer_ + buffer_size_),
307 reinterpret_cast<byte*>(pc_)); 311 reinterpret_cast<byte*>(pc_));
308 num_pending_reloc_info_ = 0; 312 num_pending_reloc_info_ = 0;
309 next_buffer_check_ = 0; 313 next_constant_pool_check_ = 0;
314 next_veneer_pool_check_ = kMaxInt;
310 no_const_pool_before_ = 0; 315 no_const_pool_before_ = 0;
311 first_const_pool_use_ = -1; 316 first_const_pool_use_ = -1;
312 ClearRecordedAstId(); 317 ClearRecordedAstId();
313 } 318 }
314 319
315 320
316 void Assembler::GetCode(CodeDesc* desc) { 321 void Assembler::GetCode(CodeDesc* desc) {
317 // Emit constant pool if necessary. 322 // Emit constant pool if necessary.
318 CheckConstPool(true, false); 323 CheckConstPool(true, false);
319 ASSERT(num_pending_reloc_info_ == 0); 324 ASSERT(num_pending_reloc_info_ == 0);
(...skipping 207 matching lines...)
527 } 532 }
528 // The instruction at pc is now the last link in the label's chain. 533 // The instruction at pc is now the last link in the label's chain.
529 label->link_to(pc_offset()); 534 label->link_to(pc_offset());
530 } 535 }
531 536
532 return offset; 537 return offset;
533 } 538 }
534 539
535 540
536 void Assembler::DeleteUnresolvedBranchInfoForLabel(Label* label) { 541 void Assembler::DeleteUnresolvedBranchInfoForLabel(Label* label) {
542 if (unresolved_branches_.empty()) {
543 ASSERT(next_veneer_pool_check_ == kMaxInt);
544 return;
545 }
546
537 // Branches to this label will be resolved when the label is bound below. 547 // Branches to this label will be resolved when the label is bound below.
538 std::multimap<int, FarBranchInfo>::iterator it_tmp, it; 548 std::multimap<int, FarBranchInfo>::iterator it_tmp, it;
539 it = unresolved_branches_.begin(); 549 it = unresolved_branches_.begin();
540 while (it != unresolved_branches_.end()) { 550 while (it != unresolved_branches_.end()) {
541 it_tmp = it++; 551 it_tmp = it++;
542 if (it_tmp->second.label_ == label) { 552 if (it_tmp->second.label_ == label) {
543 CHECK(it_tmp->first >= pc_offset()); 553 CHECK(it_tmp->first >= pc_offset());
544 unresolved_branches_.erase(it_tmp); 554 unresolved_branches_.erase(it_tmp);
545 } 555 }
546 } 556 }
557 if (unresolved_branches_.empty()) {
558 next_veneer_pool_check_ = kMaxInt;
559 } else {
560 next_veneer_pool_check_ =
561 unresolved_branches_first_limit() - kVeneerDistanceCheckMargin;
562 }
547 } 563 }
548 564
549 565
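The bookkeeping above relies on unresolved_branches_ being an ordered multimap keyed by the last pc offset from which each branch can still reach its target, so the earliest limit is simply the first key. Below is a minimal standalone sketch of that pattern; the names and the margin value are illustrative, not the real Assembler members.

    #include <climits>
    #include <map>

    struct FarBranchSketch { int branch_pc; /* Label* label; */ };

    class VeneerBookkeepingSketch {
     public:
      // Register a branch whose target must lie before max_reachable_pc.
      void AddUnresolved(int max_reachable_pc, FarBranchSketch info) {
        unresolved_.insert(std::make_pair(max_reachable_pc, info));
        Recompute();
      }
      int next_veneer_pool_check() const { return next_check_; }

     private:
      void Recompute() {
        // Same shape as the code above: kMaxInt sentinel when empty,
        // otherwise the earliest limit minus a safety margin.
        next_check_ = unresolved_.empty()
            ? INT_MAX
            : unresolved_.begin()->first - kAssumedCheckMargin;
      }
      static const int kAssumedCheckMargin = 1024;  // placeholder value
      std::multimap<int, FarBranchSketch> unresolved_;
      int next_check_ = INT_MAX;
    };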
550 void Assembler::StartBlockConstPool() { 566 void Assembler::StartBlockConstPool() {
551 if (const_pool_blocked_nesting_++ == 0) { 567 if (const_pool_blocked_nesting_++ == 0) {
552 // Prevent constant pool checks happening by setting the next check to 568 // Prevent constant pool checks happening by setting the next check to
553 // the biggest possible offset. 569 // the biggest possible offset.
554 next_buffer_check_ = kMaxInt; 570 next_constant_pool_check_ = kMaxInt;
555 } 571 }
556 } 572 }
557 573
558 574
559 void Assembler::EndBlockConstPool() { 575 void Assembler::EndBlockConstPool() {
560 if (--const_pool_blocked_nesting_ == 0) { 576 if (--const_pool_blocked_nesting_ == 0) {
561 // Check the constant pool hasn't been blocked for too long. 577 // Check the constant pool hasn't been blocked for too long.
562 ASSERT((num_pending_reloc_info_ == 0) || 578 ASSERT((num_pending_reloc_info_ == 0) ||
563 (pc_offset() < (first_const_pool_use_ + kMaxDistToPool))); 579 (pc_offset() < (first_const_pool_use_ + kMaxDistToConstPool)));
564 // Two cases: 580 // Two cases:
565 // * no_const_pool_before_ >= next_buffer_check_ and the emission is 581 // * no_const_pool_before_ >= next_constant_pool_check_ and the emission is
566 // still blocked 582 // still blocked
567 // * no_const_pool_before_ < next_buffer_check_ and the next emit will 583 // * no_const_pool_before_ < next_constant_pool_check_ and the next emit
568 // trigger a check. 584 // will trigger a check.
569 next_buffer_check_ = no_const_pool_before_; 585 next_constant_pool_check_ = no_const_pool_before_;
570 } 586 }
571 } 587 }
572 588
573 589
574 bool Assembler::is_const_pool_blocked() const { 590 bool Assembler::is_const_pool_blocked() const {
575 return (const_pool_blocked_nesting_ > 0) || 591 return (const_pool_blocked_nesting_ > 0) ||
576 (pc_offset() < no_const_pool_before_); 592 (pc_offset() < no_const_pool_before_);
577 } 593 }
578 594
579 595
(...skipping 35 matching lines...)
615 Instruction* instr = reinterpret_cast<Instruction*>(pc_); 631 Instruction* instr = reinterpret_cast<Instruction*>(pc_);
616 ASSERT(instr->preceding()->IsLdrLiteralX() && 632 ASSERT(instr->preceding()->IsLdrLiteralX() &&
617 instr->preceding()->Rt() == xzr.code()); 633 instr->preceding()->Rt() == xzr.code());
618 #endif 634 #endif
619 635
620 // We must generate only one instruction. 636 // We must generate only one instruction.
621 Emit(BLR | Rn(xzr)); 637 Emit(BLR | Rn(xzr));
622 } 638 }
623 639
624 640
641 void Assembler::StartBlockVeneerPool() {
642 ++veneer_pool_blocked_nesting_;
643 }
644
645
646 void Assembler::EndBlockVeneerPool() {
647 if (--veneer_pool_blocked_nesting_ == 0) {
648 // Check the veneer pool hasn't been blocked for too long.
649 ASSERT(unresolved_branches_.empty() ||
650 (pc_offset() < unresolved_branches_first_limit()));
651 }
652 }
653
654
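The Start/End pairs above are normally driven through RAII scopes rather than called directly; a BlockPoolsScope appears later in this patch. The real scope class is declared in assembler-a64.h, so the following is only a hedged sketch of what a combined scope could look like.

    // Illustrative only: block both pools for the lifetime of the scope,
    // using the Start/End methods shown above. The actual BlockPoolsScope
    // is defined in the header and may differ from this sketch.
    class BlockPoolsScopeSketch {
     public:
      explicit BlockPoolsScopeSketch(Assembler* assem) : assem_(assem) {
        assem_->StartBlockConstPool();
        assem_->StartBlockVeneerPool();
      }
      ~BlockPoolsScopeSketch() {
        assem_->EndBlockVeneerPool();
        assem_->EndBlockConstPool();
      }

     private:
      Assembler* assem_;
    };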
625 void Assembler::br(const Register& xn) { 655 void Assembler::br(const Register& xn) {
626 positions_recorder()->WriteRecordedPositions(); 656 positions_recorder()->WriteRecordedPositions();
627 ASSERT(xn.Is64Bits()); 657 ASSERT(xn.Is64Bits());
628 Emit(BR | Rn(xn)); 658 Emit(BR | Rn(xn));
629 } 659 }
630 660
631 661
632 void Assembler::blr(const Register& xn) { 662 void Assembler::blr(const Register& xn) {
633 positions_recorder()->WriteRecordedPositions(); 663 positions_recorder()->WriteRecordedPositions();
634 ASSERT(xn.Is64Bits()); 664 ASSERT(xn.Is64Bits());
(...skipping 1228 matching lines...)
1863 1893
1864 void Assembler::debug(const char* message, uint32_t code, Instr params) { 1894 void Assembler::debug(const char* message, uint32_t code, Instr params) {
1865 #ifdef USE_SIMULATOR 1895 #ifdef USE_SIMULATOR
1866 // Don't generate simulator specific code if we are building a snapshot, which 1896 // Don't generate simulator specific code if we are building a snapshot, which
1867 // might be run on real hardware. 1897 // might be run on real hardware.
1868 if (!Serializer::enabled()) { 1898 if (!Serializer::enabled()) {
1869 #ifdef DEBUG 1899 #ifdef DEBUG
1870 Serializer::TooLateToEnableNow(); 1900 Serializer::TooLateToEnableNow();
1871 #endif 1901 #endif
1872 // The arguments to the debug marker need to be contiguous in memory, so 1902 // The arguments to the debug marker need to be contiguous in memory, so
1873 // make sure we don't try to emit a literal pool. 1903 // make sure we don't try to emit pools.
1874 BlockConstPoolScope scope(this); 1904 BlockPoolsScope scope(this);
1875 1905
1876 Label start; 1906 Label start;
1877 bind(&start); 1907 bind(&start);
1878 1908
1879 // Refer to instructions-a64.h for a description of the marker and its 1909 // Refer to instructions-a64.h for a description of the marker and its
1880 // arguments. 1910 // arguments.
1881 hlt(kImmExceptionIsDebug); 1911 hlt(kImmExceptionIsDebug);
1882 ASSERT(SizeOfCodeGeneratedSince(&start) == kDebugCodeOffset); 1912 ASSERT(SizeOfCodeGeneratedSince(&start) == kDebugCodeOffset);
1883 dc32(code); 1913 dc32(code);
1884 ASSERT(SizeOfCodeGeneratedSince(&start) == kDebugParamsOffset); 1914 ASSERT(SizeOfCodeGeneratedSince(&start) == kDebugParamsOffset);
(...skipping 553 matching lines...)
2438 reloc_info_writer.Write(&rinfo); 2468 reloc_info_writer.Write(&rinfo);
2439 } 2469 }
2440 } 2470 }
2441 } 2471 }
2442 2472
2443 2473
2444 void Assembler::BlockConstPoolFor(int instructions) { 2474 void Assembler::BlockConstPoolFor(int instructions) {
2445 int pc_limit = pc_offset() + instructions * kInstructionSize; 2475 int pc_limit = pc_offset() + instructions * kInstructionSize;
2446 if (no_const_pool_before_ < pc_limit) { 2476 if (no_const_pool_before_ < pc_limit) {
2447 // If there are some pending entries, the constant pool cannot be blocked 2477 // If there are some pending entries, the constant pool cannot be blocked
2448 // further than first_const_pool_use_ + kMaxDistToPool 2478 // further than first_const_pool_use_ + kMaxDistToConstPool
2449 ASSERT((num_pending_reloc_info_ == 0) || 2479 ASSERT((num_pending_reloc_info_ == 0) ||
2450 (pc_limit < (first_const_pool_use_ + kMaxDistToPool))); 2480 (pc_limit < (first_const_pool_use_ + kMaxDistToConstPool)));
2451 no_const_pool_before_ = pc_limit; 2481 no_const_pool_before_ = pc_limit;
2452 } 2482 }
2453 2483
2454 if (next_buffer_check_ < no_const_pool_before_) { 2484 if (next_constant_pool_check_ < no_const_pool_before_) {
2455 next_buffer_check_ = no_const_pool_before_; 2485 next_constant_pool_check_ = no_const_pool_before_;
2456 } 2486 }
2457 } 2487 }
2458 2488
2459 2489
2460 void Assembler::CheckConstPool(bool force_emit, bool require_jump) { 2490 void Assembler::CheckConstPool(bool force_emit, bool require_jump) {
2461 // Short sequences of instructions must not be broken up by constant pool 2491 // Short sequences of instructions must not be broken up by constant pool
2462 // emission; such sequences are protected by calls to BlockConstPoolFor and 2492 // emission; such sequences are protected by calls to BlockConstPoolFor and
2463 // BlockConstPoolScope. 2493 // BlockConstPoolScope.
2464 if (is_const_pool_blocked()) { 2494 if (is_const_pool_blocked()) {
2465 // Something is wrong if emission is forced and blocked at the same time. 2495 // Something is wrong if emission is forced and blocked at the same time.
2466 ASSERT(!force_emit); 2496 ASSERT(!force_emit);
2467 return; 2497 return;
2468 } 2498 }
2469 2499
2470 // There is nothing to do if there are no pending constant pool entries. 2500 // There is nothing to do if there are no pending constant pool entries.
2471 if (num_pending_reloc_info_ == 0) { 2501 if (num_pending_reloc_info_ == 0) {
2472 // Calculate the offset of the next check. 2502 // Calculate the offset of the next check.
2473 next_buffer_check_ = pc_offset() + kCheckPoolInterval; 2503 next_constant_pool_check_ = pc_offset() + kCheckConstPoolInterval;
2474 return; 2504 return;
2475 } 2505 }
2476 2506
2477 // We emit a constant pool when: 2507 // We emit a constant pool when:
2478 // * requested to do so by parameter force_emit (e.g. after each function). 2508 // * requested to do so by parameter force_emit (e.g. after each function).
2479 // * the distance to the first instruction accessing the constant pool is 2509 // * the distance to the first instruction accessing the constant pool is
2480 // kAvgDistToPool or more. 2510 // kAvgDistToConstPool or more.
2481 // * no jump is required and the distance to the first instruction accessing 2511 // * no jump is required and the distance to the first instruction accessing
2482 // the constant pool is at least kMaxDistToPool / 2. 2512 // the constant pool is at least kMaxDistToConstPool / 2.
2483 ASSERT(first_const_pool_use_ >= 0); 2513 ASSERT(first_const_pool_use_ >= 0);
2484 int dist = pc_offset() - first_const_pool_use_; 2514 int dist = pc_offset() - first_const_pool_use_;
2485 if (!force_emit && dist < kAvgDistToPool && 2515 if (!force_emit && dist < kAvgDistToConstPool &&
2486 (require_jump || (dist < (kMaxDistToPool / 2)))) { 2516 (require_jump || (dist < (kMaxDistToConstPool / 2)))) {
2487 return; 2517 return;
2488 } 2518 }
2489 2519
2520 int jump_instr = require_jump ? kInstructionSize : 0;
2521 int size_pool_marker = kInstructionSize;
2522 int size_pool_guard = kInstructionSize;
2523 int pool_size = jump_instr + size_pool_marker + size_pool_guard +
2524 num_pending_reloc_info_ * kPointerSize;
2525 int needed_space = pool_size + kGap;
2526
2527 // Emit veneers for branches that would go out of range during emission of the
2528 // constant pool.
2529 CheckVeneerPool(require_jump, kVeneerDistanceMargin - pool_size);
2530
2490 Label size_check; 2531 Label size_check;
2491 bind(&size_check); 2532 bind(&size_check);
2492 2533
2493 // Check that the code buffer is large enough before emitting the constant 2534 // Check that the code buffer is large enough before emitting the constant
2494 // pool (include the jump over the pool, the constant pool marker, the 2535 // pool (include the jump over the pool, the constant pool marker, the
2495 // constant pool guard, and the gap to the relocation information). 2536 // constant pool guard, and the gap to the relocation information).
2496 int jump_instr = require_jump ? kInstructionSize : 0;
2497 int size_pool_marker = kInstructionSize;
2498 int size_pool_guard = kInstructionSize;
2499 int pool_size = jump_instr + size_pool_marker + size_pool_guard +
2500 num_pending_reloc_info_ * kPointerSize;
2501 int needed_space = pool_size + kGap;
2502 while (buffer_space() <= needed_space) { 2537 while (buffer_space() <= needed_space) {
2503 GrowBuffer(); 2538 GrowBuffer();
2504 } 2539 }
2505 2540
2506 { 2541 {
2507 // Block recursive calls to CheckConstPool. 2542 // Block recursive calls to CheckConstPool and protect from veneer pools.
2508 BlockConstPoolScope block_const_pool(this); 2543 BlockPoolsScope block_pools(this);
2509 RecordComment("[ Constant Pool"); 2544 RecordComment("[ Constant Pool");
2510 RecordConstPool(pool_size); 2545 RecordConstPool(pool_size);
2511 2546
2512 // Emit jump over constant pool if necessary. 2547 // Emit jump over constant pool if necessary.
2513 Label after_pool; 2548 Label after_pool;
2514 if (require_jump) { 2549 if (require_jump) {
2515 b(&after_pool); 2550 b(&after_pool);
2516 } 2551 }
2517 2552
2518 // Emit a constant pool header. The header has two goals: 2553 // Emit a constant pool header. The header has two goals:
(...skipping 32 matching lines...) Expand 10 before | Expand all | Expand 10 after
2551 2586
2552 RecordComment("]"); 2587 RecordComment("]");
2553 2588
2554 if (after_pool.is_linked()) { 2589 if (after_pool.is_linked()) {
2555 bind(&after_pool); 2590 bind(&after_pool);
2556 } 2591 }
2557 } 2592 }
2558 2593
2559 // Since a constant pool was just emitted, move the check offset forward by 2594 // Since a constant pool was just emitted, move the check offset forward by
2560 // the standard interval. 2595 // the standard interval.
2561 next_buffer_check_ = pc_offset() + kCheckPoolInterval; 2596 next_constant_pool_check_ = pc_offset() + kCheckConstPoolInterval;
2562 2597
2563 ASSERT(SizeOfCodeGeneratedSince(&size_check) == 2598 ASSERT(SizeOfCodeGeneratedSince(&size_check) ==
2564 static_cast<unsigned>(pool_size)); 2599 static_cast<unsigned>(pool_size));
2565 } 2600 }
2566 2601
2567 2602
2603 bool Assembler::ShouldEmitVeneer(int max_reachable_pc, int margin) {
2604 // Account for the branch around the veneers and the guard.
2605 int protection_offset = 2 * kInstructionSize;
2606 return pc_offset() > max_reachable_pc - margin - protection_offset -
2607 static_cast<int>(unresolved_branches_.size() * kMaxVeneerCodeSize);
2608 }
2609
2610
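ShouldEmitVeneer is conservative: it triggers once the current pc gets within margin, plus two protective instructions, plus worst-case veneer space for every pending branch, of the branch's maximum reach. Below is a hedged numeric sketch; kMaxVeneerCodeSize is defined in the header, so it is passed as a parameter here, and all the numbers are made up.

    #include <cstdio>

    // Standalone restatement of the predicate above; not the real method.
    bool ShouldEmitVeneerSketch(int pc_offset, int max_reachable_pc, int margin,
                                int num_unresolved, int max_veneer_code_size) {
      int protection_offset = 2 * 4;  // branch around veneers + guard, 4 bytes each
      return pc_offset > max_reachable_pc - margin - protection_offset -
                         num_unresolved * max_veneer_code_size;
    }

    int main() {
      // A tbz/tbnz-style branch emitted at offset 0x100 can reach roughly
      // 32KB forward, so its limit is about 0x100 + 32 * 1024 = 33024.
      int max_reachable_pc = 0x100 + 32 * 1024;
      bool emit = ShouldEmitVeneerSketch(/*pc_offset=*/32512, max_reachable_pc,
                                         /*margin=*/2048, /*num_unresolved=*/3,
                                         /*max_veneer_code_size=*/4);
      std::printf("emit veneer now: %s\n", emit ? "yes" : "no");  // yes
      return 0;
    }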
2611 void Assembler::EmitVeneers(bool need_protection, int margin) {
2612 BlockPoolsScope scope(this);
2613 RecordComment("[ Veneers");
2614
2615 Label end;
2616 if (need_protection) {
2617 b(&end);
2618 }
2619
2620 EmitVeneersGuard();
2621
2622 Label size_check;
2623
2624 std::multimap<int, FarBranchInfo>::iterator it, it_to_delete;
2625
2626 it = unresolved_branches_.begin();
2627 while (it != unresolved_branches_.end()) {
2628 if (ShouldEmitVeneer(it->first, margin)) {
2629 Instruction* branch = InstructionAt(it->second.pc_offset_);
2630 Label* label = it->second.label_;
2631
2632 #ifdef DEBUG
2633 bind(&size_check);
2634 #endif
2635 // Patch the branch to point to the current position, and emit a branch
2636 // to the label.
2637 Instruction* veneer = reinterpret_cast<Instruction*>(pc_);
2638 RemoveBranchFromLabelLinkChain(branch, label, veneer);
2639 branch->SetImmPCOffsetTarget(veneer);
2640 b(label);
2641 #ifdef DEBUG
2642 ASSERT(SizeOfCodeGeneratedSince(&size_check) <=
2643 static_cast<uint64_t>(kMaxVeneerCodeSize));
2644 size_check.Unuse();
2645 #endif
2646
2647 it_to_delete = it++;
2648 unresolved_branches_.erase(it_to_delete);
2649 } else {
2650 ++it;
2651 }
2652 }
2653
2654 bind(&end);
2655
2656 RecordComment("]");
2657
2658 if (unresolved_branches_.empty()) {
2659 next_veneer_pool_check_ = kMaxInt;
2660 } else {
2661 next_veneer_pool_check_ =
2662 unresolved_branches_first_limit() - kVeneerDistanceCheckMargin;
2663 }
2664 }
2665
2666
2667 void Assembler::EmitVeneersGuard() {
2668 if (emit_debug_code()) {
2669 Unreachable();
2670 }
2671 }
2672
2673
2674 void Assembler::CheckVeneerPool(bool require_jump,
2675 int margin) {
2676 // There is nothing to do if there are no pending veneer pool entries.
2677 if (unresolved_branches_.empty()) {
2678 ASSERT(next_veneer_pool_check_ == kMaxInt);
2679 return;
2680 }
2681
2682 ASSERT(pc_offset() < unresolved_branches_first_limit());
2683
2684 // Short sequences of instructions must not be broken up by veneer pool
2685 // emission; such sequences are protected by calls to BlockVeneerPoolFor and
2686 // BlockVeneerPoolScope.
2687 if (is_veneer_pool_blocked()) {
2688 return;
2689 }
2690
2691 if (!require_jump) {
2692 // Prefer emitting veneers protected by an existing instruction.
2693 margin *= kVeneerNoProtectionFactor;
2694 }
2695 if (ShouldEmitVeneers(margin)) {
2696 EmitVeneers(require_jump, margin);
2697 } else {
2698 next_veneer_pool_check_ =
2699 unresolved_branches_first_limit() - kVeneerDistanceCheckMargin;
2700 }
2701 }
2702
2703
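The point of next_veneer_pool_check_ is that the per-instruction cost stays a single integer comparison; only when that threshold is crossed does the assembler pay for the scan in CheckVeneerPool. Presumably the actual hook sits in the inlined emission path (assembler-a64-inl.h is also part of this patch); the following is only a sketch of the pattern, not the real code.

    #include <climits>

    // Illustrative deferred-check pattern; member names mirror the patch
    // but the class itself is made up.
    class PoolCheckSketch {
     public:
      void EmitOneInstruction() {
        pc_offset_ += 4;  // one A64 instruction
        if (pc_offset_ >= next_veneer_pool_check_) {
          CheckVeneerPoolSketch();  // rare slow path
        }
      }

     private:
      void CheckVeneerPoolSketch() {
        // Either emit veneers or push the threshold forward, as the real
        // CheckVeneerPool above does; simplified here to "nothing pending".
        next_veneer_pool_check_ = INT_MAX;
      }
      int pc_offset_ = 0;
      int next_veneer_pool_check_ = INT_MAX;
    };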
2568 void Assembler::RecordComment(const char* msg) { 2704 void Assembler::RecordComment(const char* msg) {
2569 if (FLAG_code_comments) { 2705 if (FLAG_code_comments) {
2570 CheckBuffer(); 2706 CheckBuffer();
2571 RecordRelocInfo(RelocInfo::COMMENT, reinterpret_cast<intptr_t>(msg)); 2707 RecordRelocInfo(RelocInfo::COMMENT, reinterpret_cast<intptr_t>(msg));
2572 } 2708 }
2573 } 2709 }
2574 2710
2575 2711
2576 int Assembler::buffer_space() const { 2712 int Assembler::buffer_space() const {
2577 return reloc_info_writer.pos() - reinterpret_cast<byte*>(pc_); 2713 return reloc_info_writer.pos() - reinterpret_cast<byte*>(pc_);
(...skipping 19 matching lines...)
2597 // code. 2733 // code.
2598 #ifdef ENABLE_DEBUGGER_SUPPORT 2734 #ifdef ENABLE_DEBUGGER_SUPPORT
2599 RecordRelocInfo(RelocInfo::CONST_POOL, static_cast<intptr_t>(size)); 2735 RecordRelocInfo(RelocInfo::CONST_POOL, static_cast<intptr_t>(size));
2600 #endif 2736 #endif
2601 } 2737 }
2602 2738
2603 2739
2604 } } // namespace v8::internal 2740 } } // namespace v8::internal
2605 2741
2606 #endif // V8_TARGET_ARCH_A64 2742 #endif // V8_TARGET_ARCH_A64