Chromium Code Reviews

Diff: src/arm/assembler-arm.cc

Issue 11191029: Use VLDR instead of VMOVs from GPR when a 64-bit double can't be encoded as a VMOV immediate. (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Update with ulan's comments. Created 8 years, 2 months ago
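In short: when a 64-bit double cannot be encoded as a VMOV immediate, the assembler previously synthesised it through a GPR with two moves per half; with this patch it records a 64-bit constant pool entry and emits a single pc-relative VLDR that is patched once the pool is flushed. A condensed sketch of the two paths, using the assembler calls that appear in the diff below (illustrative, not the verbatim patch):

    // Old path: build the double from its two 32-bit halves via GPRs.
    uint32_t lo, hi;
    DoubleAsTwoUInt32(imm, &lo, &hi);  // split the IEEE-754 bit pattern
    mov(ip, Operand(lo));
    vmov(dst.low(), ip, cond);         // low S register of D register dst
    mov(ip, Operand(hi));
    vmov(dst.high(), ip, cond);        // high S register of D register dst

    // New path: pend a 64-bit pool entry, load it with one instruction.
    RecordRelocInfo(imm);                // queues a NONE64 pool entry
    vldr(dst, MemOperand(pc, 0), cond);  // offset 0, patched in CheckConstPool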
1 // Copyright (c) 1994-2006 Sun Microsystems Inc.
2 // All Rights Reserved.
3 //
4 // Redistribution and use in source and binary forms, with or without
5 // modification, are permitted provided that the following conditions
6 // are met:
7 //
8 // - Redistributions of source code must retain the above copyright notice,
9 // this list of conditions and the following disclaimer.
10 //
(...skipping 255 matching lines...)
266 // register r is not encoded.
267 const Instr kPushRegPattern =
268     al | B26 | 4 | NegPreIndex | kRegister_sp_Code * B16;
269 // ldr(r, MemOperand(sp, 4, PostIndex), al) instruction (aka pop(r))
270 // register r is not encoded.
271 const Instr kPopRegPattern =
272     al | B26 | L | 4 | PostIndex | kRegister_sp_Code * B16;
273 // mov lr, pc
274 const Instr kMovLrPc = al | MOV | kRegister_pc_Code | kRegister_lr_Code * B12;
275 // ldr rd, [pc, #offset]
- 276 const Instr kLdrPCMask = kCondMask | 15 * B24 | 7 * B20 | 15 * B16;
- 277 const Instr kLdrPCPattern = al | 5 * B24 | L | kRegister_pc_Code * B16;
+ 276 const Instr kLdrPCMask = 15 * B24 | 7 * B20 | 15 * B16;
+ 277 const Instr kLdrPCPattern = 5 * B24 | L | kRegister_pc_Code * B16;
+ 278 // vldr dd, [pc, #offset]
+ 279 const Instr kVldrDPCMask = 15 * B24 | 3 * B20 | 15 * B16 | 15 * B8;
+ 280 const Instr kVldrDPCPattern = 13 * B24 | L | kRegister_pc_Code * B16 | 11 * B8;
281 // blxcc rm
282 const Instr kBlxRegMask =
283     15 * B24 | 15 * B20 | 15 * B16 | 15 * B12 | 15 * B8 | 15 * B4;
284 const Instr kBlxRegPattern =
285     B24 | B21 | 15 * B16 | 15 * B12 | 15 * B8 | BLX;
286 const Instr kBlxIp = al | kBlxRegPattern | ip.code();
287 const Instr kMovMvnMask = 0x6d * B21 | 0xf * B16;
288 const Instr kMovMvnPattern = 0xd * B21;
289 const Instr kMovMvnFlip = B22;
290 const Instr kMovLeaveCCMask = 0xdff * B16;
(...skipping 55 matching lines...)
346     buffer_ = static_cast<byte*>(buffer);
347     buffer_size_ = buffer_size;
348     own_buffer_ = false;
349   }
350
351   // Set up buffer pointers.
352   ASSERT(buffer_ != NULL);
353   pc_ = buffer_;
354   reloc_info_writer.Reposition(buffer_ + buffer_size, pc_);
355   num_pending_reloc_info_ = 0;
+ 356   num_pending_64_bit_reloc_info_ = 0;
357   next_buffer_check_ = 0;
358   const_pool_blocked_nesting_ = 0;
359   no_const_pool_before_ = 0;
360   first_const_pool_use_ = -1;
361   last_bound_pos_ = 0;
362   ClearRecordedAstId();
363 }
364
365
366 Assembler::~Assembler() {
367   ASSERT(const_pool_blocked_nesting_ == 0);
368   if (own_buffer_) {
369     if (isolate()->assembler_spare_buffer() == NULL &&
370         buffer_size_ == kMinimalBufferSize) {
371       isolate()->set_assembler_spare_buffer(buffer_);
372     } else {
373       DeleteArray(buffer_);
374     }
375   }
376 }
377
378
379 void Assembler::GetCode(CodeDesc* desc) {
380   // Emit constant pool if necessary.
381   CheckConstPool(true, false);
382   ASSERT(num_pending_reloc_info_ == 0);
+ 383   ASSERT(num_pending_64_bit_reloc_info_ == 0);
384
385   // Set up code descriptor.
386   desc->buffer = buffer_;
387   desc->buffer_size = buffer_size_;
388   desc->instr_size = pc_offset();
389   desc->reloc_size = (buffer_ + buffer_size_) - reloc_info_writer.pos();
390 }
391
392
393 void Assembler::Align(int m) {
(...skipping 26 matching lines...)
420   // with 4 to get the offset in bytes.
421   return ((instr & kImm24Mask) << 8) >> 6;
422 }
423
424
425 bool Assembler::IsLdrRegisterImmediate(Instr instr) {
426   return (instr & (B27 | B26 | B25 | B22 | B20)) == (B26 | B20);
427 }
428
429
+ 430 bool Assembler::IsVldrDRegisterImmediate(Instr instr) {
+ 431   return (instr & (15 * B24 | 3 * B20 | 15 * B8)) == (13 * B24 | B20 | 11 * B8);
+ 432 }
+ 433
+ 434
435 int Assembler::GetLdrRegisterImmediateOffset(Instr instr) {
436   ASSERT(IsLdrRegisterImmediate(instr));
437   bool positive = (instr & B23) == B23;
438   int offset = instr & kOff12Mask;  // Zero extended offset.
439   return positive ? offset : -offset;
440 }
441
442
+ 443 int Assembler::GetVldrDRegisterImmediateOffset(Instr instr) {
+ 444   ASSERT(IsVldrDRegisterImmediate(instr));
+ 445   bool positive = (instr & B23) == B23;
+ 446   int offset = instr & kOff8Mask;  // Zero extended offset.
+ 447   offset <<= 2;
+ 448   return positive ? offset : -offset;
+ 449 }
+ 450
+ 451
452 Instr Assembler::SetLdrRegisterImmediateOffset(Instr instr, int offset) {
453   ASSERT(IsLdrRegisterImmediate(instr));
454   bool positive = offset >= 0;
455   if (!positive) offset = -offset;
456   ASSERT(is_uint12(offset));
457   // Set bit indicating whether the offset should be added.
458   instr = (instr & ~B23) | (positive ? B23 : 0);
459   // Set the actual offset.
460   return (instr & ~kOff12Mask) | offset;
461 }
462
463
+ 464 Instr Assembler::SetVldrDRegisterImmediateOffset(Instr instr, int offset) {
+ 465   ASSERT(IsVldrDRegisterImmediate(instr));
+ 466   ASSERT((offset & ~3) == offset);  // Must be 64-bit aligned.
+ 467   bool positive = offset >= 0;
+ 468   if (!positive) offset = -offset;
+ 469   ASSERT(is_uint10(offset));
+ 470   // Set bit indicating whether the offset should be added.
+ 471   instr = (instr & ~B23) | (positive ? B23 : 0);
+ 472   // Set the actual offset. Its bottom 2 bits are zero.
+ 473   return (instr & ~kOff8Mask) | (offset >> 2);
+ 474 }
+ 475
+ 476
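A note on the offset plumbing above: vldr encodes its immediate as an 8-bit word count with the sign carried in bit 23 (the U bit), so byte offsets must be multiples of 4 and under 1024 in magnitude, which is exactly what the is_uint10 assert enforces. A minimal standalone round trip of the same encode/decode logic, assuming B23 = 1 << 23 and kOff8Mask = 0xff as stand-ins for the V8 constants:

    #include <cassert>
    #include <cstdint>

    int main() {
      const uint32_t kB23 = 1u << 23;
      const uint32_t kOff8Mask = 0xFF;
      uint32_t instr = 0;  // just the offset bits of a vldr; rest elided
      int offset = 1016;   // byte offset: multiple of 4, magnitude < 1024

      // Encode: sign into the U bit, magnitude word-scaled into 8 bits.
      assert((offset & 3) == 0 && offset < 1024 && offset > -1024);
      uint32_t enc = (offset >= 0 ? kB23 : 0) |
                     (uint32_t)((offset >= 0 ? offset : -offset) >> 2);
      instr = (instr & ~(kB23 | kOff8Mask)) | enc;

      // Decode, mirroring GetVldrDRegisterImmediateOffset().
      int mag = (instr & kOff8Mask) << 2;
      int decoded = (instr & kB23) ? mag : -mag;
      assert(decoded == offset);  // round trip: 1016 -> 0xFE -> 1016
      return 0;
    }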
477 bool Assembler::IsStrRegisterImmediate(Instr instr) {
478   return (instr & (B27 | B26 | B25 | B22 | B20)) == B26;
479 }
480
481
482 Instr Assembler::SetStrRegisterImmediateOffset(Instr instr, int offset) {
483   ASSERT(IsStrRegisterImmediate(instr));
484   bool positive = offset >= 0;
485   if (!positive) offset = -offset;
486   ASSERT(is_uint12(offset));
(...skipping 65 matching lines...)
552
553
554 bool Assembler::IsLdrRegFpNegOffset(Instr instr) {
555   return ((instr & kLdrStrInstrTypeMask) == kLdrRegFpNegOffsetPattern);
556 }
557
558
559 bool Assembler::IsLdrPcImmediateOffset(Instr instr) {
560   // Check the instruction is indeed a
561   // ldr<cond> <Rd>, [pc +/- offset_12].
- 530   return (instr & (kLdrPCMask & ~kCondMask)) == 0x051f0000;
+ 562   return (instr & kLdrPCMask) == kLdrPCPattern;
563 }
564
565
+ 566 bool Assembler::IsVldrDPcImmediateOffset(Instr instr) {
+ 567   // Check the instruction is indeed a
+ 568   // vldr<cond> <Dd>, [pc +/- offset_12].
+ 569   return (instr & kVldrDPCMask) == kVldrDPCPattern;
+ 570 }
+ 571
+ 572
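The new mask/pattern pair can be checked against a concrete encoding. Per ARM DDI 0406, vldr d0, [pc, #8] with cond = al assembles to 0xED9F0B02, and masking out the condition, U/D bits, Vd, and imm8 leaves exactly kVldrDPCPattern. A self-contained check; the hex values below are hand-derived, so treat this as an illustration rather than a reference:

    #include <cassert>
    #include <cstdint>

    int main() {
      const uint32_t kVldrDPCMask    = 0x0F3F0F00;  // 15*B24 | 3*B20 | 15*B16 | 15*B8
      const uint32_t kVldrDPCPattern = 0x0D1F0B00;  // 13*B24 | L | pc*B16 | 11*B8
      const uint32_t vldr_d0_pc_8    = 0xED9F0B02;  // vldr d0, [pc, #8]
      assert((vldr_d0_pc_8 & kVldrDPCMask) == kVldrDPCPattern);
      return 0;
    }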
573 bool Assembler::IsTstImmediate(Instr instr) {
574   return (instr & (B27 | B26 | I | kOpCodeMask | S | kRdMask)) ==
575       (I | TST | S);
576 }
577
578
579 bool Assembler::IsCmpRegister(Instr instr) {
580   return (instr & (B27 | B26 | I | kOpCodeMask | S | kRdMask | B4)) ==
581       (CMP | S);
582 }
(...skipping 258 matching lines...)
841 // encoded.
842 bool Operand::must_use_constant_pool(const Assembler* assembler) const {
843   if (rmode_ == RelocInfo::EXTERNAL_REFERENCE) {
844 #ifdef DEBUG
845     if (!Serializer::enabled()) {
846       Serializer::TooLateToEnableNow();
847     }
848 #endif  // def DEBUG
849     if (assembler != NULL && assembler->predictable_code_size()) return true;
850     return Serializer::enabled();
- 812   } else if (rmode_ == RelocInfo::NONE) {
+ 851   } else if (RelocInfo::IsNone(rmode_)) {
852     return false;
853   }
854   return true;
855 }
856
857
858 bool Operand::is_single_instruction(const Assembler* assembler,
859                                     Instr instr) const {
860   if (rm_.is_valid()) return true;
861   uint32_t dummy1, dummy2;
(...skipping 1187 matching lines...)
2049                      const Condition cond) {
2050   // Dd = immediate
2051   // Instruction details available in ARM DDI 0406B, A8-640.
2052   ASSERT(CpuFeatures::IsEnabled(VFP2));
2053
2054   uint32_t enc;
2055   if (CpuFeatures::IsSupported(VFP3) && FitsVMOVDoubleImmediate(imm, &enc)) {
2056     // The double can be encoded in the instruction.
2057     emit(cond | 0xE*B24 | 0xB*B20 | dst.code()*B12 | 0xB*B8 | enc);
2058   } else {
- 2020     // Synthesise the double from ARM immediates. This could be implemented
- 2021     // using vldr from a constant pool.
- 2022     uint32_t lo, hi;
- 2023     DoubleAsTwoUInt32(imm, &lo, &hi);
- 2024     mov(ip, Operand(lo));
- 2025
- 2026     if (scratch.is(no_reg)) {
- 2027       // Move the low part of the double into the lower of the corresponding S
- 2028       // registers of D register dst.
- 2029       vmov(dst.low(), ip, cond);
- 2030
- 2031       // Move the high part of the double into the higher of the corresponding S
- 2032       // registers of D register dst.
- 2033       mov(ip, Operand(hi));
- 2034       vmov(dst.high(), ip, cond);
- 2035     } else {
- 2036       // Move the low and high parts of the double to a D register in one
- 2037       // instruction.
- 2038       mov(scratch, Operand(hi));
- 2039       vmov(dst, ip, scratch, cond);
- 2040     }
+ 2059     RecordRelocInfo(imm);
+ 2060     vldr(dst, MemOperand(pc, 0), cond);
+ 2061     // TODO(jfb) Constant blinding, denorm to zero, no NaN.
2062   }
2063 }
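The MemOperand(pc, 0) above is a placeholder: the real offset is only known when the pool is emitted, and on ARM a pc-relative load sees pc as the instruction's own address plus 8 (kPcLoadDelta). A toy version of the arithmetic CheckConstPool performs when it patches the instruction (addresses invented for illustration):

    #include <cassert>

    int main() {
      const int kPcLoadDelta = 8;  // pc reads as instruction address + 8
      int vldr_at = 0x100;         // where vldr dd, [pc, #0] was emitted
      int data_at = 0x120;         // where the 64-bit constant lands in the pool
      int delta = data_at - vldr_at - kPcLoadDelta;
      assert(delta == 0x18);       // patched into the vldr's offset field
      return 0;
    }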
2064
2065
2066 void Assembler::vmov(const SwVfpRegister dst,
2067                      const SwVfpRegister src,
2068                      const Condition cond) {
2069   // Sd = Sm
2070   // Instruction details available in ARM DDI 0406B, A8-642.
2071   ASSERT(CpuFeatures::IsEnabled(VFP2));
(...skipping 493 matching lines...)
2565     }
2566   }
2567 }
2568
2569
2570 void Assembler::db(uint8_t data) {
2571   // No relocation info should be pending while using db. db is used
2572   // to write pure data with no pointers and the constant pool should
2573   // be emitted before using db.
2574   ASSERT(num_pending_reloc_info_ == 0);
+ 2575   ASSERT(num_pending_64_bit_reloc_info_ == 0);
2576   CheckBuffer();
2577   *reinterpret_cast<uint8_t*>(pc_) = data;
2578   pc_ += sizeof(uint8_t);
2579 }
2580
2581
2582 void Assembler::dd(uint32_t data) {
2583   // No relocation info should be pending while using dd. dd is used
2584   // to write pure data with no pointers and the constant pool should
2585   // be emitted before using dd.
2586   ASSERT(num_pending_reloc_info_ == 0);
+ 2587   ASSERT(num_pending_64_bit_reloc_info_ == 0);
2588   CheckBuffer();
2589   *reinterpret_cast<uint32_t*>(pc_) = data;
2590   pc_ += sizeof(uint32_t);
2591 }
2592
2593
2594 void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data) {
2595   // We do not try to reuse pool constants.
2596   RelocInfo rinfo(pc_, rmode, data, NULL);
2597   if (((rmode >= RelocInfo::JS_RETURN) &&
2598        (rmode <= RelocInfo::DEBUG_BREAK_SLOT)) ||
2599       (rmode == RelocInfo::CONST_POOL)) {
2600     // Adjust code for new modes.
2601     ASSERT(RelocInfo::IsDebugBreakSlot(rmode)
2602            || RelocInfo::IsJSReturn(rmode)
2603            || RelocInfo::IsComment(rmode)
2604            || RelocInfo::IsPosition(rmode)
2605            || RelocInfo::IsConstPool(rmode));
2606     // These modes do not need an entry in the constant pool.
2607   } else {
- 2585     ASSERT(num_pending_reloc_info_ < kMaxNumPendingRelocInfo);
- 2586     if (num_pending_reloc_info_ == 0) {
- 2587       first_const_pool_use_ = pc_offset();
- 2588     }
- 2589     pending_reloc_info_[num_pending_reloc_info_++] = rinfo;
- 2590     // Make sure the constant pool is not emitted in place of the next
- 2591     // instruction for which we just recorded relocation info.
- 2592     BlockConstPoolFor(1);
+ 2608     RecordRelocInfoConstantPoolEntryHelper(rinfo);
2609   }
- 2594   if (rinfo.rmode() != RelocInfo::NONE) {
+ 2610   if (!RelocInfo::IsNone(rinfo.rmode())) {
2611     // Don't record external references unless the heap will be serialized.
2612     if (rmode == RelocInfo::EXTERNAL_REFERENCE) {
2613 #ifdef DEBUG
2614       if (!Serializer::enabled()) {
2615         Serializer::TooLateToEnableNow();
2616       }
2617 #endif
2618       if (!Serializer::enabled() && !emit_debug_code()) {
2619         return;
2620       }
2621     }
2622     ASSERT(buffer_space() >= kMaxRelocSize);  // too late to grow buffer here
2623     if (rmode == RelocInfo::CODE_TARGET_WITH_ID) {
2624       RelocInfo reloc_info_with_ast_id(pc_,
2625                                        rmode,
2626                                        RecordedAstId().ToInt(),
2627                                        NULL);
2628       ClearRecordedAstId();
2629       reloc_info_writer.Write(&reloc_info_with_ast_id);
2630     } else {
2631       reloc_info_writer.Write(&rinfo);
2632     }
2633   }
2634 }
2635
+ 2636 void Assembler::RecordRelocInfo(double data) {
+ 2637   // We do not try to reuse pool constants.
+ 2638   RelocInfo rinfo(pc_, data);
+ 2639   RecordRelocInfoConstantPoolEntryHelper(rinfo);
+ 2640 }
+ 2641
+ 2642
+ 2643 void Assembler::RecordRelocInfoConstantPoolEntryHelper(const RelocInfo& rinfo) {
+ 2644   ASSERT(num_pending_reloc_info_ < kMaxNumPendingRelocInfo);
+ 2645   if (num_pending_reloc_info_ == 0) {
+ 2646     first_const_pool_use_ = pc_offset();
+ 2647   }
+ 2648   pending_reloc_info_[num_pending_reloc_info_++] = rinfo;
+ 2649   if (rinfo.rmode() == RelocInfo::NONE64) {
+ 2650     ++num_pending_64_bit_reloc_info_;
+ 2651   }
+ 2652   ASSERT(num_pending_64_bit_reloc_info_ <= num_pending_reloc_info_);
+ 2653   // Make sure the constant pool is not emitted in place of the next
+ 2654   // instruction for which we just recorded relocation info.
+ 2655   BlockConstPoolFor(1);
+ 2656 }
+ 2657
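Both RecordRelocInfo overloads now funnel into the helper above, which tracks 64-bit entries separately so CheckConstPool can size and order the pool. A toy model of that bookkeeping (the struct and sizes are invented; only the counter logic mirrors the patch):

    #include <cassert>

    struct PendingPool {
      static const int kMax = 64;  // stand-in for kMaxNumPendingRelocInfo
      int num_pending = 0;
      int num_pending_64_bit = 0;
      int first_use = -1;
      void Add(bool is64, int pc_offset) {
        assert(num_pending < kMax);
        if (num_pending == 0) first_use = pc_offset;  // pool range starts here
        ++num_pending;
        if (is64) ++num_pending_64_bit;
        assert(num_pending_64_bit <= num_pending);
      }
    };

    int main() {
      PendingPool pool;
      pool.Add(true, 0);   // RecordRelocInfo(double): a NONE64 entry
      pool.Add(false, 4);  // a 32-bit entry from RecordRelocInfo(rmode, data)
      assert(pool.num_pending == 2 && pool.num_pending_64_bit == 1);
      return 0;
    }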
2658
2659 void Assembler::BlockConstPoolFor(int instructions) {
2660   int pc_limit = pc_offset() + instructions * kInstrSize;
2661   if (no_const_pool_before_ < pc_limit) {
2662     // If there are some pending entries, the constant pool cannot be blocked
- 2625     // further than first_const_pool_use_ + kMaxDistToPool
+ 2663     // further than the constant pool instruction's reach.
2664     ASSERT((num_pending_reloc_info_ == 0) ||
- 2627            (pc_limit < (first_const_pool_use_ + kMaxDistToPool)));
+ 2665            (pc_limit - first_const_pool_use_ < kMaxDistToIntPool));
+ 2666     // TODO(jfb) Also check 64-bit entries are in range (requires splitting
+ 2667     // them up from 32-bit entries).
2668     no_const_pool_before_ = pc_limit;
2669   }
2670
2671   if (next_buffer_check_ < no_const_pool_before_) {
2672     next_buffer_check_ = no_const_pool_before_;
2673   }
2674 }
2675
2676
2677 void Assembler::CheckConstPool(bool force_emit, bool require_jump) {
2678   // Some short sequences of instructions mustn't be broken up by constant
2679   // pool emission; such sequences are protected by calls to BlockConstPoolFor
2680   // and BlockConstPoolScope.
2681   if (is_const_pool_blocked()) {
2682     // Something is wrong if emission is forced and blocked at the same time.
2683     ASSERT(!force_emit);
2684     return;
2685   }
2686
2687   // There is nothing to do if there are no pending constant pool entries.
2688   if (num_pending_reloc_info_ == 0) {
+ 2689     ASSERT(num_pending_64_bit_reloc_info_ == 0);
2690     // Calculate the offset of the next check.
2691     next_buffer_check_ = pc_offset() + kCheckPoolInterval;
2692     return;
2693   }
2694
- 2654   // We emit a constant pool when:
- 2655   // * requested to do so by parameter force_emit (e.g. after each function).
- 2656   // * the distance to the first instruction accessing the constant pool is
- 2657   //   kAvgDistToPool or more.
- 2658   // * no jump is required and the distance to the first instruction accessing
- 2659   //   the constant pool is at least kMaxDistToPool / 2.
- 2660   ASSERT(first_const_pool_use_ >= 0);
- 2661   int dist = pc_offset() - first_const_pool_use_;
- 2662   if (!force_emit && dist < kAvgDistToPool &&
- 2663       (require_jump || (dist < (kMaxDistToPool / 2)))) {
- 2664     return;
- 2665   }
- 2666
2695   // Check that the code buffer is large enough before emitting the constant
2696   // pool (include the jump over the pool and the constant pool marker and
2697   // the gap to the relocation information).
+ 2698   // Note 64-bit values are wider, and the first one needs to be 64-bit aligned.
2699   int jump_instr = require_jump ? kInstrSize : 0;
- 2671   int size = jump_instr + kInstrSize + num_pending_reloc_info_ * kPointerSize;
+ 2700   int size = kInstrSize + jump_instr + num_pending_reloc_info_ * kPointerSize;
+ 2701   bool has_fp_values = (num_pending_64_bit_reloc_info_ > 0);
+ 2702   // 64-bit values must be 64-bit aligned.
+ 2703   bool require_64_bit_align = has_fp_values && (((uintptr_t)pc_ + size) & 0x3);
+ 2704   if (require_64_bit_align) {
+ 2705     size += kInstrSize;
+ 2706   }
+ 2707   // num_pending_reloc_info_ also contains 64-bit entries; the code above
+ 2708   // therefore already counted half of the size for 64-bit entries. Add the
+ 2709   // remaining size.
+ 2710   STATIC_ASSERT(kPointerSize == kDoubleSize / 2);
+ 2711   size += num_pending_64_bit_reloc_info_ * (kDoubleSize / 2);
+ 2712   int marker_num = (size - kInstrSize - jump_instr) / 4;
+ 2713
+ 2714   // We emit a constant pool when:
+ 2715   // * requested to do so by parameter force_emit (e.g. after each function).
+ 2716   // * the distance from the first instruction accessing the constant pool to
+ 2717   //   any of the constant pool entries will exceed its limit the next
+ 2718   //   time the pool is checked. This is overly restrictive, but we don't emit
+ 2719   //   constant pool entries in-order so it's conservatively correct.
+ 2720   // * the instruction doesn't require a jump after itself to jump over the
+ 2721   //   constant pool, and we're getting close to running out of range.
+ 2722   if (!force_emit) {
+ 2723     ASSERT((first_const_pool_use_ >= 0) && (num_pending_reloc_info_ > 0));
+ 2724     int dist = pc_offset() + size - first_const_pool_use_;
+ 2725     if (has_fp_values) {
+ 2726       if ((dist < kMaxDistToFPPool - kCheckPoolInterval) &&
+ 2727           (require_jump || (dist < kMaxDistToFPPool / 2))) {
+ 2728         return;
+ 2729       }
+ 2730     } else {
+ 2731       if ((dist < kMaxDistToIntPool - kCheckPoolInterval) &&
+ 2732           (require_jump || (dist < kMaxDistToIntPool / 2))) {
+ 2733         return;
+ 2734       }
+ 2735     }
+ 2736   }
+ 2737
2738   int needed_space = size + kGap;
2739   while (buffer_space() <= needed_space) GrowBuffer();
2740
2741   {
2742     // Block recursive calls to CheckConstPool.
2743     BlockConstPoolScope block_const_pool(this);
2744     RecordComment("[ Constant Pool");
2745     RecordConstPool(size);
2746
2747     // Emit jump over constant pool if necessary.
2748     Label after_pool;
2749     if (require_jump) {
2750       b(&after_pool);
2751     }
2752
2753     // Put down constant pool marker "Undefined instruction" as specified by
2754     // A5.6 (ARMv7) Instruction set encoding.
- 2689     emit(kConstantPoolMarker | num_pending_reloc_info_);
+ 2755     emit(kConstantPoolMarker | marker_num);
2756
- 2691     // Emit constant pool entries.
+ 2757     if (require_64_bit_align) {
+ 2758       emit(kConstantPoolMarker);
+ 2759     }
+ 2760
+ 2761     // Emit 64-bit constant pool entries first: their range is smaller than
+ 2762     // 32-bit entries.
+ 2763     for (int i = 0; i < num_pending_reloc_info_; i++) {
+ 2764       RelocInfo& rinfo = pending_reloc_info_[i];
+ 2765
+ 2766       if (rinfo.rmode() != RelocInfo::NONE64) {
+ 2767         // 32-bit values emitted later.
+ 2768         continue;
+ 2769       }
+ 2770
+ 2771       ASSERT(!((uintptr_t)pc_ & 0x3));  // Check 64-bit alignment.
+ 2772
+ 2773       Instr instr = instr_at(rinfo.pc());
+ 2774       // Instruction to patch must be 'vldr rd, [pc, #offset]' with offset == 0.
+ 2775       ASSERT((IsVldrDPcImmediateOffset(instr) &&
+ 2776               GetVldrDRegisterImmediateOffset(instr) == 0));
+ 2777
+ 2778       int delta = pc_ - rinfo.pc() - kPcLoadDelta;
+ 2779       ASSERT(is_uint10(delta));
+ 2780
+ 2781       instr_at_put(rinfo.pc(), SetVldrDRegisterImmediateOffset(instr, delta));
+ 2782
+ 2783       const double double_data = rinfo.data64();
+ 2784       uint64_t uint_data = 0;
+ 2785       memcpy(&uint_data, &double_data, sizeof(double_data));
+ 2786       emit(uint_data & 0xFFFFFFFF);
+ 2787       emit(uint_data >> 32);
+ 2788     }
+ 2789
+ 2790     // Emit 32-bit constant pool entries.
2791     for (int i = 0; i < num_pending_reloc_info_; i++) {
2792       RelocInfo& rinfo = pending_reloc_info_[i];
2793       ASSERT(rinfo.rmode() != RelocInfo::COMMENT &&
2794              rinfo.rmode() != RelocInfo::POSITION &&
2795              rinfo.rmode() != RelocInfo::STATEMENT_POSITION &&
2796              rinfo.rmode() != RelocInfo::CONST_POOL);
2797
+ 2798       if (rinfo.rmode() == RelocInfo::NONE64) {
+ 2799         // 64-bit values emitted earlier.
+ 2800         continue;
+ 2801       }
+ 2802
2803       Instr instr = instr_at(rinfo.pc());
2804       // Instruction to patch must be 'ldr rd, [pc, #offset]' with offset == 0.
- 2701       ASSERT(IsLdrPcImmediateOffset(instr) &&
- 2702              GetLdrRegisterImmediateOffset(instr) == 0);
+ 2805       ASSERT((IsLdrPcImmediateOffset(instr) &&
+ 2806               GetLdrRegisterImmediateOffset(instr) == 0));
Michael Starzinger 2012/10/25 13:03:37 This change seems obsolete.
2807
2808       int delta = pc_ - rinfo.pc() - kPcLoadDelta;
2809       // 0 is the smallest delta:
2810       //   ldr rd, [pc, #0]
2811       //   constant pool marker
2812       //   data
2813       ASSERT(is_uint12(delta));
2814
2815       instr_at_put(rinfo.pc(), SetLdrRegisterImmediateOffset(instr, delta));
2816       emit(rinfo.data());
2817     }
2818
2819     num_pending_reloc_info_ = 0;
+ 2820     num_pending_64_bit_reloc_info_ = 0;
2821     first_const_pool_use_ = -1;
2822
2823     RecordComment("]");
2824
2825     if (after_pool.is_linked()) {
2826       bind(&after_pool);
2827     }
2828   }
2829
2830   // Since a constant pool was just emitted, move the check offset forward by
2831   // the standard interval.
2832   next_buffer_check_ = pc_offset() + kCheckPoolInterval;
2833 }
2834
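To make the size arithmetic in CheckConstPool concrete, here is a worked scenario with three pending entries, one of them 64-bit, and a jump required; kInstrSize = kPointerSize = 4 and kDoubleSize = 8 as on ARM, with no alignment padding needed (the scenario itself is invented):

    #include <cassert>

    int main() {
      const int kInstrSize = 4, kPointerSize = 4, kDoubleSize = 8;
      const int num_pending_reloc_info = 3;         // two 32-bit entries + one double
      const int num_pending_64_bit_reloc_info = 1;
      const bool require_jump = true;

      int jump_instr = require_jump ? kInstrSize : 0;
      // Marker word + optional jump + one word per pending entry...
      int size = kInstrSize + jump_instr + num_pending_reloc_info * kPointerSize;
      // ...plus the second word of every 64-bit entry.
      size += num_pending_64_bit_reloc_info * (kDoubleSize / 2);
      assert(size == 24);  // jump(4) + marker(4) + double(8) + two ints(8)

      // The marker encodes the number of pool words that follow it.
      int marker_num = (size - kInstrSize - jump_instr) / 4;
      assert(marker_num == 4);  // double(2 words) + two 32-bit entries(2 words)
      return 0;
    }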
2835
2836 } }  // namespace v8::internal
2837
2838 #endif  // V8_TARGET_ARCH_ARM