| OLD | NEW |
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 464 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 475 cmp(scratch, Operand(ExternalReference::new_space_start(isolate()))); | 475 cmp(scratch, Operand(ExternalReference::new_space_start(isolate()))); |
| 476 b(cond, branch); | 476 b(cond, branch); |
| 477 } | 477 } |
| 478 | 478 |
| 479 | 479 |
| 480 void MacroAssembler::RecordWriteField( | 480 void MacroAssembler::RecordWriteField( |
| 481 Register object, | 481 Register object, |
| 482 int offset, | 482 int offset, |
| 483 Register value, | 483 Register value, |
| 484 Register dst, | 484 Register dst, |
| 485 LinkRegisterStatus lr_status, |
| 485 SaveFPRegsMode save_fp, | 486 SaveFPRegsMode save_fp, |
| 486 RememberedSetAction remembered_set_action, | 487 RememberedSetAction remembered_set_action, |
| 487 SmiCheck smi_check) { | 488 SmiCheck smi_check) { |
| 488 // First, check if a write barrier is even needed. The tests below | 489 // First, check if a write barrier is even needed. The tests below |
| 489 // catch stores of Smis. | 490 // catch stores of Smis. |
| 490 Label done; | 491 Label done; |
| 491 | 492 |
| 492 // Skip barrier if writing a smi. | 493 // Skip barrier if writing a smi. |
| 493 if (smi_check == INLINE_SMI_CHECK) { | 494 if (smi_check == INLINE_SMI_CHECK) { |
| 494 JumpIfSmi(value, &done); | 495 JumpIfSmi(value, &done); |
| 495 } | 496 } |
| 496 | 497 |
| 497 // Although the object register is tagged, the offset is relative to the start | 498 // Although the object register is tagged, the offset is relative to the start |
| 498 // of the object, so so offset must be a multiple of kPointerSize. | 499 // of the object, so so offset must be a multiple of kPointerSize. |
| 499 ASSERT(IsAligned(offset, kPointerSize)); | 500 ASSERT(IsAligned(offset, kPointerSize)); |
| 500 | 501 |
| 501 add(dst, object, Operand(offset)); | 502 add(dst, object, Operand(offset)); |
| 502 if (emit_debug_code()) { | 503 if (emit_debug_code()) { |
| 503 Label ok; | 504 Label ok; |
| 504 JumpIfNotSmi(dst, &ok); | 505 JumpIfNotSmi(dst, &ok); |
| 505 stop("Unaligned cell in write barrier"); | 506 stop("Unaligned cell in write barrier"); |
| 506 bind(&ok); | 507 bind(&ok); |
| 507 } | 508 } |
| 508 | 509 |
| 509 RecordWrite( | 510 RecordWrite(object, |
| 510 object, dst, value, save_fp, remembered_set_action, OMIT_SMI_CHECK); | 511 dst, |
| 512 value, |
| 513 lr_status, |
| 514 save_fp, |
| 515 remembered_set_action, |
| 516 OMIT_SMI_CHECK); |
| 511 | 517 |
| 512 bind(&done); | 518 bind(&done); |
| 513 | 519 |
| 514 // Clobber clobbered input registers when running with the debug-code flag | 520 // Clobber clobbered input registers when running with the debug-code flag |
| 515 // turned on to provoke errors. | 521 // turned on to provoke errors. |
| 516 if (emit_debug_code()) { | 522 if (emit_debug_code()) { |
| 517 mov(value, Operand(BitCast<int32_t>(kZapValue + 4))); | 523 mov(value, Operand(BitCast<int32_t>(kZapValue + 4))); |
| 518 mov(dst, Operand(BitCast<int32_t>(kZapValue + 8))); | 524 mov(dst, Operand(BitCast<int32_t>(kZapValue + 8))); |
| 519 } | 525 } |
| 520 } | 526 } |
| 521 | 527 |
| 522 | 528 |
| 523 // Will clobber 4 registers: object, address, scratch, ip. The | 529 // Will clobber 4 registers: object, address, scratch, ip. The |
| 524 // register 'object' contains a heap object pointer. The heap object | 530 // register 'object' contains a heap object pointer. The heap object |
| 525 // tag is shifted away. | 531 // tag is shifted away. |
| 526 void MacroAssembler::RecordWrite(Register object, | 532 void MacroAssembler::RecordWrite(Register object, |
| 527 Register address, | 533 Register address, |
| 528 Register scratch, | 534 Register value, |
| 535 LinkRegisterStatus lr_status, |
| 529 SaveFPRegsMode fp_mode, | 536 SaveFPRegsMode fp_mode, |
| 530 RememberedSetAction remembered_set_action, | 537 RememberedSetAction remembered_set_action, |
| 531 SmiCheck smi_check) { | 538 SmiCheck smi_check) { |
| 532 // The compiled code assumes that record write doesn't change the | 539 // The compiled code assumes that record write doesn't change the |
| 533 // context register, so we check that none of the clobbered | 540 // context register, so we check that none of the clobbered |
| 534 // registers are cp. | 541 // registers are cp. |
| 535 ASSERT(!address.is(cp) && !scratch.is(cp)); | 542 ASSERT(!address.is(cp) && !value.is(cp)); |
| 536 | 543 |
| 537 Label done; | 544 Label done; |
| 538 | 545 |
| 539 // First, test that the object is not in the new space. We cannot set | 546 if (smi_check == INLINE_SMI_CHECK) { |
| 540 // region marks for new space pages. | 547 ASSERT_EQ(0, kSmiTag); |
| 541 InNewSpace(object, scratch, eq, &done); | 548 tst(value, Operand(kSmiTagMask)); |
| 549 b(eq, &done); |
| 550 } |
| 551 |
| 552 CheckPageFlag(value, |
| 553 value, // Used as scratch. |
| 554 MemoryChunk::POINTERS_TO_HERE_ARE_INTERESTING, |
| 555 eq, |
| 556 &done); |
| 557 CheckPageFlag(object, |
| 558 value, // Used as scratch. |
| 559 MemoryChunk::POINTERS_FROM_HERE_ARE_INTERESTING, |
| 560 eq, |
| 561 &done); |
| 542 | 562 |
| 543 // Record the actual write. | 563 // Record the actual write. |
| 544 RememberedSetHelper(address, scratch, fp_mode); | 564 if (lr_status == kLRHasNotBeenSaved) { |
| 565 push(lr); |
| 566 } |
| 567 RecordWriteStub stub(object, value, address, remembered_set_action, fp_mode); |
| 568 CallStub(&stub); |
| 569 if (lr_status == kLRHasNotBeenSaved) { |
| 570 pop(lr); |
| 571 } |
| 545 | 572 |
| 546 bind(&done); | 573 bind(&done); |
| 547 | 574 |
| 548 // Clobber all input registers when running with the debug-code flag | 575 // Clobber clobbered registers when running with the debug-code flag |
| 549 // turned on to provoke errors. | 576 // turned on to provoke errors. |
| 550 if (emit_debug_code()) { | 577 if (emit_debug_code()) { |
| 551 mov(address, Operand(BitCast<int32_t>(kZapValue + 12))); | 578 mov(address, Operand(BitCast<int32_t>(kZapValue + 12))); |
| 552 mov(scratch, Operand(BitCast<int32_t>(kZapValue + 16))); | 579 mov(value, Operand(BitCast<int32_t>(kZapValue + 16))); |
| 553 } | 580 } |
| 554 } | 581 } |
| 555 | 582 |
| 556 | 583 |
| 557 void MacroAssembler::RememberedSetHelper(Register address, | 584 void MacroAssembler::RememberedSetHelper(Register address, |
| 558 Register scratch, | 585 Register scratch, |
| 559 SaveFPRegsMode fp_mode) { | 586 SaveFPRegsMode fp_mode, |
| 587 RememberedSetFinalAction and_then) { |
| 560 Label done; | 588 Label done; |
| 561 // Load store buffer top. | 589 // Load store buffer top. |
| 562 ExternalReference store_buffer = | 590 ExternalReference store_buffer = |
| 563 ExternalReference::store_buffer_top(isolate()); | 591 ExternalReference::store_buffer_top(isolate()); |
| 564 mov(ip, Operand(store_buffer)); | 592 mov(ip, Operand(store_buffer)); |
| 565 ldr(scratch, MemOperand(ip)); | 593 ldr(scratch, MemOperand(ip)); |
| 566 // Store pointer to buffer and increment buffer top. | 594 // Store pointer to buffer and increment buffer top. |
| 567 str(address, MemOperand(scratch, kPointerSize, PostIndex)); | 595 str(address, MemOperand(scratch, kPointerSize, PostIndex)); |
| 568 // Write back new top of buffer. | 596 // Write back new top of buffer. |
| 569 str(scratch, MemOperand(ip)); | 597 str(scratch, MemOperand(ip)); |
| 570 // Call stub on end of buffer. | 598 // Call stub on end of buffer. |
| 571 // Check for end of buffer. | 599 // Check for end of buffer. |
| 572 tst(scratch, Operand(StoreBuffer::kStoreBufferOverflowBit)); | 600 tst(scratch, Operand(StoreBuffer::kStoreBufferOverflowBit)); |
| 573 b(eq, &done); | 601 if (and_then == kFallThroughAtEnd) { |
| 602 b(eq, &done); |
| 603 } else { |
| 604 ASSERT(and_then == kReturnAtEnd); |
| 605 Ret(ne); |
| 606 } |
| 574 push(lr); | 607 push(lr); |
| 575 StoreBufferOverflowStub store_buffer_overflow = | 608 StoreBufferOverflowStub store_buffer_overflow = |
| 576 StoreBufferOverflowStub(fp_mode); | 609 StoreBufferOverflowStub(fp_mode); |
| 577 CallStub(&store_buffer_overflow); | 610 CallStub(&store_buffer_overflow); |
| 578 pop(lr); | 611 pop(lr); |
| 579 bind(&done); | 612 bind(&done); |
| 613 if (and_then == kReturnAtEnd) { |
| 614 Ret(); |
| 615 } |
| 580 } | 616 } |
| 581 | 617 |
| 582 | 618 |
| 583 // Push and pop all registers that can hold pointers. | 619 // Push and pop all registers that can hold pointers. |
| 584 void MacroAssembler::PushSafepointRegisters() { | 620 void MacroAssembler::PushSafepointRegisters() { |
| 585 // Safepoints expect a block of contiguous register values starting with r0: | 621 // Safepoints expect a block of contiguous register values starting with r0: |
| 586 ASSERT(((1 << kNumSafepointSavedRegisters) - 1) == kSafepointSavedRegisters); | 622 ASSERT(((1 << kNumSafepointSavedRegisters) - 1) == kSafepointSavedRegisters); |
| 587 // Safepoints expect a block of kNumSafepointRegisters values on the | 623 // Safepoints expect a block of kNumSafepointRegisters values on the |
| 588 // stack, so adjust the stack for unsaved registers. | 624 // stack, so adjust the stack for unsaved registers. |
| 589 const int num_unsaved = kNumSafepointRegisters - kNumSafepointSavedRegisters; | 625 const int num_unsaved = kNumSafepointRegisters - kNumSafepointSavedRegisters; |
| (...skipping 2495 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3085 MemoryChunk::MemoryChunkFlags flag, | 3121 MemoryChunk::MemoryChunkFlags flag, |
| 3086 Condition cc, | 3122 Condition cc, |
| 3087 Label* condition_met) { | 3123 Label* condition_met) { |
| 3088 and_(scratch, object, Operand(~Page::kPageAlignmentMask)); | 3124 and_(scratch, object, Operand(~Page::kPageAlignmentMask)); |
| 3089 ldr(scratch, MemOperand(scratch, MemoryChunk::kFlagsOffset)); | 3125 ldr(scratch, MemOperand(scratch, MemoryChunk::kFlagsOffset)); |
| 3090 tst(scratch, Operand(1 << flag)); | 3126 tst(scratch, Operand(1 << flag)); |
| 3091 b(cc, condition_met); | 3127 b(cc, condition_met); |
| 3092 } | 3128 } |
| 3093 | 3129 |
| 3094 | 3130 |
| 3131 void MacroAssembler::IsBlack(Register object, |
| 3132 Register scratch0, |
| 3133 Register scratch1, |
| 3134 Label* is_black) { |
| 3135 HasColor(object, scratch0, scratch1, is_black, 1, 0); // kBlackBitPattern. |
| 3136 ASSERT(strcmp(Marking::kBlackBitPattern, "10") == 0); |
| 3137 } |
| 3138 |
| 3139 |
| 3140 void MacroAssembler::HasColor(Register object, |
| 3141 Register bitmap_scratch, |
| 3142 Register mask_scratch, |
| 3143 Label* has_color, |
| 3144 int first_bit, |
| 3145 int second_bit) { |
| 3146 ASSERT(!Aliasing(object, bitmap_scratch, mask_scratch, no_reg)); |
| 3147 |
| 3148 GetMarkBits(object, bitmap_scratch, mask_scratch); |
| 3149 |
| 3150 Label other_color, word_boundary; |
| 3151 ldr(ip, MemOperand(bitmap_scratch, MemoryChunk::kHeaderSize)); |
| 3152 tst(ip, Operand(mask_scratch)); |
| 3153 b(first_bit == 1 ? eq : ne, &other_color); |
| 3154 // Shift left 1 by adding. |
| 3155 add(mask_scratch, mask_scratch, Operand(mask_scratch), SetCC); |
| 3156 b(eq, &word_boundary); |
| 3157 tst(ip, Operand(mask_scratch)); |
| 3158 b(second_bit == 1 ? ne : eq, has_color); |
| 3159 jmp(&other_color); |
| 3160 |
| 3161 bind(&word_boundary); |
| 3162 ldr(ip, MemOperand(bitmap_scratch, MemoryChunk::kHeaderSize + kPointerSize)); |
| 3163 tst(ip, Operand(1)); |
| 3164 b(second_bit == 1 ? ne : eq, has_color); |
| 3165 bind(&other_color); |
| 3166 } |
| 3167 |
| 3168 |
| 3169 void MacroAssembler::GetMarkBits(Register addr_reg, |
| 3170 Register bitmap_reg, |
| 3171 Register mask_reg) { |
| 3172 ASSERT(!Aliasing(addr_reg, bitmap_reg, mask_reg, no_reg)); |
| 3173 and_(bitmap_reg, addr_reg, Operand(~Page::kPageAlignmentMask)); |
| 3174 Ubfx(mask_reg, addr_reg, kPointerSizeLog2, Bitmap::kBitsPerCellLog2); |
| 3175 const int kLowBits = kPointerSizeLog2 + Bitmap::kBitsPerCellLog2; |
| 3176 Ubfx(ip, addr_reg, kLowBits, kPageSizeBits - kLowBits); |
| 3177 add(bitmap_reg, bitmap_reg, Operand(ip, LSL, kPointerSizeLog2)); |
| 3178 mov(ip, Operand(1)); |
| 3179 mov(mask_reg, Operand(ip, LSL, mask_reg)); |
| 3180 } |
| 3181 |
| 3182 |
| 3095 void MacroAssembler::ClampUint8(Register output_reg, Register input_reg) { | 3183 void MacroAssembler::ClampUint8(Register output_reg, Register input_reg) { |
| 3096 Usat(output_reg, 8, Operand(input_reg)); | 3184 Usat(output_reg, 8, Operand(input_reg)); |
| 3097 } | 3185 } |
| 3098 | 3186 |
| 3099 | 3187 |
| 3100 void MacroAssembler::ClampDoubleToUint8(Register result_reg, | 3188 void MacroAssembler::ClampDoubleToUint8(Register result_reg, |
| 3101 DoubleRegister input_reg, | 3189 DoubleRegister input_reg, |
| 3102 DoubleRegister temp_double_reg) { | 3190 DoubleRegister temp_double_reg) { |
| 3103 Label above_zero; | 3191 Label above_zero; |
| 3104 Label done; | 3192 Label done; |
| (...skipping 29 matching lines...) Expand all Loading... |
| 3134 Register descriptors) { | 3222 Register descriptors) { |
| 3135 ldr(descriptors, | 3223 ldr(descriptors, |
| 3136 FieldMemOperand(map, Map::kInstanceDescriptorsOrBitField3Offset)); | 3224 FieldMemOperand(map, Map::kInstanceDescriptorsOrBitField3Offset)); |
| 3137 Label not_smi; | 3225 Label not_smi; |
| 3138 JumpIfNotSmi(descriptors, ¬_smi); | 3226 JumpIfNotSmi(descriptors, ¬_smi); |
| 3139 mov(descriptors, Operand(FACTORY->empty_descriptor_array())); | 3227 mov(descriptors, Operand(FACTORY->empty_descriptor_array())); |
| 3140 bind(¬_smi); | 3228 bind(¬_smi); |
| 3141 } | 3229 } |
| 3142 | 3230 |
| 3143 | 3231 |
| 3232 bool Aliasing(Register r1, Register r2, Register r3, Register r4) { |
| 3233 if (r1.is(r2)) return true; |
| 3234 if (r1.is(r3)) return true; |
| 3235 if (r1.is(r4)) return true; |
| 3236 if (r2.is(r3)) return true; |
| 3237 if (r2.is(r4)) return true; |
| 3238 if (r3.is(r4)) return true; |
| 3239 return false; |
| 3240 } |
| 3241 |
| 3242 |
| 3144 CodePatcher::CodePatcher(byte* address, int instructions) | 3243 CodePatcher::CodePatcher(byte* address, int instructions) |
| 3145 : address_(address), | 3244 : address_(address), |
| 3146 instructions_(instructions), | 3245 instructions_(instructions), |
| 3147 size_(instructions * Assembler::kInstrSize), | 3246 size_(instructions * Assembler::kInstrSize), |
| 3148 masm_(Isolate::Current(), address, size_ + Assembler::kGap) { | 3247 masm_(Isolate::Current(), address, size_ + Assembler::kGap) { |
| 3149 // Create a new macro assembler pointing to the address of the code to patch. | 3248 // Create a new macro assembler pointing to the address of the code to patch. |
| 3150 // The size is adjusted with kGap in order for the assembler to generate size | 3249 // The size is adjusted with kGap in order for the assembler to generate size |
| 3151 // bytes of instructions without failing with buffer size constraints. | 3250 // bytes of instructions without failing with buffer size constraints. |
| 3152 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap); | 3251 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap); |
| 3153 } | 3252 } |
| (...skipping 22 matching lines...) Expand all Loading... |
| 3176 void CodePatcher::EmitCondition(Condition cond) { | 3275 void CodePatcher::EmitCondition(Condition cond) { |
| 3177 Instr instr = Assembler::instr_at(masm_.pc_); | 3276 Instr instr = Assembler::instr_at(masm_.pc_); |
| 3178 instr = (instr & ~kCondMask) | cond; | 3277 instr = (instr & ~kCondMask) | cond; |
| 3179 masm_.emit(instr); | 3278 masm_.emit(instr); |
| 3180 } | 3279 } |
| 3181 | 3280 |
| 3182 | 3281 |
| 3183 } } // namespace v8::internal | 3282 } } // namespace v8::internal |
| 3184 | 3283 |
| 3185 #endif // V8_TARGET_ARCH_ARM | 3284 #endif // V8_TARGET_ARCH_ARM |
| OLD | NEW |