Index: src/mips/macro-assembler-mips.cc
diff --git a/src/mips/macro-assembler-mips.cc b/src/mips/macro-assembler-mips.cc
index 77d03b5554909215a4ce23324276c19efb2b3175..08ff433a1767692a4b5c49b0160d91b4ad880f73 100644
--- a/src/mips/macro-assembler-mips.cc
+++ b/src/mips/macro-assembler-mips.cc
@@ -759,12 +759,25 @@ void MacroAssembler::Ror(Register rd, Register rs, const Operand& rt) {
 }
+static const int kInvalidRootIndex = -1;
+
+int MacroAssembler::FindRootIndex(Object* heap_object) {
+  Heap* heap = HEAP;
+  if (heap->InNewSpace(heap_object)) return kInvalidRootIndex;
[Erik Corry, 2012/03/17 02:49:18] Since your comment indicated that this function is ...
+  for (int i = 0; i < Heap::kRootListLength; i++) {
+    Object* root = heap->roots_array_start()[i];
+    if (!root->IsSmi() && root == heap_object) return i;
+  }
+  return kInvalidRootIndex;
+}
+
+
 //------------Pseudo-instructions-------------
-void MacroAssembler::li(Register rd, Operand j, bool gen2instr) {
+void MacroAssembler::li(Register rd, Operand j, LiFlags mode) {
   ASSERT(!j.is_reg());
   BlockTrampolinePoolScope block_trampoline_pool(this);
-  if (!MustUseReg(j.rmode_) && !gen2instr) {
+  if (!MustUseReg(j.rmode_) && mode == OPTIMIZE_SIZE) {
     // Normal load of an immediate value which does not need Relocation Info.
     if (is_int16(j.imm32_)) {
       addiu(rd, zero_reg, j.imm32_);
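The new FindRootIndex helper above scans the heap's root list and returns the index of a matching non-Smi, old-space object, or kInvalidRootIndex when there is none. The following is a minimal standalone sketch of that lookup pattern, not V8 code: the table, the parameter types, and the FindRootIndexSketch name are stand-ins for illustration.

#include <cstdio>

static const int kInvalidRootIndex = -1;

// Linear scan over a fixed root table, mirroring the shape of FindRootIndex
// above; the real helper also skips Smi entries and new-space objects.
int FindRootIndexSketch(const void* const* roots, int root_count,
                        const void* heap_object) {
  for (int i = 0; i < root_count; i++) {
    if (roots[i] == heap_object) return i;  // pointer identity, as in the patch
  }
  return kInvalidRootIndex;
}

int main() {
  int dummy_a = 0, dummy_b = 0;
  const void* roots[] = {&dummy_a, &dummy_b};
  std::printf("%d\n", FindRootIndexSketch(roots, 2, &dummy_b));  // prints 1
  return 0;
}

A hit means the constant can later be rematerialized with LoadRoot instead of an embedded pointer that would need relocation info.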
@@ -776,7 +789,21 @@ void MacroAssembler::li(Register rd, Operand j, bool gen2instr) {
       lui(rd, (j.imm32_ >> kLuiShift) & kImm16Mask);
       ori(rd, rd, (j.imm32_ & kImm16Mask));
     }
-  } else if (MustUseReg(j.rmode_) || gen2instr) {
+  } else if (SerializingTryLoadFromRoot(j.rmode_) && mode == OPTIMIZE_SIZE) {
[Erik Corry, 2012/03/17 02:49:18] I took out this change.
+    int32_t index = FindRootIndex(*(reinterpret_cast<Object**>(j.imm32_)));
+    if (index != kInvalidRootIndex) {
+      // Replace the lui/ori pair for references found in the root array with
+      // a relative load via LoadRoot, which needs no relocation info. This
+      // replacement is performed only when serialization is turned on.
+      LoadRoot(rd, static_cast<Heap::RootListIndex>(index));
+    } else {
+      if (MustUseReg(j.rmode_)) {
+        RecordRelocInfo(j.rmode_, j.imm32_);
+      }
+      lui(rd, (j.imm32_ >> kLuiShift) & kImm16Mask);
+      ori(rd, rd, (j.imm32_ & kImm16Mask));
+    }
+  } else {
     if (MustUseReg(j.rmode_)) {
       RecordRelocInfo(j.rmode_, j.imm32_);
     }
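In the rewritten li() above, the old gen2instr bool becomes a LiFlags mode: OPTIMIZE_SIZE lets the assembler pick the shortest encoding (a single instruction, or the LoadRoot replacement while serializing), while CONSTANT_SIZE always emits the full lui/ori pair so callers can rely on a fixed code size. Below is a standalone sketch, not V8 code, of how a 32-bit immediate splits into that pair; the printed mnemonics merely stand in for the emitted instructions, and kLuiShift/kImm16Mask mirror the constants used in the patch.

#include <cstdint>
#include <cstdio>

int main() {
  const int kLuiShift = 16;
  const uint32_t kImm16Mask = 0xFFFF;
  uint32_t imm = 0x12345678;
  // lui writes the upper 16 bits, ori fills in the lower 16.
  std::printf("lui  rd, 0x%04x\n", (imm >> kLuiShift) & kImm16Mask);  // 0x1234
  std::printf("ori  rd, rd, 0x%04x\n", imm & kImm16Mask);             // 0x5678
  return 0;
}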
@@ -1497,6 +1524,16 @@ void MacroAssembler::Branch(Label* L, Condition cond, Register rs,
 }
+void MacroAssembler::Branch(Label* L,
+                            Condition cond,
+                            Register rs,
+                            Heap::RootListIndex index,
+                            BranchDelaySlot bdslot) {
+  LoadRoot(at, index);
+  Branch(L, cond, rs, Operand(at), bdslot);
+}
+
+
 void MacroAssembler::BranchShort(int16_t offset, BranchDelaySlot bdslot) {
   b(offset);
@@ -2384,7 +2421,7 @@ void MacroAssembler::Call(Address target,
   // Must record previous source positions before the
   // li() generates a new code target.
   positions_recorder()->WriteRecordedPositions();
-  li(t9, Operand(target_int, rmode), true);
+  li(t9, Operand(target_int, rmode), CONSTANT_SIZE);
   Call(t9, cond, rs, rt, bd);
   ASSERT_EQ(CallSize(target, rmode, cond, rs, rt, bd),
             SizeOfCodeGeneratedSince(&start));
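Call() asks for CONSTANT_SIZE because the ASSERT_EQ above compares the statically predicted CallSize against the code actually emitted since the start label; that only holds if the li() of the target always occupies the same number of instructions. A standalone sketch of that invariant follows; the instruction counts are illustrative assumptions, not exact for every branch-delay-slot configuration.

#include <cassert>

int main() {
  const int kInstrSize = 4;         // MIPS instructions are 4 bytes wide
  const int kLiInstructions = 2;    // lui + ori, never shortened to one
  const int kJalrInstructions = 2;  // jalr t9 plus its delay slot
  int predicted_size = (kLiInstructions + kJalrInstructions) * kInstrSize;
  int emitted_size = 4 * kInstrSize;  // stands in for SizeOfCodeGeneratedSince
  assert(predicted_size == emitted_size);  // mirrors the ASSERT_EQ in Call()
  return 0;
}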
@@ -2593,7 +2630,7 @@ void MacroAssembler::PushTryHandler(StackHandler::Kind kind,
   unsigned state =
       StackHandler::IndexField::encode(handler_index) |
       StackHandler::KindField::encode(kind);
-  li(t1, Operand(CodeObject()));
+  li(t1, Operand(CodeObject()), CONSTANT_SIZE);
   li(t2, Operand(state));
   // Push the frame pointer, context, state, and code object.
@@ -3222,7 +3259,7 @@ void MacroAssembler::StoreNumberToDoubleElements(Register value_reg,
   // Ensure that the object is a heap number
   CheckMap(value_reg,
            scratch1,
-           isolate()->factory()->heap_number_map(),
+           Heap::kHeapNumberMapRootIndex,
            fail,
            DONT_DO_SMI_CHECK);
@@ -4325,7 +4362,7 @@ void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
 void MacroAssembler::EnterFrame(StackFrame::Type type) {
   addiu(sp, sp, -5 * kPointerSize);
   li(t8, Operand(Smi::FromInt(type)));
-  li(t9, Operand(CodeObject()));
+  li(t9, Operand(CodeObject()), CONSTANT_SIZE);
   sw(ra, MemOperand(sp, 4 * kPointerSize));
   sw(fp, MemOperand(sp, 3 * kPointerSize));
   sw(cp, MemOperand(sp, 2 * kPointerSize));
@@ -4369,7 +4406,8 @@ void MacroAssembler::EnterExitFrame(bool save_doubles,
     sw(zero_reg, MemOperand(fp, ExitFrameConstants::kSPOffset));
   }
-  li(t8, Operand(CodeObject()));  // Accessed from ExitFrame::code_slot.
+  // Accessed from ExitFrame::code_slot.
+  li(t8, Operand(CodeObject()), CONSTANT_SIZE);
   sw(t8, MemOperand(fp, ExitFrameConstants::kCodeOffset));
   // Save the frame pointer and the context in top.
@@ -5088,7 +5126,7 @@ void MacroAssembler::LoadInstanceDescriptors(Register map,
      FieldMemOperand(map, Map::kInstanceDescriptorsOrBitField3Offset));
   Label not_smi;
   JumpIfNotSmi(descriptors, &not_smi);
-  li(descriptors, Operand(FACTORY->empty_descriptor_array()));
+  LoadRoot(descriptors, Heap::kEmptyDescriptorArrayRootIndex);
   bind(&not_smi);
 }
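The last few hunks replace factory-handle immediates with LoadRoot or a root-index comparison. The benefit is that a root is reached by indexing a table through an already-loaded base, so no heap pointer is embedded in the instruction stream and no relocation info is recorded, which is what the serializer needs. Below is a standalone sketch, not V8 code, of what such an indexed load amounts to; the table contents and index are invented for illustration.

#include <cstdint>
#include <cstdio>
#include <cstring>

int main() {
  const int kPointerSizeLog2 = 2;  // 4-byte pointers, as on 32-bit MIPS
  uint32_t fake_roots[4] = {0x1000, 0x2000, 0x3000, 0x4000};
  const uint8_t* roots_base = reinterpret_cast<const uint8_t*>(fake_roots);
  int index = 2;  // stands in for a Heap::RootListIndex value
  uint32_t value;
  // Base-plus-scaled-index load: the shape of a LoadRoot-style access.
  std::memcpy(&value, roots_base + (index << kPointerSizeLog2), sizeof(value));
  std::printf("roots[%d] = 0x%x\n", index, value);  // roots[2] = 0x3000
  return 0;
}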