Index: src/mips/lithium-codegen-mips.cc
diff --git a/src/mips/lithium-codegen-mips.cc b/src/mips/lithium-codegen-mips.cc
index 4c2182bdb004ebf57b1ac3fff3f0cdfbdf43b59b..ae232ae9a390648ea8ce559982017fe148d272a2 100644
--- a/src/mips/lithium-codegen-mips.cc
+++ b/src/mips/lithium-codegen-mips.cc
@@ -2622,137 +2622,7 @@ void LCodeGen::DoAccessArgumentsAt(LAccessArgumentsAt* instr) {
 }
-void LCodeGen::DoLoadKeyedFastElement(LLoadKeyedFastElement* instr) {
-  Register elements = ToRegister(instr->elements());
-  Register result = ToRegister(instr->result());
-  Register scratch = scratch0();
-  Register store_base = scratch;
-  int offset = 0;
-
-  if (instr->key()->IsConstantOperand()) {
-    LConstantOperand* const_operand = LConstantOperand::cast(instr->key());
-    offset = FixedArray::OffsetOfElementAt(ToInteger32(const_operand) +
-                                           instr->additional_index());
-    store_base = elements;
-  } else {
-    Register key = EmitLoadRegister(instr->key(), scratch);
-    // Even though the HLoadKeyedFastElement instruction forces the input
-    // representation for the key to be an integer, the input gets replaced
-    // during bound check elimination with the index argument to the bounds
-    // check, which can be tagged, so that case must be handled here, too.
-    if (instr->hydrogen()->key()->representation().IsTagged()) {
-      __ sll(scratch, key, kPointerSizeLog2 - kSmiTagSize);
-      __ addu(scratch, elements, scratch);
-    } else {
-      __ sll(scratch, key, kPointerSizeLog2);
-      __ addu(scratch, elements, scratch);
-    }
-    offset = FixedArray::OffsetOfElementAt(instr->additional_index());
-  }
-  __ lw(result, FieldMemOperand(store_base, offset));
-
-  // Check for the hole value.
-  if (instr->hydrogen()->RequiresHoleCheck()) {
-    if (IsFastSmiElementsKind(instr->hydrogen()->elements_kind())) {
-      __ And(scratch, result, Operand(kSmiTagMask));
-      DeoptimizeIf(ne, instr->environment(), scratch, Operand(zero_reg));
-    } else {
-      __ LoadRoot(scratch, Heap::kTheHoleValueRootIndex);
-      DeoptimizeIf(eq, instr->environment(), result, Operand(scratch));
-    }
-  }
-}
-
-
-void LCodeGen::DoLoadKeyedFastDoubleElement(
-    LLoadKeyedFastDoubleElement* instr) {
-  Register elements = ToRegister(instr->elements());
-  bool key_is_constant = instr->key()->IsConstantOperand();
-  Register key = no_reg;
-  DoubleRegister result = ToDoubleRegister(instr->result());
-  Register scratch = scratch0();
-
-  int element_size_shift = ElementsKindToShiftSize(FAST_DOUBLE_ELEMENTS);
-  int shift_size = (instr->hydrogen()->key()->representation().IsTagged())
-      ? (element_size_shift - kSmiTagSize) : element_size_shift;
-  int constant_key = 0;
-  if (key_is_constant) {
-    constant_key = ToInteger32(LConstantOperand::cast(instr->key()));
-    if (constant_key & 0xF0000000) {
-      Abort("array index constant value too big.");
-    }
-  } else {
-    key = ToRegister(instr->key());
-  }
-
-  if (key_is_constant) {
-    __ Addu(elements, elements,
-            Operand(((constant_key + instr->additional_index()) <<
-                     element_size_shift) +
-                    FixedDoubleArray::kHeaderSize - kHeapObjectTag));
-  } else {
-    __ sll(scratch, key, shift_size);
-    __ Addu(elements, elements, Operand(scratch));
-    __ Addu(elements, elements,
-            Operand((FixedDoubleArray::kHeaderSize - kHeapObjectTag) +
-                    (instr->additional_index() << element_size_shift)));
-  }
-
-  if (instr->hydrogen()->RequiresHoleCheck()) {
-    __ lw(scratch, MemOperand(elements, sizeof(kHoleNanLower32)));
-    DeoptimizeIf(eq, instr->environment(), scratch, Operand(kHoleNanUpper32));
-  }
-
-  __ ldc1(result, MemOperand(elements));
-}
-
-
-MemOperand LCodeGen::PrepareKeyedOperand(Register key,
-                                         Register base,
-                                         bool key_is_constant,
-                                         int constant_key,
-                                         int element_size,
-                                         int shift_size,
-                                         int additional_index,
-                                         int additional_offset) {
-  if (additional_index != 0 && !key_is_constant) {
-    additional_index *= 1 << (element_size - shift_size);
-    __ Addu(scratch0(), key, Operand(additional_index));
-  }
-
-  if (key_is_constant) {
-    return MemOperand(base,
-                      (constant_key << element_size) + additional_offset);
-  }
-
-  if (additional_index == 0) {
-    if (shift_size >= 0) {
-      __ sll(scratch0(), key, shift_size);
-      __ Addu(scratch0(), base, scratch0());
-      return MemOperand(scratch0());
-    } else {
-      ASSERT_EQ(-1, shift_size);
-      __ srl(scratch0(), key, 1);
-      __ Addu(scratch0(), base, scratch0());
-      return MemOperand(scratch0());
-    }
-  }
-
-  if (shift_size >= 0) {
-    __ sll(scratch0(), scratch0(), shift_size);
-    __ Addu(scratch0(), base, scratch0());
-    return MemOperand(scratch0());
-  } else {
-    ASSERT_EQ(-1, shift_size);
-    __ srl(scratch0(), scratch0(), 1);
-    __ Addu(scratch0(), base, scratch0());
-    return MemOperand(scratch0());
-  }
-}
-
-
-void LCodeGen::DoLoadKeyedSpecializedArrayElement(
-    LLoadKeyedSpecializedArrayElement* instr) {
+void LCodeGen::DoLoadKeyedExternal(LLoadKeyed* instr) {
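+  // Keyed loads from external (typed) arrays, where the backing store is a
+  // raw external_pointer rather than a FixedArray on the heap.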
   Register external_pointer = ToRegister(instr->external_pointer());
   Register key = no_reg;
   ElementsKind elements_kind = instr->elements_kind();
@@ -2775,7 +2645,8 @@ void LCodeGen::DoLoadKeyedSpecializedArrayElement(
       elements_kind == EXTERNAL_DOUBLE_ELEMENTS) {
     FPURegister result = ToDoubleRegister(instr->result());
     if (key_is_constant) {
-      __ Addu(scratch0(), external_pointer, constant_key << element_size_shift);
+      __ Addu(scratch0(), external_pointer,
+              constant_key << element_size_shift);
     } else {
       __ sll(scratch0(), key, shift_size);
       __ Addu(scratch0(), scratch0(), external_pointer);
@@ -2814,7 +2685,7 @@ void LCodeGen::DoLoadKeyedSpecializedArrayElement(
         __ lw(result, mem_operand);
         if (!instr->hydrogen()->CheckFlag(HInstruction::kUint32)) {
           DeoptimizeIf(Ugreater_equal, instr->environment(),
-              result, Operand(0x80000000));
+                       result, Operand(0x80000000));
         }
         break;
       case EXTERNAL_FLOAT_ELEMENTS:
@@ -2833,6 +2704,134 @@ void LCodeGen::DoLoadKeyedSpecializedArrayElement(
   }
 }
+void LCodeGen::DoLoadKeyed(LLoadKeyed* instr) {
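+  // Dispatch by elements kind: external, fast double, or fast (tagged/smi).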
+  if (instr->is_external()) {
+    DoLoadKeyedExternal(instr);
+  } else if (instr->hydrogen()->representation().IsDouble()) {
+    Register elements = ToRegister(instr->elements());
+    bool key_is_constant = instr->key()->IsConstantOperand();
+    Register key = no_reg;
+    DoubleRegister result = ToDoubleRegister(instr->result());
+    Register scratch = scratch0();
+
+    int element_size_shift = ElementsKindToShiftSize(FAST_DOUBLE_ELEMENTS);
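+    // Tagged (smi) keys are already shifted left by kSmiTagSize, so they
+    // need one bit less of scaling than untagged integer keys.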
+    int shift_size = (instr->hydrogen()->key()->representation().IsTagged())
+        ? (element_size_shift - kSmiTagSize) : element_size_shift;
+    int constant_key = 0;
+    if (key_is_constant) {
+      constant_key = ToInteger32(LConstantOperand::cast(instr->key()));
+      if (constant_key & 0xF0000000) {
+        Abort("array index constant value too big.");
+      }
+    } else {
+      key = ToRegister(instr->key());
+    }
+
+    if (key_is_constant) {
+      __ Addu(elements, elements,
+              Operand(((constant_key + instr->additional_index()) <<
+                       element_size_shift) +
+                      FixedDoubleArray::kHeaderSize - kHeapObjectTag));
+    } else {
+      __ sll(scratch, key, shift_size);
+      __ Addu(elements, elements, Operand(scratch));
+      __ Addu(elements, elements,
+              Operand((FixedDoubleArray::kHeaderSize - kHeapObjectTag) +
+                      (instr->additional_index() << element_size_shift)));
+    }
+
+    if (instr->hydrogen()->RequiresHoleCheck()) {
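+      // The hole is a NaN distinguished by its upper word, so it is enough
+      // to load and compare only the upper 32 bits of the value.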
+      __ lw(scratch, MemOperand(elements, sizeof(kHoleNanLower32)));
+      DeoptimizeIf(eq, instr->environment(), scratch, Operand(kHoleNanUpper32));
+    }
+
+    __ ldc1(result, MemOperand(elements));
+  } else {
+    Register elements = ToRegister(instr->elements());
+    Register result = ToRegister(instr->result());
+    Register scratch = scratch0();
+    Register store_base = scratch;
+    int offset = 0;
+
+    if (instr->key()->IsConstantOperand()) {
+      LConstantOperand* const_operand = LConstantOperand::cast(instr->key());
+      offset = FixedArray::OffsetOfElementAt(ToInteger32(const_operand) +
+                                             instr->additional_index());
+      store_base = elements;
+    } else {
+      Register key = EmitLoadRegister(instr->key(), scratch);
+      // Even though the HLoadKeyed instruction forces the input
+      // representation for the key to be an integer, the input gets replaced
+      // during bound check elimination with the index argument to the bounds
+      // check, which can be tagged, so that case must be handled here, too.
+      if (instr->hydrogen()->key()->representation().IsTagged()) {
+        __ sll(scratch, key, kPointerSizeLog2 - kSmiTagSize);
+        __ addu(scratch, elements, scratch);
+      } else {
+        __ sll(scratch, key, kPointerSizeLog2);
+        __ addu(scratch, elements, scratch);
+      }
+      offset = FixedArray::OffsetOfElementAt(instr->additional_index());
+    }
+    __ lw(result, FieldMemOperand(store_base, offset));
+
+    // Check for the hole value.
+    if (instr->hydrogen()->RequiresHoleCheck()) {
+      if (IsFastSmiElementsKind(instr->hydrogen()->elements_kind())) {
+        __ And(scratch, result, Operand(kSmiTagMask));
+        DeoptimizeIf(ne, instr->environment(), scratch, Operand(zero_reg));
+      } else {
+        __ LoadRoot(scratch, Heap::kTheHoleValueRootIndex);
+        DeoptimizeIf(eq, instr->environment(), result, Operand(scratch));
+      }
+    }
+  }
+}
+
+
+MemOperand LCodeGen::PrepareKeyedOperand(Register key,
+                                         Register base,
+                                         bool key_is_constant,
+                                         int constant_key,
+                                         int element_size,
+                                         int shift_size,
+                                         int additional_index,
+                                         int additional_offset) {
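+  // If the key is in a register, fold a nonzero additional_index into it up
+  // front (scaled to the key's representation); the sum lives in scratch0().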
+  if (additional_index != 0 && !key_is_constant) {
+    additional_index *= 1 << (element_size - shift_size);
+    __ Addu(scratch0(), key, Operand(additional_index));
+  }
+
+  if (key_is_constant) {
+    return MemOperand(base,
+                      (constant_key << element_size) + additional_offset);
+  }
+
+  if (additional_index == 0) {
+    if (shift_size >= 0) {
+      __ sll(scratch0(), key, shift_size);
+      __ Addu(scratch0(), base, scratch0());
+      return MemOperand(scratch0());
+    } else {
+      ASSERT_EQ(-1, shift_size);
+      __ srl(scratch0(), key, 1);
+      __ Addu(scratch0(), base, scratch0());
+      return MemOperand(scratch0());
+    }
+  }
+
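+  // additional_index was folded into scratch0() above, so scale that sum; a
+  // shift_size of -1 means the key is a tagged smi that must be untagged.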
+  if (shift_size >= 0) {
+    __ sll(scratch0(), scratch0(), shift_size);
+    __ Addu(scratch0(), base, scratch0());
+    return MemOperand(scratch0());
+  } else {
+    ASSERT_EQ(-1, shift_size);
+    __ srl(scratch0(), scratch0(), 1);
+    __ Addu(scratch0(), base, scratch0());
+    return MemOperand(scratch0());
+  }
+}
+
 void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) {
   ASSERT(ToRegister(instr->object()).is(a1));
@@ -3737,107 +3736,7 @@ void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) {
 }
-void LCodeGen::DoStoreKeyedFastElement(LStoreKeyedFastElement* instr) {
-  Register value = ToRegister(instr->value());
-  Register elements = ToRegister(instr->object());
-  Register key = instr->key()->IsRegister() ? ToRegister(instr->key()) : no_reg;
-  Register scratch = scratch0();
-  Register store_base = scratch;
-  int offset = 0;
-
-  // Do the store.
-  if (instr->key()->IsConstantOperand()) {
-    ASSERT(!instr->hydrogen()->NeedsWriteBarrier());
-    LConstantOperand* const_operand = LConstantOperand::cast(instr->key());
-    offset = FixedArray::OffsetOfElementAt(ToInteger32(const_operand) +
-                                           instr->additional_index());
-    store_base = elements;
-  } else {
-    // Even though the HLoadKeyedFastElement instruction forces the input
-    // representation for the key to be an integer, the input gets replaced
-    // during bound check elimination with the index argument to the bounds
-    // check, which can be tagged, so that case must be handled here, too.
-    if (instr->hydrogen()->key()->representation().IsTagged()) {
-      __ sll(scratch, key, kPointerSizeLog2 - kSmiTagSize);
-      __ addu(scratch, elements, scratch);
-    } else {
-      __ sll(scratch, key, kPointerSizeLog2);
-      __ addu(scratch, elements, scratch);
-    }
-    offset = FixedArray::OffsetOfElementAt(instr->additional_index());
-  }
-  __ sw(value, FieldMemOperand(store_base, offset));
-
-  if (instr->hydrogen()->NeedsWriteBarrier()) {
-    HType type = instr->hydrogen()->value()->type();
-    SmiCheck check_needed =
-        type.IsHeapObject() ? OMIT_SMI_CHECK : INLINE_SMI_CHECK;
-    // Compute address of modified element and store it into key register.
-    __ Addu(key, store_base, Operand(offset - kHeapObjectTag));
-    __ RecordWrite(elements,
-                   key,
-                   value,
-                   kRAHasBeenSaved,
-                   kSaveFPRegs,
-                   EMIT_REMEMBERED_SET,
-                   check_needed);
-  }
-}
-
-
-void LCodeGen::DoStoreKeyedFastDoubleElement(
-    LStoreKeyedFastDoubleElement* instr) {
-  DoubleRegister value = ToDoubleRegister(instr->value());
-  Register elements = ToRegister(instr->elements());
-  Register key = no_reg;
-  Register scratch = scratch0();
-  bool key_is_constant = instr->key()->IsConstantOperand();
-  int constant_key = 0;
-  Label not_nan;
-
-  // Calculate the effective address of the slot in the array to store the
-  // double value.
-  if (key_is_constant) {
-    constant_key = ToInteger32(LConstantOperand::cast(instr->key()));
-    if (constant_key & 0xF0000000) {
-      Abort("array index constant value too big.");
-    }
-  } else {
-    key = ToRegister(instr->key());
-  }
-  int element_size_shift = ElementsKindToShiftSize(FAST_DOUBLE_ELEMENTS);
-  int shift_size = (instr->hydrogen()->key()->representation().IsTagged())
-      ? (element_size_shift - kSmiTagSize) : element_size_shift;
-  if (key_is_constant) {
-    __ Addu(scratch, elements, Operand((constant_key << element_size_shift) +
-            FixedDoubleArray::kHeaderSize - kHeapObjectTag));
-  } else {
-    __ sll(scratch, key, shift_size);
-    __ Addu(scratch, elements, Operand(scratch));
-    __ Addu(scratch, scratch,
-            Operand(FixedDoubleArray::kHeaderSize - kHeapObjectTag));
-  }
-
-  if (instr->NeedsCanonicalization()) {
-    Label is_nan;
-    // Check for NaN. All NaNs must be canonicalized.
-    __ BranchF(NULL, &is_nan, eq, value, value);
-    __ Branch(&not_nan);
-
-    // Only load canonical NaN if the comparison above set the overflow.
-    __ bind(&is_nan);
-    __ Move(value, FixedDoubleArray::canonical_not_the_hole_nan_as_double());
-  }
-
-  __ bind(&not_nan);
-  __ sdc1(value, MemOperand(scratch, instr->additional_index() <<
-      element_size_shift));
-}
-
-
-void LCodeGen::DoStoreKeyedSpecializedArrayElement(
-    LStoreKeyedSpecializedArrayElement* instr) {
-
+void LCodeGen::DoStoreKeyedExternal(LStoreKeyed* instr) {
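+  // Keyed stores to external (typed) arrays, where the backing store is a
+  // raw external_pointer rather than a FixedArray on the heap.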
   Register external_pointer = ToRegister(instr->external_pointer());
   Register key = no_reg;
   ElementsKind elements_kind = instr->elements_kind();
@@ -3861,7 +3760,7 @@ void LCodeGen::DoStoreKeyedSpecializedArrayElement(
     FPURegister value(ToDoubleRegister(instr->value()));
     if (key_is_constant) {
       __ Addu(scratch0(), external_pointer, constant_key <<
-          element_size_shift);
+                                            element_size_shift);
     } else {
       __ sll(scratch0(), key, shift_size);
       __ Addu(scratch0(), scratch0(), external_pointer);
@@ -3909,6 +3808,107 @@ void LCodeGen::DoStoreKeyedSpecializedArrayElement(
   }
 }
+void LCodeGen::DoStoreKeyed(LStoreKeyed* instr) {
+  // Dispatch by elements kind: external, fast double, or fast (tagged/smi).
+  if (instr->is_external()) {
+    DoStoreKeyedExternal(instr);
+  } else if (instr->hydrogen()->value()->representation().IsDouble()) {
+    DoubleRegister value = ToDoubleRegister(instr->value());
+    Register elements = ToRegister(instr->elements());
+    Register key = no_reg;
+    Register scratch = scratch0();
+    bool key_is_constant = instr->key()->IsConstantOperand();
+    int constant_key = 0;
+    Label not_nan;
+
+    // Calculate the effective address of the slot in the array to store the
+    // double value.
+    if (key_is_constant) {
+      constant_key = ToInteger32(LConstantOperand::cast(instr->key()));
+      if (constant_key & 0xF0000000) {
+        Abort("array index constant value too big.");
+      }
+    } else {
+      key = ToRegister(instr->key());
+    }
+    int element_size_shift = ElementsKindToShiftSize(FAST_DOUBLE_ELEMENTS);
+    int shift_size = (instr->hydrogen()->key()->representation().IsTagged())
+        ? (element_size_shift - kSmiTagSize) : element_size_shift;
+    if (key_is_constant) {
+      __ Addu(scratch, elements,
+              Operand((constant_key << element_size_shift) +
+                      FixedDoubleArray::kHeaderSize - kHeapObjectTag));
+    } else {
+      __ sll(scratch, key, shift_size);
+      __ Addu(scratch, elements, Operand(scratch));
+      __ Addu(scratch, scratch,
+              Operand(FixedDoubleArray::kHeaderSize - kHeapObjectTag));
+    }
+
+    if (instr->NeedsCanonicalization()) {
+      Label is_nan;
+      // Check for NaN. All NaNs must be canonicalized.
+      __ BranchF(NULL, &is_nan, eq, value, value);
+      __ Branch(&not_nan);
+
+      // Only load the canonical NaN if the comparison above found a NaN.
+      __ bind(&is_nan);
+      __ Move(value, FixedDoubleArray::canonical_not_the_hole_nan_as_double());
+    }
+
+    __ bind(&not_nan);
+    __ sdc1(value, MemOperand(scratch, instr->additional_index() <<
+                              element_size_shift));
+  } else {
+    Register value = ToRegister(instr->value());
+    Register elements = ToRegister(instr->object());
+    Register key = instr->key()->IsRegister() ? ToRegister(instr->key())
+                                              : no_reg;
+    Register scratch = scratch0();
+    Register store_base = scratch;
+    int offset = 0;
+
+    // Do the store.
+    if (instr->key()->IsConstantOperand()) {
+      ASSERT(!instr->hydrogen()->NeedsWriteBarrier());
+      LConstantOperand* const_operand = LConstantOperand::cast(instr->key());
+      offset = FixedArray::OffsetOfElementAt(ToInteger32(const_operand) +
+                                             instr->additional_index());
+      store_base = elements;
+    } else {
+      // Even though the HLoadKeyed instruction forces the input
+      // representation for the key to be an integer, the input gets replaced
+      // during bound check elimination with the index argument to the bounds
+      // check, which can be tagged, so that case must be handled here, too.
+      if (instr->hydrogen()->key()->representation().IsTagged()) {
+        __ sll(scratch, key, kPointerSizeLog2 - kSmiTagSize);
+        __ addu(scratch, elements, scratch);
+      } else {
+        __ sll(scratch, key, kPointerSizeLog2);
+        __ addu(scratch, elements, scratch);
+      }
+      offset = FixedArray::OffsetOfElementAt(instr->additional_index());
+    }
+    __ sw(value, FieldMemOperand(store_base, offset));
+
+    if (instr->hydrogen()->NeedsWriteBarrier()) {
+      HType type = instr->hydrogen()->value()->type();
+      SmiCheck check_needed =
+          type.IsHeapObject() ? OMIT_SMI_CHECK : INLINE_SMI_CHECK;
+      // Compute address of modified element and store it into key register.
+      __ Addu(key, store_base, Operand(offset - kHeapObjectTag));
+      __ RecordWrite(elements,
+                     key,
+                     value,
+                     kRAHasBeenSaved,
+                     kSaveFPRegs,
+                     EMIT_REMEMBERED_SET,
+                     check_needed);
+    }
+  }
+}
+
+
 void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) {
   ASSERT(ToRegister(instr->object()).is(a2));
   ASSERT(ToRegister(instr->key()).is(a1));