Index: src/arm/builtins-arm.cc |
diff --git a/src/arm/builtins-arm.cc b/src/arm/builtins-arm.cc |
index eff47e2692bfc2627f4e4269da609b030d39367d..0581a12ec209ea81e18ceff2fd1146608e7d99e9 100644 |
--- a/src/arm/builtins-arm.cc |
+++ b/src/arm/builtins-arm.cc |
@@ -460,9 +460,8 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm, |
// r3: object size (in words) |
// r4: JSObject (not tagged) |
// r5: First in-object property of JSObject (not tagged) |
- __ add(r6, r4, Operand(r3, LSL, kPointerSizeLog2)); // End of object. |
ASSERT_EQ(3 * kPointerSize, JSObject::kHeaderSize); |
- __ LoadRoot(r7, Heap::kUndefinedValueRootIndex); |
+ __ LoadRoot(r6, Heap::kUndefinedValueRootIndex); |
if (count_constructions) { |
__ ldr(r0, FieldMemOperand(r2, Map::kInstanceSizesOffset)); |
__ Ubfx(r0, r0, Map::kPreAllocatedPropertyFieldsByte * kBitsPerByte, |
@@ -470,14 +469,16 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm, |
__ add(r0, r5, Operand(r0, LSL, kPointerSizeLog2)); |
// r0: offset of first field after pre-allocated fields |
if (FLAG_debug_code) { |
- __ cmp(r0, r6); |
+ __ add(ip, r4, Operand(r3, LSL, kPointerSizeLog2)); // End of object. |
+ __ cmp(r0, ip); |
__ Assert(le, "Unexpected number of pre-allocated property fields."); |
} |
- __ InitializeFieldsWithFiller(r5, r0, r7); |
+ __ InitializeFieldsWithFiller(r5, r0, r6); |
// To allow for truncation. |
- __ LoadRoot(r7, Heap::kOnePointerFillerMapRootIndex); |
+ __ LoadRoot(r6, Heap::kOnePointerFillerMapRootIndex); |
} |
- __ InitializeFieldsWithFiller(r5, r6, r7); |
+ __ add(r0, r4, Operand(r3, LSL, kPointerSizeLog2)); // End of object. |
+ __ InitializeFieldsWithFiller(r5, r0, r6); |
// Add the object tag to make the JSObject real, so that we can continue |
// and jump into the continuation code at any time from now on. Any |
@@ -542,16 +543,10 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm, |
__ add(r6, r2, Operand(r3, LSL, kPointerSizeLog2)); // End of object. |
ASSERT_EQ(2 * kPointerSize, FixedArray::kHeaderSize); |
{ Label loop, entry; |
- if (count_constructions) { |
- __ LoadRoot(r7, Heap::kUndefinedValueRootIndex); |
- } else if (FLAG_debug_code) { |
- __ LoadRoot(r8, Heap::kUndefinedValueRootIndex); |
- __ cmp(r7, r8); |
- __ Assert(eq, "Undefined value not loaded."); |
- } |
JF
2013/07/29 17:20:09
I'm not sure I understand this change.
rmcilroy
2013/07/30 11:39:44
Beforehand, the code would only reload r7 with H
|
+ __ LoadRoot(r0, Heap::kUndefinedValueRootIndex); |
__ b(&entry); |
__ bind(&loop); |
- __ str(r7, MemOperand(r2, kPointerSize, PostIndex)); |
+ __ str(r0, MemOperand(r2, kPointerSize, PostIndex)); |
__ bind(&entry); |
__ cmp(r2, r6); |
__ b(lt, &loop); |
@@ -715,7 +710,7 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm, |
// r2: receiver |
// r3: argc |
// r4: argv |
- // r5-r7, cp may be clobbered |
+ // r5-r6, cp may be clobbered |
JF
2013/07/29 17:20:09
r7 can still be clobbered if the flag doesn't sequester it
rmcilroy
2013/07/30 11:39:44
Done.
|
ProfileEntryHookStub::MaybeCallEntryHook(masm); |
// Clear the context before we push it when entering the internal frame. |
@@ -755,7 +750,6 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm, |
__ LoadRoot(r4, Heap::kUndefinedValueRootIndex); |
__ mov(r5, Operand(r4)); |
__ mov(r6, Operand(r4)); |
- __ mov(r7, Operand(r4)); |
JF
2013/07/29 17:20:09
This should be conditional on the flag being enabl
rmcilroy
2013/07/30 11:39:44
Done.
|
if (kR9Available == 1) { |
__ mov(r9, Operand(r4)); |
} |