Index: src/x87/macro-assembler-x87.cc
diff --git a/src/x87/macro-assembler-x87.cc b/src/x87/macro-assembler-x87.cc
index 5607426171f8bcb15bf83f2744697e7c48dd2c90..5d9f9bd33deb0acb7eca747a49fb04e4d84d6594 100644
--- a/src/x87/macro-assembler-x87.cc
+++ b/src/x87/macro-assembler-x87.cc
@@ -3174,18 +3174,36 @@ void MacroAssembler::TestJSArrayForAllocationMemento(
     Register receiver_reg,
     Register scratch_reg,
     Label* no_memento_found) {
-  ExternalReference new_space_start =
-      ExternalReference::new_space_start(isolate());
+  Label map_check;
   ExternalReference new_space_allocation_top =
       ExternalReference::new_space_allocation_top_address(isolate());
-
-  lea(scratch_reg, Operand(receiver_reg,
-      JSArray::kSize + AllocationMemento::kSize - kHeapObjectTag));
-  cmp(scratch_reg, Immediate(new_space_start));
-  j(less, no_memento_found);
+  const int kMementoMapOffset = JSArray::kSize - kHeapObjectTag;
+  const int kMementoEndOffset = kMementoMapOffset + AllocationMemento::kSize;
+
+  // Bail out if the object sits on the page boundary as no memento can follow
+  // and we cannot touch the memory following it.
+  lea(scratch_reg, Operand(receiver_reg, kMementoEndOffset));
+  xor_(scratch_reg, receiver_reg);
+  cmp(scratch_reg, Immediate(Page::kPageSize));
+  j(greater, no_memento_found);
+  // Bail out if the object is not in new space.
+  lea(scratch_reg, Operand(receiver_reg, kMementoEndOffset));
+  JumpIfNotInNewSpace(scratch_reg, scratch_reg, no_memento_found);
+  // If the object is in new space, we need to check whether it is on the same
+  // page as the current top.
+  lea(scratch_reg, Operand(receiver_reg, kMementoEndOffset));
+  xor_(scratch_reg, Operand::StaticVariable(new_space_allocation_top));
+  test(scratch_reg, Immediate(~Page::kPageAlignmentMask));
+  // If top is not on the same page as the current object, we can just continue
+  // with the map check.
+  j(not_zero, &map_check);
+  // Otherwise, we have to check whether we are still below top, to ensure that
+  // we are not checking against a stale memento.
+  lea(scratch_reg, Operand(receiver_reg, kMementoEndOffset));
   cmp(scratch_reg, Operand::StaticVariable(new_space_allocation_top));
   j(greater, no_memento_found);
-  cmp(MemOperand(scratch_reg, -AllocationMemento::kSize),
+  bind(&map_check);
+  cmp(MemOperand(receiver_reg, kMementoMapOffset),
       Immediate(isolate()->factory()->allocation_memento_map()));
 }
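
The core of the new sequence is the same-page test: new-space pages are
aligned to a power-of-two size, so two addresses lie on the same page exactly
when their xor has no bits set above the page-offset bits. That is what the
xor_ / test-against-~Page::kPageAlignmentMask pair checks versus the
allocation top before deciding whether the below-top comparison is needed.
Below is a minimal standalone C++ sketch of that predicate; the constants and
the OnSamePage helper are stand-ins for illustration only (the real values
come from V8's Page::kPageSize and Page::kPageAlignmentMask).

  #include <cstdint>
  #include <cstdio>

  // Stand-in constants; V8 defines the real ones as Page::kPageSize and
  // Page::kPageAlignmentMask.
  constexpr uintptr_t kPageSize = uintptr_t{1} << 20;
  constexpr uintptr_t kPageAlignmentMask = kPageSize - 1;

  // Mirrors the assembly sequence
  //   lea(scratch_reg, Operand(receiver_reg, kMementoEndOffset));
  //   xor_(scratch_reg, Operand::StaticVariable(new_space_allocation_top));
  //   test(scratch_reg, Immediate(~Page::kPageAlignmentMask));
  // whose result is zero exactly when both addresses fall on the same
  // power-of-two-aligned page.
  bool OnSamePage(uintptr_t a, uintptr_t b) {
    return ((a ^ b) & ~kPageAlignmentMask) == 0;
  }

  int main() {
    const uintptr_t page_start = uintptr_t{5} * kPageSize;  // hypothetical page
    const uintptr_t object = page_start + kPageSize - 8;    // near the page end
    const uintptr_t memento_end = object + 32;              // past the boundary
    std::printf("object vs. page start:  same page = %d\n",
                OnSamePage(object, page_start));
    std::printf("object vs. memento end: same page = %d\n",
                OnSamePage(object, memento_end));
    return 0;
  }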