Index: test/cctest/test-unboxed-doubles.cc
diff --git a/test/cctest/test-unboxed-doubles.cc b/test/cctest/test-unboxed-doubles.cc
index fdcac3af355067c2d1e5945ef39e83cdec439692..e99e99f08cbc2fe7561747828ade80b718ef9637 100644
--- a/test/cctest/test-unboxed-doubles.cc
+++ b/test/cctest/test-unboxed-doubles.cc
@@ -48,6 +48,13 @@ static Handle<String> MakeName(const char* str, int suffix) {
 }
 
 
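+// Returns the named global property as an (internal) JSObject handle.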
+static Handle<JSObject> GetObject(const char* name) {
+  return v8::Utils::OpenHandle(
+      *v8::Handle<v8::Object>::Cast(CcTest::global()->Get(v8_str(name))));
+}
+
+
 static double GetDoubleFieldValue(JSObject* obj, FieldIndex field_index) {
   if (obj->IsUnboxedDoubleField(field_index)) {
     return obj->RawFastDoublePropertyAt(field_index);
@@ -1305,4 +1311,240 @@ TEST(WriteBarriersInCopyJSObject) {
   CHECK_EQ(boom_value, clone->RawFastDoublePropertyAt(index));
 }
 
+
+static void TestWriteBarrier(Handle<Map> map, Handle<Map> new_map,
+                             int tagged_descriptor, int double_descriptor,
+                             bool check_tagged_value = true) {
+  FLAG_stress_compaction = true;
+  FLAG_manual_evacuation_candidates_selection = true;
+  Isolate* isolate = CcTest::i_isolate();
+  Factory* factory = isolate->factory();
+  Heap* heap = CcTest::heap();
+  PagedSpace* old_pointer_space = heap->old_pointer_space();
+
+  // The plan: create |obj| from |map| in old space, create |obj_value| in
+  // new space, and ensure that the write barrier is triggered when
+  // |obj_value| is written to property |tagged_descriptor| of |obj|.
+  // Then migrate the object to |new_map| and set a proper value for property
+  // |double_descriptor|. Call GC and ensure that it does not crash while
+  // updating the store buffer entries.
+
+  Handle<JSObject> obj;
+  Handle<HeapObject> obj_value;
+  {
+    AlwaysAllocateScope always_allocate(isolate);
+    obj = factory->NewJSObjectFromMap(map, TENURED, false);
+    CHECK(old_pointer_space->Contains(*obj));
+
+    obj_value = factory->NewJSArray(32 * KB, FAST_HOLEY_ELEMENTS);
+  }
+
+  CHECK(heap->InNewSpace(*obj_value));
+
+  StoreBuffer* store_buffer = heap->store_buffer();
+  USE(store_buffer);
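+  // |store_buffer| and |slot| are consulted only by the DCHECKs below, so
+  // USE() keeps release builds free of unused-variable warnings.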
+  Address slot;
+  {
+    FieldIndex index = FieldIndex::ForDescriptor(*map, tagged_descriptor);
+    int offset = index.offset();
+    slot = reinterpret_cast<Address>(HeapObject::RawField(*obj, offset));
+    USE(slot);
+    DCHECK(!store_buffer->CellIsInStoreBuffer(slot));
+
+    const int n = 153;
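+    // Each store of the new-space |obj_value| into the old-space |obj| goes
+    // through the write barrier and should record |slot| in the store buffer.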
+    for (int i = 0; i < n; i++) {
+      obj->FastPropertyAtPut(index, *obj_value);
+    }
+    // Ensure that the slot was actually added to the store buffer.
+    DCHECK(store_buffer->CellIsInStoreBuffer(slot));
+  }
+
+  // Migrate |obj| to |new_map|, which should shift fields and put
+  // |boom_value| into the slot that was recorded earlier by the write barrier.
+  JSObject::MigrateToMap(obj, new_map);
+
+  // Ensure that invalid entries were removed from the store buffer.
+  DCHECK(!store_buffer->CellIsInStoreBuffer(slot));
+
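+  // |boom_value| is a raw double whose bit pattern looks like a pointer just
+  // past |obj_value|; if the store buffer still referenced the migrated slot,
+  // the scavenger would treat this unboxed double as a tagged pointer.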
+  Address fake_object = reinterpret_cast<Address>(*obj_value) + kPointerSize;
+  double boom_value = bit_cast<double>(fake_object);
+
+  FieldIndex double_field_index =
+      FieldIndex::ForDescriptor(*new_map, double_descriptor);
+  CHECK(obj->IsUnboxedDoubleField(double_field_index));
+  obj->RawFastDoublePropertyAtPut(double_field_index, boom_value);
+
+  // Trigger a new-space GC, which walks the store buffer entries.
+  CcTest::heap()->CollectGarbage(NEW_SPACE, "boom");
+
+  if (check_tagged_value) {
+    FieldIndex tagged_field_index =
+        FieldIndex::ForDescriptor(*new_map, tagged_descriptor);
+    CHECK_EQ(*obj_value, obj->RawFastPropertyAt(tagged_field_index));
+  }
+  CHECK_EQ(boom_value, obj->RawFastDoublePropertyAt(double_field_index));
+}
+
+
+static void TestIncrementalWriteBarrier(Handle<Map> map, Handle<Map> new_map,
+                                        int tagged_descriptor,
+                                        int double_descriptor,
+                                        bool check_tagged_value = true) {
+  if (FLAG_never_compact || !FLAG_incremental_marking) return;
+  FLAG_stress_compaction = true;
+  FLAG_manual_evacuation_candidates_selection = true;
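+  // Manual evacuation candidate selection is required so that |ec_page| can
+  // be forced to become an evacuation candidate below.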
+  Isolate* isolate = CcTest::i_isolate();
+  Factory* factory = isolate->factory();
+  Heap* heap = CcTest::heap();
+  PagedSpace* old_pointer_space = heap->old_pointer_space();
+
+  // The plan: create |obj| from |map| in old space, create |obj_value| in
+  // old space and ensure it ends up on an evacuation candidate page. Start
+  // incremental marking and ensure that the incremental write barrier is
+  // triggered when |obj_value| is written to property |tagged_descriptor| of
+  // |obj|. Then migrate the object to |new_map| and set a proper value for
+  // property |double_descriptor|. Call GC and ensure that it does not crash
+  // while updating the slots buffer entries.
+
+  Handle<JSObject> obj;
+  Handle<HeapObject> obj_value;
+  Page* ec_page;
+  {
+    AlwaysAllocateScope always_allocate(isolate);
+    obj = factory->NewJSObjectFromMap(map, TENURED, false);
+    CHECK(old_pointer_space->Contains(*obj));
+
+    // Make sure |obj_value| ends up on a separate old-space page.
+    SimulateFullSpace(old_pointer_space);
+    obj_value = factory->NewJSArray(32 * KB, FAST_HOLEY_ELEMENTS, TENURED);
+    ec_page = Page::FromAddress(obj_value->address());
+    CHECK_NE(ec_page, Page::FromAddress(obj->address()));
+  }
+
+  // The heap is ready; force |ec_page| to become an evacuation candidate and
+  // simulate incremental marking.
+  ec_page->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
+  SimulateIncrementalMarking(heap);
+
+  // Check that everything is ready for triggering the incremental write
+  // barrier: both |obj| and |obj_value| are black, the marking phase is still
+  // active, and |obj_value|'s page is indeed an evacuation candidate.
+  IncrementalMarking* marking = heap->incremental_marking();
+  CHECK(marking->IsMarking());
+  CHECK(Marking::IsBlack(Marking::MarkBitFrom(*obj)));
+  CHECK(Marking::IsBlack(Marking::MarkBitFrom(*obj_value)));
+  CHECK(MarkCompactCollector::IsOnEvacuationCandidate(*obj_value));
+
+  // Trigger the incremental write barrier, which should add a slot to
+  // |ec_page|'s slots buffer.
+  {
+    int slots_buffer_len = SlotsBuffer::SizeOfChain(ec_page->slots_buffer());
+    FieldIndex index = FieldIndex::ForDescriptor(*map, tagged_descriptor);
+    const int n = SlotsBuffer::kNumberOfElements + 10;
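+    // Writing more entries than a single SlotsBuffer holds should make the
+    // buffer grow into a chain, which SizeOfChain() accounts for below.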
+    for (int i = 0; i < n; i++) {
+      obj->FastPropertyAtPut(index, *obj_value);
+    }
+    // Ensure that each store was actually recorded in |ec_page|'s slots buffer.
+    CHECK_EQ(slots_buffer_len + n,
+             SlotsBuffer::SizeOfChain(ec_page->slots_buffer()));
+  }
+
+  // Migrate |obj| to |new_map|, which should shift fields and put
+  // |boom_value| into the slot that was recorded earlier by the incremental
+  // write barrier.
+  JSObject::MigrateToMap(obj, new_map);
+
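+  // This bit pattern is not a valid object address; if the slots buffer still
+  // referenced the shifted slot, the compactor would try to update it as a
+  // pointer and crash.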
+  double boom_value = bit_cast<double>(UINT64_C(0xbaad0176a37c28e1));
+
+  FieldIndex double_field_index =
+      FieldIndex::ForDescriptor(*new_map, double_descriptor);
+  CHECK(obj->IsUnboxedDoubleField(double_field_index));
+  obj->RawFastDoublePropertyAtPut(double_field_index, boom_value);
+
+  // Trigger GC to evacuate all candidates.
+  CcTest::heap()->CollectGarbage(OLD_POINTER_SPACE, "boom");
+
+  // Ensure that the values are still there and correct.
+  CHECK(!MarkCompactCollector::IsOnEvacuationCandidate(*obj_value));
+
+  if (check_tagged_value) {
+    FieldIndex tagged_field_index =
+        FieldIndex::ForDescriptor(*new_map, tagged_descriptor);
+    CHECK_EQ(*obj_value, obj->RawFastPropertyAt(tagged_field_index));
+  }
+  CHECK_EQ(boom_value, obj->RawFastDoublePropertyAt(double_field_index));
+}
+
+
+enum WriteBarrierKind { OLD_TO_OLD_WRITE_BARRIER, OLD_TO_NEW_WRITE_BARRIER };
+static void TestWriteBarrierObjectShiftFieldsRight(
+    WriteBarrierKind write_barrier_kind) {
+  CcTest::InitializeVM();
+  Isolate* isolate = CcTest::i_isolate();
+  v8::HandleScope scope(CcTest::isolate());
+
+  Handle<HeapType> any_type = HeapType::Any(isolate);
+
+  CompileRun("function func() { return 1; }");
+
+  Handle<JSObject> func = GetObject("func");
+
+  Handle<Map> map = Map::Create(isolate, 10);
+  map = Map::CopyWithConstant(map, MakeName("prop", 0), func, NONE,
+                              INSERT_TRANSITION).ToHandleChecked();
+  map = Map::CopyWithField(map, MakeName("prop", 1), any_type, NONE,
+                           Representation::Double(),
+                           INSERT_TRANSITION).ToHandleChecked();
+  map = Map::CopyWithField(map, MakeName("prop", 2), any_type, NONE,
+                           Representation::Tagged(),
+                           INSERT_TRANSITION).ToHandleChecked();
+
+  // Shift fields right by turning the constant property into a field.
+  Handle<Map> new_map = Map::ReconfigureProperty(
+      map, 0, kData, NONE, Representation::Tagged(), any_type, FORCE_FIELD);
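+  // Reconfiguring descriptor 0 from a constant to a field shifts all later
+  // fields one slot to the right, so after migration the unboxed double
+  // occupies the offset where the tagged field used to live.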
+
+  if (write_barrier_kind == OLD_TO_NEW_WRITE_BARRIER) {
+    TestWriteBarrier(map, new_map, 2, 1);
+  } else {
+    CHECK_EQ(OLD_TO_OLD_WRITE_BARRIER, write_barrier_kind);
+    TestIncrementalWriteBarrier(map, new_map, 2, 1);
+  }
+}
+
+
+TEST(WriteBarrierObjectShiftFieldsRight) {
+  TestWriteBarrierObjectShiftFieldsRight(OLD_TO_NEW_WRITE_BARRIER);
+}
+
+
+TEST(IncrementalWriteBarrierObjectShiftFieldsRight) {
+  TestWriteBarrierObjectShiftFieldsRight(OLD_TO_OLD_WRITE_BARRIER);
+}
+
+
+// TODO(ishell): add corresponding tests for reconfiguring the property kind
+// from accessor field to double, once accessor fields are supported by
+// Map::ReconfigureProperty().
+
+
+// TODO(ishell): add corresponding tests for the fast property removal case
+// once Map::ReconfigureProperty() supports it.
+
 #endif