OLD | NEW |
1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 // TODO(mythria): Remove this define after this flag is turned on globally | 5 // TODO(mythria): Remove this define after this flag is turned on globally |
6 #define V8_IMMINENT_DEPRECATION_WARNINGS | 6 #define V8_IMMINENT_DEPRECATION_WARNINGS |
7 | 7 |
8 #include <stdlib.h> | 8 #include <stdlib.h> |
9 #include <utility> | 9 #include <utility> |
10 | 10 |
(...skipping 1386 matching lines...)
1397 MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address()); | 1397 MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address()); |
1398 chunk->set_scan_on_scavenge(true); | 1398 chunk->set_scan_on_scavenge(true); |
1399 | 1399 |
1400 // Trigger GCs and force evacuation. Should not crash there. | 1400 // Trigger GCs and force evacuation. Should not crash there. |
1401 CcTest::heap()->CollectAllGarbage(); | 1401 CcTest::heap()->CollectAllGarbage(); |
1402 | 1402 |
1403 CHECK_EQ(boom_value, GetDoubleFieldValue(*obj, field_index)); | 1403 CHECK_EQ(boom_value, GetDoubleFieldValue(*obj, field_index)); |
1404 } | 1404 } |
1405 | 1405 |
1406 | 1406 |
1407 static int LenFromSize(int size) { | |
1408 return (size - FixedArray::kHeaderSize) / kPointerSize; | |
1409 } | |
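
As a side note, LenFromSize() is just the inverse of the FixedArray size formula size = header + length * pointer_size. A minimal standalone sketch of that relation, using assumed 64-bit constants for illustration rather than V8's real definitions:

#include <cassert>

// Assumed constants (64-bit build) for illustration only; V8's real values
// come from FixedArray::kHeaderSize and kPointerSize.
constexpr int kPointerSize = 8;
constexpr int kFixedArrayHeaderSize = 2 * kPointerSize;  // map word + length word

constexpr int SizeFromLen(int len) {
  return kFixedArrayHeaderSize + len * kPointerSize;
}

constexpr int LenFromSize(int size) {
  return (size - kFixedArrayHeaderSize) / kPointerSize;
}

int main() {
  // The two helpers are inverses: 16 + 100 * 8 = 816 bytes holds 100 elements.
  static_assert(SizeFromLen(100) == 816, "size formula");
  static_assert(LenFromSize(816) == 100, "inverse relation");
  assert(LenFromSize(SizeFromLen(7)) == 7);
  return 0;
}
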
1410 | |
1411 | |
1412 HEAP_TEST(WriteBarriersInCopyJSObject) { | |
1413 FLAG_max_semi_space_size = 1; // Ensure new space is not growing. | |
1414 CcTest::InitializeVM(); | |
1415 Isolate* isolate = CcTest::i_isolate(); | |
1416 Heap* heap = CcTest::heap(); | |
1417 | |
1418 v8::HandleScope scope(CcTest::isolate()); | |
1419 | |
1420 // The plan: create a JSObject which contains an unboxed double value that |
1421 // looks like a reference to an object in new space. |
1422 // Then clone this object (forcing it to go into old space) and check |
1423 // that the value of the unboxed double property of the cloned object |
1424 // was not corrupted by GC. |
1425 | |
1426 // Step 1: prepare a map for the object. We add an unboxed double property to it. |
1427 // Create a map with a single in-object property. |
1428 Handle<Map> my_map = Map::Create(isolate, 1); | |
1429 Handle<String> name = isolate->factory()->InternalizeUtf8String("foo"); | |
1430 my_map = Map::CopyWithField(my_map, name, HeapType::Any(isolate), NONE, | |
1431 Representation::Double(), | |
1432 INSERT_TRANSITION).ToHandleChecked(); | |
1433 | |
1434 int object_size = my_map->instance_size(); | |
1435 | |
1436 // Step 2: allocate a lot of objects so as to almost fill new space: we need |
1437 // just enough room left to allocate the JSObject and thus fill the new space. |
1438 | |
1439 int allocation_amount = | |
1440 Min(FixedArray::kMaxSize, Page::kMaxRegularHeapObjectSize + kPointerSize); | |
1441 int allocation_len = LenFromSize(allocation_amount); | |
1442 NewSpace* new_space = heap->new_space(); | |
1443 DisableInlineAllocationSteps(new_space); | |
1444 Address* top_addr = new_space->allocation_top_address(); | |
1445 Address* limit_addr = new_space->allocation_limit_address(); | |
1446 while ((*limit_addr - *top_addr) > allocation_amount) { | |
1447 CHECK(!heap->always_allocate()); | |
1448 Object* array = heap->AllocateFixedArray(allocation_len).ToObjectChecked(); | |
1449 CHECK(new_space->Contains(array)); | |
1450 } | |
1451 | |
1452 // Step 3: now allocate a fixed array and a JSObject to fill the whole new space. |
1453 int to_fill = static_cast<int>(*limit_addr - *top_addr - object_size); | |
1454 int fixed_array_len = LenFromSize(to_fill); | |
1455 CHECK(fixed_array_len < FixedArray::kMaxLength); | |
1456 | |
1457 CHECK(!heap->always_allocate()); | |
1458 Object* array = heap->AllocateFixedArray(fixed_array_len).ToObjectChecked(); | |
1459 CHECK(new_space->Contains(array)); | |
1460 | |
1461 Object* object = heap->AllocateJSObjectFromMap(*my_map).ToObjectChecked(); | |
1462 CHECK(new_space->Contains(object)); | |
1463 JSObject* jsobject = JSObject::cast(object); | |
1464 CHECK_EQ(0, FixedArray::cast(jsobject->elements())->length()); | |
1465 CHECK_EQ(0, jsobject->properties()->length()); | |
1466 | |
1467 // Construct a double value that looks like a pointer to the new space object |
1468 // and store it into the object. |
1469 Address fake_object = reinterpret_cast<Address>(array) + kPointerSize; | |
1470 double boom_value = bit_cast<double>(fake_object); | |
1471 FieldIndex index = FieldIndex::ForDescriptor(*my_map, 0); | |
1472 jsobject->RawFastDoublePropertyAtPut(index, boom_value); | |
1473 | |
1474 CHECK_EQ(0, static_cast<int>(*limit_addr - *top_addr)); | |
1475 | |
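
Steps 2 and 3 above work because new space is a bump-pointer allocator: the distance between the allocation top and the limit is exactly the remaining free space, so the space can be filled to the byte. A toy model of that filling logic, with made-up sizes and none of V8's types:

#include <cstddef>
#include <cstdint>
#include <cstdio>
#include <vector>

// Toy bump-pointer space: allocation just advances `top` toward `limit`,
// analogous to the top/limit pair the test reads through
// allocation_top_address()/allocation_limit_address().
struct BumpSpace {
  std::vector<std::uint8_t> backing;
  std::uint8_t* top;
  std::uint8_t* limit;

  explicit BumpSpace(std::size_t bytes)
      : backing(bytes), top(backing.data()), limit(backing.data() + bytes) {}

  void* Allocate(std::size_t size) {
    if (static_cast<std::size_t>(limit - top) < size) return nullptr;
    void* result = top;
    top += size;
    return result;
  }
};

int main() {
  BumpSpace space(4096);          // stand-in for new space
  const std::size_t chunk = 256;  // stand-in for allocation_amount
  const std::size_t object_size = 64;

  // Step 2 analogue: allocate chunks while more than one chunk remains.
  while (static_cast<std::size_t>(space.limit - space.top) > chunk) {
    space.Allocate(chunk);
  }
  // Step 3 analogue: consume the leftover minus object_size, so the final
  // allocation lands flush against the limit and the space is exactly full.
  std::size_t to_fill =
      static_cast<std::size_t>(space.limit - space.top) - object_size;
  space.Allocate(to_fill);
  space.Allocate(object_size);
  std::printf("bytes left: %td\n", space.limit - space.top);  // prints 0
  return 0;
}
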
1476 // Step 4: clone jsobject, but force always-allocate first to create the clone |
1477 // in old space. |
1478 AlwaysAllocateScope aa_scope(isolate); | |
1479 Object* clone_obj = heap->CopyJSObject(jsobject).ToObjectChecked(); | |
1480 Handle<JSObject> clone(JSObject::cast(clone_obj)); | |
1481 CHECK(heap->old_space()->Contains(clone->address())); | |
1482 | |
1483 CcTest::heap()->CollectGarbage(NEW_SPACE, "boom"); | |
1484 | |
1485 // The value in cloned object should not be corrupted by GC. | |
1486 CHECK_EQ(boom_value, clone->RawFastDoublePropertyAt(index)); | |
1487 } | |
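
The crux of the deleted test above is the boom_value construction: a raw heap address is reinterpreted bit-for-bit as a double, so a collector that mistakenly scans the unboxed field as a tagged pointer would see a plausible new-space reference. A minimal standalone sketch of that reinterpretation, with std::memcpy standing in for V8's bit_cast helper (illustrative only, none of this is V8 code):

#include <cstdint>
#include <cstdio>
#include <cstring>

int main() {
  // Stand-in for a heap object; only its address matters here.
  static int dummy_object = 0;
  std::uintptr_t fake_object = reinterpret_cast<std::uintptr_t>(&dummy_object);

  // Reinterpret the address bits as a double without any numeric conversion
  // (std::memcpy plays the role of bit_cast in this sketch).
  static_assert(sizeof(double) == sizeof(std::uintptr_t),
                "illustration assumes 64-bit pointers");
  double boom_value;
  std::memcpy(&boom_value, &fake_object, sizeof(boom_value));

  // The round trip is lossless: the double's bit pattern is exactly the
  // original address, which is why an unboxed double field can masquerade
  // as a pointer if the GC misinterprets it.
  std::uintptr_t recovered;
  std::memcpy(&recovered, &boom_value, sizeof(recovered));
  std::printf("%s\n", recovered == fake_object ? "bits preserved" : "corrupted");
  return 0;
}

In the test, the map declares the field as an unboxed double, so the GC must treat it as raw bits; the final CHECK_EQ verifies that the bit pattern survived the scavenge of the clone untouched.
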
1488 | |
1489 | |
1490 static void TestWriteBarrier(Handle<Map> map, Handle<Map> new_map, | 1407 static void TestWriteBarrier(Handle<Map> map, Handle<Map> new_map, |
1491 int tagged_descriptor, int double_descriptor, | 1408 int tagged_descriptor, int double_descriptor, |
1492 bool check_tagged_value = true) { | 1409 bool check_tagged_value = true) { |
1493 FLAG_stress_compaction = true; | 1410 FLAG_stress_compaction = true; |
1494 FLAG_manual_evacuation_candidates_selection = true; | 1411 FLAG_manual_evacuation_candidates_selection = true; |
1495 Isolate* isolate = CcTest::i_isolate(); | 1412 Isolate* isolate = CcTest::i_isolate(); |
1496 Factory* factory = isolate->factory(); | 1413 Factory* factory = isolate->factory(); |
1497 Heap* heap = CcTest::heap(); | 1414 Heap* heap = CcTest::heap(); |
1498 PagedSpace* old_space = heap->old_space(); | 1415 PagedSpace* old_space = heap->old_space(); |
1499 | 1416 |
(...skipping 46 matching lines...)
1546 } | 1463 } |
1547 CHECK_EQ(boom_value, obj->RawFastDoublePropertyAt(double_field_index)); | 1464 CHECK_EQ(boom_value, obj->RawFastDoublePropertyAt(double_field_index)); |
1548 } | 1465 } |
1549 | 1466 |
1550 | 1467 |
1551 static void TestIncrementalWriteBarrier(Handle<Map> map, Handle<Map> new_map, | 1468 static void TestIncrementalWriteBarrier(Handle<Map> map, Handle<Map> new_map, |
1552 int tagged_descriptor, | 1469 int tagged_descriptor, |
1553 int double_descriptor, | 1470 int double_descriptor, |
1554 bool check_tagged_value = true) { | 1471 bool check_tagged_value = true) { |
1555 if (FLAG_never_compact || !FLAG_incremental_marking) return; | 1472 if (FLAG_never_compact || !FLAG_incremental_marking) return; |
1556 FLAG_stress_compaction = true; | |
1557 FLAG_manual_evacuation_candidates_selection = true; | 1473 FLAG_manual_evacuation_candidates_selection = true; |
1558 Isolate* isolate = CcTest::i_isolate(); | 1474 Isolate* isolate = CcTest::i_isolate(); |
1559 Factory* factory = isolate->factory(); | 1475 Factory* factory = isolate->factory(); |
1560 Heap* heap = CcTest::heap(); | 1476 Heap* heap = CcTest::heap(); |
1561 PagedSpace* old_space = heap->old_space(); | 1477 PagedSpace* old_space = heap->old_space(); |
1562 | 1478 |
1563 // The plan: create |obj| by |map| in old space, create |obj_value| in | 1479 // The plan: create |obj| by |map| in old space, create |obj_value| in |
1564 // old space and ensure it ends up on an evacuation candidate page. Start | 1480 // old space and ensure it ends up on an evacuation candidate page. Start |
1565 // incremental marking and ensure that the incremental write barrier is triggered | 1481 // incremental marking and ensure that the incremental write barrier is triggered |
1566 // when |obj_value| is written to property |tagged_descriptor| of |obj|. | 1482 // when |obj_value| is written to property |tagged_descriptor| of |obj|. |
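
For context, a write barrier is the hook executed on every pointer store so the collector can track references that cross region boundaries; the incremental flavour additionally keeps the marking state consistent while the mutator keeps running. A deliberately simplified toy model of the recording idea (all names invented, not V8's implementation):

#include <cstdio>
#include <unordered_set>

struct Object {
  Object* field = nullptr;
};

// Toy "remembered set": a real collector records slots per page or per card,
// and only for stores that create old-to-new or old-to-candidate pointers.
static std::unordered_set<Object**> g_remembered_slots;

// Every pointer store goes through the barrier: perform the store, then record
// the slot so a later (incremental or evacuating) GC can find and update it.
static void WriteBarrier(Object** slot, Object* value) {
  *slot = value;
  if (value != nullptr) g_remembered_slots.insert(slot);
}

int main() {
  Object host, value;
  WriteBarrier(&host.field, &value);
  std::printf("recorded slots: %zu\n", g_remembered_slots.size());
  return 0;
}
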
(...skipping 121 matching lines...)
1688 | 1604 |
1689 // TODO(ishell): add respective tests for property kind reconfiguring from | 1605 // TODO(ishell): add respective tests for property kind reconfiguring from |
1690 // accessor field to double, once accessor fields are supported by | 1606 // accessor field to double, once accessor fields are supported by |
1691 // Map::ReconfigureProperty(). | 1607 // Map::ReconfigureProperty(). |
1692 | 1608 |
1693 | 1609 |
1694 // TODO(ishell): add respective tests for fast property removal case once | 1610 // TODO(ishell): add respective tests for fast property removal case once |
1695 // Map::ReconfigureProperty() supports that. | 1611 // Map::ReconfigureProperty() supports that. |
1696 | 1612 |
1697 #endif | 1613 #endif |