Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(247)

Side by Side Diff: src/objects-inl.h

Issue 3089005: [Isolates] Add a pointer to Heap to a meta map. (Closed)
Patch Set: review feedback Created 10 years, 4 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
« no previous file with comments | « src/objects.cc ('k') | src/serialize.cc » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2006-2008 the V8 project authors. All rights reserved. 1 // Copyright 2006-2008 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 89 matching lines...) Expand 10 before | Expand all | Expand 10 after
100 100
101 #define BOOL_ACCESSORS(holder, field, name, offset) \ 101 #define BOOL_ACCESSORS(holder, field, name, offset) \
102 bool holder::name() { \ 102 bool holder::name() { \
103 return BooleanBit::get(field(), offset); \ 103 return BooleanBit::get(field(), offset); \
104 } \ 104 } \
105 void holder::set_##name(bool value) { \ 105 void holder::set_##name(bool value) { \
106 set_##field(BooleanBit::set(field(), offset, value)); \ 106 set_##field(BooleanBit::set(field(), offset, value)); \
107 } 107 }
108 108
109 109
110 #define GET_HEAP (HeapObject::cast(this)->GetHeap())
111
112
110 bool Object::IsInstanceOf(FunctionTemplateInfo* expected) { 113 bool Object::IsInstanceOf(FunctionTemplateInfo* expected) {
111 // There is a constraint on the object; check. 114 // There is a constraint on the object; check.
112 if (!this->IsJSObject()) return false; 115 if (!this->IsJSObject()) return false;
113 // Fetch the constructor function of the object. 116 // Fetch the constructor function of the object.
114 Object* cons_obj = JSObject::cast(this)->map()->constructor(); 117 Object* cons_obj = JSObject::cast(this)->map()->constructor();
115 if (!cons_obj->IsJSFunction()) return false; 118 if (!cons_obj->IsJSFunction()) return false;
116 JSFunction* fun = JSFunction::cast(cons_obj); 119 JSFunction* fun = JSFunction::cast(cons_obj);
117 // Iterate through the chain of inheriting function templates to 120 // Iterate through the chain of inheriting function templates to
118 // see if the required one occurs. 121 // see if the required one occurs.
119 for (Object* type = fun->shared()->function_data(); 122 for (Object* type = fun->shared()->function_data();
(...skipping 322 matching lines...) Expand 10 before | Expand all | Expand 10 after
442 } 445 }
443 446
444 447
445 bool Object::IsDescriptorArray() { 448 bool Object::IsDescriptorArray() {
446 return IsFixedArray(); 449 return IsFixedArray();
447 } 450 }
448 451
449 452
450 bool Object::IsContext() { 453 bool Object::IsContext() {
451 return Object::IsHeapObject() 454 return Object::IsHeapObject()
452 && (HeapObject::cast(this)->map() == HEAP->context_map() || 455 && (HeapObject::cast(this)->map() == GET_HEAP->context_map() ||
453 HeapObject::cast(this)->map() == HEAP->catch_context_map() || 456 HeapObject::cast(this)->map() == GET_HEAP->catch_context_map() ||
454 HeapObject::cast(this)->map() == HEAP->global_context_map()); 457 HeapObject::cast(this)->map() == GET_HEAP->global_context_map());
455 } 458 }
456 459
457 460
458 bool Object::IsCatchContext() { 461 bool Object::IsCatchContext() {
459 return Object::IsHeapObject() 462 return Object::IsHeapObject()
460 && HeapObject::cast(this)->map() == HEAP->catch_context_map(); 463 && HeapObject::cast(this)->map() == GET_HEAP->catch_context_map();
461 } 464 }
462 465
463 466
464 bool Object::IsGlobalContext() { 467 bool Object::IsGlobalContext() {
465 return Object::IsHeapObject() 468 return Object::IsHeapObject()
466 && HeapObject::cast(this)->map() == HEAP->global_context_map(); 469 && HeapObject::cast(this)->map() == GET_HEAP->global_context_map();
467 } 470 }
468 471
469 472
470 bool Object::IsJSFunction() { 473 bool Object::IsJSFunction() {
471 return Object::IsHeapObject() 474 return Object::IsHeapObject()
472 && HeapObject::cast(this)->map()->instance_type() == JS_FUNCTION_TYPE; 475 && HeapObject::cast(this)->map()->instance_type() == JS_FUNCTION_TYPE;
473 } 476 }
474 477
475 478
476 template <> inline bool Is<JSFunction>(Object* obj) { 479 template <> inline bool Is<JSFunction>(Object* obj) {
(...skipping 62 matching lines...) Expand 10 before | Expand all | Expand 10 after
539 } 542 }
540 543
541 544
542 template <> inline bool Is<JSArray>(Object* obj) { 545 template <> inline bool Is<JSArray>(Object* obj) {
543 return obj->IsJSArray(); 546 return obj->IsJSArray();
544 } 547 }
545 548
546 549
547 bool Object::IsHashTable() { 550 bool Object::IsHashTable() {
548 return Object::IsHeapObject() 551 return Object::IsHeapObject()
549 && HeapObject::cast(this)->map() == HEAP->hash_table_map(); 552 && HeapObject::cast(this)->map() == GET_HEAP->hash_table_map();
550 } 553 }
551 554
552 555
553 bool Object::IsDictionary() { 556 bool Object::IsDictionary() {
554 return IsHashTable() && this != HEAP->symbol_table(); 557 return IsHashTable() && this != GET_HEAP->symbol_table();
555 } 558 }
556 559
557 560
558 bool Object::IsSymbolTable() { 561 bool Object::IsSymbolTable() {
559 return IsHashTable() && this == HEAP->raw_unchecked_symbol_table(); 562 return IsHashTable() && this == GET_HEAP->raw_unchecked_symbol_table();
560 } 563 }
561 564
562 565
563 bool Object::IsJSFunctionResultCache() { 566 bool Object::IsJSFunctionResultCache() {
564 if (!IsFixedArray()) return false; 567 if (!IsFixedArray()) return false;
565 FixedArray* self = FixedArray::cast(this); 568 FixedArray* self = FixedArray::cast(this);
566 int length = self->length(); 569 int length = self->length();
567 if (length < JSFunctionResultCache::kEntriesIndex) return false; 570 if (length < JSFunctionResultCache::kEntriesIndex) return false;
568 if ((length - JSFunctionResultCache::kEntriesIndex) 571 if ((length - JSFunctionResultCache::kEntriesIndex)
569 % JSFunctionResultCache::kEntrySize != 0) { 572 % JSFunctionResultCache::kEntrySize != 0) {
(...skipping 161 matching lines...) Expand 10 before | Expand all | Expand 10 after
731 (reinterpret_cast<byte*>(p) + offset - kHeapObjectTag) 734 (reinterpret_cast<byte*>(p) + offset - kHeapObjectTag)
732 735
733 #define READ_FIELD(p, offset) \ 736 #define READ_FIELD(p, offset) \
734 (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset))) 737 (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)))
735 738
736 #define WRITE_FIELD(p, offset, value) \ 739 #define WRITE_FIELD(p, offset, value) \
737 (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)) = value) 740 (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)) = value)
738 741
739 // TODO(isolates): Pass heap in to these macros. 742 // TODO(isolates): Pass heap in to these macros.
740 #define WRITE_BARRIER(object, offset) \ 743 #define WRITE_BARRIER(object, offset) \
741 HEAP->RecordWrite(object->address(), offset); 744 object->GetHeap()->RecordWrite(object->address(), offset);
742 745
743 // CONDITIONAL_WRITE_BARRIER must be issued after the actual 746 // CONDITIONAL_WRITE_BARRIER must be issued after the actual
744 // write due to the assert validating the written value. 747 // write due to the assert validating the written value.
745 #define CONDITIONAL_WRITE_BARRIER(object, offset, mode) \ 748 #define CONDITIONAL_WRITE_BARRIER(object, offset, mode) \
746 if (mode == UPDATE_WRITE_BARRIER) { \ 749 if (mode == UPDATE_WRITE_BARRIER) { \
747 HEAP->RecordWrite(object->address(), offset); \ 750 object->GetHeap()->RecordWrite(object->address(), offset); \
748 } else { \ 751 } else { \
749 ASSERT(mode == SKIP_WRITE_BARRIER); \ 752 ASSERT(mode == SKIP_WRITE_BARRIER); \
750 ASSERT(HEAP->InNewSpace(object) || \ 753 ASSERT(object->GetHeap()->InNewSpace(object) || \
751 !HEAP->InNewSpace(READ_FIELD(object, offset)) || \ 754 !object->GetHeap()->InNewSpace(READ_FIELD(object, offset)) || \
752 Page::FromAddress(object->address())-> \ 755 Page::FromAddress(object->address())-> \
753 IsRegionDirty(object->address() + offset)); \ 756 IsRegionDirty(object->address() + offset)); \
754 } 757 }
755 758
756 #define READ_DOUBLE_FIELD(p, offset) \ 759 #define READ_DOUBLE_FIELD(p, offset) \
757 (*reinterpret_cast<double*>(FIELD_ADDR(p, offset))) 760 (*reinterpret_cast<double*>(FIELD_ADDR(p, offset)))
758 761
759 #define WRITE_DOUBLE_FIELD(p, offset, value) \ 762 #define WRITE_DOUBLE_FIELD(p, offset, value) \
760 (*reinterpret_cast<double*>(FIELD_ADDR(p, offset)) = value) 763 (*reinterpret_cast<double*>(FIELD_ADDR(p, offset)) = value)
761 764
(...skipping 274 matching lines...) Expand 10 before | Expand all | Expand 10 after
1036 void HeapObject::VerifyObjectField(int offset) { 1039 void HeapObject::VerifyObjectField(int offset) {
1037 VerifyPointer(READ_FIELD(this, offset)); 1040 VerifyPointer(READ_FIELD(this, offset));
1038 } 1041 }
1039 1042
1040 void HeapObject::VerifySmiField(int offset) { 1043 void HeapObject::VerifySmiField(int offset) {
1041 ASSERT(READ_FIELD(this, offset)->IsSmi()); 1044 ASSERT(READ_FIELD(this, offset)->IsSmi());
1042 } 1045 }
1043 #endif 1046 #endif
1044 1047
1045 1048
1049 Heap* HeapObject::GetHeap() {
1050 // During GC, the map pointer in HeapObject is used in various ways that
1051 // prevent us from retrieving Heap from the map.
1052 // Assert that we are not in GC, implement GC code in a way that it doesn't
1053 // pull heap from the map.
1054 ASSERT(HEAP->gc_state() == Heap::NOT_IN_GC);
1055 return map()->map()->heap();
1056 }
1057
1058
1046 Map* HeapObject::map() { 1059 Map* HeapObject::map() {
1047 return map_word().ToMap(); 1060 return map_word().ToMap();
1048 } 1061 }
1049 1062
1050 1063
1051 void HeapObject::set_map(Map* value) { 1064 void HeapObject::set_map(Map* value) {
1052 set_map_word(MapWord::FromMap(value)); 1065 set_map_word(MapWord::FromMap(value));
1053 } 1066 }
1054 1067
1055 1068
(...skipping 116 matching lines...) Expand 10 before | Expand all | Expand 10 after
1172 // In the assert below Dictionary is covered under FixedArray. 1185 // In the assert below Dictionary is covered under FixedArray.
1173 ASSERT(value->IsFixedArray() || value->IsPixelArray() || 1186 ASSERT(value->IsFixedArray() || value->IsPixelArray() ||
1174 value->IsExternalArray()); 1187 value->IsExternalArray());
1175 WRITE_FIELD(this, kElementsOffset, value); 1188 WRITE_FIELD(this, kElementsOffset, value);
1176 CONDITIONAL_WRITE_BARRIER(this, kElementsOffset, mode); 1189 CONDITIONAL_WRITE_BARRIER(this, kElementsOffset, mode);
1177 } 1190 }
1178 1191
1179 1192
1180 void JSObject::initialize_properties() { 1193 void JSObject::initialize_properties() {
1181 ASSERT(!HEAP->InNewSpace(HEAP->empty_fixed_array())); 1194 ASSERT(!HEAP->InNewSpace(HEAP->empty_fixed_array()));
1182 WRITE_FIELD(this, kPropertiesOffset, HEAP->empty_fixed_array()); 1195 WRITE_FIELD(this, kPropertiesOffset, GetHeap()->empty_fixed_array());
1183 } 1196 }
1184 1197
1185 1198
1186 void JSObject::initialize_elements() { 1199 void JSObject::initialize_elements() {
1187 ASSERT(map()->has_fast_elements()); 1200 ASSERT(map()->has_fast_elements());
1188 ASSERT(!HEAP->InNewSpace(HEAP->empty_fixed_array())); 1201 ASSERT(!HEAP->InNewSpace(HEAP->empty_fixed_array()));
1189 WRITE_FIELD(this, kElementsOffset, HEAP->empty_fixed_array()); 1202 WRITE_FIELD(this, kElementsOffset, GetHeap()->empty_fixed_array());
1190 } 1203 }
1191 1204
1192 1205
1193 Object* JSObject::ResetElements() { 1206 Object* JSObject::ResetElements() {
1194 Object* obj = map()->GetFastElementsMap(); 1207 Object* obj = map()->GetFastElementsMap();
1195 if (obj->IsFailure()) return obj; 1208 if (obj->IsFailure()) return obj;
1196 set_map(Map::cast(obj)); 1209 set_map(Map::cast(obj));
1197 initialize_elements(); 1210 initialize_elements();
1198 return this; 1211 return this;
1199 } 1212 }
(...skipping 132 matching lines...) Expand 10 before | Expand all | Expand 10 after
1332 ASSERT(index < 0); 1345 ASSERT(index < 0);
1333 int offset = map()->instance_size() + (index * kPointerSize); 1346 int offset = map()->instance_size() + (index * kPointerSize);
1334 WRITE_FIELD(this, offset, value); 1347 WRITE_FIELD(this, offset, value);
1335 CONDITIONAL_WRITE_BARRIER(this, offset, mode); 1348 CONDITIONAL_WRITE_BARRIER(this, offset, mode);
1336 return value; 1349 return value;
1337 } 1350 }
1338 1351
1339 1352
1340 1353
1341 void JSObject::InitializeBody(int object_size) { 1354 void JSObject::InitializeBody(int object_size) {
1342 Object* value = HEAP->undefined_value(); 1355 Object* value = GetHeap()->undefined_value();
1343 for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) { 1356 for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
1344 WRITE_FIELD(this, offset, value); 1357 WRITE_FIELD(this, offset, value);
1345 } 1358 }
1346 } 1359 }
1347 1360
1348 1361
1349 bool JSObject::HasFastProperties() { 1362 bool JSObject::HasFastProperties() {
1350 return !properties()->IsDictionary(); 1363 return !properties()->IsDictionary();
1351 } 1364 }
1352 1365
1353 1366
1354 int JSObject::MaxFastProperties() { 1367 int JSObject::MaxFastProperties() {
1355 // Allow extra fast properties if the object has more than 1368 // Allow extra fast properties if the object has more than
1356 // kMaxFastProperties in-object properties. When this is the case, 1369 // kMaxFastProperties in-object properties. When this is the case,
1357 // it is very unlikely that the object is being used as a dictionary 1370 // it is very unlikely that the object is being used as a dictionary
1358 // and there is a good chance that allowing more map transitions 1371 // and there is a good chance that allowing more map transitions
1359 // will be worth it. 1372 // will be worth it.
1360 return Max(map()->inobject_properties(), kMaxFastProperties); 1373 return Max(map()->inobject_properties(), kMaxFastProperties);
1361 } 1374 }
1362 1375
1363 1376
1364 void Struct::InitializeBody(int object_size) { 1377 void Struct::InitializeBody(int object_size) {
1365 Object* value = HEAP->undefined_value(); 1378 Object* value = GetHeap()->undefined_value();
1366 for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) { 1379 for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
1367 WRITE_FIELD(this, offset, value); 1380 WRITE_FIELD(this, offset, value);
1368 } 1381 }
1369 } 1382 }
1370 1383
1371 1384
1372 bool Object::ToArrayIndex(uint32_t* index) { 1385 bool Object::ToArrayIndex(uint32_t* index) {
1373 if (IsSmi()) { 1386 if (IsSmi()) {
1374 int value = Smi::cast(this)->value(); 1387 int value = Smi::cast(this)->value();
1375 if (value < 0) return false; 1388 if (value < 0) return false;
(...skipping 40 matching lines...) Expand 10 before | Expand all | Expand 10 after
1416 1429
1417 void FixedArray::set(int index, Object* value) { 1430 void FixedArray::set(int index, Object* value) {
1418 ASSERT(index >= 0 && index < this->length()); 1431 ASSERT(index >= 0 && index < this->length());
1419 int offset = kHeaderSize + index * kPointerSize; 1432 int offset = kHeaderSize + index * kPointerSize;
1420 WRITE_FIELD(this, offset, value); 1433 WRITE_FIELD(this, offset, value);
1421 WRITE_BARRIER(this, offset); 1434 WRITE_BARRIER(this, offset);
1422 } 1435 }
1423 1436
1424 1437
1425 WriteBarrierMode HeapObject::GetWriteBarrierMode(const AssertNoAllocation&) { 1438 WriteBarrierMode HeapObject::GetWriteBarrierMode(const AssertNoAllocation&) {
1426 if (HEAP->InNewSpace(this)) return SKIP_WRITE_BARRIER; 1439 if (GetHeap()->InNewSpace(this)) return SKIP_WRITE_BARRIER;
1427 return UPDATE_WRITE_BARRIER; 1440 return UPDATE_WRITE_BARRIER;
1428 } 1441 }
1429 1442
1430 1443
1431 void FixedArray::set(int index, 1444 void FixedArray::set(int index,
1432 Object* value, 1445 Object* value,
1433 WriteBarrierMode mode) { 1446 WriteBarrierMode mode) {
1434 ASSERT(index >= 0 && index < this->length()); 1447 ASSERT(index >= 0 && index < this->length());
1435 int offset = kHeaderSize + index * kPointerSize; 1448 int offset = kHeaderSize + index * kPointerSize;
1436 WRITE_FIELD(this, offset, value); 1449 WRITE_FIELD(this, offset, value);
1437 CONDITIONAL_WRITE_BARRIER(this, offset, mode); 1450 CONDITIONAL_WRITE_BARRIER(this, offset, mode);
1438 } 1451 }
1439 1452
1440 1453
1441 void FixedArray::fast_set(FixedArray* array, int index, Object* value) { 1454 void FixedArray::fast_set(FixedArray* array, int index, Object* value) {
1442 ASSERT(index >= 0 && index < array->length()); 1455 ASSERT(index >= 0 && index < array->length());
1443 ASSERT(!HEAP->InNewSpace(value)); 1456 ASSERT(!HEAP->InNewSpace(value));
1444 WRITE_FIELD(array, kHeaderSize + index * kPointerSize, value); 1457 WRITE_FIELD(array, kHeaderSize + index * kPointerSize, value);
1445 } 1458 }
1446 1459
1447 1460
1448 void FixedArray::set_undefined(int index) { 1461 void FixedArray::set_undefined(int index) {
1462 set_undefined(GetHeap(), index);
1463 }
1464
1465
1466 void FixedArray::set_undefined(Heap* heap, int index) {
1449 ASSERT(index >= 0 && index < this->length()); 1467 ASSERT(index >= 0 && index < this->length());
1450 ASSERT(!HEAP->InNewSpace(HEAP->undefined_value())); 1468 ASSERT(!heap->InNewSpace(heap->undefined_value()));
1451 WRITE_FIELD(this, kHeaderSize + index * kPointerSize, 1469 WRITE_FIELD(this, kHeaderSize + index * kPointerSize,
1452 HEAP->undefined_value()); 1470 heap->undefined_value());
1453 } 1471 }
1454 1472
1455 1473
1456 void FixedArray::set_null(int index) { 1474 void FixedArray::set_null(int index) {
1475 set_null(GetHeap(),index);
1476 }
1477
1478
1479 void FixedArray::set_null(Heap* heap, int index) {
1457 ASSERT(index >= 0 && index < this->length()); 1480 ASSERT(index >= 0 && index < this->length());
1458 ASSERT(!HEAP->InNewSpace(HEAP->null_value())); 1481 ASSERT(!heap->InNewSpace(heap->null_value()));
1459 WRITE_FIELD(this, kHeaderSize + index * kPointerSize, HEAP->null_value()); 1482 WRITE_FIELD(this, kHeaderSize + index * kPointerSize, heap->null_value());
1460 } 1483 }
1461 1484
1462 1485
1463 void FixedArray::set_the_hole(int index) { 1486 void FixedArray::set_the_hole(int index) {
1464 ASSERT(index >= 0 && index < this->length()); 1487 ASSERT(index >= 0 && index < this->length());
1465 ASSERT(!HEAP->InNewSpace(HEAP->the_hole_value())); 1488 ASSERT(!HEAP->InNewSpace(HEAP->the_hole_value()));
1466 WRITE_FIELD(this, kHeaderSize + index * kPointerSize, HEAP->the_hole_value()); 1489 WRITE_FIELD(this,
1490 kHeaderSize + index * kPointerSize,
1491 GetHeap()->the_hole_value());
1467 } 1492 }
1468 1493
1469 1494
1470 Object** FixedArray::data_start() { 1495 Object** FixedArray::data_start() {
1471 return HeapObject::RawField(this, kHeaderSize); 1496 return HeapObject::RawField(this, kHeaderSize);
1472 } 1497 }
1473 1498
1474 1499
1475 bool DescriptorArray::IsEmpty() { 1500 bool DescriptorArray::IsEmpty() {
1476 ASSERT(this == HEAP->empty_descriptor_array() || 1501 ASSERT(this->length() > kFirstIndex ||
1477 this->length() > 2); 1502 this == HEAP->empty_descriptor_array());
1478 return this == HEAP->empty_descriptor_array(); 1503 return length() <= kFirstIndex;
1479 } 1504 }
1480 1505
1481 1506
1482 void DescriptorArray::fast_swap(FixedArray* array, int first, int second) { 1507 void DescriptorArray::fast_swap(FixedArray* array, int first, int second) {
1483 Object* tmp = array->get(first); 1508 Object* tmp = array->get(first);
1484 fast_set(array, first, array->get(second)); 1509 fast_set(array, first, array->get(second));
1485 fast_set(array, second, tmp); 1510 fast_set(array, second, tmp);
1486 } 1511 }
1487 1512
1488 1513
(...skipping 406 matching lines...) Expand 10 before | Expand all | Expand 10 after
1895 1920
1896 void JSFunctionResultCache::MakeZeroSize() { 1921 void JSFunctionResultCache::MakeZeroSize() {
1897 set(kFingerIndex, Smi::FromInt(kEntriesIndex)); 1922 set(kFingerIndex, Smi::FromInt(kEntriesIndex));
1898 set(kCacheSizeIndex, Smi::FromInt(kEntriesIndex)); 1923 set(kCacheSizeIndex, Smi::FromInt(kEntriesIndex));
1899 } 1924 }
1900 1925
1901 1926
1902 void JSFunctionResultCache::Clear() { 1927 void JSFunctionResultCache::Clear() {
1903 int cache_size = Smi::cast(get(kCacheSizeIndex))->value(); 1928 int cache_size = Smi::cast(get(kCacheSizeIndex))->value();
1904 Object** entries_start = RawField(this, OffsetOfElementAt(kEntriesIndex)); 1929 Object** entries_start = RawField(this, OffsetOfElementAt(kEntriesIndex));
1905 MemsetPointer(entries_start, HEAP->the_hole_value(), cache_size); 1930 MemsetPointer(entries_start, GetHeap()->the_hole_value(), cache_size);
1906 MakeZeroSize(); 1931 MakeZeroSize();
1907 } 1932 }
1908 1933
1909 1934
1910 byte ByteArray::get(int index) { 1935 byte ByteArray::get(int index) {
1911 ASSERT(index >= 0 && index < this->length()); 1936 ASSERT(index >= 0 && index < this->length());
1912 return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize); 1937 return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
1913 } 1938 }
1914 1939
1915 1940
(...skipping 152 matching lines...) Expand 10 before | Expand all | Expand 10 after
2068 void ExternalFloatArray::set(int index, float value) { 2093 void ExternalFloatArray::set(int index, float value) {
2069 ASSERT((index >= 0) && (index < this->length())); 2094 ASSERT((index >= 0) && (index < this->length()));
2070 float* ptr = static_cast<float*>(external_pointer()); 2095 float* ptr = static_cast<float*>(external_pointer());
2071 ptr[index] = value; 2096 ptr[index] = value;
2072 } 2097 }
2073 2098
2074 inline Scavenger Map::scavenger() { 2099 inline Scavenger Map::scavenger() {
2075 Scavenger callback = reinterpret_cast<Scavenger>( 2100 Scavenger callback = reinterpret_cast<Scavenger>(
2076 READ_INTPTR_FIELD(this, kScavengerCallbackOffset)); 2101 READ_INTPTR_FIELD(this, kScavengerCallbackOffset));
2077 2102
2103 ASSERT(instance_type() != MAP_TYPE); // MAP_TYPE has Heap pointer instead.
2078 ASSERT(callback == Heap::GetScavenger(instance_type(), 2104 ASSERT(callback == Heap::GetScavenger(instance_type(),
2079 instance_size())); 2105 instance_size()));
2080 2106
2081 return callback; 2107 return callback;
2082 } 2108 }
2083 2109
2084 inline void Map::set_scavenger(Scavenger callback) { 2110 inline void Map::set_scavenger(Scavenger callback) {
2111 ASSERT(instance_type() != MAP_TYPE); // MAP_TYPE has Heap pointer instead.
2085 WRITE_INTPTR_FIELD(this, 2112 WRITE_INTPTR_FIELD(this,
2086 kScavengerCallbackOffset, 2113 kScavengerCallbackOffset,
2087 reinterpret_cast<intptr_t>(callback)); 2114 reinterpret_cast<intptr_t>(callback));
2088 } 2115 }
2089 2116
2090 int Map::instance_size() { 2117 int Map::instance_size() {
2091 return READ_BYTE_FIELD(this, kInstanceSizeOffset) << kPointerSizeLog2; 2118 return READ_BYTE_FIELD(this, kInstanceSizeOffset) << kPointerSizeLog2;
2092 } 2119 }
2093 2120
2094 2121
(...skipping 292 matching lines...) Expand 10 before | Expand all | Expand 10 after
2387 HeapObject* code = HeapObject::FromAddress(address - Code::kHeaderSize); 2414 HeapObject* code = HeapObject::FromAddress(address - Code::kHeaderSize);
2388 // GetCodeFromTargetAddress might be called when marking objects during mark 2415 // GetCodeFromTargetAddress might be called when marking objects during mark
2389 // sweep. reinterpret_cast is therefore used instead of the more appropriate 2416 // sweep. reinterpret_cast is therefore used instead of the more appropriate
2390 // Code::cast. Code::cast does not work when the object's map is 2417 // Code::cast. Code::cast does not work when the object's map is
2391 // marked. 2418 // marked.
2392 Code* result = reinterpret_cast<Code*>(code); 2419 Code* result = reinterpret_cast<Code*>(code);
2393 return result; 2420 return result;
2394 } 2421 }
2395 2422
2396 2423
2424 Heap* Map::heap() {
2425 ASSERT(instance_type() == MAP_TYPE);
2426 ASSERT(this == map());
2427 Heap* heap = reinterpret_cast<Heap*>(
2428 READ_INTPTR_FIELD(this, kScavengerCallbackOffset));
2429 ASSERT(heap != NULL);
2430 ASSERT(heap->isolate() == Isolate::Current());
2431 return heap;
2432 }
2433
2434
2435 void Map::set_heap(Heap* heap) {
2436 ASSERT(heap != NULL);
2437 ASSERT(heap->isolate() == Isolate::Current());
2438 ASSERT(instance_type() == MAP_TYPE);
2439 // WRITE_FIELD does not invoke write barrier, but there is no need here.
2440 WRITE_INTPTR_FIELD(this,
2441 kScavengerCallbackOffset,
2442 reinterpret_cast<intptr_t>(heap));
2443 }
2444
2445
2397 Object* Map::prototype() { 2446 Object* Map::prototype() {
2398 return READ_FIELD(this, kPrototypeOffset); 2447 return READ_FIELD(this, kPrototypeOffset);
2399 } 2448 }
2400 2449
2401 2450
2402 void Map::set_prototype(Object* value, WriteBarrierMode mode) { 2451 void Map::set_prototype(Object* value, WriteBarrierMode mode) {
2403 ASSERT(value->IsNull() || value->IsJSObject()); 2452 ASSERT(value->IsNull() || value->IsJSObject());
2404 WRITE_FIELD(this, kPrototypeOffset, value); 2453 WRITE_FIELD(this, kPrototypeOffset, value);
2405 CONDITIONAL_WRITE_BARRIER(this, kPrototypeOffset, mode); 2454 CONDITIONAL_WRITE_BARRIER(this, kPrototypeOffset, mode);
2406 } 2455 }
(...skipping 536 matching lines...) Expand 10 before | Expand all | Expand 10 after
2943 ASSERT(TypeTag() != NOT_COMPILED); 2992 ASSERT(TypeTag() != NOT_COMPILED);
2944 ASSERT(index >= kDataIndex); // Only implementation data can be set this way. 2993 ASSERT(index >= kDataIndex); // Only implementation data can be set this way.
2945 FixedArray::cast(data())->set(index, value); 2994 FixedArray::cast(data())->set(index, value);
2946 } 2995 }
2947 2996
2948 2997
2949 JSObject::ElementsKind JSObject::GetElementsKind() { 2998 JSObject::ElementsKind JSObject::GetElementsKind() {
2950 HeapObject* array = elements(); 2999 HeapObject* array = elements();
2951 if (array->IsFixedArray()) { 3000 if (array->IsFixedArray()) {
2952 // FAST_ELEMENTS or DICTIONARY_ELEMENTS are both stored in a FixedArray. 3001 // FAST_ELEMENTS or DICTIONARY_ELEMENTS are both stored in a FixedArray.
2953 if (array->map() == HEAP->fixed_array_map()) { 3002 if (map()->has_fast_elements()) {
2954 ASSERT(map()->has_fast_elements()); 3003 ASSERT(array->map() == GetHeap()->fixed_array_map());
2955 return FAST_ELEMENTS; 3004 return FAST_ELEMENTS;
2956 } 3005 }
2957 ASSERT(array->IsDictionary()); 3006 ASSERT(array->IsDictionary());
2958 ASSERT(!map()->has_fast_elements());
2959 return DICTIONARY_ELEMENTS; 3007 return DICTIONARY_ELEMENTS;
2960 } 3008 }
2961 ASSERT(!map()->has_fast_elements()); 3009 ASSERT(!map()->has_fast_elements());
2962 if (array->IsExternalArray()) { 3010 if (array->IsExternalArray()) {
2963 switch (array->map()->instance_type()) { 3011 switch (array->map()->instance_type()) {
2964 case EXTERNAL_BYTE_ARRAY_TYPE: 3012 case EXTERNAL_BYTE_ARRAY_TYPE:
2965 return EXTERNAL_BYTE_ELEMENTS; 3013 return EXTERNAL_BYTE_ELEMENTS;
2966 case EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE: 3014 case EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE:
2967 return EXTERNAL_UNSIGNED_BYTE_ELEMENTS; 3015 return EXTERNAL_UNSIGNED_BYTE_ELEMENTS;
2968 case EXTERNAL_SHORT_ARRAY_TYPE: 3016 case EXTERNAL_SHORT_ARRAY_TYPE:
(...skipping 204 matching lines...) Expand 10 before | Expand all | Expand 10 after
3173 3221
3174 PropertyAttributes JSObject::GetPropertyAttribute(String* key) { 3222 PropertyAttributes JSObject::GetPropertyAttribute(String* key) {
3175 return GetPropertyAttributeWithReceiver(this, key); 3223 return GetPropertyAttributeWithReceiver(this, key);
3176 } 3224 }
3177 3225
3178 // TODO(504): this may be useful in other places too where JSGlobalProxy 3226 // TODO(504): this may be useful in other places too where JSGlobalProxy
3179 // is used. 3227 // is used.
3180 Object* JSObject::BypassGlobalProxy() { 3228 Object* JSObject::BypassGlobalProxy() {
3181 if (IsJSGlobalProxy()) { 3229 if (IsJSGlobalProxy()) {
3182 Object* proto = GetPrototype(); 3230 Object* proto = GetPrototype();
3183 if (proto->IsNull()) return HEAP->undefined_value(); 3231 if (proto->IsNull()) return GetHeap()->undefined_value();
3184 ASSERT(proto->IsJSGlobalObject()); 3232 ASSERT(proto->IsJSGlobalObject());
3185 return proto; 3233 return proto;
3186 } 3234 }
3187 return this; 3235 return this;
3188 } 3236 }
3189 3237
3190 3238
3191 bool JSObject::HasHiddenPropertiesObject() { 3239 bool JSObject::HasHiddenPropertiesObject() {
3192 ASSERT(!IsJSGlobalProxy()); 3240 ASSERT(!IsJSGlobalProxy());
3193 return GetPropertyAttributePostInterceptor(this, 3241 return GetPropertyAttributePostInterceptor(this,
3194 HEAP->hidden_symbol(), 3242 GetHeap()->hidden_symbol(),
3195 false) != ABSENT; 3243 false) != ABSENT;
3196 } 3244 }
3197 3245
3198 3246
3199 Object* JSObject::GetHiddenPropertiesObject() { 3247 Object* JSObject::GetHiddenPropertiesObject() {
3200 ASSERT(!IsJSGlobalProxy()); 3248 ASSERT(!IsJSGlobalProxy());
3201 PropertyAttributes attributes; 3249 PropertyAttributes attributes;
3202 return GetLocalPropertyPostInterceptor(this, 3250 return GetLocalPropertyPostInterceptor(this,
3203 HEAP->hidden_symbol(), 3251 GetHeap()->hidden_symbol(),
3204 &attributes); 3252 &attributes);
3205 } 3253 }
3206 3254
3207 3255
3208 Object* JSObject::SetHiddenPropertiesObject(Object* hidden_obj) { 3256 Object* JSObject::SetHiddenPropertiesObject(Object* hidden_obj) {
3209 ASSERT(!IsJSGlobalProxy()); 3257 ASSERT(!IsJSGlobalProxy());
3210 return SetPropertyPostInterceptor(HEAP->hidden_symbol(), 3258 return SetPropertyPostInterceptor(GetHeap()->hidden_symbol(),
3211 hidden_obj, 3259 hidden_obj,
3212 DONT_ENUM); 3260 DONT_ENUM);
3213 } 3261 }
3214 3262
3215 3263
3216 bool JSObject::HasElement(uint32_t index) { 3264 bool JSObject::HasElement(uint32_t index) {
3217 return HasElementWithReceiver(this, index); 3265 return HasElementWithReceiver(this, index);
3218 } 3266 }
3219 3267
3220 3268
(...skipping 46 matching lines...) Expand 10 before | Expand all | Expand 10 after
3267 ASSERT(!key->IsString() || details.IsDeleted() || details.index() > 0); 3315 ASSERT(!key->IsString() || details.IsDeleted() || details.index() > 0);
3268 int index = HashTable<Shape, Key>::EntryToIndex(entry); 3316 int index = HashTable<Shape, Key>::EntryToIndex(entry);
3269 AssertNoAllocation no_gc; 3317 AssertNoAllocation no_gc;
3270 WriteBarrierMode mode = FixedArray::GetWriteBarrierMode(no_gc); 3318 WriteBarrierMode mode = FixedArray::GetWriteBarrierMode(no_gc);
3271 FixedArray::set(index, key, mode); 3319 FixedArray::set(index, key, mode);
3272 FixedArray::set(index+1, value, mode); 3320 FixedArray::set(index+1, value, mode);
3273 FixedArray::fast_set(this, index+2, details.AsSmi()); 3321 FixedArray::fast_set(this, index+2, details.AsSmi());
3274 } 3322 }
3275 3323
3276 3324
// Resets this map's code cache to the shared empty fixed array.
// |heap| is passed explicitly (rather than using the HEAP macro) so that
// GC-internal callers can supply the correct per-isolate heap directly.
void Map::ClearCodeCache(Heap* heap) {
  // No write barrier is needed since empty_fixed_array is not in new space.
  // Please note this function is used during marking:
  // - MarkCompactCollector::MarkUnmarkedObject
  // The raw_unchecked_ accessor is required here because the object graph
  // may be in a marking-time state where checked casts would fail.
  ASSERT(!heap->InNewSpace(heap->raw_unchecked_empty_fixed_array()));
  WRITE_FIELD(this, kCodeCacheOffset, heap->raw_unchecked_empty_fixed_array());
}
3284 3332
3285 3333
3286 void JSArray::EnsureSize(int required_size) { 3334 void JSArray::EnsureSize(int required_size) {
3287 ASSERT(HasFastElements()); 3335 ASSERT(HasFastElements());
3288 FixedArray* elts = FixedArray::cast(elements()); 3336 FixedArray* elts = FixedArray::cast(elements());
3289 const int kArraySizeThatFitsComfortablyInNewSpace = 128; 3337 const int kArraySizeThatFitsComfortablyInNewSpace = 128;
3290 if (elts->length() < required_size) { 3338 if (elts->length() < required_size) {
3291 // Doubling in size would be overkill, but leave some slack to avoid 3339 // Doubling in size would be overkill, but leave some slack to avoid
3292 // constantly growing. 3340 // constantly growing.
3293 Expand(required_size + (required_size >> 3)); 3341 Expand(required_size + (required_size >> 3));
3294 // It's a performance benefit to keep a frequently used array in new-space. 3342 // It's a performance benefit to keep a frequently used array in new-space.
3295 } else if (!HEAP->new_space()->Contains(elts) && 3343 } else if (!GetHeap()->new_space()->Contains(elts) &&
3296 required_size < kArraySizeThatFitsComfortablyInNewSpace) { 3344 required_size < kArraySizeThatFitsComfortablyInNewSpace) {
3297 // Expand will allocate a new backing store in new space even if the size 3345 // Expand will allocate a new backing store in new space even if the size
3298 // we asked for isn't larger than what we had before. 3346 // we asked for isn't larger than what we had before.
3299 Expand(required_size); 3347 Expand(required_size);
3300 } 3348 }
3301 } 3349 }
3302 3350
3303 3351
// Sets the array's length field from a Smi.  A Smi is an immediate value,
// not a heap pointer, so the write barrier can safely be skipped.
void JSArray::set_length(Smi* length) {
  set_length(static_cast<Object*>(length), SKIP_WRITE_BARRIER);
}
3307 3355
3308 3356
// Replaces the array's contents with |storage|, updating length to match.
// Length is written first; both setters go through the barrier-aware
// overloads above.
void JSArray::SetContent(FixedArray* storage) {
  set_length(Smi::FromInt(storage->length()));
  set_elements(storage);
}
3313 3361
3314 3362
// Returns a copy of this fixed array allocated on this object's heap.
// The empty array is returned as-is — no allocation is needed and the
// empty array is shared.  Note the raw Object* return: the allocation in
// CopyFixedArray may presumably fail and yield a failure object — callers
// are expected to check (TODO confirm against Heap::CopyFixedArray).
Object* FixedArray::Copy() {
  if (length() == 0) return this;
  return GetHeap()->CopyFixedArray(this);
}
3319 3367
3320 3368
// Links this Relocatable onto the current isolate's intrusive stack of
// live relocatable objects (LIFO: new head, previous head saved in prev_).
// The matching destructor is expected to pop it; not visible in this chunk.
Relocatable::Relocatable() {
  Isolate* isolate = Isolate::Current();
  prev_ = isolate->relocatable_top();
  isolate->set_relocatable_top(this);
}
3326 3374
3327 3375
(...skipping 21 matching lines...) Expand all
3349 #undef WRITE_INT_FIELD 3397 #undef WRITE_INT_FIELD
3350 #undef READ_SHORT_FIELD 3398 #undef READ_SHORT_FIELD
3351 #undef WRITE_SHORT_FIELD 3399 #undef WRITE_SHORT_FIELD
3352 #undef READ_BYTE_FIELD 3400 #undef READ_BYTE_FIELD
3353 #undef WRITE_BYTE_FIELD 3401 #undef WRITE_BYTE_FIELD
3354 3402
3355 3403
3356 } } // namespace v8::internal 3404 } } // namespace v8::internal
3357 3405
3358 #endif // V8_OBJECTS_INL_H_ 3406 #endif // V8_OBJECTS_INL_H_
OLDNEW
« no previous file with comments | « src/objects.cc ('k') | src/serialize.cc » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698