Chromium Code Reviews

Side by Side Diff: src/objects-inl.h

Issue 7945009: Merge experimental/gc branch to the bleeding_edge. (Closed)
Base URL: http://v8.googlecode.com/svn/branches/bleeding_edge/
Patch Set: Created 9 years, 3 months ago
1 // Copyright 2011 the V8 project authors. All rights reserved. 1 // Copyright 2011 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 25 matching lines...)
36 #define V8_OBJECTS_INL_H_ 36 #define V8_OBJECTS_INL_H_
37 37
38 #include "elements.h" 38 #include "elements.h"
39 #include "objects.h" 39 #include "objects.h"
40 #include "contexts.h" 40 #include "contexts.h"
41 #include "conversions-inl.h" 41 #include "conversions-inl.h"
42 #include "heap.h" 42 #include "heap.h"
43 #include "isolate.h" 43 #include "isolate.h"
44 #include "property.h" 44 #include "property.h"
45 #include "spaces.h" 45 #include "spaces.h"
46 #include "store-buffer.h"
46 #include "v8memory.h" 47 #include "v8memory.h"
47 48
49 #include "incremental-marking.h"
50
48 namespace v8 { 51 namespace v8 {
49 namespace internal { 52 namespace internal {
50 53
51 PropertyDetails::PropertyDetails(Smi* smi) { 54 PropertyDetails::PropertyDetails(Smi* smi) {
52 value_ = smi->value(); 55 value_ = smi->value();
53 } 56 }
54 57
55 58
56 Smi* PropertyDetails::AsSmi() { 59 Smi* PropertyDetails::AsSmi() {
57 return Smi::FromInt(value_); 60 return Smi::FromInt(value_);
(...skipping 15 matching lines...)
73 76
74 #define INT_ACCESSORS(holder, name, offset) \ 77 #define INT_ACCESSORS(holder, name, offset) \
75 int holder::name() { return READ_INT_FIELD(this, offset); } \ 78 int holder::name() { return READ_INT_FIELD(this, offset); } \
76 void holder::set_##name(int value) { WRITE_INT_FIELD(this, offset, value); } 79 void holder::set_##name(int value) { WRITE_INT_FIELD(this, offset, value); }
77 80
78 81
79 #define ACCESSORS(holder, name, type, offset) \ 82 #define ACCESSORS(holder, name, type, offset) \
80 type* holder::name() { return type::cast(READ_FIELD(this, offset)); } \ 83 type* holder::name() { return type::cast(READ_FIELD(this, offset)); } \
81 void holder::set_##name(type* value, WriteBarrierMode mode) { \ 84 void holder::set_##name(type* value, WriteBarrierMode mode) { \
82 WRITE_FIELD(this, offset, value); \ 85 WRITE_FIELD(this, offset, value); \
83 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, mode); \ 86 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode); \
84 } 87 }
85 88
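
The ACCESSORS family stamps out a typed getter plus a setter that funnels every store through the write barrier, so no call site can forget it. A minimal compilable toy of the same pattern, using a plain member instead of an offset into a heap object (the logging "barrier" is a stand-in, not V8's macro):

#include <cstdio>

// Toy of the ACCESSORS pattern: one macro generates the getter and a
// setter that routes every store through a barrier hook.
#define TOY_ACCESSORS(holder, name, type)                  \
  type holder::name() { return name##_; }                  \
  void holder::set_##name(type value) {                    \
    name##_ = value;                                       \
    std::printf("barrier hit: %s\n", #holder "::" #name);  \
  }

struct Map;
struct JSObjectToy {
  Map* map();
  void set_map(Map* value);
 private:
  Map* map_ = nullptr;
};

TOY_ACCESSORS(JSObjectToy, map, Map*)

int main() {
  JSObjectToy o;
  o.set_map(nullptr);  // prints "barrier hit: JSObjectToy::map"
  return 0;
}
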
86 89
87 // GC-safe accessors do not use HeapObject::GetHeap(), but access TLS instead. 90 // GC-safe accessors do not use HeapObject::GetHeap(), but access TLS instead.
88 #define ACCESSORS_GCSAFE(holder, name, type, offset) \ 91 #define ACCESSORS_GCSAFE(holder, name, type, offset) \
89 type* holder::name() { return type::cast(READ_FIELD(this, offset)); } \ 92 type* holder::name() { return type::cast(READ_FIELD(this, offset)); } \
90 void holder::set_##name(type* value, WriteBarrierMode mode) { \ 93 void holder::set_##name(type* value, WriteBarrierMode mode) { \
91 WRITE_FIELD(this, offset, value); \ 94 WRITE_FIELD(this, offset, value); \
92 CONDITIONAL_WRITE_BARRIER(HEAP, this, offset, mode); \ 95 CONDITIONAL_WRITE_BARRIER(HEAP, this, offset, value, mode); \
93 } 96 }
94 97
95 98
96 #define SMI_ACCESSORS(holder, name, offset) \ 99 #define SMI_ACCESSORS(holder, name, offset) \
97 int holder::name() { \ 100 int holder::name() { \
98 Object* value = READ_FIELD(this, offset); \ 101 Object* value = READ_FIELD(this, offset); \
99 return Smi::cast(value)->value(); \ 102 return Smi::cast(value)->value(); \
100 } \ 103 } \
101 void holder::set_##name(int value) { \ 104 void holder::set_##name(int value) { \
102 WRITE_FIELD(this, offset, Smi::FromInt(value)); \ 105 WRITE_FIELD(this, offset, Smi::FromInt(value)); \
(...skipping 37 matching lines...)
140 bool Object::IsSmi() { 143 bool Object::IsSmi() {
141 return HAS_SMI_TAG(this); 144 return HAS_SMI_TAG(this);
142 } 145 }
143 146
144 147
145 bool Object::IsHeapObject() { 148 bool Object::IsHeapObject() {
146 return Internals::HasHeapObjectTag(this); 149 return Internals::HasHeapObjectTag(this);
147 } 150 }
148 151
149 152
153 bool Object::NonFailureIsHeapObject() {
154 ASSERT(!this->IsFailure());
155 return (reinterpret_cast<intptr_t>(this) & kSmiTagMask) != 0;
156 }
157
158
150 bool Object::IsHeapNumber() { 159 bool Object::IsHeapNumber() {
151 return Object::IsHeapObject() 160 return Object::IsHeapObject()
152 && HeapObject::cast(this)->map()->instance_type() == HEAP_NUMBER_TYPE; 161 && HeapObject::cast(this)->map()->instance_type() == HEAP_NUMBER_TYPE;
153 } 162 }
154 163
155 164
156 bool Object::IsString() { 165 bool Object::IsString() {
157 return Object::IsHeapObject() 166 return Object::IsHeapObject()
158 && HeapObject::cast(this)->map()->instance_type() < FIRST_NONSTRING_TYPE; 167 && HeapObject::cast(this)->map()->instance_type() < FIRST_NONSTRING_TYPE;
159 } 168 }
(...skipping 235 matching lines...)
395 return IsSmi() || IsHeapNumber(); 404 return IsSmi() || IsHeapNumber();
396 } 405 }
397 406
398 407
399 bool Object::IsByteArray() { 408 bool Object::IsByteArray() {
400 return Object::IsHeapObject() 409 return Object::IsHeapObject()
401 && HeapObject::cast(this)->map()->instance_type() == BYTE_ARRAY_TYPE; 410 && HeapObject::cast(this)->map()->instance_type() == BYTE_ARRAY_TYPE;
402 } 411 }
403 412
404 413
414 bool Object::IsFreeSpace() {
415 return Object::IsHeapObject()
416 && HeapObject::cast(this)->map()->instance_type() == FREE_SPACE_TYPE;
417 }
418
419
420 bool Object::IsFiller() {
421 if (!Object::IsHeapObject()) return false;
422 InstanceType instance_type = HeapObject::cast(this)->map()->instance_type();
423 return instance_type == FREE_SPACE_TYPE || instance_type == FILLER_TYPE;
424 }
425
426
405 bool Object::IsExternalPixelArray() { 427 bool Object::IsExternalPixelArray() {
406 return Object::IsHeapObject() && 428 return Object::IsHeapObject() &&
407 HeapObject::cast(this)->map()->instance_type() == 429 HeapObject::cast(this)->map()->instance_type() ==
408 EXTERNAL_PIXEL_ARRAY_TYPE; 430 EXTERNAL_PIXEL_ARRAY_TYPE;
409 } 431 }
410 432
411 433
412 bool Object::IsExternalArray() { 434 bool Object::IsExternalArray() {
413 if (!Object::IsHeapObject()) 435 if (!Object::IsHeapObject())
414 return false; 436 return false;
(...skipping 220 matching lines...)
635 } 657 }
636 658
637 659
638 bool Object::IsCode() { 660 bool Object::IsCode() {
639 return Object::IsHeapObject() 661 return Object::IsHeapObject()
640 && HeapObject::cast(this)->map()->instance_type() == CODE_TYPE; 662 && HeapObject::cast(this)->map()->instance_type() == CODE_TYPE;
641 } 663 }
642 664
643 665
644 bool Object::IsOddball() { 666 bool Object::IsOddball() {
645 ASSERT(HEAP->is_safe_to_read_maps());
646 return Object::IsHeapObject() 667 return Object::IsHeapObject()
647 && HeapObject::cast(this)->map()->instance_type() == ODDBALL_TYPE; 668 && HeapObject::cast(this)->map()->instance_type() == ODDBALL_TYPE;
648 } 669 }
649 670
650 671
651 bool Object::IsJSGlobalPropertyCell() { 672 bool Object::IsJSGlobalPropertyCell() {
652 return Object::IsHeapObject() 673 return Object::IsHeapObject()
653 && HeapObject::cast(this)->map()->instance_type() 674 && HeapObject::cast(this)->map()->instance_type()
654 == JS_GLOBAL_PROPERTY_CELL_TYPE; 675 == JS_GLOBAL_PROPERTY_CELL_TYPE;
655 } 676 }
(...skipping 276 matching lines...)
932 953
933 #define FIELD_ADDR(p, offset) \ 954 #define FIELD_ADDR(p, offset) \
934 (reinterpret_cast<byte*>(p) + offset - kHeapObjectTag) 955 (reinterpret_cast<byte*>(p) + offset - kHeapObjectTag)
935 956
936 #define READ_FIELD(p, offset) \ 957 #define READ_FIELD(p, offset) \
937 (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset))) 958 (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)))
938 959
939 #define WRITE_FIELD(p, offset, value) \ 960 #define WRITE_FIELD(p, offset, value) \
940 (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)) = value) 961 (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)) = value)
941 962
942 // TODO(isolates): Pass heap in to these macros. 963 #define WRITE_BARRIER(heap, object, offset, value) \
943 #define WRITE_BARRIER(object, offset) \ 964 heap->incremental_marking()->RecordWrite( \
944 object->GetHeap()->RecordWrite(object->address(), offset); 965 object, HeapObject::RawField(object, offset), value); \
966 if (heap->InNewSpace(value)) { \
967 heap->RecordWrite(object->address(), offset); \
968 }
945 969
946 // CONDITIONAL_WRITE_BARRIER must be issued after the actual 970 #define CONDITIONAL_WRITE_BARRIER(heap, object, offset, value, mode) \
947 // write due to the assert validating the written value. 971 if (mode == UPDATE_WRITE_BARRIER) { \
948 #define CONDITIONAL_WRITE_BARRIER(heap, object, offset, mode) \ 972 heap->incremental_marking()->RecordWrite( \
949 if (mode == UPDATE_WRITE_BARRIER) { \ 973 object, HeapObject::RawField(object, offset), value); \
950 heap->RecordWrite(object->address(), offset); \ 974 if (heap->InNewSpace(value)) { \
951 } else { \ 975 heap->RecordWrite(object->address(), offset); \
952 ASSERT(mode == SKIP_WRITE_BARRIER); \ 976 } \
953 ASSERT(heap->InNewSpace(object) || \
954 !heap->InNewSpace(READ_FIELD(object, offset)) || \
955 Page::FromAddress(object->address())-> \
956 IsRegionDirty(object->address() + offset)); \
957 } 977 }
958 978
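
The new barrier has two halves: an incremental-marking notification, which preserves the tricolor invariant when a store creates an edge from an already-scanned (black) object to an unscanned (white) one, and the store-buffer entry for old-to-new pointers that the next scavenge must update. A self-contained toy of that split (the sets and flags below are stand-ins, not the real IncrementalMarking or store-buffer machinery):

#include <cstdio>
#include <unordered_set>

// Toy model of the two-part barrier the macros above expand to.
struct Obj {
  Obj* field = nullptr;
  bool in_new_space = false;  // stand-in for heap->InNewSpace(value)
  bool black = false;         // already visited by the incremental marker
};

std::unordered_set<Obj**> store_buffer;  // remembered old-to-new slots
std::unordered_set<Obj*> marking_deque;  // objects the marker must (re)visit

void write_barrier(Obj* host, Obj** slot, Obj* value) {
  *slot = value;
  // Incremental-marking half: a black host now points at 'value', so the
  // marker must process 'value' or it would wrongly stay white.
  if (host->black && value != nullptr) marking_deque.insert(value);
  // Scavenger half: record the slot so a minor GC can find and update
  // old-to-new pointers without scanning all of old space.
  if (value != nullptr && value->in_new_space) store_buffer.insert(slot);
}

int main() {
  Obj old_host; old_host.black = true;
  Obj young;    young.in_new_space = true;
  write_barrier(&old_host, &old_host.field, &young);
  std::printf("store buffer: %zu, marking deque: %zu\n",
              store_buffer.size(), marking_deque.size());  // 1, 1
  return 0;
}
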
959 #ifndef V8_TARGET_ARCH_MIPS 979 #ifndef V8_TARGET_ARCH_MIPS
960 #define READ_DOUBLE_FIELD(p, offset) \ 980 #define READ_DOUBLE_FIELD(p, offset) \
961 (*reinterpret_cast<double*>(FIELD_ADDR(p, offset))) 981 (*reinterpret_cast<double*>(FIELD_ADDR(p, offset)))
962 #else // V8_TARGET_ARCH_MIPS 982 #else // V8_TARGET_ARCH_MIPS
963 // Prevent gcc from using load-double (mips ldc1) on (possibly) 983 // Prevent gcc from using load-double (mips ldc1) on (possibly)
964 // non-64-bit aligned HeapNumber::value. 984 // non-64-bit aligned HeapNumber::value.
965 static inline double read_double_field(void* p, int offset) { 985 static inline double read_double_field(void* p, int offset) {
966 union conversion { 986 union conversion {
967 double d; 987 double d;
968 uint32_t u[2]; 988 uint32_t u[2];
969 } c; 989 } c;
970 c.u[0] = (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset))); 990 c.u[0] = (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)));
971 c.u[1] = (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset + 4))); 991 c.u[1] = (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset + 4)));
972 return c.d; 992 return c.d;
973 } 993 }
974 #define READ_DOUBLE_FIELD(p, offset) read_double_field(p, offset) 994 #define READ_DOUBLE_FIELD(p, offset) read_double_field(p, offset)
975 #endif // V8_TARGET_ARCH_MIPS 995 #endif // V8_TARGET_ARCH_MIPS
976 996
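
The union trick above matters because the MIPS ldc1 instruction requires 8-byte alignment, while a HeapNumber's value field may only be 4-byte aligned; splitting the access into two 32-bit loads keeps gcc from fusing them into one load-double. A standalone illustration of the same reassembly (endianness works out because both halves are written and read in the same order):

#include <cstdint>
#include <cstdio>

int main() {
  // Place a double at a 4-byte (but not 8-byte) aligned offset, the
  // situation read_double_field is defending against on MIPS.
  alignas(8) unsigned char buf[16] = {0};
  double value = 3.25;
  const uint32_t* halves = reinterpret_cast<const uint32_t*>(&value);
  *reinterpret_cast<uint32_t*>(buf + 4) = halves[0];
  *reinterpret_cast<uint32_t*>(buf + 8) = halves[1];

  // Reassemble with two 32-bit loads through the union, never a single
  // 64-bit load-double against the unaligned address.
  union { double d; uint32_t u[2]; } c;
  c.u[0] = *reinterpret_cast<uint32_t*>(buf + 4);
  c.u[1] = *reinterpret_cast<uint32_t*>(buf + 8);
  std::printf("%g\n", c.d);  // prints 3.25
  return 0;
}
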
977
978 #ifndef V8_TARGET_ARCH_MIPS 997 #ifndef V8_TARGET_ARCH_MIPS
979 #define WRITE_DOUBLE_FIELD(p, offset, value) \ 998 #define WRITE_DOUBLE_FIELD(p, offset, value) \
980 (*reinterpret_cast<double*>(FIELD_ADDR(p, offset)) = value) 999 (*reinterpret_cast<double*>(FIELD_ADDR(p, offset)) = value)
981 #else // V8_TARGET_ARCH_MIPS 1000 #else // V8_TARGET_ARCH_MIPS
982 // Prevent gcc from using store-double (mips sdc1) on (possibly) 1001 // Prevent gcc from using store-double (mips sdc1) on (possibly)
983 // non-64-bit aligned HeapNumber::value. 1002 // non-64-bit aligned HeapNumber::value.
984 static inline void write_double_field(void* p, int offset, 1003 static inline void write_double_field(void* p, int offset,
985 double value) { 1004 double value) {
986 union conversion { 1005 union conversion {
987 double d; 1006 double d;
(...skipping 174 matching lines...)
1162 return MapWord(reinterpret_cast<uintptr_t>(raw)); 1181 return MapWord(reinterpret_cast<uintptr_t>(raw));
1163 } 1182 }
1164 1183
1165 1184
1166 HeapObject* MapWord::ToForwardingAddress() { 1185 HeapObject* MapWord::ToForwardingAddress() {
1167 ASSERT(IsForwardingAddress()); 1186 ASSERT(IsForwardingAddress());
1168 return HeapObject::FromAddress(reinterpret_cast<Address>(value_)); 1187 return HeapObject::FromAddress(reinterpret_cast<Address>(value_));
1169 } 1188 }
1170 1189
1171 1190
1172 bool MapWord::IsMarked() {
1173 return (value_ & kMarkingMask) == 0;
1174 }
1175
1176
1177 void MapWord::SetMark() {
1178 value_ &= ~kMarkingMask;
1179 }
1180
1181
1182 void MapWord::ClearMark() {
1183 value_ |= kMarkingMask;
1184 }
1185
1186
1187 bool MapWord::IsOverflowed() {
1188 return (value_ & kOverflowMask) != 0;
1189 }
1190
1191
1192 void MapWord::SetOverflow() {
1193 value_ |= kOverflowMask;
1194 }
1195
1196
1197 void MapWord::ClearOverflow() {
1198 value_ &= ~kOverflowMask;
1199 }
1200
1201
1202 MapWord MapWord::EncodeAddress(Address map_address, int offset) {
1203 // Offset is the distance in live bytes from the first live object in the
1204 // same page. The offset between two objects in the same page should not
1205 // exceed the object area size of a page.
1206 ASSERT(0 <= offset && offset < Page::kObjectAreaSize);
1207
1208 uintptr_t compact_offset = offset >> kObjectAlignmentBits;
1209 ASSERT(compact_offset < (1 << kForwardingOffsetBits));
1210
1211 Page* map_page = Page::FromAddress(map_address);
1212 ASSERT_MAP_PAGE_INDEX(map_page->mc_page_index);
1213
1214 uintptr_t map_page_offset =
1215 map_page->Offset(map_address) >> kMapAlignmentBits;
1216
1217 uintptr_t encoding =
1218 (compact_offset << kForwardingOffsetShift) |
1219 (map_page_offset << kMapPageOffsetShift) |
1220 (map_page->mc_page_index << kMapPageIndexShift);
1221 return MapWord(encoding);
1222 }
1223
1224
1225 Address MapWord::DecodeMapAddress(MapSpace* map_space) {
1226 int map_page_index =
1227 static_cast<int>((value_ & kMapPageIndexMask) >> kMapPageIndexShift);
1228 ASSERT_MAP_PAGE_INDEX(map_page_index);
1229
1230 int map_page_offset = static_cast<int>(
1231 ((value_ & kMapPageOffsetMask) >> kMapPageOffsetShift) <<
1232 kMapAlignmentBits);
1233
1234 return (map_space->PageAddress(map_page_index) + map_page_offset);
1235 }
1236
1237
1238 int MapWord::DecodeOffset() {
1239 // The offset field is represented in the kForwardingOffsetBits
1240 // most-significant bits.
1241 uintptr_t offset = (value_ >> kForwardingOffsetShift) << kObjectAlignmentBits;
1242 ASSERT(offset < static_cast<uintptr_t>(Page::kObjectAreaSize));
1243 return static_cast<int>(offset);
1244 }
1245
1246
1247 MapWord MapWord::FromEncodedAddress(Address address) {
1248 return MapWord(reinterpret_cast<uintptr_t>(address));
1249 }
1250
1251
1252 Address MapWord::ToEncodedAddress() {
1253 return reinterpret_cast<Address>(value_);
1254 }
1255
1256
1257 #ifdef DEBUG 1191 #ifdef DEBUG
1258 void HeapObject::VerifyObjectField(int offset) { 1192 void HeapObject::VerifyObjectField(int offset) {
1259 VerifyPointer(READ_FIELD(this, offset)); 1193 VerifyPointer(READ_FIELD(this, offset));
1260 } 1194 }
1261 1195
1262 void HeapObject::VerifySmiField(int offset) { 1196 void HeapObject::VerifySmiField(int offset) {
1263 ASSERT(READ_FIELD(this, offset)->IsSmi()); 1197 ASSERT(READ_FIELD(this, offset)->IsSmi());
1264 } 1198 }
1265 #endif 1199 #endif
1266 1200
1267 1201
1268 Heap* HeapObject::GetHeap() { 1202 Heap* HeapObject::GetHeap() {
1269 // During GC, the map pointer in HeapObject is used in various ways that 1203 Heap* heap =
1270 // prevent us from retrieving Heap from the map. 1204 MemoryChunk::FromAddress(reinterpret_cast<Address>(this))->heap();
1271 // Assert that we are not in GC, implement GC code in a way that it doesn't 1205 ASSERT(heap != NULL);
1272 // pull heap from the map. 1206 ASSERT(heap->isolate() == Isolate::Current());
1273 ASSERT(HEAP->is_safe_to_read_maps()); 1207 return heap;
1274 return map()->heap();
1275 } 1208 }
1276 1209
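
GetHeap no longer goes through the map (whose word is reused during GC); it masks the object's address down to the start of its MemoryChunk, where a heap_ back-pointer lives. A sketch of that derivation, assuming power-of-two chunk alignment; kToyChunkSize and the struct layout are illustrative assumptions, not V8's actual definitions:

#include <cstdint>
#include <cstdio>

// Illustrative only: chunks are aligned to their (power-of-two) size, so
// clearing the low address bits of any interior pointer lands on the
// chunk header, which carries a back-pointer to its Heap.
struct Heap;
constexpr uintptr_t kToyChunkSize = 1u << 20;  // assumed 1 MB alignment

struct ToyMemoryChunk {
  Heap* heap_;  // filled in when the chunk is committed
  static ToyMemoryChunk* FromAddress(uintptr_t any_address_in_chunk) {
    return reinterpret_cast<ToyMemoryChunk*>(
        any_address_in_chunk & ~(kToyChunkSize - 1));
  }
};

int main() {
  uintptr_t chunk_start = 16 * kToyChunkSize;    // some aligned base
  uintptr_t object_addr = chunk_start + 0x1234;  // object inside the chunk
  std::printf("recovered chunk start: %#lx\n",
              static_cast<unsigned long>(reinterpret_cast<uintptr_t>(
                  ToyMemoryChunk::FromAddress(object_addr))));
  return 0;
}
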
1277 1210
1278 Isolate* HeapObject::GetIsolate() { 1211 Isolate* HeapObject::GetIsolate() {
1279 return GetHeap()->isolate(); 1212 return GetHeap()->isolate();
1280 } 1213 }
1281 1214
1282 1215
1283 Map* HeapObject::map() { 1216 Map* HeapObject::map() {
1284 return map_word().ToMap(); 1217 return map_word().ToMap();
1285 } 1218 }
1286 1219
1287 1220
1288 void HeapObject::set_map(Map* value) { 1221 void HeapObject::set_map(Map* value) {
1289 set_map_word(MapWord::FromMap(value)); 1222 set_map_word(MapWord::FromMap(value));
1223 if (value != NULL) {
1224 // TODO(1600) We are passing NULL as a slot because maps can never be on
1225 // an evacuation candidate.
1226 value->GetHeap()->incremental_marking()->RecordWrite(this, NULL, value);
1227 }
1290 } 1228 }
1291 1229
1292 1230
1293 MapWord HeapObject::map_word() { 1231 MapWord HeapObject::map_word() {
1294 return MapWord(reinterpret_cast<uintptr_t>(READ_FIELD(this, kMapOffset))); 1232 return MapWord(reinterpret_cast<uintptr_t>(READ_FIELD(this, kMapOffset)));
1295 } 1233 }
1296 1234
1297 1235
1298 void HeapObject::set_map_word(MapWord map_word) { 1236 void HeapObject::set_map_word(MapWord map_word) {
1299 // WRITE_FIELD does not invoke write barrier, but there is no need 1237 // WRITE_FIELD does not invoke write barrier, but there is no need
(...skipping 22 matching lines...)
1322 v->VisitPointers(reinterpret_cast<Object**>(FIELD_ADDR(this, start)), 1260 v->VisitPointers(reinterpret_cast<Object**>(FIELD_ADDR(this, start)),
1323 reinterpret_cast<Object**>(FIELD_ADDR(this, end))); 1261 reinterpret_cast<Object**>(FIELD_ADDR(this, end)));
1324 } 1262 }
1325 1263
1326 1264
1327 void HeapObject::IteratePointer(ObjectVisitor* v, int offset) { 1265 void HeapObject::IteratePointer(ObjectVisitor* v, int offset) {
1328 v->VisitPointer(reinterpret_cast<Object**>(FIELD_ADDR(this, offset))); 1266 v->VisitPointer(reinterpret_cast<Object**>(FIELD_ADDR(this, offset)));
1329 } 1267 }
1330 1268
1331 1269
1332 bool HeapObject::IsMarked() {
1333 return map_word().IsMarked();
1334 }
1335
1336
1337 void HeapObject::SetMark() {
1338 ASSERT(!IsMarked());
1339 MapWord first_word = map_word();
1340 first_word.SetMark();
1341 set_map_word(first_word);
1342 }
1343
1344
1345 void HeapObject::ClearMark() {
1346 ASSERT(IsMarked());
1347 MapWord first_word = map_word();
1348 first_word.ClearMark();
1349 set_map_word(first_word);
1350 }
1351
1352
1353 bool HeapObject::IsOverflowed() {
1354 return map_word().IsOverflowed();
1355 }
1356
1357
1358 void HeapObject::SetOverflow() {
1359 MapWord first_word = map_word();
1360 first_word.SetOverflow();
1361 set_map_word(first_word);
1362 }
1363
1364
1365 void HeapObject::ClearOverflow() {
1366 ASSERT(IsOverflowed());
1367 MapWord first_word = map_word();
1368 first_word.ClearOverflow();
1369 set_map_word(first_word);
1370 }
1371
1372
1373 double HeapNumber::value() { 1270 double HeapNumber::value() {
1374 return READ_DOUBLE_FIELD(this, kValueOffset); 1271 return READ_DOUBLE_FIELD(this, kValueOffset);
1375 } 1272 }
1376 1273
1377 1274
1378 void HeapNumber::set_value(double value) { 1275 void HeapNumber::set_value(double value) {
1379 WRITE_DOUBLE_FIELD(this, kValueOffset, value); 1276 WRITE_DOUBLE_FIELD(this, kValueOffset, value);
1380 } 1277 }
1381 1278
1382 1279
(...skipping 19 matching lines...)
1402 1299
1403 1300
1404 void JSObject::set_elements(FixedArrayBase* value, WriteBarrierMode mode) { 1301 void JSObject::set_elements(FixedArrayBase* value, WriteBarrierMode mode) {
1405 ASSERT(map()->has_fast_elements() == 1302 ASSERT(map()->has_fast_elements() ==
1406 (value->map() == GetHeap()->fixed_array_map() || 1303 (value->map() == GetHeap()->fixed_array_map() ||
1407 value->map() == GetHeap()->fixed_cow_array_map())); 1304 value->map() == GetHeap()->fixed_cow_array_map()));
1408 ASSERT(map()->has_fast_double_elements() == 1305 ASSERT(map()->has_fast_double_elements() ==
1409 value->IsFixedDoubleArray()); 1306 value->IsFixedDoubleArray());
1410 ASSERT(value->HasValidElements()); 1307 ASSERT(value->HasValidElements());
1411 WRITE_FIELD(this, kElementsOffset, value); 1308 WRITE_FIELD(this, kElementsOffset, value);
1412 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kElementsOffset, mode); 1309 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kElementsOffset, value, mode);
1413 } 1310 }
1414 1311
1415 1312
1416 void JSObject::initialize_properties() { 1313 void JSObject::initialize_properties() {
1417 ASSERT(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array())); 1314 ASSERT(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
1418 WRITE_FIELD(this, kPropertiesOffset, GetHeap()->empty_fixed_array()); 1315 WRITE_FIELD(this, kPropertiesOffset, GetHeap()->empty_fixed_array());
1419 } 1316 }
1420 1317
1421 1318
1422 void JSObject::initialize_elements() { 1319 void JSObject::initialize_elements() {
(...skipping 12 matching lines...)
1435 initialize_elements(); 1332 initialize_elements();
1436 return this; 1333 return this;
1437 } 1334 }
1438 1335
1439 1336
1440 ACCESSORS(Oddball, to_string, String, kToStringOffset) 1337 ACCESSORS(Oddball, to_string, String, kToStringOffset)
1441 ACCESSORS(Oddball, to_number, Object, kToNumberOffset) 1338 ACCESSORS(Oddball, to_number, Object, kToNumberOffset)
1442 1339
1443 1340
1444 byte Oddball::kind() { 1341 byte Oddball::kind() {
1445 return READ_BYTE_FIELD(this, kKindOffset); 1342 return Smi::cast(READ_FIELD(this, kKindOffset))->value();
1446 } 1343 }
1447 1344
1448 1345
1449 void Oddball::set_kind(byte value) { 1346 void Oddball::set_kind(byte value) {
1450 WRITE_BYTE_FIELD(this, kKindOffset, value); 1347 WRITE_FIELD(this, kKindOffset, Smi::FromInt(value));
1451 } 1348 }
1452 1349
1453 1350
1454 Object* JSGlobalPropertyCell::value() { 1351 Object* JSGlobalPropertyCell::value() {
1455 return READ_FIELD(this, kValueOffset); 1352 return READ_FIELD(this, kValueOffset);
1456 } 1353 }
1457 1354
1458 1355
1459 void JSGlobalPropertyCell::set_value(Object* val, WriteBarrierMode ignored) { 1356 void JSGlobalPropertyCell::set_value(Object* val, WriteBarrierMode ignored) {
1460 // The write barrier is not used for global property cells. 1357 // The write barrier is not used for global property cells.
1461 ASSERT(!val->IsJSGlobalPropertyCell()); 1358 ASSERT(!val->IsJSGlobalPropertyCell());
1462 WRITE_FIELD(this, kValueOffset, val); 1359 WRITE_FIELD(this, kValueOffset, val);
1360 GetHeap()->incremental_marking()->RecordWrite(
1361 this, HeapObject::RawField(this, kValueOffset), val);
1463 } 1362 }
1464 1363
1465 1364
1466 int JSObject::GetHeaderSize() { 1365 int JSObject::GetHeaderSize() {
1467 InstanceType type = map()->instance_type(); 1366 InstanceType type = map()->instance_type();
1468 // Check for the most common kind of JavaScript object before 1367 // Check for the most common kind of JavaScript object before
1469 // falling into the generic switch. This speeds up the internal 1368 // falling into the generic switch. This speeds up the internal
1470 // field operations considerably on average. 1369 // field operations considerably on average.
1471 if (type == JS_OBJECT_TYPE) return JSObject::kHeaderSize; 1370 if (type == JS_OBJECT_TYPE) return JSObject::kHeaderSize;
1472 switch (type) { 1371 switch (type) {
(...skipping 48 matching lines...)
1521 } 1420 }
1522 1421
1523 1422
1524 void JSObject::SetInternalField(int index, Object* value) { 1423 void JSObject::SetInternalField(int index, Object* value) {
1525 ASSERT(index < GetInternalFieldCount() && index >= 0); 1424 ASSERT(index < GetInternalFieldCount() && index >= 0);
1526 // Internal objects do follow immediately after the header, whereas in-object 1425 // Internal objects do follow immediately after the header, whereas in-object
1527 // properties are at the end of the object. Therefore there is no need 1426 // properties are at the end of the object. Therefore there is no need
1528 // to adjust the index here. 1427 // to adjust the index here.
1529 int offset = GetHeaderSize() + (kPointerSize * index); 1428 int offset = GetHeaderSize() + (kPointerSize * index);
1530 WRITE_FIELD(this, offset, value); 1429 WRITE_FIELD(this, offset, value);
1531 WRITE_BARRIER(this, offset); 1430 WRITE_BARRIER(GetHeap(), this, offset, value);
1532 } 1431 }
1533 1432
1534 1433
1535 // Access fast-case object properties at index. The use of these routines 1434 // Access fast-case object properties at index. The use of these routines
1536 // is needed to correctly distinguish between properties stored in-object and 1435 // is needed to correctly distinguish between properties stored in-object and
1537 // properties stored in the properties array. 1436 // properties stored in the properties array.
1538 Object* JSObject::FastPropertyAt(int index) { 1437 Object* JSObject::FastPropertyAt(int index) {
1539 // Adjust for the number of properties stored in the object. 1438 // Adjust for the number of properties stored in the object.
1540 index -= map()->inobject_properties(); 1439 index -= map()->inobject_properties();
1541 if (index < 0) { 1440 if (index < 0) {
1542 int offset = map()->instance_size() + (index * kPointerSize); 1441 int offset = map()->instance_size() + (index * kPointerSize);
1543 return READ_FIELD(this, offset); 1442 return READ_FIELD(this, offset);
1544 } else { 1443 } else {
1545 ASSERT(index < properties()->length()); 1444 ASSERT(index < properties()->length());
1546 return properties()->get(index); 1445 return properties()->get(index);
1547 } 1446 }
1548 } 1447 }
1549 1448
1550 1449
1551 Object* JSObject::FastPropertyAtPut(int index, Object* value) { 1450 Object* JSObject::FastPropertyAtPut(int index, Object* value) {
1552 // Adjust for the number of properties stored in the object. 1451 // Adjust for the number of properties stored in the object.
1553 index -= map()->inobject_properties(); 1452 index -= map()->inobject_properties();
1554 if (index < 0) { 1453 if (index < 0) {
1555 int offset = map()->instance_size() + (index * kPointerSize); 1454 int offset = map()->instance_size() + (index * kPointerSize);
1556 WRITE_FIELD(this, offset, value); 1455 WRITE_FIELD(this, offset, value);
1557 WRITE_BARRIER(this, offset); 1456 WRITE_BARRIER(GetHeap(), this, offset, value);
1558 } else { 1457 } else {
1559 ASSERT(index < properties()->length()); 1458 ASSERT(index < properties()->length());
1560 properties()->set(index, value); 1459 properties()->set(index, value);
1561 } 1460 }
1562 return value; 1461 return value;
1563 } 1462 }
1564 1463
1565 1464
1566 int JSObject::GetInObjectPropertyOffset(int index) { 1465 int JSObject::GetInObjectPropertyOffset(int index) {
1567 // Adjust for the number of properties stored in the object. 1466 // Adjust for the number of properties stored in the object.
(...skipping 13 matching lines...)
1581 1480
1582 1481
1583 Object* JSObject::InObjectPropertyAtPut(int index, 1482 Object* JSObject::InObjectPropertyAtPut(int index,
1584 Object* value, 1483 Object* value,
1585 WriteBarrierMode mode) { 1484 WriteBarrierMode mode) {
1586 // Adjust for the number of properties stored in the object. 1485 // Adjust for the number of properties stored in the object.
1587 index -= map()->inobject_properties(); 1486 index -= map()->inobject_properties();
1588 ASSERT(index < 0); 1487 ASSERT(index < 0);
1589 int offset = map()->instance_size() + (index * kPointerSize); 1488 int offset = map()->instance_size() + (index * kPointerSize);
1590 WRITE_FIELD(this, offset, value); 1489 WRITE_FIELD(this, offset, value);
1591 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, mode); 1490 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);
1592 return value; 1491 return value;
1593 } 1492 }
1594 1493
1595 1494
1596 1495
1597 void JSObject::InitializeBody(int object_size, Object* value) { 1496 void JSObject::InitializeBody(int object_size, Object* value) {
1598 ASSERT(!value->IsHeapObject() || !GetHeap()->InNewSpace(value)); 1497 ASSERT(!value->IsHeapObject() || !GetHeap()->InNewSpace(value));
1599 for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) { 1498 for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
1600 WRITE_FIELD(this, offset, value); 1499 WRITE_FIELD(this, offset, value);
1601 } 1500 }
(...skipping 74 matching lines...)
1676 int offset = kHeaderSize + index * kPointerSize; 1575 int offset = kHeaderSize + index * kPointerSize;
1677 WRITE_FIELD(this, offset, value); 1576 WRITE_FIELD(this, offset, value);
1678 } 1577 }
1679 1578
1680 1579
1681 void FixedArray::set(int index, Object* value) { 1580 void FixedArray::set(int index, Object* value) {
1682 ASSERT(map() != HEAP->fixed_cow_array_map()); 1581 ASSERT(map() != HEAP->fixed_cow_array_map());
1683 ASSERT(index >= 0 && index < this->length()); 1582 ASSERT(index >= 0 && index < this->length());
1684 int offset = kHeaderSize + index * kPointerSize; 1583 int offset = kHeaderSize + index * kPointerSize;
1685 WRITE_FIELD(this, offset, value); 1584 WRITE_FIELD(this, offset, value);
1686 WRITE_BARRIER(this, offset); 1585 WRITE_BARRIER(GetHeap(), this, offset, value);
1687 } 1586 }
1688 1587
1689 1588
1690 inline bool FixedDoubleArray::is_the_hole_nan(double value) { 1589 inline bool FixedDoubleArray::is_the_hole_nan(double value) {
1691 return BitCast<uint64_t, double>(value) == kHoleNanInt64; 1590 return BitCast<uint64_t, double>(value) == kHoleNanInt64;
1692 } 1591 }
1693 1592
1694 1593
1695 inline double FixedDoubleArray::hole_nan_as_double() { 1594 inline double FixedDoubleArray::hole_nan_as_double() {
1696 return BitCast<double, uint64_t>(kHoleNanInt64); 1595 return BitCast<double, uint64_t>(kHoleNanInt64);
(...skipping 98 matching lines...)
1795 Object* key = from->KeyAt(i); 1694 Object* key = from->KeyAt(i);
1796 if (key->IsNumber()) { 1695 if (key->IsNumber()) {
1797 uint32_t entry = static_cast<uint32_t>(key->Number()); 1696 uint32_t entry = static_cast<uint32_t>(key->Number());
1798 set(entry, from->ValueAt(i)->Number()); 1697 set(entry, from->ValueAt(i)->Number());
1799 } 1698 }
1800 } 1699 }
1801 } 1700 }
1802 1701
1803 1702
1804 WriteBarrierMode HeapObject::GetWriteBarrierMode(const AssertNoAllocation&) { 1703 WriteBarrierMode HeapObject::GetWriteBarrierMode(const AssertNoAllocation&) {
1805 if (GetHeap()->InNewSpace(this)) return SKIP_WRITE_BARRIER; 1704 Heap* heap = GetHeap();
1705 if (heap->incremental_marking()->IsMarking()) return UPDATE_WRITE_BARRIER;
1706 if (heap->InNewSpace(this)) return SKIP_WRITE_BARRIER;
1806 return UPDATE_WRITE_BARRIER; 1707 return UPDATE_WRITE_BARRIER;
1807 } 1708 }
1808 1709
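
Note the new first check: once incremental marking is running, SKIP_WRITE_BARRIER is unsafe even for new-space objects, because the marker must observe every pointer store; skipping remains valid only for new-space hosts while marking is off. A toy restatement of that decision table:

#include <cstdio>

enum ToyWriteBarrierMode { SKIP_WRITE_BARRIER, UPDATE_WRITE_BARRIER };

// Mirrors the order of the checks above; the two bools are stand-ins for
// heap->incremental_marking()->IsMarking() and heap->InNewSpace(this).
ToyWriteBarrierMode GetMode(bool marking, bool host_in_new_space) {
  if (marking) return UPDATE_WRITE_BARRIER;          // marker sees all stores
  if (host_in_new_space) return SKIP_WRITE_BARRIER;  // scavenge finds them
  return UPDATE_WRITE_BARRIER;
}

int main() {
  std::printf("%d %d %d\n",
              GetMode(true, true),     // 1: marking wins over new-space
              GetMode(false, true),    // 0: safe to skip
              GetMode(false, false));  // 1: old-space host needs the barrier
  return 0;
}
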
1809 1710
1810 void FixedArray::set(int index, 1711 void FixedArray::set(int index,
1811 Object* value, 1712 Object* value,
1812 WriteBarrierMode mode) { 1713 WriteBarrierMode mode) {
1813 ASSERT(map() != HEAP->fixed_cow_array_map()); 1714 ASSERT(map() != HEAP->fixed_cow_array_map());
1814 ASSERT(index >= 0 && index < this->length()); 1715 ASSERT(index >= 0 && index < this->length());
1815 int offset = kHeaderSize + index * kPointerSize; 1716 int offset = kHeaderSize + index * kPointerSize;
1816 WRITE_FIELD(this, offset, value); 1717 WRITE_FIELD(this, offset, value);
1817 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, mode); 1718 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);
1818 } 1719 }
1819 1720
1820 1721
1821 void FixedArray::fast_set(FixedArray* array, int index, Object* value) { 1722 void FixedArray::fast_set(FixedArray* array, int index, Object* value) {
1822 ASSERT(array->map() != HEAP->raw_unchecked_fixed_cow_array_map()); 1723 ASSERT(array->map() != HEAP->raw_unchecked_fixed_cow_array_map());
1823 ASSERT(index >= 0 && index < array->length()); 1724 ASSERT(index >= 0 && index < array->length());
1824 ASSERT(!HEAP->InNewSpace(value)); 1725 ASSERT(!HEAP->InNewSpace(value));
1825 WRITE_FIELD(array, kHeaderSize + index * kPointerSize, value); 1726 WRITE_FIELD(array, kHeaderSize + index * kPointerSize, value);
1727 array->GetHeap()->incremental_marking()->RecordWrite(
1728 array,
1729 HeapObject::RawField(array, kHeaderSize + index * kPointerSize),
1730 value);
1826 } 1731 }
1827 1732
1828 1733
1829 void FixedArray::set_undefined(int index) { 1734 void FixedArray::set_undefined(int index) {
1830 ASSERT(map() != HEAP->fixed_cow_array_map()); 1735 ASSERT(map() != HEAP->fixed_cow_array_map());
1831 set_undefined(GetHeap(), index); 1736 set_undefined(GetHeap(), index);
1832 } 1737 }
1833 1738
1834 1739
1835 void FixedArray::set_undefined(Heap* heap, int index) { 1740 void FixedArray::set_undefined(Heap* heap, int index) {
(...skipping 32 matching lines...)
1868 WRITE_FIELD(this, offset, value); 1773 WRITE_FIELD(this, offset, value);
1869 } 1774 }
1870 1775
1871 1776
1872 void FixedArray::set_unchecked(Heap* heap, 1777 void FixedArray::set_unchecked(Heap* heap,
1873 int index, 1778 int index,
1874 Object* value, 1779 Object* value,
1875 WriteBarrierMode mode) { 1780 WriteBarrierMode mode) {
1876 int offset = kHeaderSize + index * kPointerSize; 1781 int offset = kHeaderSize + index * kPointerSize;
1877 WRITE_FIELD(this, offset, value); 1782 WRITE_FIELD(this, offset, value);
1878 CONDITIONAL_WRITE_BARRIER(heap, this, offset, mode); 1783 CONDITIONAL_WRITE_BARRIER(heap, this, offset, value, mode);
1879 } 1784 }
1880 1785
1881 1786
1882 void FixedArray::set_null_unchecked(Heap* heap, int index) { 1787 void FixedArray::set_null_unchecked(Heap* heap, int index) {
1883 ASSERT(index >= 0 && index < this->length()); 1788 ASSERT(index >= 0 && index < this->length());
1884 ASSERT(!HEAP->InNewSpace(heap->null_value())); 1789 ASSERT(!HEAP->InNewSpace(heap->null_value()));
1885 WRITE_FIELD(this, kHeaderSize + index * kPointerSize, heap->null_value()); 1790 WRITE_FIELD(this, kHeaderSize + index * kPointerSize, heap->null_value());
1886 } 1791 }
1887 1792
1888 1793
(...skipping 258 matching lines...)
2147 CAST_ACCESSOR(JSGlobalObject) 2052 CAST_ACCESSOR(JSGlobalObject)
2148 CAST_ACCESSOR(JSBuiltinsObject) 2053 CAST_ACCESSOR(JSBuiltinsObject)
2149 CAST_ACCESSOR(Code) 2054 CAST_ACCESSOR(Code)
2150 CAST_ACCESSOR(JSArray) 2055 CAST_ACCESSOR(JSArray)
2151 CAST_ACCESSOR(JSRegExp) 2056 CAST_ACCESSOR(JSRegExp)
2152 CAST_ACCESSOR(JSProxy) 2057 CAST_ACCESSOR(JSProxy)
2153 CAST_ACCESSOR(JSFunctionProxy) 2058 CAST_ACCESSOR(JSFunctionProxy)
2154 CAST_ACCESSOR(JSWeakMap) 2059 CAST_ACCESSOR(JSWeakMap)
2155 CAST_ACCESSOR(Foreign) 2060 CAST_ACCESSOR(Foreign)
2156 CAST_ACCESSOR(ByteArray) 2061 CAST_ACCESSOR(ByteArray)
2062 CAST_ACCESSOR(FreeSpace)
2157 CAST_ACCESSOR(ExternalArray) 2063 CAST_ACCESSOR(ExternalArray)
2158 CAST_ACCESSOR(ExternalByteArray) 2064 CAST_ACCESSOR(ExternalByteArray)
2159 CAST_ACCESSOR(ExternalUnsignedByteArray) 2065 CAST_ACCESSOR(ExternalUnsignedByteArray)
2160 CAST_ACCESSOR(ExternalShortArray) 2066 CAST_ACCESSOR(ExternalShortArray)
2161 CAST_ACCESSOR(ExternalUnsignedShortArray) 2067 CAST_ACCESSOR(ExternalUnsignedShortArray)
2162 CAST_ACCESSOR(ExternalIntArray) 2068 CAST_ACCESSOR(ExternalIntArray)
2163 CAST_ACCESSOR(ExternalUnsignedIntArray) 2069 CAST_ACCESSOR(ExternalUnsignedIntArray)
2164 CAST_ACCESSOR(ExternalFloatArray) 2070 CAST_ACCESSOR(ExternalFloatArray)
2165 CAST_ACCESSOR(ExternalDoubleArray) 2071 CAST_ACCESSOR(ExternalDoubleArray)
2166 CAST_ACCESSOR(ExternalPixelArray) 2072 CAST_ACCESSOR(ExternalPixelArray)
2167 CAST_ACCESSOR(Struct) 2073 CAST_ACCESSOR(Struct)
2168 2074
2169 2075
2170 #define MAKE_STRUCT_CAST(NAME, Name, name) CAST_ACCESSOR(Name) 2076 #define MAKE_STRUCT_CAST(NAME, Name, name) CAST_ACCESSOR(Name)
2171 STRUCT_LIST(MAKE_STRUCT_CAST) 2077 STRUCT_LIST(MAKE_STRUCT_CAST)
2172 #undef MAKE_STRUCT_CAST 2078 #undef MAKE_STRUCT_CAST
2173 2079
2174 2080
2175 template <typename Shape, typename Key> 2081 template <typename Shape, typename Key>
2176 HashTable<Shape, Key>* HashTable<Shape, Key>::cast(Object* obj) { 2082 HashTable<Shape, Key>* HashTable<Shape, Key>::cast(Object* obj) {
2177 ASSERT(obj->IsHashTable()); 2083 ASSERT(obj->IsHashTable());
2178 return reinterpret_cast<HashTable*>(obj); 2084 return reinterpret_cast<HashTable*>(obj);
2179 } 2085 }
2180 2086
2181 2087
2182 SMI_ACCESSORS(FixedArrayBase, length, kLengthOffset) 2088 SMI_ACCESSORS(FixedArrayBase, length, kLengthOffset)
2089 SMI_ACCESSORS(FreeSpace, size, kSizeOffset)
2183 2090
2184 SMI_ACCESSORS(String, length, kLengthOffset) 2091 SMI_ACCESSORS(String, length, kLengthOffset)
2185 2092
2186 2093
2187 uint32_t String::hash_field() { 2094 uint32_t String::hash_field() {
2188 return READ_UINT32_FIELD(this, kHashFieldOffset); 2095 return READ_UINT32_FIELD(this, kHashFieldOffset);
2189 } 2096 }
2190 2097
2191 2098
2192 void String::set_hash_field(uint32_t value) { 2099 void String::set_hash_field(uint32_t value) {
(...skipping 156 matching lines...)
2349 } 2256 }
2350 2257
2351 2258
2352 Object* ConsString::unchecked_first() { 2259 Object* ConsString::unchecked_first() {
2353 return READ_FIELD(this, kFirstOffset); 2260 return READ_FIELD(this, kFirstOffset);
2354 } 2261 }
2355 2262
2356 2263
2357 void ConsString::set_first(String* value, WriteBarrierMode mode) { 2264 void ConsString::set_first(String* value, WriteBarrierMode mode) {
2358 WRITE_FIELD(this, kFirstOffset, value); 2265 WRITE_FIELD(this, kFirstOffset, value);
2359 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kFirstOffset, mode); 2266 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kFirstOffset, value, mode);
2360 } 2267 }
2361 2268
2362 2269
2363 String* ConsString::second() { 2270 String* ConsString::second() {
2364 return String::cast(READ_FIELD(this, kSecondOffset)); 2271 return String::cast(READ_FIELD(this, kSecondOffset));
2365 } 2272 }
2366 2273
2367 2274
2368 Object* ConsString::unchecked_second() { 2275 Object* ConsString::unchecked_second() {
2369 return READ_FIELD(this, kSecondOffset); 2276 return READ_FIELD(this, kSecondOffset);
2370 } 2277 }
2371 2278
2372 2279
2373 void ConsString::set_second(String* value, WriteBarrierMode mode) { 2280 void ConsString::set_second(String* value, WriteBarrierMode mode) {
2374 WRITE_FIELD(this, kSecondOffset, value); 2281 WRITE_FIELD(this, kSecondOffset, value);
2375 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kSecondOffset, mode); 2282 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kSecondOffset, value, mode);
2376 } 2283 }
2377 2284
2378 2285
2379 ExternalAsciiString::Resource* ExternalAsciiString::resource() { 2286 ExternalAsciiString::Resource* ExternalAsciiString::resource() {
2380 return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)); 2287 return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
2381 } 2288 }
2382 2289
2383 2290
2384 void ExternalAsciiString::set_resource( 2291 void ExternalAsciiString::set_resource(
2385 ExternalAsciiString::Resource* resource) { 2292 ExternalAsciiString::Resource* resource) {
(...skipping 301 matching lines...)
2687 if (instance_type == FIXED_ARRAY_TYPE) { 2594 if (instance_type == FIXED_ARRAY_TYPE) {
2688 return FixedArray::BodyDescriptor::SizeOf(map, this); 2595 return FixedArray::BodyDescriptor::SizeOf(map, this);
2689 } 2596 }
2690 if (instance_type == ASCII_STRING_TYPE) { 2597 if (instance_type == ASCII_STRING_TYPE) {
2691 return SeqAsciiString::SizeFor( 2598 return SeqAsciiString::SizeFor(
2692 reinterpret_cast<SeqAsciiString*>(this)->length()); 2599 reinterpret_cast<SeqAsciiString*>(this)->length());
2693 } 2600 }
2694 if (instance_type == BYTE_ARRAY_TYPE) { 2601 if (instance_type == BYTE_ARRAY_TYPE) {
2695 return reinterpret_cast<ByteArray*>(this)->ByteArraySize(); 2602 return reinterpret_cast<ByteArray*>(this)->ByteArraySize();
2696 } 2603 }
2604 if (instance_type == FREE_SPACE_TYPE) {
2605 return reinterpret_cast<FreeSpace*>(this)->size();
2606 }
2697 if (instance_type == STRING_TYPE) { 2607 if (instance_type == STRING_TYPE) {
2698 return SeqTwoByteString::SizeFor( 2608 return SeqTwoByteString::SizeFor(
2699 reinterpret_cast<SeqTwoByteString*>(this)->length()); 2609 reinterpret_cast<SeqTwoByteString*>(this)->length());
2700 } 2610 }
2701 if (instance_type == FIXED_DOUBLE_ARRAY_TYPE) { 2611 if (instance_type == FIXED_DOUBLE_ARRAY_TYPE) {
2702 return FixedDoubleArray::SizeFor( 2612 return FixedDoubleArray::SizeFor(
2703 reinterpret_cast<FixedDoubleArray*>(this)->length()); 2613 reinterpret_cast<FixedDoubleArray*>(this)->length());
2704 } 2614 }
2705 ASSERT(instance_type == CODE_TYPE); 2615 ASSERT(instance_type == CODE_TYPE);
2706 return reinterpret_cast<Code*>(this)->CodeSize(); 2616 return reinterpret_cast<Code*>(this)->CodeSize();
(...skipping 141 matching lines...)
2848 bool Map::is_shared() { 2758 bool Map::is_shared() {
2849 return ((1 << kIsShared) & bit_field3()) != 0; 2759 return ((1 << kIsShared) & bit_field3()) != 0;
2850 } 2760 }
2851 2761
2852 2762
2853 JSFunction* Map::unchecked_constructor() { 2763 JSFunction* Map::unchecked_constructor() {
2854 return reinterpret_cast<JSFunction*>(READ_FIELD(this, kConstructorOffset)); 2764 return reinterpret_cast<JSFunction*>(READ_FIELD(this, kConstructorOffset));
2855 } 2765 }
2856 2766
2857 2767
2858 FixedArray* Map::unchecked_prototype_transitions() {
2859 return reinterpret_cast<FixedArray*>(
2860 READ_FIELD(this, kPrototypeTransitionsOffset));
2861 }
2862
2863
2864 Code::Flags Code::flags() { 2768 Code::Flags Code::flags() {
2865 return static_cast<Flags>(READ_INT_FIELD(this, kFlagsOffset)); 2769 return static_cast<Flags>(READ_INT_FIELD(this, kFlagsOffset));
2866 } 2770 }
2867 2771
2868 2772
2869 void Code::set_flags(Code::Flags flags) { 2773 void Code::set_flags(Code::Flags flags) {
2870 STATIC_ASSERT(Code::NUMBER_OF_KINDS <= KindField::kMax + 1); 2774 STATIC_ASSERT(Code::NUMBER_OF_KINDS <= KindField::kMax + 1);
2871 // Make sure that all call stubs have an arguments count. 2775 // Make sure that all call stubs have an arguments count.
2872 ASSERT((ExtractKindFromFlags(flags) != CALL_IC && 2776 ASSERT((ExtractKindFromFlags(flags) != CALL_IC &&
2873 ExtractKindFromFlags(flags) != KEYED_CALL_IC) || 2777 ExtractKindFromFlags(flags) != KEYED_CALL_IC) ||
(...skipping 301 matching lines...)
3175 HeapObject* code = HeapObject::FromAddress(address - Code::kHeaderSize); 3079 HeapObject* code = HeapObject::FromAddress(address - Code::kHeaderSize);
3176 // GetCodeFromTargetAddress might be called when marking objects during mark 3080 // GetCodeFromTargetAddress might be called when marking objects during mark
3177 // sweep. reinterpret_cast is therefore used instead of the more appropriate 3081 // sweep. reinterpret_cast is therefore used instead of the more appropriate
3178 // Code::cast. Code::cast does not work when the object's map is 3082 // Code::cast. Code::cast does not work when the object's map is
3179 // marked. 3083 // marked.
3180 Code* result = reinterpret_cast<Code*>(code); 3084 Code* result = reinterpret_cast<Code*>(code);
3181 return result; 3085 return result;
3182 } 3086 }
3183 3087
3184 3088
3185 Isolate* Map::isolate() {
3186 return heap()->isolate();
3187 }
3188
3189
3190 Heap* Map::heap() {
3191 // NOTE: address() helper is not used to save one instruction.
3192 Heap* heap = Page::FromAddress(reinterpret_cast<Address>(this))->heap_;
3193 ASSERT(heap != NULL);
3194 ASSERT(heap->isolate() == Isolate::Current());
3195 return heap;
3196 }
3197
3198
3199 Heap* Code::heap() {
3200 // NOTE: address() helper is not used to save one instruction.
3201 Heap* heap = Page::FromAddress(reinterpret_cast<Address>(this))->heap_;
3202 ASSERT(heap != NULL);
3203 ASSERT(heap->isolate() == Isolate::Current());
3204 return heap;
3205 }
3206
3207
3208 Isolate* Code::isolate() {
3209 return heap()->isolate();
3210 }
3211
3212
3213 Heap* JSGlobalPropertyCell::heap() {
3214 // NOTE: address() helper is not used to save one instruction.
3215 Heap* heap = Page::FromAddress(reinterpret_cast<Address>(this))->heap_;
3216 ASSERT(heap != NULL);
3217 ASSERT(heap->isolate() == Isolate::Current());
3218 return heap;
3219 }
3220
3221
3222 Isolate* JSGlobalPropertyCell::isolate() {
3223 return heap()->isolate();
3224 }
3225
3226
3227 Object* Code::GetObjectFromEntryAddress(Address location_of_address) { 3089 Object* Code::GetObjectFromEntryAddress(Address location_of_address) {
3228 return HeapObject:: 3090 return HeapObject::
3229 FromAddress(Memory::Address_at(location_of_address) - Code::kHeaderSize); 3091 FromAddress(Memory::Address_at(location_of_address) - Code::kHeaderSize);
3230 } 3092 }
3231 3093
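
GetObjectFromEntryAddress and GetCodeFromTargetAddress both rely on the code payload starting a fixed Code::kHeaderSize past the object start, so entry address and object pointer convert by a constant offset. A toy of that round trip (sizes illustrative; real V8 heap pointers additionally carry a heap-object tag):

#include <cstdint>
#include <cstdio>

constexpr int kToyHeaderSize = 32;  // assumed fixed header, like Code::kHeaderSize

struct ToyCode {
  unsigned char header[kToyHeaderSize];
  unsigned char instructions[16];  // the entry address points here
};

int main() {
  ToyCode code;
  uintptr_t object = reinterpret_cast<uintptr_t>(&code);
  uintptr_t entry = object + kToyHeaderSize;     // like Code::entry()
  uintptr_t recovered = entry - kToyHeaderSize;  // like FromAddress(entry - kHeaderSize)
  std::printf("%s\n", recovered == object ? "round trip ok" : "mismatch");
  return 0;
}
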
3232 3094
3233 Object* Map::prototype() { 3095 Object* Map::prototype() {
3234 return READ_FIELD(this, kPrototypeOffset); 3096 return READ_FIELD(this, kPrototypeOffset);
3235 } 3097 }
3236 3098
3237 3099
3238 void Map::set_prototype(Object* value, WriteBarrierMode mode) { 3100 void Map::set_prototype(Object* value, WriteBarrierMode mode) {
3239 ASSERT(value->IsNull() || value->IsJSReceiver()); 3101 ASSERT(value->IsNull() || value->IsJSReceiver());
3240 WRITE_FIELD(this, kPrototypeOffset, value); 3102 WRITE_FIELD(this, kPrototypeOffset, value);
3241 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kPrototypeOffset, mode); 3103 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kPrototypeOffset, value, mode);
3242 } 3104 }
3243 3105
3244 3106
3245 DescriptorArray* Map::instance_descriptors() { 3107 DescriptorArray* Map::instance_descriptors() {
3246 Object* object = READ_FIELD(this, kInstanceDescriptorsOrBitField3Offset); 3108 Object* object = READ_FIELD(this, kInstanceDescriptorsOrBitField3Offset);
3247 if (object->IsSmi()) { 3109 if (object->IsSmi()) {
3248 return HEAP->empty_descriptor_array(); 3110 return HEAP->empty_descriptor_array();
3249 } else { 3111 } else {
3250 return DescriptorArray::cast(object); 3112 return DescriptorArray::cast(object);
3251 } 3113 }
(...skipping 14 matching lines...)
3266 kInstanceDescriptorsOrBitField3Offset, 3128 kInstanceDescriptorsOrBitField3Offset,
3267 Smi::FromInt(DescriptorArray::cast(object)->bit_field3_storage())); 3129 Smi::FromInt(DescriptorArray::cast(object)->bit_field3_storage()));
3268 } 3130 }
3269 } 3131 }
3270 3132
3271 3133
3272 void Map::set_instance_descriptors(DescriptorArray* value, 3134 void Map::set_instance_descriptors(DescriptorArray* value,
3273 WriteBarrierMode mode) { 3135 WriteBarrierMode mode) {
3274 Object* object = READ_FIELD(this, 3136 Object* object = READ_FIELD(this,
3275 kInstanceDescriptorsOrBitField3Offset); 3137 kInstanceDescriptorsOrBitField3Offset);
3276 if (value == isolate()->heap()->empty_descriptor_array()) { 3138 Heap* heap = GetHeap();
3139 if (value == heap->empty_descriptor_array()) {
3277 clear_instance_descriptors(); 3140 clear_instance_descriptors();
3278 return; 3141 return;
3279 } else { 3142 } else {
3280 if (object->IsSmi()) { 3143 if (object->IsSmi()) {
3281 value->set_bit_field3_storage(Smi::cast(object)->value()); 3144 value->set_bit_field3_storage(Smi::cast(object)->value());
3282 } else { 3145 } else {
3283 value->set_bit_field3_storage( 3146 value->set_bit_field3_storage(
3284 DescriptorArray::cast(object)->bit_field3_storage()); 3147 DescriptorArray::cast(object)->bit_field3_storage());
3285 } 3148 }
3286 } 3149 }
3287 ASSERT(!is_shared()); 3150 ASSERT(!is_shared());
3288 WRITE_FIELD(this, kInstanceDescriptorsOrBitField3Offset, value); 3151 WRITE_FIELD(this, kInstanceDescriptorsOrBitField3Offset, value);
3289 CONDITIONAL_WRITE_BARRIER(GetHeap(), 3152 CONDITIONAL_WRITE_BARRIER(
3290 this, 3153 heap, this, kInstanceDescriptorsOrBitField3Offset, value, mode);
3291 kInstanceDescriptorsOrBitField3Offset,
3292 mode);
3293 } 3154 }
3294 3155
3295 3156
3296 int Map::bit_field3() { 3157 int Map::bit_field3() {
3297 Object* object = READ_FIELD(this, 3158 Object* object = READ_FIELD(this,
3298 kInstanceDescriptorsOrBitField3Offset); 3159 kInstanceDescriptorsOrBitField3Offset);
3299 if (object->IsSmi()) { 3160 if (object->IsSmi()) {
3300 return Smi::cast(object)->value(); 3161 return Smi::cast(object)->value();
3301 } else { 3162 } else {
3302 return DescriptorArray::cast(object)->bit_field3_storage(); 3163 return DescriptorArray::cast(object)->bit_field3_storage();
3303 } 3164 }
3304 } 3165 }
3305 3166
3306 3167
3307 void Map::set_bit_field3(int value) { 3168 void Map::set_bit_field3(int value) {
3308 ASSERT(Smi::IsValid(value)); 3169 ASSERT(Smi::IsValid(value));
3309 Object* object = READ_FIELD(this, 3170 Object* object = READ_FIELD(this,
3310 kInstanceDescriptorsOrBitField3Offset); 3171 kInstanceDescriptorsOrBitField3Offset);
3311 if (object->IsSmi()) { 3172 if (object->IsSmi()) {
3312 WRITE_FIELD(this, 3173 WRITE_FIELD(this,
3313 kInstanceDescriptorsOrBitField3Offset, 3174 kInstanceDescriptorsOrBitField3Offset,
3314 Smi::FromInt(value)); 3175 Smi::FromInt(value));
3315 } else { 3176 } else {
3316 DescriptorArray::cast(object)->set_bit_field3_storage(value); 3177 DescriptorArray::cast(object)->set_bit_field3_storage(value);
3317 } 3178 }
3318 } 3179 }
3319 3180
3320 3181
3182 FixedArray* Map::unchecked_prototype_transitions() {
3183 return reinterpret_cast<FixedArray*>(
3184 READ_FIELD(this, kPrototypeTransitionsOffset));
3185 }
3186
3187
3321 ACCESSORS(Map, code_cache, Object, kCodeCacheOffset) 3188 ACCESSORS(Map, code_cache, Object, kCodeCacheOffset)
3322 ACCESSORS(Map, prototype_transitions, FixedArray, kPrototypeTransitionsOffset) 3189 ACCESSORS(Map, prototype_transitions, FixedArray, kPrototypeTransitionsOffset)
3323 ACCESSORS(Map, constructor, Object, kConstructorOffset) 3190 ACCESSORS(Map, constructor, Object, kConstructorOffset)
3324 3191
3325 ACCESSORS(JSFunction, shared, SharedFunctionInfo, kSharedFunctionInfoOffset) 3192 ACCESSORS(JSFunction, shared, SharedFunctionInfo, kSharedFunctionInfoOffset)
3326 ACCESSORS(JSFunction, literals, FixedArray, kLiteralsOffset) 3193 ACCESSORS(JSFunction, literals, FixedArray, kLiteralsOffset)
3327 ACCESSORS_GCSAFE(JSFunction, next_function_link, Object, 3194 ACCESSORS(JSFunction,
3328 kNextFunctionLinkOffset) 3195 next_function_link,
3196 Object,
3197 kNextFunctionLinkOffset)
3329 3198
3330 ACCESSORS(GlobalObject, builtins, JSBuiltinsObject, kBuiltinsOffset) 3199 ACCESSORS(GlobalObject, builtins, JSBuiltinsObject, kBuiltinsOffset)
3331 ACCESSORS(GlobalObject, global_context, Context, kGlobalContextOffset) 3200 ACCESSORS(GlobalObject, global_context, Context, kGlobalContextOffset)
3332 ACCESSORS(GlobalObject, global_receiver, JSObject, kGlobalReceiverOffset) 3201 ACCESSORS(GlobalObject, global_receiver, JSObject, kGlobalReceiverOffset)
3333 3202
3334 ACCESSORS(JSGlobalProxy, context, Object, kContextOffset) 3203 ACCESSORS(JSGlobalProxy, context, Object, kContextOffset)
3335 3204
3336 ACCESSORS(AccessorInfo, getter, Object, kGetterOffset) 3205 ACCESSORS(AccessorInfo, getter, Object, kGetterOffset)
3337 ACCESSORS(AccessorInfo, setter, Object, kSetterOffset) 3206 ACCESSORS(AccessorInfo, setter, Object, kSetterOffset)
3338 ACCESSORS(AccessorInfo, data, Object, kDataOffset) 3207 ACCESSORS(AccessorInfo, data, Object, kDataOffset)
(...skipping 275 matching lines...)
3614 } 3483 }
3615 3484
3616 3485
3617 Code* SharedFunctionInfo::unchecked_code() { 3486 Code* SharedFunctionInfo::unchecked_code() {
3618 return reinterpret_cast<Code*>(READ_FIELD(this, kCodeOffset)); 3487 return reinterpret_cast<Code*>(READ_FIELD(this, kCodeOffset));
3619 } 3488 }
3620 3489
3621 3490
3622 void SharedFunctionInfo::set_code(Code* value, WriteBarrierMode mode) { 3491 void SharedFunctionInfo::set_code(Code* value, WriteBarrierMode mode) {
3623 WRITE_FIELD(this, kCodeOffset, value); 3492 WRITE_FIELD(this, kCodeOffset, value);
3624 ASSERT(!Isolate::Current()->heap()->InNewSpace(value)); 3493 CONDITIONAL_WRITE_BARRIER(value->GetHeap(), this, kCodeOffset, value, mode);
3625 } 3494 }
3626 3495
3627 3496
3628 SerializedScopeInfo* SharedFunctionInfo::scope_info() { 3497 SerializedScopeInfo* SharedFunctionInfo::scope_info() {
3629 return reinterpret_cast<SerializedScopeInfo*>( 3498 return reinterpret_cast<SerializedScopeInfo*>(
3630 READ_FIELD(this, kScopeInfoOffset)); 3499 READ_FIELD(this, kScopeInfoOffset));
3631 } 3500 }
3632 3501
3633 3502
3634 void SharedFunctionInfo::set_scope_info(SerializedScopeInfo* value, 3503 void SharedFunctionInfo::set_scope_info(SerializedScopeInfo* value,
3635 WriteBarrierMode mode) { 3504 WriteBarrierMode mode) {
3636 WRITE_FIELD(this, kScopeInfoOffset, reinterpret_cast<Object*>(value)); 3505 WRITE_FIELD(this, kScopeInfoOffset, reinterpret_cast<Object*>(value));
3637 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kScopeInfoOffset, mode); 3506 CONDITIONAL_WRITE_BARRIER(GetHeap(),
3507 this,
3508 kScopeInfoOffset,
3509 reinterpret_cast<Object*>(value),
3510 mode);
3638 } 3511 }
3639 3512
3640 3513
3641 Smi* SharedFunctionInfo::deopt_counter() { 3514 Smi* SharedFunctionInfo::deopt_counter() {
3642 return reinterpret_cast<Smi*>(READ_FIELD(this, kDeoptCounterOffset)); 3515 return reinterpret_cast<Smi*>(READ_FIELD(this, kDeoptCounterOffset));
3643 } 3516 }
3644 3517
3645 3518
3646 void SharedFunctionInfo::set_deopt_counter(Smi* value) { 3519 void SharedFunctionInfo::set_deopt_counter(Smi* value) {
3647 WRITE_FIELD(this, kDeoptCounterOffset, value); 3520 WRITE_FIELD(this, kDeoptCounterOffset, value);
(...skipping 76 matching lines...)
3724 } 3597 }
3725 3598
3726 3599
3727 Code* JSFunction::unchecked_code() { 3600 Code* JSFunction::unchecked_code() {
3728 return reinterpret_cast<Code*>( 3601 return reinterpret_cast<Code*>(
3729 Code::GetObjectFromEntryAddress(FIELD_ADDR(this, kCodeEntryOffset))); 3602 Code::GetObjectFromEntryAddress(FIELD_ADDR(this, kCodeEntryOffset)));
3730 } 3603 }
3731 3604
3732 3605
3733 void JSFunction::set_code(Code* value) { 3606 void JSFunction::set_code(Code* value) {
3734 // Skip the write barrier because code is never in new space.
3735 ASSERT(!HEAP->InNewSpace(value)); 3607 ASSERT(!HEAP->InNewSpace(value));
3736 Address entry = value->entry(); 3608 Address entry = value->entry();
3737 WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry)); 3609 WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry));
3610 GetHeap()->incremental_marking()->RecordWriteOfCodeEntry(
3611 this,
3612 HeapObject::RawField(this, kCodeEntryOffset),
3613 value);
3738 } 3614 }
3739 3615
3740 3616
3741 void JSFunction::ReplaceCode(Code* code) { 3617 void JSFunction::ReplaceCode(Code* code) {
3742 bool was_optimized = IsOptimized(); 3618 bool was_optimized = IsOptimized();
3743 bool is_optimized = code->kind() == Code::OPTIMIZED_FUNCTION; 3619 bool is_optimized = code->kind() == Code::OPTIMIZED_FUNCTION;
3744 3620
3745 set_code(code); 3621 set_code(code);
3746 3622
3747 // Add/remove the function from the list of optimized functions for this 3623 // Add/remove the function from the list of optimized functions for this
(...skipping 19 matching lines...)
3767 3643
3768 SharedFunctionInfo* JSFunction::unchecked_shared() { 3644 SharedFunctionInfo* JSFunction::unchecked_shared() {
3769 return reinterpret_cast<SharedFunctionInfo*>( 3645 return reinterpret_cast<SharedFunctionInfo*>(
3770 READ_FIELD(this, kSharedFunctionInfoOffset)); 3646 READ_FIELD(this, kSharedFunctionInfoOffset));
3771 } 3647 }
3772 3648
3773 3649
3774 void JSFunction::set_context(Object* value) { 3650 void JSFunction::set_context(Object* value) {
3775 ASSERT(value->IsUndefined() || value->IsContext()); 3651 ASSERT(value->IsUndefined() || value->IsContext());
3776 WRITE_FIELD(this, kContextOffset, value); 3652 WRITE_FIELD(this, kContextOffset, value);
3777 WRITE_BARRIER(this, kContextOffset); 3653 WRITE_BARRIER(GetHeap(), this, kContextOffset, value);
3778 } 3654 }
3779 3655
3780 ACCESSORS(JSFunction, prototype_or_initial_map, Object, 3656 ACCESSORS(JSFunction, prototype_or_initial_map, Object,
3781 kPrototypeOrInitialMapOffset) 3657 kPrototypeOrInitialMapOffset)
3782 3658
3783 3659
3784 Map* JSFunction::initial_map() { 3660 Map* JSFunction::initial_map() {
3785 return Map::cast(prototype_or_initial_map()); 3661 return Map::cast(prototype_or_initial_map());
3786 } 3662 }
3787 3663
(...skipping 53 matching lines...)
3841 Object* JSBuiltinsObject::javascript_builtin(Builtins::JavaScript id) { 3717 Object* JSBuiltinsObject::javascript_builtin(Builtins::JavaScript id) {
3842 ASSERT(id < kJSBuiltinsCount); // id is unsigned. 3718 ASSERT(id < kJSBuiltinsCount); // id is unsigned.
3843 return READ_FIELD(this, OffsetOfFunctionWithId(id)); 3719 return READ_FIELD(this, OffsetOfFunctionWithId(id));
3844 } 3720 }
3845 3721
3846 3722
3847 void JSBuiltinsObject::set_javascript_builtin(Builtins::JavaScript id, 3723 void JSBuiltinsObject::set_javascript_builtin(Builtins::JavaScript id,
3848 Object* value) { 3724 Object* value) {
3849 ASSERT(id < kJSBuiltinsCount); // id is unsigned. 3725 ASSERT(id < kJSBuiltinsCount); // id is unsigned.
3850 WRITE_FIELD(this, OffsetOfFunctionWithId(id), value); 3726 WRITE_FIELD(this, OffsetOfFunctionWithId(id), value);
3851 WRITE_BARRIER(this, OffsetOfFunctionWithId(id)); 3727 WRITE_BARRIER(GetHeap(), this, OffsetOfFunctionWithId(id), value);
3852 } 3728 }
3853 3729
3854 3730
3855 Code* JSBuiltinsObject::javascript_builtin_code(Builtins::JavaScript id) { 3731 Code* JSBuiltinsObject::javascript_builtin_code(Builtins::JavaScript id) {
3856 ASSERT(id < kJSBuiltinsCount); // id is unsigned. 3732 ASSERT(id < kJSBuiltinsCount); // id is unsigned.
3857 return Code::cast(READ_FIELD(this, OffsetOfCodeWithId(id))); 3733 return Code::cast(READ_FIELD(this, OffsetOfCodeWithId(id)));
3858 } 3734 }
3859 3735
3860 3736
3861 void JSBuiltinsObject::set_javascript_builtin_code(Builtins::JavaScript id, 3737 void JSBuiltinsObject::set_javascript_builtin_code(Builtins::JavaScript id,
(...skipping 184 matching lines...)
4046 FixedArray::cast(data())->set(index, value); 3922 FixedArray::cast(data())->set(index, value);
4047 } 3923 }
4048 3924
4049 3925
4050 void JSRegExp::SetDataAtUnchecked(int index, Object* value, Heap* heap) { 3926 void JSRegExp::SetDataAtUnchecked(int index, Object* value, Heap* heap) {
4051 ASSERT(index >= kDataIndex); // Only implementation data can be set this way. 3927 ASSERT(index >= kDataIndex); // Only implementation data can be set this way.
4052 FixedArray* fa = reinterpret_cast<FixedArray*>(data()); 3928 FixedArray* fa = reinterpret_cast<FixedArray*>(data());
4053 if (value->IsSmi()) { 3929 if (value->IsSmi()) {
4054 fa->set_unchecked(index, Smi::cast(value)); 3930 fa->set_unchecked(index, Smi::cast(value));
4055 } else { 3931 } else {
3932 // We only do this during GC, so we don't need to notify the write barrier.
4056 fa->set_unchecked(heap, index, value, SKIP_WRITE_BARRIER); 3933 fa->set_unchecked(heap, index, value, SKIP_WRITE_BARRIER);
4057 } 3934 }
4058 } 3935 }
4059 3936
4060 3937
4061 ElementsKind JSObject::GetElementsKind() { 3938 ElementsKind JSObject::GetElementsKind() {
4062 ElementsKind kind = map()->elements_kind(); 3939 ElementsKind kind = map()->elements_kind();
4063 ASSERT((kind == FAST_ELEMENTS && 3940 ASSERT((kind == FAST_ELEMENTS &&
4064 (elements()->map() == GetHeap()->fixed_array_map() || 3941 (elements()->map() == GetHeap()->fixed_array_map() ||
4065 elements()->map() == GetHeap()->fixed_cow_array_map())) || 3942 elements()->map() == GetHeap()->fixed_cow_array_map())) ||
(...skipping 454 matching lines...)
4520 } else if (!GetHeap()->new_space()->Contains(elts) && 4397 } else if (!GetHeap()->new_space()->Contains(elts) &&
4521 required_size < kArraySizeThatFitsComfortablyInNewSpace) { 4398 required_size < kArraySizeThatFitsComfortablyInNewSpace) {
4522 // Expand will allocate a new backing store in new space even if the size 4399 // Expand will allocate a new backing store in new space even if the size
4523 // we asked for isn't larger than what we had before. 4400 // we asked for isn't larger than what we had before.
4524 Expand(required_size); 4401 Expand(required_size);
4525 } 4402 }
4526 } 4403 }
4527 4404
4528 4405
4529 void JSArray::set_length(Smi* length) { 4406 void JSArray::set_length(Smi* length) {
4407 // Don't need a write barrier for a Smi.
4530 set_length(static_cast<Object*>(length), SKIP_WRITE_BARRIER); 4408 set_length(static_cast<Object*>(length), SKIP_WRITE_BARRIER);
4531 } 4409 }
4532 4410
4533 4411
4534 void JSArray::SetContent(FixedArray* storage) { 4412 void JSArray::SetContent(FixedArray* storage) {
4535 set_length(Smi::FromInt(storage->length())); 4413 set_length(Smi::FromInt(storage->length()));
4536 set_elements(storage); 4414 set_elements(storage);
4537 } 4415 }
4538 4416
4539 4417
(...skipping 103 matching lines...)
4643 #undef WRITE_INT_FIELD 4521 #undef WRITE_INT_FIELD
4644 #undef READ_SHORT_FIELD 4522 #undef READ_SHORT_FIELD
4645 #undef WRITE_SHORT_FIELD 4523 #undef WRITE_SHORT_FIELD
4646 #undef READ_BYTE_FIELD 4524 #undef READ_BYTE_FIELD
4647 #undef WRITE_BYTE_FIELD 4525 #undef WRITE_BYTE_FIELD
4648 4526
4649 4527
4650 } } // namespace v8::internal 4528 } } // namespace v8::internal
4651 4529
4652 #endif // V8_OBJECTS_INL_H_ 4530 #endif // V8_OBJECTS_INL_H_