Chromium Code Reviews
Unified diff: src/objects-inl.h

Issue 8139027: Version 3.6.5 (Closed) Base URL: http://v8.googlecode.com/svn/trunk/
Patch Set: '' Created 9 years, 2 months ago
 // Copyright 2011 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 25 matching lines...)
 #define V8_OBJECTS_INL_H_

 #include "elements.h"
 #include "objects.h"
 #include "contexts.h"
 #include "conversions-inl.h"
 #include "heap.h"
 #include "isolate.h"
 #include "property.h"
 #include "spaces.h"
+#include "store-buffer.h"
 #include "v8memory.h"

+#include "incremental-marking.h"
+
 namespace v8 {
 namespace internal {

 PropertyDetails::PropertyDetails(Smi* smi) {
   value_ = smi->value();
 }


 Smi* PropertyDetails::AsSmi() {
   return Smi::FromInt(value_);
(...skipping 15 matching lines...)

 #define INT_ACCESSORS(holder, name, offset) \
   int holder::name() { return READ_INT_FIELD(this, offset); } \
   void holder::set_##name(int value) { WRITE_INT_FIELD(this, offset, value); }


 #define ACCESSORS(holder, name, type, offset) \
   type* holder::name() { return type::cast(READ_FIELD(this, offset)); } \
   void holder::set_##name(type* value, WriteBarrierMode mode) { \
     WRITE_FIELD(this, offset, value); \
-    CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, mode); \
-  }
-
-
-// GC-safe accessors do not use HeapObject::GetHeap(), but access TLS instead.
-#define ACCESSORS_GCSAFE(holder, name, type, offset) \
-  type* holder::name() { return type::cast(READ_FIELD(this, offset)); } \
-  void holder::set_##name(type* value, WriteBarrierMode mode) { \
-    WRITE_FIELD(this, offset, value); \
-    CONDITIONAL_WRITE_BARRIER(HEAP, this, offset, mode); \
+    CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode); \
   }


 #define SMI_ACCESSORS(holder, name, offset) \
   int holder::name() { \
     Object* value = READ_FIELD(this, offset); \
     return Smi::cast(value)->value(); \
   } \
   void holder::set_##name(int value) { \
     WRITE_FIELD(this, offset, Smi::FromInt(value)); \
(...skipping 37 matching lines...)
 bool Object::IsSmi() {
   return HAS_SMI_TAG(this);
 }


 bool Object::IsHeapObject() {
   return Internals::HasHeapObjectTag(this);
 }


+bool Object::NonFailureIsHeapObject() {
+  ASSERT(!this->IsFailure());
+  return (reinterpret_cast<intptr_t>(this) & kSmiTagMask) != 0;
+}
+
+
 bool Object::IsHeapNumber() {
   return Object::IsHeapObject()
       && HeapObject::cast(this)->map()->instance_type() == HEAP_NUMBER_TYPE;
 }


 bool Object::IsString() {
   return Object::IsHeapObject()
       && HeapObject::cast(this)->map()->instance_type() < FIRST_NONSTRING_TYPE;
 }


 bool Object::IsSpecObject() {
   return Object::IsHeapObject()
       && HeapObject::cast(this)->map()->instance_type() >= FIRST_SPEC_OBJECT_TYPE;
 }


+bool Object::IsSpecFunction() {
+  if (!Object::IsHeapObject()) return false;
+  InstanceType type = HeapObject::cast(this)->map()->instance_type();
+  return type == JS_FUNCTION_TYPE || type == JS_FUNCTION_PROXY_TYPE;
+}
+
+
 bool Object::IsSymbol() {
   if (!this->IsHeapObject()) return false;
   uint32_t type = HeapObject::cast(this)->map()->instance_type();
   // Because the symbol tag is non-zero and no non-string types have the
   // symbol bit set we can test for symbols with a very simple test
   // operation.
   STATIC_ASSERT(kSymbolTag != 0);
   ASSERT(kNotStringTag + kIsSymbolMask > LAST_TYPE);
   return (type & kIsSymbolMask) != 0;
 }
(...skipping 217 matching lines...)
   return IsSmi() || IsHeapNumber();
 }


 bool Object::IsByteArray() {
   return Object::IsHeapObject()
       && HeapObject::cast(this)->map()->instance_type() == BYTE_ARRAY_TYPE;
 }


+bool Object::IsFreeSpace() {
+  return Object::IsHeapObject()
+      && HeapObject::cast(this)->map()->instance_type() == FREE_SPACE_TYPE;
+}
+
+
+bool Object::IsFiller() {
+  if (!Object::IsHeapObject()) return false;
+  InstanceType instance_type = HeapObject::cast(this)->map()->instance_type();
+  return instance_type == FREE_SPACE_TYPE || instance_type == FILLER_TYPE;
+}
+
+
 bool Object::IsExternalPixelArray() {
   return Object::IsHeapObject() &&
       HeapObject::cast(this)->map()->instance_type() ==
           EXTERNAL_PIXEL_ARRAY_TYPE;
 }


 bool Object::IsExternalArray() {
   if (!Object::IsHeapObject())
     return false;
(...skipping 87 matching lines...)
 }


 Failure* Failure::cast(MaybeObject* obj) {
   ASSERT(HAS_FAILURE_TAG(obj));
   return reinterpret_cast<Failure*>(obj);
 }


 bool Object::IsJSReceiver() {
+  STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
   return IsHeapObject() &&
       HeapObject::cast(this)->map()->instance_type() >= FIRST_JS_RECEIVER_TYPE;
 }


 bool Object::IsJSObject() {
-  return IsJSReceiver() && !IsJSProxy();
+  STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE);
+  return IsHeapObject() &&
+      HeapObject::cast(this)->map()->instance_type() >= FIRST_JS_OBJECT_TYPE;
 }


 bool Object::IsJSProxy() {
-  return Object::IsHeapObject() &&
-      (HeapObject::cast(this)->map()->instance_type() == JS_PROXY_TYPE ||
-       HeapObject::cast(this)->map()->instance_type() == JS_FUNCTION_PROXY_TYPE);
+  if (!Object::IsHeapObject()) return false;
+  InstanceType type = HeapObject::cast(this)->map()->instance_type();
+  return FIRST_JS_PROXY_TYPE <= type && type <= LAST_JS_PROXY_TYPE;
 }


 bool Object::IsJSFunctionProxy() {
   return Object::IsHeapObject() &&
       HeapObject::cast(this)->map()->instance_type() == JS_FUNCTION_PROXY_TYPE;
 }


 bool Object::IsJSWeakMap() {
(...skipping 99 matching lines...)
 }


 bool Object::IsCode() {
   return Object::IsHeapObject()
       && HeapObject::cast(this)->map()->instance_type() == CODE_TYPE;
 }


 bool Object::IsOddball() {
-  ASSERT(HEAP->is_safe_to_read_maps());
   return Object::IsHeapObject()
       && HeapObject::cast(this)->map()->instance_type() == ODDBALL_TYPE;
 }


 bool Object::IsJSGlobalPropertyCell() {
   return Object::IsHeapObject()
       && HeapObject::cast(this)->map()->instance_type()
          == JS_GLOBAL_PROPERTY_CELL_TYPE;
 }
(...skipping 276 matching lines...)

 #define FIELD_ADDR(p, offset) \
   (reinterpret_cast<byte*>(p) + offset - kHeapObjectTag)

 #define READ_FIELD(p, offset) \
   (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)))

 #define WRITE_FIELD(p, offset, value) \
   (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)) = value)

-// TODO(isolates): Pass heap in to these macros.
-#define WRITE_BARRIER(object, offset) \
-  object->GetHeap()->RecordWrite(object->address(), offset);
+#define WRITE_BARRIER(heap, object, offset, value) \
+  heap->incremental_marking()->RecordWrite( \
+      object, HeapObject::RawField(object, offset), value); \
+  if (heap->InNewSpace(value)) { \
+    heap->RecordWrite(object->address(), offset); \
+  }

-// CONDITIONAL_WRITE_BARRIER must be issued after the actual
-// write due to the assert validating the written value.
-#define CONDITIONAL_WRITE_BARRIER(heap, object, offset, mode) \
-  if (mode == UPDATE_WRITE_BARRIER) { \
-    heap->RecordWrite(object->address(), offset); \
-  } else { \
-    ASSERT(mode == SKIP_WRITE_BARRIER); \
-    ASSERT(heap->InNewSpace(object) || \
-           !heap->InNewSpace(READ_FIELD(object, offset)) || \
-           Page::FromAddress(object->address())-> \
-               IsRegionDirty(object->address() + offset)); \
-  }
+#define CONDITIONAL_WRITE_BARRIER(heap, object, offset, value, mode) \
+  if (mode == UPDATE_WRITE_BARRIER) { \
+    heap->incremental_marking()->RecordWrite( \
+        object, HeapObject::RawField(object, offset), value); \
+    if (heap->InNewSpace(value)) { \
+      heap->RecordWrite(object->address(), offset); \
+    } \
+  }
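
With incremental marking, the barrier has to see the written value as well as the slot, so both macros gain heap and value parameters: every store is reported to the incremental marker, and the store buffer additionally remembers old-to-new-space pointers. For context, a minimal sketch of what an ACCESSORS-generated setter expands to under the new macros (hypothetical expansion, reformatted for readability; not part of the patch):

    // Expansion of ACCESSORS(JSObject, properties, FixedArray, kPropertiesOffset).
    FixedArray* JSObject::properties() {
      return FixedArray::cast(READ_FIELD(this, kPropertiesOffset));
    }
    void JSObject::set_properties(FixedArray* value, WriteBarrierMode mode) {
      WRITE_FIELD(this, kPropertiesOffset, value);
      if (mode == UPDATE_WRITE_BARRIER) {
        // The incremental marker records the exact slot and value...
        GetHeap()->incremental_marking()->RecordWrite(
            this, HeapObject::RawField(this, kPropertiesOffset), value);
        // ...and the store buffer tracks old-to-new pointers.
        if (GetHeap()->InNewSpace(value)) {
          GetHeap()->RecordWrite(address(), kPropertiesOffset);
        }
      }
    }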

 #ifndef V8_TARGET_ARCH_MIPS
 #define READ_DOUBLE_FIELD(p, offset) \
   (*reinterpret_cast<double*>(FIELD_ADDR(p, offset)))
 #else  // V8_TARGET_ARCH_MIPS
 // Prevent gcc from using load-double (mips ldc1) on (possibly)
 // non-64-bit aligned HeapNumber::value.
 static inline double read_double_field(void* p, int offset) {
   union conversion {
     double d;
     uint32_t u[2];
   } c;
   c.u[0] = (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)));
   c.u[1] = (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset + 4)));
   return c.d;
 }
 #define READ_DOUBLE_FIELD(p, offset) read_double_field(p, offset)
 #endif  // V8_TARGET_ARCH_MIPS

-
 #ifndef V8_TARGET_ARCH_MIPS
 #define WRITE_DOUBLE_FIELD(p, offset, value) \
   (*reinterpret_cast<double*>(FIELD_ADDR(p, offset)) = value)
 #else  // V8_TARGET_ARCH_MIPS
 // Prevent gcc from using store-double (mips sdc1) on (possibly)
 // non-64-bit aligned HeapNumber::value.
 static inline void write_double_field(void* p, int offset,
                                        double value) {
   union conversion {
     double d;
(...skipping 174 matching lines...)
   return MapWord(reinterpret_cast<uintptr_t>(raw));
 }


 HeapObject* MapWord::ToForwardingAddress() {
   ASSERT(IsForwardingAddress());
   return HeapObject::FromAddress(reinterpret_cast<Address>(value_));
 }


-bool MapWord::IsMarked() {
-  return (value_ & kMarkingMask) == 0;
-}
-
-
-void MapWord::SetMark() {
-  value_ &= ~kMarkingMask;
-}
-
-
-void MapWord::ClearMark() {
-  value_ |= kMarkingMask;
-}
-
-
-bool MapWord::IsOverflowed() {
-  return (value_ & kOverflowMask) != 0;
-}
-
-
-void MapWord::SetOverflow() {
-  value_ |= kOverflowMask;
-}
-
-
-void MapWord::ClearOverflow() {
-  value_ &= ~kOverflowMask;
-}
-
-
-MapWord MapWord::EncodeAddress(Address map_address, int offset) {
-  // Offset is the distance in live bytes from the first live object in the
-  // same page. The offset between two objects in the same page should not
-  // exceed the object area size of a page.
-  ASSERT(0 <= offset && offset < Page::kObjectAreaSize);
-
-  uintptr_t compact_offset = offset >> kObjectAlignmentBits;
-  ASSERT(compact_offset < (1 << kForwardingOffsetBits));
-
-  Page* map_page = Page::FromAddress(map_address);
-  ASSERT_MAP_PAGE_INDEX(map_page->mc_page_index);
-
-  uintptr_t map_page_offset =
-      map_page->Offset(map_address) >> kMapAlignmentBits;
-
-  uintptr_t encoding =
-      (compact_offset << kForwardingOffsetShift) |
-      (map_page_offset << kMapPageOffsetShift) |
-      (map_page->mc_page_index << kMapPageIndexShift);
-  return MapWord(encoding);
-}
-
-
-Address MapWord::DecodeMapAddress(MapSpace* map_space) {
-  int map_page_index =
-      static_cast<int>((value_ & kMapPageIndexMask) >> kMapPageIndexShift);
-  ASSERT_MAP_PAGE_INDEX(map_page_index);
-
-  int map_page_offset = static_cast<int>(
-      ((value_ & kMapPageOffsetMask) >> kMapPageOffsetShift) <<
-      kMapAlignmentBits);
-
-  return (map_space->PageAddress(map_page_index) + map_page_offset);
-}
-
-
-int MapWord::DecodeOffset() {
-  // The offset field is represented in the kForwardingOffsetBits
-  // most-significant bits.
-  uintptr_t offset = (value_ >> kForwardingOffsetShift) << kObjectAlignmentBits;
-  ASSERT(offset < static_cast<uintptr_t>(Page::kObjectAreaSize));
-  return static_cast<int>(offset);
-}
-
-
-MapWord MapWord::FromEncodedAddress(Address address) {
-  return MapWord(reinterpret_cast<uintptr_t>(address));
-}
-
-
-Address MapWord::ToEncodedAddress() {
-  return reinterpret_cast<Address>(value_);
-}
-
-
 #ifdef DEBUG
 void HeapObject::VerifyObjectField(int offset) {
   VerifyPointer(READ_FIELD(this, offset));
 }

 void HeapObject::VerifySmiField(int offset) {
   ASSERT(READ_FIELD(this, offset)->IsSmi());
 }
 #endif


 Heap* HeapObject::GetHeap() {
-  // During GC, the map pointer in HeapObject is used in various ways that
-  // prevent us from retrieving Heap from the map.
-  // Assert that we are not in GC, implement GC code in a way that it doesn't
-  // pull heap from the map.
-  ASSERT(HEAP->is_safe_to_read_maps());
-  return map()->heap();
+  Heap* heap =
+      MemoryChunk::FromAddress(reinterpret_cast<Address>(this))->heap();
+  ASSERT(heap != NULL);
+  ASSERT(heap->isolate() == Isolate::Current());
+  return heap;
 }


 Isolate* HeapObject::GetIsolate() {
   return GetHeap()->isolate();
 }


 Map* HeapObject::map() {
   return map_word().ToMap();
 }


 void HeapObject::set_map(Map* value) {
   set_map_word(MapWord::FromMap(value));
+  if (value != NULL) {
+    // TODO(1600) We are passing NULL as a slot because maps can never be on
+    // evacuation candidate.
+    value->GetHeap()->incremental_marking()->RecordWrite(this, NULL, value);
+  }
+}
+
+
+// Unsafe accessor omitting write barrier.
+void HeapObject::set_map_unsafe(Map* value) {
+  set_map_word(MapWord::FromMap(value));
 }


 MapWord HeapObject::map_word() {
   return MapWord(reinterpret_cast<uintptr_t>(READ_FIELD(this, kMapOffset)));
 }


 void HeapObject::set_map_word(MapWord map_word) {
   // WRITE_FIELD does not invoke write barrier, but there is no need
(...skipping 22 matching lines...)
   v->VisitPointers(reinterpret_cast<Object**>(FIELD_ADDR(this, start)),
                    reinterpret_cast<Object**>(FIELD_ADDR(this, end)));
 }


 void HeapObject::IteratePointer(ObjectVisitor* v, int offset) {
   v->VisitPointer(reinterpret_cast<Object**>(FIELD_ADDR(this, offset)));
 }


-bool HeapObject::IsMarked() {
-  return map_word().IsMarked();
-}
-
-
-void HeapObject::SetMark() {
-  ASSERT(!IsMarked());
-  MapWord first_word = map_word();
-  first_word.SetMark();
-  set_map_word(first_word);
-}
-
-
-void HeapObject::ClearMark() {
-  ASSERT(IsMarked());
-  MapWord first_word = map_word();
-  first_word.ClearMark();
-  set_map_word(first_word);
-}
-
-
-bool HeapObject::IsOverflowed() {
-  return map_word().IsOverflowed();
-}
-
-
-void HeapObject::SetOverflow() {
-  MapWord first_word = map_word();
-  first_word.SetOverflow();
-  set_map_word(first_word);
-}
-
-
-void HeapObject::ClearOverflow() {
-  ASSERT(IsOverflowed());
-  MapWord first_word = map_word();
-  first_word.ClearOverflow();
-  set_map_word(first_word);
-}
-
-
 double HeapNumber::value() {
   return READ_DOUBLE_FIELD(this, kValueOffset);
 }


 void HeapNumber::set_value(double value) {
   WRITE_DOUBLE_FIELD(this, kValueOffset, value);
 }

(...skipping 10 matching lines...)

 ACCESSORS(JSObject, properties, FixedArray, kPropertiesOffset)


 FixedArrayBase* JSObject::elements() {
   Object* array = READ_FIELD(this, kElementsOffset);
   ASSERT(array->HasValidElements());
   return static_cast<FixedArrayBase*>(array);
 }

+void JSObject::ValidateSmiOnlyElements() {
+#if DEBUG
+  if (FLAG_smi_only_arrays &&
+      map()->elements_kind() == FAST_SMI_ONLY_ELEMENTS) {
+    Heap* heap = GetHeap();
+    // Don't use elements, since integrity checks will fail if there
+    // are filler pointers in the array.
+    FixedArray* fixed_array =
+        reinterpret_cast<FixedArray*>(READ_FIELD(this, kElementsOffset));
+    Map* map = fixed_array->map();
+    // Arrays that have been shifted in place can't be verified.
+    if (map != heap->raw_unchecked_one_pointer_filler_map() &&
+        map != heap->raw_unchecked_two_pointer_filler_map() &&
+        map != heap->free_space_map()) {
+      for (int i = 0; i < fixed_array->length(); i++) {
+        Object* current = fixed_array->get(i);
+        ASSERT(current->IsSmi() || current == heap->the_hole_value());
+      }
+    }
+  }
+#endif
+}
+
+
+MaybeObject* JSObject::EnsureCanContainNonSmiElements() {
+#if DEBUG
+  ValidateSmiOnlyElements();
+#endif
+  if (FLAG_smi_only_arrays &&
+      (map()->elements_kind() == FAST_SMI_ONLY_ELEMENTS)) {
+    Object* obj;
+    MaybeObject* maybe_obj = GetElementsTransitionMap(FAST_ELEMENTS);
+    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
+    set_map(Map::cast(obj));
+  }
+  return this;
+}
+
+
+MaybeObject* JSObject::EnsureCanContainElements(Object** objects,
+                                                uint32_t count) {
+  if (FLAG_smi_only_arrays &&
+      map()->elements_kind() == FAST_SMI_ONLY_ELEMENTS) {
+    for (uint32_t i = 0; i < count; ++i) {
+      Object* current = *objects++;
+      if (!current->IsSmi() && current != GetHeap()->the_hole_value()) {
+        return EnsureCanContainNonSmiElements();
+      }
+    }
+  }
+  return this;
+}
+
+
+MaybeObject* JSObject::EnsureCanContainElements(FixedArray* elements) {
+  if (FLAG_smi_only_arrays) {
+    Object** objects = reinterpret_cast<Object**>(
+        FIELD_ADDR(elements, elements->OffsetOfElementAt(0)));
+    return EnsureCanContainElements(objects, elements->length());
+  } else {
+    return this;
+  }
+}
+
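The new helpers maintain the FAST_SMI_ONLY_ELEMENTS invariant: an array stays in smi-only mode until a value that is neither a smi nor the hole is about to be stored, at which point the object transitions to a FAST_ELEMENTS map. A hedged sketch of the intended call pattern (the surrounding function and its bounds check are hypothetical; the helpers are the ones defined above):

    // Illustration only: ensure the receiver can hold the incoming values
    // before writing them into its fast elements (assumes count does not
    // exceed the elements length).
    MaybeObject* StoreMany(JSObject* object, Object** values, uint32_t count) {
      Object* ok;
      { // May transition FAST_SMI_ONLY_ELEMENTS -> FAST_ELEMENTS and install
        // a new map; allocation failures propagate to the caller.
        MaybeObject* maybe = object->EnsureCanContainElements(values, count);
        if (!maybe->ToObject(&ok)) return maybe;
      }
      FixedArray* elms = FixedArray::cast(object->elements());
      for (uint32_t i = 0; i < count; i++) {
        elms->set(i, values[i]);
      }
      return object;
    }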

 void JSObject::set_elements(FixedArrayBase* value, WriteBarrierMode mode) {
-  ASSERT(map()->has_fast_elements() ==
+  ASSERT((map()->has_fast_elements() ||
+          map()->has_fast_smi_only_elements()) ==
          (value->map() == GetHeap()->fixed_array_map() ||
           value->map() == GetHeap()->fixed_cow_array_map()));
   ASSERT(map()->has_fast_double_elements() ==
          value->IsFixedDoubleArray());
   ASSERT(value->HasValidElements());
+#ifdef DEBUG
+  ValidateSmiOnlyElements();
+#endif
   WRITE_FIELD(this, kElementsOffset, value);
-  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kElementsOffset, mode);
+  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kElementsOffset, value, mode);
 }


 void JSObject::initialize_properties() {
   ASSERT(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
   WRITE_FIELD(this, kPropertiesOffset, GetHeap()->empty_fixed_array());
 }


 void JSObject::initialize_elements() {
-  ASSERT(map()->has_fast_elements());
+  ASSERT(map()->has_fast_elements() || map()->has_fast_smi_only_elements());
   ASSERT(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
   WRITE_FIELD(this, kElementsOffset, GetHeap()->empty_fixed_array());
 }


 MaybeObject* JSObject::ResetElements() {
   Object* obj;
-  { MaybeObject* maybe_obj = map()->GetFastElementsMap();
-    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
-  }
+  ElementsKind elements_kind = FLAG_smi_only_arrays
+      ? FAST_SMI_ONLY_ELEMENTS
+      : FAST_ELEMENTS;
+  MaybeObject* maybe_obj = GetElementsTransitionMap(elements_kind);
+  if (!maybe_obj->ToObject(&obj)) return maybe_obj;
   set_map(Map::cast(obj));
   initialize_elements();
   return this;
 }


 ACCESSORS(Oddball, to_string, String, kToStringOffset)
 ACCESSORS(Oddball, to_number, Object, kToNumberOffset)


 byte Oddball::kind() {
-  return READ_BYTE_FIELD(this, kKindOffset);
+  return Smi::cast(READ_FIELD(this, kKindOffset))->value();
 }


 void Oddball::set_kind(byte value) {
-  WRITE_BYTE_FIELD(this, kKindOffset, value);
+  WRITE_FIELD(this, kKindOffset, Smi::FromInt(value));
 }


 Object* JSGlobalPropertyCell::value() {
   return READ_FIELD(this, kValueOffset);
 }


 void JSGlobalPropertyCell::set_value(Object* val, WriteBarrierMode ignored) {
   // The write barrier is not used for global property cells.
   ASSERT(!val->IsJSGlobalPropertyCell());
   WRITE_FIELD(this, kValueOffset, val);
+  GetHeap()->incremental_marking()->RecordWrite(
+      this, HeapObject::RawField(this, kValueOffset), val);
 }


 int JSObject::GetHeaderSize() {
   InstanceType type = map()->instance_type();
   // Check for the most common kind of JavaScript object before
   // falling into the generic switch. This speeds up the internal
   // field operations considerably on average.
   if (type == JS_OBJECT_TYPE) return JSObject::kHeaderSize;
   switch (type) {
(...skipping 48 matching lines...)
 }


 void JSObject::SetInternalField(int index, Object* value) {
   ASSERT(index < GetInternalFieldCount() && index >= 0);
   // Internal objects do follow immediately after the header, whereas in-object
   // properties are at the end of the object. Therefore there is no need
   // to adjust the index here.
   int offset = GetHeaderSize() + (kPointerSize * index);
   WRITE_FIELD(this, offset, value);
-  WRITE_BARRIER(this, offset);
+  WRITE_BARRIER(GetHeap(), this, offset, value);
 }


 // Access fast-case object properties at index. The use of these routines
 // is needed to correctly distinguish between properties stored in-object and
 // properties stored in the properties array.
 Object* JSObject::FastPropertyAt(int index) {
   // Adjust for the number of properties stored in the object.
   index -= map()->inobject_properties();
   if (index < 0) {
     int offset = map()->instance_size() + (index * kPointerSize);
     return READ_FIELD(this, offset);
   } else {
     ASSERT(index < properties()->length());
     return properties()->get(index);
   }
 }


 Object* JSObject::FastPropertyAtPut(int index, Object* value) {
   // Adjust for the number of properties stored in the object.
   index -= map()->inobject_properties();
   if (index < 0) {
     int offset = map()->instance_size() + (index * kPointerSize);
     WRITE_FIELD(this, offset, value);
-    WRITE_BARRIER(this, offset);
+    WRITE_BARRIER(GetHeap(), this, offset, value);
   } else {
     ASSERT(index < properties()->length());
     properties()->set(index, value);
   }
   return value;
 }


 int JSObject::GetInObjectPropertyOffset(int index) {
   // Adjust for the number of properties stored in the object.
(...skipping 13 matching lines...)


 Object* JSObject::InObjectPropertyAtPut(int index,
                                         Object* value,
                                         WriteBarrierMode mode) {
   // Adjust for the number of properties stored in the object.
   index -= map()->inobject_properties();
   ASSERT(index < 0);
   int offset = map()->instance_size() + (index * kPointerSize);
   WRITE_FIELD(this, offset, value);
-  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, mode);
+  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);
   return value;
 }



-void JSObject::InitializeBody(int object_size, Object* value) {
-  ASSERT(!value->IsHeapObject() || !GetHeap()->InNewSpace(value));
-  for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
-    WRITE_FIELD(this, offset, value);
+void JSObject::InitializeBody(Map* map,
+                              Object* pre_allocated_value,
+                              Object* filler_value) {
+  ASSERT(!filler_value->IsHeapObject() ||
+         !GetHeap()->InNewSpace(filler_value));
+  ASSERT(!pre_allocated_value->IsHeapObject() ||
+         !GetHeap()->InNewSpace(pre_allocated_value));
+  int size = map->instance_size();
+  int offset = kHeaderSize;
+  if (filler_value != pre_allocated_value) {
+    int pre_allocated = map->pre_allocated_property_fields();
+    ASSERT(pre_allocated * kPointerSize + kHeaderSize <= size);
+    for (int i = 0; i < pre_allocated; i++) {
+      WRITE_FIELD(this, offset, pre_allocated_value);
+      offset += kPointerSize;
+    }
+  }
+  while (offset < size) {
+    WRITE_FIELD(this, offset, filler_value);
+    offset += kPointerSize;
   }
 }
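
InitializeBody now takes the map so it can distinguish the pre-allocated in-object property slots from the trailing slack. Two hypothetical call sites, for illustration only (the filler choices below are assumptions, not lifted from this patch):

    // Simple case: filler == pre-allocated value, so the pre-allocated loop
    // is skipped and every field is set to undefined.
    obj->InitializeBody(map, heap->undefined_value(), heap->undefined_value());

    // Slack-tracking case: pre-allocated property slots get undefined, while
    // the not-yet-trimmed tail is stamped with a distinct filler value.
    obj->InitializeBody(map,
                        heap->undefined_value(),
                        heap->one_pointer_filler_map());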


 bool JSObject::HasFastProperties() {
   return !properties()->IsDictionary();
 }


 int JSObject::MaxFastProperties() {
(...skipping 65 matching lines...)
   int offset = kHeaderSize + index * kPointerSize;
   WRITE_FIELD(this, offset, value);
 }


 void FixedArray::set(int index, Object* value) {
   ASSERT(map() != HEAP->fixed_cow_array_map());
   ASSERT(index >= 0 && index < this->length());
   int offset = kHeaderSize + index * kPointerSize;
   WRITE_FIELD(this, offset, value);
-  WRITE_BARRIER(this, offset);
+  WRITE_BARRIER(GetHeap(), this, offset, value);
 }


 inline bool FixedDoubleArray::is_the_hole_nan(double value) {
   return BitCast<uint64_t, double>(value) == kHoleNanInt64;
 }


 inline double FixedDoubleArray::hole_nan_as_double() {
   return BitCast<double, uint64_t>(kHoleNanInt64);
(...skipping 64 matching lines...)
   int offset = kHeaderSize + old_length * kDoubleSize;
   for (int current = from->length(); current < length(); ++current) {
     WRITE_DOUBLE_FIELD(this, offset, hole_nan_as_double());
     offset += kDoubleSize;
   }
 }


 void FixedDoubleArray::Initialize(FixedArray* from) {
   int old_length = from->length();
-  ASSERT(old_length < length());
+  ASSERT(old_length <= length());
   for (int i = 0; i < old_length; i++) {
     Object* hole_or_object = from->get(i);
     if (hole_or_object->IsTheHole()) {
       set_the_hole(i);
     } else {
       set(i, hole_or_object->Number());
     }
   }
   int offset = kHeaderSize + old_length * kDoubleSize;
   for (int current = from->length(); current < length(); ++current) {
(...skipping 13 matching lines...)
     Object* key = from->KeyAt(i);
     if (key->IsNumber()) {
       uint32_t entry = static_cast<uint32_t>(key->Number());
       set(entry, from->ValueAt(i)->Number());
     }
   }
 }


 WriteBarrierMode HeapObject::GetWriteBarrierMode(const AssertNoAllocation&) {
-  if (GetHeap()->InNewSpace(this)) return SKIP_WRITE_BARRIER;
+  Heap* heap = GetHeap();
+  if (heap->incremental_marking()->IsMarking()) return UPDATE_WRITE_BARRIER;
+  if (heap->InNewSpace(this)) return SKIP_WRITE_BARRIER;
   return UPDATE_WRITE_BARRIER;
 }
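
Note the new ordering: while the incremental marker is running, the barrier may never be skipped, not even for new-space objects. A sketch of the usage pattern this accessor supports (the surrounding function is hypothetical; the AssertNoAllocation scope guarantees the cached mode cannot be invalidated by a GC):

    // Illustration only: decide the barrier mode once, outside the loop.
    void CopyFields(FixedArray* dst, FixedArray* src, int len) {
      AssertNoAllocation no_gc;
      WriteBarrierMode mode = dst->GetWriteBarrierMode(no_gc);
      for (int i = 0; i < len; i++) {
        dst->set(i, src->get(i), mode);  // Barrier skipped only when safe.
      }
    }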


 void FixedArray::set(int index,
                      Object* value,
                      WriteBarrierMode mode) {
   ASSERT(map() != HEAP->fixed_cow_array_map());
   ASSERT(index >= 0 && index < this->length());
   int offset = kHeaderSize + index * kPointerSize;
   WRITE_FIELD(this, offset, value);
-  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, mode);
+  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);
 }


 void FixedArray::fast_set(FixedArray* array, int index, Object* value) {
   ASSERT(array->map() != HEAP->raw_unchecked_fixed_cow_array_map());
   ASSERT(index >= 0 && index < array->length());
   ASSERT(!HEAP->InNewSpace(value));
   WRITE_FIELD(array, kHeaderSize + index * kPointerSize, value);
+  array->GetHeap()->incremental_marking()->RecordWrite(
+      array,
+      HeapObject::RawField(array, kHeaderSize + index * kPointerSize),
+      value);
 }


 void FixedArray::set_undefined(int index) {
   ASSERT(map() != HEAP->fixed_cow_array_map());
   set_undefined(GetHeap(), index);
 }


 void FixedArray::set_undefined(Heap* heap, int index) {
(...skipping 32 matching lines...)
   WRITE_FIELD(this, offset, value);
 }


 void FixedArray::set_unchecked(Heap* heap,
                                int index,
                                Object* value,
                                WriteBarrierMode mode) {
   int offset = kHeaderSize + index * kPointerSize;
   WRITE_FIELD(this, offset, value);
-  CONDITIONAL_WRITE_BARRIER(heap, this, offset, mode);
+  CONDITIONAL_WRITE_BARRIER(heap, this, offset, value, mode);
 }


 void FixedArray::set_null_unchecked(Heap* heap, int index) {
   ASSERT(index >= 0 && index < this->length());
   ASSERT(!HEAP->InNewSpace(heap->null_value()));
   WRITE_FIELD(this, kHeaderSize + index * kPointerSize, heap->null_value());
 }

(...skipping 258 matching lines...)
 CAST_ACCESSOR(JSGlobalObject)
 CAST_ACCESSOR(JSBuiltinsObject)
 CAST_ACCESSOR(Code)
 CAST_ACCESSOR(JSArray)
 CAST_ACCESSOR(JSRegExp)
 CAST_ACCESSOR(JSProxy)
 CAST_ACCESSOR(JSFunctionProxy)
 CAST_ACCESSOR(JSWeakMap)
 CAST_ACCESSOR(Foreign)
 CAST_ACCESSOR(ByteArray)
+CAST_ACCESSOR(FreeSpace)
 CAST_ACCESSOR(ExternalArray)
 CAST_ACCESSOR(ExternalByteArray)
 CAST_ACCESSOR(ExternalUnsignedByteArray)
 CAST_ACCESSOR(ExternalShortArray)
 CAST_ACCESSOR(ExternalUnsignedShortArray)
 CAST_ACCESSOR(ExternalIntArray)
 CAST_ACCESSOR(ExternalUnsignedIntArray)
 CAST_ACCESSOR(ExternalFloatArray)
 CAST_ACCESSOR(ExternalDoubleArray)
 CAST_ACCESSOR(ExternalPixelArray)
 CAST_ACCESSOR(Struct)


 #define MAKE_STRUCT_CAST(NAME, Name, name) CAST_ACCESSOR(Name)
 STRUCT_LIST(MAKE_STRUCT_CAST)
 #undef MAKE_STRUCT_CAST


 template <typename Shape, typename Key>
 HashTable<Shape, Key>* HashTable<Shape, Key>::cast(Object* obj) {
   ASSERT(obj->IsHashTable());
   return reinterpret_cast<HashTable*>(obj);
 }


 SMI_ACCESSORS(FixedArrayBase, length, kLengthOffset)
+SMI_ACCESSORS(FreeSpace, size, kSizeOffset)

 SMI_ACCESSORS(String, length, kLengthOffset)


 uint32_t String::hash_field() {
   return READ_UINT32_FIELD(this, kHashFieldOffset);
 }


 void String::set_hash_field(uint32_t value) {
(...skipping 136 matching lines...)
   return SizeFor(length());
 }


 String* SlicedString::parent() {
   return String::cast(READ_FIELD(this, kParentOffset));
 }


 void SlicedString::set_parent(String* parent) {
-  ASSERT(parent->IsSeqString());
+  ASSERT(parent->IsSeqString() || parent->IsExternalString());
   WRITE_FIELD(this, kParentOffset, parent);
 }


 SMI_ACCESSORS(SlicedString, offset, kOffsetOffset)


 String* ConsString::first() {
   return String::cast(READ_FIELD(this, kFirstOffset));
 }


 Object* ConsString::unchecked_first() {
   return READ_FIELD(this, kFirstOffset);
 }


 void ConsString::set_first(String* value, WriteBarrierMode mode) {
   WRITE_FIELD(this, kFirstOffset, value);
-  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kFirstOffset, mode);
+  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kFirstOffset, value, mode);
 }


 String* ConsString::second() {
   return String::cast(READ_FIELD(this, kSecondOffset));
 }


 Object* ConsString::unchecked_second() {
   return READ_FIELD(this, kSecondOffset);
 }


 void ConsString::set_second(String* value, WriteBarrierMode mode) {
   WRITE_FIELD(this, kSecondOffset, value);
-  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kSecondOffset, mode);
+  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kSecondOffset, value, mode);
 }


-ExternalAsciiString::Resource* ExternalAsciiString::resource() {
+const ExternalAsciiString::Resource* ExternalAsciiString::resource() {
   return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
 }


 void ExternalAsciiString::set_resource(
-    ExternalAsciiString::Resource* resource) {
-  *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)) = resource;
+    const ExternalAsciiString::Resource* resource) {
+  *reinterpret_cast<const Resource**>(
+      FIELD_ADDR(this, kResourceOffset)) = resource;
 }


-ExternalTwoByteString::Resource* ExternalTwoByteString::resource() {
+const ExternalTwoByteString::Resource* ExternalTwoByteString::resource() {
   return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
 }


 void ExternalTwoByteString::set_resource(
-    ExternalTwoByteString::Resource* resource) {
-  *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)) = resource;
+    const ExternalTwoByteString::Resource* resource) {
+  *reinterpret_cast<const Resource**>(
+      FIELD_ADDR(this, kResourceOffset)) = resource;
 }


 void JSFunctionResultCache::MakeZeroSize() {
   set_finger_index(kEntriesIndex);
   set_size(kEntriesIndex);
 }


 void JSFunctionResultCache::Clear() {
(...skipping 279 matching lines...)
   if (instance_type == FIXED_ARRAY_TYPE) {
     return FixedArray::BodyDescriptor::SizeOf(map, this);
   }
   if (instance_type == ASCII_STRING_TYPE) {
     return SeqAsciiString::SizeFor(
         reinterpret_cast<SeqAsciiString*>(this)->length());
   }
   if (instance_type == BYTE_ARRAY_TYPE) {
     return reinterpret_cast<ByteArray*>(this)->ByteArraySize();
   }
+  if (instance_type == FREE_SPACE_TYPE) {
+    return reinterpret_cast<FreeSpace*>(this)->size();
+  }
   if (instance_type == STRING_TYPE) {
     return SeqTwoByteString::SizeFor(
         reinterpret_cast<SeqTwoByteString*>(this)->length());
   }
   if (instance_type == FIXED_DOUBLE_ARRAY_TYPE) {
     return FixedDoubleArray::SizeFor(
         reinterpret_cast<FixedDoubleArray*>(this)->length());
   }
   ASSERT(instance_type == CODE_TYPE);
   return reinterpret_cast<Code*>(this)->CodeSize();
(...skipping 141 matching lines...)
 bool Map::is_shared() {
   return ((1 << kIsShared) & bit_field3()) != 0;
 }


 JSFunction* Map::unchecked_constructor() {
   return reinterpret_cast<JSFunction*>(READ_FIELD(this, kConstructorOffset));
 }


-FixedArray* Map::unchecked_prototype_transitions() {
-  return reinterpret_cast<FixedArray*>(
-      READ_FIELD(this, kPrototypeTransitionsOffset));
-}
-
-
 Code::Flags Code::flags() {
   return static_cast<Flags>(READ_INT_FIELD(this, kFlagsOffset));
 }


 void Code::set_flags(Code::Flags flags) {
   STATIC_ASSERT(Code::NUMBER_OF_KINDS <= KindField::kMax + 1);
   // Make sure that all call stubs have an arguments count.
   ASSERT((ExtractKindFromFlags(flags) != CALL_IC &&
           ExtractKindFromFlags(flags) != KEYED_CALL_IC) ||
(...skipping 51 matching lines...)
   ASSERT(kind() == STUB ||
          kind() == UNARY_OP_IC ||
          kind() == BINARY_OP_IC ||
          kind() == COMPARE_IC ||
          kind() == TO_BOOLEAN_IC);
   ASSERT(0 <= major && major < 256);
   WRITE_BYTE_FIELD(this, kStubMajorKeyOffset, major);
 }


+bool Code::is_pregenerated() {
+  return kind() == STUB && IsPregeneratedField::decode(flags());
+}
+
+
+void Code::set_is_pregenerated(bool value) {
+  ASSERT(kind() == STUB);
+  Flags f = flags();
+  f = static_cast<Flags>(IsPregeneratedField::update(f, value));
+  set_flags(f);
+}
+
+
 bool Code::optimizable() {
   ASSERT(kind() == FUNCTION);
   return READ_BYTE_FIELD(this, kOptimizableOffset) == 1;
 }


 void Code::set_optimizable(bool value) {
   ASSERT(kind() == FUNCTION);
   WRITE_BYTE_FIELD(this, kOptimizableOffset, value ? 1 : 0);
 }
(...skipping 145 matching lines...)
3090 ASSERT(is_to_boolean_ic_stub()); 3102 ASSERT(is_to_boolean_ic_stub());
3091 return READ_BYTE_FIELD(this, kToBooleanTypeOffset); 3103 return READ_BYTE_FIELD(this, kToBooleanTypeOffset);
3092 } 3104 }
3093 3105
3094 3106
3095 void Code::set_to_boolean_state(byte value) { 3107 void Code::set_to_boolean_state(byte value) {
3096 ASSERT(is_to_boolean_ic_stub()); 3108 ASSERT(is_to_boolean_ic_stub());
3097 WRITE_BYTE_FIELD(this, kToBooleanTypeOffset, value); 3109 WRITE_BYTE_FIELD(this, kToBooleanTypeOffset, value);
3098 } 3110 }
3099 3111
3112
3113 bool Code::has_function_cache() {
3114 ASSERT(kind() == STUB);
3115 return READ_BYTE_FIELD(this, kHasFunctionCacheOffset) != 0;
3116 }
3117
3118
3119 void Code::set_has_function_cache(bool flag) {
3120 ASSERT(kind() == STUB);
3121 WRITE_BYTE_FIELD(this, kHasFunctionCacheOffset, flag);
3122 }
3123
3124
3100 bool Code::is_inline_cache_stub() { 3125 bool Code::is_inline_cache_stub() {
3101 Kind kind = this->kind(); 3126 Kind kind = this->kind();
3102 return kind >= FIRST_IC_KIND && kind <= LAST_IC_KIND; 3127 return kind >= FIRST_IC_KIND && kind <= LAST_IC_KIND;
3103 } 3128 }
3104 3129
3105 3130
3106 Code::Flags Code::ComputeFlags(Kind kind, 3131 Code::Flags Code::ComputeFlags(Kind kind,
3107 InlineCacheState ic_state, 3132 InlineCacheState ic_state,
3108 ExtraICState extra_ic_state, 3133 ExtraICState extra_ic_state,
3109 PropertyType type, 3134 PropertyType type,
(...skipping 65 matching lines...)
3175 HeapObject* code = HeapObject::FromAddress(address - Code::kHeaderSize); 3200 HeapObject* code = HeapObject::FromAddress(address - Code::kHeaderSize);
3176 // GetCodeFromTargetAddress might be called when marking objects during mark 3201 // GetCodeFromTargetAddress might be called when marking objects during mark
3177 // sweep. reinterpret_cast is therefore used instead of the more appropriate 3202 // sweep. reinterpret_cast is therefore used instead of the more appropriate
3178 // Code::cast. Code::cast does not work when the object's map is 3203 // Code::cast. Code::cast does not work when the object's map is
3179 // marked. 3204 // marked.
3180 Code* result = reinterpret_cast<Code*>(code); 3205 Code* result = reinterpret_cast<Code*>(code);
3181 return result; 3206 return result;
3182 } 3207 }
3183 3208
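GetCodeFromTargetAddress recovers a Code object from a call target by subtracting the fixed header size, relying on the instructions being laid out immediately after the header. A minimal standalone sketch of that pointer arithmetic, using an illustrative header size rather than V8's real constant:

    #include <cassert>
    #include <cstdint>

    typedef uint8_t* Address;

    const int kHeaderSize = 64;  // hypothetical; not V8's actual Code header size

    struct FakeCode {
      uint8_t header[kHeaderSize];
      uint8_t instructions[16];
    };

    int main() {
      FakeCode code;
      // The entry point is the first instruction, one header past the object.
      Address entry = reinterpret_cast<Address>(&code) + kHeaderSize;
      // Recover the object start exactly as GetCodeFromTargetAddress does.
      FakeCode* recovered = reinterpret_cast<FakeCode*>(entry - kHeaderSize);
      assert(recovered == &code);
      return 0;
    }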
3184 3209
3185 Isolate* Map::isolate() {
3186 return heap()->isolate();
3187 }
3188
3189
3190 Heap* Map::heap() {
3191 // NOTE: The address() helper is not used, in order to save one instruction.
3192 Heap* heap = Page::FromAddress(reinterpret_cast<Address>(this))->heap_;
3193 ASSERT(heap != NULL);
3194 ASSERT(heap->isolate() == Isolate::Current());
3195 return heap;
3196 }
3197
3198
3199 Heap* Code::heap() {
3200 // NOTE: The address() helper is not used, in order to save one instruction.
3201 Heap* heap = Page::FromAddress(reinterpret_cast<Address>(this))->heap_;
3202 ASSERT(heap != NULL);
3203 ASSERT(heap->isolate() == Isolate::Current());
3204 return heap;
3205 }
3206
3207
3208 Isolate* Code::isolate() {
3209 return heap()->isolate();
3210 }
3211
3212
3213 Heap* JSGlobalPropertyCell::heap() {
3214 // NOTE: The address() helper is not used, in order to save one instruction.
3215 Heap* heap = Page::FromAddress(reinterpret_cast<Address>(this))->heap_;
3216 ASSERT(heap != NULL);
3217 ASSERT(heap->isolate() == Isolate::Current());
3218 return heap;
3219 }
3220
3221
3222 Isolate* JSGlobalPropertyCell::isolate() {
3223 return heap()->isolate();
3224 }
3225
3226
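The heap() accessors above all use the same trick: every heap object lives on an aligned page whose header records the owning Heap*, so the lookup is a single mask of the object's own address. A standalone sketch under assumed page size and header layout (both illustrative):

    #include <cassert>
    #include <cstdint>

    const uintptr_t kPageSize = 1 << 20;  // assumed page size, for illustration
    const uintptr_t kPageAlignmentMask = kPageSize - 1;

    struct Heap;  // opaque

    struct Page {
      Heap* heap_;  // first word of every page header
      static Page* FromAddress(uintptr_t addr) {
        // Clearing the low bits lands on the page start, where the header lives.
        return reinterpret_cast<Page*>(addr & ~kPageAlignmentMask);
      }
    };

    int main() {
      static char backing[2 * kPageSize];
      uintptr_t page_start =
          (reinterpret_cast<uintptr_t>(backing) + kPageAlignmentMask) &
          ~kPageAlignmentMask;
      Page* page = reinterpret_cast<Page*>(page_start);
      page->heap_ = 0;  // would be the owning Heap*
      // Any interior address on the page maps back to the same page header.
      assert(Page::FromAddress(page_start + 12345) == page);
      return 0;
    }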
3227 Object* Code::GetObjectFromEntryAddress(Address location_of_address) { 3210 Object* Code::GetObjectFromEntryAddress(Address location_of_address) {
3228 return HeapObject:: 3211 return HeapObject::
3229 FromAddress(Memory::Address_at(location_of_address) - Code::kHeaderSize); 3212 FromAddress(Memory::Address_at(location_of_address) - Code::kHeaderSize);
3230 } 3213 }
3231 3214
3232 3215
3233 Object* Map::prototype() { 3216 Object* Map::prototype() {
3234 return READ_FIELD(this, kPrototypeOffset); 3217 return READ_FIELD(this, kPrototypeOffset);
3235 } 3218 }
3236 3219
3237 3220
3238 void Map::set_prototype(Object* value, WriteBarrierMode mode) { 3221 void Map::set_prototype(Object* value, WriteBarrierMode mode) {
3239 ASSERT(value->IsNull() || value->IsJSReceiver()); 3222 ASSERT(value->IsNull() || value->IsJSReceiver());
3240 WRITE_FIELD(this, kPrototypeOffset, value); 3223 WRITE_FIELD(this, kPrototypeOffset, value);
3241 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kPrototypeOffset, mode); 3224 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kPrototypeOffset, value, mode);
3242 }
3243
3244
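A pattern change runs through this patch: WRITE_BARRIER and CONDITIONAL_WRITE_BARRIER now receive the stored value in addition to the slot (compare the two sides of set_prototype above), because an incremental marker needs to know what was written, not merely where. A hedged sketch of a value-aware barrier; the names and checks below are invented for illustration, not V8's implementation:

    #include <cassert>

    enum WriteBarrierMode { SKIP_WRITE_BARRIER, UPDATE_WRITE_BARRIER };

    // Hypothetical value-aware barrier: a generational barrier remembers the
    // slot, while an incremental marker may have to re-grey the value if it
    // was already visited. Both checks need the value, hence the parameter.
    struct Marker {
      bool needs_regrey;
      template <typename Object>
      void RecordWrite(Object** slot, Object* value, WriteBarrierMode mode) {
        if (mode == SKIP_WRITE_BARRIER) return;
        needs_regrey = (value != 0);
        (void)slot;
      }
    };

    int main() {
      Marker m = { false };
      int object = 0;
      int* field = 0;
      m.RecordWrite(&field, &object, UPDATE_WRITE_BARRIER);
      assert(m.needs_regrey);
      return 0;
    }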
3245 MaybeObject* Map::GetFastElementsMap() {
3246 if (has_fast_elements()) return this;
3247 Object* obj;
3248 { MaybeObject* maybe_obj = CopyDropTransitions();
3249 if (!maybe_obj->ToObject(&obj)) return maybe_obj;
3250 }
3251 Map* new_map = Map::cast(obj);
3252 new_map->set_elements_kind(FAST_ELEMENTS);
3253 isolate()->counters()->map_to_fast_elements()->Increment();
3254 return new_map;
3255 }
3256
3257
3258 MaybeObject* Map::GetFastDoubleElementsMap() {
3259 if (has_fast_double_elements()) return this;
3260 Object* obj;
3261 { MaybeObject* maybe_obj = CopyDropTransitions();
3262 if (!maybe_obj->ToObject(&obj)) return maybe_obj;
3263 }
3264 Map* new_map = Map::cast(obj);
3265 new_map->set_elements_kind(FAST_DOUBLE_ELEMENTS);
3266 isolate()->counters()->map_to_fast_double_elements()->Increment();
3267 return new_map;
3268 }
3269
3270
3271 MaybeObject* Map::GetSlowElementsMap() {
3272 if (!has_fast_elements() && !has_fast_double_elements()) return this;
3273 Object* obj;
3274 { MaybeObject* maybe_obj = CopyDropTransitions();
3275 if (!maybe_obj->ToObject(&obj)) return maybe_obj;
3276 }
3277 Map* new_map = Map::cast(obj);
3278 new_map->set_elements_kind(DICTIONARY_ELEMENTS);
3279 isolate()->counters()->map_to_slow_elements()->Increment();
3280 return new_map;
3281 } 3225 }
3282 3226
3283 3227
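The three helpers removed on the left all follow one recipe: copy the map without its transition tree, retag the copy's elements kind, and bump a counter. A condensed sketch of that shared recipe (MaybeObject failure handling elided; the types are stand-ins, not V8's):

    // Condensed form of the copy-and-retag recipe shared by the removed
    // helpers; error propagation via MaybeObject is elided here.
    template <typename Map, typename ElementsKind>
    Map* CopyWithElementsKind(Map* map, ElementsKind kind) {
      Map* new_map = map->CopyDropTransitions();  // fresh copy, no transitions
      new_map->set_elements_kind(kind);           // only the elements kind differs
      return new_map;
    }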
3284 DescriptorArray* Map::instance_descriptors() { 3228 DescriptorArray* Map::instance_descriptors() {
3285 Object* object = READ_FIELD(this, kInstanceDescriptorsOrBitField3Offset); 3229 Object* object = READ_FIELD(this, kInstanceDescriptorsOrBitField3Offset);
3286 if (object->IsSmi()) { 3230 if (object->IsSmi()) {
3287 return HEAP->empty_descriptor_array(); 3231 return HEAP->empty_descriptor_array();
3288 } else { 3232 } else {
3289 return DescriptorArray::cast(object); 3233 return DescriptorArray::cast(object);
3290 } 3234 }
(...skipping 14 matching lines...)
3305 kInstanceDescriptorsOrBitField3Offset, 3249 kInstanceDescriptorsOrBitField3Offset,
3306 Smi::FromInt(DescriptorArray::cast(object)->bit_field3_storage())); 3250 Smi::FromInt(DescriptorArray::cast(object)->bit_field3_storage()));
3307 } 3251 }
3308 } 3252 }
3309 3253
3310 3254
3311 void Map::set_instance_descriptors(DescriptorArray* value, 3255 void Map::set_instance_descriptors(DescriptorArray* value,
3312 WriteBarrierMode mode) { 3256 WriteBarrierMode mode) {
3313 Object* object = READ_FIELD(this, 3257 Object* object = READ_FIELD(this,
3314 kInstanceDescriptorsOrBitField3Offset); 3258 kInstanceDescriptorsOrBitField3Offset);
3315 if (value == isolate()->heap()->empty_descriptor_array()) { 3259 Heap* heap = GetHeap();
3260 if (value == heap->empty_descriptor_array()) {
3316 clear_instance_descriptors(); 3261 clear_instance_descriptors();
3317 return; 3262 return;
3318 } else { 3263 } else {
3319 if (object->IsSmi()) { 3264 if (object->IsSmi()) {
3320 value->set_bit_field3_storage(Smi::cast(object)->value()); 3265 value->set_bit_field3_storage(Smi::cast(object)->value());
3321 } else { 3266 } else {
3322 value->set_bit_field3_storage( 3267 value->set_bit_field3_storage(
3323 DescriptorArray::cast(object)->bit_field3_storage()); 3268 DescriptorArray::cast(object)->bit_field3_storage());
3324 } 3269 }
3325 } 3270 }
3326 ASSERT(!is_shared()); 3271 ASSERT(!is_shared());
3327 WRITE_FIELD(this, kInstanceDescriptorsOrBitField3Offset, value); 3272 WRITE_FIELD(this, kInstanceDescriptorsOrBitField3Offset, value);
3328 CONDITIONAL_WRITE_BARRIER(GetHeap(), 3273 CONDITIONAL_WRITE_BARRIER(
3329 this, 3274 heap, this, kInstanceDescriptorsOrBitField3Offset, value, mode);
3330 kInstanceDescriptorsOrBitField3Offset,
3331 mode);
3332 } 3275 }
3333 3276
3334 3277
3335 int Map::bit_field3() { 3278 int Map::bit_field3() {
3336 Object* object = READ_FIELD(this, 3279 Object* object = READ_FIELD(this,
3337 kInstanceDescriptorsOrBitField3Offset); 3280 kInstanceDescriptorsOrBitField3Offset);
3338 if (object->IsSmi()) { 3281 if (object->IsSmi()) {
3339 return Smi::cast(object)->value(); 3282 return Smi::cast(object)->value();
3340 } else { 3283 } else {
3341 return DescriptorArray::cast(object)->bit_field3_storage(); 3284 return DescriptorArray::cast(object)->bit_field3_storage();
3342 } 3285 }
3343 } 3286 }
3344 3287
3345 3288
3346 void Map::set_bit_field3(int value) { 3289 void Map::set_bit_field3(int value) {
3347 ASSERT(Smi::IsValid(value)); 3290 ASSERT(Smi::IsValid(value));
3348 Object* object = READ_FIELD(this, 3291 Object* object = READ_FIELD(this,
3349 kInstanceDescriptorsOrBitField3Offset); 3292 kInstanceDescriptorsOrBitField3Offset);
3350 if (object->IsSmi()) { 3293 if (object->IsSmi()) {
3351 WRITE_FIELD(this, 3294 WRITE_FIELD(this,
3352 kInstanceDescriptorsOrBitField3Offset, 3295 kInstanceDescriptorsOrBitField3Offset,
3353 Smi::FromInt(value)); 3296 Smi::FromInt(value));
3354 } else { 3297 } else {
3355 DescriptorArray::cast(object)->set_bit_field3_storage(value); 3298 DescriptorArray::cast(object)->set_bit_field3_storage(value);
3356 } 3299 }
3357 } 3300 }
3358 3301
3359 3302
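instance_descriptors and bit_field3 share a single field: a map with no descriptors stores bit_field3 directly in the slot as a Smi, while a map with descriptors stores the DescriptorArray there and parks bit_field3 inside the array. A standalone sketch of the idea, with pointer tagging simplified to one low bit (Smi tag 0, pointer tag 1, mirroring but not reproducing V8's encoding):

    #include <cassert>
    #include <cstdint>

    struct DescriptorArray { int bit_field3_storage; };

    // Simplified tagging: low bit 0 means packed integer, 1 means heap pointer.
    static inline uintptr_t EncodeSmi(int v) {
      return static_cast<uintptr_t>(v) << 1;
    }
    static inline bool IsSmi(uintptr_t word) { return (word & 1) == 0; }

    int BitField3(uintptr_t slot) {
      if (IsSmi(slot)) return static_cast<int>(slot >> 1);  // packed in place
      return reinterpret_cast<DescriptorArray*>(slot & ~uintptr_t(1))
          ->bit_field3_storage;                             // moved to the array
    }

    int main() {
      assert(BitField3(EncodeSmi(42)) == 42);
      DescriptorArray d = { 7 };
      assert(BitField3(reinterpret_cast<uintptr_t>(&d) | 1) == 7);
      return 0;
    }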
3303 FixedArray* Map::unchecked_prototype_transitions() {
3304 return reinterpret_cast<FixedArray*>(
3305 READ_FIELD(this, kPrototypeTransitionsOffset));
3306 }
3307
3308
3360 ACCESSORS(Map, code_cache, Object, kCodeCacheOffset) 3309 ACCESSORS(Map, code_cache, Object, kCodeCacheOffset)
3361 ACCESSORS(Map, prototype_transitions, FixedArray, kPrototypeTransitionsOffset) 3310 ACCESSORS(Map, prototype_transitions, FixedArray, kPrototypeTransitionsOffset)
3362 ACCESSORS(Map, constructor, Object, kConstructorOffset) 3311 ACCESSORS(Map, constructor, Object, kConstructorOffset)
3363 3312
3364 ACCESSORS(JSFunction, shared, SharedFunctionInfo, kSharedFunctionInfoOffset) 3313 ACCESSORS(JSFunction, shared, SharedFunctionInfo, kSharedFunctionInfoOffset)
3365 ACCESSORS(JSFunction, literals, FixedArray, kLiteralsOffset) 3314 ACCESSORS(JSFunction, literals, FixedArray, kLiteralsOffset)
3366 ACCESSORS_GCSAFE(JSFunction, next_function_link, Object, 3315 ACCESSORS(JSFunction,
3367 kNextFunctionLinkOffset) 3316 next_function_link,
3317 Object,
3318 kNextFunctionLinkOffset)
3368 3319
3369 ACCESSORS(GlobalObject, builtins, JSBuiltinsObject, kBuiltinsOffset) 3320 ACCESSORS(GlobalObject, builtins, JSBuiltinsObject, kBuiltinsOffset)
3370 ACCESSORS(GlobalObject, global_context, Context, kGlobalContextOffset) 3321 ACCESSORS(GlobalObject, global_context, Context, kGlobalContextOffset)
3371 ACCESSORS(GlobalObject, global_receiver, JSObject, kGlobalReceiverOffset) 3322 ACCESSORS(GlobalObject, global_receiver, JSObject, kGlobalReceiverOffset)
3372 3323
3373 ACCESSORS(JSGlobalProxy, context, Object, kContextOffset) 3324 ACCESSORS(JSGlobalProxy, context, Object, kContextOffset)
3374 3325
3375 ACCESSORS(AccessorInfo, getter, Object, kGetterOffset) 3326 ACCESSORS(AccessorInfo, getter, Object, kGetterOffset)
3376 ACCESSORS(AccessorInfo, setter, Object, kSetterOffset) 3327 ACCESSORS(AccessorInfo, setter, Object, kSetterOffset)
3377 ACCESSORS(AccessorInfo, data, Object, kDataOffset) 3328 ACCESSORS(AccessorInfo, data, Object, kDataOffset)
(...skipping 68 matching lines...)
3446 ACCESSORS(DebugInfo, code, Code, kPatchedCodeIndex) 3397 ACCESSORS(DebugInfo, code, Code, kPatchedCodeIndex)
3447 ACCESSORS(DebugInfo, break_points, FixedArray, kBreakPointsStateIndex) 3398 ACCESSORS(DebugInfo, break_points, FixedArray, kBreakPointsStateIndex)
3448 3399
3449 ACCESSORS(BreakPointInfo, code_position, Smi, kCodePositionIndex) 3400 ACCESSORS(BreakPointInfo, code_position, Smi, kCodePositionIndex)
3450 ACCESSORS(BreakPointInfo, source_position, Smi, kSourcePositionIndex) 3401 ACCESSORS(BreakPointInfo, source_position, Smi, kSourcePositionIndex)
3451 ACCESSORS(BreakPointInfo, statement_position, Smi, kStatementPositionIndex) 3402 ACCESSORS(BreakPointInfo, statement_position, Smi, kStatementPositionIndex)
3452 ACCESSORS(BreakPointInfo, break_point_objects, Object, kBreakPointObjectsIndex) 3403 ACCESSORS(BreakPointInfo, break_point_objects, Object, kBreakPointObjectsIndex)
3453 #endif 3404 #endif
3454 3405
3455 ACCESSORS(SharedFunctionInfo, name, Object, kNameOffset) 3406 ACCESSORS(SharedFunctionInfo, name, Object, kNameOffset)
3456 ACCESSORS_GCSAFE(SharedFunctionInfo, construct_stub, Code, kConstructStubOffset) 3407 ACCESSORS(SharedFunctionInfo, construct_stub, Code, kConstructStubOffset)
3457 ACCESSORS_GCSAFE(SharedFunctionInfo, initial_map, Object, kInitialMapOffset) 3408 ACCESSORS(SharedFunctionInfo, initial_map, Object, kInitialMapOffset)
3458 ACCESSORS(SharedFunctionInfo, instance_class_name, Object, 3409 ACCESSORS(SharedFunctionInfo, instance_class_name, Object,
3459 kInstanceClassNameOffset) 3410 kInstanceClassNameOffset)
3460 ACCESSORS(SharedFunctionInfo, function_data, Object, kFunctionDataOffset) 3411 ACCESSORS(SharedFunctionInfo, function_data, Object, kFunctionDataOffset)
3461 ACCESSORS(SharedFunctionInfo, script, Object, kScriptOffset) 3412 ACCESSORS(SharedFunctionInfo, script, Object, kScriptOffset)
3462 ACCESSORS(SharedFunctionInfo, debug_info, Object, kDebugInfoOffset) 3413 ACCESSORS(SharedFunctionInfo, debug_info, Object, kDebugInfoOffset)
3463 ACCESSORS(SharedFunctionInfo, inferred_name, String, kInferredNameOffset) 3414 ACCESSORS(SharedFunctionInfo, inferred_name, String, kInferredNameOffset)
3464 ACCESSORS(SharedFunctionInfo, this_property_assignments, Object, 3415 ACCESSORS(SharedFunctionInfo, this_property_assignments, Object,
3465 kThisPropertyAssignmentsOffset) 3416 kThisPropertyAssignmentsOffset)
3466 3417
3467 BOOL_ACCESSORS(FunctionTemplateInfo, flag, hidden_prototype, 3418 BOOL_ACCESSORS(FunctionTemplateInfo, flag, hidden_prototype,
(...skipping 185 matching lines...)
3653 } 3604 }
3654 3605
3655 3606
3656 Code* SharedFunctionInfo::unchecked_code() { 3607 Code* SharedFunctionInfo::unchecked_code() {
3657 return reinterpret_cast<Code*>(READ_FIELD(this, kCodeOffset)); 3608 return reinterpret_cast<Code*>(READ_FIELD(this, kCodeOffset));
3658 } 3609 }
3659 3610
3660 3611
3661 void SharedFunctionInfo::set_code(Code* value, WriteBarrierMode mode) { 3612 void SharedFunctionInfo::set_code(Code* value, WriteBarrierMode mode) {
3662 WRITE_FIELD(this, kCodeOffset, value); 3613 WRITE_FIELD(this, kCodeOffset, value);
3663 ASSERT(!Isolate::Current()->heap()->InNewSpace(value)); 3614 CONDITIONAL_WRITE_BARRIER(value->GetHeap(), this, kCodeOffset, value, mode);
3664 } 3615 }
3665 3616
3666 3617
3667 SerializedScopeInfo* SharedFunctionInfo::scope_info() { 3618 SerializedScopeInfo* SharedFunctionInfo::scope_info() {
3668 return reinterpret_cast<SerializedScopeInfo*>( 3619 return reinterpret_cast<SerializedScopeInfo*>(
3669 READ_FIELD(this, kScopeInfoOffset)); 3620 READ_FIELD(this, kScopeInfoOffset));
3670 } 3621 }
3671 3622
3672 3623
3673 void SharedFunctionInfo::set_scope_info(SerializedScopeInfo* value, 3624 void SharedFunctionInfo::set_scope_info(SerializedScopeInfo* value,
3674 WriteBarrierMode mode) { 3625 WriteBarrierMode mode) {
3675 WRITE_FIELD(this, kScopeInfoOffset, reinterpret_cast<Object*>(value)); 3626 WRITE_FIELD(this, kScopeInfoOffset, reinterpret_cast<Object*>(value));
3676 CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kScopeInfoOffset, mode); 3627 CONDITIONAL_WRITE_BARRIER(GetHeap(),
3628 this,
3629 kScopeInfoOffset,
3630 reinterpret_cast<Object*>(value),
3631 mode);
3677 } 3632 }
3678 3633
3679 3634
3680 Smi* SharedFunctionInfo::deopt_counter() { 3635 Smi* SharedFunctionInfo::deopt_counter() {
3681 return reinterpret_cast<Smi*>(READ_FIELD(this, kDeoptCounterOffset)); 3636 return reinterpret_cast<Smi*>(READ_FIELD(this, kDeoptCounterOffset));
3682 } 3637 }
3683 3638
3684 3639
3685 void SharedFunctionInfo::set_deopt_counter(Smi* value) { 3640 void SharedFunctionInfo::set_deopt_counter(Smi* value) {
3686 WRITE_FIELD(this, kDeoptCounterOffset, value); 3641 WRITE_FIELD(this, kDeoptCounterOffset, value);
(...skipping 76 matching lines...)
3763 } 3718 }
3764 3719
3765 3720
3766 Code* JSFunction::unchecked_code() { 3721 Code* JSFunction::unchecked_code() {
3767 return reinterpret_cast<Code*>( 3722 return reinterpret_cast<Code*>(
3768 Code::GetObjectFromEntryAddress(FIELD_ADDR(this, kCodeEntryOffset))); 3723 Code::GetObjectFromEntryAddress(FIELD_ADDR(this, kCodeEntryOffset)));
3769 } 3724 }
3770 3725
3771 3726
3772 void JSFunction::set_code(Code* value) { 3727 void JSFunction::set_code(Code* value) {
3773 // Skip the write barrier because code is never in new space.
3774 ASSERT(!HEAP->InNewSpace(value)); 3728 ASSERT(!HEAP->InNewSpace(value));
3775 Address entry = value->entry(); 3729 Address entry = value->entry();
3776 WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry)); 3730 WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry));
3731 GetHeap()->incremental_marking()->RecordWriteOfCodeEntry(
3732 this,
3733 HeapObject::RawField(this, kCodeEntryOffset),
3734 value);
3777 } 3735 }
3778 3736
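Because set_code stores a raw entry address rather than a tagged pointer, the ordinary write barrier never sees the write; the explicit RecordWriteOfCodeEntry call added on the right is what keeps incremental marking sound. An illustrative sketch of why an untracked slot needs a manual notification (the marker interface below is invented for the example):

    #include <cassert>

    // The code-entry slot holds a raw instruction address, so heap scanning
    // never discovers 'value' through it; the mutator must report the write.
    struct Code { bool grey; };

    struct IncrementalMarker {
      bool marking;
      void RecordWriteOfCodeEntry(void* host, void** slot, Code* value) {
        (void)host; (void)slot;
        if (marking) value->grey = true;  // i.e. push on the marking deque
      }
    };

    int main() {
      IncrementalMarker marker = { true };
      Code code = { false };
      void* code_entry_slot = 0;
      marker.RecordWriteOfCodeEntry(0, &code_entry_slot, &code);
      assert(code.grey);
      return 0;
    }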
3779 3737
3780 void JSFunction::ReplaceCode(Code* code) { 3738 void JSFunction::ReplaceCode(Code* code) {
3781 bool was_optimized = IsOptimized(); 3739 bool was_optimized = IsOptimized();
3782 bool is_optimized = code->kind() == Code::OPTIMIZED_FUNCTION; 3740 bool is_optimized = code->kind() == Code::OPTIMIZED_FUNCTION;
3783 3741
3784 set_code(code); 3742 set_code(code);
3785 3743
3786 // Add/remove the function from the list of optimized functions for this 3744 // Add/remove the function from the list of optimized functions for this
(...skipping 19 matching lines...)
3806 3764
3807 SharedFunctionInfo* JSFunction::unchecked_shared() { 3765 SharedFunctionInfo* JSFunction::unchecked_shared() {
3808 return reinterpret_cast<SharedFunctionInfo*>( 3766 return reinterpret_cast<SharedFunctionInfo*>(
3809 READ_FIELD(this, kSharedFunctionInfoOffset)); 3767 READ_FIELD(this, kSharedFunctionInfoOffset));
3810 } 3768 }
3811 3769
3812 3770
3813 void JSFunction::set_context(Object* value) { 3771 void JSFunction::set_context(Object* value) {
3814 ASSERT(value->IsUndefined() || value->IsContext()); 3772 ASSERT(value->IsUndefined() || value->IsContext());
3815 WRITE_FIELD(this, kContextOffset, value); 3773 WRITE_FIELD(this, kContextOffset, value);
3816 WRITE_BARRIER(this, kContextOffset); 3774 WRITE_BARRIER(GetHeap(), this, kContextOffset, value);
3817 } 3775 }
3818 3776
3819 ACCESSORS(JSFunction, prototype_or_initial_map, Object, 3777 ACCESSORS(JSFunction, prototype_or_initial_map, Object,
3820 kPrototypeOrInitialMapOffset) 3778 kPrototypeOrInitialMapOffset)
3821 3779
3822 3780
3823 Map* JSFunction::initial_map() { 3781 Map* JSFunction::initial_map() {
3824 return Map::cast(prototype_or_initial_map()); 3782 return Map::cast(prototype_or_initial_map());
3825 } 3783 }
3826 3784
(...skipping 53 matching lines...)
3880 Object* JSBuiltinsObject::javascript_builtin(Builtins::JavaScript id) { 3838 Object* JSBuiltinsObject::javascript_builtin(Builtins::JavaScript id) {
3881 ASSERT(id < kJSBuiltinsCount); // id is unsigned. 3839 ASSERT(id < kJSBuiltinsCount); // id is unsigned.
3882 return READ_FIELD(this, OffsetOfFunctionWithId(id)); 3840 return READ_FIELD(this, OffsetOfFunctionWithId(id));
3883 } 3841 }
3884 3842
3885 3843
3886 void JSBuiltinsObject::set_javascript_builtin(Builtins::JavaScript id, 3844 void JSBuiltinsObject::set_javascript_builtin(Builtins::JavaScript id,
3887 Object* value) { 3845 Object* value) {
3888 ASSERT(id < kJSBuiltinsCount); // id is unsigned. 3846 ASSERT(id < kJSBuiltinsCount); // id is unsigned.
3889 WRITE_FIELD(this, OffsetOfFunctionWithId(id), value); 3847 WRITE_FIELD(this, OffsetOfFunctionWithId(id), value);
3890 WRITE_BARRIER(this, OffsetOfFunctionWithId(id)); 3848 WRITE_BARRIER(GetHeap(), this, OffsetOfFunctionWithId(id), value);
3891 } 3849 }
3892 3850
3893 3851
3894 Code* JSBuiltinsObject::javascript_builtin_code(Builtins::JavaScript id) { 3852 Code* JSBuiltinsObject::javascript_builtin_code(Builtins::JavaScript id) {
3895 ASSERT(id < kJSBuiltinsCount); // id is unsigned. 3853 ASSERT(id < kJSBuiltinsCount); // id is unsigned.
3896 return Code::cast(READ_FIELD(this, OffsetOfCodeWithId(id))); 3854 return Code::cast(READ_FIELD(this, OffsetOfCodeWithId(id)));
3897 } 3855 }
3898 3856
3899 3857
3900 void JSBuiltinsObject::set_javascript_builtin_code(Builtins::JavaScript id, 3858 void JSBuiltinsObject::set_javascript_builtin_code(Builtins::JavaScript id,
3901 Code* value) { 3859 Code* value) {
3902 ASSERT(id < kJSBuiltinsCount); // id is unsigned. 3860 ASSERT(id < kJSBuiltinsCount); // id is unsigned.
3903 WRITE_FIELD(this, OffsetOfCodeWithId(id), value); 3861 WRITE_FIELD(this, OffsetOfCodeWithId(id), value);
3904 ASSERT(!HEAP->InNewSpace(value)); 3862 ASSERT(!HEAP->InNewSpace(value));
3905 } 3863 }
3906 3864
3907 3865
3908 ACCESSORS(JSProxy, handler, Object, kHandlerOffset) 3866 ACCESSORS(JSProxy, handler, Object, kHandlerOffset)
3867 ACCESSORS(JSProxy, hash, Object, kHashOffset)
3909 ACCESSORS(JSFunctionProxy, call_trap, Object, kCallTrapOffset) 3868 ACCESSORS(JSFunctionProxy, call_trap, Object, kCallTrapOffset)
3910 ACCESSORS(JSFunctionProxy, construct_trap, Object, kConstructTrapOffset) 3869 ACCESSORS(JSFunctionProxy, construct_trap, Object, kConstructTrapOffset)
3911 3870
3912 3871
3913 void JSProxy::InitializeBody(int object_size, Object* value) { 3872 void JSProxy::InitializeBody(int object_size, Object* value) {
3914 ASSERT(!value->IsHeapObject() || !GetHeap()->InNewSpace(value)); 3873 ASSERT(!value->IsHeapObject() || !GetHeap()->InNewSpace(value));
3915 for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) { 3874 for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
3916 WRITE_FIELD(this, offset, value); 3875 WRITE_FIELD(this, offset, value);
3917 } 3876 }
3918 } 3877 }
3919 3878
3920 3879
3921 ACCESSORS(JSWeakMap, table, ObjectHashTable, kTableOffset) 3880 ACCESSORS(JSWeakMap, table, Object, kTableOffset)
3922 ACCESSORS_GCSAFE(JSWeakMap, next, Object, kNextOffset) 3881 ACCESSORS(JSWeakMap, next, Object, kNextOffset)
3923 3882
3924 3883
3925 ObjectHashTable* JSWeakMap::unchecked_table() { 3884 ObjectHashTable* JSWeakMap::unchecked_table() {
3926 return reinterpret_cast<ObjectHashTable*>(READ_FIELD(this, kTableOffset)); 3885 return reinterpret_cast<ObjectHashTable*>(READ_FIELD(this, kTableOffset));
3927 } 3886 }
3928 3887
3929 3888
3930 Address Foreign::address() { 3889 Address Foreign::address() {
3931 return AddressFrom<Address>(READ_INTPTR_FIELD(this, kAddressOffset)); 3890 return AddressFrom<Address>(READ_INTPTR_FIELD(this, kAddressOffset));
3932 } 3891 }
(...skipping 71 matching lines...)
4004 int Code::relocation_size() { 3963 int Code::relocation_size() {
4005 return unchecked_relocation_info()->length(); 3964 return unchecked_relocation_info()->length();
4006 } 3965 }
4007 3966
4008 3967
4009 byte* Code::entry() { 3968 byte* Code::entry() {
4010 return instruction_start(); 3969 return instruction_start();
4011 } 3970 }
4012 3971
4013 3972
4014 bool Code::contains(byte* pc) { 3973 bool Code::contains(byte* inner_pointer) {
4015 return (instruction_start() <= pc) && 3974 return (address() <= inner_pointer) && (inner_pointer <= address() + Size());
4016 (pc <= instruction_start() + instruction_size());
4017 } 3975 }
4018 3976
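contains() is widened on the right from the instruction range to the whole Code object, header included, and the parameter is renamed from pc to inner_pointer accordingly. A tiny standalone comparison of the two checks, with illustrative sizes:

    #include <cassert>
    #include <cstdint>

    int main() {
      const int kHeaderSize = 64, kInstructionSize = 32, kTotalSize = 128;
      uint8_t object[kTotalSize];
      uint8_t* start = object;

      uint8_t* in_header = start + 8;  // a pointer into the object's header
      // Old check, instructions only: rejects header/metadata pointers.
      bool old_contains = in_header >= start + kHeaderSize &&
                          in_header <= start + kHeaderSize + kInstructionSize;
      // New check: the whole object, address() through address() + Size().
      bool new_contains = in_header >= start && in_header <= start + kTotalSize;
      assert(!old_contains && new_contains);
      return 0;
    }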
4019 3977
4020 ACCESSORS(JSArray, length, Object, kLengthOffset) 3978 ACCESSORS(JSArray, length, Object, kLengthOffset)
4021 3979
4022 3980
4023 ACCESSORS(JSRegExp, data, Object, kDataOffset) 3981 ACCESSORS(JSRegExp, data, Object, kDataOffset)
4024 3982
4025 3983
4026 JSRegExp::Type JSRegExp::TypeTag() { 3984 JSRegExp::Type JSRegExp::TypeTag() {
(...skipping 58 matching lines...)
4085 FixedArray::cast(data())->set(index, value); 4043 FixedArray::cast(data())->set(index, value);
4086 } 4044 }
4087 4045
4088 4046
4089 void JSRegExp::SetDataAtUnchecked(int index, Object* value, Heap* heap) { 4047 void JSRegExp::SetDataAtUnchecked(int index, Object* value, Heap* heap) {
4090 ASSERT(index >= kDataIndex); // Only implementation data can be set this way. 4048 ASSERT(index >= kDataIndex); // Only implementation data can be set this way.
4091 FixedArray* fa = reinterpret_cast<FixedArray*>(data()); 4049 FixedArray* fa = reinterpret_cast<FixedArray*>(data());
4092 if (value->IsSmi()) { 4050 if (value->IsSmi()) {
4093 fa->set_unchecked(index, Smi::cast(value)); 4051 fa->set_unchecked(index, Smi::cast(value));
4094 } else { 4052 } else {
4053 // We only do this during GC, so we don't need to notify the write barrier.
4095 fa->set_unchecked(heap, index, value, SKIP_WRITE_BARRIER); 4054 fa->set_unchecked(heap, index, value, SKIP_WRITE_BARRIER);
4096 } 4055 }
4097 } 4056 }
4098 4057
4099 4058
4100 ElementsKind JSObject::GetElementsKind() { 4059 ElementsKind JSObject::GetElementsKind() {
4101 ElementsKind kind = map()->elements_kind(); 4060 ElementsKind kind = map()->elements_kind();
4102 ASSERT((kind == FAST_ELEMENTS && 4061 #ifdef DEBUG
4103 (elements()->map() == GetHeap()->fixed_array_map() || 4062 FixedArrayBase* fixed_array =
4104 elements()->map() == GetHeap()->fixed_cow_array_map())) || 4063 reinterpret_cast<FixedArrayBase*>(READ_FIELD(this, kElementsOffset));
4064 Map* map = fixed_array->map();
4065 ASSERT(((kind == FAST_ELEMENTS || kind == FAST_SMI_ONLY_ELEMENTS) &&
4066 (map == GetHeap()->fixed_array_map() ||
4067 map == GetHeap()->fixed_cow_array_map())) ||
4105 (kind == FAST_DOUBLE_ELEMENTS && 4068 (kind == FAST_DOUBLE_ELEMENTS &&
4106 elements()->IsFixedDoubleArray()) || 4069 fixed_array->IsFixedDoubleArray()) ||
4107 (kind == DICTIONARY_ELEMENTS && 4070 (kind == DICTIONARY_ELEMENTS &&
4108 elements()->IsFixedArray() && 4071 fixed_array->IsFixedArray() &&
4109 elements()->IsDictionary()) || 4072 fixed_array->IsDictionary()) ||
4110 (kind > DICTIONARY_ELEMENTS)); 4073 (kind > DICTIONARY_ELEMENTS));
4074 #endif
4111 return kind; 4075 return kind;
4112 } 4076 }
4113 4077
4114 4078
4115 ElementsAccessor* JSObject::GetElementsAccessor() { 4079 ElementsAccessor* JSObject::GetElementsAccessor() {
4116 return ElementsAccessor::ForKind(GetElementsKind()); 4080 return ElementsAccessor::ForKind(GetElementsKind());
4117 } 4081 }
4118 4082
4119 4083
4120 bool JSObject::HasFastElements() { 4084 bool JSObject::HasFastElements() {
4121 return GetElementsKind() == FAST_ELEMENTS; 4085 return GetElementsKind() == FAST_ELEMENTS;
4122 } 4086 }
4123 4087
4124 4088
4089 bool JSObject::HasFastSmiOnlyElements() {
4090 return GetElementsKind() == FAST_SMI_ONLY_ELEMENTS;
4091 }
4092
4093
4094 bool JSObject::HasFastTypeElements() {
4095 ElementsKind elements_kind = GetElementsKind();
4096 return elements_kind == FAST_SMI_ONLY_ELEMENTS ||
4097 elements_kind == FAST_ELEMENTS;
4098 }
4099
4100
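FAST_SMI_ONLY_ELEMENTS is a new, more specialized fast kind: a backing store known to hold only Smis never needs a write barrier on stores. HasFastTypeElements groups it with FAST_ELEMENTS because both use a plain FixedArray backing store. A sketch of the predicate over an illustrative subset of the kinds:

    #include <cassert>

    // Illustrative subset of the elements-kind lattice.
    enum ElementsKind {
      FAST_SMI_ONLY_ELEMENTS,  // only Smis: most specialized
      FAST_ELEMENTS,           // arbitrary tagged values
      FAST_DOUBLE_ELEMENTS,    // unboxed doubles
      DICTIONARY_ELEMENTS      // slow hash-map backing store
    };

    bool HasFastTypeElements(ElementsKind kind) {
      // Both fast tagged kinds share a plain FixedArray backing store.
      return kind == FAST_SMI_ONLY_ELEMENTS || kind == FAST_ELEMENTS;
    }

    int main() {
      assert(HasFastTypeElements(FAST_SMI_ONLY_ELEMENTS));
      assert(!HasFastTypeElements(FAST_DOUBLE_ELEMENTS));
      return 0;
    }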
4125 bool JSObject::HasFastDoubleElements() { 4101 bool JSObject::HasFastDoubleElements() {
4126 return GetElementsKind() == FAST_DOUBLE_ELEMENTS; 4102 return GetElementsKind() == FAST_DOUBLE_ELEMENTS;
4127 } 4103 }
4128 4104
4129 4105
4130 bool JSObject::HasDictionaryElements() { 4106 bool JSObject::HasDictionaryElements() {
4131 return GetElementsKind() == DICTIONARY_ELEMENTS; 4107 return GetElementsKind() == DICTIONARY_ELEMENTS;
4132 } 4108 }
4133 4109
4134 4110
4111 bool JSObject::HasNonStrictArgumentsElements() {
4112 return GetElementsKind() == NON_STRICT_ARGUMENTS_ELEMENTS;
4113 }
4114
4115
4135 bool JSObject::HasExternalArrayElements() { 4116 bool JSObject::HasExternalArrayElements() {
4136 HeapObject* array = elements(); 4117 HeapObject* array = elements();
4137 ASSERT(array != NULL); 4118 ASSERT(array != NULL);
4138 return array->IsExternalArray(); 4119 return array->IsExternalArray();
4139 } 4120 }
4140 4121
4141 4122
4142 #define EXTERNAL_ELEMENTS_CHECK(name, type) \ 4123 #define EXTERNAL_ELEMENTS_CHECK(name, type) \
4143 bool JSObject::HasExternal##name##Elements() { \ 4124 bool JSObject::HasExternal##name##Elements() { \
4144 HeapObject* array = elements(); \ 4125 HeapObject* array = elements(); \
(...skipping 31 matching lines...)
4176 4157
4177 bool JSObject::AllowsSetElementsLength() { 4158 bool JSObject::AllowsSetElementsLength() {
4178 bool result = elements()->IsFixedArray() || 4159 bool result = elements()->IsFixedArray() ||
4179 elements()->IsFixedDoubleArray(); 4160 elements()->IsFixedDoubleArray();
4180 ASSERT(result == !HasExternalArrayElements()); 4161 ASSERT(result == !HasExternalArrayElements());
4181 return result; 4162 return result;
4182 } 4163 }
4183 4164
4184 4165
4185 MaybeObject* JSObject::EnsureWritableFastElements() { 4166 MaybeObject* JSObject::EnsureWritableFastElements() {
4186 ASSERT(HasFastElements()); 4167 ASSERT(HasFastTypeElements());
4187 FixedArray* elems = FixedArray::cast(elements()); 4168 FixedArray* elems = FixedArray::cast(elements());
4188 Isolate* isolate = GetIsolate(); 4169 Isolate* isolate = GetIsolate();
4189 if (elems->map() != isolate->heap()->fixed_cow_array_map()) return elems; 4170 if (elems->map() != isolate->heap()->fixed_cow_array_map()) return elems;
4190 Object* writable_elems; 4171 Object* writable_elems;
4191 { MaybeObject* maybe_writable_elems = isolate->heap()->CopyFixedArrayWithMap( 4172 { MaybeObject* maybe_writable_elems = isolate->heap()->CopyFixedArrayWithMap(
4192 elems, isolate->heap()->fixed_array_map()); 4173 elems, isolate->heap()->fixed_array_map());
4193 if (!maybe_writable_elems->ToObject(&writable_elems)) { 4174 if (!maybe_writable_elems->ToObject(&writable_elems)) {
4194 return maybe_writable_elems; 4175 return maybe_writable_elems;
4195 } 4176 }
4196 } 4177 }
(...skipping 155 matching lines...)
4352 if (IsJSGlobalProxy()) { 4333 if (IsJSGlobalProxy()) {
4353 Object* proto = GetPrototype(); 4334 Object* proto = GetPrototype();
4354 if (proto->IsNull()) return GetHeap()->undefined_value(); 4335 if (proto->IsNull()) return GetHeap()->undefined_value();
4355 ASSERT(proto->IsJSGlobalObject()); 4336 ASSERT(proto->IsJSGlobalObject());
4356 return proto; 4337 return proto;
4357 } 4338 }
4358 return this; 4339 return this;
4359 } 4340 }
4360 4341
4361 4342
4362 bool JSObject::HasHiddenPropertiesObject() { 4343 MaybeObject* JSReceiver::GetIdentityHash(CreationFlag flag) {
4363 ASSERT(!IsJSGlobalProxy()); 4344 return IsJSProxy()
4364 return GetPropertyAttributePostInterceptor(this, 4345 ? JSProxy::cast(this)->GetIdentityHash(flag)
4365 GetHeap()->hidden_symbol(), 4346 : JSObject::cast(this)->GetIdentityHash(flag);
4366 false) != ABSENT;
4367 } 4347 }
4368 4348
4369 4349
4370 Object* JSObject::GetHiddenPropertiesObject() { 4350 bool JSReceiver::HasElement(uint32_t index) {
4371 ASSERT(!IsJSGlobalProxy()); 4351 if (IsJSProxy()) {
4372 PropertyAttributes attributes; 4352 return JSProxy::cast(this)->HasElementWithHandler(index);
4373 // You can't install a getter on a property indexed by the hidden symbol, 4353 }
4374 // so we can be sure that GetLocalPropertyPostInterceptor returns a real 4354 return JSObject::cast(this)->HasElementWithReceiver(this, index);
4375 // object.
4376 Object* result =
4377 GetLocalPropertyPostInterceptor(this,
4378 GetHeap()->hidden_symbol(),
4379 &attributes)->ToObjectUnchecked();
4380 return result;
4381 }
4382
4383
4384 MaybeObject* JSObject::SetHiddenPropertiesObject(Object* hidden_obj) {
4385 ASSERT(!IsJSGlobalProxy());
4386 return SetPropertyPostInterceptor(GetHeap()->hidden_symbol(),
4387 hidden_obj,
4388 DONT_ENUM,
4389 kNonStrictMode);
4390 }
4391
4392
4393 bool JSObject::HasHiddenProperties() {
4394 return !GetHiddenProperties(OMIT_CREATION)->ToObjectChecked()->IsUndefined();
4395 }
4396
4397
4398 bool JSObject::HasElement(uint32_t index) {
4399 return HasElementWithReceiver(this, index);
4400 } 4355 }
4401 4356
4402 4357
4403 bool AccessorInfo::all_can_read() { 4358 bool AccessorInfo::all_can_read() {
4404 return BooleanBit::get(flag(), kAllCanReadBit); 4359 return BooleanBit::get(flag(), kAllCanReadBit);
4405 } 4360 }
4406 4361
4407 4362
4408 void AccessorInfo::set_all_can_read(bool value) { 4363 void AccessorInfo::set_all_can_read(bool value) {
4409 set_flag(BooleanBit::set(flag(), kAllCanReadBit, value)); 4364 set_flag(BooleanBit::set(flag(), kAllCanReadBit, value));
(...skipping 91 matching lines...) Expand 10 before | Expand all | Expand 10 after
4501 uint32_t StringDictionaryShape::HashForObject(String* key, Object* other) { 4456 uint32_t StringDictionaryShape::HashForObject(String* key, Object* other) {
4502 return String::cast(other)->Hash(); 4457 return String::cast(other)->Hash();
4503 } 4458 }
4504 4459
4505 4460
4506 MaybeObject* StringDictionaryShape::AsObject(String* key) { 4461 MaybeObject* StringDictionaryShape::AsObject(String* key) {
4507 return key; 4462 return key;
4508 } 4463 }
4509 4464
4510 4465
4511 bool ObjectHashTableShape::IsMatch(JSObject* key, Object* other) { 4466 bool ObjectHashTableShape::IsMatch(JSReceiver* key, Object* other) {
4512 return key == JSObject::cast(other); 4467 return key == JSReceiver::cast(other);
4513 } 4468 }
4514 4469
4515 4470
4516 uint32_t ObjectHashTableShape::Hash(JSObject* key) { 4471 uint32_t ObjectHashTableShape::Hash(JSReceiver* key) {
4517 MaybeObject* maybe_hash = key->GetIdentityHash(JSObject::OMIT_CREATION); 4472 MaybeObject* maybe_hash = key->GetIdentityHash(OMIT_CREATION);
4518 ASSERT(!maybe_hash->IsFailure()); 4473 ASSERT(!maybe_hash->IsFailure());
4519 return Smi::cast(maybe_hash->ToObjectUnchecked())->value(); 4474 return Smi::cast(maybe_hash->ToObjectUnchecked())->value();
4520 } 4475 }
4521 4476
4522 4477
4523 uint32_t ObjectHashTableShape::HashForObject(JSObject* key, Object* other) { 4478 uint32_t ObjectHashTableShape::HashForObject(JSReceiver* key, Object* other) {
4524 MaybeObject* maybe_hash = JSObject::cast(other)->GetIdentityHash( 4479 MaybeObject* maybe_hash =
4525 JSObject::OMIT_CREATION); 4480 JSReceiver::cast(other)->GetIdentityHash(OMIT_CREATION);
4526 ASSERT(!maybe_hash->IsFailure()); 4481 ASSERT(!maybe_hash->IsFailure());
4527 return Smi::cast(maybe_hash->ToObjectUnchecked())->value(); 4482 return Smi::cast(maybe_hash->ToObjectUnchecked())->value();
4528 } 4483 }
4529 4484
4530 4485
4531 MaybeObject* ObjectHashTableShape::AsObject(JSObject* key) { 4486 MaybeObject* ObjectHashTableShape::AsObject(JSReceiver* key) {
4532 return key; 4487 return key;
4533 } 4488 }
4534 4489
4535 4490
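The hash-table shape now keys on JSReceiver and hashes by identity: the hash value is attached to the receiver itself, so it survives rehashing, and OMIT_CREATION is safe above because any key already in the table must already carry a hash. A standalone sketch of lazy identity hashing (the hash-assignment scheme is illustrative):

    #include <cassert>

    // The identity hash is assigned to the object the first time it is
    // requested, so it stays stable across table rehashes.
    struct Receiver { int identity_hash; };  // 0 means "not yet assigned"

    int GetIdentityHash(Receiver* r, int* next_hash) {
      if (r->identity_hash == 0) r->identity_hash = (*next_hash)++;  // lazy
      return r->identity_hash;
    }

    int main() {
      int next_hash = 1;
      Receiver a = { 0 }, b = { 0 };
      int ha = GetIdentityHash(&a, &next_hash);
      assert(ha == GetIdentityHash(&a, &next_hash));  // stable per object
      assert(ha != GetIdentityHash(&b, &next_hash));  // distinct objects differ
      return 0;
    }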
4536 void ObjectHashTable::RemoveEntry(int entry) { 4491 void ObjectHashTable::RemoveEntry(int entry) {
4537 RemoveEntry(entry, GetHeap()); 4492 RemoveEntry(entry, GetHeap());
4538 } 4493 }
4539 4494
4540 4495
4541 void Map::ClearCodeCache(Heap* heap) { 4496 void Map::ClearCodeCache(Heap* heap) {
4542 // No write barrier is needed since empty_fixed_array is not in new space. 4497 // No write barrier is needed since empty_fixed_array is not in new space.
4543 // Please note this function is used during marking: 4498 // Please note this function is used during marking:
4544 // - MarkCompactCollector::MarkUnmarkedObject 4499 // - MarkCompactCollector::MarkUnmarkedObject
4545 ASSERT(!heap->InNewSpace(heap->raw_unchecked_empty_fixed_array())); 4500 ASSERT(!heap->InNewSpace(heap->raw_unchecked_empty_fixed_array()));
4546 WRITE_FIELD(this, kCodeCacheOffset, heap->raw_unchecked_empty_fixed_array()); 4501 WRITE_FIELD(this, kCodeCacheOffset, heap->raw_unchecked_empty_fixed_array());
4547 } 4502 }
4548 4503
4549 4504
4550 void JSArray::EnsureSize(int required_size) { 4505 void JSArray::EnsureSize(int required_size) {
4551 ASSERT(HasFastElements()); 4506 ASSERT(HasFastTypeElements());
4552 FixedArray* elts = FixedArray::cast(elements()); 4507 FixedArray* elts = FixedArray::cast(elements());
4553 const int kArraySizeThatFitsComfortablyInNewSpace = 128; 4508 const int kArraySizeThatFitsComfortablyInNewSpace = 128;
4554 if (elts->length() < required_size) { 4509 if (elts->length() < required_size) {
4555 // Doubling in size would be overkill, but leave some slack to avoid 4510 // Doubling in size would be overkill, but leave some slack to avoid
4556 // constantly growing. 4511 // constantly growing.
4557 Expand(required_size + (required_size >> 3)); 4512 Expand(required_size + (required_size >> 3));
4558 // It's a performance benefit to keep a frequently used array in new-space. 4513 // It's a performance benefit to keep a frequently used array in new-space.
4559 } else if (!GetHeap()->new_space()->Contains(elts) && 4514 } else if (!GetHeap()->new_space()->Contains(elts) &&
4560 required_size < kArraySizeThatFitsComfortablyInNewSpace) { 4515 required_size < kArraySizeThatFitsComfortablyInNewSpace) {
4561 // Expand will allocate a new backing store in new space even if the size 4516 // Expand will allocate a new backing store in new space even if the size
4562 // we asked for isn't larger than what we had before. 4517 // we asked for isn't larger than what we had before.
4563 Expand(required_size); 4518 Expand(required_size);
4564 } 4519 }
4565 } 4520 }
4566 4521
4567 4522
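EnsureSize grows by one-eighth slack rather than doubling: the new capacity is required_size + (required_size >> 3). A worked check of that arithmetic:

    #include <cassert>

    // Growth policy from JSArray::EnsureSize: one-eighth slack, not doubling.
    int NewCapacity(int required_size) {
      return required_size + (required_size >> 3);
    }

    int main() {
      assert(NewCapacity(80) == 90);    // 80 + 10
      assert(NewCapacity(128) == 144);  // 128 + 16
      return 0;
    }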
4568 void JSArray::set_length(Smi* length) { 4523 void JSArray::set_length(Smi* length) {
4524 // Don't need a write barrier for a Smi.
4569 set_length(static_cast<Object*>(length), SKIP_WRITE_BARRIER); 4525 set_length(static_cast<Object*>(length), SKIP_WRITE_BARRIER);
4570 } 4526 }
4571 4527
4572 4528
4573 void JSArray::SetContent(FixedArray* storage) { 4529 MaybeObject* JSArray::SetContent(FixedArray* storage) {
4530 MaybeObject* maybe_object = EnsureCanContainElements(storage);
4531 if (maybe_object->IsFailure()) return maybe_object;
4574 set_length(Smi::FromInt(storage->length())); 4532 set_length(Smi::FromInt(storage->length()));
4575 set_elements(storage); 4533 set_elements(storage);
4534 return this;
4576 } 4535 }
4577 4536
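SetContent can now fail: EnsureCanContainElements may have to transition the map, and that allocation can run out of memory, so the method returns a MaybeObject that callers must unwrap instead of assuming success. A minimal stand-in for that protocol (types and helper invented for the example):

    #include <cassert>

    // Minimal stand-in for the MaybeObject protocol: a result that is either
    // a value or a retryable allocation failure, as SetContent now returns.
    struct Object {};

    struct MaybeObject {
      Object* value;  // 0 means allocation failure
      bool ToObject(Object** out) {
        if (value == 0) return false;
        *out = value;
        return true;
      }
    };

    MaybeObject SetContentLike(bool allocation_fails, Object* self) {
      MaybeObject result;
      result.value = allocation_fails ? static_cast<Object*>(0) : self;
      return result;  // success returns 'this', mirroring JSArray::SetContent
    }

    int main() {
      Object array;
      Object* result = 0;
      MaybeObject maybe = SetContentLike(false, &array);
      assert(maybe.ToObject(&result) && result == &array);
      maybe = SetContentLike(true, &array);
      assert(!maybe.ToObject(&result));  // caller must bail out and retry
      return 0;
    }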
4578 4537
4579 MaybeObject* FixedArray::Copy() { 4538 MaybeObject* FixedArray::Copy() {
4580 if (length() == 0) return this; 4539 if (length() == 0) return this;
4581 return GetHeap()->CopyFixedArray(this); 4540 return GetHeap()->CopyFixedArray(this);
4582 } 4541 }
4583 4542
4584 4543
4585 Relocatable::Relocatable(Isolate* isolate) { 4544 Relocatable::Relocatable(Isolate* isolate) {
(...skipping 96 matching lines...)
4682 #undef WRITE_INT_FIELD 4641 #undef WRITE_INT_FIELD
4683 #undef READ_SHORT_FIELD 4642 #undef READ_SHORT_FIELD
4684 #undef WRITE_SHORT_FIELD 4643 #undef WRITE_SHORT_FIELD
4685 #undef READ_BYTE_FIELD 4644 #undef READ_BYTE_FIELD
4686 #undef WRITE_BYTE_FIELD 4645 #undef WRITE_BYTE_FIELD
4687 4646
4688 4647
4689 } } // namespace v8::internal 4648 } } // namespace v8::internal
4690 4649
4691 #endif // V8_OBJECTS_INL_H_ 4650 #endif // V8_OBJECTS_INL_H_