Chromium Code Reviews

Diff: src/objects-inl.h

Issue 2912773002: Rename "NoBarrier" memory operations to "Relaxed". (Closed)
Patch Set: comment (created 3 years, 6 months ago)
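The change itself is a mechanical rename: field accessors that were spelled with "NoBarrier" now use "Relaxed", matching the C++11 memory-order vocabulary (std::memory_order_relaxed). As a point of reference only, here is a minimal stand-alone sketch of what a relaxed field read and write mean in standard C++; these helpers are illustrative and are not V8's actual RELAXED_READ_FIELD / RELAXED_WRITE_FIELD macros (the real macros live in src/objects/object-macros.h, which this CL also touches).

// Illustrative only: shows the C++11 semantics the "Relaxed" name refers to.
// These helpers are NOT V8's RELAXED_READ_FIELD/RELAXED_WRITE_FIELD macros;
// they are a self-contained sketch built on std::atomic.
#include <atomic>

struct Object;  // opaque tagged-pointer target, as in V8

// A "relaxed" read: atomic (never a torn value), but imposing no ordering
// on surrounding memory operations.
inline Object* RelaxedReadField(std::atomic<Object*>* slot) {
  return slot->load(std::memory_order_relaxed);
}

// A "relaxed" write: atomic, again with no ordering guarantees attached.
inline void RelaxedWriteField(std::atomic<Object*>* slot, Object* value) {
  slot->store(value, std::memory_order_relaxed);
}

The old "NoBarrier" name described the same thing in older Chromium atomics terminology, but could be misread as referring to the GC write barrier, which is a separate mechanism.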
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 //
 // Review notes:
 //
 // - The use of macros in these inline functions may seem superfluous
 //   but it is absolutely needed to make sure gcc generates optimal
 //   code. gcc is not happy when attempting to inline too deep.
 //
(...skipping 1313 matching lines...)
     value->GetHeap()->incremental_marking()->RecordWrite(this, nullptr, value);
   }
 }

 HeapObject** HeapObject::map_slot() {
   return reinterpret_cast<HeapObject**>(FIELD_ADDR(this, kMapOffset));
 }

 MapWord HeapObject::map_word() const {
   return MapWord(
-      reinterpret_cast<uintptr_t>(NOBARRIER_READ_FIELD(this, kMapOffset)));
+      reinterpret_cast<uintptr_t>(RELAXED_READ_FIELD(this, kMapOffset)));
 }


 void HeapObject::set_map_word(MapWord map_word) {
-  NOBARRIER_WRITE_FIELD(
-      this, kMapOffset, reinterpret_cast<Object*>(map_word.value_));
+  RELAXED_WRITE_FIELD(this, kMapOffset,
+                      reinterpret_cast<Object*>(map_word.value_));
 }


 MapWord HeapObject::synchronized_map_word() const {
   return MapWord(
       reinterpret_cast<uintptr_t>(ACQUIRE_READ_FIELD(this, kMapOffset)));
 }


 void HeapObject::synchronized_set_map_word(MapWord map_word) {
(...skipping 829 matching lines...)
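Note the split visible in the chunk above: HeapObject::map_word() now does a relaxed read of the map slot, while synchronized_map_word() keeps an acquire read (synchronized_set_map_word(), whose body falls in the elided lines, presumably pairs it with a release store). Below is a stand-alone sketch of that relaxed vs. acquire/release pattern in standard C++; the names and layout are illustrative, not V8's.

// Sketch of the relaxed vs. acquire/release split seen above (map_word vs.
// synchronized_map_word). Types and field names are illustrative, not V8's.
#include <atomic>

struct Map;

struct FakeHeapObject {
  std::atomic<Map*> map_slot{nullptr};
  int payload = 0;

  // Publisher: fill in the object, then release-store the map so a reader
  // that acquire-loads the map also observes the payload written before it.
  void SynchronizedSetMap(Map* m, int value) {
    payload = value;
    map_slot.store(m, std::memory_order_release);
  }

  // Concurrent reader (e.g. a GC helper thread) pairs with the release store.
  Map* SynchronizedMap() const {
    return map_slot.load(std::memory_order_acquire);
  }

  // Main-thread fast path: relaxed is enough when no cross-thread
  // publication ordering is needed.
  Map* RelaxedMap() const {
    return map_slot.load(std::memory_order_relaxed);
  }
};

The acquire/release pair matters when one thread publishes a freshly initialized object and another thread reads its map; the relaxed variant is the cheaper option when no such cross-thread publication is involved.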
         IsUndefined(isolate) || IsTrue(isolate) || IsFalse(isolate) ||
         IsNull(isolate))) {
     FATAL("API call returned invalid object");
   }
 #endif  // DEBUG
 }


 Object* FixedArray::get(int index) const {
   SLOW_DCHECK(index >= 0 && index < this->length());
-  return NOBARRIER_READ_FIELD(this, kHeaderSize + index * kPointerSize);
+  return RELAXED_READ_FIELD(this, kHeaderSize + index * kPointerSize);
 }

 Handle<Object> FixedArray::get(FixedArray* array, int index, Isolate* isolate) {
   return handle(array->get(index), isolate);
 }

 template <class T>
 MaybeHandle<T> FixedArray::GetValue(Isolate* isolate, int index) const {
   Object* obj = get(index);
   if (obj->IsUndefined(isolate)) return MaybeHandle<T>();
   return Handle<T>(T::cast(obj), isolate);
 }

 template <class T>
 Handle<T> FixedArray::GetValueChecked(Isolate* isolate, int index) const {
   Object* obj = get(index);
   CHECK(!obj->IsUndefined(isolate));
   return Handle<T>(T::cast(obj), isolate);
 }
 bool FixedArray::is_the_hole(Isolate* isolate, int index) {
   return get(index)->IsTheHole(isolate);
 }

 void FixedArray::set(int index, Smi* value) {
   DCHECK(map() != GetHeap()->fixed_cow_array_map());
   DCHECK(index >= 0 && index < this->length());
   DCHECK(reinterpret_cast<Object*>(value)->IsSmi());
   int offset = kHeaderSize + index * kPointerSize;
-  NOBARRIER_WRITE_FIELD(this, offset, value);
+  RELAXED_WRITE_FIELD(this, offset, value);
 }


 void FixedArray::set(int index, Object* value) {
   DCHECK_NE(GetHeap()->fixed_cow_array_map(), map());
   DCHECK(IsFixedArray());
   DCHECK_GE(index, 0);
   DCHECK_LT(index, this->length());
   int offset = kHeaderSize + index * kPointerSize;
-  NOBARRIER_WRITE_FIELD(this, offset, value);
+  RELAXED_WRITE_FIELD(this, offset, value);
   WRITE_BARRIER(GetHeap(), this, offset, value);
 }


 double FixedDoubleArray::get_scalar(int index) {
   DCHECK(map() != GetHeap()->fixed_cow_array_map() &&
          map() != GetHeap()->fixed_array_map());
   DCHECK(index >= 0 && index < this->length());
   DCHECK(!is_the_hole(index));
   return READ_DOUBLE_FIELD(this, kHeaderSize + index * kDoubleSize);
(...skipping 204 matching lines...)
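In FixedArray::set() above, the relaxed store is immediately followed by WRITE_BARRIER: the memory-ordering annotation and the GC write barrier are independent concerns, which the old "NoBarrier" spelling tended to blur. A toy sketch of that two-step pattern follows; IncrementalMarker and RecordSlotWrite are hypothetical stand-ins, not V8 APIs.

// Toy illustration of "relaxed store + GC write barrier" as separate steps.
// IncrementalMarker and RecordSlotWrite are hypothetical stand-ins.
#include <atomic>

struct Object;

struct IncrementalMarker {
  void RecordSlotWrite(std::atomic<Object*>* slot, Object* value) {
    // A real collector would remember or re-mark the written slot here.
    (void)slot;
    (void)value;
  }
};

inline void SetSlot(std::atomic<Object*>* slot, Object* value,
                    IncrementalMarker* marker) {
  // Step 1: the store itself only needs relaxed ordering on this path.
  slot->store(value, std::memory_order_relaxed);
  // Step 2: the GC write barrier is orthogonal bookkeeping, not a fence.
  marker->RecordSlotWrite(slot, value);
}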
 }


 void FixedArray::set(int index,
                      Object* value,
                      WriteBarrierMode mode) {
   DCHECK_NE(map(), GetHeap()->fixed_cow_array_map());
   DCHECK_GE(index, 0);
   DCHECK_LT(index, this->length());
   int offset = kHeaderSize + index * kPointerSize;
-  NOBARRIER_WRITE_FIELD(this, offset, value);
+  RELAXED_WRITE_FIELD(this, offset, value);
   CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);
 }


 void FixedArray::NoWriteBarrierSet(FixedArray* array,
                                    int index,
                                    Object* value) {
   DCHECK_NE(array->map(), array->GetHeap()->fixed_cow_array_map());
   DCHECK_GE(index, 0);
   DCHECK_LT(index, array->length());
   DCHECK(!array->GetHeap()->InNewSpace(value));
-  NOBARRIER_WRITE_FIELD(array, kHeaderSize + index * kPointerSize, value);
+  RELAXED_WRITE_FIELD(array, kHeaderSize + index * kPointerSize, value);
 }

 void FixedArray::set_undefined(int index) {
   set_undefined(GetIsolate(), index);
 }

 void FixedArray::set_undefined(Isolate* isolate, int index) {
   FixedArray::NoWriteBarrierSet(this, index,
                                 isolate->heap()->undefined_value());
 }
(...skipping 726 matching lines...)
 HashTable<Derived, Shape, Key>::cast(const Object* obj) {
   SLOW_DCHECK(obj->IsHashTable());
   return reinterpret_cast<const HashTable*>(obj);
 }


 SMI_ACCESSORS(FixedArrayBase, length, kLengthOffset)
 SYNCHRONIZED_SMI_ACCESSORS(FixedArrayBase, length, kLengthOffset)

 SMI_ACCESSORS(FreeSpace, size, kSizeOffset)
-NOBARRIER_SMI_ACCESSORS(FreeSpace, size, kSizeOffset)
+RELAXED_SMI_ACCESSORS(FreeSpace, size, kSizeOffset)

 SMI_ACCESSORS(String, length, kLengthOffset)
 SYNCHRONIZED_SMI_ACCESSORS(String, length, kLengthOffset)


 int FreeSpace::Size() { return size(); }


 FreeSpace* FreeSpace::next() {
   DCHECK(map() == GetHeap()->root(Heap::kFreeSpaceMapRootIndex) ||
          (!GetHeap()->deserialization_complete() && map() == NULL));
-  DCHECK_LE(kNextOffset + kPointerSize, nobarrier_size());
+  DCHECK_LE(kNextOffset + kPointerSize, relaxed_read_size());
   return reinterpret_cast<FreeSpace*>(
       Memory::Address_at(address() + kNextOffset));
 }


 void FreeSpace::set_next(FreeSpace* next) {
   DCHECK(map() == GetHeap()->root(Heap::kFreeSpaceMapRootIndex) ||
          (!GetHeap()->deserialization_complete() && map() == NULL));
-  DCHECK_LE(kNextOffset + kPointerSize, nobarrier_size());
-  base::NoBarrier_Store(
+  DCHECK_LE(kNextOffset + kPointerSize, relaxed_read_size());
+  base::Relaxed_Store(
       reinterpret_cast<base::AtomicWord*>(address() + kNextOffset),
       reinterpret_cast<base::AtomicWord>(next));
 }


 FreeSpace* FreeSpace::cast(HeapObject* o) {
   SLOW_DCHECK(!o->GetHeap()->deserialization_complete() || o->IsFreeSpace());
   return reinterpret_cast<FreeSpace*>(o);
 }

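FreeSpace::set_next() above goes through base::Relaxed_Store (formerly base::NoBarrier_Store) on a raw address rather than a typed atomic field. A rough stand-in for that kind of helper is sketched below, assuming C++20 std::atomic_ref and a naturally aligned word; V8's actual implementation lives in its base atomics layer (src/base/atomicops.h) and predates atomic_ref.

// Rough stand-in for a Relaxed_Store-style helper over a raw word in memory.
// Assumes C++20 std::atomic_ref and that *addr is suitably aligned; this is
// not how V8's base::Relaxed_Store is actually implemented.
#include <atomic>
#include <cstdint>

using AtomicWord = std::intptr_t;

inline void RelaxedStoreWord(AtomicWord* addr, AtomicWord value) {
  std::atomic_ref<AtomicWord>(*addr).store(value, std::memory_order_relaxed);
}

inline AtomicWord RelaxedLoadWord(AtomicWord* addr) {
  return std::atomic_ref<AtomicWord>(*addr).load(std::memory_order_relaxed);
}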
(...skipping 860 matching lines...)
 }


 void Map::set_visitor_id(int id) {
   DCHECK(0 <= id && id < 256);
   WRITE_BYTE_FIELD(this, kVisitorIdOffset, static_cast<byte>(id));
 }


 int Map::instance_size() {
-  return NOBARRIER_READ_BYTE_FIELD(
-      this, kInstanceSizeOffset) << kPointerSizeLog2;
+  return RELAXED_READ_BYTE_FIELD(this, kInstanceSizeOffset) << kPointerSizeLog2;
 }


 int Map::inobject_properties_or_constructor_function_index() {
   return READ_BYTE_FIELD(this,
                          kInObjectPropertiesOrConstructorFunctionIndexOffset);
 }


 void Map::set_inobject_properties_or_constructor_function_index(int value) {
(...skipping 59 matching lines...)
     return SeqOneByteString::SizeFor(
         reinterpret_cast<SeqOneByteString*>(this)->synchronized_length());
   }
   if (instance_type == BYTE_ARRAY_TYPE) {
     return reinterpret_cast<ByteArray*>(this)->ByteArraySize();
   }
   if (instance_type == BYTECODE_ARRAY_TYPE) {
     return reinterpret_cast<BytecodeArray*>(this)->BytecodeArraySize();
   }
   if (instance_type == FREE_SPACE_TYPE) {
-    return reinterpret_cast<FreeSpace*>(this)->nobarrier_size();
+    return reinterpret_cast<FreeSpace*>(this)->relaxed_read_size();
   }
   if (instance_type == STRING_TYPE ||
       instance_type == INTERNALIZED_STRING_TYPE) {
     // Strings may get concurrently truncated, hence we have to access its
     // length synchronized.
     return SeqTwoByteString::SizeFor(
         reinterpret_cast<SeqTwoByteString*>(this)->synchronized_length());
   }
   if (instance_type == FIXED_DOUBLE_ARRAY_TYPE) {
     return FixedDoubleArray::SizeFor(
         reinterpret_cast<FixedDoubleArray*>(this)->length());
   }
   if (instance_type >= FIRST_FIXED_TYPED_ARRAY_TYPE &&
       instance_type <= LAST_FIXED_TYPED_ARRAY_TYPE) {
     return reinterpret_cast<FixedTypedArrayBase*>(
         this)->TypedArraySize(instance_type);
   }
   if (instance_type == SMALL_ORDERED_HASH_SET_TYPE) {
     return reinterpret_cast<SmallOrderedHashSet*>(this)->Size();
   }
   DCHECK(instance_type == CODE_TYPE);
   return reinterpret_cast<Code*>(this)->CodeSize();
 }


 void Map::set_instance_size(int value) {
   DCHECK_EQ(0, value & (kPointerSize - 1));
   value >>= kPointerSizeLog2;
   DCHECK(0 <= value && value < 256);
-  NOBARRIER_WRITE_BYTE_FIELD(
-      this, kInstanceSizeOffset, static_cast<byte>(value));
+  RELAXED_WRITE_BYTE_FIELD(this, kInstanceSizeOffset, static_cast<byte>(value));
 }


 void Map::clear_unused() { WRITE_BYTE_FIELD(this, kUnusedOffset, 0); }


 InstanceType Map::instance_type() {
   return static_cast<InstanceType>(READ_BYTE_FIELD(this, kInstanceTypeOffset));
 }

(...skipping 2180 matching lines...)
 }


 void Foreign::set_foreign_address(Address value) {
   WRITE_INTPTR_FIELD(this, kForeignAddressOffset, OffsetFrom(value));
 }

 template <class Derived>
 void SmallOrderedHashTable<Derived>::SetDataEntry(int entry, Object* value) {
   int offset = GetDataEntryOffset(entry);
-  NOBARRIER_WRITE_FIELD(this, offset, value);
+  RELAXED_WRITE_FIELD(this, offset, value);
   WRITE_BARRIER(GetHeap(), this, offset, value);
 }

 ACCESSORS(JSGeneratorObject, function, JSFunction, kFunctionOffset)
 ACCESSORS(JSGeneratorObject, context, Context, kContextOffset)
 ACCESSORS(JSGeneratorObject, receiver, Object, kReceiverOffset)
 ACCESSORS(JSGeneratorObject, input_or_debug_pos, Object, kInputOrDebugPosOffset)
 SMI_ACCESSORS(JSGeneratorObject, resume_mode, kResumeModeOffset)
 SMI_ACCESSORS(JSGeneratorObject, continuation, kContinuationOffset)
 ACCESSORS(JSGeneratorObject, register_file, FixedArray, kRegisterFileOffset)
(...skipping 1540 matching lines...)

 ACCESSORS(JSStringIterator, string, String, kStringOffset)
 SMI_ACCESSORS(JSStringIterator, index, kNextIndexOffset)

 }  // namespace internal
 }  // namespace v8

 #include "src/objects/object-macros-undef.h"

 #endif  // V8_OBJECTS_INL_H_
