| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 // | 4 // |
| 5 // Review notes: | 5 // Review notes: |
| 6 // | 6 // |
| 7 // - The use of macros in these inline functions may seem superfluous | 7 // - The use of macros in these inline functions may seem superfluous |
| 8 // but it is absolutely needed to make sure gcc generates optimal | 8 // but it is absolutely needed to make sure gcc generates optimal |
| 9 // code. gcc is not happy when attempting to inline too deep. | 9 // code. gcc is not happy when attempting to inline too deep. |
| 10 // | 10 // |
| (...skipping 139 matching lines...) | |
| 150 bool Object::IsSmi() { | 150 bool Object::IsSmi() { |
| 151 return HAS_SMI_TAG(this); | 151 return HAS_SMI_TAG(this); |
| 152 } | 152 } |
| 153 | 153 |
| 154 | 154 |
| 155 bool Object::IsHeapObject() { | 155 bool Object::IsHeapObject() { |
| 156 return Internals::HasHeapObjectTag(this); | 156 return Internals::HasHeapObjectTag(this); |
| 157 } | 157 } |
| 158 | 158 |
| 159 | 159 |
| 160 bool Object::NonFailureIsHeapObject() { | |
| 161 ASSERT(!this->IsFailure()); | |
| 162 return (reinterpret_cast<intptr_t>(this) & kSmiTagMask) != 0; | |
| 163 } | |
| 164 | |
| 165 | |
| 166 TYPE_CHECKER(HeapNumber, HEAP_NUMBER_TYPE) | 160 TYPE_CHECKER(HeapNumber, HEAP_NUMBER_TYPE) |
| 167 TYPE_CHECKER(Symbol, SYMBOL_TYPE) | 161 TYPE_CHECKER(Symbol, SYMBOL_TYPE) |
| 168 | 162 |
| 169 | 163 |
| 170 bool Object::IsString() { | 164 bool Object::IsString() { |
| 171 return Object::IsHeapObject() | 165 return Object::IsHeapObject() |
| 172 && HeapObject::cast(this)->map()->instance_type() < FIRST_NONSTRING_TYPE; | 166 && HeapObject::cast(this)->map()->instance_type() < FIRST_NONSTRING_TYPE; |
| 173 } | 167 } |
| 174 | 168 |
| 175 | 169 |
| (...skipping 469 matching lines...) | |
| 645 bool Object::IsFixedTypedArrayBase() { | 639 bool Object::IsFixedTypedArrayBase() { |
| 646 if (!Object::IsHeapObject()) return false; | 640 if (!Object::IsHeapObject()) return false; |
| 647 | 641 |
| 648 InstanceType instance_type = | 642 InstanceType instance_type = |
| 649 HeapObject::cast(this)->map()->instance_type(); | 643 HeapObject::cast(this)->map()->instance_type(); |
| 650 return (instance_type >= FIRST_FIXED_TYPED_ARRAY_TYPE && | 644 return (instance_type >= FIRST_FIXED_TYPED_ARRAY_TYPE && |
| 651 instance_type <= LAST_FIXED_TYPED_ARRAY_TYPE); | 645 instance_type <= LAST_FIXED_TYPED_ARRAY_TYPE); |
| 652 } | 646 } |
| 653 | 647 |
| 654 | 648 |
| 655 bool MaybeObject::IsFailure() { | |
| 656 return HAS_FAILURE_TAG(this); | |
| 657 } | |
| 658 | |
| 659 | |
| 660 bool MaybeObject::IsRetryAfterGC() { | |
| 661 return HAS_FAILURE_TAG(this) | |
| 662 && Failure::cast(this)->type() == Failure::RETRY_AFTER_GC; | |
| 663 } | |
| 664 | |
| 665 | |
| 666 Failure* Failure::cast(MaybeObject* obj) { | |
| 667 ASSERT(HAS_FAILURE_TAG(obj)); | |
| 668 return reinterpret_cast<Failure*>(obj); | |
| 669 } | |
| 670 | |
| 671 | |
| 672 bool Object::IsJSReceiver() { | 649 bool Object::IsJSReceiver() { |
| 673 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE); | 650 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE); |
| 674 return IsHeapObject() && | 651 return IsHeapObject() && |
| 675 HeapObject::cast(this)->map()->instance_type() >= FIRST_JS_RECEIVER_TYPE; | 652 HeapObject::cast(this)->map()->instance_type() >= FIRST_JS_RECEIVER_TYPE; |
| 676 } | 653 } |
| 677 | 654 |
| 678 | 655 |
| 679 bool Object::IsJSObject() { | 656 bool Object::IsJSObject() { |
| 680 STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE); | 657 STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE); |
| 681 return IsHeapObject() && | 658 return IsHeapObject() && |
| (...skipping 593 matching lines...) | |
| 1275 } | 1252 } |
| 1276 | 1253 |
| 1277 | 1254 |
| 1278 Smi* Smi::FromIntptr(intptr_t value) { | 1255 Smi* Smi::FromIntptr(intptr_t value) { |
| 1279 ASSERT(Smi::IsValid(value)); | 1256 ASSERT(Smi::IsValid(value)); |
| 1280 int smi_shift_bits = kSmiTagSize + kSmiShiftSize; | 1257 int smi_shift_bits = kSmiTagSize + kSmiShiftSize; |
| 1281 return reinterpret_cast<Smi*>((value << smi_shift_bits) | kSmiTag); | 1258 return reinterpret_cast<Smi*>((value << smi_shift_bits) | kSmiTag); |
| 1282 } | 1259 } |
| 1283 | 1260 |
| 1284 | 1261 |
| 1285 Failure::Type Failure::type() const { | |
| 1286 return static_cast<Type>(value() & kFailureTypeTagMask); | |
| 1287 } | |
| 1288 | |
| 1289 | |
| 1290 AllocationSpace Failure::allocation_space() const { | |
| 1291 ASSERT_EQ(RETRY_AFTER_GC, type()); | |
| 1292 return static_cast<AllocationSpace>((value() >> kFailureTypeTagSize) | |
| 1293 & kSpaceTagMask); | |
| 1294 } | |
| 1295 | |
| 1296 | |
| 1297 intptr_t Failure::value() const { | |
| 1298 return static_cast<intptr_t>( | |
| 1299 reinterpret_cast<uintptr_t>(this) >> kFailureTagSize); | |
| 1300 } | |
| 1301 | |
| 1302 | |
| 1303 Failure* Failure::RetryAfterGC() { | |
| 1304 return RetryAfterGC(NEW_SPACE); | |
| 1305 } | |
| 1306 | |
| 1307 | |
| 1308 Failure* Failure::RetryAfterGC(AllocationSpace space) { | |
| 1309 ASSERT((space & ~kSpaceTagMask) == 0); | |
| 1310 return Construct(RETRY_AFTER_GC, space); | |
| 1311 } | |
| 1312 | |
| 1313 | |
| 1314 Failure* Failure::Construct(Type type, intptr_t value) { | |
| 1315 uintptr_t info = | |
| 1316 (static_cast<uintptr_t>(value) << kFailureTypeTagSize) | type; | |
| 1317 ASSERT(((info << kFailureTagSize) >> kFailureTagSize) == info); | |
| 1318 // Fill the unused bits with a pattern that's easy to recognize in crash | |
| 1319 // dumps. | |
| 1320 static const int kFailureMagicPattern = 0x0BAD0000; | |
| 1321 return reinterpret_cast<Failure*>( | |
| 1322 (info << kFailureTagSize) | kFailureTag | kFailureMagicPattern); | |
| 1323 } | |
| 1324 | |
| 1325 | |
| 1326 bool Smi::IsValid(intptr_t value) { | 1262 bool Smi::IsValid(intptr_t value) { |
| 1327 bool result = Internals::IsValidSmi(value); | 1263 bool result = Internals::IsValidSmi(value); |
| 1328 ASSERT_EQ(result, value >= kMinValue && value <= kMaxValue); | 1264 ASSERT_EQ(result, value >= kMinValue && value <= kMaxValue); |
| 1329 return result; | 1265 return result; |
| 1330 } | 1266 } |
| 1331 | 1267 |
| 1332 | 1268 |
| 1333 MapWord MapWord::FromMap(Map* map) { | 1269 MapWord MapWord::FromMap(Map* map) { |
| 1334 return MapWord(reinterpret_cast<uintptr_t>(map)); | 1270 return MapWord(reinterpret_cast<uintptr_t>(map)); |
| 1335 } | 1271 } |
| (...skipping 5566 matching lines...) | |
| 6902 #undef READ_SHORT_FIELD | 6838 #undef READ_SHORT_FIELD |
| 6903 #undef WRITE_SHORT_FIELD | 6839 #undef WRITE_SHORT_FIELD |
| 6904 #undef READ_BYTE_FIELD | 6840 #undef READ_BYTE_FIELD |
| 6905 #undef WRITE_BYTE_FIELD | 6841 #undef WRITE_BYTE_FIELD |
| 6906 #undef NOBARRIER_READ_BYTE_FIELD | 6842 #undef NOBARRIER_READ_BYTE_FIELD |
| 6907 #undef NOBARRIER_WRITE_BYTE_FIELD | 6843 #undef NOBARRIER_WRITE_BYTE_FIELD |
| 6908 | 6844 |
| 6909 } } // namespace v8::internal | 6845 } } // namespace v8::internal |
| 6910 | 6846 |
| 6911 #endif // V8_OBJECTS_INL_H_ | 6847 #endif // V8_OBJECTS_INL_H_ |
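The bulk of this change deletes the Failure / MaybeObject machinery (IsFailure, Failure::cast, Failure::type, Failure::Construct, and friends) while keeping the Smi and HeapObject predicates. Both rest on the same low-bit pointer-tagging idea, and the self-contained sketch below illustrates it. The constant values are assumptions chosen to mirror what I believe was V8's 32-bit layout at the time, and the helper names (EncodeSmi, DecodeSmi, HasSmiTag, HasFailureTag) are hypothetical; nothing here is part of the patch itself.

```cpp
// Minimal sketch of the low-bit tagging that Smi::FromIntptr and the
// (now-deleted) Failure code rely on. Constants are illustrative
// assumptions, not values taken from this CL.
#include <cassert>
#include <cstdint>
#include <cstdio>

namespace tagging_sketch {

constexpr uintptr_t kSmiTag = 0;      // ...xxx0 : small integer (Smi)
constexpr int kSmiTagSize = 1;
constexpr uintptr_t kSmiTagMask = 1;
constexpr uintptr_t kFailureTag = 3;  // ...xx11 : Failure (removed by this CL)
constexpr uintptr_t kFailureTagMask = 3;

// Encode an integer as a Smi-style tagged word (cf. Smi::FromIntptr above).
constexpr uintptr_t EncodeSmi(intptr_t value) {
  return (static_cast<uintptr_t>(value) << kSmiTagSize) | kSmiTag;
}

// Decode it again; an arithmetic right shift restores the original value.
inline intptr_t DecodeSmi(uintptr_t word) {
  return static_cast<intptr_t>(word) >> kSmiTagSize;
}

// What IsSmi() / IsFailure() boil down to: mask-and-compare on the low bits.
constexpr bool HasSmiTag(uintptr_t word) {
  return (word & kSmiTagMask) == kSmiTag;
}
constexpr bool HasFailureTag(uintptr_t word) {
  return (word & kFailureTagMask) == kFailureTag;
}

}  // namespace tagging_sketch

int main() {
  using namespace tagging_sketch;
  const uintptr_t smi = EncodeSmi(42);
  assert(HasSmiTag(smi));
  assert(!HasFailureTag(smi));
  assert(DecodeSmi(smi) == 42);
  std::printf("42 encodes as %#lx and decodes back to %ld\n",
              static_cast<unsigned long>(smi),
              static_cast<long>(DecodeSmi(smi)));
  return 0;
}
```

This also explains the deleted NonFailureIsHeapObject(): a Failure pointer has its low bit set just like a heap object, so that helper had to assert !IsFailure() before treating a set low bit as the heap-object marker. With Failure gone, the plain IsHeapObject() check is sufficient.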