| OLD | NEW |
| 1 // Copyright 2006-2008 the V8 project authors. All rights reserved. | 1 // Copyright 2006-2008 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 17 matching lines...) |
| 28 // Review notes: | 28 // Review notes: |
| 29 // | 29 // |
| 30 // - The use of macros in these inline functions may seem superfluous | 30 // - The use of macros in these inline functions may seem superfluous |
| 31 // but it is absolutely needed to make sure gcc generates optimal | 31 // but it is absolutely needed to make sure gcc generates optimal |
| 32 // code. gcc is not happy when attempting to inline too deep. | 32 // code. gcc is not happy when attempting to inline too deep. |
| 33 // | 33 // |
| 34 | 34 |
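The note above refers to the field-access macros (READ_FIELD, READ_BYTE_FIELD and friends) that every accessor in this file expands to. As a rough sketch, assuming the usual V8 tagged-pointer layout, such macros reduce an accessor to a single load or store on the untagged object address, which is what keeps the inlined code small; the real definitions appear earlier in objects-inl.h and may differ in detail.

// Hedged sketch of the field-access macros referenced by the note above.
#define FIELD_ADDR(p, offset) \
  (reinterpret_cast<byte*>(p) + offset - kHeapObjectTag)

#define READ_FIELD(p, offset) \
  (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)))

#define WRITE_FIELD(p, offset, value) \
  (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)) = value)

#define READ_BYTE_FIELD(p, offset) \
  (*reinterpret_cast<byte*>(FIELD_ADDR(p, offset)))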
| 35 #ifndef V8_OBJECTS_INL_H_ | 35 #ifndef V8_OBJECTS_INL_H_ |
| 36 #define V8_OBJECTS_INL_H_ | 36 #define V8_OBJECTS_INL_H_ |
| 37 | 37 |
| 38 #include "objects.h" | 38 #include "memory.h" |
| 39 #include "contexts.h" | 39 #include "contexts.h" |
| 40 #include "conversions-inl.h" | 40 #include "conversions-inl.h" |
| 41 #include "objects.h" |
| 41 #include "property.h" | 42 #include "property.h" |
| 42 | 43 |
| 43 namespace v8 { | 44 namespace v8 { |
| 44 namespace internal { | 45 namespace internal { |
| 45 | 46 |
| 46 PropertyDetails::PropertyDetails(Smi* smi) { | 47 PropertyDetails::PropertyDetails(Smi* smi) { |
| 47 value_ = smi->value(); | 48 value_ = smi->value(); |
| 48 } | 49 } |
| 49 | 50 |
| 50 | 51 |
| (...skipping 2053 matching lines...) |
| 2104 return READ_BYTE_FIELD(this, kInObjectPropertiesOffset); | 2105 return READ_BYTE_FIELD(this, kInObjectPropertiesOffset); |
| 2105 } | 2106 } |
| 2106 | 2107 |
| 2107 | 2108 |
| 2108 int Map::pre_allocated_property_fields() { | 2109 int Map::pre_allocated_property_fields() { |
| 2109 return READ_BYTE_FIELD(this, kPreAllocatedPropertyFieldsOffset); | 2110 return READ_BYTE_FIELD(this, kPreAllocatedPropertyFieldsOffset); |
| 2110 } | 2111 } |
| 2111 | 2112 |
| 2112 | 2113 |
| 2113 int HeapObject::SizeFromMap(Map* map) { | 2114 int HeapObject::SizeFromMap(Map* map) { |
| 2114 InstanceType instance_type = map->instance_type(); | 2115 int instance_size = map->instance_size(); |
| 2116 if (instance_size != kVariableSizeSentinel) return instance_size; |
| 2117 // We can ignore the "symbol" bit because it is only set for symbols |
| 2118 // and implies a string type. |
| 2119 int instance_type = static_cast<int>(map->instance_type()) & ~kIsSymbolMask; |
| 2115 // Only inline the most frequent cases. | 2120 // Only inline the most frequent cases. |
| 2116 if (instance_type == JS_OBJECT_TYPE || | |
| 2117 (instance_type & (kIsNotStringMask | kStringRepresentationMask)) == | |
| 2118 (kStringTag | kConsStringTag) || | |
| 2119 instance_type == JS_ARRAY_TYPE) return map->instance_size(); | |
| 2120 if (instance_type == FIXED_ARRAY_TYPE) { | 2121 if (instance_type == FIXED_ARRAY_TYPE) { |
| 2121 return FixedArray::BodyDescriptor::SizeOf(map, this); | 2122 return FixedArray::BodyDescriptor::SizeOf(map, this); |
| 2122 } | 2123 } |
| 2124 if (instance_type == ASCII_STRING_TYPE) { |
| 2125 return SeqAsciiString::SizeFor( |
| 2126 reinterpret_cast<SeqAsciiString*>(this)->length()); |
| 2127 } |
| 2123 if (instance_type == BYTE_ARRAY_TYPE) { | 2128 if (instance_type == BYTE_ARRAY_TYPE) { |
| 2124 return reinterpret_cast<ByteArray*>(this)->ByteArraySize(); | 2129 return reinterpret_cast<ByteArray*>(this)->ByteArraySize(); |
| 2125 } | 2130 } |
| 2126 // Otherwise do the general size computation. | 2131 if (instance_type == STRING_TYPE) { |
| 2127 return SlowSizeFromMap(map); | 2132 return SeqTwoByteString::SizeFor( |
| 2133 reinterpret_cast<SeqTwoByteString*>(this)->length()); |
| 2134 } |
| 2135 ASSERT(instance_type == CODE_TYPE); |
| 2136 return reinterpret_cast<Code*>(this)->CodeSize(); |
| 2128 } | 2137 } |
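The rewritten SizeFromMap first trusts the fixed size recorded in the map and falls back to a per-type computation only for the variable-sized instance types handled above. As a hedged sketch (the exact header constants and the alignment macro live in objects.h/globals.h and may differ), those SizeFor helpers are plain header-plus-payload arithmetic rounded up to pointer alignment:

// Illustrative only; the real SeqAsciiString/SeqTwoByteString::SizeFor
// definitions may differ in detail.
static inline int SeqAsciiStringSizeSketch(int length) {
  // one byte per character, rounded up to an aligned object size
  return OBJECT_POINTER_ALIGN(SeqAsciiString::kHeaderSize +
                              length * kCharSize);
}

static inline int SeqTwoByteStringSizeSketch(int length) {
  // two bytes per character
  return OBJECT_POINTER_ALIGN(SeqTwoByteString::kHeaderSize +
                              length * kShortSize);
}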
| 2129 | 2138 |
| 2130 | 2139 |
| 2131 void Map::set_instance_size(int value) { | 2140 void Map::set_instance_size(int value) { |
| 2132 ASSERT_EQ(0, value & (kPointerSize - 1)); | 2141 ASSERT_EQ(0, value & (kPointerSize - 1)); |
| 2133 value >>= kPointerSizeLog2; | 2142 value >>= kPointerSizeLog2; |
| 2134 ASSERT(0 <= value && value < 256); | 2143 ASSERT(0 <= value && value < 256); |
| 2135 WRITE_BYTE_FIELD(this, kInstanceSizeOffset, static_cast<byte>(value)); | 2144 WRITE_BYTE_FIELD(this, kInstanceSizeOffset, static_cast<byte>(value)); |
| 2136 } | 2145 } |
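Because instance sizes are always pointer-aligned, the setter above stores the size divided by kPointerSize so it fits in one byte (at most 255 words); for example, on a 32-bit build a 104-byte instance is stored as 26. The matching getter, sketched here for context, simply shifts the byte back:

int Map::instance_size() {
  // Inverse of set_instance_size(): the stored byte is the size in words.
  return READ_BYTE_FIELD(this, kInstanceSizeOffset) << kPointerSizeLog2;
}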
| 2137 | 2146 |
| (...skipping 257 matching lines...) |
| 2395 HeapObject* code = HeapObject::FromAddress(address - Code::kHeaderSize); | 2404 HeapObject* code = HeapObject::FromAddress(address - Code::kHeaderSize); |
| 2396 // GetCodeFromTargetAddress might be called when marking objects during mark | 2405 // GetCodeFromTargetAddress might be called when marking objects during mark |
| 2397 // sweep. reinterpret_cast is therefore used instead of the more appropriate | 2406 // sweep. reinterpret_cast is therefore used instead of the more appropriate |
| 2398 // Code::cast. Code::cast does not work when the object's map is | 2407 // Code::cast. Code::cast does not work when the object's map is |
| 2399 // marked. | 2408 // marked. |
| 2400 Code* result = reinterpret_cast<Code*>(code); | 2409 Code* result = reinterpret_cast<Code*>(code); |
| 2401 return result; | 2410 return result; |
| 2402 } | 2411 } |
| 2403 | 2412 |
| 2404 | 2413 |
| 2414 Object* Code::GetObjectFromEntryAddress(Address location_of_address) { |
| 2415 return HeapObject:: |
| 2416 FromAddress(Memory::Address_at(location_of_address) - Code::kHeaderSize); |
| 2417 } |
| 2418 |
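GetCodeFromTargetAddress and the new GetObjectFromEntryAddress both rely on the same layout fact: a Code object's entry point sits exactly Code::kHeaderSize bytes past the start of the object, so subtracting the header size turns an entry or call-target address back into the heap object. A hedged sketch of that round trip (the helper name is illustrative):

static inline void CheckEntryRoundTrip(Code* code) {
  Address entry = code->entry();  // instruction start, just past the header
  ASSERT(HeapObject::FromAddress(entry - Code::kHeaderSize) == code);
}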
| 2419 |
| 2405 Object* Map::prototype() { | 2420 Object* Map::prototype() { |
| 2406 return READ_FIELD(this, kPrototypeOffset); | 2421 return READ_FIELD(this, kPrototypeOffset); |
| 2407 } | 2422 } |
| 2408 | 2423 |
| 2409 | 2424 |
| 2410 void Map::set_prototype(Object* value, WriteBarrierMode mode) { | 2425 void Map::set_prototype(Object* value, WriteBarrierMode mode) { |
| 2411 ASSERT(value->IsNull() || value->IsJSObject()); | 2426 ASSERT(value->IsNull() || value->IsJSObject()); |
| 2412 WRITE_FIELD(this, kPrototypeOffset, value); | 2427 WRITE_FIELD(this, kPrototypeOffset, value); |
| 2413 CONDITIONAL_WRITE_BARRIER(this, kPrototypeOffset, mode); | 2428 CONDITIONAL_WRITE_BARRIER(this, kPrototypeOffset, mode); |
| 2414 } | 2429 } |
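The setter pattern used here and throughout the file is WRITE_FIELD followed by CONDITIONAL_WRITE_BARRIER, so callers that know a store cannot create an old-to-new pointer can pass SKIP_WRITE_BARRIER and avoid the remembered-set update. Roughly, and not necessarily verbatim from this file, the macro expands to something like:

// Hedged sketch of CONDITIONAL_WRITE_BARRIER.
#define CONDITIONAL_WRITE_BARRIER(object, offset, mode) \
  if (mode == UPDATE_WRITE_BARRIER) { \
    Heap::RecordWrite(object->address(), offset); \
  }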
| (...skipping 309 matching lines...) |
| 2724 ((code_age & kCodeAgeMask) << kCodeAgeShift)); | 2739 ((code_age & kCodeAgeMask) << kCodeAgeShift)); |
| 2725 } | 2740 } |
| 2726 | 2741 |
| 2727 | 2742 |
| 2728 bool JSFunction::IsBuiltin() { | 2743 bool JSFunction::IsBuiltin() { |
| 2729 return context()->global()->IsJSBuiltinsObject(); | 2744 return context()->global()->IsJSBuiltinsObject(); |
| 2730 } | 2745 } |
| 2731 | 2746 |
| 2732 | 2747 |
| 2733 Code* JSFunction::code() { | 2748 Code* JSFunction::code() { |
| 2734 return Code::cast(READ_FIELD(this, kCodeOffset)); | 2749 return Code::cast(unchecked_code()); |
| 2735 } | 2750 } |
| 2736 | 2751 |
| 2737 | 2752 |
| 2738 Code* JSFunction::unchecked_code() { | 2753 Code* JSFunction::unchecked_code() { |
| 2739 return reinterpret_cast<Code*>(READ_FIELD(this, kCodeOffset)); | 2754 return reinterpret_cast<Code*>( |
| 2755 Code::GetObjectFromEntryAddress(FIELD_ADDR(this, kCodeEntryOffset))); |
| 2740 } | 2756 } |
| 2741 | 2757 |
| 2742 | 2758 |
| 2743 void JSFunction::set_code(Code* value) { | 2759 void JSFunction::set_code(Code* value) { |
| 2744 // Skip the write barrier because code is never in new space. | 2760 // Skip the write barrier because code is never in new space. |
| 2745 ASSERT(!Heap::InNewSpace(value)); | 2761 ASSERT(!Heap::InNewSpace(value)); |
| 2746 WRITE_FIELD(this, kCodeOffset, value); | 2762 Address entry = value->entry(); |
| 2763 WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry)); |
| 2747 } | 2764 } |
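The substantive change in this hunk is that a JSFunction no longer holds a Code pointer at kCodeOffset; it stores the raw entry address of its code at kCodeEntryOffset, and code()/unchecked_code() reconstruct the Code object from that address via Code::GetObjectFromEntryAddress. Presumably this lets generated code load a call target from the function with one fewer indirection. A hedged sketch of the invariant the accessors maintain (the helper name is illustrative):

static inline void CheckCodeEntryInvariant(JSFunction* function, Code* code) {
  function->set_code(code);  // stores code->entry() at kCodeEntryOffset
  // unchecked_code()/code() map the entry address back to the Code object
  // by subtracting Code::kHeaderSize.
  ASSERT(function->code() == code);
}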
| 2748 | 2765 |
| 2749 | 2766 |
| 2750 Context* JSFunction::context() { | 2767 Context* JSFunction::context() { |
| 2751 return Context::cast(READ_FIELD(this, kContextOffset)); | 2768 return Context::cast(READ_FIELD(this, kContextOffset)); |
| 2752 } | 2769 } |
| 2753 | 2770 |
| 2754 | 2771 |
| 2755 Object* JSFunction::unchecked_context() { | 2772 Object* JSFunction::unchecked_context() { |
| 2756 return READ_FIELD(this, kContextOffset); | 2773 return READ_FIELD(this, kContextOffset); |
| (...skipping 692 matching lines...) |
| 3449 #undef WRITE_INT_FIELD | 3466 #undef WRITE_INT_FIELD |
| 3450 #undef READ_SHORT_FIELD | 3467 #undef READ_SHORT_FIELD |
| 3451 #undef WRITE_SHORT_FIELD | 3468 #undef WRITE_SHORT_FIELD |
| 3452 #undef READ_BYTE_FIELD | 3469 #undef READ_BYTE_FIELD |
| 3453 #undef WRITE_BYTE_FIELD | 3470 #undef WRITE_BYTE_FIELD |
| 3454 | 3471 |
| 3455 | 3472 |
| 3456 } } // namespace v8::internal | 3473 } } // namespace v8::internal |
| 3457 | 3474 |
| 3458 #endif // V8_OBJECTS_INL_H_ | 3475 #endif // V8_OBJECTS_INL_H_ |