OLD | NEW |
1 // Copyright 2006-2009 the V8 project authors. All rights reserved. | 1 // Copyright 2006-2009 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 2096 matching lines...) |
2107 if (HasLocalElement(index)) return NONE; | 2107 if (HasLocalElement(index)) return NONE; |
2108 return ABSENT; | 2108 return ABSENT; |
2109 } | 2109 } |
2110 // Named property. | 2110 // Named property. |
2111 LookupResult result; | 2111 LookupResult result; |
2112 LocalLookup(name, &result); | 2112 LocalLookup(name, &result); |
2113 return GetPropertyAttribute(this, &result, name, false); | 2113 return GetPropertyAttribute(this, &result, name, false); |
2114 } | 2114 } |
2115 | 2115 |
2116 | 2116 |
2117 Object* NormalizedMapCache::Get(Map* fast, PropertyNormalizationMode mode) { | |
2118 int index = Hash(fast) % kEntries; | |
2119 Object* obj = get(index); | |
2120 | |
2121 if (obj->IsMap() && CheckHit(Map::cast(obj), fast, mode)) { | |
2122 #ifdef DEBUG | |
2123 if (FLAG_enable_slow_asserts) { | |
2124 // The cached map should match a freshly created normalized map bit-by-bit. |
2125 Object* fresh = fast->CopyNormalized(mode); | |
2126 if (!fresh->IsFailure()) { | |
2127 // Copy the unused byte so that the assertion below works. | |
2128 Map::cast(fresh)->address()[Map::kUnusedOffset] = | |
2129 Map::cast(obj)->address()[Map::kUnusedOffset]; | |
2130 ASSERT(memcmp(Map::cast(fresh)->address(), | |
2131 Map::cast(obj)->address(), | |
2132 Map::kSize) == 0); | |
2133 } | |
2134 } | |
2135 #endif | |
2136 return obj; | |
2137 } | |
2138 | |
2139 obj = fast->CopyNormalized(mode); | |
2140 if (obj->IsFailure()) return obj; | |
2141 set(index, obj); | |
2142 Counters::normalized_maps.Increment(); | |
2143 | |
2144 return obj; | |
2145 } | |
2146 | |
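NormalizedMapCache::Get above is a direct-mapped cache: it hashes the fast map, probes a single slot, verifies the candidate with CheckHit, and on a miss creates a normalized copy and overwrites that slot. Below is a minimal, self-contained sketch of that pattern; CacheKey, Normalize and Matches are illustrative stand-ins, not V8 types.

#include <array>
#include <cstdint>
#include <memory>

// Illustrative stand-in for the handful of Map fields the cache cares about.
struct CacheKey {
  std::uintptr_t constructor;
  std::uintptr_t prototype;
  std::uint8_t bit_field2;
};

class DirectMappedCacheSketch {
 public:
  static constexpr int kEntries = 64;

  // Hash the key, probe exactly one slot, verify the candidate; on a miss,
  // build a fresh normalized entry and overwrite that slot.
  const CacheKey* Get(const CacheKey& fast) {
    int index = Hash(fast) % kEntries;
    if (slots_[index] != nullptr && Matches(*slots_[index], fast)) {
      return slots_[index].get();  // hit: reuse the cached entry
    }
    slots_[index].reset(new CacheKey(Normalize(fast)));  // miss: replace slot
    return slots_[index].get();
  }

 private:
  static int Hash(const CacheKey& k) {
    std::uint32_t hash = static_cast<std::uint32_t>(k.constructor) >> 2;
    hash ^= static_cast<std::uint32_t>(k.prototype) << 2;
    return static_cast<int>((hash ^ (hash >> 16) ^ k.bit_field2) & 0x7fffffff);
  }
  static bool Matches(const CacheKey& a, const CacheKey& b) {
    return a.constructor == b.constructor && a.prototype == b.prototype &&
           a.bit_field2 == b.bit_field2;
  }
  // Stand-in for Map::CopyNormalized; a real cache would build a new map here.
  static CacheKey Normalize(const CacheKey& fast) { return fast; }

  std::array<std::unique_ptr<CacheKey>, kEntries> slots_;
};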
2147 | |
2148 void NormalizedMapCache::Clear() { | |
2149 int entries = length(); | |
2150 for (int i = 0; i != entries; i++) { | |
2151 set_undefined(i); | |
2152 } | |
2153 } | |
2154 | |
2155 | |
2156 int NormalizedMapCache::Hash(Map* fast) { | |
2157 // For performance reasons we only hash the 3 most variable fields of a map: | |
2158 // constructor, prototype and bit_field2. | |
2159 | |
2160 // Shift away the tag. | |
2161 int hash = (static_cast<uint32_t>( | |
2162 reinterpret_cast<uintptr_t>(fast->constructor())) >> 2); | |
2163 | |
2164 // XOR-ing the prototype and constructor directly yields too many zero bits | |
2165 // when the two pointers are close (which is fairly common). | |
2166 // To avoid this we shift the prototype 4 bits relative to the constructor. |
2167 hash ^= (static_cast<uint32_t>( | |
2168 reinterpret_cast<uintptr_t>(fast->prototype())) << 2); | |
2169 | |
2170 return hash ^ (hash >> 16) ^ fast->bit_field2(); | |
2171 } | |
2172 | |
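The comment in Hash explains why the prototype is shifted four bits relative to the constructor before XOR-ing: two heap pointers allocated close together share most of their bits, so a plain XOR cancels them and yields mostly zeros. The standalone sketch below demonstrates the effect with hypothetical tagged-pointer values; the mix_* helpers are illustrative only, not V8 code.

#include <cstdint>
#include <cstdio>

static unsigned mix_plain(std::uintptr_t a, std::uintptr_t b) {
  return static_cast<unsigned>(static_cast<std::uint32_t>(a ^ b));
}

static unsigned mix_shifted(std::uintptr_t a, std::uintptr_t b,
                            unsigned bit_field2) {
  std::uint32_t hash = static_cast<std::uint32_t>(a) >> 2;  // shift away the tag
  hash ^= static_cast<std::uint32_t>(b) << 2;               // 4-bit relative shift
  return hash ^ (hash >> 16) ^ bit_field2;                  // fold in bit_field2
}

int main() {
  // Hypothetical tagged pointers that sit close together on the heap.
  std::uintptr_t constructor = 0x08431021;
  std::uintptr_t prototype   = 0x08431061;
  std::printf("plain XOR:   %08x\n", mix_plain(constructor, prototype));    // mostly zero bits
  std::printf("shifted mix: %08x\n", mix_shifted(constructor, prototype, 0x5a));
  return 0;
}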
2173 | |
2174 bool NormalizedMapCache::CheckHit(Map* slow, | |
2175 Map* fast, | |
2176 PropertyNormalizationMode mode) { | |
2177 #ifdef DEBUG | |
2178 slow->NormalizedMapVerify(); | |
2179 #endif | |
2180 return | |
2181 slow->constructor() == fast->constructor() && | |
2182 slow->prototype() == fast->prototype() && | |
2183 slow->inobject_properties() == ((mode == CLEAR_INOBJECT_PROPERTIES) ? | |
2184 0 : | |
2185 fast->inobject_properties()) && | |
2186 slow->instance_type() == fast->instance_type() && | |
2187 slow->bit_field() == fast->bit_field() && | |
2188 slow->bit_field2() == fast->bit_field2(); | |
2189 } | |
2190 | |
2191 | |
2192 Object* JSObject::NormalizeProperties(PropertyNormalizationMode mode, | 2117 Object* JSObject::NormalizeProperties(PropertyNormalizationMode mode, |
2193 int expected_additional_properties) { | 2118 int expected_additional_properties) { |
2194 if (!HasFastProperties()) return this; | 2119 if (!HasFastProperties()) return this; |
2195 | 2120 |
2196 // The global object is always normalized. | 2121 // The global object is always normalized. |
2197 ASSERT(!IsGlobalObject()); | 2122 ASSERT(!IsGlobalObject()); |
2198 | 2123 |
2199 // Allocate new content. | 2124 // Allocate new content. |
2200 int property_count = map()->NumberOfDescribedProperties(); | 2125 int property_count = map()->NumberOfDescribedProperties(); |
2201 if (expected_additional_properties > 0) { | 2126 if (expected_additional_properties > 0) { |
(...skipping 44 matching lines...) |
2246 break; | 2171 break; |
2247 default: | 2172 default: |
2248 UNREACHABLE(); | 2173 UNREACHABLE(); |
2249 } | 2174 } |
2250 } | 2175 } |
2251 | 2176 |
2253 // Copy the next enumeration index from the instance descriptors. | 2178 // Copy the next enumeration index from the instance descriptors. |
2253 int index = map()->instance_descriptors()->NextEnumerationIndex(); | 2178 int index = map()->instance_descriptors()->NextEnumerationIndex(); |
2254 dictionary->SetNextEnumerationIndex(index); | 2179 dictionary->SetNextEnumerationIndex(index); |
2255 | 2180 |
2256 obj = Top::context()->global_context()-> | 2181 // Allocate new map. |
2257 normalized_map_cache()->Get(map(), mode); | 2182 obj = map()->CopyDropDescriptors(); |
2258 if (obj->IsFailure()) return obj; | 2183 if (obj->IsFailure()) return obj; |
2259 Map* new_map = Map::cast(obj); | 2184 Map* new_map = Map::cast(obj); |
2260 | 2185 |
| 2186 // Clear inobject properties if needed by adjusting the instance size and |
| 2187 // putting in a filler object instead of the inobject properties. |
| 2188 if (mode == CLEAR_INOBJECT_PROPERTIES && map()->inobject_properties() > 0) { |
| 2189 int instance_size_delta = map()->inobject_properties() * kPointerSize; |
| 2190 int new_instance_size = map()->instance_size() - instance_size_delta; |
| 2191 new_map->set_inobject_properties(0); |
| 2192 new_map->set_instance_size(new_instance_size); |
| 2193 new_map->set_scavenger(Heap::GetScavenger(new_map->instance_type(), |
| 2194 new_map->instance_size())); |
| 2195 Heap::CreateFillerObjectAt(this->address() + new_instance_size, |
| 2196 instance_size_delta); |
| 2197 } |
| 2198 new_map->set_unused_property_fields(0); |
| 2199 |
2261 // We have now successfully allocated all the necessary objects. | 2200 // We have now successfully allocated all the necessary objects. |
2262 // Changes can now be made with the guarantee that all of them take effect. | 2201 // Changes can now be made with the guarantee that all of them take effect. |
2263 | |
2264 // Resize the object in the heap if necessary. | |
2265 int new_instance_size = new_map->instance_size(); | |
2266 int instance_size_delta = map()->instance_size() - new_instance_size; | |
2267 ASSERT(instance_size_delta >= 0); | |
2268 Heap::CreateFillerObjectAt(this->address() + new_instance_size, | |
2269 instance_size_delta); | |
2270 | |
2271 set_map(new_map); | 2202 set_map(new_map); |
| 2203 map()->set_instance_descriptors(Heap::empty_descriptor_array()); |
2272 | 2204 |
2273 set_properties(dictionary); | 2205 set_properties(dictionary); |
2274 | 2206 |
2275 Counters::props_to_dictionary.Increment(); | 2207 Counters::props_to_dictionary.Increment(); |
2276 | 2208 |
2277 #ifdef DEBUG | 2209 #ifdef DEBUG |
2278 if (FLAG_trace_normalization) { | 2210 if (FLAG_trace_normalization) { |
2279 PrintF("Object properties have been normalized:\n"); | 2211 PrintF("Object properties have been normalized:\n"); |
2280 Print(); | 2212 Print(); |
2281 } | 2213 } |
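The new code added to JSObject::NormalizeProperties clears in-object properties by shrinking the instance size by inobject_properties() * kPointerSize and covering the freed tail of the object with a filler so the heap stays walkable. A small worked example of that arithmetic, assuming a 4-byte pointer size and made-up sizes:

#include <cassert>

int main() {
  const int kPointerSize = 4;       // assumed 32-bit target
  int inobject_properties = 3;      // hypothetical fast map with 3 in-object slots
  int instance_size = 28;           // hypothetical object size in bytes

  // CLEAR_INOBJECT_PROPERTIES drops the in-object slots from the instance size.
  int instance_size_delta = inobject_properties * kPointerSize;  // 12 bytes
  int new_instance_size = instance_size - instance_size_delta;   // 28 - 12 = 16 bytes

  assert(instance_size_delta == 12);
  assert(new_instance_size == 16);
  // The filler would be written at object_address + new_instance_size and
  // span instance_size_delta bytes, i.e. exactly the freed tail.
  return 0;
}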
(...skipping 875 matching lines...) |
3157 Map::cast(result)->set_pre_allocated_property_fields( | 3089 Map::cast(result)->set_pre_allocated_property_fields( |
3158 pre_allocated_property_fields()); | 3090 pre_allocated_property_fields()); |
3159 } | 3091 } |
3160 Map::cast(result)->set_bit_field(bit_field()); | 3092 Map::cast(result)->set_bit_field(bit_field()); |
3161 Map::cast(result)->set_bit_field2(bit_field2()); | 3093 Map::cast(result)->set_bit_field2(bit_field2()); |
3162 Map::cast(result)->ClearCodeCache(); | 3094 Map::cast(result)->ClearCodeCache(); |
3163 return result; | 3095 return result; |
3164 } | 3096 } |
3165 | 3097 |
3166 | 3098 |
3167 Object* Map::CopyNormalized(PropertyNormalizationMode mode) { | |
3168 int new_instance_size = instance_size(); | |
3169 if (mode == CLEAR_INOBJECT_PROPERTIES) { | |
3170 new_instance_size -= inobject_properties() * kPointerSize; | |
3171 } | |
3172 | |
3173 Object* result = Heap::AllocateMap(instance_type(), new_instance_size); | |
3174 if (result->IsFailure()) return result; | |
3175 | |
3176 if (mode != CLEAR_INOBJECT_PROPERTIES) { | |
3177 Map::cast(result)->set_inobject_properties(inobject_properties()); | |
3178 } | |
3179 | |
3180 Map::cast(result)->set_prototype(prototype()); | |
3181 Map::cast(result)->set_constructor(constructor()); | |
3182 | |
3183 Map::cast(result)->set_bit_field(bit_field()); | |
3184 Map::cast(result)->set_bit_field2(bit_field2()); | |
3185 | |
3186 #ifdef DEBUG | |
3187 Map::cast(result)->NormalizedMapVerify(); | |
3188 #endif | |
3189 | |
3190 return result; | |
3191 } | |
3192 | |
3193 | |
3194 Object* Map::CopyDropTransitions() { | 3099 Object* Map::CopyDropTransitions() { |
3195 Object* new_map = CopyDropDescriptors(); | 3100 Object* new_map = CopyDropDescriptors(); |
3196 if (new_map->IsFailure()) return new_map; | 3101 if (new_map->IsFailure()) return new_map; |
3197 Object* descriptors = instance_descriptors()->RemoveTransitions(); | 3102 Object* descriptors = instance_descriptors()->RemoveTransitions(); |
3198 if (descriptors->IsFailure()) return descriptors; | 3103 if (descriptors->IsFailure()) return descriptors; |
3199 cast(new_map)->set_instance_descriptors(DescriptorArray::cast(descriptors)); | 3104 cast(new_map)->set_instance_descriptors(DescriptorArray::cast(descriptors)); |
3200 return new_map; | 3105 return new_map; |
3201 } | 3106 } |
3202 | 3107 |
3203 | 3108 |
(...skipping 5702 matching lines...) |
8906 if (break_point_objects()->IsUndefined()) return 0; | 8811 if (break_point_objects()->IsUndefined()) return 0; |
8907 // Single break point. | 8812 // Single break point. |
8908 if (!break_point_objects()->IsFixedArray()) return 1; | 8813 if (!break_point_objects()->IsFixedArray()) return 1; |
8909 // Multiple break points. | 8814 // Multiple break points. |
8910 return FixedArray::cast(break_point_objects())->length(); | 8815 return FixedArray::cast(break_point_objects())->length(); |
8911 } | 8816 } |
8912 #endif | 8817 #endif |
8913 | 8818 |
8914 | 8819 |
8915 } } // namespace v8::internal | 8820 } } // namespace v8::internal |