OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/heap/heap.h" | 5 #include "src/heap/heap.h" |
6 | 6 |
7 #include "src/accessors.h" | 7 #include "src/accessors.h" |
8 #include "src/api.h" | 8 #include "src/api.h" |
9 #include "src/ast/context-slot-cache.h" | 9 #include "src/ast/context-slot-cache.h" |
10 #include "src/base/bits.h" | 10 #include "src/base/bits.h" |
(...skipping 6280 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
6291 max_gc_pause_ = Max(max_gc_pause_, duration); | 6291 max_gc_pause_ = Max(max_gc_pause_, duration); |
6292 max_alive_after_gc_ = Max(max_alive_after_gc_, SizeOfObjects()); | 6292 max_alive_after_gc_ = Max(max_alive_after_gc_, SizeOfObjects()); |
6293 min_in_mutator_ = Min(min_in_mutator_, spent_in_mutator); | 6293 min_in_mutator_ = Min(min_in_mutator_, spent_in_mutator); |
6294 } else if (FLAG_trace_gc_verbose) { | 6294 } else if (FLAG_trace_gc_verbose) { |
6295 total_gc_time_ms_ += duration; | 6295 total_gc_time_ms_ += duration; |
6296 } | 6296 } |
6297 | 6297 |
6298 marking_time_ += marking_time; | 6298 marking_time_ += marking_time; |
6299 } | 6299 } |
6300 | 6300 |
6301 | |
6302 int KeyedLookupCache::Hash(Handle<Map> map, Handle<Name> name) { | |
6303 DisallowHeapAllocation no_gc; | |
6304 // Uses only lower 32 bits if pointers are larger. | |
6305 uintptr_t addr_hash = | |
6306 static_cast<uint32_t>(reinterpret_cast<uintptr_t>(*map)) >> kMapHashShift; | |
6307 return static_cast<uint32_t>((addr_hash ^ name->Hash()) & kCapacityMask); | |
6308 } | |
6309 | |
6310 | |
6311 int KeyedLookupCache::Lookup(Handle<Map> map, Handle<Name> name) { | |
6312 DisallowHeapAllocation no_gc; | |
6313 int index = (Hash(map, name) & kHashMask); | |
6314 for (int i = 0; i < kEntriesPerBucket; i++) { | |
6315 Key& key = keys_[index + i]; | |
6316 if ((key.map == *map) && key.name->Equals(*name)) { | |
6317 return field_offsets_[index + i]; | |
6318 } | |
6319 } | |
6320 return kNotFound; | |
6321 } | |
6322 | |
6323 | |
6324 void KeyedLookupCache::Update(Handle<Map> map, Handle<Name> name, | |
6325 int field_offset) { | |
6326 DisallowHeapAllocation no_gc; | |
6327 if (!name->IsUniqueName()) { | |
6328 if (!StringTable::InternalizeStringIfExists( | |
6329 name->GetIsolate(), Handle<String>::cast(name)).ToHandle(&name)) { | |
6330 return; | |
6331 } | |
6332 } | |
6333 // This cache is cleared only between mark compact passes, so we expect the | |
6334 // cache to only contain old space names. | |
6335 DCHECK(!map->GetIsolate()->heap()->InNewSpace(*name)); | |
6336 | |
6337 int index = (Hash(map, name) & kHashMask); | |
6338 // After a GC there will be free slots, so we use them in order (this may | |
6339 // help to get the most frequently used one in position 0). | |
6340 for (int i = 0; i < kEntriesPerBucket; i++) { | |
6341 Key& key = keys_[index]; | |
6342 Object* free_entry_indicator = NULL; | |
6343 if (key.map == free_entry_indicator) { | |
6344 key.map = *map; | |
6345 key.name = *name; | |
6346 field_offsets_[index + i] = field_offset; | |
6347 return; | |
6348 } | |
6349 } | |
6350 // No free entry found in this bucket, so we move them all down one and | |
6351 // put the new entry at position zero. | |
6352 for (int i = kEntriesPerBucket - 1; i > 0; i--) { | |
6353 Key& key = keys_[index + i]; | |
6354 Key& key2 = keys_[index + i - 1]; | |
6355 key = key2; | |
6356 field_offsets_[index + i] = field_offsets_[index + i - 1]; | |
6357 } | |
6358 | |
6359 // Write the new first entry. | |
6360 Key& key = keys_[index]; | |
6361 key.map = *map; | |
6362 key.name = *name; | |
6363 field_offsets_[index] = field_offset; | |
6364 } | |
6365 | |
6366 | |
6367 void KeyedLookupCache::Clear() { | |
6368 for (int index = 0; index < kLength; index++) keys_[index].map = NULL; | |
6369 } | |
6370 | |
6371 | |
6372 void DescriptorLookupCache::Clear() { | |
6373 for (int index = 0; index < kLength; index++) keys_[index].source = NULL; | |
6374 } | |
6375 | |
6376 void Heap::ExternalStringTable::CleanUp() { | 6301 void Heap::ExternalStringTable::CleanUp() { |
6377 int last = 0; | 6302 int last = 0; |
6378 Isolate* isolate = heap_->isolate(); | 6303 Isolate* isolate = heap_->isolate(); |
6379 for (int i = 0; i < new_space_strings_.length(); ++i) { | 6304 for (int i = 0; i < new_space_strings_.length(); ++i) { |
6380 if (new_space_strings_[i]->IsTheHole(isolate)) { | 6305 if (new_space_strings_[i]->IsTheHole(isolate)) { |
6381 continue; | 6306 continue; |
6382 } | 6307 } |
6383 DCHECK(new_space_strings_[i]->IsExternalString()); | 6308 DCHECK(new_space_strings_[i]->IsExternalString()); |
6384 if (heap_->InNewSpace(new_space_strings_[i])) { | 6309 if (heap_->InNewSpace(new_space_strings_[i])) { |
6385 new_space_strings_[last++] = new_space_strings_[i]; | 6310 new_space_strings_[last++] = new_space_strings_[i]; |
(...skipping 136 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
6522 } | 6447 } |
6523 | 6448 |
6524 | 6449 |
// static
// Thin forwarder: returns StaticVisitorBase::GetVisitorId(map).
// NOTE(review): presumably exists so callers can obtain a visitor id
// without depending on the static-visitor headers directly — confirm.
int Heap::GetStaticVisitorIdForMap(Map* map) {
  return StaticVisitorBase::GetVisitorId(map);
}
6529 | 6454 |
6530 } // namespace internal | 6455 } // namespace internal |
6531 } // namespace v8 | 6456 } // namespace v8 |
OLD | NEW |