OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #ifndef V8_HEAP_INL_H_ | 5 #ifndef V8_HEAP_INL_H_ |
6 #define V8_HEAP_INL_H_ | 6 #define V8_HEAP_INL_H_ |
7 | 7 |
8 #include <cmath> | 8 #include <cmath> |
9 | 9 |
| 10 #include "src/cpu-profiler.h" |
10 #include "src/heap.h" | 11 #include "src/heap.h" |
11 #include "src/heap-profiler.h" | 12 #include "src/heap-profiler.h" |
12 #include "src/isolate.h" | 13 #include "src/isolate.h" |
13 #include "src/list-inl.h" | 14 #include "src/list-inl.h" |
14 #include "src/objects.h" | 15 #include "src/objects.h" |
15 #include "src/platform.h" | 16 #include "src/platform.h" |
16 #include "src/store-buffer.h" | 17 #include "src/store-buffer.h" |
17 #include "src/store-buffer-inl.h" | 18 #include "src/store-buffer-inl.h" |
18 | 19 |
19 namespace v8 { | 20 namespace v8 { |
(...skipping 157 matching lines...)
177 return CopyConstantPoolArrayWithMap(src, src->map()); | 178 return CopyConstantPoolArrayWithMap(src, src->map()); |
178 } | 179 } |
179 | 180 |
180 | 181 |
181 AllocationResult Heap::AllocateRaw(int size_in_bytes, | 182 AllocationResult Heap::AllocateRaw(int size_in_bytes, |
182 AllocationSpace space, | 183 AllocationSpace space, |
183 AllocationSpace retry_space) { | 184 AllocationSpace retry_space) { |
184 ASSERT(AllowHandleAllocation::IsAllowed()); | 185 ASSERT(AllowHandleAllocation::IsAllowed()); |
185 ASSERT(AllowHeapAllocation::IsAllowed()); | 186 ASSERT(AllowHeapAllocation::IsAllowed()); |
186 ASSERT(gc_state_ == NOT_IN_GC); | 187 ASSERT(gc_state_ == NOT_IN_GC); |
187 HeapProfiler* profiler = isolate_->heap_profiler(); | |
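| // With --gc-interval set, debug builds periodically simulate an |
| // allocation failure by returning a retry result, which forces a GC. |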
188 #ifdef DEBUG | 188 #ifdef DEBUG |
189 if (FLAG_gc_interval >= 0 && | 189 if (FLAG_gc_interval >= 0 && |
190 AllowAllocationFailure::IsAllowed(isolate_) && | 190 AllowAllocationFailure::IsAllowed(isolate_) && |
191 Heap::allocation_timeout_-- <= 0) { | 191 Heap::allocation_timeout_-- <= 0) { |
192 return AllocationResult::Retry(space); | 192 return AllocationResult::Retry(space); |
193 } | 193 } |
194 isolate_->counters()->objs_since_last_full()->Increment(); | 194 isolate_->counters()->objs_since_last_full()->Increment(); |
195 isolate_->counters()->objs_since_last_young()->Increment(); | 195 isolate_->counters()->objs_since_last_young()->Increment(); |
196 #endif | 196 #endif |
197 | 197 |
198 HeapObject* object; | 198 HeapObject* object; |
199 AllocationResult allocation; | 199 AllocationResult allocation; |
200 if (NEW_SPACE == space) { | 200 if (NEW_SPACE == space) { |
201 allocation = new_space_.AllocateRaw(size_in_bytes); | 201 allocation = new_space_.AllocateRaw(size_in_bytes); |
202 if (always_allocate() && | 202 if (always_allocate() && |
203 allocation.IsRetry() && | 203 allocation.IsRetry() && |
204 retry_space != NEW_SPACE) { | 204 retry_space != NEW_SPACE) { |
205 space = retry_space; | 205 space = retry_space; |
206 } else { | 206 } else { |
207 if (profiler->is_tracking_allocations() && allocation.To(&object)) { | 207 if (allocation.To(&object)) { |
208 profiler->AllocationEvent(object->address(), size_in_bytes); | 208 OnAllocationEvent(object, size_in_bytes); |
209 } | 209 } |
210 return allocation; | 210 return allocation; |
211 } | 211 } |
212 } | 212 } |
213 | 213 |
214 if (OLD_POINTER_SPACE == space) { | 214 if (OLD_POINTER_SPACE == space) { |
215 allocation = old_pointer_space_->AllocateRaw(size_in_bytes); | 215 allocation = old_pointer_space_->AllocateRaw(size_in_bytes); |
216 } else if (OLD_DATA_SPACE == space) { | 216 } else if (OLD_DATA_SPACE == space) { |
217 allocation = old_data_space_->AllocateRaw(size_in_bytes); | 217 allocation = old_data_space_->AllocateRaw(size_in_bytes); |
218 } else if (CODE_SPACE == space) { | 218 } else if (CODE_SPACE == space) { |
219 allocation = code_space_->AllocateRaw(size_in_bytes); | 219 allocation = code_space_->AllocateRaw(size_in_bytes); |
220 } else if (LO_SPACE == space) { | 220 } else if (LO_SPACE == space) { |
221 allocation = lo_space_->AllocateRaw(size_in_bytes, NOT_EXECUTABLE); | 221 allocation = lo_space_->AllocateRaw(size_in_bytes, NOT_EXECUTABLE); |
222 } else if (CELL_SPACE == space) { | 222 } else if (CELL_SPACE == space) { |
223 allocation = cell_space_->AllocateRaw(size_in_bytes); | 223 allocation = cell_space_->AllocateRaw(size_in_bytes); |
224 } else if (PROPERTY_CELL_SPACE == space) { | 224 } else if (PROPERTY_CELL_SPACE == space) { |
225 allocation = property_cell_space_->AllocateRaw(size_in_bytes); | 225 allocation = property_cell_space_->AllocateRaw(size_in_bytes); |
226 } else { | 226 } else { |
227 ASSERT(MAP_SPACE == space); | 227 ASSERT(MAP_SPACE == space); |
228 allocation = map_space_->AllocateRaw(size_in_bytes); | 228 allocation = map_space_->AllocateRaw(size_in_bytes); |
229 } | 229 } |
230 if (allocation.IsRetry()) old_gen_exhausted_ = true; | 230 if (allocation.To(&object)) { |
231 if (profiler->is_tracking_allocations() && allocation.To(&object)) { | 231 OnAllocationEvent(object, size_in_bytes); |
| 232 } else { |
| 233 old_gen_exhausted_ = true; |
| 234 } |
| 235 return allocation; |
| 236 } |
| 237 |
| 238 |
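| // Notifies the heap profiler of a fresh allocation and, under |
| // --verify-predictable, folds the allocation into the running hash. |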
| 239 void Heap::OnAllocationEvent(HeapObject* object, int size_in_bytes) { |
| 240 HeapProfiler* profiler = isolate_->heap_profiler(); |
| 241 if (profiler->is_tracking_allocations()) { |
232 profiler->AllocationEvent(object->address(), size_in_bytes); | 242 profiler->AllocationEvent(object->address(), size_in_bytes); |
233 } | 243 } |
234 return allocation; | 244 |
| 245 if (FLAG_verify_predictable) { |
| 246 ++allocations_count_; |
| 247 |
| 248 UpdateAllocationsHash(object); |
| 249 UpdateAllocationsHash(size_in_bytes); |
| 250 |
| 251 if ((FLAG_dump_allocations_digest_at_alloc > 0) && |
| 252 (--dump_allocations_hash_countdown_ == 0)) { |
| 253 dump_allocations_hash_countdown_ = FLAG_dump_allocations_digest_at_alloc; |
| 254 PrintAllocationsHash(); |
| 255 } |
| 256 } |
| 257 } |
| 258 |
| 259 |
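| // Records an object move for the heap profiler, for the logger/CPU |
| // profiler (SharedFunctionInfo moves only), and for the |
| // --verify-predictable allocations hash. |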
| 260 void Heap::OnMoveEvent(HeapObject* target, |
| 261 HeapObject* source, |
| 262 int size_in_bytes) { |
| 263 HeapProfiler* heap_profiler = isolate_->heap_profiler(); |
| 264 if (heap_profiler->is_tracking_object_moves()) { |
| 265 heap_profiler->ObjectMoveEvent(source->address(), target->address(), |
| 266 size_in_bytes); |
| 267 } |
| 268 |
| 269 if (isolate_->logger()->is_logging_code_events() || |
| 270 isolate_->cpu_profiler()->is_profiling()) { |
| 271 if (target->IsSharedFunctionInfo()) { |
| 272 PROFILE(isolate_, SharedFunctionInfoMoveEvent( |
| 273 source->address(), target->address())); |
| 274 } |
| 275 } |
| 276 |
| 277 if (FLAG_verify_predictable) { |
| 278 ++allocations_count_; |
| 279 |
| 280 UpdateAllocationsHash(source); |
| 281 UpdateAllocationsHash(target); |
| 282 UpdateAllocationsHash(size_in_bytes); |
| 283 |
| 284 if ((FLAG_dump_allocations_digest_at_alloc > 0) && |
| 285 (--dump_allocations_hash_countdown_ == 0)) { |
| 286 dump_allocations_hash_countdown_ = FLAG_dump_allocations_digest_at_alloc; |
| 287 PrintAllocationsHash(); |
| 288 } |
| 289 } |
| 290 } |
| 291 |
| 292 |
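| // Folds an object's identity (its offset within the page, combined |
| // with the owning space's tag) into the allocations hash. |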
| 293 void Heap::UpdateAllocationsHash(HeapObject* object) { |
| 294 Address object_address = object->address(); |
| 295 MemoryChunk* memory_chunk = MemoryChunk::FromAddress(object_address); |
| 296 AllocationSpace allocation_space = memory_chunk->owner()->identity(); |
| 297 |
| 298 STATIC_ASSERT(kSpaceTagSize + kPageSizeBits <= 32); |
| 299 uint32_t value = |
| 300 static_cast<uint32_t>(object_address - memory_chunk->address()) | |
| 301 (static_cast<uint32_t>(allocation_space) << kPageSizeBits); |
| 302 |
| 303 UpdateAllocationsHash(value); |
| 304 } |
| 305 |
| 306 |
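| // Mixes a 32-bit value into the running hash 16 bits at a time, |
| // reusing the string hashing core. |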
| 307 void Heap::UpdateAllocationsHash(uint32_t value) { |
| 308 uint16_t c1 = static_cast<uint16_t>(value); |
| 309 uint16_t c2 = static_cast<uint16_t>(value >> 16); |
| 310 raw_allocations_hash_ = |
| 311 StringHasher::AddCharacterCore(raw_allocations_hash_, c1); |
| 312 raw_allocations_hash_ = |
| 313 StringHasher::AddCharacterCore(raw_allocations_hash_, c2); |
| 314 } |
| 315 |
| 316 |
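| // Finalizes the running hash and prints the allocation digest. |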
| 317 void Heap::PrintAllocationsHash() { |
| 318 uint32_t hash = StringHasher::GetHashCore(raw_allocations_hash_); |
| 319 PrintF("\n### Allocations = %u, hash = 0x%08x\n", allocations_count_, hash); |
235 } | 320 } |
236 | 321 |
237 | 322 |
238 void Heap::FinalizeExternalString(String* string) { | 323 void Heap::FinalizeExternalString(String* string) { |
239 ASSERT(string->IsExternalString()); | 324 ASSERT(string->IsExternalString()); |
240 v8::String::ExternalStringResourceBase** resource_addr = | 325 v8::String::ExternalStringResourceBase** resource_addr = |
241 reinterpret_cast<v8::String::ExternalStringResourceBase**>( | 326 reinterpret_cast<v8::String::ExternalStringResourceBase**>( |
242 reinterpret_cast<byte*>(string) + | 327 reinterpret_cast<byte*>(string) + |
243 ExternalString::kResourceOffset - | 328 ExternalString::kResourceOffset - |
244 kHeapObjectTag); | 329 kHeapObjectTag); |
(...skipping 480 matching lines...)
725 | 810 |
726 | 811 |
727 double GCTracer::SizeOfHeapObjects() { | 812 double GCTracer::SizeOfHeapObjects() { |
728 return (static_cast<double>(heap_->SizeOfObjects())) / MB; | 813 return (static_cast<double>(heap_->SizeOfObjects())) / MB; |
729 } | 814 } |
730 | 815 |
731 | 816 |
732 } } // namespace v8::internal | 817 } } // namespace v8::internal |
733 | 818 |
734 #endif // V8_HEAP_INL_H_ | 819 #endif // V8_HEAP_INL_H_ |