OLD | NEW |
---|---|
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #ifndef V8_HEAP_INL_H_ | 5 #ifndef V8_HEAP_INL_H_ |
6 #define V8_HEAP_INL_H_ | 6 #define V8_HEAP_INL_H_ |
7 | 7 |
8 #include <cmath> | 8 #include <cmath> |
9 | 9 |
10 #include "src/cpu-profiler.h" | |
10 #include "src/heap.h" | 11 #include "src/heap.h" |
11 #include "src/heap-profiler.h" | 12 #include "src/heap-profiler.h" |
12 #include "src/isolate.h" | 13 #include "src/isolate.h" |
13 #include "src/list-inl.h" | 14 #include "src/list-inl.h" |
14 #include "src/objects.h" | 15 #include "src/objects.h" |
15 #include "src/platform.h" | 16 #include "src/platform.h" |
16 #include "src/store-buffer.h" | 17 #include "src/store-buffer.h" |
17 #include "src/store-buffer-inl.h" | 18 #include "src/store-buffer-inl.h" |
18 | 19 |
19 namespace v8 { | 20 namespace v8 { |
(...skipping 157 matching lines...) | |
177 return CopyConstantPoolArrayWithMap(src, src->map()); | 178 return CopyConstantPoolArrayWithMap(src, src->map()); |
178 } | 179 } |
179 | 180 |
180 | 181 |
181 AllocationResult Heap::AllocateRaw(int size_in_bytes, | 182 AllocationResult Heap::AllocateRaw(int size_in_bytes, |
182 AllocationSpace space, | 183 AllocationSpace space, |
183 AllocationSpace retry_space) { | 184 AllocationSpace retry_space) { |
184 ASSERT(AllowHandleAllocation::IsAllowed()); | 185 ASSERT(AllowHandleAllocation::IsAllowed()); |
185 ASSERT(AllowHeapAllocation::IsAllowed()); | 186 ASSERT(AllowHeapAllocation::IsAllowed()); |
186 ASSERT(gc_state_ == NOT_IN_GC); | 187 ASSERT(gc_state_ == NOT_IN_GC); |
187 HeapProfiler* profiler = isolate_->heap_profiler(); | |
188 #ifdef DEBUG | 188 #ifdef DEBUG |
189 if (FLAG_gc_interval >= 0 && | 189 if (FLAG_gc_interval >= 0 && |
190 AllowAllocationFailure::IsAllowed(isolate_) && | 190 AllowAllocationFailure::IsAllowed(isolate_) && |
191 Heap::allocation_timeout_-- <= 0) { | 191 Heap::allocation_timeout_-- <= 0) { |
192 return AllocationResult::Retry(space); | 192 return AllocationResult::Retry(space); |
193 } | 193 } |
194 isolate_->counters()->objs_since_last_full()->Increment(); | 194 isolate_->counters()->objs_since_last_full()->Increment(); |
195 isolate_->counters()->objs_since_last_young()->Increment(); | 195 isolate_->counters()->objs_since_last_young()->Increment(); |
196 #endif | 196 #endif |
197 | 197 |
198 HeapObject* object; | 198 HeapObject* object; |
199 AllocationResult allocation; | 199 AllocationResult allocation; |
200 if (NEW_SPACE == space) { | 200 if (NEW_SPACE == space) { |
201 allocation = new_space_.AllocateRaw(size_in_bytes); | 201 allocation = new_space_.AllocateRaw(size_in_bytes); |
202 if (always_allocate() && | 202 if (always_allocate() && |
203 allocation.IsRetry() && | 203 allocation.IsRetry() && |
204 retry_space != NEW_SPACE) { | 204 retry_space != NEW_SPACE) { |
205 space = retry_space; | 205 space = retry_space; |
206 } else { | 206 } else { |
207 if (profiler->is_tracking_allocations() && allocation.To(&object)) { | 207 if (allocation.To(&object)) { |
208 profiler->AllocationEvent(object->address(), size_in_bytes); | 208 OnAllocationEvent(object, size_in_bytes); |
209 } | 209 } |
210 return allocation; | 210 return allocation; |
211 } | 211 } |
212 } | 212 } |
213 | 213 |
214 if (OLD_POINTER_SPACE == space) { | 214 if (OLD_POINTER_SPACE == space) { |
215 allocation = old_pointer_space_->AllocateRaw(size_in_bytes); | 215 allocation = old_pointer_space_->AllocateRaw(size_in_bytes); |
216 } else if (OLD_DATA_SPACE == space) { | 216 } else if (OLD_DATA_SPACE == space) { |
217 allocation = old_data_space_->AllocateRaw(size_in_bytes); | 217 allocation = old_data_space_->AllocateRaw(size_in_bytes); |
218 } else if (CODE_SPACE == space) { | 218 } else if (CODE_SPACE == space) { |
219 allocation = code_space_->AllocateRaw(size_in_bytes); | 219 if (size_in_bytes <= code_space()->AreaSize()) { |
220 allocation = code_space_->AllocateRaw(size_in_bytes); | |
221 } else { | |
222 // Large code objects are allocated in large object space. | |
223 allocation = lo_space_->AllocateRaw(size_in_bytes, EXECUTABLE); | |
224 } | |
220 } else if (LO_SPACE == space) { | 225 } else if (LO_SPACE == space) { |
221 allocation = lo_space_->AllocateRaw(size_in_bytes, NOT_EXECUTABLE); | 226 allocation = lo_space_->AllocateRaw(size_in_bytes, NOT_EXECUTABLE); |
222 } else if (CELL_SPACE == space) { | 227 } else if (CELL_SPACE == space) { |
223 allocation = cell_space_->AllocateRaw(size_in_bytes); | 228 allocation = cell_space_->AllocateRaw(size_in_bytes); |
224 } else if (PROPERTY_CELL_SPACE == space) { | 229 } else if (PROPERTY_CELL_SPACE == space) { |
225 allocation = property_cell_space_->AllocateRaw(size_in_bytes); | 230 allocation = property_cell_space_->AllocateRaw(size_in_bytes); |
226 } else { | 231 } else { |
227 ASSERT(MAP_SPACE == space); | 232 ASSERT(MAP_SPACE == space); |
228 allocation = map_space_->AllocateRaw(size_in_bytes); | 233 allocation = map_space_->AllocateRaw(size_in_bytes); |
229 } | 234 } |
230 if (allocation.IsRetry()) old_gen_exhausted_ = true; | 235 if (allocation.To(&object)) { |
231 if (profiler->is_tracking_allocations() && allocation.To(&object)) { | 236 OnAllocationEvent(object, size_in_bytes); |
237 } else { | |
238 old_gen_exhausted_ = true; | |
239 } | |
240 return allocation; | |
241 } | |
242 | |
243 | |
244 void Heap::OnAllocationEvent(HeapObject* object, int size_in_bytes) { | |
245 HeapProfiler* profiler = isolate_->heap_profiler(); | |
246 if (profiler->is_tracking_allocations()) { | |
232 profiler->AllocationEvent(object->address(), size_in_bytes); | 247 profiler->AllocationEvent(object->address(), size_in_bytes); |
233 } | 248 } |
234 return allocation; | 249 |
250 if (FLAG_verify_predictable) { | |
251 ++allocations_count_; | |
252 | |
253 UpdateAllocationsHash(object); | |
254 UpdateAllocationsHash(size_in_bytes); | |
255 | |
256 if ((FLAG_dump_allocations_digest_at_alloc > 0) && | |
257 (--dump_allocations_hash_countdown_ == 0)) { | |
258 dump_allocations_hash_countdown_ = FLAG_dump_allocations_digest_at_alloc; | |
259 PrintAlloctionsHash(); | |
260 } | |
261 } | |
262 } | |
263 | |
264 | |
265 void Heap::OnMoveEvent(HeapObject* target, | |
266 HeapObject* source, | |
267 int size_in_bytes) { | |
268 HeapProfiler* heap_profiler = isolate_->heap_profiler(); | |
269 if (heap_profiler->is_tracking_object_moves()) { | |
270 heap_profiler->ObjectMoveEvent(source->address(), target->address(), | |
271 size_in_bytes); | |
272 } | |
273 | |
274 if (isolate_->logger()->is_logging_code_events() || | |
275 isolate_->cpu_profiler()->is_profiling()) { | |
276 if (target->IsSharedFunctionInfo()) { | |
277 PROFILE(isolate_, SharedFunctionInfoMoveEvent( | |
278 source->address(), target->address())); | |
279 } | |
280 } | |
281 | |
282 if (FLAG_verify_predictable) { | |
283 ++allocations_count_; | |
284 | |
285 UpdateAllocationsHash(source); | |
286 UpdateAllocationsHash(target); | |
287 UpdateAllocationsHash(size_in_bytes); | |
288 | |
289 if ((FLAG_dump_allocations_digest_at_alloc > 0) && | |
290 (--dump_allocations_hash_countdown_ == 0)) { | |
291 dump_allocations_hash_countdown_ = FLAG_dump_allocations_digest_at_alloc; | |
292 PrintAlloctionsHash(); | |
293 } | |
294 } | |
295 } | |
296 | |
297 | |
298 void Heap::UpdateAllocationsHash(HeapObject* object) { | |
299 Address object_address = object->address(); | |
300 MemoryChunk* memory_chunk = MemoryChunk::FromAddress(object_address); | |
301 AllocationSpace allocation_space = memory_chunk->owner()->identity(); | |
302 | |
303 STATIC_ASSERT(kSpaceTagSize + kPageSizeBits <= 32); | |
304 uint32_t value = | |
305 static_cast<uint32_t>(object_address - memory_chunk->address()) | | |
306 (static_cast<uint32_t>(allocation_space) << kPageSizeBits); | |
Hannes Payer (out of office)
2014/06/18 09:45:10
Why do you need the or (static_cast<uint32_t>(allocation_space) << kPageSizeBits) part?
Igor Sheludko
2014/06/18 12:38:49
I did that in order to involve both the object offset within the page and the allocation space in the hash value.
| |
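An aside on the exchange above (an editorial sketch, not part of the CL): the value hashed in UpdateAllocationsHash(HeapObject*) packs the object's offset within its page into the low kPageSizeBits bits and the space tag into the bits above them, so two objects at the same page offset in different spaces still feed distinct inputs to the hash. The snippet below illustrates that packing; kPageSizeBits and the Space enum values here are stand-ins, not V8's actual definitions.

```cpp
#include <cstdint>
#include <cstdio>

// Stand-ins for V8's constants; the real kPageSizeBits and the
// AllocationSpace enum are defined elsewhere in the V8 sources.
static const int kPageSizeBits = 20;  // assumes 1 MB pages
enum Space { NEW_SPACE = 0, OLD_POINTER_SPACE = 1, CODE_SPACE = 3 };

// Pack the page offset and the space tag into one 32-bit hash input,
// mirroring the expression discussed in the review thread above.
static uint32_t PackAllocation(uint32_t offset_in_page, Space space) {
  return offset_in_page | (static_cast<uint32_t>(space) << kPageSizeBits);
}

int main() {
  // Same page offset, different spaces -> different hash inputs.
  printf("0x%08x\n", PackAllocation(0x40, NEW_SPACE));   // 0x00000040
  printf("0x%08x\n", PackAllocation(0x40, CODE_SPACE));  // 0x00300040
  return 0;
}
```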
307 | |
308 UpdateAllocationsHash(value); | |
309 } | |
310 | |
311 | |
312 void Heap::UpdateAllocationsHash(uint32_t value) { | |
313 uint16_t c1 = static_cast<uint16_t>(value); | |
314 uint16_t c2 = static_cast<uint16_t>(value >> 16); | |
315 raw_allocations_hash_ = | |
316 StringHasher::AddCharacterCore(raw_allocations_hash_, c1); | |
317 raw_allocations_hash_ = | |
318 StringHasher::AddCharacterCore(raw_allocations_hash_, c2); | |
319 } | |
320 | |
321 | |
322 void Heap::PrintAlloctionsHash() { | |
323 uint32_t hash = StringHasher::GetHashCore(raw_allocations_hash_); | |
324 PrintF("\n### Allocations = %u, hash = 0x%08x\n", allocations_count_, hash); | |
235 } | 325 } |
236 | 326 |
237 | 327 |
238 void Heap::FinalizeExternalString(String* string) { | 328 void Heap::FinalizeExternalString(String* string) { |
239 ASSERT(string->IsExternalString()); | 329 ASSERT(string->IsExternalString()); |
240 v8::String::ExternalStringResourceBase** resource_addr = | 330 v8::String::ExternalStringResourceBase** resource_addr = |
241 reinterpret_cast<v8::String::ExternalStringResourceBase**>( | 331 reinterpret_cast<v8::String::ExternalStringResourceBase**>( |
242 reinterpret_cast<byte*>(string) + | 332 reinterpret_cast<byte*>(string) + |
243 ExternalString::kResourceOffset - | 333 ExternalString::kResourceOffset - |
244 kHeapObjectTag); | 334 kHeapObjectTag); |
(...skipping 480 matching lines...) | |
725 | 815 |
726 | 816 |
727 double GCTracer::SizeOfHeapObjects() { | 817 double GCTracer::SizeOfHeapObjects() { |
728 return (static_cast<double>(heap_->SizeOfObjects())) / MB; | 818 return (static_cast<double>(heap_->SizeOfObjects())) / MB; |
729 } | 819 } |
730 | 820 |
731 | 821 |
732 } } // namespace v8::internal | 822 } } // namespace v8::internal |
733 | 823 |
734 #endif // V8_HEAP_INL_H_ | 824 #endif // V8_HEAP_INL_H_ |