Chromium Code Reviews

Side by Side Diff: src/heap/heap.h

Issue 437993003: Move a bunch of GC related files to heap/ subdirectory (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: make presubmit happy. Created 6 years, 4 months ago.
OLD | NEW
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #ifndef V8_HEAP_H_ 5 #ifndef V8_HEAP_HEAP_H_
6 #define V8_HEAP_H_ 6 #define V8_HEAP_HEAP_H_
7 7
8 #include <cmath> 8 #include <cmath>
9 9
10 #include "src/allocation.h" 10 #include "src/allocation.h"
11 #include "src/assert-scope.h" 11 #include "src/assert-scope.h"
12 #include "src/counters.h" 12 #include "src/counters.h"
13 #include "src/gc-tracer.h"
14 #include "src/globals.h" 13 #include "src/globals.h"
15 #include "src/incremental-marking.h" 14 #include "src/heap/gc-tracer.h"
15 #include "src/heap/incremental-marking.h"
16 #include "src/heap/mark-compact.h"
17 #include "src/heap/spaces.h"
16 #include "src/list.h" 18 #include "src/list.h"
17 #include "src/mark-compact.h"
18 #include "src/objects-visiting.h" 19 #include "src/objects-visiting.h"
19 #include "src/spaces.h"
20 #include "src/splay-tree-inl.h" 20 #include "src/splay-tree-inl.h"
21 #include "src/store-buffer.h" 21 #include "src/store-buffer.h"
22 22
23 namespace v8 { 23 namespace v8 {
24 namespace internal { 24 namespace internal {
25 25
26 // Defines all the roots in Heap. 26 // Defines all the roots in Heap.
27 #define STRONG_ROOT_LIST(V) \ 27 #define STRONG_ROOT_LIST(V) \
28 V(Map, byte_array_map, ByteArrayMap) \ 28 V(Map, byte_array_map, ByteArrayMap) \
29 V(Map, free_space_map, FreeSpaceMap) \ 29 V(Map, free_space_map, FreeSpaceMap) \
(...skipping 153 matching lines...)
183 V(Symbol, uninitialized_symbol, UninitializedSymbol) \ 183 V(Symbol, uninitialized_symbol, UninitializedSymbol) \
184 V(Symbol, megamorphic_symbol, MegamorphicSymbol) \ 184 V(Symbol, megamorphic_symbol, MegamorphicSymbol) \
185 V(Symbol, stack_trace_symbol, StackTraceSymbol) \ 185 V(Symbol, stack_trace_symbol, StackTraceSymbol) \
186 V(Symbol, detailed_stack_trace_symbol, DetailedStackTraceSymbol) \ 186 V(Symbol, detailed_stack_trace_symbol, DetailedStackTraceSymbol) \
187 V(Symbol, normal_ic_symbol, NormalICSymbol) \ 187 V(Symbol, normal_ic_symbol, NormalICSymbol) \
188 V(FixedArray, materialized_objects, MaterializedObjects) \ 188 V(FixedArray, materialized_objects, MaterializedObjects) \
189 V(FixedArray, allocation_sites_scratchpad, AllocationSitesScratchpad) \ 189 V(FixedArray, allocation_sites_scratchpad, AllocationSitesScratchpad) \
190 V(FixedArray, microtask_queue, MicrotaskQueue) 190 V(FixedArray, microtask_queue, MicrotaskQueue)
191 191
192 // Entries in this list are limited to Smis and are not visited during GC. 192 // Entries in this list are limited to Smis and are not visited during GC.
193 #define SMI_ROOT_LIST(V) \ 193 #define SMI_ROOT_LIST(V) \
194 V(Smi, stack_limit, StackLimit) \ 194 V(Smi, stack_limit, StackLimit) \
195 V(Smi, real_stack_limit, RealStackLimit) \ 195 V(Smi, real_stack_limit, RealStackLimit) \
196 V(Smi, last_script_id, LastScriptId) \ 196 V(Smi, last_script_id, LastScriptId) \
197 V(Smi, arguments_adaptor_deopt_pc_offset, ArgumentsAdaptorDeoptPCOffset) \ 197 V(Smi, arguments_adaptor_deopt_pc_offset, ArgumentsAdaptorDeoptPCOffset) \
198 V(Smi, construct_stub_deopt_pc_offset, ConstructStubDeoptPCOffset) \ 198 V(Smi, construct_stub_deopt_pc_offset, ConstructStubDeoptPCOffset) \
199 V(Smi, getter_stub_deopt_pc_offset, GetterStubDeoptPCOffset) \ 199 V(Smi, getter_stub_deopt_pc_offset, GetterStubDeoptPCOffset) \
200 V(Smi, setter_stub_deopt_pc_offset, SetterStubDeoptPCOffset) 200 V(Smi, setter_stub_deopt_pc_offset, SetterStubDeoptPCOffset)
201 201
202 #define ROOT_LIST(V) \ 202 #define ROOT_LIST(V) \
203 STRONG_ROOT_LIST(V) \ 203 STRONG_ROOT_LIST(V) \
204 SMI_ROOT_LIST(V) \ 204 SMI_ROOT_LIST(V) \
205 V(StringTable, string_table, StringTable) 205 V(StringTable, string_table, StringTable)
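ROOT_LIST is an X-macro: the V parameter is itself a macro supplied at each expansion site, and every list entry is passed through it. A minimal sketch of the pattern, using a hypothetical counting macro that is not part of heap.h:

    #define COUNT_ROOT(type, name, camel_name) +1
    static const int kTotalRootCount = 0 ROOT_LIST(COUNT_ROOT);
    #undef COUNT_ROOT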
206 206
207 // Heap roots that are known to be immortal immovable, for which we can safely 207 // Heap roots that are known to be immortal immovable, for which we can safely
208 // skip write barriers. 208 // skip write barriers.
209 #define IMMORTAL_IMMOVABLE_ROOT_LIST(V) \ 209 #define IMMORTAL_IMMOVABLE_ROOT_LIST(V) \
210 V(byte_array_map) \ 210 V(byte_array_map) \
211 V(free_space_map) \ 211 V(free_space_map) \
212 V(one_pointer_filler_map) \ 212 V(one_pointer_filler_map) \
213 V(two_pointer_filler_map) \ 213 V(two_pointer_filler_map) \
214 V(undefined_value) \ 214 V(undefined_value) \
215 V(the_hole_value) \ 215 V(the_hole_value) \
216 V(null_value) \ 216 V(null_value) \
217 V(true_value) \ 217 V(true_value) \
218 V(false_value) \ 218 V(false_value) \
219 V(uninitialized_value) \ 219 V(uninitialized_value) \
220 V(cell_map) \ 220 V(cell_map) \
221 V(global_property_cell_map) \ 221 V(global_property_cell_map) \
222 V(shared_function_info_map) \ 222 V(shared_function_info_map) \
223 V(meta_map) \ 223 V(meta_map) \
224 V(heap_number_map) \ 224 V(heap_number_map) \
225 V(mutable_heap_number_map) \ 225 V(mutable_heap_number_map) \
226 V(native_context_map) \ 226 V(native_context_map) \
227 V(fixed_array_map) \ 227 V(fixed_array_map) \
228 V(code_map) \ 228 V(code_map) \
229 V(scope_info_map) \ 229 V(scope_info_map) \
230 V(fixed_cow_array_map) \ 230 V(fixed_cow_array_map) \
231 V(fixed_double_array_map) \ 231 V(fixed_double_array_map) \
232 V(constant_pool_array_map) \ 232 V(constant_pool_array_map) \
233 V(no_interceptor_result_sentinel) \ 233 V(no_interceptor_result_sentinel) \
234 V(hash_table_map) \ 234 V(hash_table_map) \
235 V(ordered_hash_table_map) \ 235 V(ordered_hash_table_map) \
236 V(empty_fixed_array) \ 236 V(empty_fixed_array) \
237 V(empty_byte_array) \ 237 V(empty_byte_array) \
238 V(empty_descriptor_array) \ 238 V(empty_descriptor_array) \
239 V(empty_constant_pool_array) \ 239 V(empty_constant_pool_array) \
240 V(arguments_marker) \ 240 V(arguments_marker) \
241 V(symbol_map) \ 241 V(symbol_map) \
242 V(sloppy_arguments_elements_map) \ 242 V(sloppy_arguments_elements_map) \
243 V(function_context_map) \ 243 V(function_context_map) \
244 V(catch_context_map) \ 244 V(catch_context_map) \
245 V(with_context_map) \ 245 V(with_context_map) \
246 V(block_context_map) \ 246 V(block_context_map) \
247 V(module_context_map) \ 247 V(module_context_map) \
248 V(global_context_map) \ 248 V(global_context_map) \
249 V(undefined_map) \ 249 V(undefined_map) \
250 V(the_hole_map) \ 250 V(the_hole_map) \
251 V(null_map) \ 251 V(null_map) \
252 V(boolean_map) \ 252 V(boolean_map) \
253 V(uninitialized_map) \ 253 V(uninitialized_map) \
254 V(message_object_map) \ 254 V(message_object_map) \
255 V(foreign_map) \ 255 V(foreign_map) \
256 V(neander_map) 256 V(neander_map)
257 257
258 #define INTERNALIZED_STRING_LIST(V) \ 258 #define INTERNALIZED_STRING_LIST(V) \
259 V(Array_string, "Array") \ 259 V(Array_string, "Array") \
260 V(Object_string, "Object") \ 260 V(Object_string, "Object") \
261 V(proto_string, "__proto__") \ 261 V(proto_string, "__proto__") \
262 V(arguments_string, "arguments") \ 262 V(arguments_string, "arguments") \
263 V(Arguments_string, "Arguments") \ 263 V(Arguments_string, "Arguments") \
264 V(call_string, "call") \ 264 V(call_string, "call") \
265 V(apply_string, "apply") \ 265 V(apply_string, "apply") \
(...skipping 84 matching lines...)
350 class Isolate; 350 class Isolate;
351 class WeakObjectRetainer; 351 class WeakObjectRetainer;
352 352
353 353
354 typedef String* (*ExternalStringTableUpdaterCallback)(Heap* heap, 354 typedef String* (*ExternalStringTableUpdaterCallback)(Heap* heap,
355 Object** pointer); 355 Object** pointer);
356 356
357 class StoreBufferRebuilder { 357 class StoreBufferRebuilder {
358 public: 358 public:
359 explicit StoreBufferRebuilder(StoreBuffer* store_buffer) 359 explicit StoreBufferRebuilder(StoreBuffer* store_buffer)
360 : store_buffer_(store_buffer) { 360 : store_buffer_(store_buffer) {}
361 }
362 361
363 void Callback(MemoryChunk* page, StoreBufferEvent event); 362 void Callback(MemoryChunk* page, StoreBufferEvent event);
364 363
365 private: 364 private:
366 StoreBuffer* store_buffer_; 365 StoreBuffer* store_buffer_;
367 366
368 // We record in this variable how full the store buffer was when we started 367 // We record in this variable how full the store buffer was when we started
369 // iterating over the current page, finding pointers to new space. If the 368 // iterating over the current page, finding pointers to new space. If the
370 // store buffer overflows again we can exempt the page from the store buffer 369 // store buffer overflows again we can exempt the page from the store buffer
371 // by rewinding to this point instead of having to search the store buffer. 370 // by rewinding to this point instead of having to search the store buffer.
372 Object*** start_of_current_page_; 371 Object*** start_of_current_page_;
373 // The current page we are scanning in the store buffer iterator. 372 // The current page we are scanning in the store buffer iterator.
374 MemoryChunk* current_page_; 373 MemoryChunk* current_page_;
375 }; 374 };
376 375
377 376
378
379 // A queue of objects promoted during scavenge. Each object is accompanied 377 // A queue of objects promoted during scavenge. Each object is accompanied
380 // by its size to avoid dereferencing a map pointer for scanning. 378 // by its size to avoid dereferencing a map pointer for scanning.
381 class PromotionQueue { 379 class PromotionQueue {
382 public: 380 public:
383 explicit PromotionQueue(Heap* heap) 381 explicit PromotionQueue(Heap* heap)
384 : front_(NULL), 382 : front_(NULL),
385 rear_(NULL), 383 rear_(NULL),
386 limit_(NULL), 384 limit_(NULL),
387 emergency_stack_(0), 385 emergency_stack_(0),
388 heap_(heap) { } 386 heap_(heap) {}
389 387
390 void Initialize(); 388 void Initialize();
391 389
392 void Destroy() { 390 void Destroy() {
393 DCHECK(is_empty()); 391 DCHECK(is_empty());
394 delete emergency_stack_; 392 delete emergency_stack_;
395 emergency_stack_ = NULL; 393 emergency_stack_ = NULL;
396 } 394 }
397 395
398 inline void ActivateGuardIfOnTheSamePage(); 396 inline void ActivateGuardIfOnTheSamePage();
(...skipping 24 matching lines...)
423 if (GetHeadPage() != Page::FromAddress(to_space_top)) { 421 if (GetHeadPage() != Page::FromAddress(to_space_top)) {
424 return true; 422 return true;
425 } 423 }
426 // If the to-space top pointer is less than or equal to the promotion 424 // If the to-space top pointer is less than or equal to the promotion
427 // queue head, then the to-space objects are below the promotion queue. 425 // queue head, then the to-space objects are below the promotion queue.
428 return reinterpret_cast<intptr_t*>(to_space_top) <= rear_; 426 return reinterpret_cast<intptr_t*>(to_space_top) <= rear_;
429 } 427 }
430 428
431 bool is_empty() { 429 bool is_empty() {
432 return (front_ == rear_) && 430 return (front_ == rear_) &&
433 (emergency_stack_ == NULL || emergency_stack_->length() == 0); 431 (emergency_stack_ == NULL || emergency_stack_->length() == 0);
434 } 432 }
435 433
436 inline void insert(HeapObject* target, int size); 434 inline void insert(HeapObject* target, int size);
437 435
438 void remove(HeapObject** target, int* size) { 436 void remove(HeapObject** target, int* size) {
439 DCHECK(!is_empty()); 437 DCHECK(!is_empty());
440 if (front_ == rear_) { 438 if (front_ == rear_) {
441 Entry e = emergency_stack_->RemoveLast(); 439 Entry e = emergency_stack_->RemoveLast();
442 *target = e.obj_; 440 *target = e.obj_;
443 *size = e.size_; 441 *size = e.size_;
444 return; 442 return;
445 } 443 }
446 444
447 if (NewSpacePage::IsAtStart(reinterpret_cast<Address>(front_))) { 445 if (NewSpacePage::IsAtStart(reinterpret_cast<Address>(front_))) {
448 NewSpacePage* front_page = 446 NewSpacePage* front_page =
449 NewSpacePage::FromAddress(reinterpret_cast<Address>(front_)); 447 NewSpacePage::FromAddress(reinterpret_cast<Address>(front_));
450 DCHECK(!front_page->prev_page()->is_anchor()); 448 DCHECK(!front_page->prev_page()->is_anchor());
451 front_ = 449 front_ = reinterpret_cast<intptr_t*>(front_page->prev_page()->area_end());
452 reinterpret_cast<intptr_t*>(front_page->prev_page()->area_end());
453 } 450 }
454 *target = reinterpret_cast<HeapObject*>(*(--front_)); 451 *target = reinterpret_cast<HeapObject*>(*(--front_));
455 *size = static_cast<int>(*(--front_)); 452 *size = static_cast<int>(*(--front_));
456 // Assert no underflow. 453 // Assert no underflow.
457 SemiSpace::AssertValidRange(reinterpret_cast<Address>(rear_), 454 SemiSpace::AssertValidRange(reinterpret_cast<Address>(rear_),
458 reinterpret_cast<Address>(front_)); 455 reinterpret_cast<Address>(front_));
459 } 456 }
460 457
461 private: 458 private:
462 // The front of the queue is higher in the memory page chain than the rear. 459 // The front of the queue is higher in the memory page chain than the rear.
463 intptr_t* front_; 460 intptr_t* front_;
464 intptr_t* rear_; 461 intptr_t* rear_;
465 intptr_t* limit_; 462 intptr_t* limit_;
466 463
467 bool guard_; 464 bool guard_;
468 465
469 static const int kEntrySizeInWords = 2; 466 static const int kEntrySizeInWords = 2;
470 467
471 struct Entry { 468 struct Entry {
472 Entry(HeapObject* obj, int size) : obj_(obj), size_(size) { } 469 Entry(HeapObject* obj, int size) : obj_(obj), size_(size) {}
473 470
474 HeapObject* obj_; 471 HeapObject* obj_;
475 int size_; 472 int size_;
476 }; 473 };
477 List<Entry>* emergency_stack_; 474 List<Entry>* emergency_stack_;
478 475
479 Heap* heap_; 476 Heap* heap_;
480 477
481 void RelocateQueueHead(); 478 void RelocateQueueHead();
482 479
483 DISALLOW_COPY_AND_ASSIGN(PromotionQueue); 480 DISALLOW_COPY_AND_ASSIGN(PromotionQueue);
484 }; 481 };
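Each entry occupies kEntrySizeInWords (two) machine words; remove() above pops the object word first and the size word second, which implies insert() pushes them in that same order at rear_. A minimal stand-alone sketch of the layout, assuming a fixed-size buffer rather than V8's semispace pages:

    intptr_t buffer[2 * 8];              // room for 8 two-word entries
    intptr_t* front = buffer + 2 * 8;    // pop side, starts at the top
    intptr_t* rear = buffer + 2 * 8;     // push side, grows downward

    void Push(void* obj, intptr_t size) {
      *(--rear) = reinterpret_cast<intptr_t>(obj);  // object word
      *(--rear) = size;                             // then size word
    }

    void Pop(void** obj, intptr_t* size) {
      *obj = reinterpret_cast<void*>(*(--front));   // oldest object word
      *size = *(--front);                           // its size word
    }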
485 482
486 483
487 typedef void (*ScavengingCallback)(Map* map, 484 typedef void (*ScavengingCallback)(Map* map, HeapObject** slot,
488 HeapObject** slot,
489 HeapObject* object); 485 HeapObject* object);
490 486
491 487
492 // The external string table is where all external strings are 488 // The external string table is where all external strings are
493 // registered. We need to keep track of such strings to finalize 489 // registered. We need to keep track of such strings to finalize
494 // them properly. 490 // them properly.
495 class ExternalStringTable { 491 class ExternalStringTable {
496 public: 492 public:
497 // Registers an external string. 493 // Registers an external string.
498 inline void AddString(String* string); 494 inline void AddString(String* string);
499 495
500 inline void Iterate(ObjectVisitor* v); 496 inline void Iterate(ObjectVisitor* v);
501 497
502 // Restores internal invariant and gets rid of collected strings. 498 // Restores internal invariant and gets rid of collected strings.
503 // Must be called after each Iterate() that modified the strings. 499 // Must be called after each Iterate() that modified the strings.
504 void CleanUp(); 500 void CleanUp();
505 501
506 // Destroys all allocated memory. 502 // Destroys all allocated memory.
507 void TearDown(); 503 void TearDown();
508 504
509 private: 505 private:
510 explicit ExternalStringTable(Heap* heap) : heap_(heap) { } 506 explicit ExternalStringTable(Heap* heap) : heap_(heap) {}
511 507
512 friend class Heap; 508 friend class Heap;
513 509
514 inline void Verify(); 510 inline void Verify();
515 511
516 inline void AddOldString(String* string); 512 inline void AddOldString(String* string);
517 513
518 // Notifies the table that only a prefix of the new list is valid. 514 // Notifies the table that only a prefix of the new list is valid.
519 inline void ShrinkNewStrings(int position); 515 inline void ShrinkNewStrings(int position);
520 516
(...skipping 11 matching lines...)
532 enum ArrayStorageAllocationMode { 528 enum ArrayStorageAllocationMode {
533 DONT_INITIALIZE_ARRAY_ELEMENTS, 529 DONT_INITIALIZE_ARRAY_ELEMENTS,
534 INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE 530 INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE
535 }; 531 };
536 532
537 533
538 class Heap { 534 class Heap {
539 public: 535 public:
540 // Configure heap size in MB before setup. Return false if the heap has been 536 // Configure heap size in MB before setup. Return false if the heap has been
541 // set up already. 537 // set up already.
542 bool ConfigureHeap(int max_semi_space_size, 538 bool ConfigureHeap(int max_semi_space_size, int max_old_space_size,
543 int max_old_space_size, 539 int max_executable_size, size_t code_range_size);
544 int max_executable_size,
545 size_t code_range_size);
546 bool ConfigureHeapDefault(); 540 bool ConfigureHeapDefault();
547 541
548 // Prepares the heap, setting up memory areas that are needed in the isolate 542 // Prepares the heap, setting up memory areas that are needed in the isolate
549 // without actually creating any objects. 543 // without actually creating any objects.
550 bool SetUp(); 544 bool SetUp();
551 545
552 // Bootstraps the object heap with the core set of objects required to run. 546 // Bootstraps the object heap with the core set of objects required to run.
553 // Returns whether it succeeded. 547 // Returns whether it succeeded.
554 bool CreateHeapObjects(); 548 bool CreateHeapObjects();
555 549
(...skipping 56 matching lines...)
612 Address NewSpaceStart() { return new_space_.start(); } 606 Address NewSpaceStart() { return new_space_.start(); }
613 uintptr_t NewSpaceMask() { return new_space_.mask(); } 607 uintptr_t NewSpaceMask() { return new_space_.mask(); }
614 Address NewSpaceTop() { return new_space_.top(); } 608 Address NewSpaceTop() { return new_space_.top(); }
615 609
616 NewSpace* new_space() { return &new_space_; } 610 NewSpace* new_space() { return &new_space_; }
617 OldSpace* old_pointer_space() { return old_pointer_space_; } 611 OldSpace* old_pointer_space() { return old_pointer_space_; }
618 OldSpace* old_data_space() { return old_data_space_; } 612 OldSpace* old_data_space() { return old_data_space_; }
619 OldSpace* code_space() { return code_space_; } 613 OldSpace* code_space() { return code_space_; }
620 MapSpace* map_space() { return map_space_; } 614 MapSpace* map_space() { return map_space_; }
621 CellSpace* cell_space() { return cell_space_; } 615 CellSpace* cell_space() { return cell_space_; }
622 PropertyCellSpace* property_cell_space() { 616 PropertyCellSpace* property_cell_space() { return property_cell_space_; }
623 return property_cell_space_;
624 }
625 LargeObjectSpace* lo_space() { return lo_space_; } 617 LargeObjectSpace* lo_space() { return lo_space_; }
626 PagedSpace* paged_space(int idx) { 618 PagedSpace* paged_space(int idx) {
627 switch (idx) { 619 switch (idx) {
628 case OLD_POINTER_SPACE: 620 case OLD_POINTER_SPACE:
629 return old_pointer_space(); 621 return old_pointer_space();
630 case OLD_DATA_SPACE: 622 case OLD_DATA_SPACE:
631 return old_data_space(); 623 return old_data_space();
632 case MAP_SPACE: 624 case MAP_SPACE:
633 return map_space(); 625 return map_space();
634 case CELL_SPACE: 626 case CELL_SPACE:
(...skipping 31 matching lines...)
666 Address* OldDataSpaceAllocationTopAddress() { 658 Address* OldDataSpaceAllocationTopAddress() {
667 return old_data_space_->allocation_top_address(); 659 return old_data_space_->allocation_top_address();
668 } 660 }
669 Address* OldDataSpaceAllocationLimitAddress() { 661 Address* OldDataSpaceAllocationLimitAddress() {
670 return old_data_space_->allocation_limit_address(); 662 return old_data_space_->allocation_limit_address();
671 } 663 }
672 664
673 // Returns a deep copy of the JavaScript object. 665 // Returns a deep copy of the JavaScript object.
674 // Properties and elements are copied too. 666 // Properties and elements are copied too.
675 // Optionally takes an AllocationSite to be appended in an AllocationMemento. 667 // Optionally takes an AllocationSite to be appended in an AllocationMemento.
676 MUST_USE_RESULT AllocationResult CopyJSObject(JSObject* source, 668 MUST_USE_RESULT AllocationResult
677 AllocationSite* site = NULL); 669 CopyJSObject(JSObject* source, AllocationSite* site = NULL);
678 670
679 // Clear the Instanceof cache (used when a prototype changes). 671 // Clear the Instanceof cache (used when a prototype changes).
680 inline void ClearInstanceofCache(); 672 inline void ClearInstanceofCache();
681 673
682 // Iterates the whole code space to clear all ICs of the given kind. 674 // Iterates the whole code space to clear all ICs of the given kind.
683 void ClearAllICsByKind(Code::Kind kind); 675 void ClearAllICsByKind(Code::Kind kind);
684 676
685 // For use during bootup. 677 // For use during bootup.
686 void RepairFreeListsAfterBoot(); 678 void RepairFreeListsAfterBoot();
687 679
688 template<typename T> 680 template <typename T>
689 static inline bool IsOneByte(T t, int chars); 681 static inline bool IsOneByte(T t, int chars);
690 682
691 // Move len elements within a given array from src_index to 683 // Move len elements within a given array from src_index to
692 // dst_index. 684 // dst_index.
693 void MoveElements(FixedArray* array, int dst_index, int src_index, int len); 685 void MoveElements(FixedArray* array, int dst_index, int src_index, int len);
694 686
695 // Sloppy mode arguments object size. 687 // Sloppy mode arguments object size.
696 static const int kSloppyArgumentsObjectSize = 688 static const int kSloppyArgumentsObjectSize =
697 JSObject::kHeaderSize + 2 * kPointerSize; 689 JSObject::kHeaderSize + 2 * kPointerSize;
697 // The strict mode arguments object has no callee, so it is smaller. 690 // The strict mode arguments object has no callee, so it is smaller.
(...skipping 19 matching lines...)
718 // Maintain marking consistency for IncrementalMarking. 710 // Maintain marking consistency for IncrementalMarking.
719 void AdjustLiveBytes(Address address, int by, InvocationMode mode); 711 void AdjustLiveBytes(Address address, int by, InvocationMode mode);
720 712
721 // Converts the given boolean condition to JavaScript boolean value. 713 // Converts the given boolean condition to JavaScript boolean value.
722 inline Object* ToBoolean(bool condition); 714 inline Object* ToBoolean(bool condition);
723 715
724 // Performs garbage collection operation. 716 // Performs garbage collection operation.
725 // Returns whether there is a chance that another major GC could 717 // Returns whether there is a chance that another major GC could
726 // collect more garbage. 718 // collect more garbage.
727 inline bool CollectGarbage( 719 inline bool CollectGarbage(
728 AllocationSpace space, 720 AllocationSpace space, const char* gc_reason = NULL,
729 const char* gc_reason = NULL,
730 const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags); 721 const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);
731 722
732 static const int kNoGCFlags = 0; 723 static const int kNoGCFlags = 0;
733 static const int kSweepPreciselyMask = 1; 724 static const int kSweepPreciselyMask = 1;
734 static const int kReduceMemoryFootprintMask = 2; 725 static const int kReduceMemoryFootprintMask = 2;
735 static const int kAbortIncrementalMarkingMask = 4; 726 static const int kAbortIncrementalMarkingMask = 4;
736 727
737 // Making the heap iterable requires us to sweep precisely and abort any 728 // Making the heap iterable requires us to sweep precisely and abort any
738 // incremental marking as well. 729 // incremental marking as well.
739 static const int kMakeHeapIterableMask = 730 static const int kMakeHeapIterableMask =
740 kSweepPreciselyMask | kAbortIncrementalMarkingMask; 731 kSweepPreciselyMask | kAbortIncrementalMarkingMask;
741 732
742 // Performs a full garbage collection. If (flags & kMakeHeapIterableMask) is 733 // Performs a full garbage collection. If (flags & kMakeHeapIterableMask) is
743 // non-zero, then the slower precise sweeper is used, which leaves the heap 734 // non-zero, then the slower precise sweeper is used, which leaves the heap
744 // in a state where we can iterate over the heap visiting all objects. 735 // in a state where we can iterate over the heap visiting all objects.
745 void CollectAllGarbage( 736 void CollectAllGarbage(
746 int flags, 737 int flags, const char* gc_reason = NULL,
747 const char* gc_reason = NULL,
748 const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags); 738 const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);
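As a usage sketch, a caller that needs to walk all objects can combine the precise-sweep and abort-incremental-marking flags in a single full collection (heap here is assumed to be a Heap*):

    heap->CollectAllGarbage(Heap::kMakeHeapIterableMask, "make heap iterable");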
749 739
750 // Last-hope GC: should try to squeeze out as much memory as possible. 740 // Last-hope GC: should try to squeeze out as much memory as possible.
751 void CollectAllAvailableGarbage(const char* gc_reason = NULL); 741 void CollectAllAvailableGarbage(const char* gc_reason = NULL);
752 742
753 // Check whether the heap is currently iterable. 743 // Check whether the heap is currently iterable.
754 bool IsHeapIterable(); 744 bool IsHeapIterable();
755 745
756 // Notify the heap that a context has been disposed. 746 // Notify the heap that a context has been disposed.
757 int NotifyContextDisposed(); 747 int NotifyContextDisposed();
758 748
759 inline void increment_scan_on_scavenge_pages() { 749 inline void increment_scan_on_scavenge_pages() {
760 scan_on_scavenge_pages_++; 750 scan_on_scavenge_pages_++;
761 if (FLAG_gc_verbose) { 751 if (FLAG_gc_verbose) {
762 PrintF("Scan-on-scavenge pages: %d\n", scan_on_scavenge_pages_); 752 PrintF("Scan-on-scavenge pages: %d\n", scan_on_scavenge_pages_);
763 } 753 }
764 } 754 }
765 755
766 inline void decrement_scan_on_scavenge_pages() { 756 inline void decrement_scan_on_scavenge_pages() {
767 scan_on_scavenge_pages_--; 757 scan_on_scavenge_pages_--;
768 if (FLAG_gc_verbose) { 758 if (FLAG_gc_verbose) {
769 PrintF("Scan-on-scavenge pages: %d\n", scan_on_scavenge_pages_); 759 PrintF("Scan-on-scavenge pages: %d\n", scan_on_scavenge_pages_);
770 } 760 }
771 } 761 }
772 762
773 PromotionQueue* promotion_queue() { return &promotion_queue_; } 763 PromotionQueue* promotion_queue() { return &promotion_queue_; }
774 764
775 void AddGCPrologueCallback(v8::Isolate::GCPrologueCallback callback, 765 void AddGCPrologueCallback(v8::Isolate::GCPrologueCallback callback,
776 GCType gc_type_filter, 766 GCType gc_type_filter, bool pass_isolate = true);
777 bool pass_isolate = true);
778 void RemoveGCPrologueCallback(v8::Isolate::GCPrologueCallback callback); 767 void RemoveGCPrologueCallback(v8::Isolate::GCPrologueCallback callback);
779 768
780 void AddGCEpilogueCallback(v8::Isolate::GCEpilogueCallback callback, 769 void AddGCEpilogueCallback(v8::Isolate::GCEpilogueCallback callback,
781 GCType gc_type_filter, 770 GCType gc_type_filter, bool pass_isolate = true);
782 bool pass_isolate = true);
783 void RemoveGCEpilogueCallback(v8::Isolate::GCEpilogueCallback callback); 771 void RemoveGCEpilogueCallback(v8::Isolate::GCEpilogueCallback callback);
784 772
785 // Heap root getters. We have versions with and without type::cast() here. 773 // Heap root getters. We have versions with and without type::cast() here.
786 // You can't use type::cast during GC because the assert fails. 774 // You can't use type::cast during GC because the assert fails.
787 // TODO(1490): Try removing the unchecked accessors, now that GC marking does 775 // TODO(1490): Try removing the unchecked accessors, now that GC marking does
788 // not corrupt the map. 776 // not corrupt the map.
789 #define ROOT_ACCESSOR(type, name, camel_name) \ 777 #define ROOT_ACCESSOR(type, name, camel_name) \
790 type* name() { \ 778 type* name() { return type::cast(roots_[k##camel_name##RootIndex]); } \
791 return type::cast(roots_[k##camel_name##RootIndex]); \ 779 type* raw_unchecked_##name() { \
792 } \ 780 return reinterpret_cast<type*>(roots_[k##camel_name##RootIndex]); \
793 type* raw_unchecked_##name() { \
794 return reinterpret_cast<type*>(roots_[k##camel_name##RootIndex]); \
795 } 781 }
796 ROOT_LIST(ROOT_ACCESSOR) 782 ROOT_LIST(ROOT_ACCESSOR)
797 #undef ROOT_ACCESSOR 783 #undef ROOT_ACCESSOR
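For example, the STRONG_ROOT_LIST entry V(Map, byte_array_map, ByteArrayMap) expands through ROOT_ACCESSOR to roughly:

    Map* byte_array_map() { return Map::cast(roots_[kByteArrayMapRootIndex]); }
    Map* raw_unchecked_byte_array_map() {
      return reinterpret_cast<Map*>(roots_[kByteArrayMapRootIndex]);
    }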
798 784
799 // Utility type maps 785 // Utility type maps
800 #define STRUCT_MAP_ACCESSOR(NAME, Name, name) \ 786 #define STRUCT_MAP_ACCESSOR(NAME, Name, name) \
801 Map* name##_map() { \ 787 Map* name##_map() { return Map::cast(roots_[k##Name##MapRootIndex]); }
802 return Map::cast(roots_[k##Name##MapRootIndex]); \
803 }
804 STRUCT_LIST(STRUCT_MAP_ACCESSOR) 788 STRUCT_LIST(STRUCT_MAP_ACCESSOR)
805 #undef STRUCT_MAP_ACCESSOR 789 #undef STRUCT_MAP_ACCESSOR
806 790
807 #define STRING_ACCESSOR(name, str) String* name() { \ 791 #define STRING_ACCESSOR(name, str) \
808 return String::cast(roots_[k##name##RootIndex]); \ 792 String* name() { return String::cast(roots_[k##name##RootIndex]); }
809 }
810 INTERNALIZED_STRING_LIST(STRING_ACCESSOR) 793 INTERNALIZED_STRING_LIST(STRING_ACCESSOR)
811 #undef STRING_ACCESSOR 794 #undef STRING_ACCESSOR
812 795
813 // The hidden_string is special because it is the empty string, but does 796 // The hidden_string is special because it is the empty string, but does
814 // not match the empty string. 797 // not match the empty string.
815 String* hidden_string() { return hidden_string_; } 798 String* hidden_string() { return hidden_string_; }
816 799
817 void set_native_contexts_list(Object* object) { 800 void set_native_contexts_list(Object* object) {
818 native_contexts_list_ = object; 801 native_contexts_list_ = object;
819 } 802 }
820 Object* native_contexts_list() const { return native_contexts_list_; } 803 Object* native_contexts_list() const { return native_contexts_list_; }
821 804
822 void set_array_buffers_list(Object* object) { 805 void set_array_buffers_list(Object* object) { array_buffers_list_ = object; }
823 array_buffers_list_ = object;
824 }
825 Object* array_buffers_list() const { return array_buffers_list_; } 806 Object* array_buffers_list() const { return array_buffers_list_; }
826 807
827 void set_allocation_sites_list(Object* object) { 808 void set_allocation_sites_list(Object* object) {
828 allocation_sites_list_ = object; 809 allocation_sites_list_ = object;
829 } 810 }
830 Object* allocation_sites_list() { return allocation_sites_list_; } 811 Object* allocation_sites_list() { return allocation_sites_list_; }
831 812
832 // Used in CreateAllocationSiteStub and the (de)serializer. 813 // Used in CreateAllocationSiteStub and the (de)serializer.
833 Object** allocation_sites_list_address() { return &allocation_sites_list_; } 814 Object** allocation_sites_list_address() { return &allocation_sites_list_; }
834 815
(...skipping 14 matching lines...)
849 // Iterates over all strong roots in the heap. 830 // Iterates over all strong roots in the heap.
850 void IterateStrongRoots(ObjectVisitor* v, VisitMode mode); 831 void IterateStrongRoots(ObjectVisitor* v, VisitMode mode);
851 // Iterates over entries in the smi roots list. Only interesting to the 832 // Iterates over entries in the smi roots list. Only interesting to the
852 // serializer/deserializer, since GC does not care about smis. 833 // serializer/deserializer, since GC does not care about smis.
853 void IterateSmiRoots(ObjectVisitor* v); 834 void IterateSmiRoots(ObjectVisitor* v);
854 // Iterates over all the other roots in the heap. 835 // Iterates over all the other roots in the heap.
855 void IterateWeakRoots(ObjectVisitor* v, VisitMode mode); 836 void IterateWeakRoots(ObjectVisitor* v, VisitMode mode);
856 837
857 // Iterates over pointers into the from-semispace of new space found in 838 // Iterates over pointers into the from-semispace of new space found in
858 // the memory interval from start to end. 839 // the memory interval from start to end.
859 void IterateAndMarkPointersToFromSpace(Address start, 840 void IterateAndMarkPointersToFromSpace(Address start, Address end,
860 Address end,
861 ObjectSlotCallback callback); 841 ObjectSlotCallback callback);
862 842
863 // Returns whether the object resides in new space. 843 // Returns whether the object resides in new space.
864 inline bool InNewSpace(Object* object); 844 inline bool InNewSpace(Object* object);
865 inline bool InNewSpace(Address address); 845 inline bool InNewSpace(Address address);
866 inline bool InNewSpacePage(Address address); 846 inline bool InNewSpacePage(Address address);
867 inline bool InFromSpace(Object* object); 847 inline bool InFromSpace(Object* object);
868 inline bool InToSpace(Object* object); 848 inline bool InToSpace(Object* object);
869 849
870 // Returns whether the object resides in old pointer space. 850 // Returns whether the object resides in old pointer space.
(...skipping 109 matching lines...)
980 960
981 // Write barrier support for address[start : start + len[ = o. 961 // Write barrier support for address[start : start + len[ = o.
982 INLINE(void RecordWrites(Address address, int start, int len)); 962 INLINE(void RecordWrites(Address address, int start, int len));
983 963
984 enum HeapState { NOT_IN_GC, SCAVENGE, MARK_COMPACT }; 964 enum HeapState { NOT_IN_GC, SCAVENGE, MARK_COMPACT };
985 inline HeapState gc_state() { return gc_state_; } 965 inline HeapState gc_state() { return gc_state_; }
986 966
987 inline bool IsInGCPostProcessing() { return gc_post_processing_depth_ > 0; } 967 inline bool IsInGCPostProcessing() { return gc_post_processing_depth_ > 0; }
988 968
989 #ifdef DEBUG 969 #ifdef DEBUG
990 void set_allocation_timeout(int timeout) { 970 void set_allocation_timeout(int timeout) { allocation_timeout_ = timeout; }
991 allocation_timeout_ = timeout;
992 }
993 971
994 void TracePathToObjectFrom(Object* target, Object* root); 972 void TracePathToObjectFrom(Object* target, Object* root);
995 void TracePathToObject(Object* target); 973 void TracePathToObject(Object* target);
996 void TracePathToGlobal(); 974 void TracePathToGlobal();
997 #endif 975 #endif
998 976
999 // Callback function passed to Heap::Iterate etc. Copies an object if 977 // Callback function passed to Heap::Iterate etc. Copies an object if
1000 // necessary; the object might be promoted to an old space. The caller must 978 // necessary; the object might be promoted to an old space. The caller must
1001 // ensure the precondition that the object is (a) a heap object and (b) in 979 // ensure the precondition that the object is (a) a heap object and (b) in
1002 // the heap's from space. 980 // the heap's from space.
1003 static inline void ScavengePointer(HeapObject** p); 981 static inline void ScavengePointer(HeapObject** p);
1004 static inline void ScavengeObject(HeapObject** p, HeapObject* object); 982 static inline void ScavengeObject(HeapObject** p, HeapObject* object);
1005 983
1006 enum ScratchpadSlotMode { 984 enum ScratchpadSlotMode { IGNORE_SCRATCHPAD_SLOT, RECORD_SCRATCHPAD_SLOT };
1007 IGNORE_SCRATCHPAD_SLOT,
1008 RECORD_SCRATCHPAD_SLOT
1009 };
1010 985
1011 // If an object has an AllocationMemento trailing it, return it, otherwise 986 // If an object has an AllocationMemento trailing it, return it, otherwise
1012 // return NULL. 987 // return NULL.
1013 inline AllocationMemento* FindAllocationMemento(HeapObject* object); 988 inline AllocationMemento* FindAllocationMemento(HeapObject* object);
1014 989
1015 // An object may have an AllocationSite associated with it through a trailing 990 // An object may have an AllocationSite associated with it through a trailing
1016 // AllocationMemento. Its feedback should be updated when objects are found 991 // AllocationMemento. Its feedback should be updated when objects are found
1017 // in the heap. 992 // in the heap.
1018 static inline void UpdateAllocationSiteFeedback( 993 static inline void UpdateAllocationSiteFeedback(HeapObject* object,
1019 HeapObject* object, ScratchpadSlotMode mode); 994 ScratchpadSlotMode mode);
1020 995
1021 // Support for partial snapshots. After calling this we have a linear 996 // Support for partial snapshots. After calling this we have a linear
1022 // space to write objects in each space. 997 // space to write objects in each space.
1023 void ReserveSpace(int *sizes, Address* addresses); 998 void ReserveSpace(int* sizes, Address* addresses);
1024 999
1025 // 1000 //
1026 // Support for the API. 1001 // Support for the API.
1027 // 1002 //
1028 1003
1029 void CreateApiObjects(); 1004 void CreateApiObjects();
1030 1005
1031 inline intptr_t PromotedTotalSize() { 1006 inline intptr_t PromotedTotalSize() {
1032 int64_t total = PromotedSpaceSizeOfObjects() + PromotedExternalMemorySize(); 1007 int64_t total = PromotedSpaceSizeOfObjects() + PromotedExternalMemorySize();
1033 if (total > kMaxInt) return static_cast<intptr_t>(kMaxInt); 1008 if (total > kMaxInt) return static_cast<intptr_t>(kMaxInt);
1034 if (total < 0) return 0; 1009 if (total < 0) return 0;
1035 return static_cast<intptr_t>(total); 1010 return static_cast<intptr_t>(total);
1036 } 1011 }
1037 1012
1038 inline intptr_t OldGenerationSpaceAvailable() { 1013 inline intptr_t OldGenerationSpaceAvailable() {
1039 return old_generation_allocation_limit_ - PromotedTotalSize(); 1014 return old_generation_allocation_limit_ - PromotedTotalSize();
1040 } 1015 }
1041 1016
1042 inline intptr_t OldGenerationCapacityAvailable() { 1017 inline intptr_t OldGenerationCapacityAvailable() {
1043 return max_old_generation_size_ - PromotedTotalSize(); 1018 return max_old_generation_size_ - PromotedTotalSize();
1044 } 1019 }
1045 1020
1046 static const intptr_t kMinimumOldGenerationAllocationLimit = 1021 static const intptr_t kMinimumOldGenerationAllocationLimit =
1047 8 * (Page::kPageSize > MB ? Page::kPageSize : MB); 1022 8 * (Page::kPageSize > MB ? Page::kPageSize : MB);
1048 1023
1049 static const int kPointerMultiplier = i::kPointerSize / 4; 1024 static const int kPointerMultiplier = i::kPointerSize / 4;
1050 1025
1051 // The new space size has to be a power of 2. Sizes are in MB. 1026 // The new space size has to be a power of 2. Sizes are in MB.
1052 static const int kMaxSemiSpaceSizeLowMemoryDevice = 1027 static const int kMaxSemiSpaceSizeLowMemoryDevice = 1 * kPointerMultiplier;
1053 1 * kPointerMultiplier; 1028 static const int kMaxSemiSpaceSizeMediumMemoryDevice = 4 * kPointerMultiplier;
1054 static const int kMaxSemiSpaceSizeMediumMemoryDevice = 1029 static const int kMaxSemiSpaceSizeHighMemoryDevice = 8 * kPointerMultiplier;
1055 4 * kPointerMultiplier; 1030 static const int kMaxSemiSpaceSizeHugeMemoryDevice = 8 * kPointerMultiplier;
1056 static const int kMaxSemiSpaceSizeHighMemoryDevice =
1057 8 * kPointerMultiplier;
1058 static const int kMaxSemiSpaceSizeHugeMemoryDevice =
1059 8 * kPointerMultiplier;
1060 1031
1061 // The old space size has to be a multiple of Page::kPageSize. 1032 // The old space size has to be a multiple of Page::kPageSize.
1062 // Sizes are in MB. 1033 // Sizes are in MB.
1063 static const int kMaxOldSpaceSizeLowMemoryDevice = 1034 static const int kMaxOldSpaceSizeLowMemoryDevice = 128 * kPointerMultiplier;
1064 128 * kPointerMultiplier;
1065 static const int kMaxOldSpaceSizeMediumMemoryDevice = 1035 static const int kMaxOldSpaceSizeMediumMemoryDevice =
1066 256 * kPointerMultiplier; 1036 256 * kPointerMultiplier;
1067 static const int kMaxOldSpaceSizeHighMemoryDevice = 1037 static const int kMaxOldSpaceSizeHighMemoryDevice = 512 * kPointerMultiplier;
1068 512 * kPointerMultiplier; 1038 static const int kMaxOldSpaceSizeHugeMemoryDevice = 700 * kPointerMultiplier;
1069 static const int kMaxOldSpaceSizeHugeMemoryDevice =
1070 700 * kPointerMultiplier;
1071 1039
1072 // The executable size has to be a multiple of Page::kPageSize. 1040 // The executable size has to be a multiple of Page::kPageSize.
1073 // Sizes are in MB. 1041 // Sizes are in MB.
1074 static const int kMaxExecutableSizeLowMemoryDevice = 96 * kPointerMultiplier; 1042 static const int kMaxExecutableSizeLowMemoryDevice = 96 * kPointerMultiplier;
1075 static const int kMaxExecutableSizeMediumMemoryDevice = 1043 static const int kMaxExecutableSizeMediumMemoryDevice =
1076 192 * kPointerMultiplier; 1044 192 * kPointerMultiplier;
1077 static const int kMaxExecutableSizeHighMemoryDevice = 1045 static const int kMaxExecutableSizeHighMemoryDevice =
1078 256 * kPointerMultiplier; 1046 256 * kPointerMultiplier;
1079 static const int kMaxExecutableSizeHugeMemoryDevice = 1047 static const int kMaxExecutableSizeHugeMemoryDevice =
1080 256 * kPointerMultiplier; 1048 256 * kPointerMultiplier;
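As a worked example of the scaling: kPointerMultiplier is i::kPointerSize / 4, i.e. 1 on a 32-bit build and 2 on a 64-bit build, so kMaxOldSpaceSizeHugeMemoryDevice comes out to 700 MB and 1400 MB respectively.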
(...skipping 14 matching lines...)
1095 // Declare all the root indices. This defines the root list order. 1063 // Declare all the root indices. This defines the root list order.
1096 enum RootListIndex { 1064 enum RootListIndex {
1097 #define ROOT_INDEX_DECLARATION(type, name, camel_name) k##camel_name##RootIndex, 1065 #define ROOT_INDEX_DECLARATION(type, name, camel_name) k##camel_name##RootIndex,
1098 STRONG_ROOT_LIST(ROOT_INDEX_DECLARATION) 1066 STRONG_ROOT_LIST(ROOT_INDEX_DECLARATION)
1099 #undef ROOT_INDEX_DECLARATION 1067 #undef ROOT_INDEX_DECLARATION
1100 1068
1101 #define STRING_INDEX_DECLARATION(name, str) k##name##RootIndex, 1069 #define STRING_INDEX_DECLARATION(name, str) k##name##RootIndex,
1102 INTERNALIZED_STRING_LIST(STRING_INDEX_DECLARATION) 1070 INTERNALIZED_STRING_LIST(STRING_INDEX_DECLARATION)
1103 #undef STRING_INDEX_DECLARATION 1071 #undef STRING_INDEX_DECLARATION
1104 1072
1105 // Utility type maps 1073 // Utility type maps
1106 #define DECLARE_STRUCT_MAP(NAME, Name, name) k##Name##MapRootIndex, 1074 #define DECLARE_STRUCT_MAP(NAME, Name, name) k##Name##MapRootIndex,
1107 STRUCT_LIST(DECLARE_STRUCT_MAP) 1075 STRUCT_LIST(DECLARE_STRUCT_MAP)
1108 #undef DECLARE_STRUCT_MAP 1076 #undef DECLARE_STRUCT_MAP
1109
1110 kStringTableRootIndex, 1077 kStringTableRootIndex,
1111 1078
1112 #define ROOT_INDEX_DECLARATION(type, name, camel_name) k##camel_name##RootIndex, 1079 #define ROOT_INDEX_DECLARATION(type, name, camel_name) k##camel_name##RootIndex,
1113 SMI_ROOT_LIST(ROOT_INDEX_DECLARATION) 1080 SMI_ROOT_LIST(ROOT_INDEX_DECLARATION)
1114 #undef ROOT_INDEX_DECLARATION 1081 #undef ROOT_INDEX_DECLARATION
1115
1116 kRootListLength, 1082 kRootListLength,
1117 kStrongRootListLength = kStringTableRootIndex, 1083 kStrongRootListLength = kStringTableRootIndex,
1118 kSmiRootsStart = kStringTableRootIndex + 1 1084 kSmiRootsStart = kStringTableRootIndex + 1
1119 }; 1085 };
1120 1086
1121 STATIC_ASSERT(kUndefinedValueRootIndex == 1087 STATIC_ASSERT(kUndefinedValueRootIndex ==
1122 Internals::kUndefinedValueRootIndex); 1088 Internals::kUndefinedValueRootIndex);
1123 STATIC_ASSERT(kNullValueRootIndex == Internals::kNullValueRootIndex); 1089 STATIC_ASSERT(kNullValueRootIndex == Internals::kNullValueRootIndex);
1124 STATIC_ASSERT(kTrueValueRootIndex == Internals::kTrueValueRootIndex); 1090 STATIC_ASSERT(kTrueValueRootIndex == Internals::kTrueValueRootIndex);
1125 STATIC_ASSERT(kFalseValueRootIndex == Internals::kFalseValueRootIndex); 1091 STATIC_ASSERT(kFalseValueRootIndex == Internals::kFalseValueRootIndex);
1126 STATIC_ASSERT(kempty_stringRootIndex == Internals::kEmptyStringRootIndex); 1092 STATIC_ASSERT(kempty_stringRootIndex == Internals::kEmptyStringRootIndex);
1127 1093
1128 // Generated code can embed direct references to non-writable roots if 1094 // Generated code can embed direct references to non-writable roots if
1129 // they are in new space. 1095 // they are in new space.
1130 static bool RootCanBeWrittenAfterInitialization(RootListIndex root_index); 1096 static bool RootCanBeWrittenAfterInitialization(RootListIndex root_index);
1131 // Generated code can treat direct references to this root as constant. 1097 // Generated code can treat direct references to this root as constant.
1132 bool RootCanBeTreatedAsConstant(RootListIndex root_index); 1098 bool RootCanBeTreatedAsConstant(RootListIndex root_index);
1133 1099
1134 Map* MapForFixedTypedArray(ExternalArrayType array_type); 1100 Map* MapForFixedTypedArray(ExternalArrayType array_type);
1135 RootListIndex RootIndexForFixedTypedArray( 1101 RootListIndex RootIndexForFixedTypedArray(ExternalArrayType array_type);
1136 ExternalArrayType array_type);
1137 1102
1138 Map* MapForExternalArrayType(ExternalArrayType array_type); 1103 Map* MapForExternalArrayType(ExternalArrayType array_type);
1139 RootListIndex RootIndexForExternalArrayType( 1104 RootListIndex RootIndexForExternalArrayType(ExternalArrayType array_type);
1140 ExternalArrayType array_type);
1141 1105
1142 RootListIndex RootIndexForEmptyExternalArray(ElementsKind kind); 1106 RootListIndex RootIndexForEmptyExternalArray(ElementsKind kind);
1143 RootListIndex RootIndexForEmptyFixedTypedArray(ElementsKind kind); 1107 RootListIndex RootIndexForEmptyFixedTypedArray(ElementsKind kind);
1144 ExternalArray* EmptyExternalArrayForMap(Map* map); 1108 ExternalArray* EmptyExternalArrayForMap(Map* map);
1145 FixedTypedArrayBase* EmptyFixedTypedArrayForMap(Map* map); 1109 FixedTypedArrayBase* EmptyFixedTypedArrayForMap(Map* map);
1146 1110
1147 void RecordStats(HeapStats* stats, bool take_snapshot = false); 1111 void RecordStats(HeapStats* stats, bool take_snapshot = false);
1148 1112
1149 // Copy block of memory from src to dst. Size of block should be aligned 1113 // Copy block of memory from src to dst. Size of block should be aligned
1150 // by pointer size. 1114 // by pointer size.
1151 static inline void CopyBlock(Address dst, Address src, int byte_size); 1115 static inline void CopyBlock(Address dst, Address src, int byte_size);
1152 1116
1153 // Optimized version of memmove for blocks with pointer size aligned sizes and 1117 // Optimized version of memmove for blocks with pointer size aligned sizes and
1154 // pointer size aligned addresses. 1118 // pointer size aligned addresses.
1155 static inline void MoveBlock(Address dst, Address src, int byte_size); 1119 static inline void MoveBlock(Address dst, Address src, int byte_size);
1156 1120
1157 // Check new space expansion criteria and expand semispaces if they were hit. 1121 // Check new space expansion criteria and expand semispaces if they were hit.
1158 void CheckNewSpaceExpansionCriteria(); 1122 void CheckNewSpaceExpansionCriteria();
1159 1123
1160 inline void IncrementPromotedObjectsSize(int object_size) { 1124 inline void IncrementPromotedObjectsSize(int object_size) {
1161 DCHECK(object_size > 0); 1125 DCHECK(object_size > 0);
1162 promoted_objects_size_ += object_size; 1126 promoted_objects_size_ += object_size;
1163 } 1127 }
1164 1128
1165 inline void IncrementSemiSpaceCopiedObjectSize(int object_size) { 1129 inline void IncrementSemiSpaceCopiedObjectSize(int object_size) {
1166 DCHECK(object_size > 0); 1130 DCHECK(object_size > 0);
1167 semi_space_copied_object_size_ += object_size; 1131 semi_space_copied_object_size_ += object_size;
1168 } 1132 }
1169 1133
1170 inline void IncrementNodesDiedInNewSpace() { 1134 inline void IncrementNodesDiedInNewSpace() { nodes_died_in_new_space_++; }
1171 nodes_died_in_new_space_++;
1172 }
1173 1135
1174 inline void IncrementNodesCopiedInNewSpace() { 1136 inline void IncrementNodesCopiedInNewSpace() { nodes_copied_in_new_space_++; }
1175 nodes_copied_in_new_space_++;
1176 }
1177 1137
1178 inline void IncrementNodesPromoted() { 1138 inline void IncrementNodesPromoted() { nodes_promoted_++; }
1179 nodes_promoted_++;
1180 }
1181 1139
1182 inline void IncrementYoungSurvivorsCounter(int survived) { 1140 inline void IncrementYoungSurvivorsCounter(int survived) {
1183 DCHECK(survived >= 0); 1141 DCHECK(survived >= 0);
1184 survived_since_last_expansion_ += survived; 1142 survived_since_last_expansion_ += survived;
1185 } 1143 }
1186 1144
1187 inline bool NextGCIsLikelyToBeFull() { 1145 inline bool NextGCIsLikelyToBeFull() {
1188 if (FLAG_gc_global) return true; 1146 if (FLAG_gc_global) return true;
1189 1147
1190 if (FLAG_stress_compaction && (gc_count_ & 1) != 0) return true; 1148 if (FLAG_stress_compaction && (gc_count_ & 1) != 0) return true;
(...skipping 52 matching lines...)
1243 // Returns maximum size of objects alive after GC. 1201 // Returns maximum size of objects alive after GC.
1244 intptr_t get_max_alive_after_gc() { return max_alive_after_gc_; } 1202 intptr_t get_max_alive_after_gc() { return max_alive_after_gc_; }
1245 1203
1246 // Returns minimal interval between two subsequent collections. 1204 // Returns minimal interval between two subsequent collections.
1247 double get_min_in_mutator() { return min_in_mutator_; } 1205 double get_min_in_mutator() { return min_in_mutator_; }
1248 1206
1249 MarkCompactCollector* mark_compact_collector() { 1207 MarkCompactCollector* mark_compact_collector() {
1250 return &mark_compact_collector_; 1208 return &mark_compact_collector_;
1251 } 1209 }
1252 1210
1253 StoreBuffer* store_buffer() { 1211 StoreBuffer* store_buffer() { return &store_buffer_; }
1254 return &store_buffer_;
1255 }
1256 1212
1257 Marking* marking() { 1213 Marking* marking() { return &marking_; }
1258 return &marking_;
1259 }
1260 1214
1261 IncrementalMarking* incremental_marking() { 1215 IncrementalMarking* incremental_marking() { return &incremental_marking_; }
1262 return &incremental_marking_;
1263 }
1264 1216
1265 ExternalStringTable* external_string_table() { 1217 ExternalStringTable* external_string_table() {
1266 return &external_string_table_; 1218 return &external_string_table_;
1267 } 1219 }
1268 1220
1269 // Returns the current sweep generation. 1221 // Returns the current sweep generation.
1270 int sweep_generation() { 1222 int sweep_generation() { return sweep_generation_; }
1271 return sweep_generation_;
1272 }
1273 1223
1274 inline Isolate* isolate(); 1224 inline Isolate* isolate();
1275 1225
1276 void CallGCPrologueCallbacks(GCType gc_type, GCCallbackFlags flags); 1226 void CallGCPrologueCallbacks(GCType gc_type, GCCallbackFlags flags);
1277 void CallGCEpilogueCallbacks(GCType gc_type, GCCallbackFlags flags); 1227 void CallGCEpilogueCallbacks(GCType gc_type, GCCallbackFlags flags);
1278 1228
1279 inline bool OldGenerationAllocationLimitReached(); 1229 inline bool OldGenerationAllocationLimitReached();
1280 1230
1281 inline void DoScavengeObject(Map* map, HeapObject** slot, HeapObject* obj) { 1231 inline void DoScavengeObject(Map* map, HeapObject** slot, HeapObject* obj) {
1282 scavenging_visitors_table_.GetVisitor(map)(map, slot, obj); 1232 scavenging_visitors_table_.GetVisitor(map)(map, slot, obj);
(...skipping 35 matching lines...)
1318 void SetSetterStubDeoptPCOffset(int pc_offset) { 1268 void SetSetterStubDeoptPCOffset(int pc_offset) {
1319 DCHECK(setter_stub_deopt_pc_offset() == Smi::FromInt(0)); 1269 DCHECK(setter_stub_deopt_pc_offset() == Smi::FromInt(0));
1320 set_setter_stub_deopt_pc_offset(Smi::FromInt(pc_offset)); 1270 set_setter_stub_deopt_pc_offset(Smi::FromInt(pc_offset));
1321 } 1271 }
1322 1272
1323 // For post mortem debugging. 1273 // For post mortem debugging.
1324 void RememberUnmappedPage(Address page, bool compacted); 1274 void RememberUnmappedPage(Address page, bool compacted);
1325 1275
1326 // Global inline caching age: it is incremented on some GCs after context 1276 // Global inline caching age: it is incremented on some GCs after context
1327 // disposal. We use it to flush inline caches. 1277 // disposal. We use it to flush inline caches.
1328 int global_ic_age() { 1278 int global_ic_age() { return global_ic_age_; }
1329 return global_ic_age_;
1330 }
1331 1279
1332 void AgeInlineCaches() { 1280 void AgeInlineCaches() {
1333 global_ic_age_ = (global_ic_age_ + 1) & SharedFunctionInfo::ICAgeBits::kMax; 1281 global_ic_age_ = (global_ic_age_ + 1) & SharedFunctionInfo::ICAgeBits::kMax;
1334 } 1282 }
1335 1283
1336 bool flush_monomorphic_ics() { return flush_monomorphic_ics_; } 1284 bool flush_monomorphic_ics() { return flush_monomorphic_ics_; }
1337 1285
1338 int64_t amount_of_external_allocated_memory() { 1286 int64_t amount_of_external_allocated_memory() {
1339 return amount_of_external_allocated_memory_; 1287 return amount_of_external_allocated_memory_;
1340 } 1288 }
1341 1289
1342 void DeoptMarkedAllocationSites(); 1290 void DeoptMarkedAllocationSites();
1343 1291
1344 bool MaximumSizeScavenge() { 1292 bool MaximumSizeScavenge() { return maximum_size_scavenges_ > 0; }
1345 return maximum_size_scavenges_ > 0;
1346 }
1347 1293
1348 bool DeoptMaybeTenuredAllocationSites() { 1294 bool DeoptMaybeTenuredAllocationSites() {
1349 return new_space_.IsAtMaximumCapacity() && maximum_size_scavenges_ == 0; 1295 return new_space_.IsAtMaximumCapacity() && maximum_size_scavenges_ == 0;
1350 } 1296 }
1351 1297
1352 // ObjectStats are kept in two arrays, counts and sizes. Related stats are 1298 // ObjectStats are kept in two arrays, counts and sizes. Related stats are
1353 // stored in a contiguous linear buffer. Stats groups are stored one after 1299 // stored in a contiguous linear buffer. Stats groups are stored one after
1354 // another. 1300 // another.
1355 enum { 1301 enum {
1356 FIRST_CODE_KIND_SUB_TYPE = LAST_TYPE + 1, 1302 FIRST_CODE_KIND_SUB_TYPE = LAST_TYPE + 1,
(...skipping 34 matching lines...)
1391 1337
1392 // We don't use a LockGuard here since we want to lock the heap 1338 // We don't use a LockGuard here since we want to lock the heap
1393 // only when FLAG_concurrent_recompilation is true. 1339 // only when FLAG_concurrent_recompilation is true.
1394 class RelocationLock { 1340 class RelocationLock {
1395 public: 1341 public:
1396 explicit RelocationLock(Heap* heap) : heap_(heap) { 1342 explicit RelocationLock(Heap* heap) : heap_(heap) {
1397 heap_->relocation_mutex_.Lock(); 1343 heap_->relocation_mutex_.Lock();
1398 } 1344 }
1399 1345
1400 1346
1401 ~RelocationLock() { 1347 ~RelocationLock() { heap_->relocation_mutex_.Unlock(); }
1402 heap_->relocation_mutex_.Unlock();
1403 }
1404 1348
1405 private: 1349 private:
1406 Heap* heap_; 1350 Heap* heap_;
1407 }; 1351 };
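RelocationLock is a scoped (RAII) lock: the constructor acquires the relocation mutex and the destructor releases it, so a typical use is just a block-scoped local (sketch, assuming heap is a Heap*):

    {
      Heap::RelocationLock relocation_lock(heap);
      // ... work that must not race with relocating GC ...
    }  // mutex released when relocation_lock goes out of scope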
1408 1352
1409 void AddWeakObjectToCodeDependency(Handle<Object> obj, 1353 void AddWeakObjectToCodeDependency(Handle<Object> obj,
1410 Handle<DependentCode> dep); 1354 Handle<DependentCode> dep);
1411 1355
1412 DependentCode* LookupWeakObjectToCodeDependency(Handle<Object> obj); 1356 DependentCode* LookupWeakObjectToCodeDependency(Handle<Object> obj);
1413 1357
1414 void InitializeWeakObjectToCodeTable() { 1358 void InitializeWeakObjectToCodeTable() {
1415 set_weak_object_to_code_table(undefined_value()); 1359 set_weak_object_to_code_table(undefined_value());
1416 } 1360 }
1417 1361
1418 void EnsureWeakObjectToCodeTable(); 1362 void EnsureWeakObjectToCodeTable();
1419 1363
1420 static void FatalProcessOutOfMemory(const char* location, 1364 static void FatalProcessOutOfMemory(const char* location,
1421 bool take_snapshot = false); 1365 bool take_snapshot = false);
1422 1366
1423 // This event is triggered after successful allocation of a new object made 1367 // This event is triggered after successful allocation of a new object made
1424 // by the runtime. Allocations of target space for object evacuation do not 1368 // by the runtime. Allocations of target space for object evacuation do not
1425 // trigger the event. In order to track ALL allocations one must turn off 1369 // trigger the event. In order to track ALL allocations one must turn off
1426 // FLAG_inline_new and FLAG_use_allocation_folding. 1370 // FLAG_inline_new and FLAG_use_allocation_folding.
1427 inline void OnAllocationEvent(HeapObject* object, int size_in_bytes); 1371 inline void OnAllocationEvent(HeapObject* object, int size_in_bytes);
1428 1372
1429 // This event is triggered after object is moved to a new place. 1373 // This event is triggered after object is moved to a new place.
1430 inline void OnMoveEvent(HeapObject* target, 1374 inline void OnMoveEvent(HeapObject* target, HeapObject* source,
1431 HeapObject* source,
1432 int size_in_bytes); 1375 int size_in_bytes);
1433 1376
1434 protected: 1377 protected:
1435 // Methods made available to tests. 1378 // Methods made available to tests.
1436 1379
1437 // Allocates a JS Map in the heap. 1380 // Allocates a JS Map in the heap.
1438 MUST_USE_RESULT AllocationResult AllocateMap( 1381 MUST_USE_RESULT AllocationResult
1439 InstanceType instance_type, 1382 AllocateMap(InstanceType instance_type, int instance_size,
1440 int instance_size, 1383 ElementsKind elements_kind = TERMINAL_FAST_ELEMENTS_KIND);
1441 ElementsKind elements_kind = TERMINAL_FAST_ELEMENTS_KIND);
1442 1384
1443 // Allocates and initializes a new JavaScript object based on a 1385 // Allocates and initializes a new JavaScript object based on a
1444 // constructor. 1386 // constructor.
1445 // If allocation_site is non-null, then a memento is emitted after the object 1387 // If allocation_site is non-null, then a memento is emitted after the object
1446 // that points to the site. 1388 // that points to the site.
1447 MUST_USE_RESULT AllocationResult AllocateJSObject( 1389 MUST_USE_RESULT AllocationResult
1448 JSFunction* constructor, 1390 AllocateJSObject(JSFunction* constructor,
1449 PretenureFlag pretenure = NOT_TENURED, 1391 PretenureFlag pretenure = NOT_TENURED,
1450 AllocationSite* allocation_site = NULL); 1392 AllocationSite* allocation_site = NULL);
1451 1393
1452 // Allocates and initializes a new JavaScript object based on a map. 1394 // Allocates and initializes a new JavaScript object based on a map.
1453 // Passing an allocation site means that a memento will be created that 1395 // Passing an allocation site means that a memento will be created that
1454 // points to the site. 1396 // points to the site.
1455 MUST_USE_RESULT AllocationResult AllocateJSObjectFromMap( 1397 MUST_USE_RESULT AllocationResult
1456 Map* map, 1398 AllocateJSObjectFromMap(Map* map, PretenureFlag pretenure = NOT_TENURED,
1457 PretenureFlag pretenure = NOT_TENURED, 1399 bool alloc_props = true,
1458 bool alloc_props = true, 1400 AllocationSite* allocation_site = NULL);
1459 AllocationSite* allocation_site = NULL);
1460 1401
1461 // Allocates a HeapNumber from value. 1402 // Allocates a HeapNumber from value.
1462 MUST_USE_RESULT AllocationResult AllocateHeapNumber( 1403 MUST_USE_RESULT AllocationResult
1463 double value, 1404 AllocateHeapNumber(double value, MutableMode mode = IMMUTABLE,
1464 MutableMode mode = IMMUTABLE, 1405 PretenureFlag pretenure = NOT_TENURED);
1465 PretenureFlag pretenure = NOT_TENURED);
1466 1406
1467 // Allocate a byte array of the specified length 1407 // Allocate a byte array of the specified length
1468 MUST_USE_RESULT AllocationResult AllocateByteArray( 1408 MUST_USE_RESULT AllocationResult
1469 int length, 1409 AllocateByteArray(int length, PretenureFlag pretenure = NOT_TENURED);
1470 PretenureFlag pretenure = NOT_TENURED);
1471 1410
1472 // Copy the code and scope info part of the code object, but insert 1411 // Copy the code and scope info part of the code object, but insert
1473 // the provided data as the relocation information. 1412 // the provided data as the relocation information.
1474 MUST_USE_RESULT AllocationResult CopyCode(Code* code, 1413 MUST_USE_RESULT AllocationResult
1475 Vector<byte> reloc_info); 1414 CopyCode(Code* code, Vector<byte> reloc_info);
1476 1415
1477 MUST_USE_RESULT AllocationResult CopyCode(Code* code); 1416 MUST_USE_RESULT AllocationResult CopyCode(Code* code);
1478 1417
1479 // Allocates a fixed array initialized with undefined values 1418 // Allocates a fixed array initialized with undefined values
1480 MUST_USE_RESULT AllocationResult AllocateFixedArray( 1419 MUST_USE_RESULT AllocationResult
1481 int length, 1420 AllocateFixedArray(int length, PretenureFlag pretenure = NOT_TENURED);
1482 PretenureFlag pretenure = NOT_TENURED);
1483 1421
1484 private: 1422 private:
1485 Heap(); 1423 Heap();
1486 1424
1487 // The amount of external memory registered through the API that is kept 1425 // The amount of external memory registered through the API that is kept
1488 // alive by global handles. 1426 // alive by global handles.
1489 int64_t amount_of_external_allocated_memory_; 1427 int64_t amount_of_external_allocated_memory_;
1490 1428
1491 // Caches the amount of external memory registered at the last global gc. 1429 // Caches the amount of external memory registered at the last global gc.
1492 int64_t amount_of_external_allocated_memory_at_last_global_gc_; 1430 int64_t amount_of_external_allocated_memory_at_last_global_gc_;
(...skipping 61 matching lines...)
1554 unsigned int gc_count_; 1492 unsigned int gc_count_;
1555 1493
1556 // For post-mortem debugging. 1494 // For post-mortem debugging.
1557 static const int kRememberedUnmappedPages = 128; 1495 static const int kRememberedUnmappedPages = 128;
1558 int remembered_unmapped_pages_index_; 1496 int remembered_unmapped_pages_index_;
1559 Address remembered_unmapped_pages_[kRememberedUnmappedPages]; 1497 Address remembered_unmapped_pages_[kRememberedUnmappedPages];
1560 1498
1561 // Total length of the strings we failed to flatten since the last GC. 1499 // Total length of the strings we failed to flatten since the last GC.
1562 int unflattened_strings_length_; 1500 int unflattened_strings_length_;
1563 1501
1564 #define ROOT_ACCESSOR(type, name, camel_name) \ 1502 #define ROOT_ACCESSOR(type, name, camel_name) \
1565 inline void set_##name(type* value) { \ 1503 inline void set_##name(type* value) { \
1566 /* The deserializer makes use of the fact that these common roots are */ \ 1504 /* The deserializer makes use of the fact that these common roots are */ \
1567 /* never in new space and never on a page that is being compacted. */ \ 1505 /* never in new space and never on a page that is being compacted. */ \
1568 DCHECK(k##camel_name##RootIndex >= kOldSpaceRoots || !InNewSpace(value)); \ 1506 DCHECK(k##camel_name##RootIndex >= kOldSpaceRoots || !InNewSpace(value)); \
1569 roots_[k##camel_name##RootIndex] = value; \ 1507 roots_[k##camel_name##RootIndex] = value; \
1570 } 1508 }
1571 ROOT_LIST(ROOT_ACCESSOR) 1509 ROOT_LIST(ROOT_ACCESSOR)
1572 #undef ROOT_ACCESSOR 1510 #undef ROOT_ACCESSOR
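For a single entry of the root list, e.g. V(Map, byte_array_map, ByteArrayMap), the macro above generates roughly this setter:

inline void set_byte_array_map(Map* value) {
  // Common roots are never in new space and never on a compacting page.
  DCHECK(kByteArrayMapRootIndex >= kOldSpaceRoots || !InNewSpace(value));
  roots_[kByteArrayMapRootIndex] = value;
}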
1573 1511
1574 #ifdef DEBUG 1512 #ifdef DEBUG
1575 // If the --gc-interval flag is set to a positive value, this 1513 // If the --gc-interval flag is set to a positive value, this
1576 // variable holds the number of allocations remaining until the next 1514 // variable holds the number of allocations remaining until the next
1577 // failure and garbage collection. 1515 // failure and garbage collection.
1578 int allocation_timeout_; 1516 int allocation_timeout_;
1579 #endif // DEBUG 1517 #endif // DEBUG
(...skipping 52 matching lines...)
1632 static const StructTable struct_table[]; 1570 static const StructTable struct_table[];
1633 1571
1634 // The special hidden string, which is an empty string but does not match 1572 // The special hidden string, which is an empty string but does not match
1635 // any string when looked up in properties. 1573 // any string when looked up in properties.
1636 String* hidden_string_; 1574 String* hidden_string_;
1637 1575
1638 // GC callback function, called before and after mark-compact GC. 1576 // GC callback function, called before and after mark-compact GC.
1639 // Allocations in the callback function are disallowed. 1577 // Allocations in the callback function are disallowed.
1640 struct GCPrologueCallbackPair { 1578 struct GCPrologueCallbackPair {
1641 GCPrologueCallbackPair(v8::Isolate::GCPrologueCallback callback, 1579 GCPrologueCallbackPair(v8::Isolate::GCPrologueCallback callback,
1642 GCType gc_type, 1580 GCType gc_type, bool pass_isolate)
1643 bool pass_isolate) 1581 : callback(callback), gc_type(gc_type), pass_isolate_(pass_isolate) {}
1644 : callback(callback), gc_type(gc_type), pass_isolate_(pass_isolate) {
1645 }
1646 bool operator==(const GCPrologueCallbackPair& pair) const { 1582 bool operator==(const GCPrologueCallbackPair& pair) const {
1647 return pair.callback == callback; 1583 return pair.callback == callback;
1648 } 1584 }
1649 v8::Isolate::GCPrologueCallback callback; 1585 v8::Isolate::GCPrologueCallback callback;
1650 GCType gc_type; 1586 GCType gc_type;
1651 // TODO(dcarney): remove variable 1587 // TODO(dcarney): remove variable
1652 bool pass_isolate_; 1588 bool pass_isolate_;
1653 }; 1589 };
1654 List<GCPrologueCallbackPair> gc_prologue_callbacks_; 1590 List<GCPrologueCallbackPair> gc_prologue_callbacks_;
1655 1591
1656 struct GCEpilogueCallbackPair { 1592 struct GCEpilogueCallbackPair {
1657 GCEpilogueCallbackPair(v8::Isolate::GCPrologueCallback callback, 1593 GCEpilogueCallbackPair(v8::Isolate::GCPrologueCallback callback,
1658 GCType gc_type, 1594 GCType gc_type, bool pass_isolate)
1659 bool pass_isolate) 1595 : callback(callback), gc_type(gc_type), pass_isolate_(pass_isolate) {}
1660 : callback(callback), gc_type(gc_type), pass_isolate_(pass_isolate) {
1661 }
1662 bool operator==(const GCEpilogueCallbackPair& pair) const { 1596 bool operator==(const GCEpilogueCallbackPair& pair) const {
1663 return pair.callback == callback; 1597 return pair.callback == callback;
1664 } 1598 }
1665 v8::Isolate::GCPrologueCallback callback; 1599 v8::Isolate::GCPrologueCallback callback;
1666 GCType gc_type; 1600 GCType gc_type;
1667 // TODO(dcarney): remove variable 1601 // TODO(dcarney): remove variable
1668 bool pass_isolate_; 1602 bool pass_isolate_;
1669 }; 1603 };
1670 List<GCEpilogueCallbackPair> gc_epilogue_callbacks_; 1604 List<GCEpilogueCallbackPair> gc_epilogue_callbacks_;
1671 1605
(...skipping 28 matching lines...)
1700 void EnsureFillerObjectAtTop(); 1634 void EnsureFillerObjectAtTop();
1701 1635
1702 // Ensure that we have swept all spaces in such a way that we can iterate 1636 // Ensure that we have swept all spaces in such a way that we can iterate
1703 // over all objects. May cause a GC. 1637 // over all objects. May cause a GC.
1704 void MakeHeapIterable(); 1638 void MakeHeapIterable();
1705 1639
1706 // Performs a garbage collection operation. 1640 // Performs a garbage collection operation.
1707 // Returns whether there is a chance that another major GC could 1641 // Returns whether there is a chance that another major GC could
1708 // collect more garbage. 1642 // collect more garbage.
1709 bool CollectGarbage( 1643 bool CollectGarbage(
1710 GarbageCollector collector, 1644 GarbageCollector collector, const char* gc_reason,
1711 const char* gc_reason,
1712 const char* collector_reason, 1645 const char* collector_reason,
1713 const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags); 1646 const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);
1714 1647
1715 // Performs garbage collection. 1648 // Performs garbage collection.
1716 // Returns whether there is a chance another major GC could 1649 // Returns whether there is a chance another major GC could
1717 // collect more garbage. 1650 // collect more garbage.
1718 bool PerformGarbageCollection( 1651 bool PerformGarbageCollection(
1719 GarbageCollector collector, 1652 GarbageCollector collector,
1720 const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags); 1653 const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);
1721 1654
1722 inline void UpdateOldSpaceLimits(); 1655 inline void UpdateOldSpaceLimits();
1723 1656
1724 // Selects the proper allocation space depending on the given object 1657 // Selects the proper allocation space depending on the given object
1725 // size, pretenuring decision, and preferred old-space. 1658 // size, pretenuring decision, and preferred old-space.
1726 static AllocationSpace SelectSpace(int object_size, 1659 static AllocationSpace SelectSpace(int object_size,
1727 AllocationSpace preferred_old_space, 1660 AllocationSpace preferred_old_space,
1728 PretenureFlag pretenure) { 1661 PretenureFlag pretenure) {
1729 DCHECK(preferred_old_space == OLD_POINTER_SPACE || 1662 DCHECK(preferred_old_space == OLD_POINTER_SPACE ||
1730 preferred_old_space == OLD_DATA_SPACE); 1663 preferred_old_space == OLD_DATA_SPACE);
1731 if (object_size > Page::kMaxRegularHeapObjectSize) return LO_SPACE; 1664 if (object_size > Page::kMaxRegularHeapObjectSize) return LO_SPACE;
1732 return (pretenure == TENURED) ? preferred_old_space : NEW_SPACE; 1665 return (pretenure == TENURED) ? preferred_old_space : NEW_SPACE;
1733 } 1666 }
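A few illustrative cases of the decision above; the small byte sizes are hypothetical, chosen relative to Page::kMaxRegularHeapObjectSize:

SelectSpace(32, OLD_DATA_SPACE, TENURED);         // == OLD_DATA_SPACE
SelectSpace(32, OLD_POINTER_SPACE, NOT_TENURED);  // == NEW_SPACE
SelectSpace(Page::kMaxRegularHeapObjectSize + 1,
            OLD_DATA_SPACE, TENURED);             // == LO_SPACE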
1734 1667
1735 // Allocate an uninitialized object. The memory is non-executable if the 1668 // Allocate an uninitialized object. The memory is non-executable if the
1736 // hardware and OS allow. This is the single choke-point for allocations 1669 // hardware and OS allow. This is the single choke-point for allocations
1737 // performed by the runtime and should not be bypassed (to extend this to 1670 // performed by the runtime and should not be bypassed (to extend this to
1738 // inlined allocations, use the Heap::DisableInlineAllocation() support). 1671 // inlined allocations, use the Heap::DisableInlineAllocation() support).
1739 MUST_USE_RESULT inline AllocationResult AllocateRaw( 1672 MUST_USE_RESULT inline AllocationResult AllocateRaw(
1740 int size_in_bytes, 1673 int size_in_bytes, AllocationSpace space, AllocationSpace retry_space);
1741 AllocationSpace space,
1742 AllocationSpace retry_space);
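Callers of this choke-point typically unpack the AllocationResult and propagate a retry-after-GC failure upward. A hedged sketch of that pattern; initialization of the new object is elided:

// Sketch only: allocate in new space, falling back to old pointer space.
HeapObject* result;
AllocationResult allocation =
    AllocateRaw(size_in_bytes, NEW_SPACE, OLD_POINTER_SPACE);
if (!allocation.To(&result)) return allocation;  // propagate the retry
// ... set the map word and initialize the fields of result ...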
1743 1674
1744 // Allocates a heap object based on the map. 1675 // Allocates a heap object based on the map.
1745 MUST_USE_RESULT AllocationResult Allocate( 1676 MUST_USE_RESULT AllocationResult
1746 Map* map, 1677 Allocate(Map* map, AllocationSpace space,
1747 AllocationSpace space, 1678 AllocationSite* allocation_site = NULL);
1748 AllocationSite* allocation_site = NULL);
1749 1679
1750 // Allocates a partial map for bootstrapping. 1680 // Allocates a partial map for bootstrapping.
1751 MUST_USE_RESULT AllocationResult AllocatePartialMap( 1681 MUST_USE_RESULT AllocationResult
1752 InstanceType instance_type, 1682 AllocatePartialMap(InstanceType instance_type, int instance_size);
1753 int instance_size);
1754 1683
1755 // Initializes a JSObject based on its map. 1684 // Initializes a JSObject based on its map.
1756 void InitializeJSObjectFromMap(JSObject* obj, 1685 void InitializeJSObjectFromMap(JSObject* obj, FixedArray* properties,
1757 FixedArray* properties,
1758 Map* map); 1686 Map* map);
1759 void InitializeAllocationMemento(AllocationMemento* memento, 1687 void InitializeAllocationMemento(AllocationMemento* memento,
1760 AllocationSite* allocation_site); 1688 AllocationSite* allocation_site);
1761 1689
1762 // Allocate a block of memory in the given space (filled with a filler). 1690 // Allocate a block of memory in the given space (filled with a filler).
1763 // Used as a fall-back for generated code when the space is full. 1691 // Used as a fall-back for generated code when the space is full.
1764 MUST_USE_RESULT AllocationResult AllocateFillerObject(int size, 1692 MUST_USE_RESULT AllocationResult
1765 bool double_align, 1693 AllocateFillerObject(int size, bool double_align, AllocationSpace space);
1766 AllocationSpace space);
1767 1694
1768 // Allocate an uninitialized fixed array. 1695 // Allocate an uninitialized fixed array.
1769 MUST_USE_RESULT AllocationResult AllocateRawFixedArray( 1696 MUST_USE_RESULT AllocationResult
1770 int length, PretenureFlag pretenure); 1697 AllocateRawFixedArray(int length, PretenureFlag pretenure);
1771 1698
1772 // Allocate an uninitialized fixed double array. 1699 // Allocate an uninitialized fixed double array.
1773 MUST_USE_RESULT AllocationResult AllocateRawFixedDoubleArray( 1700 MUST_USE_RESULT AllocationResult
1774 int length, PretenureFlag pretenure); 1701 AllocateRawFixedDoubleArray(int length, PretenureFlag pretenure);
1775 1702
1776 // Allocate an initialized fixed array with the given filler value. 1703 // Allocate an initialized fixed array with the given filler value.
1777 MUST_USE_RESULT AllocationResult AllocateFixedArrayWithFiller( 1704 MUST_USE_RESULT AllocationResult
1778 int length, PretenureFlag pretenure, Object* filler); 1705 AllocateFixedArrayWithFiller(int length, PretenureFlag pretenure,
1706 Object* filler);
1779 1707
1780 // Allocates and partially initializes a String. There are two String 1708 // Allocates and partially initializes a String. There are two String
1781 // encodings: ASCII and two byte. These functions allocate a string of the 1709 // encodings: ASCII and two byte. These functions allocate a string of the
1782 // given length and set its map and length fields. The characters of the 1710 // given length and set its map and length fields. The characters of the
1783 // string are uninitialized. 1711 // string are uninitialized.
1784 MUST_USE_RESULT AllocationResult AllocateRawOneByteString( 1712 MUST_USE_RESULT AllocationResult
1785 int length, PretenureFlag pretenure); 1713 AllocateRawOneByteString(int length, PretenureFlag pretenure);
1786 MUST_USE_RESULT AllocationResult AllocateRawTwoByteString( 1714 MUST_USE_RESULT AllocationResult
1787 int length, PretenureFlag pretenure); 1715 AllocateRawTwoByteString(int length, PretenureFlag pretenure);
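Because the characters are left uninitialized, the caller must write every character before the string becomes visible. A hedged sketch, assuming the SeqOneByteString accessors from the objects API:

// Sketch only: allocate a raw one-byte string and fill it with 'x'.
HeapObject* result;
AllocationResult allocation = AllocateRawOneByteString(length, NOT_TENURED);
if (!allocation.To(&result)) return allocation;
SeqOneByteString* answer = SeqOneByteString::cast(result);
for (int i = 0; i < length; i++) answer->SeqOneByteStringSet(i, 'x');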
1788 1716
1789 bool CreateInitialMaps(); 1717 bool CreateInitialMaps();
1790 void CreateInitialObjects(); 1718 void CreateInitialObjects();
1791 1719
1792 // Allocates an internalized string in old space based on the character 1720 // Allocates an internalized string in old space based on the character
1793 // stream. 1721 // stream.
1794 MUST_USE_RESULT inline AllocationResult AllocateInternalizedStringFromUtf8( 1722 MUST_USE_RESULT inline AllocationResult AllocateInternalizedStringFromUtf8(
1795 Vector<const char> str, 1723 Vector<const char> str, int chars, uint32_t hash_field);
1796 int chars,
1797 uint32_t hash_field);
1798 1724
1799 MUST_USE_RESULT inline AllocationResult AllocateOneByteInternalizedString( 1725 MUST_USE_RESULT inline AllocationResult AllocateOneByteInternalizedString(
1800 Vector<const uint8_t> str, 1726 Vector<const uint8_t> str, uint32_t hash_field);
1801 uint32_t hash_field);
1802 1727
1803 MUST_USE_RESULT inline AllocationResult AllocateTwoByteInternalizedString( 1728 MUST_USE_RESULT inline AllocationResult AllocateTwoByteInternalizedString(
1804 Vector<const uc16> str, 1729 Vector<const uc16> str, uint32_t hash_field);
1805 uint32_t hash_field);
1806 1730
1807 template<bool is_one_byte, typename T> 1731 template <bool is_one_byte, typename T>
1808 MUST_USE_RESULT AllocationResult AllocateInternalizedStringImpl( 1732 MUST_USE_RESULT AllocationResult
1809 T t, int chars, uint32_t hash_field); 1733 AllocateInternalizedStringImpl(T t, int chars, uint32_t hash_field);
1810 1734
1811 template<typename T> 1735 template <typename T>
1812 MUST_USE_RESULT inline AllocationResult AllocateInternalizedStringImpl( 1736 MUST_USE_RESULT inline AllocationResult AllocateInternalizedStringImpl(
1813 T t, int chars, uint32_t hash_field); 1737 T t, int chars, uint32_t hash_field);
1814 1738
1815 // Allocates an uninitialized fixed array. It must be filled by the caller. 1739 // Allocates an uninitialized fixed array. It must be filled by the caller.
1816 MUST_USE_RESULT AllocationResult AllocateUninitializedFixedArray(int length); 1740 MUST_USE_RESULT AllocationResult AllocateUninitializedFixedArray(int length);
1817 1741
1818 // Make a copy of src and return it. Returns 1742 // Make a copy of src and return it. Returns
1819 // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed. 1743 // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
1820 MUST_USE_RESULT inline AllocationResult CopyFixedArray(FixedArray* src); 1744 MUST_USE_RESULT inline AllocationResult CopyFixedArray(FixedArray* src);
1821 1745
1822 // Make a copy of src, set the map, and return the copy. Returns 1746 // Make a copy of src, set the map, and return the copy. Returns
1823 // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed. 1747 // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
1824 MUST_USE_RESULT AllocationResult CopyFixedArrayWithMap(FixedArray* src, 1748 MUST_USE_RESULT AllocationResult
1825 Map* map); 1749 CopyFixedArrayWithMap(FixedArray* src, Map* map);
1826 1750
1827 // Make a copy of src and return it. Returns 1751 // Make a copy of src and return it. Returns
1828 // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed. 1752 // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
1829 MUST_USE_RESULT inline AllocationResult CopyFixedDoubleArray( 1753 MUST_USE_RESULT inline AllocationResult CopyFixedDoubleArray(
1830 FixedDoubleArray* src); 1754 FixedDoubleArray* src);
1831 1755
1832 // Make a copy of src and return it. Returns 1756 // Make a copy of src and return it. Returns
1833 // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed. 1757 // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
1834 MUST_USE_RESULT inline AllocationResult CopyConstantPoolArray( 1758 MUST_USE_RESULT inline AllocationResult CopyConstantPoolArray(
1835 ConstantPoolArray* src); 1759 ConstantPoolArray* src);
1836 1760
1837 1761
1838 // Computes a single-character string where the character has the given code. 1762 // Computes a single-character string where the character has the given code.
1839 // A cache is used for ASCII codes. 1763 // A cache is used for ASCII codes.
1840 MUST_USE_RESULT AllocationResult LookupSingleCharacterStringFromCode( 1764 MUST_USE_RESULT AllocationResult
1841 uint16_t code); 1765 LookupSingleCharacterStringFromCode(uint16_t code);
1842 1766
1843 // Allocate a symbol in old space. 1767 // Allocate a symbol in old space.
1844 MUST_USE_RESULT AllocationResult AllocateSymbol(); 1768 MUST_USE_RESULT AllocationResult AllocateSymbol();
1845 1769
1846 // Make a copy of src, set the map, and return the copy. 1770 // Make a copy of src, set the map, and return the copy.
1847 MUST_USE_RESULT AllocationResult CopyConstantPoolArrayWithMap( 1771 MUST_USE_RESULT AllocationResult
1848 ConstantPoolArray* src, Map* map); 1772 CopyConstantPoolArrayWithMap(ConstantPoolArray* src, Map* map);
1849 1773
1850 MUST_USE_RESULT AllocationResult AllocateConstantPoolArray( 1774 MUST_USE_RESULT AllocationResult AllocateConstantPoolArray(
1851 const ConstantPoolArray::NumberOfEntries& small); 1775 const ConstantPoolArray::NumberOfEntries& small);
1852 1776
1853 MUST_USE_RESULT AllocationResult AllocateExtendedConstantPoolArray( 1777 MUST_USE_RESULT AllocationResult AllocateExtendedConstantPoolArray(
1854 const ConstantPoolArray::NumberOfEntries& small, 1778 const ConstantPoolArray::NumberOfEntries& small,
1855 const ConstantPoolArray::NumberOfEntries& extended); 1779 const ConstantPoolArray::NumberOfEntries& extended);
1856 1780
1857 // Allocates an external array of the specified length and type. 1781 // Allocates an external array of the specified length and type.
1858 MUST_USE_RESULT AllocationResult AllocateExternalArray( 1782 MUST_USE_RESULT AllocationResult
1859 int length, 1783 AllocateExternalArray(int length, ExternalArrayType array_type,
1860 ExternalArrayType array_type, 1784 void* external_pointer, PretenureFlag pretenure);
1861 void* external_pointer,
1862 PretenureFlag pretenure);
1863 1785
1864 // Allocates a fixed typed array of the specified length and type. 1786 // Allocates a fixed typed array of the specified length and type.
1865 MUST_USE_RESULT AllocationResult AllocateFixedTypedArray( 1787 MUST_USE_RESULT AllocationResult
1866 int length, 1788 AllocateFixedTypedArray(int length, ExternalArrayType array_type,
1867 ExternalArrayType array_type, 1789 PretenureFlag pretenure);
1868 PretenureFlag pretenure);
1869 1790
1870 // Make a copy of src and return it. 1791 // Make a copy of src and return it.
1871 MUST_USE_RESULT AllocationResult CopyAndTenureFixedCOWArray(FixedArray* src); 1792 MUST_USE_RESULT AllocationResult CopyAndTenureFixedCOWArray(FixedArray* src);
1872 1793
1873 // Make a copy of src, set the map, and return the copy. 1794 // Make a copy of src, set the map, and return the copy.
1874 MUST_USE_RESULT AllocationResult CopyFixedDoubleArrayWithMap( 1795 MUST_USE_RESULT AllocationResult
1875 FixedDoubleArray* src, Map* map); 1796 CopyFixedDoubleArrayWithMap(FixedDoubleArray* src, Map* map);
1876 1797
1877 // Allocates a fixed double array with uninitialized values. 1799 // Allocates a fixed double array with uninitialized values.
1878 MUST_USE_RESULT AllocationResult AllocateUninitializedFixedDoubleArray( 1799 MUST_USE_RESULT AllocationResult AllocateUninitializedFixedDoubleArray(
1879 int length, 1800 int length, PretenureFlag pretenure = NOT_TENURED);
1880 PretenureFlag pretenure = NOT_TENURED);
1881 1801
1882 // These two Create*EntryStub functions are here and forced not to be inlined 1802 // These two Create*EntryStub functions are here and forced not to be inlined
1883 // because of a gcc-4.4 bug that assigns wrong vtable entries. 1803 // because of a gcc-4.4 bug that assigns wrong vtable entries.
1884 NO_INLINE(void CreateJSEntryStub()); 1804 NO_INLINE(void CreateJSEntryStub());
1885 NO_INLINE(void CreateJSConstructEntryStub()); 1805 NO_INLINE(void CreateJSConstructEntryStub());
1886 1806
1887 void CreateFixedStubs(); 1807 void CreateFixedStubs();
1888 1808
1889 // Allocate empty fixed array. 1809 // Allocate empty fixed array.
1890 MUST_USE_RESULT AllocationResult AllocateEmptyFixedArray(); 1810 MUST_USE_RESULT AllocationResult AllocateEmptyFixedArray();
1891 1811
1892 // Allocate empty external array of given type. 1812 // Allocate empty external array of given type.
1893 MUST_USE_RESULT AllocationResult AllocateEmptyExternalArray( 1813 MUST_USE_RESULT AllocationResult
1894 ExternalArrayType array_type); 1814 AllocateEmptyExternalArray(ExternalArrayType array_type);
1895 1815
1896 // Allocate empty fixed typed array of given type. 1816 // Allocate empty fixed typed array of given type.
1897 MUST_USE_RESULT AllocationResult AllocateEmptyFixedTypedArray( 1817 MUST_USE_RESULT AllocationResult
1898 ExternalArrayType array_type); 1818 AllocateEmptyFixedTypedArray(ExternalArrayType array_type);
1899 1819
1900 // Allocate empty constant pool array. 1820 // Allocate empty constant pool array.
1901 MUST_USE_RESULT AllocationResult AllocateEmptyConstantPoolArray(); 1821 MUST_USE_RESULT AllocationResult AllocateEmptyConstantPoolArray();
1902 1822
1903 // Allocate a tenured simple cell. 1823 // Allocate a tenured simple cell.
1904 MUST_USE_RESULT AllocationResult AllocateCell(Object* value); 1824 MUST_USE_RESULT AllocationResult AllocateCell(Object* value);
1905 1825
1906 // Allocate a tenured JS global property cell initialized with the hole. 1826 // Allocate a tenured JS global property cell initialized with the hole.
1907 MUST_USE_RESULT AllocationResult AllocatePropertyCell(); 1827 MUST_USE_RESULT AllocationResult AllocatePropertyCell();
1908 1828
1909 // Allocates a new utility object in the old generation. 1829 // Allocates a new utility object in the old generation.
1910 MUST_USE_RESULT AllocationResult AllocateStruct(InstanceType type); 1830 MUST_USE_RESULT AllocationResult AllocateStruct(InstanceType type);
1911 1831
1912 // Allocates a new foreign object. 1832 // Allocates a new foreign object.
1913 MUST_USE_RESULT AllocationResult AllocateForeign( 1833 MUST_USE_RESULT AllocationResult
1914 Address address, PretenureFlag pretenure = NOT_TENURED); 1834 AllocateForeign(Address address, PretenureFlag pretenure = NOT_TENURED);
1915 1835
1916 MUST_USE_RESULT AllocationResult AllocateCode(int object_size, 1836 MUST_USE_RESULT AllocationResult
1917 bool immovable); 1837 AllocateCode(int object_size, bool immovable);
1918 1838
1919 MUST_USE_RESULT AllocationResult InternalizeStringWithKey(HashTableKey* key); 1839 MUST_USE_RESULT AllocationResult InternalizeStringWithKey(HashTableKey* key);
1920 1840
1921 MUST_USE_RESULT AllocationResult InternalizeString(String* str); 1841 MUST_USE_RESULT AllocationResult InternalizeString(String* str);
1922 1842
1923 // Performs a minor collection in the new generation. 1843 // Performs a minor collection in the new generation.
1924 void Scavenge(); 1844 void Scavenge();
1925 1845
1926 // Commits from space if it is uncommitted. 1846 // Commits from space if it is uncommitted.
1927 void EnsureFromSpaceIsCommitted(); 1847 void EnsureFromSpaceIsCommitted();
1928 1848
1929 // Uncommit unused semi space. 1849 // Uncommit unused semi space.
1930 bool UncommitFromSpace() { return new_space_.UncommitFromSpace(); } 1850 bool UncommitFromSpace() { return new_space_.UncommitFromSpace(); }
1931 1851
1932 // Fill in bogus values in from space 1852 // Fill in bogus values in from space
1933 void ZapFromSpace(); 1853 void ZapFromSpace();
1934 1854
1935 static String* UpdateNewSpaceReferenceInExternalStringTableEntry( 1855 static String* UpdateNewSpaceReferenceInExternalStringTableEntry(
1936 Heap* heap, 1856 Heap* heap, Object** pointer);
1937 Object** pointer);
1938 1857
1939 Address DoScavenge(ObjectVisitor* scavenge_visitor, Address new_space_front); 1858 Address DoScavenge(ObjectVisitor* scavenge_visitor, Address new_space_front);
1940 static void ScavengeStoreBufferCallback(Heap* heap, 1859 static void ScavengeStoreBufferCallback(Heap* heap, MemoryChunk* page,
1941 MemoryChunk* page,
1942 StoreBufferEvent event); 1860 StoreBufferEvent event);
1943 1861
1944 // Performs a major collection in the whole heap. 1862 // Performs a major collection in the whole heap.
1945 void MarkCompact(); 1863 void MarkCompact();
1946 1864
1947 // Code to be run before and after mark-compact. 1865 // Code to be run before and after mark-compact.
1948 void MarkCompactPrologue(); 1866 void MarkCompactPrologue();
1949 1867
1950 void ProcessNativeContexts(WeakObjectRetainer* retainer); 1868 void ProcessNativeContexts(WeakObjectRetainer* retainer);
1951 void ProcessArrayBuffers(WeakObjectRetainer* retainer); 1869 void ProcessArrayBuffers(WeakObjectRetainer* retainer);
(...skipping 55 matching lines...)
2007 int nodes_promoted_; 1925 int nodes_promoted_;
2008 1926
2009 // This is the pretenuring trigger for allocation sites that are in maybe 1927 // This is the pretenuring trigger for allocation sites that are in maybe
2010 // tenure state. When we switch to the maximum new space size, we deoptimize 1928 // tenure state. When we switch to the maximum new space size, we deoptimize
2011 // the code that belongs to the allocation site and derive the lifetime 1929 // the code that belongs to the allocation site and derive the lifetime
2012 // of the allocation site. 1930 // of the allocation site.
2013 unsigned int maximum_size_scavenges_; 1931 unsigned int maximum_size_scavenges_;
2014 1932
2015 // TODO(hpayer): Allocation site pretenuring may make this method obsolete. 1933 // TODO(hpayer): Allocation site pretenuring may make this method obsolete.
2016 // Re-visit incremental marking heuristics. 1934 // Re-visit incremental marking heuristics.
2017 bool IsHighSurvivalRate() { 1935 bool IsHighSurvivalRate() { return high_survival_rate_period_length_ > 0; }
2018 return high_survival_rate_period_length_ > 0;
2019 }
2020 1936
2021 void SelectScavengingVisitorsTable(); 1937 void SelectScavengingVisitorsTable();
2022 1938
2023 void StartIdleRound() { 1939 void StartIdleRound() { mark_sweeps_since_idle_round_started_ = 0; }
2024 mark_sweeps_since_idle_round_started_ = 0;
2025 }
2026 1940
2027 void FinishIdleRound() { 1941 void FinishIdleRound() {
2028 mark_sweeps_since_idle_round_started_ = kMaxMarkSweepsInIdleRound; 1942 mark_sweeps_since_idle_round_started_ = kMaxMarkSweepsInIdleRound;
2029 scavenges_since_last_idle_round_ = 0; 1943 scavenges_since_last_idle_round_ = 0;
2030 } 1944 }
2031 1945
2032 bool EnoughGarbageSinceLastIdleRound() { 1946 bool EnoughGarbageSinceLastIdleRound() {
2033 return (scavenges_since_last_idle_round_ >= kIdleScavengeThreshold); 1947 return (scavenges_since_last_idle_round_ >= kIdleScavengeThreshold);
2034 } 1948 }
2035 1949
(...skipping 122 matching lines...)
2158 2072
2159 DISALLOW_COPY_AND_ASSIGN(Heap); 2073 DISALLOW_COPY_AND_ASSIGN(Heap);
2160 }; 2074 };
2161 2075
2162 2076
2163 class HeapStats { 2077 class HeapStats {
2164 public: 2078 public:
2165 static const int kStartMarker = 0xDECADE00; 2079 static const int kStartMarker = 0xDECADE00;
2166 static const int kEndMarker = 0xDECADE01; 2080 static const int kEndMarker = 0xDECADE01;
2167 2081
2168 int* start_marker; // 0 2082 int* start_marker; // 0
2169 int* new_space_size; // 1 2083 int* new_space_size; // 1
2170 int* new_space_capacity; // 2 2084 int* new_space_capacity; // 2
2171 intptr_t* old_pointer_space_size; // 3 2085 intptr_t* old_pointer_space_size; // 3
2172 intptr_t* old_pointer_space_capacity; // 4 2086 intptr_t* old_pointer_space_capacity; // 4
2173 intptr_t* old_data_space_size; // 5 2087 intptr_t* old_data_space_size; // 5
2174 intptr_t* old_data_space_capacity; // 6 2088 intptr_t* old_data_space_capacity; // 6
2175 intptr_t* code_space_size; // 7 2089 intptr_t* code_space_size; // 7
2176 intptr_t* code_space_capacity; // 8 2090 intptr_t* code_space_capacity; // 8
2177 intptr_t* map_space_size; // 9 2091 intptr_t* map_space_size; // 9
2178 intptr_t* map_space_capacity; // 10 2092 intptr_t* map_space_capacity; // 10
2179 intptr_t* cell_space_size; // 11 2093 intptr_t* cell_space_size; // 11
2180 intptr_t* cell_space_capacity; // 12 2094 intptr_t* cell_space_capacity; // 12
2181 intptr_t* lo_space_size; // 13 2095 intptr_t* lo_space_size; // 13
2182 int* global_handle_count; // 14 2096 int* global_handle_count; // 14
2183 int* weak_global_handle_count; // 15 2097 int* weak_global_handle_count; // 15
2184 int* pending_global_handle_count; // 16 2098 int* pending_global_handle_count; // 16
2185 int* near_death_global_handle_count; // 17 2099 int* near_death_global_handle_count; // 17
2186 int* free_global_handle_count; // 18 2100 int* free_global_handle_count; // 18
2187 intptr_t* memory_allocator_size; // 19 2101 intptr_t* memory_allocator_size; // 19
2188 intptr_t* memory_allocator_capacity; // 20 2102 intptr_t* memory_allocator_capacity; // 20
2189 int* objects_per_type; // 21 2103 int* objects_per_type; // 21
2190 int* size_per_type; // 22 2104 int* size_per_type; // 22
2191 int* os_error; // 23 2105 int* os_error; // 23
2192 int* end_marker; // 24 2106 int* end_marker; // 24
2193 intptr_t* property_cell_space_size; // 25 2107 intptr_t* property_cell_space_size; // 25
2194 intptr_t* property_cell_space_capacity; // 26 2108 intptr_t* property_cell_space_capacity; // 26
2195 }; 2109 };
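The numbered slots above form a fixed record intended for crash dumps. A hedged sketch of a consumer validating the framing before trusting the payload; LooksLikeHeapStats is a hypothetical helper:

// Sketch only: check the record markers in a post-mortem reader.
bool LooksLikeHeapStats(const HeapStats& stats) {
  return *stats.start_marker == HeapStats::kStartMarker &&  // slot 0
         *stats.end_marker == HeapStats::kEndMarker;        // slot 24
}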
2196 2110
2197 2111
2198 class AlwaysAllocateScope { 2112 class AlwaysAllocateScope {
2199 public: 2113 public:
2200 explicit inline AlwaysAllocateScope(Isolate* isolate); 2114 explicit inline AlwaysAllocateScope(Isolate* isolate);
2201 inline ~AlwaysAllocateScope(); 2115 inline ~AlwaysAllocateScope();
2202 2116
2203 private: 2117 private:
2204 // Implicitly disable artificial allocation failures. 2118 // Implicitly disable artificial allocation failures.
(...skipping 21 matching lines...)
2226 private: 2140 private:
2227 Heap* heap_; 2141 Heap* heap_;
2228 }; 2142 };
2229 2143
2230 2144
2231 // Visitor class to verify interior pointers in spaces that do not contain 2145 // Visitor class to verify interior pointers in spaces that do not contain
2232 // or care about intergenerational references. All heap object pointers have to 2146 // or care about intergenerational references. All heap object pointers have to
2233 // point into the heap to a location that has a map pointer at its first word. 2147 // point into the heap to a location that has a map pointer at its first word.
2234 // Caveat: Heap::Contains is an approximation because it can return true for 2148 // Caveat: Heap::Contains is an approximation because it can return true for
2235 // objects in a heap space but above the allocation pointer. 2149 // objects in a heap space but above the allocation pointer.
2236 class VerifyPointersVisitor: public ObjectVisitor { 2150 class VerifyPointersVisitor : public ObjectVisitor {
2237 public: 2151 public:
2238 inline void VisitPointers(Object** start, Object** end); 2152 inline void VisitPointers(Object** start, Object** end);
2239 }; 2153 };
2240 2154
2241 2155
2242 // Verify that all objects are Smis. 2156 // Verify that all objects are Smis.
2243 class VerifySmisVisitor: public ObjectVisitor { 2157 class VerifySmisVisitor : public ObjectVisitor {
2244 public: 2158 public:
2245 inline void VisitPointers(Object** start, Object** end); 2159 inline void VisitPointers(Object** start, Object** end);
2246 }; 2160 };
2247 2161
2248 2162
2249 // Space iterator for iterating over all spaces of the heap. Returns each space 2163 // Space iterator for iterating over all spaces of the heap. Returns each space
2250 // in turn, and null when it is done. 2164 // in turn, and null when it is done.
2251 class AllSpaces BASE_EMBEDDED { 2165 class AllSpaces BASE_EMBEDDED {
2252 public: 2166 public:
2253 explicit AllSpaces(Heap* heap) : heap_(heap), counter_(FIRST_SPACE) {} 2167 explicit AllSpaces(Heap* heap) : heap_(heap), counter_(FIRST_SPACE) {}
2254 Space* next(); 2168 Space* next();
2169
2255 private: 2170 private:
2256 Heap* heap_; 2171 Heap* heap_;
2257 int counter_; 2172 int counter_;
2258 }; 2173 };
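Each of these space iterators follows the same next()-until-NULL protocol; a minimal usage sketch, assuming an existing Heap* heap:

// Sketch only: visit every space of the heap once.
AllSpaces spaces(heap);
for (Space* space = spaces.next(); space != NULL; space = spaces.next()) {
  // ... inspect space ...
}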
2259 2174
2260 2175
2261 // Space iterator for iterating over all old spaces of the heap: Old pointer 2176 // Space iterator for iterating over all old spaces of the heap: Old pointer
2262 // space, old data space and code space. Returns each space in turn, and null 2177 // space, old data space and code space. Returns each space in turn, and null
2263 // when it is done. 2178 // when it is done.
2264 class OldSpaces BASE_EMBEDDED { 2179 class OldSpaces BASE_EMBEDDED {
2265 public: 2180 public:
2266 explicit OldSpaces(Heap* heap) : heap_(heap), counter_(OLD_POINTER_SPACE) {} 2181 explicit OldSpaces(Heap* heap) : heap_(heap), counter_(OLD_POINTER_SPACE) {}
2267 OldSpace* next(); 2182 OldSpace* next();
2183
2268 private: 2184 private:
2269 Heap* heap_; 2185 Heap* heap_;
2270 int counter_; 2186 int counter_;
2271 }; 2187 };
2272 2188
2273 2189
2274 // Space iterator for iterating over all the paged spaces of the heap: Map 2190 // Space iterator for iterating over all the paged spaces of the heap: Map
2275 // space, old pointer space, old data space, code space and cell space. Returns 2191 // space, old pointer space, old data space, code space and cell space. Returns
2276 // each space in turn, and null when it is done. 2192 // each space in turn, and null when it is done.
2277 class PagedSpaces BASE_EMBEDDED { 2193 class PagedSpaces BASE_EMBEDDED {
2278 public: 2194 public:
2279 explicit PagedSpaces(Heap* heap) : heap_(heap), counter_(OLD_POINTER_SPACE) {} 2195 explicit PagedSpaces(Heap* heap) : heap_(heap), counter_(OLD_POINTER_SPACE) {}
2280 PagedSpace* next(); 2196 PagedSpace* next();
2197
2281 private: 2198 private:
2282 Heap* heap_; 2199 Heap* heap_;
2283 int counter_; 2200 int counter_;
2284 }; 2201 };
2285 2202
2286 2203
2287 // Space iterator for iterating over all spaces of the heap. 2204 // Space iterator for iterating over all spaces of the heap.
2288 // For each space an object iterator is provided. The deallocation of the 2205 // For each space an object iterator is provided. The deallocation of the
2289 // returned object iterators is handled by the space iterator. 2206 // returned object iterators is handled by the space iterator.
2290 class SpaceIterator : public Malloced { 2207 class SpaceIterator : public Malloced {
2291 public: 2208 public:
2292 explicit SpaceIterator(Heap* heap); 2209 explicit SpaceIterator(Heap* heap);
2293 SpaceIterator(Heap* heap, HeapObjectCallback size_func); 2210 SpaceIterator(Heap* heap, HeapObjectCallback size_func);
2294 virtual ~SpaceIterator(); 2211 virtual ~SpaceIterator();
2295 2212
2296 bool has_next(); 2213 bool has_next();
2297 ObjectIterator* next(); 2214 ObjectIterator* next();
2298 2215
2299 private: 2216 private:
2300 ObjectIterator* CreateIterator(); 2217 ObjectIterator* CreateIterator();
2301 2218
2302 Heap* heap_; 2219 Heap* heap_;
2303 int current_space_; // from enum AllocationSpace. 2220 int current_space_; // from enum AllocationSpace.
2304 ObjectIterator* iterator_; // object iterator for the current space. 2221 ObjectIterator* iterator_; // object iterator for the current space.
2305 HeapObjectCallback size_func_; 2222 HeapObjectCallback size_func_;
2306 }; 2223 };
2307 2224
2308 2225
2309 // A HeapIterator provides iteration over the whole heap. It 2226 // A HeapIterator provides iteration over the whole heap. It
2310 // aggregates the specific iterators for the different spaces as 2227 // aggregates the specific iterators for the different spaces as
2311 // these can iterate over only one space. 2228 // these can iterate over only one space.
2312 // 2229 //
2313 // HeapIterator ensures there is no allocation during its lifetime 2230 // HeapIterator ensures there is no allocation during its lifetime
2314 // (using an embedded DisallowHeapAllocation instance). 2231 // (using an embedded DisallowHeapAllocation instance).
2315 // 2232 //
2316 // HeapIterator can skip free list nodes (that is, de-allocated heap 2233 // HeapIterator can skip free list nodes (that is, de-allocated heap
2317 // objects that still remain in the heap). As the implementation of 2234 // objects that still remain in the heap). As the implementation of
2318 // free-node filtering uses GC marks, it can't be used during MS/MC GC 2235 // free-node filtering uses GC marks, it can't be used during MS/MC GC
2319 // phases. Also, it is forbidden to interrupt iteration in this mode, 2236 // phases. Also, it is forbidden to interrupt iteration in this mode,
2320 // as this will leave heap objects marked (and thus, unusable). 2237 // as this will leave heap objects marked (and thus, unusable).
2321 class HeapObjectsFilter; 2238 class HeapObjectsFilter;
2322 2239
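The HeapIterator declared below follows the same protocol; a hedged usage sketch that honors the no-allocation constraint described above, assuming an existing Heap* heap:

// Sketch only: walk all live objects, skipping free-list nodes.
HeapIterator iterator(heap, HeapIterator::kFilterUnreachable);
for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
  // No allocation may happen in here; the iterator embeds a
  // DisallowHeapAllocation scope for its whole lifetime.
}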
2323 class HeapIterator BASE_EMBEDDED { 2240 class HeapIterator BASE_EMBEDDED {
2324 public: 2241 public:
2325 enum HeapObjectsFiltering { 2242 enum HeapObjectsFiltering { kNoFiltering, kFilterUnreachable };
2326 kNoFiltering,
2327 kFilterUnreachable
2328 };
2329 2243
2330 explicit HeapIterator(Heap* heap); 2244 explicit HeapIterator(Heap* heap);
2331 HeapIterator(Heap* heap, HeapObjectsFiltering filtering); 2245 HeapIterator(Heap* heap, HeapObjectsFiltering filtering);
2332 ~HeapIterator(); 2246 ~HeapIterator();
2333 2247
2334 HeapObject* next(); 2248 HeapObject* next();
2335 void reset(); 2249 void reset();
2336 2250
2337 private: 2251 private:
2338 struct MakeHeapIterableHelper { 2252 struct MakeHeapIterableHelper {
(...skipping 51 matching lines...)
2390 keys_[i].map = NULL; 2304 keys_[i].map = NULL;
2391 keys_[i].name = NULL; 2305 keys_[i].name = NULL;
2392 field_offsets_[i] = kNotFound; 2306 field_offsets_[i] = kNotFound;
2393 } 2307 }
2394 } 2308 }
2395 2309
2396 static inline int Hash(Handle<Map> map, Handle<Name> name); 2310 static inline int Hash(Handle<Map> map, Handle<Name> name);
2397 2311
2398 // Get the address of the keys and field_offsets arrays. Used in 2312 // Get the address of the keys and field_offsets arrays. Used in
2399 // generated code to perform cache lookups. 2313 // generated code to perform cache lookups.
2400 Address keys_address() { 2314 Address keys_address() { return reinterpret_cast<Address>(&keys_); }
2401 return reinterpret_cast<Address>(&keys_);
2402 }
2403 2315
2404 Address field_offsets_address() { 2316 Address field_offsets_address() {
2405 return reinterpret_cast<Address>(&field_offsets_); 2317 return reinterpret_cast<Address>(&field_offsets_);
2406 } 2318 }
2407 2319
2408 struct Key { 2320 struct Key {
2409 Map* map; 2321 Map* map;
2410 Name* name; 2322 Name* name;
2411 }; 2323 };
2412 2324
(...skipping 44 matching lines...)
2457 for (int i = 0; i < kLength; ++i) { 2369 for (int i = 0; i < kLength; ++i) {
2458 keys_[i].source = NULL; 2370 keys_[i].source = NULL;
2459 keys_[i].name = NULL; 2371 keys_[i].name = NULL;
2460 results_[i] = kAbsent; 2372 results_[i] = kAbsent;
2461 } 2373 }
2462 } 2374 }
2463 2375
2464 static int Hash(Object* source, Name* name) { 2376 static int Hash(Object* source, Name* name) {
2465 // Uses only the lower 32 bits if pointers are larger. 2377 // Uses only the lower 32 bits if pointers are larger.
2466 uint32_t source_hash = 2378 uint32_t source_hash =
2467 static_cast<uint32_t>(reinterpret_cast<uintptr_t>(source)) 2379 static_cast<uint32_t>(reinterpret_cast<uintptr_t>(source)) >>
2468 >> kPointerSizeLog2; 2380 kPointerSizeLog2;
2469 uint32_t name_hash = 2381 uint32_t name_hash =
2470 static_cast<uint32_t>(reinterpret_cast<uintptr_t>(name)) 2382 static_cast<uint32_t>(reinterpret_cast<uintptr_t>(name)) >>
2471 >> kPointerSizeLog2; 2383 kPointerSizeLog2;
2472 return (source_hash ^ name_hash) % kLength; 2384 return (source_hash ^ name_hash) % kLength;
2473 } 2385 }
2474 2386
2475 static const int kLength = 64; 2387 static const int kLength = 64;
2476 struct Key { 2388 struct Key {
2477 Map* source; 2389 Map* source;
2478 Name* name; 2390 Name* name;
2479 }; 2391 };
2480 2392
2481 Key keys_[kLength]; 2393 Key keys_[kLength];
2482 int results_[kLength]; 2394 int results_[kLength];
2483 2395
2484 friend class Isolate; 2396 friend class Isolate;
2485 DISALLOW_COPY_AND_ASSIGN(DescriptorLookupCache); 2397 DISALLOW_COPY_AND_ASSIGN(DescriptorLookupCache);
2486 }; 2398 };
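As a worked example of the Hash function above, with hypothetical 64-bit pointers (kPointerSizeLog2 == 3): source == 0x1000 gives source_hash 0x200, name == 0x2040 gives name_hash 0x408, and (0x200 ^ 0x408) % kLength == 0x608 % 64 == 8, so the pair lands in slot 8.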
2487 2399
2488 2400
2489 class RegExpResultsCache { 2401 class RegExpResultsCache {
2490 public: 2402 public:
2491 enum ResultsCacheType { REGEXP_MULTIPLE_INDICES, STRING_SPLIT_SUBSTRINGS }; 2403 enum ResultsCacheType { REGEXP_MULTIPLE_INDICES, STRING_SPLIT_SUBSTRINGS };
2492 2404
2493 // Attempt to retrieve a cached result. On failure, 0 is returned as a Smi. 2405 // Attempt to retrieve a cached result. On failure, 0 is returned as a Smi.
2494 // On success, the returned result is guaranteed to be a COW-array. 2406 // On success, the returned result is guaranteed to be a COW-array.
2495 static Object* Lookup(Heap* heap, 2407 static Object* Lookup(Heap* heap, String* key_string, Object* key_pattern,
2496 String* key_string,
2497 Object* key_pattern,
2498 ResultsCacheType type); 2408 ResultsCacheType type);
2499 // Attempt to add value_array to the cache specified by type. On success, 2409 // Attempt to add value_array to the cache specified by type. On success,
2500 // value_array is turned into a COW-array. 2410 // value_array is turned into a COW-array.
2501 static void Enter(Isolate* isolate, 2411 static void Enter(Isolate* isolate, Handle<String> key_string,
2502 Handle<String> key_string, 2412 Handle<Object> key_pattern, Handle<FixedArray> value_array,
2503 Handle<Object> key_pattern,
2504 Handle<FixedArray> value_array,
2505 ResultsCacheType type); 2413 ResultsCacheType type);
2506 static void Clear(FixedArray* cache); 2414 static void Clear(FixedArray* cache);
2507 static const int kRegExpResultsCacheSize = 0x100; 2415 static const int kRegExpResultsCacheSize = 0x100;
2508 2416
2509 private: 2417 private:
2510 static const int kArrayEntriesPerCacheEntry = 4; 2418 static const int kArrayEntriesPerCacheEntry = 4;
2511 static const int kStringOffset = 0; 2419 static const int kStringOffset = 0;
2512 static const int kPatternOffset = 1; 2420 static const int kPatternOffset = 1;
2513 static const int kArrayOffset = 2; 2421 static const int kArrayOffset = 2;
2514 }; 2422 };
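A hedged sketch of the lookup-then-enter discipline implied by the comments above; subject (Handle&lt;String&gt;), pattern (Handle&lt;Object&gt;), and results (Handle&lt;FixedArray&gt;) are hypothetical:

// Sketch only: consult the cache first; a miss returns Smi 0.
Object* cached = RegExpResultsCache::Lookup(
    heap, *subject, *pattern, RegExpResultsCache::REGEXP_MULTIPLE_INDICES);
if (!cached->IsFixedArray()) {
  // ... compute results, then publish them as a COW array ...
  RegExpResultsCache::Enter(isolate, subject, pattern, results,
                            RegExpResultsCache::REGEXP_MULTIPLE_INDICES);
}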
(...skipping 61 matching lines...)
2576 FIND_ALL, // Will find all matches. 2484 FIND_ALL, // Will find all matches.
2577 FIND_FIRST // Will stop the search after the first match. 2485 FIND_FIRST // Will stop the search after the first match.
2578 }; 2486 };
2579 2487
2580 // Tags 0, 1, and 3 are used. Use 2 for marking a visited HeapObject. 2488 // Tags 0, 1, and 3 are used. Use 2 for marking a visited HeapObject.
2581 static const int kMarkTag = 2; 2489 static const int kMarkTag = 2;
2582 2490
2583 // For the WhatToFind arg, if FIND_FIRST is specified, tracing will stop 2491 // For the WhatToFind arg, if FIND_FIRST is specified, tracing will stop
2584 // after the first match. If FIND_ALL is specified, then tracing will be 2492 // after the first match. If FIND_ALL is specified, then tracing will be
2585 // done for all matches. 2493 // done for all matches.
2586 PathTracer(Object* search_target, 2494 PathTracer(Object* search_target, WhatToFind what_to_find,
2587 WhatToFind what_to_find,
2588 VisitMode visit_mode) 2495 VisitMode visit_mode)
2589 : search_target_(search_target), 2496 : search_target_(search_target),
2590 found_target_(false), 2497 found_target_(false),
2591 found_target_in_trace_(false), 2498 found_target_in_trace_(false),
2592 what_to_find_(what_to_find), 2499 what_to_find_(what_to_find),
2593 visit_mode_(visit_mode), 2500 visit_mode_(visit_mode),
2594 object_stack_(20), 2501 object_stack_(20),
2595 no_allocation() {} 2502 no_allocation() {}
2596 2503
2597 virtual void VisitPointers(Object** start, Object** end); 2504 virtual void VisitPointers(Object** start, Object** end);
(...skipping 19 matching lines...)
2617 WhatToFind what_to_find_; 2524 WhatToFind what_to_find_;
2618 VisitMode visit_mode_; 2525 VisitMode visit_mode_;
2619 List<Object*> object_stack_; 2526 List<Object*> object_stack_;
2620 2527
2621 DisallowHeapAllocation no_allocation; // i.e. no gc allowed. 2528 DisallowHeapAllocation no_allocation; // i.e. no gc allowed.
2622 2529
2623 private: 2530 private:
2624 DISALLOW_IMPLICIT_CONSTRUCTORS(PathTracer); 2531 DISALLOW_IMPLICIT_CONSTRUCTORS(PathTracer);
2625 }; 2532 };
2626 #endif // DEBUG 2533 #endif // DEBUG
2534 }
2535 } // namespace v8::internal
2627 2536
2628 } } // namespace v8::internal 2537 #endif // V8_HEAP_HEAP_H_
2629
2630 #endif // V8_HEAP_H_