Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(369)

Side by Side Diff: src/heap.h

Issue 6880010: Merge (7265, 7271] from bleeding_edge to experimental/gc branch.... (Closed) Base URL: http://v8.googlecode.com/svn/branches/experimental/gc/
Patch Set: '' Created 9 years, 8 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« no previous file with comments | « src/hashmap.h ('k') | src/heap.cc » ('j') | src/heap.cc » ('J')
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2011 the V8 project authors. All rights reserved. 1 // Copyright 2011 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 13 matching lines...) Expand all
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 27
28 #ifndef V8_HEAP_H_ 28 #ifndef V8_HEAP_H_
29 #define V8_HEAP_H_ 29 #define V8_HEAP_H_
30 30
31 #include <math.h> 31 #include <math.h>
32 32
33 #include "globals.h" 33 #include "globals.h"
34 #include "incremental-marking.h"
34 #include "list.h" 35 #include "list.h"
36 #include "mark-compact.h"
35 #include "spaces.h" 37 #include "spaces.h"
36 #include "splay-tree-inl.h" 38 #include "splay-tree-inl.h"
39 #include "store-buffer.h"
37 #include "v8-counters.h" 40 #include "v8-counters.h"
38 #include "v8globals.h" 41 #include "v8globals.h"
39 42
40 namespace v8 { 43 namespace v8 {
41 namespace internal { 44 namespace internal {
42 45
46 // TODO(isolates): remove HEAP here
47 #define HEAP (_inline_get_heap_())
48 class Heap;
49 inline Heap* _inline_get_heap_();
50
43 51
44 // Defines all the roots in Heap. 52 // Defines all the roots in Heap.
45 #define STRONG_ROOT_LIST(V) \ 53 #define STRONG_ROOT_LIST(V) \
46 V(Map, byte_array_map, ByteArrayMap) \ 54 V(Map, byte_array_map, ByteArrayMap) \
47 V(Map, free_space_map, FreeSpaceMap) \ 55 V(Map, free_space_map, FreeSpaceMap) \
48 V(Map, one_pointer_filler_map, OnePointerFillerMap) \ 56 V(Map, one_pointer_filler_map, OnePointerFillerMap) \
49 V(Map, two_pointer_filler_map, TwoPointerFillerMap) \ 57 V(Map, two_pointer_filler_map, TwoPointerFillerMap) \
50 /* Cluster the most popular ones in a few cache lines here at the top. */ \ 58 /* Cluster the most popular ones in a few cache lines here at the top. */ \
51 V(Smi, store_buffer_top, StoreBufferTop) \ 59 V(Smi, store_buffer_top, StoreBufferTop) \
52 V(Object, undefined_value, UndefinedValue) \ 60 V(Object, undefined_value, UndefinedValue) \
(...skipping 166 matching lines...) Expand 10 before | Expand all | Expand 10 after
219 "KeyedStoreExternalUnsignedShortArray") \ 227 "KeyedStoreExternalUnsignedShortArray") \
220 V(KeyedStoreExternalIntArray_symbol, "KeyedStoreExternalIntArray") \ 228 V(KeyedStoreExternalIntArray_symbol, "KeyedStoreExternalIntArray") \
221 V(KeyedStoreExternalUnsignedIntArray_symbol, \ 229 V(KeyedStoreExternalUnsignedIntArray_symbol, \
222 "KeyedStoreExternalUnsignedIntArray") \ 230 "KeyedStoreExternalUnsignedIntArray") \
223 V(KeyedStoreExternalFloatArray_symbol, "KeyedStoreExternalFloatArray") \ 231 V(KeyedStoreExternalFloatArray_symbol, "KeyedStoreExternalFloatArray") \
224 V(KeyedStoreExternalPixelArray_symbol, "KeyedStoreExternalPixelArray") 232 V(KeyedStoreExternalPixelArray_symbol, "KeyedStoreExternalPixelArray")
225 233
226 // Forward declarations. 234 // Forward declarations.
227 class GCTracer; 235 class GCTracer;
228 class HeapStats; 236 class HeapStats;
237 class Isolate;
229 class WeakObjectRetainer; 238 class WeakObjectRetainer;
230 239
231 240
232 typedef String* (*ExternalStringTableUpdaterCallback)(Object** pointer); 241 typedef String* (*ExternalStringTableUpdaterCallback)(Heap* heap,
242 Object** pointer);
233 243
234 typedef void (*PointerRegionCallback)(Address start, 244 typedef void (*PointerRegionCallback)(Heap* heap,
245 Address start,
235 Address end, 246 Address end,
236 ObjectSlotCallback copy_object_func); 247 ObjectSlotCallback copy_object_func);
237 248
249 class StoreBufferRebuilder {
250 public:
251 explicit StoreBufferRebuilder(StoreBuffer* store_buffer)
252 : store_buffer_(store_buffer) {
253 }
238 254
239 class StoreBufferRebuilder : public AllStatic {
240 public:
241 void Callback(MemoryChunk* page, StoreBufferEvent event); 255 void Callback(MemoryChunk* page, StoreBufferEvent event);
242 256
243 private: 257 private:
258 StoreBuffer* store_buffer_;
259
244 // We record in this variable how full the store buffer was when we started 260 // We record in this variable how full the store buffer was when we started
245 // iterating over the current page, finding pointers to new space. If the 261 // iterating over the current page, finding pointers to new space. If the
246 // store buffer overflows again we can exempt the page from the store buffer 262 // store buffer overflows again we can exempt the page from the store buffer
247 // by rewinding to this point instead of having to search the store buffer. 263 // by rewinding to this point instead of having to search the store buffer.
248 Object*** start_of_current_page_; 264 Object*** start_of_current_page_;
249 // The current page we are scanning in the store buffer iterator. 265 // The current page we are scanning in the store buffer iterator.
250 MemoryChunk* current_page_; 266 MemoryChunk* current_page_;
251 }; 267 };
252 268
253 269
254 270
255 // The all static Heap captures the interface to the global object heap. 271 // The all static Heap captures the interface to the global object heap.
256 // All JavaScript contexts by this process share the same object heap. 272 // All JavaScript contexts by this process share the same object heap.
Erik Corry 2011/04/20 20:07:40 This comment is out of date, but that's a bug on bl
257 273
258 class Heap : public AllStatic { 274 #ifdef DEBUG
275 class HeapDebugUtils;
276 #endif
277
278
279 // A queue of objects promoted during scavenge. Each object is accompanied
280 // by its size to avoid dereferencing a map pointer for scanning.
281 class PromotionQueue {
282 public:
283 PromotionQueue() : front_(NULL), rear_(NULL) { }
284
285 void Initialize(Address start_address) {
286 front_ = rear_ = reinterpret_cast<intptr_t*>(start_address);
287 }
288
289 bool is_empty() { return front_ <= rear_; }
290
291 inline void insert(HeapObject* target, int size);
292
293 void remove(HeapObject** target, int* size) {
294 *target = reinterpret_cast<HeapObject*>(*(--front_));
295 *size = static_cast<int>(*(--front_));
296 // Assert no underflow.
297 ASSERT(front_ >= rear_);
298 }
299
300 private:
301 // The front of the queue is higher in memory than the rear.
302 intptr_t* front_;
303 intptr_t* rear_;
304
305 DISALLOW_COPY_AND_ASSIGN(PromotionQueue);
306 };
307
308
309 // External strings table is a place where all external strings are
310 // registered. We need to keep track of such strings to properly
311 // finalize them.
312 class ExternalStringTable {
313 public:
314 // Registers an external string.
315 inline void AddString(String* string);
316
317 inline void Iterate(ObjectVisitor* v);
318
319 // Restores internal invariant and gets rid of collected strings.
320 // Must be called after each Iterate() that modified the strings.
321 void CleanUp();
322
323 // Destroys all allocated memory.
324 void TearDown();
325
326 private:
327 ExternalStringTable() { }
328
329 friend class Heap;
330
331 inline void Verify();
332
333 inline void AddOldString(String* string);
334
335 // Notifies the table that only a prefix of the new list is valid.
336 inline void ShrinkNewStrings(int position);
337
338 // To speed up scavenge collections new space string are kept
339 // separate from old space strings.
340 List<Object*> new_space_strings_;
341 List<Object*> old_space_strings_;
342
343 Heap* heap_;
344
345 DISALLOW_COPY_AND_ASSIGN(ExternalStringTable);
346 };
347
348
349 class Heap {
259 public: 350 public:
260 // Configure heap size before setup. Return false if the heap has been 351 // Configure heap size before setup. Return false if the heap has been
261 // setup already. 352 // setup already.
262 static bool ConfigureHeap(intptr_t max_semispace_size, 353 bool ConfigureHeap(intptr_t max_semispace_size,
263 intptr_t max_old_gen_size, 354 intptr_t max_old_gen_size,
264 intptr_t max_executable_size); 355 intptr_t max_executable_size);
265 static bool ConfigureHeapDefault(); 356 bool ConfigureHeapDefault();
266 357
267 // Initializes the global object heap. If create_heap_objects is true, 358 // Initializes the global object heap. If create_heap_objects is true,
268 // also creates the basic non-mutable objects. 359 // also creates the basic non-mutable objects.
269 // Returns whether it succeeded. 360 // Returns whether it succeeded.
270 static bool Setup(bool create_heap_objects); 361 bool Setup(bool create_heap_objects);
271 362
272 // Destroys all memory allocated by the heap. 363 // Destroys all memory allocated by the heap.
273 static void TearDown(); 364 void TearDown();
274 365
275 // Set the stack limit in the roots_ array. Some architectures generate 366 // Set the stack limit in the roots_ array. Some architectures generate
276 // code that looks here, because it is faster than loading from the static 367 // code that looks here, because it is faster than loading from the static
277 // jslimit_/real_jslimit_ variable in the StackGuard. 368 // jslimit_/real_jslimit_ variable in the StackGuard.
278 static void SetStackLimits(); 369 void SetStackLimits();
279 370
280 // Returns whether Setup has been called. 371 // Returns whether Setup has been called.
281 static bool HasBeenSetup(); 372 bool HasBeenSetup();
282 373
283 // Returns the maximum amount of memory reserved for the heap. For 374 // Returns the maximum amount of memory reserved for the heap. For
284 // the young generation, we reserve 4 times the amount needed for a 375 // the young generation, we reserve 4 times the amount needed for a
285 // semi space. The young generation consists of two semi spaces and 376 // semi space. The young generation consists of two semi spaces and
286 // we reserve twice the amount needed for those in order to ensure 377 // we reserve twice the amount needed for those in order to ensure
287 // that new space can be aligned to its size. 378 // that new space can be aligned to its size.
288 static intptr_t MaxReserved() { 379 intptr_t MaxReserved() {
289 return 4 * reserved_semispace_size_ + max_old_generation_size_; 380 return 4 * reserved_semispace_size_ + max_old_generation_size_;
290 } 381 }
291 static int MaxSemiSpaceSize() { return max_semispace_size_; } 382 int MaxSemiSpaceSize() { return max_semispace_size_; }
292 static int ReservedSemiSpaceSize() { return reserved_semispace_size_; } 383 int ReservedSemiSpaceSize() { return reserved_semispace_size_; }
293 static int InitialSemiSpaceSize() { return initial_semispace_size_; } 384 int InitialSemiSpaceSize() { return initial_semispace_size_; }
294 static intptr_t MaxOldGenerationSize() { return max_old_generation_size_; } 385 intptr_t MaxOldGenerationSize() { return max_old_generation_size_; }
295 static intptr_t MaxExecutableSize() { return max_executable_size_; } 386 intptr_t MaxExecutableSize() { return max_executable_size_; }
296 387
297 // Returns the capacity of the heap in bytes w/o growing. Heap grows when 388 // Returns the capacity of the heap in bytes w/o growing. Heap grows when
298 // more spaces are needed until it reaches the limit. 389 // more spaces are needed until it reaches the limit.
299 static intptr_t Capacity(); 390 intptr_t Capacity();
300 391
301 // Returns the amount of memory currently committed for the heap. 392 // Returns the amount of memory currently committed for the heap.
302 static intptr_t CommittedMemory(); 393 intptr_t CommittedMemory();
303 394
304 // Returns the amount of executable memory currently committed for the heap. 395 // Returns the amount of executable memory currently committed for the heap.
305 static intptr_t CommittedMemoryExecutable(); 396 intptr_t CommittedMemoryExecutable();
306 397
307 // Returns the available bytes in space w/o growing. 398 // Returns the available bytes in space w/o growing.
308 // Heap doesn't guarantee that it can allocate an object that requires 399 // Heap doesn't guarantee that it can allocate an object that requires
309 // all available bytes. Check MaxHeapObjectSize() instead. 400 // all available bytes. Check MaxHeapObjectSize() instead.
310 static intptr_t Available(); 401 intptr_t Available();
311 402
312 // Returns the maximum object size in paged space. 403 // Returns the maximum object size in paged space.
313 static inline int MaxObjectSizeInPagedSpace(); 404 inline int MaxObjectSizeInPagedSpace();
314 405
315 // Returns the size of all objects residing in the heap. 406 // Returns the size of all objects residing in the heap.
316 static intptr_t SizeOfObjects(); 407 intptr_t SizeOfObjects();
317 408
318 // Return the starting address and a mask for the new space. And-masking an 409 // Return the starting address and a mask for the new space. And-masking an
319 // address with the mask will result in the start address of the new space 410 // address with the mask will result in the start address of the new space
320 // for all addresses in either semispace. 411 // for all addresses in either semispace.
321 static Address NewSpaceStart() { return new_space_.start(); } 412 Address NewSpaceStart() { return new_space_.start(); }
322 static uintptr_t NewSpaceMask() { return new_space_.mask(); } 413 uintptr_t NewSpaceMask() { return new_space_.mask(); }
323 static Address NewSpaceTop() { return new_space_.top(); } 414 Address NewSpaceTop() { return new_space_.top(); }
324 415
325 static NewSpace* new_space() { return &new_space_; } 416 NewSpace* new_space() { return &new_space_; }
326 static OldSpace* old_pointer_space() { return old_pointer_space_; } 417 OldSpace* old_pointer_space() { return old_pointer_space_; }
327 static OldSpace* old_data_space() { return old_data_space_; } 418 OldSpace* old_data_space() { return old_data_space_; }
328 static OldSpace* code_space() { return code_space_; } 419 OldSpace* code_space() { return code_space_; }
329 static MapSpace* map_space() { return map_space_; } 420 MapSpace* map_space() { return map_space_; }
330 static CellSpace* cell_space() { return cell_space_; } 421 CellSpace* cell_space() { return cell_space_; }
331 static LargeObjectSpace* lo_space() { return lo_space_; } 422 LargeObjectSpace* lo_space() { return lo_space_; }
332 423
333 static bool always_allocate() { return always_allocate_scope_depth_ != 0; } 424 bool always_allocate() { return always_allocate_scope_depth_ != 0; }
334 static Address always_allocate_scope_depth_address() { 425 Address always_allocate_scope_depth_address() {
335 return reinterpret_cast<Address>(&always_allocate_scope_depth_); 426 return reinterpret_cast<Address>(&always_allocate_scope_depth_);
336 } 427 }
337 static bool linear_allocation() { 428 bool linear_allocation() {
338 return linear_allocation_scope_depth_ != 0; 429 return linear_allocation_scope_depth_ != 0;
339 } 430 }
340 431
341 static Address* NewSpaceAllocationTopAddress() { 432 Address* NewSpaceAllocationTopAddress() {
342 return new_space_.allocation_top_address(); 433 return new_space_.allocation_top_address();
343 } 434 }
344 static Address* NewSpaceAllocationLimitAddress() { 435 Address* NewSpaceAllocationLimitAddress() {
345 return new_space_.allocation_limit_address(); 436 return new_space_.allocation_limit_address();
346 } 437 }
347 438
348 // Uncommit unused semi space. 439 // Uncommit unused semi space.
349 static bool UncommitFromSpace() { return new_space_.UncommitFromSpace(); } 440 bool UncommitFromSpace() { return new_space_.UncommitFromSpace(); }
350 441
351 #ifdef ENABLE_HEAP_PROTECTION 442 #ifdef ENABLE_HEAP_PROTECTION
352 // Protect/unprotect the heap by marking all spaces read-only/writable. 443 // Protect/unprotect the heap by marking all spaces read-only/writable.
353 static void Protect(); 444 void Protect();
354 static void Unprotect(); 445 void Unprotect();
355 #endif 446 #endif
356 447
357 // Allocates and initializes a new JavaScript object based on a 448 // Allocates and initializes a new JavaScript object based on a
358 // constructor. 449 // constructor.
359 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 450 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
360 // failed. 451 // failed.
361 // Please note this does not perform a garbage collection. 452 // Please note this does not perform a garbage collection.
362 MUST_USE_RESULT static MaybeObject* AllocateJSObject( 453 MUST_USE_RESULT MaybeObject* AllocateJSObject(
363 JSFunction* constructor, PretenureFlag pretenure = NOT_TENURED); 454 JSFunction* constructor, PretenureFlag pretenure = NOT_TENURED);
364 455
365 // Allocates and initializes a new global object based on a constructor. 456 // Allocates and initializes a new global object based on a constructor.
366 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 457 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
367 // failed. 458 // failed.
368 // Please note this does not perform a garbage collection. 459 // Please note this does not perform a garbage collection.
369 MUST_USE_RESULT static MaybeObject* AllocateGlobalObject( 460 MUST_USE_RESULT MaybeObject* AllocateGlobalObject(JSFunction* constructor);
370 JSFunction* constructor);
371 461
372 // Returns a deep copy of the JavaScript object. 462 // Returns a deep copy of the JavaScript object.
373 // Properties and elements are copied too. 463 // Properties and elements are copied too.
374 // Returns failure if allocation failed. 464 // Returns failure if allocation failed.
375 MUST_USE_RESULT static MaybeObject* CopyJSObject(JSObject* source); 465 MUST_USE_RESULT MaybeObject* CopyJSObject(JSObject* source);
376 466
377 // Allocates the function prototype. 467 // Allocates the function prototype.
378 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 468 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
379 // failed. 469 // failed.
380 // Please note this does not perform a garbage collection. 470 // Please note this does not perform a garbage collection.
381 MUST_USE_RESULT static MaybeObject* AllocateFunctionPrototype( 471 MUST_USE_RESULT MaybeObject* AllocateFunctionPrototype(JSFunction* function);
382 JSFunction* function);
383 472
384 // Reinitialize an JSGlobalProxy based on a constructor. The object 473 // Reinitialize an JSGlobalProxy based on a constructor. The object
385 // must have the same size as objects allocated using the 474 // must have the same size as objects allocated using the
386 // constructor. The object is reinitialized and behaves as an 475 // constructor. The object is reinitialized and behaves as an
387 // object that has been freshly allocated using the constructor. 476 // object that has been freshly allocated using the constructor.
388 MUST_USE_RESULT static MaybeObject* ReinitializeJSGlobalProxy( 477 MUST_USE_RESULT MaybeObject* ReinitializeJSGlobalProxy(
389 JSFunction* constructor, 478 JSFunction* constructor, JSGlobalProxy* global);
390 JSGlobalProxy* global);
391 479
392 // Allocates and initializes a new JavaScript object based on a map. 480 // Allocates and initializes a new JavaScript object based on a map.
393 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 481 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
394 // failed. 482 // failed.
395 // Please note this does not perform a garbage collection. 483 // Please note this does not perform a garbage collection.
396 MUST_USE_RESULT static MaybeObject* AllocateJSObjectFromMap( 484 MUST_USE_RESULT MaybeObject* AllocateJSObjectFromMap(
397 Map* map, PretenureFlag pretenure = NOT_TENURED); 485 Map* map, PretenureFlag pretenure = NOT_TENURED);
398 486
399 // Allocates a heap object based on the map. 487 // Allocates a heap object based on the map.
400 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 488 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
401 // failed. 489 // failed.
402 // Please note this function does not perform a garbage collection. 490 // Please note this function does not perform a garbage collection.
403 MUST_USE_RESULT static MaybeObject* Allocate(Map* map, AllocationSpace space); 491 MUST_USE_RESULT MaybeObject* Allocate(Map* map, AllocationSpace space);
404 492
405 // Allocates a JS Map in the heap. 493 // Allocates a JS Map in the heap.
406 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 494 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
407 // failed. 495 // failed.
408 // Please note this function does not perform a garbage collection. 496 // Please note this function does not perform a garbage collection.
409 MUST_USE_RESULT static MaybeObject* AllocateMap(InstanceType instance_type, 497 MUST_USE_RESULT MaybeObject* AllocateMap(InstanceType instance_type,
410 int instance_size); 498 int instance_size);
411 499
412 // Allocates a partial map for bootstrapping. 500 // Allocates a partial map for bootstrapping.
413 MUST_USE_RESULT static MaybeObject* AllocatePartialMap( 501 MUST_USE_RESULT MaybeObject* AllocatePartialMap(InstanceType instance_type,
414 InstanceType instance_type, 502 int instance_size);
415 int instance_size);
416 503
417 // Allocate a map for the specified function 504 // Allocate a map for the specified function
418 MUST_USE_RESULT static MaybeObject* AllocateInitialMap(JSFunction* fun); 505 MUST_USE_RESULT MaybeObject* AllocateInitialMap(JSFunction* fun);
419 506
420 // Allocates an empty code cache. 507 // Allocates an empty code cache.
421 MUST_USE_RESULT static MaybeObject* AllocateCodeCache(); 508 MUST_USE_RESULT MaybeObject* AllocateCodeCache();
422 509
423 // Clear the Instanceof cache (used when a prototype changes). 510 // Clear the Instanceof cache (used when a prototype changes).
424 static void ClearInstanceofCache() { 511 inline void ClearInstanceofCache();
425 set_instanceof_cache_function(the_hole_value());
426 }
427 512
428 // Allocates and fully initializes a String. There are two String 513 // Allocates and fully initializes a String. There are two String
429 // encodings: ASCII and two byte. One should choose between the three string 514 // encodings: ASCII and two byte. One should choose between the three string
430 // allocation functions based on the encoding of the string buffer used to 515 // allocation functions based on the encoding of the string buffer used to
431 // initialize the string. 516 // initialize the string.
432 // - ...FromAscii initializes the string from a buffer that is ASCII 517 // - ...FromAscii initializes the string from a buffer that is ASCII
433 // encoded (it does not check that the buffer is ASCII encoded) and the 518 // encoded (it does not check that the buffer is ASCII encoded) and the
434 // result will be ASCII encoded. 519 // result will be ASCII encoded.
435 // - ...FromUTF8 initializes the string from a buffer that is UTF-8 520 // - ...FromUTF8 initializes the string from a buffer that is UTF-8
436 // encoded. If the characters are all single-byte characters, the 521 // encoded. If the characters are all single-byte characters, the
437 // result will be ASCII encoded, otherwise it will be converted to two 522 // result will be ASCII encoded, otherwise it will be converted to two
438 // byte. 523 // byte.
439 // - ...FromTwoByte initializes the string from a buffer that is two-byte 524 // - ...FromTwoByte initializes the string from a buffer that is two-byte
440 // encoded. If the characters are all single-byte characters, the 525 // encoded. If the characters are all single-byte characters, the
441 // result will be converted to ASCII, otherwise it will be left as 526 // result will be converted to ASCII, otherwise it will be left as
442 // two-byte. 527 // two-byte.
443 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 528 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
444 // failed. 529 // failed.
445 // Please note this does not perform a garbage collection. 530 // Please note this does not perform a garbage collection.
446 MUST_USE_RESULT static MaybeObject* AllocateStringFromAscii( 531 MUST_USE_RESULT MaybeObject* AllocateStringFromAscii(
447 Vector<const char> str, 532 Vector<const char> str,
448 PretenureFlag pretenure = NOT_TENURED); 533 PretenureFlag pretenure = NOT_TENURED);
449 MUST_USE_RESULT static inline MaybeObject* AllocateStringFromUtf8( 534 MUST_USE_RESULT inline MaybeObject* AllocateStringFromUtf8(
450 Vector<const char> str, 535 Vector<const char> str,
451 PretenureFlag pretenure = NOT_TENURED); 536 PretenureFlag pretenure = NOT_TENURED);
452 MUST_USE_RESULT static MaybeObject* AllocateStringFromUtf8Slow( 537 MUST_USE_RESULT MaybeObject* AllocateStringFromUtf8Slow(
453 Vector<const char> str, 538 Vector<const char> str,
454 PretenureFlag pretenure = NOT_TENURED); 539 PretenureFlag pretenure = NOT_TENURED);
455 MUST_USE_RESULT static MaybeObject* AllocateStringFromTwoByte( 540 MUST_USE_RESULT MaybeObject* AllocateStringFromTwoByte(
456 Vector<const uc16> str, 541 Vector<const uc16> str,
457 PretenureFlag pretenure = NOT_TENURED); 542 PretenureFlag pretenure = NOT_TENURED);
458 543
459 // Allocates a symbol in old space based on the character stream. 544 // Allocates a symbol in old space based on the character stream.
460 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 545 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
461 // failed. 546 // failed.
462 // Please note this function does not perform a garbage collection. 547 // Please note this function does not perform a garbage collection.
463 MUST_USE_RESULT static inline MaybeObject* AllocateSymbol( 548 MUST_USE_RESULT inline MaybeObject* AllocateSymbol(Vector<const char> str,
464 Vector<const char> str, 549 int chars,
465 int chars, 550 uint32_t hash_field);
466 uint32_t hash_field);
467 551
468 MUST_USE_RESULT static inline MaybeObject* AllocateAsciiSymbol( 552 MUST_USE_RESULT inline MaybeObject* AllocateAsciiSymbol(
469 Vector<const char> str, 553 Vector<const char> str,
470 uint32_t hash_field); 554 uint32_t hash_field);
471 555
472 MUST_USE_RESULT static inline MaybeObject* AllocateTwoByteSymbol( 556 MUST_USE_RESULT inline MaybeObject* AllocateTwoByteSymbol(
473 Vector<const uc16> str, 557 Vector<const uc16> str,
474 uint32_t hash_field); 558 uint32_t hash_field);
475 559
476 MUST_USE_RESULT static MaybeObject* AllocateInternalSymbol( 560 MUST_USE_RESULT MaybeObject* AllocateInternalSymbol(
477 unibrow::CharacterStream* buffer, int chars, uint32_t hash_field); 561 unibrow::CharacterStream* buffer, int chars, uint32_t hash_field);
478 562
479 MUST_USE_RESULT static MaybeObject* AllocateExternalSymbol( 563 MUST_USE_RESULT MaybeObject* AllocateExternalSymbol(
480 Vector<const char> str, 564 Vector<const char> str,
481 int chars); 565 int chars);
482 566
483
484 // Allocates and partially initializes a String. There are two String 567 // Allocates and partially initializes a String. There are two String
485 // encodings: ASCII and two byte. These functions allocate a string of the 568 // encodings: ASCII and two byte. These functions allocate a string of the
486 // given length and set its map and length fields. The characters of the 569 // given length and set its map and length fields. The characters of the
487 // string are uninitialized. 570 // string are uninitialized.
488 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 571 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
489 // failed. 572 // failed.
490 // Please note this does not perform a garbage collection. 573 // Please note this does not perform a garbage collection.
491 MUST_USE_RESULT static MaybeObject* AllocateRawAsciiString( 574 MUST_USE_RESULT MaybeObject* AllocateRawAsciiString(
492 int length, 575 int length,
493 PretenureFlag pretenure = NOT_TENURED); 576 PretenureFlag pretenure = NOT_TENURED);
494 MUST_USE_RESULT static MaybeObject* AllocateRawTwoByteString( 577 MUST_USE_RESULT MaybeObject* AllocateRawTwoByteString(
495 int length, 578 int length,
496 PretenureFlag pretenure = NOT_TENURED); 579 PretenureFlag pretenure = NOT_TENURED);
497 580
498 // Computes a single character string where the character has code. 581 // Computes a single character string where the character has code.
499 // A cache is used for ascii codes. 582 // A cache is used for ascii codes.
500 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 583 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
501 // failed. Please note this does not perform a garbage collection. 584 // failed. Please note this does not perform a garbage collection.
502 MUST_USE_RESULT static MaybeObject* LookupSingleCharacterStringFromCode( 585 MUST_USE_RESULT MaybeObject* LookupSingleCharacterStringFromCode(
503 uint16_t code); 586 uint16_t code);
504 587
505 // Allocate a byte array of the specified length 588 // Allocate a byte array of the specified length
506 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 589 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
507 // failed. 590 // failed.
508 // Please note this does not perform a garbage collection. 591 // Please note this does not perform a garbage collection.
509 MUST_USE_RESULT static MaybeObject* AllocateByteArray( 592 MUST_USE_RESULT MaybeObject* AllocateByteArray(int length,
510 int length, 593 PretenureFlag pretenure);
511 PretenureFlag pretenure);
512 594
513 // Allocate a non-tenured byte array of the specified length 595 // Allocate a non-tenured byte array of the specified length
514 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 596 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
515 // failed. 597 // failed.
516 // Please note this does not perform a garbage collection. 598 // Please note this does not perform a garbage collection.
517 MUST_USE_RESULT static MaybeObject* AllocateByteArray(int length); 599 MUST_USE_RESULT MaybeObject* AllocateByteArray(int length);
518 600
519 // Allocates an external array of the specified length and type. 601 // Allocates an external array of the specified length and type.
520 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 602 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
521 // failed. 603 // failed.
522 // Please note this does not perform a garbage collection. 604 // Please note this does not perform a garbage collection.
523 MUST_USE_RESULT static MaybeObject* AllocateExternalArray( 605 MUST_USE_RESULT MaybeObject* AllocateExternalArray(
524 int length, 606 int length,
525 ExternalArrayType array_type, 607 ExternalArrayType array_type,
526 void* external_pointer, 608 void* external_pointer,
527 PretenureFlag pretenure); 609 PretenureFlag pretenure);
528 610
529 // Allocate a tenured JS global property cell. 611 // Allocate a tenured JS global property cell.
530 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 612 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
531 // failed. 613 // failed.
532 // Please note this does not perform a garbage collection. 614 // Please note this does not perform a garbage collection.
533 MUST_USE_RESULT static MaybeObject* AllocateJSGlobalPropertyCell( 615 MUST_USE_RESULT MaybeObject* AllocateJSGlobalPropertyCell(Object* value);
534 Object* value);
535 616
536 // Allocates a fixed array initialized with undefined values 617 // Allocates a fixed array initialized with undefined values
537 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 618 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
538 // failed. 619 // failed.
539 // Please note this does not perform a garbage collection. 620 // Please note this does not perform a garbage collection.
540 MUST_USE_RESULT static MaybeObject* AllocateFixedArray( 621 MUST_USE_RESULT MaybeObject* AllocateFixedArray(int length,
541 int length, 622 PretenureFlag pretenure);
542 PretenureFlag pretenure);
543 // Allocates a fixed array initialized with undefined values 623 // Allocates a fixed array initialized with undefined values
544 MUST_USE_RESULT static MaybeObject* AllocateFixedArray(int length); 624 MUST_USE_RESULT MaybeObject* AllocateFixedArray(int length);
545 625
546 // Allocates an uninitialized fixed array. It must be filled by the caller. 626 // Allocates an uninitialized fixed array. It must be filled by the caller.
547 // 627 //
548 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 628 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
549 // failed. 629 // failed.
550 // Please note this does not perform a garbage collection. 630 // Please note this does not perform a garbage collection.
551 MUST_USE_RESULT static MaybeObject* AllocateUninitializedFixedArray( 631 MUST_USE_RESULT MaybeObject* AllocateUninitializedFixedArray(int length);
552 int length);
553 632
554 // Make a copy of src and return it. Returns 633 // Make a copy of src and return it. Returns
555 // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed. 634 // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
556 MUST_USE_RESULT static inline MaybeObject* CopyFixedArray(FixedArray* src); 635 MUST_USE_RESULT inline MaybeObject* CopyFixedArray(FixedArray* src);
557 636
558 // Make a copy of src, set the map, and return the copy. Returns 637 // Make a copy of src, set the map, and return the copy. Returns
559 // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed. 638 // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
560 MUST_USE_RESULT static MaybeObject* CopyFixedArrayWithMap(FixedArray* src, 639 MUST_USE_RESULT MaybeObject* CopyFixedArrayWithMap(FixedArray* src, Map* map);
561 Map* map);
562 640
563 // Allocates a fixed array initialized with the hole values. 641 // Allocates a fixed array initialized with the hole values.
564 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 642 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
565 // failed. 643 // failed.
566 // Please note this does not perform a garbage collection. 644 // Please note this does not perform a garbage collection.
567 MUST_USE_RESULT static MaybeObject* AllocateFixedArrayWithHoles( 645 MUST_USE_RESULT MaybeObject* AllocateFixedArrayWithHoles(
568 int length, 646 int length,
569 PretenureFlag pretenure = NOT_TENURED); 647 PretenureFlag pretenure = NOT_TENURED);
570 648
571 // AllocateHashTable is identical to AllocateFixedArray except 649 // AllocateHashTable is identical to AllocateFixedArray except
572 // that the resulting object has hash_table_map as map. 650 // that the resulting object has hash_table_map as map.
573 MUST_USE_RESULT static MaybeObject* AllocateHashTable( 651 MUST_USE_RESULT MaybeObject* AllocateHashTable(
574 int length, PretenureFlag pretenure = NOT_TENURED); 652 int length, PretenureFlag pretenure = NOT_TENURED);
575 653
576 // Allocate a global (but otherwise uninitialized) context. 654 // Allocate a global (but otherwise uninitialized) context.
577 MUST_USE_RESULT static MaybeObject* AllocateGlobalContext(); 655 MUST_USE_RESULT MaybeObject* AllocateGlobalContext();
578 656
579 // Allocate a function context. 657 // Allocate a function context.
580 MUST_USE_RESULT static MaybeObject* AllocateFunctionContext( 658 MUST_USE_RESULT MaybeObject* AllocateFunctionContext(int length,
581 int length, 659 JSFunction* closure);
582 JSFunction* closure);
583 660
584 // Allocate a 'with' context. 661 // Allocate a 'with' context.
585 MUST_USE_RESULT static MaybeObject* AllocateWithContext( 662 MUST_USE_RESULT MaybeObject* AllocateWithContext(Context* previous,
586 Context* previous, 663 JSObject* extension,
587 JSObject* extension, 664 bool is_catch_context);
588 bool is_catch_context);
589 665
590 // Allocates a new utility object in the old generation. 666 // Allocates a new utility object in the old generation.
591 MUST_USE_RESULT static MaybeObject* AllocateStruct(InstanceType type); 667 MUST_USE_RESULT MaybeObject* AllocateStruct(InstanceType type);
592 668
593 // Allocates a function initialized with a shared part. 669 // Allocates a function initialized with a shared part.
594 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 670 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
595 // failed. 671 // failed.
596 // Please note this does not perform a garbage collection. 672 // Please note this does not perform a garbage collection.
597 MUST_USE_RESULT static MaybeObject* AllocateFunction( 673 MUST_USE_RESULT MaybeObject* AllocateFunction(
598 Map* function_map, 674 Map* function_map,
599 SharedFunctionInfo* shared, 675 SharedFunctionInfo* shared,
600 Object* prototype, 676 Object* prototype,
601 PretenureFlag pretenure = TENURED); 677 PretenureFlag pretenure = TENURED);
602 678
603 // Arguments object size. 679 // Arguments object size.
604 static const int kArgumentsObjectSize = 680 static const int kArgumentsObjectSize =
605 JSObject::kHeaderSize + 2 * kPointerSize; 681 JSObject::kHeaderSize + 2 * kPointerSize;
606 // Strict mode arguments has no callee so it is smaller. 682 // Strict mode arguments has no callee so it is smaller.
607 static const int kArgumentsObjectSizeStrict = 683 static const int kArgumentsObjectSizeStrict =
608 JSObject::kHeaderSize + 1 * kPointerSize; 684 JSObject::kHeaderSize + 1 * kPointerSize;
609 // Indicies for direct access into argument objects. 685 // Indicies for direct access into argument objects.
610 static const int kArgumentsLengthIndex = 0; 686 static const int kArgumentsLengthIndex = 0;
611 // callee is only valid in non-strict mode. 687 // callee is only valid in non-strict mode.
612 static const int kArgumentsCalleeIndex = 1; 688 static const int kArgumentsCalleeIndex = 1;
613 689
614 // Allocates an arguments object - optionally with an elements array. 690 // Allocates an arguments object - optionally with an elements array.
615 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 691 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
616 // failed. 692 // failed.
617 // Please note this does not perform a garbage collection. 693 // Please note this does not perform a garbage collection.
618 MUST_USE_RESULT static MaybeObject* AllocateArgumentsObject(Object* callee, 694 MUST_USE_RESULT MaybeObject* AllocateArgumentsObject(
619 int length); 695 Object* callee, int length);
620 696
621 // Same as NewNumberFromDouble, but may return a preallocated/immutable 697 // Same as NewNumberFromDouble, but may return a preallocated/immutable
622 // number object (e.g., minus_zero_value_, nan_value_) 698 // number object (e.g., minus_zero_value_, nan_value_)
623 MUST_USE_RESULT static MaybeObject* NumberFromDouble( 699 MUST_USE_RESULT MaybeObject* NumberFromDouble(
624 double value, PretenureFlag pretenure = NOT_TENURED); 700 double value, PretenureFlag pretenure = NOT_TENURED);
625 701
626 // Allocated a HeapNumber from value. 702 // Allocated a HeapNumber from value.
627 MUST_USE_RESULT static MaybeObject* AllocateHeapNumber( 703 MUST_USE_RESULT MaybeObject* AllocateHeapNumber(
628 double value, 704 double value,
629 PretenureFlag pretenure); 705 PretenureFlag pretenure);
630 // pretenure = NOT_TENURED. 706 // pretenure = NOT_TENURED
631 MUST_USE_RESULT static MaybeObject* AllocateHeapNumber(double value); 707 MUST_USE_RESULT MaybeObject* AllocateHeapNumber(double value);
632 708
633 // Converts an int into either a Smi or a HeapNumber object. 709 // Converts an int into either a Smi or a HeapNumber object.
634 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 710 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
635 // failed. 711 // failed.
636 // Please note this does not perform a garbage collection. 712 // Please note this does not perform a garbage collection.
637 MUST_USE_RESULT static inline MaybeObject* NumberFromInt32(int32_t value); 713 MUST_USE_RESULT inline MaybeObject* NumberFromInt32(int32_t value);
638 714
639 // Converts an int into either a Smi or a HeapNumber object. 715 // Converts an int into either a Smi or a HeapNumber object.
640 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 716 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
641 // failed. 717 // failed.
642 // Please note this does not perform a garbage collection. 718 // Please note this does not perform a garbage collection.
643 MUST_USE_RESULT static inline MaybeObject* NumberFromUint32(uint32_t value); 719 MUST_USE_RESULT inline MaybeObject* NumberFromUint32(uint32_t value);
644 720
645 // Allocates a new proxy object. 721 // Allocates a new proxy object.
646 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 722 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
647 // failed. 723 // failed.
648 // Please note this does not perform a garbage collection. 724 // Please note this does not perform a garbage collection.
649 MUST_USE_RESULT static MaybeObject* AllocateProxy( 725 MUST_USE_RESULT MaybeObject* AllocateProxy(
650 Address proxy, 726 Address proxy, PretenureFlag pretenure = NOT_TENURED);
651 PretenureFlag pretenure = NOT_TENURED);
652 727
653 // Allocates a new SharedFunctionInfo object. 728 // Allocates a new SharedFunctionInfo object.
654 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 729 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
655 // failed. 730 // failed.
656 // Please note this does not perform a garbage collection. 731 // Please note this does not perform a garbage collection.
657 MUST_USE_RESULT static MaybeObject* AllocateSharedFunctionInfo(Object* name); 732 MUST_USE_RESULT MaybeObject* AllocateSharedFunctionInfo(Object* name);
658 733
659 // Allocates a new JSMessageObject object. 734 // Allocates a new JSMessageObject object.
660 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 735 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
661 // failed. 736 // failed.
662 // Please note that this does not perform a garbage collection. 737 // Please note that this does not perform a garbage collection.
663 MUST_USE_RESULT static MaybeObject* AllocateJSMessageObject( 738 MUST_USE_RESULT MaybeObject* AllocateJSMessageObject(
664 String* type, 739 String* type,
665 JSArray* arguments, 740 JSArray* arguments,
666 int start_position, 741 int start_position,
667 int end_position, 742 int end_position,
668 Object* script, 743 Object* script,
669 Object* stack_trace, 744 Object* stack_trace,
670 Object* stack_frames); 745 Object* stack_frames);
671 746
672 // Allocates a new cons string object. 747 // Allocates a new cons string object.
673 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 748 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
674 // failed. 749 // failed.
675 // Please note this does not perform a garbage collection. 750 // Please note this does not perform a garbage collection.
676 MUST_USE_RESULT static MaybeObject* AllocateConsString(String* first, 751 MUST_USE_RESULT MaybeObject* AllocateConsString(String* first,
677 String* second); 752 String* second);
678 753
679 // Allocates a new sub string object which is a substring of an underlying 754 // Allocates a new sub string object which is a substring of an underlying
680 // string buffer stretching from the index start (inclusive) to the index 755 // string buffer stretching from the index start (inclusive) to the index
681 // end (exclusive). 756 // end (exclusive).
682 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 757 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
683 // failed. 758 // failed.
684 // Please note this does not perform a garbage collection. 759 // Please note this does not perform a garbage collection.
685 MUST_USE_RESULT static MaybeObject* AllocateSubString( 760 MUST_USE_RESULT MaybeObject* AllocateSubString(
686 String* buffer, 761 String* buffer,
687 int start, 762 int start,
688 int end, 763 int end,
689 PretenureFlag pretenure = NOT_TENURED); 764 PretenureFlag pretenure = NOT_TENURED);
690 765
691 // Allocate a new external string object, which is backed by a string 766 // Allocate a new external string object, which is backed by a string
692 // resource that resides outside the V8 heap. 767 // resource that resides outside the V8 heap.
693 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 768 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
694 // failed. 769 // failed.
695 // Please note this does not perform a garbage collection. 770 // Please note this does not perform a garbage collection.
696 MUST_USE_RESULT static MaybeObject* AllocateExternalStringFromAscii( 771 MUST_USE_RESULT MaybeObject* AllocateExternalStringFromAscii(
697 ExternalAsciiString::Resource* resource); 772 ExternalAsciiString::Resource* resource);
698 MUST_USE_RESULT static MaybeObject* AllocateExternalStringFromTwoByte( 773 MUST_USE_RESULT MaybeObject* AllocateExternalStringFromTwoByte(
699 ExternalTwoByteString::Resource* resource); 774 ExternalTwoByteString::Resource* resource);
700 775
701 // Finalizes an external string by deleting the associated external 776 // Finalizes an external string by deleting the associated external
702 // data and clearing the resource pointer. 777 // data and clearing the resource pointer.
703 static inline void FinalizeExternalString(String* string); 778 inline void FinalizeExternalString(String* string);
704 779
705 // Allocates an uninitialized object. The memory is non-executable if the 780 // Allocates an uninitialized object. The memory is non-executable if the
706 // hardware and OS allow. 781 // hardware and OS allow.
707 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 782 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
708 // failed. 783 // failed.
709 // Please note this function does not perform a garbage collection. 784 // Please note this function does not perform a garbage collection.
710 MUST_USE_RESULT static inline MaybeObject* AllocateRaw( 785 MUST_USE_RESULT inline MaybeObject* AllocateRaw(int size_in_bytes,
711 int size_in_bytes, 786 AllocationSpace space,
712 AllocationSpace space, 787 AllocationSpace retry_space);
713 AllocationSpace retry_space);
714 788
715 // Initialize a filler object to keep the ability to iterate over the heap 789 // Initialize a filler object to keep the ability to iterate over the heap
716 // when shortening objects. 790 // when shortening objects.
717 static void CreateFillerObjectAt(Address addr, int size); 791 void CreateFillerObjectAt(Address addr, int size);
718 792
719 // Makes a new native code object 793 // Makes a new native code object
720 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 794 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
721 // failed. On success, the pointer to the Code object is stored in the 795 // failed. On success, the pointer to the Code object is stored in the
722 // self_reference. This allows generated code to reference its own Code 796 // self_reference. This allows generated code to reference its own Code
723 // object by containing this pointer. 797 // object by containing this pointer.
724 // Please note this function does not perform a garbage collection. 798 // Please note this function does not perform a garbage collection.
725 MUST_USE_RESULT static MaybeObject* CreateCode(const CodeDesc& desc, 799 MUST_USE_RESULT MaybeObject* CreateCode(const CodeDesc& desc,
726 Code::Flags flags, 800 Code::Flags flags,
727 Handle<Object> self_reference, 801 Handle<Object> self_reference,
728 bool immovable = false); 802 bool immovable = false);
729 803
730 MUST_USE_RESULT static MaybeObject* CopyCode(Code* code); 804 MUST_USE_RESULT MaybeObject* CopyCode(Code* code);
731 805
732 // Copy the code and scope info part of the code object, but insert 806 // Copy the code and scope info part of the code object, but insert
733 // the provided data as the relocation information. 807 // the provided data as the relocation information.
734 MUST_USE_RESULT static MaybeObject* CopyCode(Code* code, 808 MUST_USE_RESULT MaybeObject* CopyCode(Code* code, Vector<byte> reloc_info);
735 Vector<byte> reloc_info);
736 809
737 // Finds the symbol for string in the symbol table. 810 // Finds the symbol for string in the symbol table.
738 // If not found, a new symbol is added to the table and returned. 811 // If not found, a new symbol is added to the table and returned.
739 // Returns Failure::RetryAfterGC(requested_bytes, space) if allocation 812 // Returns Failure::RetryAfterGC(requested_bytes, space) if allocation
740 // failed. 813 // failed.
741 // Please note this function does not perform a garbage collection. 814 // Please note this function does not perform a garbage collection.
742 MUST_USE_RESULT static MaybeObject* LookupSymbol(Vector<const char> str); 815 MUST_USE_RESULT MaybeObject* LookupSymbol(Vector<const char> str);
743 MUST_USE_RESULT static MaybeObject* LookupAsciiSymbol(Vector<const char> str); 816 MUST_USE_RESULT MaybeObject* LookupAsciiSymbol(Vector<const char> str);
744 MUST_USE_RESULT static MaybeObject* LookupTwoByteSymbol( 817 MUST_USE_RESULT MaybeObject* LookupTwoByteSymbol(
745 Vector<const uc16> str); 818 Vector<const uc16> str);
746 MUST_USE_RESULT static MaybeObject* LookupAsciiSymbol(const char* str) { 819 MUST_USE_RESULT MaybeObject* LookupAsciiSymbol(const char* str) {
747 return LookupSymbol(CStrVector(str)); 820 return LookupSymbol(CStrVector(str));
748 } 821 }
749 MUST_USE_RESULT static MaybeObject* LookupSymbol(String* str); 822 MUST_USE_RESULT MaybeObject* LookupSymbol(String* str);
750 static bool LookupSymbolIfExists(String* str, String** symbol); 823 bool LookupSymbolIfExists(String* str, String** symbol);
751 static bool LookupTwoCharsSymbolIfExists(String* str, String** symbol); 824 bool LookupTwoCharsSymbolIfExists(String* str, String** symbol);
752 825
753 // Compute the matching symbol map for a string if possible. 826 // Compute the matching symbol map for a string if possible.
754 // NULL is returned if string is in new space or not flattened. 827 // NULL is returned if string is in new space or not flattened.
755 static Map* SymbolMapForString(String* str); 828 Map* SymbolMapForString(String* str);
756 829
757 // Tries to flatten a string before compare operation. 830 // Tries to flatten a string before compare operation.
758 // 831 //
759 // Returns a failure in case it was decided that flattening was 832 // Returns a failure in case it was decided that flattening was
760 // necessary and failed. Note, if flattening is not necessary the 833 // necessary and failed. Note, if flattening is not necessary the
761 // string might stay non-flat even when not a failure is returned. 834 // string might stay non-flat even when not a failure is returned.
762 // 835 //
763 // Please note this function does not perform a garbage collection. 836 // Please note this function does not perform a garbage collection.
764 MUST_USE_RESULT static inline MaybeObject* PrepareForCompare(String* str); 837 MUST_USE_RESULT inline MaybeObject* PrepareForCompare(String* str);
765 838
766 // Converts the given boolean condition to JavaScript boolean value. 839 // Converts the given boolean condition to JavaScript boolean value.
767 static Object* ToBoolean(bool condition) { 840 inline Object* ToBoolean(bool condition);
768 return condition ? true_value() : false_value();
769 }
770 841
771 // Code that should be run before and after each GC. Includes some 842 // Code that should be run before and after each GC. Includes some
772 // reporting/verification activities when compiled with DEBUG set. 843 // reporting/verification activities when compiled with DEBUG set.
773 static void GarbageCollectionPrologue(); 844 void GarbageCollectionPrologue();
774 static void GarbageCollectionEpilogue(); 845 void GarbageCollectionEpilogue();
775 846
776 // Performs garbage collection operation. 847 // Performs garbage collection operation.
777 // Returns whether there is a chance that another major GC could 848 // Returns whether there is a chance that another major GC could
778 // collect more garbage. 849 // collect more garbage.
779 static bool CollectGarbage(AllocationSpace space, GarbageCollector collector); 850 bool CollectGarbage(AllocationSpace space, GarbageCollector collector);
780 851
781 // Performs garbage collection operation. 852 // Performs garbage collection operation.
782 // Returns whether there is a chance that another major GC could 853 // Returns whether there is a chance that another major GC could
783 // collect more garbage. 854 // collect more garbage.
784 inline static bool CollectGarbage(AllocationSpace space); 855 inline bool CollectGarbage(AllocationSpace space);
785 856
786 static const int kNoGCFlags = 0; 857 static const int kNoGCFlags = 0;
787 static const int kForceCompactionMask = 1; 858 static const int kForceCompactionMask = 1;
788 static const int kMakeHeapIterableMask = 2; 859 static const int kMakeHeapIterableMask = 2;
789 860
790 // Performs a full garbage collection. If (flags & kForceCompactionMask) is 861 // Performs a full garbage collection. If (flags & kForceCompactionMask) is
791 // non-zero then force compaction. If (flags & kMakeHeapIterableMask) is non- 862 // non-zero then force compaction. If (flags & kMakeHeapIterableMask) is non-
792 // zero, then the slower precise sweeper is used, which leaves the heap in a 863 // zero, then the slower precise sweeper is used, which leaves the heap in a
793 // state where we can iterate over the heap visiting all objects. 864 // state where we can iterate over the heap visiting all objects.
794 static void CollectAllGarbage(int flags); 865 void CollectAllGarbage(int flags);
795 866
796 // Ensure that we have swept all spaces in such a way that we can iterate 867 // Ensure that we have swept all spaces in such a way that we can iterate
797 // over all objects. May cause a GC. 868 // over all objects. May cause a GC.
798 static void EnsureHeapIsIterable(); 869 void EnsureHeapIsIterable();
799 870
800 // Last hope GC, should try to squeeze as much as possible. 871 // Last hope GC, should try to squeeze as much as possible.
801 static void CollectAllAvailableGarbage(); 872 void CollectAllAvailableGarbage();
802 873
803 // Notify the heap that a context has been disposed. 874 // Notify the heap that a context has been disposed.
804 static int NotifyContextDisposed() { return ++contexts_disposed_; } 875 int NotifyContextDisposed() { return ++contexts_disposed_; }
805 876
806 // Utility to invoke the scavenger. This is needed in test code to 877 // Utility to invoke the scavenger. This is needed in test code to
807 // ensure correct callback for weak global handles. 878 // ensure correct callback for weak global handles.
808 static void PerformScavenge(); 879 void PerformScavenge();
880
881 PromotionQueue* promotion_queue() { return &promotion_queue_; }
809 882
810 #ifdef DEBUG 883 #ifdef DEBUG
811 // Utility used with flag gc-greedy. 884 // Utility used with flag gc-greedy.
812 static void GarbageCollectionGreedyCheck(); 885 void GarbageCollectionGreedyCheck();
813 #endif 886 #endif
814 887
815 static void AddGCPrologueCallback( 888 void AddGCPrologueCallback(
816 GCEpilogueCallback callback, GCType gc_type_filter); 889 GCEpilogueCallback callback, GCType gc_type_filter);
817 static void RemoveGCPrologueCallback(GCEpilogueCallback callback); 890 void RemoveGCPrologueCallback(GCEpilogueCallback callback);
818 891
819 static void AddGCEpilogueCallback( 892 void AddGCEpilogueCallback(
820 GCEpilogueCallback callback, GCType gc_type_filter); 893 GCEpilogueCallback callback, GCType gc_type_filter);
821 static void RemoveGCEpilogueCallback(GCEpilogueCallback callback); 894 void RemoveGCEpilogueCallback(GCEpilogueCallback callback);
822 895
823 static void SetGlobalGCPrologueCallback(GCCallback callback) { 896 void SetGlobalGCPrologueCallback(GCCallback callback) {
824 ASSERT((callback == NULL) ^ (global_gc_prologue_callback_ == NULL)); 897 ASSERT((callback == NULL) ^ (global_gc_prologue_callback_ == NULL));
825 global_gc_prologue_callback_ = callback; 898 global_gc_prologue_callback_ = callback;
826 } 899 }
827 static void SetGlobalGCEpilogueCallback(GCCallback callback) { 900 void SetGlobalGCEpilogueCallback(GCCallback callback) {
828 ASSERT((callback == NULL) ^ (global_gc_epilogue_callback_ == NULL)); 901 ASSERT((callback == NULL) ^ (global_gc_epilogue_callback_ == NULL));
829 global_gc_epilogue_callback_ = callback; 902 global_gc_epilogue_callback_ = callback;
830 } 903 }
831 904
832 // Heap root getters. We have versions with and without type::cast() here. 905 // Heap root getters. We have versions with and without type::cast() here.
833 // You can't use type::cast during GC because the assert fails. 906 // You can't use type::cast during GC because the assert fails.
834 // TODO(gc): Try removing the unchecked accessors, now that GC marking does 907 // TODO(gc): Try removing the unchecked accessors, now that GC marking does
835 // not corrupt the stack. 908 // not corrupt the stack.
836 #define ROOT_ACCESSOR(type, name, camel_name) \ 909 #define ROOT_ACCESSOR(type, name, camel_name) \
837 static inline type* name() { \ 910 type* name() { \
838 return type::cast(roots_[k##camel_name##RootIndex]); \ 911 return type::cast(roots_[k##camel_name##RootIndex]); \
839 } \ 912 } \
840 static inline type* raw_unchecked_##name() { \ 913 type* raw_unchecked_##name() { \
841 return reinterpret_cast<type*>(roots_[k##camel_name##RootIndex]); \ 914 return reinterpret_cast<type*>(roots_[k##camel_name##RootIndex]); \
842 } 915 }
843 ROOT_LIST(ROOT_ACCESSOR) 916 ROOT_LIST(ROOT_ACCESSOR)
844 #undef ROOT_ACCESSOR 917 #undef ROOT_ACCESSOR
845 918
846 // Utility type maps 919 // Utility type maps
847 #define STRUCT_MAP_ACCESSOR(NAME, Name, name) \ 920 #define STRUCT_MAP_ACCESSOR(NAME, Name, name) \
848 static inline Map* name##_map() { \ 921 Map* name##_map() { \
849 return Map::cast(roots_[k##Name##MapRootIndex]); \ 922 return Map::cast(roots_[k##Name##MapRootIndex]); \
850 } 923 }
851 STRUCT_LIST(STRUCT_MAP_ACCESSOR) 924 STRUCT_LIST(STRUCT_MAP_ACCESSOR)
852 #undef STRUCT_MAP_ACCESSOR 925 #undef STRUCT_MAP_ACCESSOR
853 926
854 #define SYMBOL_ACCESSOR(name, str) static inline String* name() { \ 927 #define SYMBOL_ACCESSOR(name, str) String* name() { \
855 return String::cast(roots_[k##name##RootIndex]); \ 928 return String::cast(roots_[k##name##RootIndex]); \
856 } 929 }
857 SYMBOL_LIST(SYMBOL_ACCESSOR) 930 SYMBOL_LIST(SYMBOL_ACCESSOR)
858 #undef SYMBOL_ACCESSOR 931 #undef SYMBOL_ACCESSOR
859 932
860 // The hidden_symbol is special because it is the empty string, but does 933 // The hidden_symbol is special because it is the empty string, but does
861 // not match the empty string. 934 // not match the empty string.
862 static String* hidden_symbol() { return hidden_symbol_; } 935 String* hidden_symbol() { return hidden_symbol_; }
863 936
864 static void set_global_contexts_list(Object* object) { 937 void set_global_contexts_list(Object* object) {
865 global_contexts_list_ = object; 938 global_contexts_list_ = object;
866 } 939 }
867 static Object* global_contexts_list() { return global_contexts_list_; } 940 Object* global_contexts_list() { return global_contexts_list_; }
868 941
869 // Iterates over all roots in the heap. 942 // Iterates over all roots in the heap.
870 static void IterateRoots(ObjectVisitor* v, VisitMode mode); 943 void IterateRoots(ObjectVisitor* v, VisitMode mode);
871 // Iterates over all strong roots in the heap. 944 // Iterates over all strong roots in the heap.
872 static void IterateStrongRoots(ObjectVisitor* v, VisitMode mode); 945 void IterateStrongRoots(ObjectVisitor* v, VisitMode mode);
873 // Iterates over all the other roots in the heap. 946 // Iterates over all the other roots in the heap.
874 static void IterateWeakRoots(ObjectVisitor* v, VisitMode mode); 947 void IterateWeakRoots(ObjectVisitor* v, VisitMode mode);
875 948
876 // For each region of pointers on a page in use from an old space call 949 // For each region of pointers on a page in use from an old space call
877 // visit_pointer_region callback. 950 // visit_pointer_region callback.
878 // If either visit_pointer_region or callback can cause an allocation 951 // If either visit_pointer_region or callback can cause an allocation
879 // in old space and changes in allocation watermark then 952 // in old space and changes in allocation watermark then
880 // can_preallocate_during_iteration should be set to true. 953 // can_preallocate_during_iteration should be set to true.
881 // All pages will be marked as having invalid watermark upon 954 // All pages will be marked as having invalid watermark upon
882 // iteration completion. 955 // iteration completion.
883 static void IteratePointers( 956 void IteratePointers(
884 PagedSpace* space, 957 PagedSpace* space,
885 PointerRegionCallback visit_pointer_region, 958 PointerRegionCallback visit_pointer_region,
886 ObjectSlotCallback callback); 959 ObjectSlotCallback callback);
887 static void IteratePointersOnPage( 960 static void IteratePointersOnPage(
888 PagedSpace* space, 961 PagedSpace* space,
889 PointerRegionCallback visit_pointer_region, 962 PointerRegionCallback visit_pointer_region,
890 ObjectSlotCallback callback, 963 ObjectSlotCallback callback,
891 Page* page); 964 Page* page);
892 965
893 // Iterate pointers to from semispace of new space found in memory interval 966 // Iterate pointers to from semispace of new space found in memory interval
894 // from start to end. 967 // from start to end.
895 static void IterateAndMarkPointersToFromSpace(Address start, 968 void IterateAndMarkPointersToFromSpace(Address start,
896 Address end, 969 Address end,
897 ObjectSlotCallback callback); 970 ObjectSlotCallback callback);
898 971
899 // Iterate pointers to new space found in memory interval from start to end. 972 // Iterate pointers to new space found in memory interval from start to end.
900 static void IteratePointersToNewSpace(Address start, 973 static void IteratePointersToNewSpace(Heap* heap,
974 Address start,
901 Address end, 975 Address end,
902 ObjectSlotCallback callback); 976 ObjectSlotCallback callback);
903 977
904 978
905 // Iterate pointers to new space found in memory interval from start to end. 979 // Iterate pointers to new space found in memory interval from start to end.
906 // This interval is considered to belong to the map space. 980 // This interval is considered to belong to the map space.
907 static void IteratePointersFromMapsToNewSpace(Address start, 981 static void IteratePointersFromMapsToNewSpace(Heap* heap,
982 Address start,
908 Address end, 983 Address end,
909 ObjectSlotCallback callback); 984 ObjectSlotCallback callback);
910 985
911 986
912 // Returns whether the object resides in new space. 987 // Returns whether the object resides in new space.
913 static inline bool InNewSpace(Object* object); 988 inline bool InNewSpace(Object* object);
914 static inline bool InNewSpace(Address addr); 989 inline bool InNewSpace(Address addr);
915 static inline bool InFromSpace(Object* object); 990 inline bool InFromSpace(Object* object);
916 static inline bool InToSpace(Object* object); 991 inline bool InToSpace(Object* object);
917 992
 918 // Checks whether an address/object is in the heap (including auxiliary 993 // Checks whether an address/object is in the heap (including auxiliary
919 // area and unused area). 994 // area and unused area).
920 static bool Contains(Address addr); 995 bool Contains(Address addr);
921 static bool Contains(HeapObject* value); 996 bool Contains(HeapObject* value);
922 997
 923 // Checks whether an address/object is in a space. 998 // Checks whether an address/object is in a space.
924 // Currently used by tests, serialization and heap verification only. 999 // Currently used by tests, serialization and heap verification only.
925 static bool InSpace(Address addr, AllocationSpace space); 1000 bool InSpace(Address addr, AllocationSpace space);
926 static bool InSpace(HeapObject* value, AllocationSpace space); 1001 bool InSpace(HeapObject* value, AllocationSpace space);
927 1002
928 // Finds out which space an object should get promoted to based on its type. 1003 // Finds out which space an object should get promoted to based on its type.
929 static inline OldSpace* TargetSpace(HeapObject* object); 1004 inline OldSpace* TargetSpace(HeapObject* object);
930 static inline AllocationSpace TargetSpaceId(InstanceType type); 1005 inline AllocationSpace TargetSpaceId(InstanceType type);
931 1006
932 // Sets the stub_cache_ (only used when expanding the dictionary). 1007 // Sets the stub_cache_ (only used when expanding the dictionary).
933 static void public_set_code_stubs(NumberDictionary* value) { 1008 void public_set_code_stubs(NumberDictionary* value) {
934 roots_[kCodeStubsRootIndex] = value; 1009 roots_[kCodeStubsRootIndex] = value;
935 } 1010 }
936 1011
937 // Support for computing object sizes for old objects during GCs. Returns 1012 // Support for computing object sizes for old objects during GCs. Returns
938 // a function that is guaranteed to be safe for computing object sizes in 1013 // a function that is guaranteed to be safe for computing object sizes in
939 // the current GC phase. 1014 // the current GC phase.
940 static HeapObjectCallback GcSafeSizeOfOldObjectFunction() { 1015 HeapObjectCallback GcSafeSizeOfOldObjectFunction() {
941 return gc_safe_size_of_old_object_; 1016 return gc_safe_size_of_old_object_;
942 } 1017 }
943 1018
944 // Sets the non_monomorphic_cache_ (only used when expanding the dictionary). 1019 // Sets the non_monomorphic_cache_ (only used when expanding the dictionary).
945 static void public_set_non_monomorphic_cache(NumberDictionary* value) { 1020 void public_set_non_monomorphic_cache(NumberDictionary* value) {
946 roots_[kNonMonomorphicCacheRootIndex] = value; 1021 roots_[kNonMonomorphicCacheRootIndex] = value;
947 } 1022 }
948 1023
949 static void public_set_empty_script(Script* script) { 1024 void public_set_empty_script(Script* script) {
950 roots_[kEmptyScriptRootIndex] = script; 1025 roots_[kEmptyScriptRootIndex] = script;
951 } 1026 }
952 1027
953 static void public_set_store_buffer_top(Address* top) { 1028 void public_set_store_buffer_top(Address* top) {
954 roots_[kStoreBufferTopRootIndex] = reinterpret_cast<Smi*>(top); 1029 roots_[kStoreBufferTopRootIndex] = reinterpret_cast<Smi*>(top);
955 } 1030 }
956 1031
957 // Update the next script id. 1032 // Update the next script id.
958 static inline void SetLastScriptId(Object* last_script_id); 1033 inline void SetLastScriptId(Object* last_script_id);
959 1034
960 // Generated code can embed this address to get access to the roots. 1035 // Generated code can embed this address to get access to the roots.
961 static Object** roots_address() { return roots_; } 1036 Object** roots_address() { return roots_; }
962 1037
963 static Address* store_buffer_top_address() { 1038 Address* store_buffer_top_address() {
964 return reinterpret_cast<Address*>(&roots_[kStoreBufferTopRootIndex]); 1039 return reinterpret_cast<Address*>(&roots_[kStoreBufferTopRootIndex]);
965 } 1040 }
966 1041
967 // Get address of global contexts list for serialization support. 1042 // Get address of global contexts list for serialization support.
968 static Object** global_contexts_list_address() { 1043 Object** global_contexts_list_address() {
969 return &global_contexts_list_; 1044 return &global_contexts_list_;
970 } 1045 }
971 1046
972 #ifdef DEBUG 1047 #ifdef DEBUG
973 static void Print(); 1048 void Print();
974 static void PrintHandles(); 1049 void PrintHandles();
975 1050
976 // Verify the heap is in its normal state before or after a GC. 1051 // Verify the heap is in its normal state before or after a GC.
977 static void Verify(); 1052 void Verify();
978 1053
979 static void OldPointerSpaceCheckStoreBuffer(); 1054 void OldPointerSpaceCheckStoreBuffer();
980 static void MapSpaceCheckStoreBuffer(); 1055 void MapSpaceCheckStoreBuffer();
981 static void LargeObjectSpaceCheckStoreBuffer(); 1056 void LargeObjectSpaceCheckStoreBuffer();
982 1057
983 // Report heap statistics. 1058 // Report heap statistics.
984 static void ReportHeapStatistics(const char* title); 1059 void ReportHeapStatistics(const char* title);
985 static void ReportCodeStatistics(const char* title); 1060 void ReportCodeStatistics(const char* title);
986 1061
987 // Fill in bogus values in from space 1062 // Fill in bogus values in from space
988 static void ZapFromSpace(); 1063 void ZapFromSpace();
989 #endif 1064 #endif
990 1065
991 #if defined(ENABLE_LOGGING_AND_PROFILING) 1066 #if defined(ENABLE_LOGGING_AND_PROFILING)
992 // Print short heap statistics. 1067 // Print short heap statistics.
993 static void PrintShortHeapStatistics(); 1068 void PrintShortHeapStatistics();
994 #endif 1069 #endif
995 1070
996 // Makes a new symbol object 1071 // Makes a new symbol object
997 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 1072 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
998 // failed. 1073 // failed.
999 // Please note this function does not perform a garbage collection. 1074 // Please note this function does not perform a garbage collection.
1000 MUST_USE_RESULT static MaybeObject* CreateSymbol(const char* str, 1075 MUST_USE_RESULT MaybeObject* CreateSymbol(
1001 int length, 1076 const char* str, int length, int hash);
1002 int hash); 1077 MUST_USE_RESULT MaybeObject* CreateSymbol(String* str);
1003 MUST_USE_RESULT static MaybeObject* CreateSymbol(String* str);
1004 1078
1005 // Write barrier support for address[offset] = o. 1079 // Write barrier support for address[offset] = o.
1006 static inline void RecordWrite(Address address, int offset); 1080 inline void RecordWrite(Address address, int offset);
1007 1081
1008 // Write barrier support for address[start : start + len[ = o. 1082 // Write barrier support for address[start : start + len[ = o.
1009 static inline void RecordWrites(Address address, int start, int len); 1083 inline void RecordWrites(Address address, int start, int len);
1010 1084
1011 // Given an address occupied by a live code object, return that object. 1085 // Given an address occupied by a live code object, return that object.
1012 static Object* FindCodeObject(Address a); 1086 Object* FindCodeObject(Address a);
1013 1087
1014 // Invoke Shrink on shrinkable spaces. 1088 // Invoke Shrink on shrinkable spaces.
1015 static void Shrink(); 1089 void Shrink();
1016 1090
1017 enum HeapState { NOT_IN_GC, SCAVENGE, MARK_COMPACT }; 1091 enum HeapState { NOT_IN_GC, SCAVENGE, MARK_COMPACT };
1018 static inline HeapState gc_state() { return gc_state_; } 1092 inline HeapState gc_state() { return gc_state_; }
1019 1093
1020 #ifdef DEBUG 1094 #ifdef DEBUG
1021 static bool IsAllocationAllowed() { return allocation_allowed_; } 1095 bool IsAllocationAllowed() { return allocation_allowed_; }
1022 static inline bool allow_allocation(bool enable); 1096 inline bool allow_allocation(bool enable);
1023 1097
1024 static bool disallow_allocation_failure() { 1098 bool disallow_allocation_failure() {
1025 return disallow_allocation_failure_; 1099 return disallow_allocation_failure_;
1026 } 1100 }
1027 1101
1028 static void TracePathToObject(Object* target); 1102 void TracePathToObject(Object* target);
1029 static void TracePathToGlobal(); 1103 void TracePathToGlobal();
1030 #endif 1104 #endif
1031 1105
1032 // Callback function passed to Heap::Iterate etc. Copies an object if 1106 // Callback function passed to Heap::Iterate etc. Copies an object if
1033 // necessary, the object might be promoted to an old space. The caller must 1107 // necessary, the object might be promoted to an old space. The caller must
1034 // ensure the precondition that the object is (a) a heap object and (b) in 1108 // ensure the precondition that the object is (a) a heap object and (b) in
1035 // the heap's from space. 1109 // the heap's from space.
1036 static void ScavengePointer(HeapObject** p); 1110 static inline void ScavengePointer(HeapObject** p);
1037 static inline void ScavengeObject(HeapObject** p, HeapObject* object); 1111 static inline void ScavengeObject(HeapObject** p, HeapObject* object);
1038 1112
1039 // Commits from space if it is uncommitted. 1113 // Commits from space if it is uncommitted.
1040 static void EnsureFromSpaceIsCommitted(); 1114 void EnsureFromSpaceIsCommitted();
1041 1115
1042 // Support for partial snapshots. After calling this we can allocate a 1116 // Support for partial snapshots. After calling this we can allocate a
1043 // certain number of bytes using only linear allocation (with a 1117 // certain number of bytes using only linear allocation (with a
1044 // LinearAllocationScope and an AlwaysAllocateScope) without using freelists 1118 // LinearAllocationScope and an AlwaysAllocateScope) without using freelists
 1045 // or causing a GC. It returns true if space was reserved or false if a GC is 1119 // or causing a GC. It returns true if space was reserved or false if a GC is
1046 // needed. For paged spaces the space requested must include the space wasted 1120 // needed. For paged spaces the space requested must include the space wasted
1047 // at the end of each page when allocating linearly. 1121 // at the end of each page when allocating linearly.
1048 static void ReserveSpace( 1122 void ReserveSpace(
1049 int new_space_size, 1123 int new_space_size,
1050 int pointer_space_size, 1124 int pointer_space_size,
1051 int data_space_size, 1125 int data_space_size,
1052 int code_space_size, 1126 int code_space_size,
1053 int map_space_size, 1127 int map_space_size,
1054 int cell_space_size, 1128 int cell_space_size,
1055 int large_object_size); 1129 int large_object_size);
1056 1130
1057 // 1131 //
1058 // Support for the API. 1132 // Support for the API.
1059 // 1133 //
1060 1134
1061 static bool CreateApiObjects(); 1135 bool CreateApiObjects();
1062 1136
 1063 // Attempt to find the number in a small cache. If we find it, return 1137 // Attempt to find the number in a small cache. If we find it, return
1064 // the string representation of the number. Otherwise return undefined. 1138 // the string representation of the number. Otherwise return undefined.
1065 static Object* GetNumberStringCache(Object* number); 1139 Object* GetNumberStringCache(Object* number);
1066 1140
1067 // Update the cache with a new number-string pair. 1141 // Update the cache with a new number-string pair.
1068 static void SetNumberStringCache(Object* number, String* str); 1142 void SetNumberStringCache(Object* number, String* str);
1069 1143
1070 // Adjusts the amount of registered external memory. 1144 // Adjusts the amount of registered external memory.
1071 // Returns the adjusted value. 1145 // Returns the adjusted value.
1072 static inline int AdjustAmountOfExternalAllocatedMemory(int change_in_bytes); 1146 inline int AdjustAmountOfExternalAllocatedMemory(int change_in_bytes);
1073 1147
1074 // Allocate uninitialized fixed array. 1148 // Allocate uninitialized fixed array.
1075 MUST_USE_RESULT static MaybeObject* AllocateRawFixedArray(int length); 1149 MUST_USE_RESULT MaybeObject* AllocateRawFixedArray(int length);
1076 MUST_USE_RESULT static MaybeObject* AllocateRawFixedArray( 1150 MUST_USE_RESULT MaybeObject* AllocateRawFixedArray(int length,
1077 int length, 1151 PretenureFlag pretenure);
1078 PretenureFlag pretenure);
1079 1152
1080 // True if we have reached the allocation limit in the old generation that 1153 // True if we have reached the allocation limit in the old generation that
1081 // should force the next GC (caused normally) to be a full one. 1154 // should force the next GC (caused normally) to be a full one.
1082 static bool OldGenerationPromotionLimitReached() { 1155 inline bool OldGenerationPromotionLimitReached() {
1083 return (PromotedSpaceSize() + PromotedExternalMemorySize()) 1156 return (PromotedSpaceSize() + PromotedExternalMemorySize())
1084 > old_gen_promotion_limit_; 1157 > old_gen_promotion_limit_;
1085 } 1158 }
1086 1159
1087 static inline intptr_t OldGenerationSpaceAvailable() { 1160 inline intptr_t OldGenerationSpaceAvailable() {
1088 return old_gen_allocation_limit_ - 1161 return old_gen_allocation_limit_ -
1089 (PromotedSpaceSize() + PromotedExternalMemorySize()); 1162 (PromotedSpaceSize() + PromotedExternalMemorySize());
1090 } 1163 }
1091 1164
1092 // True if we have reached the allocation limit in the old generation that
1093 // should artificially cause a GC right now.
1094 static inline bool OldGenerationAllocationLimitReached();
1095
1096 // Can be called when the embedding application is idle. 1165 // Can be called when the embedding application is idle.
1097 static bool IdleNotification(); 1166 bool IdleNotification();
1098 1167
1099 // Declare all the root indices. 1168 // Declare all the root indices.
1100 enum RootListIndex { 1169 enum RootListIndex {
1101 #define ROOT_INDEX_DECLARATION(type, name, camel_name) k##camel_name##RootIndex, 1170 #define ROOT_INDEX_DECLARATION(type, name, camel_name) k##camel_name##RootIndex,
1102 STRONG_ROOT_LIST(ROOT_INDEX_DECLARATION) 1171 STRONG_ROOT_LIST(ROOT_INDEX_DECLARATION)
1103 #undef ROOT_INDEX_DECLARATION 1172 #undef ROOT_INDEX_DECLARATION
1104 1173
1105 // Utility type maps 1174 // Utility type maps
1106 #define DECLARE_STRUCT_MAP(NAME, Name, name) k##Name##MapRootIndex, 1175 #define DECLARE_STRUCT_MAP(NAME, Name, name) k##Name##MapRootIndex,
1107 STRUCT_LIST(DECLARE_STRUCT_MAP) 1176 STRUCT_LIST(DECLARE_STRUCT_MAP)
1108 #undef DECLARE_STRUCT_MAP 1177 #undef DECLARE_STRUCT_MAP
1109 1178
1110 #define SYMBOL_INDEX_DECLARATION(name, str) k##name##RootIndex, 1179 #define SYMBOL_INDEX_DECLARATION(name, str) k##name##RootIndex,
1111 SYMBOL_LIST(SYMBOL_INDEX_DECLARATION) 1180 SYMBOL_LIST(SYMBOL_INDEX_DECLARATION)
1112 #undef SYMBOL_DECLARATION 1181 #undef SYMBOL_DECLARATION
1113 1182
1114 kSymbolTableRootIndex, 1183 kSymbolTableRootIndex,
1115 kStrongRootListLength = kSymbolTableRootIndex, 1184 kStrongRootListLength = kSymbolTableRootIndex,
1116 kRootListLength 1185 kRootListLength
1117 }; 1186 };
1118 1187
1119 MUST_USE_RESULT static MaybeObject* NumberToString( 1188 MUST_USE_RESULT MaybeObject* NumberToString(
1120 Object* number, 1189 Object* number, bool check_number_string_cache = true);
1121 bool check_number_string_cache = true);
1122 1190
1123 static Map* MapForExternalArrayType(ExternalArrayType array_type); 1191 Map* MapForExternalArrayType(ExternalArrayType array_type);
1124 static RootListIndex RootIndexForExternalArrayType( 1192 RootListIndex RootIndexForExternalArrayType(
1125 ExternalArrayType array_type); 1193 ExternalArrayType array_type);
1126 1194
1127 static void RecordStats(HeapStats* stats, bool take_snapshot = false); 1195 void RecordStats(HeapStats* stats, bool take_snapshot = false);
1128 1196
1129 // Copy block of memory from src to dst. Size of block should be aligned 1197 // Copy block of memory from src to dst. Size of block should be aligned
1130 // by pointer size. 1198 // by pointer size.
1131 static inline void CopyBlock(Address dst, Address src, int byte_size); 1199 static inline void CopyBlock(Address dst, Address src, int byte_size);
1132 1200
1133 static inline void CopyBlockToOldSpaceAndUpdateWriteBarrier(Address dst, 1201 inline void CopyBlockToOldSpaceAndUpdateWriteBarrier(Address dst,
1134 Address src, 1202 Address src,
1135 int byte_size); 1203 int byte_size);
1136 1204
1137 // Optimized version of memmove for blocks with pointer size aligned sizes and 1205 // Optimized version of memmove for blocks with pointer size aligned sizes and
1138 // pointer size aligned addresses. 1206 // pointer size aligned addresses.
1139 static inline void MoveBlock(Address dst, Address src, int byte_size); 1207 static inline void MoveBlock(Address dst, Address src, int byte_size);
1140 1208
1141 // Check new space expansion criteria and expand semispaces if it was hit. 1209 // Check new space expansion criteria and expand semispaces if it was hit.
1142 static void CheckNewSpaceExpansionCriteria(); 1210 void CheckNewSpaceExpansionCriteria();
1143 1211
1144 static inline void IncrementYoungSurvivorsCounter(int survived) { 1212 inline void IncrementYoungSurvivorsCounter(int survived) {
1145 young_survivors_after_last_gc_ = survived; 1213 young_survivors_after_last_gc_ = survived;
1146 survived_since_last_expansion_ += survived; 1214 survived_since_last_expansion_ += survived;
1147 } 1215 }
1148 1216
1149 static inline bool NextGCIsLikelyToBeFull() { 1217 inline bool NextGCIsLikelyToBeFull() {
1150 if (FLAG_gc_global) return true; 1218 if (FLAG_gc_global) return true;
1151 1219
1152 intptr_t total_promoted = 1220 intptr_t total_promoted =
1153 PromotedSpaceSize() + PromotedExternalMemorySize(); 1221 PromotedSpaceSize() + PromotedExternalMemorySize();
1154 1222
1155 intptr_t adjusted_promotion_limit = 1223 intptr_t adjusted_promotion_limit =
1156 old_gen_promotion_limit_ - new_space_.Capacity(); 1224 old_gen_promotion_limit_ - new_space_.Capacity();
1157 1225
1158 if (total_promoted >= adjusted_promotion_limit) return true; 1226 if (total_promoted >= adjusted_promotion_limit) return true;
1159 1227
1160 intptr_t adjusted_allocation_limit = 1228 intptr_t adjusted_allocation_limit =
1161 old_gen_allocation_limit_ - new_space_.Capacity() / 5; 1229 old_gen_allocation_limit_ - new_space_.Capacity() / 5;
1162 1230
1163 if (PromotedSpaceSize() >= adjusted_allocation_limit) return true; 1231 if (PromotedSpaceSize() >= adjusted_allocation_limit) return true;
1164 1232
1165 return false; 1233 return false;
1166 } 1234 }
1167 1235
1168 1236
1169 static void UpdateNewSpaceReferencesInExternalStringTable( 1237 void UpdateNewSpaceReferencesInExternalStringTable(
1170 ExternalStringTableUpdaterCallback updater_func); 1238 ExternalStringTableUpdaterCallback updater_func);
1171 1239
1172 static void ProcessWeakReferences(WeakObjectRetainer* retainer); 1240 void ProcessWeakReferences(WeakObjectRetainer* retainer);
1173 1241
1174 // Helper function that governs the promotion policy from new space to 1242 // Helper function that governs the promotion policy from new space to
1175 // old. If the object's old address lies below the new space's age 1243 // old. If the object's old address lies below the new space's age
1176 // mark or if we've already filled the bottom 1/16th of the to space, 1244 // mark or if we've already filled the bottom 1/16th of the to space,
1177 // we try to promote this object. 1245 // we try to promote this object.
1178 static inline bool ShouldBePromoted(Address old_address, int object_size); 1246 inline bool ShouldBePromoted(Address old_address, int object_size);
1179 1247
1180 static int MaxObjectSizeInNewSpace() { return kMaxObjectSizeInNewSpace; } 1248 int MaxObjectSizeInNewSpace() { return kMaxObjectSizeInNewSpace; }
1181 1249
1182 static void ClearJSFunctionResultCaches(); 1250 void ClearJSFunctionResultCaches();
1183 1251
1184 static void ClearNormalizedMapCaches(); 1252 void ClearNormalizedMapCaches();
1185 1253
1186 static GCTracer* tracer() { return tracer_; } 1254 GCTracer* tracer() { return tracer_; }
1187 1255
1188 // Returns the size of objects residing in non new spaces. 1256 // Returns the size of objects residing in non new spaces.
1189 static intptr_t PromotedSpaceSize(); 1257 intptr_t PromotedSpaceSize();
1190 1258
1191 static void CallGlobalGCPrologueCallback() { 1259 // Returns maximum GC pause.
1260 int get_max_gc_pause() { return max_gc_pause_; }
1261
1262 // Returns maximum size of objects alive after GC.
1263 intptr_t get_max_alive_after_gc() { return max_alive_after_gc_; }
1264
1265 // Returns minimal interval between two subsequent collections.
1266 int get_min_in_mutator() { return min_in_mutator_; }
1267
1268 MarkCompactCollector* mark_compact_collector() {
1269 return &mark_compact_collector_;
1270 }
1271
1272 StoreBuffer* store_buffer() {
1273 return &store_buffer_;
1274 }
1275
1276 Marking* marking() {
1277 return &marking_;
1278 }
1279
1280 // TODO(gc) Rename to IncrementalMarker after merge.
1281 IncrementalMarking* incremental_marking() {
1282 return &incremental_marking_;
1283 }
1284
1285 ExternalStringTable* external_string_table() {
1286 return &external_string_table_;
1287 }
1288
1289 inline Isolate* isolate();
1290
1291 inline void CallGlobalGCPrologueCallback() {
1192 if (global_gc_prologue_callback_ != NULL) global_gc_prologue_callback_(); 1292 if (global_gc_prologue_callback_ != NULL) global_gc_prologue_callback_();
1193 } 1293 }
1194 1294
1195 static void CallGlobalGCEpilogueCallback() { 1295 inline void CallGlobalGCEpilogueCallback() {
1196 if (global_gc_epilogue_callback_ != NULL) global_gc_epilogue_callback_(); 1296 if (global_gc_epilogue_callback_ != NULL) global_gc_epilogue_callback_();
1197 } 1297 }
1198 1298
1299
1300 inline bool OldGenerationAllocationLimitReached();
1301
1199 private: 1302 private:
1200 static int reserved_semispace_size_; 1303 Heap();
1201 static int max_semispace_size_; 1304
1202 static int initial_semispace_size_; 1305 // This can be calculated directly from a pointer to the heap; however, it is
1203 static intptr_t max_old_generation_size_; 1306 // more expedient to get at the isolate directly from within Heap methods.
1204 static intptr_t max_executable_size_; 1307 Isolate* isolate_;
1205 static intptr_t code_range_size_; 1308
1309 int reserved_semispace_size_;
1310 int max_semispace_size_;
1311 int initial_semispace_size_;
1312 intptr_t max_old_generation_size_;
1313 intptr_t max_executable_size_;
1314 intptr_t code_range_size_;
1206 1315
1207 // For keeping track of how much data has survived 1316 // For keeping track of how much data has survived
1208 // scavenge since last new space expansion. 1317 // scavenge since last new space expansion.
1209 static int survived_since_last_expansion_; 1318 int survived_since_last_expansion_;
1210 1319
1211 static int always_allocate_scope_depth_; 1320 int always_allocate_scope_depth_;
1212 static int linear_allocation_scope_depth_; 1321 int linear_allocation_scope_depth_;
1213 1322
1214 // For keeping track of context disposals. 1323 // For keeping track of context disposals.
1215 static int contexts_disposed_; 1324 int contexts_disposed_;
1216 1325
1217 #if defined(V8_TARGET_ARCH_X64) 1326 #if defined(V8_TARGET_ARCH_X64)
1218 static const int kMaxObjectSizeInNewSpace = 1024*KB; 1327 static const int kMaxObjectSizeInNewSpace = 1024*KB;
1219 #else 1328 #else
1220 static const int kMaxObjectSizeInNewSpace = 512*KB; 1329 static const int kMaxObjectSizeInNewSpace = 512*KB;
1221 #endif 1330 #endif
1222 1331
1223 static NewSpace new_space_; 1332 NewSpace new_space_;
1224 static OldSpace* old_pointer_space_; 1333 OldSpace* old_pointer_space_;
1225 static OldSpace* old_data_space_; 1334 OldSpace* old_data_space_;
1226 static OldSpace* code_space_; 1335 OldSpace* code_space_;
1227 static MapSpace* map_space_; 1336 MapSpace* map_space_;
1228 static CellSpace* cell_space_; 1337 CellSpace* cell_space_;
1229 static LargeObjectSpace* lo_space_; 1338 LargeObjectSpace* lo_space_;
1230 static HeapState gc_state_; 1339 HeapState gc_state_;
1231 1340
1232 // Returns the amount of external memory registered since last global gc. 1341 // Returns the amount of external memory registered since last global gc.
1233 static int PromotedExternalMemorySize(); 1342 int PromotedExternalMemorySize();
1234 1343
1235 static int mc_count_; // how many mark-compact collections happened 1344 int mc_count_; // how many mark-compact collections happened
1236 static int ms_count_; // how many mark-sweep collections happened 1345 int ms_count_; // how many mark-sweep collections happened
1237 static unsigned int gc_count_; // how many gc happened 1346 unsigned int gc_count_; // how many gc happened
1238 1347
1239 // Total length of the strings we failed to flatten since the last GC. 1348 // Total length of the strings we failed to flatten since the last GC.
1240 static int unflattened_strings_length_; 1349 int unflattened_strings_length_;
1241 1350
1242 #define ROOT_ACCESSOR(type, name, camel_name) \ 1351 #define ROOT_ACCESSOR(type, name, camel_name) \
1243 static inline void set_##name(type* value) { \ 1352 inline void set_##name(type* value) { \
1244 roots_[k##camel_name##RootIndex] = value; \ 1353 roots_[k##camel_name##RootIndex] = value; \
1245 } 1354 }
1246 ROOT_LIST(ROOT_ACCESSOR) 1355 ROOT_LIST(ROOT_ACCESSOR)
1247 #undef ROOT_ACCESSOR 1356 #undef ROOT_ACCESSOR
1248 1357
1249 #ifdef DEBUG 1358 #ifdef DEBUG
1250 static bool allocation_allowed_; 1359 bool allocation_allowed_;
1251 1360
1252 // If the --gc-interval flag is set to a positive value, this 1361 // If the --gc-interval flag is set to a positive value, this
1253 // variable holds the value indicating the number of allocations 1362 // variable holds the value indicating the number of allocations
 1254 // remaining until the next failure and garbage collection. 1363 // remaining until the next failure and garbage collection.
1255 static int allocation_timeout_; 1364 int allocation_timeout_;
1256 1365
1257 // Do we expect to be able to handle allocation failure at this 1366 // Do we expect to be able to handle allocation failure at this
1258 // time? 1367 // time?
1259 static bool disallow_allocation_failure_; 1368 bool disallow_allocation_failure_;
1369
1370 HeapDebugUtils* debug_utils_;
1260 #endif // DEBUG 1371 #endif // DEBUG
1261 1372
1262 // Limit that triggers a global GC on the next (normally caused) GC. This 1373 // Limit that triggers a global GC on the next (normally caused) GC. This
1263 // is checked when we have already decided to do a GC to help determine 1374 // is checked when we have already decided to do a GC to help determine
1264 // which collector to invoke. 1375 // which collector to invoke.
1265 static intptr_t old_gen_promotion_limit_; 1376 intptr_t old_gen_promotion_limit_;
1266 1377
1267 // Limit that triggers a global GC as soon as is reasonable. This is 1378 // Limit that triggers a global GC as soon as is reasonable. This is
1268 // checked before expanding a paged space in the old generation and on 1379 // checked before expanding a paged space in the old generation and on
1269 // every allocation in large object space. 1380 // every allocation in large object space.
1270 static intptr_t old_gen_allocation_limit_; 1381 intptr_t old_gen_allocation_limit_;
1271 1382
1272 // Limit on the amount of externally allocated memory allowed 1383 // Limit on the amount of externally allocated memory allowed
1273 // between global GCs. If reached a global GC is forced. 1384 // between global GCs. If reached a global GC is forced.
1274 static intptr_t external_allocation_limit_; 1385 intptr_t external_allocation_limit_;
1275 1386
1276 // The amount of external memory registered through the API kept alive 1387 // The amount of external memory registered through the API kept alive
1277 // by global handles 1388 // by global handles
1278 static int amount_of_external_allocated_memory_; 1389 int amount_of_external_allocated_memory_;
1279 1390
1280 // Caches the amount of external memory registered at the last global gc. 1391 // Caches the amount of external memory registered at the last global gc.
1281 static int amount_of_external_allocated_memory_at_last_global_gc_; 1392 int amount_of_external_allocated_memory_at_last_global_gc_;
1282 1393
1283 // Indicates that an allocation has failed in the old generation since the 1394 // Indicates that an allocation has failed in the old generation since the
1284 // last GC. 1395 // last GC.
1285 static int old_gen_exhausted_; 1396 int old_gen_exhausted_;
1286 1397
1287 static Object* roots_[kRootListLength]; 1398 Object* roots_[kRootListLength];
1288 1399
1289 static Object* global_contexts_list_; 1400 Object* global_contexts_list_;
1290 1401
1291 static StoreBufferRebuilder store_buffer_rebuilder_; 1402 StoreBufferRebuilder store_buffer_rebuilder_;
1292 1403
1293 struct StringTypeTable { 1404 struct StringTypeTable {
1294 InstanceType type; 1405 InstanceType type;
1295 int size; 1406 int size;
1296 RootListIndex index; 1407 RootListIndex index;
1297 }; 1408 };
1298 1409
1299 struct ConstantSymbolTable { 1410 struct ConstantSymbolTable {
1300 const char* contents; 1411 const char* contents;
1301 RootListIndex index; 1412 RootListIndex index;
1302 }; 1413 };
1303 1414
1304 struct StructTable { 1415 struct StructTable {
1305 InstanceType type; 1416 InstanceType type;
1306 int size; 1417 int size;
1307 RootListIndex index; 1418 RootListIndex index;
1308 }; 1419 };
1309 1420
1310 static const StringTypeTable string_type_table[]; 1421 static const StringTypeTable string_type_table[];
1311 static const ConstantSymbolTable constant_symbol_table[]; 1422 static const ConstantSymbolTable constant_symbol_table[];
1312 static const StructTable struct_table[]; 1423 static const StructTable struct_table[];
1313 1424
1314 // The special hidden symbol which is an empty string, but does not match 1425 // The special hidden symbol which is an empty string, but does not match
1315 // any string when looked up in properties. 1426 // any string when looked up in properties.
1316 static String* hidden_symbol_; 1427 String* hidden_symbol_;
1317 1428
1318 // GC callback function, called before and after mark-compact GC. 1429 // GC callback function, called before and after mark-compact GC.
1319 // Allocations in the callback function are disallowed. 1430 // Allocations in the callback function are disallowed.
1320 struct GCPrologueCallbackPair { 1431 struct GCPrologueCallbackPair {
1321 GCPrologueCallbackPair(GCPrologueCallback callback, GCType gc_type) 1432 GCPrologueCallbackPair(GCPrologueCallback callback, GCType gc_type)
1322 : callback(callback), gc_type(gc_type) { 1433 : callback(callback), gc_type(gc_type) {
1323 } 1434 }
1324 bool operator==(const GCPrologueCallbackPair& pair) const { 1435 bool operator==(const GCPrologueCallbackPair& pair) const {
1325 return pair.callback == callback; 1436 return pair.callback == callback;
1326 } 1437 }
1327 GCPrologueCallback callback; 1438 GCPrologueCallback callback;
1328 GCType gc_type; 1439 GCType gc_type;
1329 }; 1440 };
1330 static List<GCPrologueCallbackPair> gc_prologue_callbacks_; 1441 List<GCPrologueCallbackPair> gc_prologue_callbacks_;
1331 1442
1332 struct GCEpilogueCallbackPair { 1443 struct GCEpilogueCallbackPair {
1333 GCEpilogueCallbackPair(GCEpilogueCallback callback, GCType gc_type) 1444 GCEpilogueCallbackPair(GCEpilogueCallback callback, GCType gc_type)
1334 : callback(callback), gc_type(gc_type) { 1445 : callback(callback), gc_type(gc_type) {
1335 } 1446 }
1336 bool operator==(const GCEpilogueCallbackPair& pair) const { 1447 bool operator==(const GCEpilogueCallbackPair& pair) const {
1337 return pair.callback == callback; 1448 return pair.callback == callback;
1338 } 1449 }
1339 GCEpilogueCallback callback; 1450 GCEpilogueCallback callback;
1340 GCType gc_type; 1451 GCType gc_type;
1341 }; 1452 };
1342 static List<GCEpilogueCallbackPair> gc_epilogue_callbacks_; 1453 List<GCEpilogueCallbackPair> gc_epilogue_callbacks_;
1343 1454
1344 static GCCallback global_gc_prologue_callback_; 1455 GCCallback global_gc_prologue_callback_;
1345 static GCCallback global_gc_epilogue_callback_; 1456 GCCallback global_gc_epilogue_callback_;
1346 1457
1347 // Support for computing object sizes during GC. 1458 // Support for computing object sizes during GC.
1348 static HeapObjectCallback gc_safe_size_of_old_object_; 1459 HeapObjectCallback gc_safe_size_of_old_object_;
1349 static int GcSafeSizeOfOldObject(HeapObject* object); 1460 static int GcSafeSizeOfOldObject(HeapObject* object);
1350 1461
1351 // Update the GC state. Called from the mark-compact collector. 1462 // Update the GC state. Called from the mark-compact collector.
1352 static void MarkMapPointersAsEncoded(bool encoded) { 1463 void MarkMapPointersAsEncoded(bool encoded) {
1353 ASSERT(!encoded); 1464 ASSERT(!encoded);
1354 gc_safe_size_of_old_object_ = &GcSafeSizeOfOldObject; 1465 gc_safe_size_of_old_object_ = &GcSafeSizeOfOldObject;
1355 } 1466 }
1356 1467
1357 // Checks whether a global GC is necessary 1468 // Checks whether a global GC is necessary
1358 static GarbageCollector SelectGarbageCollector(AllocationSpace space); 1469 GarbageCollector SelectGarbageCollector(AllocationSpace space);
1359 1470
1360 // Performs garbage collection 1471 // Performs garbage collection
1361 // Returns whether there is a chance another major GC could 1472 // Returns whether there is a chance another major GC could
1362 // collect more garbage. 1473 // collect more garbage.
1363 static bool PerformGarbageCollection(GarbageCollector collector, 1474 bool PerformGarbageCollection(GarbageCollector collector,
1364 GCTracer* tracer); 1475 GCTracer* tracer);
1476
1477 static const intptr_t kMinimumPromotionLimit = 2 * MB;
1478 static const intptr_t kMinimumAllocationLimit = 8 * MB;
1479
1480 inline void UpdateOldSpaceLimits();
1481
1365 1482
1366 // Allocate an uninitialized object in map space. The behavior is identical 1483 // Allocate an uninitialized object in map space. The behavior is identical
1367 // to Heap::AllocateRaw(size_in_bytes, MAP_SPACE), except that (a) it doesn't 1484 // to Heap::AllocateRaw(size_in_bytes, MAP_SPACE), except that (a) it doesn't
1368 // have to test the allocation space argument and (b) can reduce code size 1485 // have to test the allocation space argument and (b) can reduce code size
1369 // (since both AllocateRaw and AllocateRawMap are inlined). 1486 // (since both AllocateRaw and AllocateRawMap are inlined).
1370 MUST_USE_RESULT static inline MaybeObject* AllocateRawMap(); 1487 MUST_USE_RESULT inline MaybeObject* AllocateRawMap();
1371 1488
1372 // Allocate an uninitialized object in the global property cell space. 1489 // Allocate an uninitialized object in the global property cell space.
1373 MUST_USE_RESULT static inline MaybeObject* AllocateRawCell(); 1490 MUST_USE_RESULT inline MaybeObject* AllocateRawCell();
1374 1491
1375 // Initializes a JSObject based on its map. 1492 // Initializes a JSObject based on its map.
1376 static void InitializeJSObjectFromMap(JSObject* obj, 1493 void InitializeJSObjectFromMap(JSObject* obj,
1377 FixedArray* properties, 1494 FixedArray* properties,
1378 Map* map); 1495 Map* map);
1379 1496
1380 static bool CreateInitialMaps(); 1497 bool CreateInitialMaps();
1381 static bool CreateInitialObjects(); 1498 bool CreateInitialObjects();
1382 1499
1383 // These two Create*EntryStub functions are here and forced to not be inlined 1500 // These five Create*EntryStub functions are here and forced to not be inlined
1384 // because of a gcc-4.4 bug that assigns wrong vtable entries. 1501 // because of a gcc-4.4 bug that assigns wrong vtable entries.
1385 NO_INLINE(static void CreateJSEntryStub()); 1502 NO_INLINE(void CreateJSEntryStub());
1386 NO_INLINE(static void CreateJSConstructEntryStub()); 1503 NO_INLINE(void CreateJSConstructEntryStub());
1387 1504
1388 static void CreateFixedStubs(); 1505 void CreateFixedStubs();
1389 1506
1390 MUST_USE_RESULT static MaybeObject* CreateOddball(const char* to_string, 1507 MaybeObject* CreateOddball(const char* to_string,
1391 Object* to_number); 1508 Object* to_number,
1509 byte kind);
1392 1510
1393 // Allocate empty fixed array. 1511 // Allocate empty fixed array.
1394 MUST_USE_RESULT static MaybeObject* AllocateEmptyFixedArray(); 1512 MUST_USE_RESULT MaybeObject* AllocateEmptyFixedArray();
1395 1513
1396 // Performs a minor collection in new generation. 1514 // Performs a minor collection in new generation.
1397 static void Scavenge(); 1515 void Scavenge();
1398 1516
1399 static String* UpdateNewSpaceReferenceInExternalStringTableEntry( 1517 static String* UpdateNewSpaceReferenceInExternalStringTableEntry(
1518 Heap* heap,
1400 Object** pointer); 1519 Object** pointer);
1401 1520
1402 static Address DoScavenge(ObjectVisitor* scavenge_visitor, 1521 Address DoScavenge(ObjectVisitor* scavenge_visitor, Address new_space_front);
1403 Address new_space_front); 1522 static void ScavengeStoreBufferCallback(Heap* heap,
1404 static void ScavengeStoreBufferCallback(MemoryChunk* page, 1523 MemoryChunk* page,
1405 StoreBufferEvent event); 1524 StoreBufferEvent event);
1406 1525
1407 // Performs a major collection in the whole heap. 1526 // Performs a major collection in the whole heap.
1408 static void MarkCompact(GCTracer* tracer); 1527 void MarkCompact(GCTracer* tracer);
1409 1528
1410 // Code to be run before and after mark-compact. 1529 // Code to be run before and after mark-compact.
1411 static void MarkCompactPrologue(bool is_compacting); 1530 void MarkCompactPrologue(bool is_compacting);
1412 1531
1413 // Completely clear the Instanceof cache (to stop it keeping objects alive 1532 // Completely clear the Instanceof cache (to stop it keeping objects alive
1414 // around a GC). 1533 // around a GC).
1415 static void CompletelyClearInstanceofCache() { 1534 inline void CompletelyClearInstanceofCache();
1416 set_instanceof_cache_map(the_hole_value());
1417 set_instanceof_cache_function(the_hole_value());
1418 }
1419 1535
1420 #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING) 1536 #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
1421 // Record statistics before and after garbage collection. 1537 // Record statistics before and after garbage collection.
1422 static void ReportStatisticsBeforeGC(); 1538 void ReportStatisticsBeforeGC();
1423 static void ReportStatisticsAfterGC(); 1539 void ReportStatisticsAfterGC();
1424 #endif 1540 #endif
1425 1541
1426 // Slow part of scavenge object. 1542 // Slow part of scavenge object.
1427 static void ScavengeObjectSlow(HeapObject** p, HeapObject* object); 1543 static void ScavengeObjectSlow(HeapObject** p, HeapObject* object);
1428 1544
1429 // Initializes a function with a shared part and prototype. 1545 // Initializes a function with a shared part and prototype.
1430 // Returns the function. 1546 // Returns the function.
1431 // Note: this code was factored out of AllocateFunction such that 1547 // Note: this code was factored out of AllocateFunction such that
1432 // other parts of the VM could use it. Specifically, a function that creates 1548 // other parts of the VM could use it. Specifically, a function that creates
1433 // instances of type JS_FUNCTION_TYPE benefit from the use of this function. 1549 // instances of type JS_FUNCTION_TYPE benefit from the use of this function.
1434 // Please note this does not perform a garbage collection. 1550 // Please note this does not perform a garbage collection.
1435 MUST_USE_RESULT static inline MaybeObject* InitializeFunction( 1551 MUST_USE_RESULT inline MaybeObject* InitializeFunction(
1436 JSFunction* function, 1552 JSFunction* function,
1437 SharedFunctionInfo* shared, 1553 SharedFunctionInfo* shared,
1438 Object* prototype); 1554 Object* prototype);
1439 1555
1440 static GCTracer* tracer_; 1556 GCTracer* tracer_;
1441 1557
1442 1558
1443 // Initializes the number to string cache based on the max semispace size. 1559 // Initializes the number to string cache based on the max semispace size.
1444 MUST_USE_RESULT static MaybeObject* InitializeNumberStringCache(); 1560 MUST_USE_RESULT MaybeObject* InitializeNumberStringCache();
1445 // Flush the number to string cache. 1561 // Flush the number to string cache.
1446 static void FlushNumberStringCache(); 1562 void FlushNumberStringCache();
1447 1563
1448 static void UpdateSurvivalRateTrend(int start_new_space_size); 1564 void UpdateSurvivalRateTrend(int start_new_space_size);
1449 1565
1450 enum SurvivalRateTrend { INCREASING, STABLE, DECREASING, FLUCTUATING }; 1566 enum SurvivalRateTrend { INCREASING, STABLE, DECREASING, FLUCTUATING };
1451 1567
1452 static const int kYoungSurvivalRateThreshold = 90; 1568 static const int kYoungSurvivalRateThreshold = 90;
1453 static const int kYoungSurvivalRateAllowedDeviation = 15; 1569 static const int kYoungSurvivalRateAllowedDeviation = 15;
1454 1570
1455 static int young_survivors_after_last_gc_; 1571 int young_survivors_after_last_gc_;
1456 static int high_survival_rate_period_length_; 1572 int high_survival_rate_period_length_;
1457 static double survival_rate_; 1573 double survival_rate_;
1458 static SurvivalRateTrend previous_survival_rate_trend_; 1574 SurvivalRateTrend previous_survival_rate_trend_;
1459 static SurvivalRateTrend survival_rate_trend_; 1575 SurvivalRateTrend survival_rate_trend_;
1460 1576
1461 static void set_survival_rate_trend(SurvivalRateTrend survival_rate_trend) { 1577 void set_survival_rate_trend(SurvivalRateTrend survival_rate_trend) {
1462 ASSERT(survival_rate_trend != FLUCTUATING); 1578 ASSERT(survival_rate_trend != FLUCTUATING);
1463 previous_survival_rate_trend_ = survival_rate_trend_; 1579 previous_survival_rate_trend_ = survival_rate_trend_;
1464 survival_rate_trend_ = survival_rate_trend; 1580 survival_rate_trend_ = survival_rate_trend;
1465 } 1581 }
1466 1582
1467 static SurvivalRateTrend survival_rate_trend() { 1583 SurvivalRateTrend survival_rate_trend() {
1468 if (survival_rate_trend_ == STABLE) { 1584 if (survival_rate_trend_ == STABLE) {
1469 return STABLE; 1585 return STABLE;
1470 } else if (previous_survival_rate_trend_ == STABLE) { 1586 } else if (previous_survival_rate_trend_ == STABLE) {
1471 return survival_rate_trend_; 1587 return survival_rate_trend_;
1472 } else if (survival_rate_trend_ != previous_survival_rate_trend_) { 1588 } else if (survival_rate_trend_ != previous_survival_rate_trend_) {
1473 return FLUCTUATING; 1589 return FLUCTUATING;
1474 } else { 1590 } else {
1475 return survival_rate_trend_; 1591 return survival_rate_trend_;
1476 } 1592 }
1477 } 1593 }
1478 1594
1479 static bool IsStableOrIncreasingSurvivalTrend() { 1595 bool IsStableOrIncreasingSurvivalTrend() {
1480 switch (survival_rate_trend()) { 1596 switch (survival_rate_trend()) {
1481 case STABLE: 1597 case STABLE:
1482 case INCREASING: 1598 case INCREASING:
1483 return true; 1599 return true;
1484 default: 1600 default:
1485 return false; 1601 return false;
1486 } 1602 }
1487 } 1603 }
1488 1604
1489 static bool IsIncreasingSurvivalTrend() { 1605 bool IsIncreasingSurvivalTrend() {
1490 return survival_rate_trend() == INCREASING; 1606 return survival_rate_trend() == INCREASING;
1491 } 1607 }
1492 1608
1493 static bool IsHighSurvivalRate() { 1609 bool IsHighSurvivalRate() {
1494 return high_survival_rate_period_length_ > 0; 1610 return high_survival_rate_period_length_ > 0;
1495 } 1611 }
1496 1612
1497 static void SelectScavengingVisitorsTable(); 1613 void SelectScavengingVisitorsTable();
1498 1614
1499 static const int kInitialSymbolTableSize = 2048; 1615 static const int kInitialSymbolTableSize = 2048;
1500 static const int kInitialEvalCacheSize = 64; 1616 static const int kInitialEvalCacheSize = 64;
1501 1617
1618 // Maximum GC pause.
1619 int max_gc_pause_;
1620
1621 // Maximum size of objects alive after GC.
1622 intptr_t max_alive_after_gc_;
1623
1624 // Minimal interval between two subsequent collections.
1625 int min_in_mutator_;
1626
1627 // Size of objects alive after last GC.
1628 intptr_t alive_after_last_gc_;
1629
1630 double last_gc_end_timestamp_;
1631
1632 MarkCompactCollector mark_compact_collector_;
1633
1634 StoreBuffer store_buffer_;
1635
1636 Marking marking_;
1637
1638 IncrementalMarking incremental_marking_;
1639
1640 int number_idle_notifications_;
1641 unsigned int last_idle_notification_gc_count_;
1642 bool last_idle_notification_gc_count_init_;
1643
1644 // Shared state read by the scavenge collector and set by ScavengeObject.
1645 PromotionQueue promotion_queue_;
1646
1647 // Flag is set when the heap has been configured. The heap can be repeatedly
1648 // configured through the API until it is setup.
1649 bool configured_;
1650
1651 ExternalStringTable external_string_table_;
1652
1502 friend class Factory; 1653 friend class Factory;
1654 friend class GCTracer;
1503 friend class DisallowAllocationFailure; 1655 friend class DisallowAllocationFailure;
1504 friend class AlwaysAllocateScope; 1656 friend class AlwaysAllocateScope;
1505 friend class LinearAllocationScope; 1657 friend class LinearAllocationScope;
1658 friend class Page;
1659 friend class Isolate;
1506 friend class MarkCompactCollector; 1660 friend class MarkCompactCollector;
1661 friend class MapCompact;
1662
1663 DISALLOW_COPY_AND_ASSIGN(Heap);
1507 }; 1664 };
1508 1665
1509 1666
1510 class HeapStats { 1667 class HeapStats {
1511 public: 1668 public:
1512 static const int kStartMarker = 0xDECADE00; 1669 static const int kStartMarker = 0xDECADE00;
1513 static const int kEndMarker = 0xDECADE01; 1670 static const int kEndMarker = 0xDECADE01;
1514 1671
1515 int* start_marker; // 0 1672 int* start_marker; // 0
1516 int* new_space_size; // 1 1673 int* new_space_size; // 1
(...skipping 23 matching lines...) Expand all
1540 }; 1697 };
1541 1698
1542 1699
1543 class AlwaysAllocateScope { 1700 class AlwaysAllocateScope {
1544 public: 1701 public:
1545 AlwaysAllocateScope() { 1702 AlwaysAllocateScope() {
1546 // We shouldn't hit any nested scopes, because that requires 1703 // We shouldn't hit any nested scopes, because that requires
1547 // non-handle code to call handle code. The code still works but 1704 // non-handle code to call handle code. The code still works but
1548 // performance will degrade, so we want to catch this situation 1705 // performance will degrade, so we want to catch this situation
1549 // in debug mode. 1706 // in debug mode.
1550 ASSERT(Heap::always_allocate_scope_depth_ == 0); 1707 ASSERT(HEAP->always_allocate_scope_depth_ == 0);
1551 Heap::always_allocate_scope_depth_++; 1708 HEAP->always_allocate_scope_depth_++;
1552 } 1709 }
1553 1710
1554 ~AlwaysAllocateScope() { 1711 ~AlwaysAllocateScope() {
1555 Heap::always_allocate_scope_depth_--; 1712 HEAP->always_allocate_scope_depth_--;
1556 ASSERT(Heap::always_allocate_scope_depth_ == 0); 1713 ASSERT(HEAP->always_allocate_scope_depth_ == 0);
1557 } 1714 }
1558 }; 1715 };
1559 1716
1560 1717
1561 class LinearAllocationScope { 1718 class LinearAllocationScope {
1562 public: 1719 public:
1563 LinearAllocationScope() { 1720 LinearAllocationScope() {
1564 Heap::linear_allocation_scope_depth_++; 1721 HEAP->linear_allocation_scope_depth_++;
1565 } 1722 }
1566 1723
1567 ~LinearAllocationScope() { 1724 ~LinearAllocationScope() {
1568 Heap::linear_allocation_scope_depth_--; 1725 HEAP->linear_allocation_scope_depth_--;
1569 ASSERT(Heap::linear_allocation_scope_depth_ >= 0); 1726 ASSERT(HEAP->linear_allocation_scope_depth_ >= 0);
1570 } 1727 }
1571 }; 1728 };
1572 1729
1573 1730
1574 #ifdef DEBUG 1731 #ifdef DEBUG
1575 // Visitor class to verify interior pointers in spaces that do not contain 1732 // Visitor class to verify interior pointers in spaces that do not contain
1576 // or care about intergenerational references. All heap object pointers have to 1733 // or care about intergenerational references. All heap object pointers have to
1577 // point into the heap to a location that has a map pointer at its first word. 1734 // point into the heap to a location that has a map pointer at its first word.
1578 // Caveat: Heap::Contains is an approximation because it can return true for 1735 // Caveat: Heap::Contains is an approximation because it can return true for
1579 // objects in a heap space but above the allocation pointer. 1736 // objects in a heap space but above the allocation pointer.
1580 class VerifyPointersVisitor: public ObjectVisitor { 1737 class VerifyPointersVisitor: public ObjectVisitor {
1581 public: 1738 public:
1582 void VisitPointers(Object** start, Object** end) { 1739 void VisitPointers(Object** start, Object** end) {
1583 for (Object** current = start; current < end; current++) { 1740 for (Object** current = start; current < end; current++) {
1584 if ((*current)->IsHeapObject()) { 1741 if ((*current)->IsHeapObject()) {
1585 HeapObject* object = HeapObject::cast(*current); 1742 HeapObject* object = HeapObject::cast(*current);
1586 ASSERT(Heap::Contains(object)); 1743 ASSERT(HEAP->Contains(object));
1587 ASSERT(object->map()->IsMap()); 1744 ASSERT(object->map()->IsMap());
1588 } 1745 }
1589 } 1746 }
1590 } 1747 }
1591 }; 1748 };
1592 #endif 1749 #endif
1593 1750
1594 1751
1595 // Space iterator for iterating over all spaces of the heap. 1752 // Space iterator for iterating over all spaces of the heap.
1596 // Returns each space in turn, and null when it is done. 1753 // Returns each space in turn, and null when it is done.
(...skipping 82 matching lines...) Expand 10 before | Expand all | Expand 10 after
1679 // Object iterator for the space currently being iterated. 1836 // Object iterator for the space currently being iterated.
1680 ObjectIterator* object_iterator_; 1837 ObjectIterator* object_iterator_;
1681 }; 1838 };
1682 1839
1683 1840
1684 // Cache for mapping (map, property name) into field offset. 1841 // Cache for mapping (map, property name) into field offset.
1685 // Cleared at startup and prior to mark sweep collection. 1842 // Cleared at startup and prior to mark sweep collection.
1686 class KeyedLookupCache { 1843 class KeyedLookupCache {
1687 public: 1844 public:
1688 // Lookup field offset for (map, name). If absent, -1 is returned. 1845 // Lookup field offset for (map, name). If absent, -1 is returned.
1689 static int Lookup(Map* map, String* name); 1846 int Lookup(Map* map, String* name);
1690 1847
1691 // Update an element in the cache. 1848 // Update an element in the cache.
1692 static void Update(Map* map, String* name, int field_offset); 1849 void Update(Map* map, String* name, int field_offset);
1693 1850
1694 // Clear the cache. 1851 // Clear the cache.
1695 static void Clear(); 1852 void Clear();
1696 1853
1697 static const int kLength = 64; 1854 static const int kLength = 64;
1698 static const int kCapacityMask = kLength - 1; 1855 static const int kCapacityMask = kLength - 1;
1699 static const int kMapHashShift = 2; 1856 static const int kMapHashShift = 2;
1857 static const int kNotFound = -1;
1700 1858
1701 private: 1859 private:
1860 KeyedLookupCache() {
1861 for (int i = 0; i < kLength; ++i) {
1862 keys_[i].map = NULL;
1863 keys_[i].name = NULL;
1864 field_offsets_[i] = kNotFound;
1865 }
1866 }
1867
1702 static inline int Hash(Map* map, String* name); 1868 static inline int Hash(Map* map, String* name);
1703 1869
1704 // Get the address of the keys and field_offsets arrays. Used in 1870 // Get the address of the keys and field_offsets arrays. Used in
1705 // generated code to perform cache lookups. 1871 // generated code to perform cache lookups.
1706 static Address keys_address() { 1872 Address keys_address() {
1707 return reinterpret_cast<Address>(&keys_); 1873 return reinterpret_cast<Address>(&keys_);
1708 } 1874 }
1709 1875
1710 static Address field_offsets_address() { 1876 Address field_offsets_address() {
1711 return reinterpret_cast<Address>(&field_offsets_); 1877 return reinterpret_cast<Address>(&field_offsets_);
1712 } 1878 }
1713 1879
1714 struct Key { 1880 struct Key {
1715 Map* map; 1881 Map* map;
1716 String* name; 1882 String* name;
1717 }; 1883 };
1718 static Key keys_[kLength]; 1884
1719 static int field_offsets_[kLength]; 1885 Key keys_[kLength];
1886 int field_offsets_[kLength];
1720 1887
1721 friend class ExternalReference; 1888 friend class ExternalReference;
1889 friend class Isolate;
1890 DISALLOW_COPY_AND_ASSIGN(KeyedLookupCache);
1722 }; 1891 };
1723 1892
1724 1893
1725 // Cache for mapping (array, property name) into descriptor index. 1894 // Cache for mapping (array, property name) into descriptor index.
1726 // The cache contains both positive and negative results. 1895 // The cache contains both positive and negative results.
1727 // Descriptor index equals kNotFound means the property is absent. 1896 // Descriptor index equals kNotFound means the property is absent.
1728 // Cleared at startup and prior to any gc. 1897 // Cleared at startup and prior to any gc.
1729 class DescriptorLookupCache { 1898 class DescriptorLookupCache {
1730 public: 1899 public:
1731 // Lookup descriptor index for (map, name). 1900 // Lookup descriptor index for (map, name).
1732 // If absent, kAbsent is returned. 1901 // If absent, kAbsent is returned.
1733 static int Lookup(DescriptorArray* array, String* name) { 1902 int Lookup(DescriptorArray* array, String* name) {
1734 if (!StringShape(name).IsSymbol()) return kAbsent; 1903 if (!StringShape(name).IsSymbol()) return kAbsent;
1735 int index = Hash(array, name); 1904 int index = Hash(array, name);
1736 Key& key = keys_[index]; 1905 Key& key = keys_[index];
1737 if ((key.array == array) && (key.name == name)) return results_[index]; 1906 if ((key.array == array) && (key.name == name)) return results_[index];
1738 return kAbsent; 1907 return kAbsent;
1739 } 1908 }
1740 1909
1741 // Update an element in the cache. 1910 // Update an element in the cache.
1742 static void Update(DescriptorArray* array, String* name, int result) { 1911 void Update(DescriptorArray* array, String* name, int result) {
1743 ASSERT(result != kAbsent); 1912 ASSERT(result != kAbsent);
1744 if (StringShape(name).IsSymbol()) { 1913 if (StringShape(name).IsSymbol()) {
1745 int index = Hash(array, name); 1914 int index = Hash(array, name);
1746 Key& key = keys_[index]; 1915 Key& key = keys_[index];
1747 key.array = array; 1916 key.array = array;
1748 key.name = name; 1917 key.name = name;
1749 results_[index] = result; 1918 results_[index] = result;
1750 } 1919 }
1751 } 1920 }
1752 1921
1753 // Clear the cache. 1922 // Clear the cache.
1754 static void Clear(); 1923 void Clear();
1755 1924
1756 static const int kAbsent = -2; 1925 static const int kAbsent = -2;
1757 private: 1926 private:
1927 DescriptorLookupCache() {
1928 for (int i = 0; i < kLength; ++i) {
1929 keys_[i].array = NULL;
1930 keys_[i].name = NULL;
1931 results_[i] = kAbsent;
1932 }
1933 }
1934
1758 static int Hash(DescriptorArray* array, String* name) { 1935 static int Hash(DescriptorArray* array, String* name) {
1759 // Uses only lower 32 bits if pointers are larger. 1936 // Uses only lower 32 bits if pointers are larger.
1760 uint32_t array_hash = 1937 uint32_t array_hash =
1761 static_cast<uint32_t>(reinterpret_cast<uintptr_t>(array)) >> 2; 1938 static_cast<uint32_t>(reinterpret_cast<uintptr_t>(array)) >> 2;
1762 uint32_t name_hash = 1939 uint32_t name_hash =
1763 static_cast<uint32_t>(reinterpret_cast<uintptr_t>(name)) >> 2; 1940 static_cast<uint32_t>(reinterpret_cast<uintptr_t>(name)) >> 2;
1764 return (array_hash ^ name_hash) % kLength; 1941 return (array_hash ^ name_hash) % kLength;
1765 } 1942 }
1766 1943
1767 static const int kLength = 64; 1944 static const int kLength = 64;
1768 struct Key { 1945 struct Key {
1769 DescriptorArray* array; 1946 DescriptorArray* array;
1770 String* name; 1947 String* name;
1771 }; 1948 };
1772 1949
1773 static Key keys_[kLength]; 1950 Key keys_[kLength];
1774 static int results_[kLength]; 1951 int results_[kLength];
1952
1953 friend class Isolate;
1954 DISALLOW_COPY_AND_ASSIGN(DescriptorLookupCache);
1775 }; 1955 };
1776 1956
1777 1957
1778 // ----------------------------------------------------------------------------
1779 // Marking stack for tracing live objects.
1780
1781 class MarkingStack {
1782 public:
1783 void Initialize(Address low, Address high) {
1784 top_ = low_ = reinterpret_cast<HeapObject**>(low);
1785 high_ = reinterpret_cast<HeapObject**>(high);
1786 overflowed_ = false;
1787 }
1788
1789 bool is_full() { return top_ >= high_; }
1790
1791 bool is_empty() { return top_ <= low_; }
1792
1793 bool overflowed() { return overflowed_; }
1794
1795 void clear_overflowed() { overflowed_ = false; }
1796
1797 // Push the (marked) object on the marking stack if there is room,
1798 // otherwise mark the object as overflowed and wait for a rescan of the
1799 // heap.
1800 void Push(HeapObject* object) {
1801 ASSERT(object->IsHeapObject());
1802 if (is_full()) {
1803 object->SetOverflow();
1804 overflowed_ = true;
1805 } else {
1806 *(top_++) = object;
1807 }
1808 }
1809
1810 HeapObject* Pop() {
1811 ASSERT(!is_empty());
1812 HeapObject* object = *(--top_);
1813 ASSERT(object->IsHeapObject());
1814 return object;
1815 }
1816
1817 HeapObject** low() { return low_; }
1818 HeapObject** top() { return top_; }
1819 void set_top(HeapObject** top) { top_ = top; }
1820
1821 private:
1822 HeapObject** low_;
1823 HeapObject** top_;
1824 HeapObject** high_;
1825 bool overflowed_;
1826 };
1827
1828
1829 // A helper class to document/test C++ scopes where we do not 1958 // A helper class to document/test C++ scopes where we do not
1830 // expect a GC. Usage: 1959 // expect a GC. Usage:
1831 // 1960 //
1832 // /* Allocation not allowed: we cannot handle a GC in this scope. */ 1961 // /* Allocation not allowed: we cannot handle a GC in this scope. */
1833 // { AssertNoAllocation nogc; 1962 // { AssertNoAllocation nogc;
1834 // ... 1963 // ...
1835 // } 1964 // }
1836 1965
1837 #ifdef DEBUG 1966 #ifdef DEBUG
1838 1967
1839 class DisallowAllocationFailure { 1968 class DisallowAllocationFailure {
1840 public: 1969 public:
1841 DisallowAllocationFailure() { 1970 DisallowAllocationFailure() {
1842 old_state_ = Heap::disallow_allocation_failure_; 1971 old_state_ = HEAP->disallow_allocation_failure_;
1843 Heap::disallow_allocation_failure_ = true; 1972 HEAP->disallow_allocation_failure_ = true;
1844 } 1973 }
1845 ~DisallowAllocationFailure() { 1974 ~DisallowAllocationFailure() {
1846 Heap::disallow_allocation_failure_ = old_state_; 1975 HEAP->disallow_allocation_failure_ = old_state_;
1847 } 1976 }
1848 private: 1977 private:
1849 bool old_state_; 1978 bool old_state_;
1850 }; 1979 };
1851 1980
1852 class AssertNoAllocation { 1981 class AssertNoAllocation {
1853 public: 1982 public:
1854 AssertNoAllocation() { 1983 AssertNoAllocation() {
1855 old_state_ = Heap::allow_allocation(false); 1984 old_state_ = HEAP->allow_allocation(false);
1856 } 1985 }
1857 1986
1858 ~AssertNoAllocation() { 1987 ~AssertNoAllocation() {
1859 Heap::allow_allocation(old_state_); 1988 HEAP->allow_allocation(old_state_);
1860 } 1989 }
1861 1990
1862 private: 1991 private:
1863 bool old_state_; 1992 bool old_state_;
1864 }; 1993 };
1865 1994
1866 class DisableAssertNoAllocation { 1995 class DisableAssertNoAllocation {
1867 public: 1996 public:
1868 DisableAssertNoAllocation() { 1997 DisableAssertNoAllocation() {
1869 old_state_ = Heap::allow_allocation(true); 1998 old_state_ = HEAP->allow_allocation(true);
1870 } 1999 }
1871 2000
1872 ~DisableAssertNoAllocation() { 2001 ~DisableAssertNoAllocation() {
1873 Heap::allow_allocation(old_state_); 2002 HEAP->allow_allocation(old_state_);
1874 } 2003 }
1875 2004
1876 private: 2005 private:
1877 bool old_state_; 2006 bool old_state_;
1878 }; 2007 };
1879 2008
1880 #else // ndef DEBUG 2009 #else // ndef DEBUG
1881 2010
1882 class AssertNoAllocation { 2011 class AssertNoAllocation {
1883 public: 2012 public:
(...skipping 36 matching lines...) Expand 10 before | Expand all | Expand 10 after
1920 ASSERT(scope_ < kNumberOfScopes); // scope_ is unsigned. 2049 ASSERT(scope_ < kNumberOfScopes); // scope_ is unsigned.
1921 tracer_->scopes_[scope_] += OS::TimeCurrentMillis() - start_time_; 2050 tracer_->scopes_[scope_] += OS::TimeCurrentMillis() - start_time_;
1922 } 2051 }
1923 2052
1924 private: 2053 private:
1925 GCTracer* tracer_; 2054 GCTracer* tracer_;
1926 ScopeId scope_; 2055 ScopeId scope_;
1927 double start_time_; 2056 double start_time_;
1928 }; 2057 };
1929 2058
1930 GCTracer(); 2059 explicit GCTracer(Heap* heap);
1931 ~GCTracer(); 2060 ~GCTracer();
1932 2061
1933 // Sets the collector. 2062 // Sets the collector.
1934 void set_collector(GarbageCollector collector) { collector_ = collector; } 2063 void set_collector(GarbageCollector collector) { collector_ = collector; }
1935 2064
1936 // Sets the GC count. 2065 // Sets the GC count.
1937 void set_gc_count(unsigned int count) { gc_count_ = count; } 2066 void set_gc_count(unsigned int count) { gc_count_ = count; }
1938 2067
1939 // Sets the full GC count. 2068 // Sets the full GC count.
1940 void set_full_gc_count(int count) { full_gc_count_ = count; } 2069 void set_full_gc_count(int count) { full_gc_count_ = count; }
1941 2070
1942 // Sets the flag that this is a compacting full GC. 2071 // Sets the flag that this is a compacting full GC.
1943 void set_is_compacting() { is_compacting_ = true; } 2072 void set_is_compacting() { is_compacting_ = true; }
1944 bool is_compacting() const { return is_compacting_; } 2073 bool is_compacting() const { return is_compacting_; }
1945 2074
1946 // Increment and decrement the count of marked objects. 2075 // Increment and decrement the count of marked objects.
1947 void increment_marked_count() { ++marked_count_; } 2076 void increment_marked_count() { ++marked_count_; }
1948 void decrement_marked_count() { --marked_count_; } 2077 void decrement_marked_count() { --marked_count_; }
1949 2078
1950 int marked_count() { return marked_count_; } 2079 int marked_count() { return marked_count_; }
1951 2080
1952 void increment_promoted_objects_size(int object_size) { 2081 void increment_promoted_objects_size(int object_size) {
1953 promoted_objects_size_ += object_size; 2082 promoted_objects_size_ += object_size;
1954 } 2083 }
1955 2084
1956 // Returns maximum GC pause.
1957 static int get_max_gc_pause() { return max_gc_pause_; }
1958
1959 // Returns maximum size of objects alive after GC.
1960 static intptr_t get_max_alive_after_gc() { return max_alive_after_gc_; }
1961
1962 // Returns minimal interval between two subsequent collections.
1963 static int get_min_in_mutator() { return min_in_mutator_; }
1964
1965 private: 2085 private:
1966 // Returns a string matching the collector. 2086 // Returns a string matching the collector.
1967 const char* CollectorString(); 2087 const char* CollectorString();
1968 2088
1969 // Returns size of object in heap (in MB). 2089 // Returns size of object in heap (in MB).
1970 double SizeOfHeapObjects() { 2090 double SizeOfHeapObjects() {
1971 return (static_cast<double>(Heap::SizeOfObjects())) / MB; 2091 return (static_cast<double>(HEAP->SizeOfObjects())) / MB;
1972 } 2092 }
1973 2093
1974 double start_time_; // Timestamp set in the constructor. 2094 double start_time_; // Timestamp set in the constructor.
1975 intptr_t start_size_; // Size of objects in heap set in constructor. 2095 intptr_t start_size_; // Size of objects in heap set in constructor.
1976 GarbageCollector collector_; // Type of collector. 2096 GarbageCollector collector_; // Type of collector.
1977 2097
1978 // A count (including this one, eg, the first collection is 1) of the 2098 // A count (including this one, eg, the first collection is 1) of the
1979 // number of garbage collections. 2099 // number of garbage collections.
1980 unsigned int gc_count_; 2100 unsigned int gc_count_;
1981 2101
1982 // A count (including this one) of the number of full garbage collections. 2102 // A count (including this one) of the number of full garbage collections.
1983 int full_gc_count_; 2103 int full_gc_count_;
1984 2104
1985 // True if the current GC is a compacting full collection, false 2105 // True if the current GC is a compacting full collection, false
1986 // otherwise. 2106 // otherwise.
1987 bool is_compacting_; 2107 bool is_compacting_;
1988 2108
1989 // True if the *previous* full GC cwas a compacting collection (will be
1990 // false if there has not been a previous full GC).
1991 bool previous_has_compacted_;
1992
1993 // On a full GC, a count of the number of marked objects. Incremented 2109 // On a full GC, a count of the number of marked objects. Incremented
1994 // when an object is marked and decremented when an object's mark bit is 2110 // when an object is marked and decremented when an object's mark bit is
1995 // cleared. Will be zero on a scavenge collection. 2111 // cleared. Will be zero on a scavenge collection.
1996 int marked_count_; 2112 int marked_count_;
1997 2113
1998 // Amounts of time spent in different scopes during GC. 2114 // Amounts of time spent in different scopes during GC.
1999 double scopes_[Scope::kNumberOfScopes]; 2115 double scopes_[Scope::kNumberOfScopes];
2000 2116
2001 // Total amount of space either wasted or contained in one of free lists 2117 // Total amount of space either wasted or contained in one of free lists
2002 // before the current GC. 2118 // before the current GC.
2003 intptr_t in_free_list_or_wasted_before_gc_; 2119 intptr_t in_free_list_or_wasted_before_gc_;
2004 2120
2005 // Difference between space used in the heap at the beginning of the current 2121 // Difference between space used in the heap at the beginning of the current
2006 // collection and the end of the previous collection. 2122 // collection and the end of the previous collection.
2007 intptr_t allocated_since_last_gc_; 2123 intptr_t allocated_since_last_gc_;
2008 2124
2009 // Amount of time spent in mutator that is time elapsed between end of the 2125 // Amount of time spent in mutator that is time elapsed between end of the
2010 // previous collection and the beginning of the current one. 2126 // previous collection and the beginning of the current one.
2011 double spent_in_mutator_; 2127 double spent_in_mutator_;
2012 2128
2013 // Size of objects promoted during the current collection. 2129 // Size of objects promoted during the current collection.
2014 intptr_t promoted_objects_size_; 2130 intptr_t promoted_objects_size_;
2015 2131
2016 // Incremental marking steps counters. 2132 // Incremental marking steps counters.
2017 int steps_count_; 2133 int steps_count_;
2018 2134
2019 double steps_took_; 2135 double steps_took_;
2020 2136
2021 // Maximum GC pause. 2137 Heap* heap_;
2022 static int max_gc_pause_;
2023
2024 // Maximum size of objects alive after GC.
2025 static intptr_t max_alive_after_gc_;
2026
2027 // Minimal interval between two subsequent collections.
2028 static int min_in_mutator_;
2029
2030 // Size of objects alive after last GC.
2031 static intptr_t alive_after_last_gc_;
2032
2033 static double last_gc_end_timestamp_;
2034 }; 2138 };
2035 2139
2036 2140
2037 class TranscendentalCache { 2141 class TranscendentalCache {
2038 public: 2142 public:
2039 enum Type {ACOS, ASIN, ATAN, COS, EXP, LOG, SIN, TAN, kNumberOfCaches}; 2143 enum Type {ACOS, ASIN, ATAN, COS, EXP, LOG, SIN, TAN, kNumberOfCaches};
2040 static const int kTranscendentalTypeBits = 3; 2144 static const int kTranscendentalTypeBits = 3;
2041 STATIC_ASSERT((1 << kTranscendentalTypeBits) >= kNumberOfCaches); 2145 STATIC_ASSERT((1 << kTranscendentalTypeBits) >= kNumberOfCaches);
2042 2146
2043 explicit TranscendentalCache(Type t);
2044
2045 // Returns a heap number with f(input), where f is a math function specified 2147 // Returns a heap number with f(input), where f is a math function specified
2046 // by the 'type' argument. 2148 // by the 'type' argument.
2047 MUST_USE_RESULT static inline MaybeObject* Get(Type type, double input) { 2149 MUST_USE_RESULT inline MaybeObject* Get(Type type, double input);
2048 TranscendentalCache* cache = caches_[type];
2049 if (cache == NULL) {
2050 caches_[type] = cache = new TranscendentalCache(type);
2051 }
2052 return cache->Get(input);
2053 }
2054 2150
2055 // The cache contains raw Object pointers. This method disposes of 2151 // The cache contains raw Object pointers. This method disposes of
2056 // them before a garbage collection. 2152 // them before a garbage collection.
2057 static void Clear(); 2153 void Clear();
2058 2154
2059 private: 2155 private:
2060 MUST_USE_RESULT inline MaybeObject* Get(double input) { 2156 class SubCache {
2061 Converter c; 2157 static const int kCacheSize = 512;
2062 c.dbl = input; 2158
2063 int hash = Hash(c); 2159 explicit SubCache(Type t);
2064 Element e = elements_[hash]; 2160
2065 if (e.in[0] == c.integers[0] && 2161 MUST_USE_RESULT inline MaybeObject* Get(double input);
2066 e.in[1] == c.integers[1]) { 2162
2067 ASSERT(e.output != NULL); 2163 inline double Calculate(double input);
2068 Counters::transcendental_cache_hit.Increment(); 2164
2069 return e.output; 2165 struct Element {
2166 uint32_t in[2];
2167 Object* output;
2168 };
2169
2170 union Converter {
2171 double dbl;
2172 uint32_t integers[2];
2173 };
2174
2175 inline static int Hash(const Converter& c) {
2176 uint32_t hash = (c.integers[0] ^ c.integers[1]);
2177 hash ^= static_cast<int32_t>(hash) >> 16;
2178 hash ^= static_cast<int32_t>(hash) >> 8;
2179 return (hash & (kCacheSize - 1));
2070 } 2180 }
2071 double answer = Calculate(input); 2181
2072 Counters::transcendental_cache_miss.Increment(); 2182 Element elements_[kCacheSize];
2073 Object* heap_number; 2183 Type type_;
2074 { MaybeObject* maybe_heap_number = Heap::AllocateHeapNumber(answer); 2184 Isolate* isolate_;
2075 if (!maybe_heap_number->ToObject(&heap_number)) return maybe_heap_number; 2185
2076 } 2186 // Allow access to the caches_ array as an ExternalReference.
2077 elements_[hash].in[0] = c.integers[0]; 2187 friend class ExternalReference;
2078 elements_[hash].in[1] = c.integers[1]; 2188 // Inline implementation of the cache.
2079 elements_[hash].output = heap_number; 2189 friend class TranscendentalCacheStub;
2080 return heap_number; 2190 // For evaluating value.
2191 friend class TranscendentalCache;
2192
2193 DISALLOW_COPY_AND_ASSIGN(SubCache);
2194 };
2195
2196 TranscendentalCache() {
2197 for (int i = 0; i < kNumberOfCaches; ++i) caches_[i] = NULL;
2081 } 2198 }
2082 2199
2083 inline double Calculate(double input) { 2200 // Used to create an external reference.
2084 switch (type_) { 2201 inline Address cache_array_address();
2085 case ACOS:
2086 return acos(input);
2087 case ASIN:
2088 return asin(input);
2089 case ATAN:
2090 return atan(input);
2091 case COS:
2092 return cos(input);
2093 case EXP:
2094 return exp(input);
2095 case LOG:
2096 return log(input);
2097 case SIN:
2098 return sin(input);
2099 case TAN:
2100 return tan(input);
2101 default:
2102 return 0.0; // Never happens.
2103 }
2104 }
2105 static const int kCacheSize = 512;
2106 struct Element {
2107 uint32_t in[2];
2108 Object* output;
2109 };
2110 union Converter {
2111 double dbl;
2112 uint32_t integers[2];
2113 };
2114 inline static int Hash(const Converter& c) {
2115 uint32_t hash = (c.integers[0] ^ c.integers[1]);
2116 hash ^= static_cast<int32_t>(hash) >> 16;
2117 hash ^= static_cast<int32_t>(hash) >> 8;
2118 return (hash & (kCacheSize - 1));
2119 }
2120 2202
2121 static Address cache_array_address() { 2203 // Instantiation
2122 // Used to create an external reference. 2204 friend class Isolate;
2123 return reinterpret_cast<Address>(caches_); 2205 // Inline implementation of the caching.
2124 } 2206 friend class TranscendentalCacheStub;
2125
2126 // Allow access to the caches_ array as an ExternalReference. 2207 // Allow access to the caches_ array as an ExternalReference.
2127 friend class ExternalReference; 2208 friend class ExternalReference;
2128 // Inline implementation of the cache.
2129 friend class TranscendentalCacheStub;
2130 2209
2131 static TranscendentalCache* caches_[kNumberOfCaches]; 2210 SubCache* caches_[kNumberOfCaches];
2132 Element elements_[kCacheSize]; 2211 DISALLOW_COPY_AND_ASSIGN(TranscendentalCache);
2133 Type type_;
2134 }; 2212 };
2135 2213
2136 2214
2137 // External strings table is a place where all external strings are
2138 // registered. We need to keep track of such strings to properly
2139 // finalize them.
2140 class ExternalStringTable : public AllStatic {
2141 public:
2142 // Registers an external string.
2143 inline static void AddString(String* string);
2144
2145 inline static void Iterate(ObjectVisitor* v);
2146
2147 // Restores internal invariant and gets rid of collected strings.
2148 // Must be called after each Iterate() that modified the strings.
2149 static void CleanUp();
2150
2151 // Destroys all allocated memory.
2152 static void TearDown();
2153
2154 private:
2155 friend class Heap;
2156
2157 inline static void Verify();
2158
2159 inline static void AddOldString(String* string);
2160
2161 // Notifies the table that only a prefix of the new list is valid.
2162 inline static void ShrinkNewStrings(int position);
2163
2164 // To speed up scavenge collections new space string are kept
2165 // separate from old space strings.
2166 static List<Object*> new_space_strings_;
2167 static List<Object*> old_space_strings_;
2168 };
2169
2170
2171 // Abstract base class for checking whether a weak object should be retained. 2215 // Abstract base class for checking whether a weak object should be retained.
2172 class WeakObjectRetainer { 2216 class WeakObjectRetainer {
2173 public: 2217 public:
2174 virtual ~WeakObjectRetainer() {} 2218 virtual ~WeakObjectRetainer() {}
2175 2219
2176 // Return whether this object should be retained. If NULL is returned the 2220 // Return whether this object should be retained. If NULL is returned the
2177 // object has no references. Otherwise the address of the retained object 2221 // object has no references. Otherwise the address of the retained object
2178 // should be returned as in some GC situations the object has been moved. 2222 // should be returned as in some GC situations the object has been moved.
2179 virtual Object* RetainAs(Object* object) = 0; 2223 virtual Object* RetainAs(Object* object) = 0;
2180 }; 2224 };
(...skipping 89 matching lines...) Expand 10 before | Expand all | Expand 10 after
2270 List<Object*> object_stack_; 2314 List<Object*> object_stack_;
2271 2315
2272 AssertNoAllocation no_alloc; // i.e. no gc allowed. 2316 AssertNoAllocation no_alloc; // i.e. no gc allowed.
2273 2317
2274 DISALLOW_IMPLICIT_CONSTRUCTORS(PathTracer); 2318 DISALLOW_IMPLICIT_CONSTRUCTORS(PathTracer);
2275 }; 2319 };
2276 #endif // DEBUG || LIVE_OBJECT_LIST 2320 #endif // DEBUG || LIVE_OBJECT_LIST
2277 2321
2278 } } // namespace v8::internal 2322 } } // namespace v8::internal
2279 2323
2324 #undef HEAP
2325
2280 #endif // V8_HEAP_H_ 2326 #endif // V8_HEAP_H_
OLDNEW
« no previous file with comments | « src/hashmap.h ('k') | src/heap.cc » ('j') | src/heap.cc » ('J')

Powered by Google App Engine
This is Rietveld 408576698