Chromium Code Reviews

Side by Side Diff: src/heap.h

Issue 6685088: Merge isolates to bleeding_edge. (Closed) Base URL: http://v8.googlecode.com/svn/branches/bleeding_edge/
Patch Set: '' Created 9 years, 9 months ago
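This patch is part of the isolates merge: the previously all-static Heap interface becomes ordinary instance state, so that several isolates in one process can each own a heap. As a rough, hypothetical sketch of the pattern applied throughout this file (simplified stand-in names, not the actual V8 classes):

// Illustrative sketch only -- simplified stand-ins, not the real V8 API.
//
// Before: one process-wide heap reached through static members.
class StaticHeap {
 public:
  static bool ConfigureHeap(int max_semispace_size);  // mutates global state
 private:
  static int max_semispace_size_;
};

// After: heap state lives in an instance owned by an Isolate, and callers
// reach it through an accessor instead of static calls.
class InstanceHeap {
 public:
  bool ConfigureHeap(int max_semispace_size) {
    max_semispace_size_ = max_semispace_size;
    return true;
  }
 private:
  int max_semispace_size_ = 0;  // per-isolate, no longer process-global
};

class Isolate {
 public:
  InstanceHeap* heap() { return &heap_; }
 private:
  InstanceHeap heap_;
};

The TODO(isolates) HEAP macro added near the top of the diffed file appears to be the transitional shim that lets existing call sites keep compiling while they still expect a single global heap.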
OLD | NEW
1 // Copyright 2010 the V8 project authors. All rights reserved. 1 // Copyright 2010 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 14 matching lines...)
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 27
28 #ifndef V8_HEAP_H_ 28 #ifndef V8_HEAP_H_
29 #define V8_HEAP_H_ 29 #define V8_HEAP_H_
30 30
31 #include <math.h> 31 #include <math.h>
32 32
33 #include "globals.h" 33 #include "globals.h"
34 #include "list.h" 34 #include "list.h"
35 #include "mark-compact.h"
35 #include "spaces.h" 36 #include "spaces.h"
36 #include "splay-tree-inl.h" 37 #include "splay-tree-inl.h"
37 #include "v8-counters.h" 38 #include "v8-counters.h"
38 39
39 namespace v8 { 40 namespace v8 {
40 namespace internal { 41 namespace internal {
41 42
43 // TODO(isolates): remove HEAP here
44 #define HEAP (_inline_get_heap_())
45 class Heap;
46 inline Heap* _inline_get_heap_();
47
42 48
43 // Defines all the roots in Heap. 49 // Defines all the roots in Heap.
44 #define STRONG_ROOT_LIST(V) \ 50 #define STRONG_ROOT_LIST(V) \
45 /* Put the byte array map early. We need it to be in place by the time */ \ 51 /* Put the byte array map early. We need it to be in place by the time */ \
46 /* the deserializer hits the next page, since it wants to put a byte */ \ 52 /* the deserializer hits the next page, since it wants to put a byte */ \
47 /* array in the unused space at the end of the page. */ \ 53 /* array in the unused space at the end of the page. */ \
48 V(Map, byte_array_map, ByteArrayMap) \ 54 V(Map, byte_array_map, ByteArrayMap) \
49 V(Map, one_pointer_filler_map, OnePointerFillerMap) \ 55 V(Map, one_pointer_filler_map, OnePointerFillerMap) \
50 V(Map, two_pointer_filler_map, TwoPointerFillerMap) \ 56 V(Map, two_pointer_filler_map, TwoPointerFillerMap) \
51 /* Cluster the most popular ones in a few cache lines here at the top. */ \ 57 /* Cluster the most popular ones in a few cache lines here at the top. */ \
(...skipping 167 matching lines...)
219 "KeyedStoreExternalUnsignedShortArray") \ 225 "KeyedStoreExternalUnsignedShortArray") \
220 V(KeyedStoreExternalIntArray_symbol, "KeyedStoreExternalIntArray") \ 226 V(KeyedStoreExternalIntArray_symbol, "KeyedStoreExternalIntArray") \
221 V(KeyedStoreExternalUnsignedIntArray_symbol, \ 227 V(KeyedStoreExternalUnsignedIntArray_symbol, \
222 "KeyedStoreExternalUnsignedIntArray") \ 228 "KeyedStoreExternalUnsignedIntArray") \
223 V(KeyedStoreExternalFloatArray_symbol, "KeyedStoreExternalFloatArray") \ 229 V(KeyedStoreExternalFloatArray_symbol, "KeyedStoreExternalFloatArray") \
224 V(KeyedStoreExternalPixelArray_symbol, "KeyedStoreExternalPixelArray") 230 V(KeyedStoreExternalPixelArray_symbol, "KeyedStoreExternalPixelArray")
225 231
226 // Forward declarations. 232 // Forward declarations.
227 class GCTracer; 233 class GCTracer;
228 class HeapStats; 234 class HeapStats;
235 class Isolate;
229 class WeakObjectRetainer; 236 class WeakObjectRetainer;
230 237
231 238
232 typedef String* (*ExternalStringTableUpdaterCallback)(Object** pointer); 239 typedef String* (*ExternalStringTableUpdaterCallback)(Heap* heap,
240 Object** pointer);
233 241
234 typedef bool (*DirtyRegionCallback)(Address start, 242 typedef bool (*DirtyRegionCallback)(Heap* heap,
243 Address start,
235 Address end, 244 Address end,
236 ObjectSlotCallback copy_object_func); 245 ObjectSlotCallback copy_object_func);
237 246
238 247
239 // The all static Heap captures the interface to the global object heap. 248 // The all static Heap captures the interface to the global object heap.
240 // All JavaScript contexts by this process share the same object heap. 249 // All JavaScript contexts by this process share the same object heap.
241 250
242 class Heap : public AllStatic { 251 #ifdef DEBUG
252 class HeapDebugUtils;
253 #endif
254
255
256 // A queue of objects promoted during scavenge. Each object is accompanied
257 // by its size to avoid dereferencing a map pointer for scanning.
258 class PromotionQueue {
259 public:
260 PromotionQueue() : front_(NULL), rear_(NULL) { }
261
262 void Initialize(Address start_address) {
263 front_ = rear_ = reinterpret_cast<intptr_t*>(start_address);
264 }
265
266 bool is_empty() { return front_ <= rear_; }
267
268 inline void insert(HeapObject* target, int size);
269
270 void remove(HeapObject** target, int* size) {
271 *target = reinterpret_cast<HeapObject*>(*(--front_));
272 *size = static_cast<int>(*(--front_));
273 // Assert no underflow.
274 ASSERT(front_ >= rear_);
275 }
276
277 private:
278 // The front of the queue is higher in memory than the rear.
279 intptr_t* front_;
280 intptr_t* rear_;
281
282 DISALLOW_COPY_AND_ASSIGN(PromotionQueue);
283 };
284
285
286 // External strings table is a place where all external strings are
287 // registered. We need to keep track of such strings to properly
288 // finalize them.
289 class ExternalStringTable {
290 public:
291 // Registers an external string.
292 inline void AddString(String* string);
293
294 inline void Iterate(ObjectVisitor* v);
295
296 // Restores internal invariant and gets rid of collected strings.
297 // Must be called after each Iterate() that modified the strings.
298 void CleanUp();
299
300 // Destroys all allocated memory.
301 void TearDown();
302
303 private:
304 ExternalStringTable() { }
305
306 friend class Heap;
307
308 inline void Verify();
309
310 inline void AddOldString(String* string);
311
312 // Notifies the table that only a prefix of the new list is valid.
313 inline void ShrinkNewStrings(int position);
314
315 // To speed up scavenge collections, new space strings are kept
316 // separate from old space strings.
317 List<Object*> new_space_strings_;
318 List<Object*> old_space_strings_;
319
320 Heap* heap_;
321
322 DISALLOW_COPY_AND_ASSIGN(ExternalStringTable);
323 };
324
325
326 class Heap {
243 public: 327 public:
244 // Configure heap size before setup. Return false if the heap has been 328 // Configure heap size before setup. Return false if the heap has been
245 // setup already. 329 // setup already.
246 static bool ConfigureHeap(int max_semispace_size, 330 bool ConfigureHeap(int max_semispace_size,
247 int max_old_gen_size, 331 int max_old_gen_size,
248 int max_executable_size); 332 int max_executable_size);
249 static bool ConfigureHeapDefault(); 333 bool ConfigureHeapDefault();
250 334
251 // Initializes the global object heap. If create_heap_objects is true, 335 // Initializes the global object heap. If create_heap_objects is true,
252 // also creates the basic non-mutable objects. 336 // also creates the basic non-mutable objects.
253 // Returns whether it succeeded. 337 // Returns whether it succeeded.
254 static bool Setup(bool create_heap_objects); 338 bool Setup(bool create_heap_objects);
255 339
256 // Destroys all memory allocated by the heap. 340 // Destroys all memory allocated by the heap.
257 static void TearDown(); 341 void TearDown();
258 342
259 // Set the stack limit in the roots_ array. Some architectures generate 343 // Set the stack limit in the roots_ array. Some architectures generate
260 // code that looks here, because it is faster than loading from the static 344 // code that looks here, because it is faster than loading from the static
261 // jslimit_/real_jslimit_ variable in the StackGuard. 345 // jslimit_/real_jslimit_ variable in the StackGuard.
262 static void SetStackLimits(); 346 void SetStackLimits();
263 347
264 // Returns whether Setup has been called. 348 // Returns whether Setup has been called.
265 static bool HasBeenSetup(); 349 bool HasBeenSetup();
266 350
267 // Returns the maximum amount of memory reserved for the heap. For 351 // Returns the maximum amount of memory reserved for the heap. For
268 // the young generation, we reserve 4 times the amount needed for a 352 // the young generation, we reserve 4 times the amount needed for a
269 // semi space. The young generation consists of two semi spaces and 353 // semi space. The young generation consists of two semi spaces and
270 // we reserve twice the amount needed for those in order to ensure 354 // we reserve twice the amount needed for those in order to ensure
271 // that new space can be aligned to its size. 355 // that new space can be aligned to its size.
272 static intptr_t MaxReserved() { 356 intptr_t MaxReserved() {
273 return 4 * reserved_semispace_size_ + max_old_generation_size_; 357 return 4 * reserved_semispace_size_ + max_old_generation_size_;
274 } 358 }
275 static int MaxSemiSpaceSize() { return max_semispace_size_; } 359 int MaxSemiSpaceSize() { return max_semispace_size_; }
276 static int ReservedSemiSpaceSize() { return reserved_semispace_size_; } 360 int ReservedSemiSpaceSize() { return reserved_semispace_size_; }
277 static int InitialSemiSpaceSize() { return initial_semispace_size_; } 361 int InitialSemiSpaceSize() { return initial_semispace_size_; }
278 static intptr_t MaxOldGenerationSize() { return max_old_generation_size_; } 362 intptr_t MaxOldGenerationSize() { return max_old_generation_size_; }
279 static intptr_t MaxExecutableSize() { return max_executable_size_; } 363 intptr_t MaxExecutableSize() { return max_executable_size_; }
280 364
281 // Returns the capacity of the heap in bytes w/o growing. Heap grows when 365 // Returns the capacity of the heap in bytes w/o growing. Heap grows when
282 // more spaces are needed until it reaches the limit. 366 // more spaces are needed until it reaches the limit.
283 static intptr_t Capacity(); 367 intptr_t Capacity();
284 368
285 // Returns the amount of memory currently committed for the heap. 369 // Returns the amount of memory currently committed for the heap.
286 static intptr_t CommittedMemory(); 370 intptr_t CommittedMemory();
287 371
288 // Returns the amount of executable memory currently committed for the heap. 372 // Returns the amount of executable memory currently committed for the heap.
289 static intptr_t CommittedMemoryExecutable(); 373 intptr_t CommittedMemoryExecutable();
290 374
291 // Returns the available bytes in space w/o growing. 375 // Returns the available bytes in space w/o growing.
292 // Heap doesn't guarantee that it can allocate an object that requires 376 // Heap doesn't guarantee that it can allocate an object that requires
293 // all available bytes. Check MaxHeapObjectSize() instead. 377 // all available bytes. Check MaxHeapObjectSize() instead.
294 static intptr_t Available(); 378 intptr_t Available();
295 379
296 // Returns the maximum object size in paged space. 380 // Returns the maximum object size in paged space.
297 static inline int MaxObjectSizeInPagedSpace(); 381 inline int MaxObjectSizeInPagedSpace();
298 382
299 // Returns the size of all objects residing in the heap. 383 // Returns the size of all objects residing in the heap.
300 static intptr_t SizeOfObjects(); 384 intptr_t SizeOfObjects();
301 385
302 // Return the starting address and a mask for the new space. And-masking an 386 // Return the starting address and a mask for the new space. And-masking an
303 // address with the mask will result in the start address of the new space 387 // address with the mask will result in the start address of the new space
304 // for all addresses in either semispace. 388 // for all addresses in either semispace.
305 static Address NewSpaceStart() { return new_space_.start(); } 389 Address NewSpaceStart() { return new_space_.start(); }
306 static uintptr_t NewSpaceMask() { return new_space_.mask(); } 390 uintptr_t NewSpaceMask() { return new_space_.mask(); }
307 static Address NewSpaceTop() { return new_space_.top(); } 391 Address NewSpaceTop() { return new_space_.top(); }
308 392
309 static NewSpace* new_space() { return &new_space_; } 393 NewSpace* new_space() { return &new_space_; }
310 static OldSpace* old_pointer_space() { return old_pointer_space_; } 394 OldSpace* old_pointer_space() { return old_pointer_space_; }
311 static OldSpace* old_data_space() { return old_data_space_; } 395 OldSpace* old_data_space() { return old_data_space_; }
312 static OldSpace* code_space() { return code_space_; } 396 OldSpace* code_space() { return code_space_; }
313 static MapSpace* map_space() { return map_space_; } 397 MapSpace* map_space() { return map_space_; }
314 static CellSpace* cell_space() { return cell_space_; } 398 CellSpace* cell_space() { return cell_space_; }
315 static LargeObjectSpace* lo_space() { return lo_space_; } 399 LargeObjectSpace* lo_space() { return lo_space_; }
316 400
317 static bool always_allocate() { return always_allocate_scope_depth_ != 0; } 401 bool always_allocate() { return always_allocate_scope_depth_ != 0; }
318 static Address always_allocate_scope_depth_address() { 402 Address always_allocate_scope_depth_address() {
319 return reinterpret_cast<Address>(&always_allocate_scope_depth_); 403 return reinterpret_cast<Address>(&always_allocate_scope_depth_);
320 } 404 }
321 static bool linear_allocation() { 405 bool linear_allocation() {
322 return linear_allocation_scope_depth_ != 0; 406 return linear_allocation_scope_depth_ != 0;
323 } 407 }
324 408
325 static Address* NewSpaceAllocationTopAddress() { 409 Address* NewSpaceAllocationTopAddress() {
326 return new_space_.allocation_top_address(); 410 return new_space_.allocation_top_address();
327 } 411 }
328 static Address* NewSpaceAllocationLimitAddress() { 412 Address* NewSpaceAllocationLimitAddress() {
329 return new_space_.allocation_limit_address(); 413 return new_space_.allocation_limit_address();
330 } 414 }
331 415
332 // Uncommit unused semi space. 416 // Uncommit unused semi space.
333 static bool UncommitFromSpace() { return new_space_.UncommitFromSpace(); } 417 bool UncommitFromSpace() { return new_space_.UncommitFromSpace(); }
334 418
335 #ifdef ENABLE_HEAP_PROTECTION 419 #ifdef ENABLE_HEAP_PROTECTION
336 // Protect/unprotect the heap by marking all spaces read-only/writable. 420 // Protect/unprotect the heap by marking all spaces read-only/writable.
337 static void Protect(); 421 void Protect();
338 static void Unprotect(); 422 void Unprotect();
339 #endif 423 #endif
340 424
341 // Allocates and initializes a new JavaScript object based on a 425 // Allocates and initializes a new JavaScript object based on a
342 // constructor. 426 // constructor.
343 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 427 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
344 // failed. 428 // failed.
345 // Please note this does not perform a garbage collection. 429 // Please note this does not perform a garbage collection.
346 MUST_USE_RESULT static MaybeObject* AllocateJSObject( 430 MUST_USE_RESULT MaybeObject* AllocateJSObject(
347 JSFunction* constructor, PretenureFlag pretenure = NOT_TENURED); 431 JSFunction* constructor, PretenureFlag pretenure = NOT_TENURED);
348 432
349 // Allocates and initializes a new global object based on a constructor. 433 // Allocates and initializes a new global object based on a constructor.
350 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 434 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
351 // failed. 435 // failed.
352 // Please note this does not perform a garbage collection. 436 // Please note this does not perform a garbage collection.
353 MUST_USE_RESULT static MaybeObject* AllocateGlobalObject( 437 MUST_USE_RESULT MaybeObject* AllocateGlobalObject(JSFunction* constructor);
354 JSFunction* constructor);
355 438
356 // Returns a deep copy of the JavaScript object. 439 // Returns a deep copy of the JavaScript object.
357 // Properties and elements are copied too. 440 // Properties and elements are copied too.
358 // Returns failure if allocation failed. 441 // Returns failure if allocation failed.
359 MUST_USE_RESULT static MaybeObject* CopyJSObject(JSObject* source); 442 MUST_USE_RESULT MaybeObject* CopyJSObject(JSObject* source);
360 443
361 // Allocates the function prototype. 444 // Allocates the function prototype.
362 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 445 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
363 // failed. 446 // failed.
364 // Please note this does not perform a garbage collection. 447 // Please note this does not perform a garbage collection.
365 MUST_USE_RESULT static MaybeObject* AllocateFunctionPrototype( 448 MUST_USE_RESULT MaybeObject* AllocateFunctionPrototype(JSFunction* function);
366 JSFunction* function);
367 449
368 // Reinitialize a JSGlobalProxy based on a constructor. The object 450 // Reinitialize a JSGlobalProxy based on a constructor. The object
369 // must have the same size as objects allocated using the 451 // must have the same size as objects allocated using the
370 // constructor. The object is reinitialized and behaves as an 452 // constructor. The object is reinitialized and behaves as an
371 // object that has been freshly allocated using the constructor. 453 // object that has been freshly allocated using the constructor.
372 MUST_USE_RESULT static MaybeObject* ReinitializeJSGlobalProxy( 454 MUST_USE_RESULT MaybeObject* ReinitializeJSGlobalProxy(
373 JSFunction* constructor, 455 JSFunction* constructor, JSGlobalProxy* global);
374 JSGlobalProxy* global);
375 456
376 // Allocates and initializes a new JavaScript object based on a map. 457 // Allocates and initializes a new JavaScript object based on a map.
377 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 458 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
378 // failed. 459 // failed.
379 // Please note this does not perform a garbage collection. 460 // Please note this does not perform a garbage collection.
380 MUST_USE_RESULT static MaybeObject* AllocateJSObjectFromMap( 461 MUST_USE_RESULT MaybeObject* AllocateJSObjectFromMap(
381 Map* map, PretenureFlag pretenure = NOT_TENURED); 462 Map* map, PretenureFlag pretenure = NOT_TENURED);
382 463
383 // Allocates a heap object based on the map. 464 // Allocates a heap object based on the map.
384 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 465 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
385 // failed. 466 // failed.
386 // Please note this function does not perform a garbage collection. 467 // Please note this function does not perform a garbage collection.
387 MUST_USE_RESULT static MaybeObject* Allocate(Map* map, AllocationSpace space); 468 MUST_USE_RESULT MaybeObject* Allocate(Map* map, AllocationSpace space);
388 469
389 // Allocates a JS Map in the heap. 470 // Allocates a JS Map in the heap.
390 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 471 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
391 // failed. 472 // failed.
392 // Please note this function does not perform a garbage collection. 473 // Please note this function does not perform a garbage collection.
393 MUST_USE_RESULT static MaybeObject* AllocateMap(InstanceType instance_type, 474 MUST_USE_RESULT MaybeObject* AllocateMap(InstanceType instance_type,
394 int instance_size); 475 int instance_size);
395 476
396 // Allocates a partial map for bootstrapping. 477 // Allocates a partial map for bootstrapping.
397 MUST_USE_RESULT static MaybeObject* AllocatePartialMap( 478 MUST_USE_RESULT MaybeObject* AllocatePartialMap(InstanceType instance_type,
398 InstanceType instance_type, 479 int instance_size);
399 int instance_size);
400 480
401 // Allocate a map for the specified function 481 // Allocate a map for the specified function
402 MUST_USE_RESULT static MaybeObject* AllocateInitialMap(JSFunction* fun); 482 MUST_USE_RESULT MaybeObject* AllocateInitialMap(JSFunction* fun);
403 483
404 // Allocates an empty code cache. 484 // Allocates an empty code cache.
405 MUST_USE_RESULT static MaybeObject* AllocateCodeCache(); 485 MUST_USE_RESULT MaybeObject* AllocateCodeCache();
406 486
407 // Clear the Instanceof cache (used when a prototype changes). 487 // Clear the Instanceof cache (used when a prototype changes).
408 static void ClearInstanceofCache() { 488 inline void ClearInstanceofCache();
409 set_instanceof_cache_function(the_hole_value());
410 }
411 489
412 // Allocates and fully initializes a String. There are two String 490 // Allocates and fully initializes a String. There are two String
413 // encodings: ASCII and two byte. One should choose between the three string 491 // encodings: ASCII and two byte. One should choose between the three string
414 // allocation functions based on the encoding of the string buffer used to 492 // allocation functions based on the encoding of the string buffer used to
415 // initialize the string. 493 // initialize the string.
416 // - ...FromAscii initializes the string from a buffer that is ASCII 494 // - ...FromAscii initializes the string from a buffer that is ASCII
417 // encoded (it does not check that the buffer is ASCII encoded) and the 495 // encoded (it does not check that the buffer is ASCII encoded) and the
418 // result will be ASCII encoded. 496 // result will be ASCII encoded.
419 // - ...FromUTF8 initializes the string from a buffer that is UTF-8 497 // - ...FromUTF8 initializes the string from a buffer that is UTF-8
420 // encoded. If the characters are all single-byte characters, the 498 // encoded. If the characters are all single-byte characters, the
421 // result will be ASCII encoded, otherwise it will be converted to two 499 // result will be ASCII encoded, otherwise it will be converted to two
422 // byte. 500 // byte.
423 // - ...FromTwoByte initializes the string from a buffer that is two-byte 501 // - ...FromTwoByte initializes the string from a buffer that is two-byte
424 // encoded. If the characters are all single-byte characters, the 502 // encoded. If the characters are all single-byte characters, the
425 // result will be converted to ASCII, otherwise it will be left as 503 // result will be converted to ASCII, otherwise it will be left as
426 // two-byte. 504 // two-byte.
427 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 505 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
428 // failed. 506 // failed.
429 // Please note this does not perform a garbage collection. 507 // Please note this does not perform a garbage collection.
430 MUST_USE_RESULT static MaybeObject* AllocateStringFromAscii( 508 MUST_USE_RESULT MaybeObject* AllocateStringFromAscii(
431 Vector<const char> str, 509 Vector<const char> str,
432 PretenureFlag pretenure = NOT_TENURED); 510 PretenureFlag pretenure = NOT_TENURED);
433 MUST_USE_RESULT static inline MaybeObject* AllocateStringFromUtf8( 511 MUST_USE_RESULT inline MaybeObject* AllocateStringFromUtf8(
434 Vector<const char> str, 512 Vector<const char> str,
435 PretenureFlag pretenure = NOT_TENURED); 513 PretenureFlag pretenure = NOT_TENURED);
436 MUST_USE_RESULT static MaybeObject* AllocateStringFromUtf8Slow( 514 MUST_USE_RESULT MaybeObject* AllocateStringFromUtf8Slow(
437 Vector<const char> str, 515 Vector<const char> str,
438 PretenureFlag pretenure = NOT_TENURED); 516 PretenureFlag pretenure = NOT_TENURED);
439 MUST_USE_RESULT static MaybeObject* AllocateStringFromTwoByte( 517 MUST_USE_RESULT MaybeObject* AllocateStringFromTwoByte(
440 Vector<const uc16> str, 518 Vector<const uc16> str,
441 PretenureFlag pretenure = NOT_TENURED); 519 PretenureFlag pretenure = NOT_TENURED);
442 520
443 // Allocates a symbol in old space based on the character stream. 521 // Allocates a symbol in old space based on the character stream.
444 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 522 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
445 // failed. 523 // failed.
446 // Please note this function does not perform a garbage collection. 524 // Please note this function does not perform a garbage collection.
447 MUST_USE_RESULT static inline MaybeObject* AllocateSymbol( 525 MUST_USE_RESULT inline MaybeObject* AllocateSymbol(Vector<const char> str,
448 Vector<const char> str, 526 int chars,
449 int chars, 527 uint32_t hash_field);
450 uint32_t hash_field);
451 528
452 MUST_USE_RESULT static inline MaybeObject* AllocateAsciiSymbol( 529 MUST_USE_RESULT inline MaybeObject* AllocateAsciiSymbol(
453 Vector<const char> str, 530 Vector<const char> str,
454 uint32_t hash_field); 531 uint32_t hash_field);
455 532
456 MUST_USE_RESULT static inline MaybeObject* AllocateTwoByteSymbol( 533 MUST_USE_RESULT inline MaybeObject* AllocateTwoByteSymbol(
457 Vector<const uc16> str, 534 Vector<const uc16> str,
458 uint32_t hash_field); 535 uint32_t hash_field);
459 536
460 MUST_USE_RESULT static MaybeObject* AllocateInternalSymbol( 537 MUST_USE_RESULT MaybeObject* AllocateInternalSymbol(
461 unibrow::CharacterStream* buffer, int chars, uint32_t hash_field); 538 unibrow::CharacterStream* buffer, int chars, uint32_t hash_field);
462 539
463 MUST_USE_RESULT static MaybeObject* AllocateExternalSymbol( 540 MUST_USE_RESULT MaybeObject* AllocateExternalSymbol(
464 Vector<const char> str, 541 Vector<const char> str,
465 int chars); 542 int chars);
466 543
467
468 // Allocates and partially initializes a String. There are two String 544 // Allocates and partially initializes a String. There are two String
469 // encodings: ASCII and two byte. These functions allocate a string of the 545 // encodings: ASCII and two byte. These functions allocate a string of the
470 // given length and set its map and length fields. The characters of the 546 // given length and set its map and length fields. The characters of the
471 // string are uninitialized. 547 // string are uninitialized.
472 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 548 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
473 // failed. 549 // failed.
474 // Please note this does not perform a garbage collection. 550 // Please note this does not perform a garbage collection.
475 MUST_USE_RESULT static MaybeObject* AllocateRawAsciiString( 551 MUST_USE_RESULT MaybeObject* AllocateRawAsciiString(
476 int length, 552 int length,
477 PretenureFlag pretenure = NOT_TENURED); 553 PretenureFlag pretenure = NOT_TENURED);
478 MUST_USE_RESULT static MaybeObject* AllocateRawTwoByteString( 554 MUST_USE_RESULT MaybeObject* AllocateRawTwoByteString(
479 int length, 555 int length,
480 PretenureFlag pretenure = NOT_TENURED); 556 PretenureFlag pretenure = NOT_TENURED);
481 557
482 // Computes a single character string where the character has code. 558 // Computes a single character string where the character has code.
483 // A cache is used for ascii codes. 559 // A cache is used for ascii codes.
484 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 560 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
485 // failed. Please note this does not perform a garbage collection. 561 // failed. Please note this does not perform a garbage collection.
486 MUST_USE_RESULT static MaybeObject* LookupSingleCharacterStringFromCode( 562 MUST_USE_RESULT MaybeObject* LookupSingleCharacterStringFromCode(
487 uint16_t code); 563 uint16_t code);
488 564
489 // Allocate a byte array of the specified length 565 // Allocate a byte array of the specified length
490 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 566 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
491 // failed. 567 // failed.
492 // Please note this does not perform a garbage collection. 568 // Please note this does not perform a garbage collection.
493 MUST_USE_RESULT static MaybeObject* AllocateByteArray(int length, 569 MUST_USE_RESULT MaybeObject* AllocateByteArray(int length,
494 PretenureFlag pretenure); 570 PretenureFlag pretenure);
495 571
496 // Allocate a non-tenured byte array of the specified length 572 // Allocate a non-tenured byte array of the specified length
497 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 573 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
498 // failed. 574 // failed.
499 // Please note this does not perform a garbage collection. 575 // Please note this does not perform a garbage collection.
500 MUST_USE_RESULT static MaybeObject* AllocateByteArray(int length); 576 MUST_USE_RESULT MaybeObject* AllocateByteArray(int length);
501 577
502 // Allocates an external array of the specified length and type. 578 // Allocates an external array of the specified length and type.
503 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 579 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
504 // failed. 580 // failed.
505 // Please note this does not perform a garbage collection. 581 // Please note this does not perform a garbage collection.
506 MUST_USE_RESULT static MaybeObject* AllocateExternalArray( 582 MUST_USE_RESULT MaybeObject* AllocateExternalArray(
507 int length, 583 int length,
508 ExternalArrayType array_type, 584 ExternalArrayType array_type,
509 void* external_pointer, 585 void* external_pointer,
510 PretenureFlag pretenure); 586 PretenureFlag pretenure);
511 587
512 // Allocate a tenured JS global property cell. 588 // Allocate a tenured JS global property cell.
513 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 589 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
514 // failed. 590 // failed.
515 // Please note this does not perform a garbage collection. 591 // Please note this does not perform a garbage collection.
516 MUST_USE_RESULT static MaybeObject* AllocateJSGlobalPropertyCell( 592 MUST_USE_RESULT MaybeObject* AllocateJSGlobalPropertyCell(Object* value);
517 Object* value);
518 593
519 // Allocates a fixed array initialized with undefined values 594 // Allocates a fixed array initialized with undefined values
520 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 595 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
521 // failed. 596 // failed.
522 // Please note this does not perform a garbage collection. 597 // Please note this does not perform a garbage collection.
523 MUST_USE_RESULT static MaybeObject* AllocateFixedArray( 598 MUST_USE_RESULT MaybeObject* AllocateFixedArray(int length,
524 int length, 599 PretenureFlag pretenure);
525 PretenureFlag pretenure);
526 // Allocates a fixed array initialized with undefined values 600 // Allocates a fixed array initialized with undefined values
527 MUST_USE_RESULT static MaybeObject* AllocateFixedArray(int length); 601 MUST_USE_RESULT MaybeObject* AllocateFixedArray(int length);
528 602
529 // Allocates an uninitialized fixed array. It must be filled by the caller. 603 // Allocates an uninitialized fixed array. It must be filled by the caller.
530 // 604 //
531 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 605 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
532 // failed. 606 // failed.
533 // Please note this does not perform a garbage collection. 607 // Please note this does not perform a garbage collection.
534 MUST_USE_RESULT static MaybeObject* AllocateUninitializedFixedArray( 608 MUST_USE_RESULT MaybeObject* AllocateUninitializedFixedArray(int length);
535 int length);
536 609
537 // Make a copy of src and return it. Returns 610 // Make a copy of src and return it. Returns
538 // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed. 611 // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
539 MUST_USE_RESULT static inline MaybeObject* CopyFixedArray(FixedArray* src); 612 MUST_USE_RESULT inline MaybeObject* CopyFixedArray(FixedArray* src);
540 613
541 // Make a copy of src, set the map, and return the copy. Returns 614 // Make a copy of src, set the map, and return the copy. Returns
542 // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed. 615 // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
543 MUST_USE_RESULT static MaybeObject* CopyFixedArrayWithMap(FixedArray* src, 616 MUST_USE_RESULT MaybeObject* CopyFixedArrayWithMap(FixedArray* src, Map* map);
544 Map* map);
545 617
546 // Allocates a fixed array initialized with the hole values. 618 // Allocates a fixed array initialized with the hole values.
547 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 619 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
548 // failed. 620 // failed.
549 // Please note this does not perform a garbage collection. 621 // Please note this does not perform a garbage collection.
550 MUST_USE_RESULT static MaybeObject* AllocateFixedArrayWithHoles( 622 MUST_USE_RESULT MaybeObject* AllocateFixedArrayWithHoles(
551 int length, 623 int length,
552 PretenureFlag pretenure = NOT_TENURED); 624 PretenureFlag pretenure = NOT_TENURED);
553 625
554 // AllocateHashTable is identical to AllocateFixedArray except 626 // AllocateHashTable is identical to AllocateFixedArray except
555 // that the resulting object has hash_table_map as map. 627 // that the resulting object has hash_table_map as map.
556 MUST_USE_RESULT static MaybeObject* AllocateHashTable( 628 MUST_USE_RESULT MaybeObject* AllocateHashTable(
557 int length, PretenureFlag pretenure = NOT_TENURED); 629 int length, PretenureFlag pretenure = NOT_TENURED);
558 630
559 // Allocate a global (but otherwise uninitialized) context. 631 // Allocate a global (but otherwise uninitialized) context.
560 MUST_USE_RESULT static MaybeObject* AllocateGlobalContext(); 632 MUST_USE_RESULT MaybeObject* AllocateGlobalContext();
561 633
562 // Allocate a function context. 634 // Allocate a function context.
563 MUST_USE_RESULT static MaybeObject* AllocateFunctionContext( 635 MUST_USE_RESULT MaybeObject* AllocateFunctionContext(int length,
564 int length, 636 JSFunction* closure);
565 JSFunction* closure);
566 637
567 // Allocate a 'with' context. 638 // Allocate a 'with' context.
568 MUST_USE_RESULT static MaybeObject* AllocateWithContext( 639 MUST_USE_RESULT MaybeObject* AllocateWithContext(Context* previous,
569 Context* previous, 640 JSObject* extension,
570 JSObject* extension, 641 bool is_catch_context);
571 bool is_catch_context);
572 642
573 // Allocates a new utility object in the old generation. 643 // Allocates a new utility object in the old generation.
574 MUST_USE_RESULT static MaybeObject* AllocateStruct(InstanceType type); 644 MUST_USE_RESULT MaybeObject* AllocateStruct(InstanceType type);
575 645
576 // Allocates a function initialized with a shared part. 646 // Allocates a function initialized with a shared part.
577 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 647 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
578 // failed. 648 // failed.
579 // Please note this does not perform a garbage collection. 649 // Please note this does not perform a garbage collection.
580 MUST_USE_RESULT static MaybeObject* AllocateFunction( 650 MUST_USE_RESULT MaybeObject* AllocateFunction(
581 Map* function_map, 651 Map* function_map,
582 SharedFunctionInfo* shared, 652 SharedFunctionInfo* shared,
583 Object* prototype, 653 Object* prototype,
584 PretenureFlag pretenure = TENURED); 654 PretenureFlag pretenure = TENURED);
585 655
586 // Arguments object size. 656 // Arguments object size.
587 static const int kArgumentsObjectSize = 657 static const int kArgumentsObjectSize =
588 JSObject::kHeaderSize + 2 * kPointerSize; 658 JSObject::kHeaderSize + 2 * kPointerSize;
589 // Strict mode arguments has no callee so it is smaller. 659 // Strict mode arguments has no callee so it is smaller.
590 static const int kArgumentsObjectSizeStrict = 660 static const int kArgumentsObjectSizeStrict =
591 JSObject::kHeaderSize + 1 * kPointerSize; 661 JSObject::kHeaderSize + 1 * kPointerSize;
592 // Indices for direct access into argument objects. 662 // Indices for direct access into argument objects.
593 static const int kArgumentsLengthIndex = 0; 663 static const int kArgumentsLengthIndex = 0;
594 // callee is only valid in non-strict mode. 664 // callee is only valid in non-strict mode.
595 static const int kArgumentsCalleeIndex = 1; 665 static const int kArgumentsCalleeIndex = 1;
596 666
597 // Allocates an arguments object - optionally with an elements array. 667 // Allocates an arguments object - optionally with an elements array.
598 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 668 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
599 // failed. 669 // failed.
600 // Please note this does not perform a garbage collection. 670 // Please note this does not perform a garbage collection.
601 MUST_USE_RESULT static MaybeObject* AllocateArgumentsObject(Object* callee, 671 MUST_USE_RESULT MaybeObject* AllocateArgumentsObject(
602 int length); 672 Object* callee, int length);
603 673
604 // Same as NewNumberFromDouble, but may return a preallocated/immutable 674 // Same as NewNumberFromDouble, but may return a preallocated/immutable
605 // number object (e.g., minus_zero_value_, nan_value_) 675 // number object (e.g., minus_zero_value_, nan_value_)
606 MUST_USE_RESULT static MaybeObject* NumberFromDouble( 676 MUST_USE_RESULT MaybeObject* NumberFromDouble(
607 double value, PretenureFlag pretenure = NOT_TENURED); 677 double value, PretenureFlag pretenure = NOT_TENURED);
608 678
609 // Allocates a HeapNumber from value. 679 // Allocates a HeapNumber from value.
610 MUST_USE_RESULT static MaybeObject* AllocateHeapNumber( 680 MUST_USE_RESULT MaybeObject* AllocateHeapNumber(
611 double value, 681 double value,
612 PretenureFlag pretenure); 682 PretenureFlag pretenure);
613 // pretenure = NOT_TENURED. 683 // pretenure = NOT_TENURED
614 MUST_USE_RESULT static MaybeObject* AllocateHeapNumber(double value); 684 MUST_USE_RESULT MaybeObject* AllocateHeapNumber(double value);
615 685
616 // Converts an int into either a Smi or a HeapNumber object. 686 // Converts an int into either a Smi or a HeapNumber object.
617 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 687 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
618 // failed. 688 // failed.
619 // Please note this does not perform a garbage collection. 689 // Please note this does not perform a garbage collection.
620 MUST_USE_RESULT static inline MaybeObject* NumberFromInt32(int32_t value); 690 MUST_USE_RESULT inline MaybeObject* NumberFromInt32(int32_t value);
621 691
622 // Converts an int into either a Smi or a HeapNumber object. 692 // Converts an int into either a Smi or a HeapNumber object.
623 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 693 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
624 // failed. 694 // failed.
625 // Please note this does not perform a garbage collection. 695 // Please note this does not perform a garbage collection.
626 MUST_USE_RESULT static inline MaybeObject* NumberFromUint32(uint32_t value); 696 MUST_USE_RESULT inline MaybeObject* NumberFromUint32(uint32_t value);
627 697
628 // Allocates a new proxy object. 698 // Allocates a new proxy object.
629 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 699 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
630 // failed. 700 // failed.
631 // Please note this does not perform a garbage collection. 701 // Please note this does not perform a garbage collection.
632 MUST_USE_RESULT static MaybeObject* AllocateProxy( 702 MUST_USE_RESULT MaybeObject* AllocateProxy(
633 Address proxy, 703 Address proxy, PretenureFlag pretenure = NOT_TENURED);
634 PretenureFlag pretenure = NOT_TENURED);
635 704
636 // Allocates a new SharedFunctionInfo object. 705 // Allocates a new SharedFunctionInfo object.
637 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 706 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
638 // failed. 707 // failed.
639 // Please note this does not perform a garbage collection. 708 // Please note this does not perform a garbage collection.
640 MUST_USE_RESULT static MaybeObject* AllocateSharedFunctionInfo(Object* name); 709 MUST_USE_RESULT MaybeObject* AllocateSharedFunctionInfo(Object* name);
641 710
642 // Allocates a new JSMessageObject object. 711 // Allocates a new JSMessageObject object.
643 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 712 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
644 // failed. 713 // failed.
645 // Please note that this does not perform a garbage collection. 714 // Please note that this does not perform a garbage collection.
646 MUST_USE_RESULT static MaybeObject* AllocateJSMessageObject( 715 MUST_USE_RESULT MaybeObject* AllocateJSMessageObject(
647 String* type, 716 String* type,
648 JSArray* arguments, 717 JSArray* arguments,
649 int start_position, 718 int start_position,
650 int end_position, 719 int end_position,
651 Object* script, 720 Object* script,
652 Object* stack_trace, 721 Object* stack_trace,
653 Object* stack_frames); 722 Object* stack_frames);
654 723
655 // Allocates a new cons string object. 724 // Allocates a new cons string object.
656 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 725 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
657 // failed. 726 // failed.
658 // Please note this does not perform a garbage collection. 727 // Please note this does not perform a garbage collection.
659 MUST_USE_RESULT static MaybeObject* AllocateConsString(String* first, 728 MUST_USE_RESULT MaybeObject* AllocateConsString(String* first,
660 String* second); 729 String* second);
661 730
662 // Allocates a new sub string object which is a substring of an underlying 731 // Allocates a new sub string object which is a substring of an underlying
663 // string buffer stretching from the index start (inclusive) to the index 732 // string buffer stretching from the index start (inclusive) to the index
664 // end (exclusive). 733 // end (exclusive).
665 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 734 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
666 // failed. 735 // failed.
667 // Please note this does not perform a garbage collection. 736 // Please note this does not perform a garbage collection.
668 MUST_USE_RESULT static MaybeObject* AllocateSubString( 737 MUST_USE_RESULT MaybeObject* AllocateSubString(
669 String* buffer, 738 String* buffer,
670 int start, 739 int start,
671 int end, 740 int end,
672 PretenureFlag pretenure = NOT_TENURED); 741 PretenureFlag pretenure = NOT_TENURED);
673 742
674 // Allocate a new external string object, which is backed by a string 743 // Allocate a new external string object, which is backed by a string
675 // resource that resides outside the V8 heap. 744 // resource that resides outside the V8 heap.
676 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 745 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
677 // failed. 746 // failed.
678 // Please note this does not perform a garbage collection. 747 // Please note this does not perform a garbage collection.
679 MUST_USE_RESULT static MaybeObject* AllocateExternalStringFromAscii( 748 MUST_USE_RESULT MaybeObject* AllocateExternalStringFromAscii(
680 ExternalAsciiString::Resource* resource); 749 ExternalAsciiString::Resource* resource);
681 MUST_USE_RESULT static MaybeObject* AllocateExternalStringFromTwoByte( 750 MUST_USE_RESULT MaybeObject* AllocateExternalStringFromTwoByte(
682 ExternalTwoByteString::Resource* resource); 751 ExternalTwoByteString::Resource* resource);
683 752
684 // Finalizes an external string by deleting the associated external 753 // Finalizes an external string by deleting the associated external
685 // data and clearing the resource pointer. 754 // data and clearing the resource pointer.
686 static inline void FinalizeExternalString(String* string); 755 inline void FinalizeExternalString(String* string);
687 756
688 // Allocates an uninitialized object. The memory is non-executable if the 757 // Allocates an uninitialized object. The memory is non-executable if the
689 // hardware and OS allow. 758 // hardware and OS allow.
690 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 759 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
691 // failed. 760 // failed.
692 // Please note this function does not perform a garbage collection. 761 // Please note this function does not perform a garbage collection.
693 MUST_USE_RESULT static inline MaybeObject* AllocateRaw( 762 MUST_USE_RESULT inline MaybeObject* AllocateRaw(int size_in_bytes,
694 int size_in_bytes, 763 AllocationSpace space,
695 AllocationSpace space, 764 AllocationSpace retry_space);
696 AllocationSpace retry_space);
697 765
698 // Initialize a filler object to keep the ability to iterate over the heap 766 // Initialize a filler object to keep the ability to iterate over the heap
699 // when shortening objects. 767 // when shortening objects.
700 static void CreateFillerObjectAt(Address addr, int size); 768 void CreateFillerObjectAt(Address addr, int size);
701 769
702 // Makes a new native code object 770 // Makes a new native code object
703 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 771 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
704 // failed. On success, the pointer to the Code object is stored in the 772 // failed. On success, the pointer to the Code object is stored in the
705 // self_reference. This allows generated code to reference its own Code 773 // self_reference. This allows generated code to reference its own Code
706 // object by containing this pointer. 774 // object by containing this pointer.
707 // Please note this function does not perform a garbage collection. 775 // Please note this function does not perform a garbage collection.
708 MUST_USE_RESULT static MaybeObject* CreateCode(const CodeDesc& desc, 776 MUST_USE_RESULT MaybeObject* CreateCode(const CodeDesc& desc,
709 Code::Flags flags, 777 Code::Flags flags,
710 Handle<Object> self_reference, 778 Handle<Object> self_reference,
711 bool immovable = false); 779 bool immovable = false);
712 780
713 MUST_USE_RESULT static MaybeObject* CopyCode(Code* code); 781 MUST_USE_RESULT MaybeObject* CopyCode(Code* code);
714 782
715 // Copy the code and scope info part of the code object, but insert 783 // Copy the code and scope info part of the code object, but insert
716 // the provided data as the relocation information. 784 // the provided data as the relocation information.
717 MUST_USE_RESULT static MaybeObject* CopyCode(Code* code, 785 MUST_USE_RESULT MaybeObject* CopyCode(Code* code, Vector<byte> reloc_info);
718 Vector<byte> reloc_info);
719 786
720 // Finds the symbol for string in the symbol table. 787 // Finds the symbol for string in the symbol table.
721 // If not found, a new symbol is added to the table and returned. 788 // If not found, a new symbol is added to the table and returned.
722 // Returns Failure::RetryAfterGC(requested_bytes, space) if allocation 789 // Returns Failure::RetryAfterGC(requested_bytes, space) if allocation
723 // failed. 790 // failed.
724 // Please note this function does not perform a garbage collection. 791 // Please note this function does not perform a garbage collection.
725 MUST_USE_RESULT static MaybeObject* LookupSymbol(Vector<const char> str); 792 MUST_USE_RESULT MaybeObject* LookupSymbol(Vector<const char> str);
726 MUST_USE_RESULT static MaybeObject* LookupAsciiSymbol(Vector<const char> str); 793 MUST_USE_RESULT MaybeObject* LookupAsciiSymbol(Vector<const char> str);
727 MUST_USE_RESULT static MaybeObject* LookupTwoByteSymbol( 794 MUST_USE_RESULT MaybeObject* LookupTwoByteSymbol(
728 Vector<const uc16> str); 795 Vector<const uc16> str);
729 MUST_USE_RESULT static MaybeObject* LookupAsciiSymbol(const char* str) { 796 MUST_USE_RESULT MaybeObject* LookupAsciiSymbol(const char* str) {
730 return LookupSymbol(CStrVector(str)); 797 return LookupSymbol(CStrVector(str));
731 } 798 }
732 MUST_USE_RESULT static MaybeObject* LookupSymbol(String* str); 799 MUST_USE_RESULT MaybeObject* LookupSymbol(String* str);
733 static bool LookupSymbolIfExists(String* str, String** symbol); 800 bool LookupSymbolIfExists(String* str, String** symbol);
734 static bool LookupTwoCharsSymbolIfExists(String* str, String** symbol); 801 bool LookupTwoCharsSymbolIfExists(String* str, String** symbol);
735 802
736 // Compute the matching symbol map for a string if possible. 803 // Compute the matching symbol map for a string if possible.
737 // NULL is returned if string is in new space or not flattened. 804 // NULL is returned if string is in new space or not flattened.
738 static Map* SymbolMapForString(String* str); 805 Map* SymbolMapForString(String* str);
739 806
740 // Tries to flatten a string before compare operation. 807 // Tries to flatten a string before compare operation.
741 // 808 //
742 // Returns a failure in case it was decided that flattening was 809 // Returns a failure in case it was decided that flattening was
743 // necessary and failed. Note, if flattening is not necessary the 810 // necessary and failed. Note, if flattening is not necessary the
744 // string might stay non-flat even when no failure is returned. 811 // string might stay non-flat even when no failure is returned.
745 // 812 //
746 // Please note this function does not perform a garbage collection. 813 // Please note this function does not perform a garbage collection.
747 MUST_USE_RESULT static inline MaybeObject* PrepareForCompare(String* str); 814 MUST_USE_RESULT inline MaybeObject* PrepareForCompare(String* str);
748 815
749 // Converts the given boolean condition to JavaScript boolean value. 816 // Converts the given boolean condition to JavaScript boolean value.
750 static Object* ToBoolean(bool condition) { 817 inline Object* ToBoolean(bool condition);
751 return condition ? true_value() : false_value();
752 }
753 818
754 // Code that should be run before and after each GC. Includes some 819 // Code that should be run before and after each GC. Includes some
755 // reporting/verification activities when compiled with DEBUG set. 820 // reporting/verification activities when compiled with DEBUG set.
756 static void GarbageCollectionPrologue(); 821 void GarbageCollectionPrologue();
757 static void GarbageCollectionEpilogue(); 822 void GarbageCollectionEpilogue();
758 823
759 // Performs garbage collection operation. 824 // Performs garbage collection operation.
760 // Returns whether there is a chance that another major GC could 825 // Returns whether there is a chance that another major GC could
761 // collect more garbage. 826 // collect more garbage.
762 static bool CollectGarbage(AllocationSpace space, GarbageCollector collector); 827 bool CollectGarbage(AllocationSpace space, GarbageCollector collector);
763 828
764 // Performs garbage collection operation. 829 // Performs garbage collection operation.
765 // Returns whether there is a chance that another major GC could 830 // Returns whether there is a chance that another major GC could
766 // collect more garbage. 831 // collect more garbage.
767 inline static bool CollectGarbage(AllocationSpace space); 832 inline bool CollectGarbage(AllocationSpace space);
768 833
769 // Performs a full garbage collection. Force compaction if the 834 // Performs a full garbage collection. Force compaction if the
770 // parameter is true. 835 // parameter is true.
771 static void CollectAllGarbage(bool force_compaction); 836 void CollectAllGarbage(bool force_compaction);
772 837
773 // Last hope GC, should try to squeeze as much as possible. 838 // Last hope GC, should try to squeeze as much as possible.
774 static void CollectAllAvailableGarbage(); 839 void CollectAllAvailableGarbage();
775 840
776 // Notify the heap that a context has been disposed. 841 // Notify the heap that a context has been disposed.
777 static int NotifyContextDisposed() { return ++contexts_disposed_; } 842 int NotifyContextDisposed() { return ++contexts_disposed_; }
778 843
779 // Utility to invoke the scavenger. This is needed in test code to 844 // Utility to invoke the scavenger. This is needed in test code to
780 // ensure correct callback for weak global handles. 845 // ensure correct callback for weak global handles.
781 static void PerformScavenge(); 846 void PerformScavenge();
847
848 PromotionQueue* promotion_queue() { return &promotion_queue_; }
782 849
783 #ifdef DEBUG 850 #ifdef DEBUG
784 // Utility used with flag gc-greedy. 851 // Utility used with flag gc-greedy.
785 static void GarbageCollectionGreedyCheck(); 852 void GarbageCollectionGreedyCheck();
786 #endif 853 #endif
787 854
788 static void AddGCPrologueCallback( 855 void AddGCPrologueCallback(
789 GCEpilogueCallback callback, GCType gc_type_filter); 856 GCEpilogueCallback callback, GCType gc_type_filter);
790 static void RemoveGCPrologueCallback(GCEpilogueCallback callback); 857 void RemoveGCPrologueCallback(GCEpilogueCallback callback);
791 858
792 static void AddGCEpilogueCallback( 859 void AddGCEpilogueCallback(
793 GCEpilogueCallback callback, GCType gc_type_filter); 860 GCEpilogueCallback callback, GCType gc_type_filter);
794 static void RemoveGCEpilogueCallback(GCEpilogueCallback callback); 861 void RemoveGCEpilogueCallback(GCEpilogueCallback callback);
795 862
796 static void SetGlobalGCPrologueCallback(GCCallback callback) { 863 void SetGlobalGCPrologueCallback(GCCallback callback) {
797 ASSERT((callback == NULL) ^ (global_gc_prologue_callback_ == NULL)); 864 ASSERT((callback == NULL) ^ (global_gc_prologue_callback_ == NULL));
798 global_gc_prologue_callback_ = callback; 865 global_gc_prologue_callback_ = callback;
799 } 866 }
800 static void SetGlobalGCEpilogueCallback(GCCallback callback) { 867 void SetGlobalGCEpilogueCallback(GCCallback callback) {
801 ASSERT((callback == NULL) ^ (global_gc_epilogue_callback_ == NULL)); 868 ASSERT((callback == NULL) ^ (global_gc_epilogue_callback_ == NULL));
802 global_gc_epilogue_callback_ = callback; 869 global_gc_epilogue_callback_ = callback;
803 } 870 }
804 871
805 // Heap root getters. We have versions with and without type::cast() here. 872 // Heap root getters. We have versions with and without type::cast() here.
806 // You can't use type::cast during GC because the assert fails. 873 // You can't use type::cast during GC because the assert fails.
807 #define ROOT_ACCESSOR(type, name, camel_name) \ 874 #define ROOT_ACCESSOR(type, name, camel_name) \
808 static inline type* name() { \ 875 type* name() { \
809 return type::cast(roots_[k##camel_name##RootIndex]); \ 876 return type::cast(roots_[k##camel_name##RootIndex]); \
810 } \ 877 } \
811 static inline type* raw_unchecked_##name() { \ 878 type* raw_unchecked_##name() { \
812 return reinterpret_cast<type*>(roots_[k##camel_name##RootIndex]); \ 879 return reinterpret_cast<type*>(roots_[k##camel_name##RootIndex]); \
813 } 880 }
814 ROOT_LIST(ROOT_ACCESSOR) 881 ROOT_LIST(ROOT_ACCESSOR)
815 #undef ROOT_ACCESSOR 882 #undef ROOT_ACCESSOR
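To make the macro change concrete, a sketch of what a single ROOT_ACCESSOR expansion looks like after dropping static/inline, using the heap_number_map root as an example:

    Map* heap_number_map() {
      return Map::cast(roots_[kHeapNumberMapRootIndex]);
    }
    Map* raw_unchecked_heap_number_map() {
      return reinterpret_cast<Map*>(roots_[kHeapNumberMapRootIndex]);
    }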
816 883
817 // Utility type maps 884 // Utility type maps
818 #define STRUCT_MAP_ACCESSOR(NAME, Name, name) \ 885 #define STRUCT_MAP_ACCESSOR(NAME, Name, name) \
819 static inline Map* name##_map() { \ 886 Map* name##_map() { \
820 return Map::cast(roots_[k##Name##MapRootIndex]); \ 887 return Map::cast(roots_[k##Name##MapRootIndex]); \
821 } 888 }
822 STRUCT_LIST(STRUCT_MAP_ACCESSOR) 889 STRUCT_LIST(STRUCT_MAP_ACCESSOR)
823 #undef STRUCT_MAP_ACCESSOR 890 #undef STRUCT_MAP_ACCESSOR
824 891
825 #define SYMBOL_ACCESSOR(name, str) static inline String* name() { \ 892 #define SYMBOL_ACCESSOR(name, str) String* name() { \
826 return String::cast(roots_[k##name##RootIndex]); \ 893 return String::cast(roots_[k##name##RootIndex]); \
827 } 894 }
828 SYMBOL_LIST(SYMBOL_ACCESSOR) 895 SYMBOL_LIST(SYMBOL_ACCESSOR)
829 #undef SYMBOL_ACCESSOR 896 #undef SYMBOL_ACCESSOR
830 897
831 // The hidden_symbol is special because it is the empty string, but does 898 // The hidden_symbol is special because it is the empty string, but does
832 // not match the empty string. 899 // not match the empty string.
833 static String* hidden_symbol() { return hidden_symbol_; } 900 String* hidden_symbol() { return hidden_symbol_; }
834 901
835 static void set_global_contexts_list(Object* object) { 902 void set_global_contexts_list(Object* object) {
836 global_contexts_list_ = object; 903 global_contexts_list_ = object;
837 } 904 }
838 static Object* global_contexts_list() { return global_contexts_list_; } 905 Object* global_contexts_list() { return global_contexts_list_; }
839 906
840 // Iterates over all roots in the heap. 907 // Iterates over all roots in the heap.
841 static void IterateRoots(ObjectVisitor* v, VisitMode mode); 908 void IterateRoots(ObjectVisitor* v, VisitMode mode);
842 // Iterates over all strong roots in the heap. 909 // Iterates over all strong roots in the heap.
843 static void IterateStrongRoots(ObjectVisitor* v, VisitMode mode); 910 void IterateStrongRoots(ObjectVisitor* v, VisitMode mode);
844 // Iterates over all the other roots in the heap. 911 // Iterates over all the other roots in the heap.
845 static void IterateWeakRoots(ObjectVisitor* v, VisitMode mode); 912 void IterateWeakRoots(ObjectVisitor* v, VisitMode mode);
846 913
847 enum ExpectedPageWatermarkState { 914 enum ExpectedPageWatermarkState {
848 WATERMARK_SHOULD_BE_VALID, 915 WATERMARK_SHOULD_BE_VALID,
849 WATERMARK_CAN_BE_INVALID 916 WATERMARK_CAN_BE_INVALID
850 }; 917 };
851 918
852 // For each dirty region on a page in use from an old space, call the 919 // For each dirty region on a page in use from an old space, call the
853 // visit_dirty_region callback. 920 // visit_dirty_region callback.
854 // If either visit_dirty_region or callback can cause an allocation 921 // If either visit_dirty_region or callback can cause an allocation
855 // in old space and a change in the allocation watermark, then 922 // in old space and a change in the allocation watermark, then
856 // can_preallocate_during_iteration should be set to true. 923 // can_preallocate_during_iteration should be set to true.
857 // All pages will be marked as having invalid watermark upon 924 // All pages will be marked as having invalid watermark upon
858 // iteration completion. 925 // iteration completion.
859 static void IterateDirtyRegions( 926 void IterateDirtyRegions(
860 PagedSpace* space, 927 PagedSpace* space,
861 DirtyRegionCallback visit_dirty_region, 928 DirtyRegionCallback visit_dirty_region,
862 ObjectSlotCallback callback, 929 ObjectSlotCallback callback,
863 ExpectedPageWatermarkState expected_page_watermark_state); 930 ExpectedPageWatermarkState expected_page_watermark_state);
864 931
865 // Interpret marks as a bitvector of dirty marks for regions of size 932 // Interpret marks as a bitvector of dirty marks for regions of size
866 // Page::kRegionSize aligned by Page::kRegionAlignmentMask and covering 933 // Page::kRegionSize aligned by Page::kRegionAlignmentMask and covering
867 // the memory interval from start to end. For each dirty region call the 934 // the memory interval from start to end. For each dirty region call the
868 // visit_dirty_region callback. Return the updated bitvector of dirty marks. 935 // visit_dirty_region callback. Return the updated bitvector of dirty marks.
869 static uint32_t IterateDirtyRegions(uint32_t marks, 936 uint32_t IterateDirtyRegions(uint32_t marks,
870 Address start, 937 Address start,
871 Address end, 938 Address end,
872 DirtyRegionCallback visit_dirty_region, 939 DirtyRegionCallback visit_dirty_region,
873 ObjectSlotCallback callback); 940 ObjectSlotCallback callback);
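The marks behave like a small card table: bit i covers the i-th Page::kRegionSize-aligned region of the interval, and only set bits are visited. A standalone sketch of that bit walk, with simplified stand-ins for the real Address, Page::kRegionSize and DirtyRegionCallback types:

    #include <stdint.h>
    #include <stddef.h>

    typedef uintptr_t Address;
    typedef bool (*RegionVisitor)(Address start, Address end);

    // Returns the updated bitvector: a mark survives only if the visitor
    // reports that the region still contains interesting pointers.
    uint32_t VisitDirtyRegions(uint32_t marks, Address start, Address end,
                               size_t region_size, RegionVisitor visit) {
      uint32_t new_marks = 0;
      uint32_t bit = 1;
      for (Address region = start; region < end; region += region_size, bit <<= 1) {
        if (marks & bit) {
          Address region_end = region + region_size < end ? region + region_size : end;
          if (visit(region, region_end)) new_marks |= bit;
        }
      }
      return new_marks;
    }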
874 941
875 // Iterate pointers to the from semispace of new space found in the memory 942 // Iterate pointers to the from semispace of new space found in the memory
876 // interval from start to end. 943 // interval from start to end.
877 // Update dirty marks for page containing start address. 944 // Update dirty marks for page containing start address.
878 static void IterateAndMarkPointersToFromSpace(Address start, 945 void IterateAndMarkPointersToFromSpace(Address start,
879 Address end, 946 Address end,
880 ObjectSlotCallback callback); 947 ObjectSlotCallback callback);
881 948
882 // Iterate pointers to new space found in memory interval from start to end. 949 // Iterate pointers to new space found in memory interval from start to end.
883 // Return true if pointers to new space were found. 950 // Return true if pointers to new space were found.
884 static bool IteratePointersInDirtyRegion(Address start, 951 static bool IteratePointersInDirtyRegion(Heap* heap,
952 Address start,
885 Address end, 953 Address end,
886 ObjectSlotCallback callback); 954 ObjectSlotCallback callback);
887 955
888 956
889 // Iterate pointers to new space found in memory interval from start to end. 957 // Iterate pointers to new space found in memory interval from start to end.
890 // This interval is considered to belong to the map space. 958 // This interval is considered to belong to the map space.
891 // Return true if pointers to new space were found. 959 // Return true if pointers to new space were found.
892 static bool IteratePointersInDirtyMapsRegion(Address start, 960 static bool IteratePointersInDirtyMapsRegion(Heap* heap,
961 Address start,
893 Address end, 962 Address end,
894 ObjectSlotCallback callback); 963 ObjectSlotCallback callback);
895 964
896 965
897 // Returns whether the object resides in new space. 966 // Returns whether the object resides in new space.
898 static inline bool InNewSpace(Object* object); 967 inline bool InNewSpace(Object* object);
899 static inline bool InFromSpace(Object* object); 968 inline bool InFromSpace(Object* object);
900 static inline bool InToSpace(Object* object); 969 inline bool InToSpace(Object* object);
901 970
902 // Checks whether an address/object is in the heap (including the 971 // Checks whether an address/object is in the heap (including the
903 // auxiliary area and unused area). 972 // auxiliary area and unused area).
904 static bool Contains(Address addr); 973 bool Contains(Address addr);
905 static bool Contains(HeapObject* value); 974 bool Contains(HeapObject* value);
906 975
907 // Checks whether an address/object is in a space. 976 // Checks whether an address/object is in a space.
908 // Currently used by tests, serialization and heap verification only. 977 // Currently used by tests, serialization and heap verification only.
909 static bool InSpace(Address addr, AllocationSpace space); 978 bool InSpace(Address addr, AllocationSpace space);
910 static bool InSpace(HeapObject* value, AllocationSpace space); 979 bool InSpace(HeapObject* value, AllocationSpace space);
911 980
912 // Finds out which space an object should get promoted to based on its type. 981 // Finds out which space an object should get promoted to based on its type.
913 static inline OldSpace* TargetSpace(HeapObject* object); 982 inline OldSpace* TargetSpace(HeapObject* object);
914 static inline AllocationSpace TargetSpaceId(InstanceType type); 983 inline AllocationSpace TargetSpaceId(InstanceType type);
915 984
916 // Sets the stub_cache_ (only used when expanding the dictionary). 985 // Sets the stub_cache_ (only used when expanding the dictionary).
917 static void public_set_code_stubs(NumberDictionary* value) { 986 void public_set_code_stubs(NumberDictionary* value) {
918 roots_[kCodeStubsRootIndex] = value; 987 roots_[kCodeStubsRootIndex] = value;
919 } 988 }
920 989
921 // Support for computing object sizes for old objects during GCs. Returns 990 // Support for computing object sizes for old objects during GCs. Returns
922 // a function that is guaranteed to be safe for computing object sizes in 991 // a function that is guaranteed to be safe for computing object sizes in
923 // the current GC phase. 992 // the current GC phase.
924 static HeapObjectCallback GcSafeSizeOfOldObjectFunction() { 993 HeapObjectCallback GcSafeSizeOfOldObjectFunction() {
925 return gc_safe_size_of_old_object_; 994 return gc_safe_size_of_old_object_;
926 } 995 }
927 996
928 // Sets the non_monomorphic_cache_ (only used when expanding the dictionary). 997 // Sets the non_monomorphic_cache_ (only used when expanding the dictionary).
929 static void public_set_non_monomorphic_cache(NumberDictionary* value) { 998 void public_set_non_monomorphic_cache(NumberDictionary* value) {
930 roots_[kNonMonomorphicCacheRootIndex] = value; 999 roots_[kNonMonomorphicCacheRootIndex] = value;
931 } 1000 }
932 1001
933 static void public_set_empty_script(Script* script) { 1002 void public_set_empty_script(Script* script) {
934 roots_[kEmptyScriptRootIndex] = script; 1003 roots_[kEmptyScriptRootIndex] = script;
935 } 1004 }
936 1005
937 // Update the next script id. 1006 // Update the next script id.
938 static inline void SetLastScriptId(Object* last_script_id); 1007 inline void SetLastScriptId(Object* last_script_id);
939 1008
940 // Generated code can embed this address to get access to the roots. 1009 // Generated code can embed this address to get access to the roots.
941 static Object** roots_address() { return roots_; } 1010 Object** roots_address() { return roots_; }
942 1011
943 // Get address of global contexts list for serialization support. 1012 // Get address of global contexts list for serialization support.
944 static Object** global_contexts_list_address() { 1013 Object** global_contexts_list_address() {
945 return &global_contexts_list_; 1014 return &global_contexts_list_;
946 } 1015 }
947 1016
948 #ifdef DEBUG 1017 #ifdef DEBUG
949 static void Print(); 1018 void Print();
950 static void PrintHandles(); 1019 void PrintHandles();
951 1020
952 // Verify the heap is in its normal state before or after a GC. 1021 // Verify the heap is in its normal state before or after a GC.
953 static void Verify(); 1022 void Verify();
954 1023
955 // Report heap statistics. 1024 // Report heap statistics.
956 static void ReportHeapStatistics(const char* title); 1025 void ReportHeapStatistics(const char* title);
957 static void ReportCodeStatistics(const char* title); 1026 void ReportCodeStatistics(const char* title);
958 1027
959 // Fill in bogus values in from space 1028 // Fill in bogus values in from space
960 static void ZapFromSpace(); 1029 void ZapFromSpace();
961 #endif 1030 #endif
962 1031
963 #if defined(ENABLE_LOGGING_AND_PROFILING) 1032 #if defined(ENABLE_LOGGING_AND_PROFILING)
964 // Print short heap statistics. 1033 // Print short heap statistics.
965 static void PrintShortHeapStatistics(); 1034 void PrintShortHeapStatistics();
966 #endif 1035 #endif
967 1036
968 // Makes a new symbol object 1037 // Makes a new symbol object
969 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation 1038 // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
970 // failed. 1039 // failed.
971 // Please note this function does not perform a garbage collection. 1040 // Please note this function does not perform a garbage collection.
972 MUST_USE_RESULT static MaybeObject* CreateSymbol(const char* str, 1041 MUST_USE_RESULT MaybeObject* CreateSymbol(
973 int length, 1042 const char* str, int length, int hash);
974 int hash); 1043 MUST_USE_RESULT MaybeObject* CreateSymbol(String* str);
975 MUST_USE_RESULT static MaybeObject* CreateSymbol(String* str);
976 1044
977 // Write barrier support for address[offset] = o. 1045 // Write barrier support for address[offset] = o.
978 static inline void RecordWrite(Address address, int offset); 1046 inline void RecordWrite(Address address, int offset);
979 1047
980 // Write barrier support for address[start : start + len[ = o. 1048 // Write barrier support for address[start : start + len[ = o.
981 static inline void RecordWrites(Address address, int start, int len); 1049 inline void RecordWrites(Address address, int start, int len);
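A hedged sketch of the write-barrier protocol these declarations describe: after storing a heap pointer into an old-space object at a given offset, the store site records the write so the dirty-region marks stay conservative (the object and offsets below are placeholders):

    // Illustrative only; 'obj' is some old-space HeapObject* and 'offset'
    // the byte offset of the field that was just written.
    heap->RecordWrite(obj->address(), offset);
    // For a run of consecutive writes, e.g. filling part of a FixedArray:
    heap->RecordWrites(obj->address(), first_offset, number_of_fields);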
982 1050
983 // Given an address occupied by a live code object, return that object. 1051 // Given an address occupied by a live code object, return that object.
984 static Object* FindCodeObject(Address a); 1052 Object* FindCodeObject(Address a);
985 1053
986 // Invoke Shrink on shrinkable spaces. 1054 // Invoke Shrink on shrinkable spaces.
987 static void Shrink(); 1055 void Shrink();
988 1056
989 enum HeapState { NOT_IN_GC, SCAVENGE, MARK_COMPACT }; 1057 enum HeapState { NOT_IN_GC, SCAVENGE, MARK_COMPACT };
990 static inline HeapState gc_state() { return gc_state_; } 1058 inline HeapState gc_state() { return gc_state_; }
991 1059
992 #ifdef DEBUG 1060 #ifdef DEBUG
993 static bool IsAllocationAllowed() { return allocation_allowed_; } 1061 bool IsAllocationAllowed() { return allocation_allowed_; }
994 static inline bool allow_allocation(bool enable); 1062 inline bool allow_allocation(bool enable);
995 1063
996 static bool disallow_allocation_failure() { 1064 bool disallow_allocation_failure() {
997 return disallow_allocation_failure_; 1065 return disallow_allocation_failure_;
998 } 1066 }
999 1067
1000 static void TracePathToObject(Object* target); 1068 void TracePathToObject(Object* target);
1001 static void TracePathToGlobal(); 1069 void TracePathToGlobal();
1002 #endif 1070 #endif
1003 1071
1004 // Callback function passed to Heap::Iterate etc. Copies an object if 1072 // Callback function passed to Heap::Iterate etc. Copies an object if
1005 // necessary, the object might be promoted to an old space. The caller must 1073 // necessary, the object might be promoted to an old space. The caller must
1006 // ensure the precondition that the object is (a) a heap object and (b) in 1074 // ensure the precondition that the object is (a) a heap object and (b) in
1007 // the heap's from space. 1075 // the heap's from space.
1008 static void ScavengePointer(HeapObject** p); 1076 static inline void ScavengePointer(HeapObject** p);
1009 static inline void ScavengeObject(HeapObject** p, HeapObject* object); 1077 static inline void ScavengeObject(HeapObject** p, HeapObject* object);
1010 1078
1011 // Commits from space if it is uncommitted. 1079 // Commits from space if it is uncommitted.
1012 static void EnsureFromSpaceIsCommitted(); 1080 void EnsureFromSpaceIsCommitted();
1013 1081
1014 // Support for partial snapshots. After calling this we can allocate a 1082 // Support for partial snapshots. After calling this we can allocate a
1015 // certain number of bytes using only linear allocation (with a 1083 // certain number of bytes using only linear allocation (with a
1016 // LinearAllocationScope and an AlwaysAllocateScope) without using freelists 1084 // LinearAllocationScope and an AlwaysAllocateScope) without using freelists
1017 // or causing a GC. It returns true if space was reserved or false if a GC is 1085 // or causing a GC. It returns true if space was reserved or false if a GC is
1018 // needed. For paged spaces the space requested must include the space wasted 1086 // needed. For paged spaces the space requested must include the space wasted
1019 // at the end of each page when allocating linearly. 1087 // at the end of each page when allocating linearly.
1020 static void ReserveSpace( 1088 void ReserveSpace(
1021 int new_space_size, 1089 int new_space_size,
1022 int pointer_space_size, 1090 int pointer_space_size,
1023 int data_space_size, 1091 int data_space_size,
1024 int code_space_size, 1092 int code_space_size,
1025 int map_space_size, 1093 int map_space_size,
1026 int cell_space_size, 1094 int cell_space_size,
1027 int large_object_size); 1095 int large_object_size);
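A hedged sketch of a ReserveSpace call; the byte counts are placeholders, since the real caller (the partial-snapshot deserializer) derives them from the snapshot being loaded:

    heap->ReserveSpace(
        64 * KB,    // new space
        512 * KB,   // old pointer space
        256 * KB,   // old data space
        512 * KB,   // code space
        64 * KB,    // map space
        32 * KB,    // cell space
        128 * KB);  // large object space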
1028 1096
1029 // 1097 //
1030 // Support for the API. 1098 // Support for the API.
1031 // 1099 //
1032 1100
1033 static bool CreateApiObjects(); 1101 bool CreateApiObjects();
1034 1102
1035 // Attempt to find the number in a small cache. If we find it, return 1103 // Attempt to find the number in a small cache. If we find it, return
1036 // the string representation of the number. Otherwise return undefined. 1104 // the string representation of the number. Otherwise return undefined.
1037 static Object* GetNumberStringCache(Object* number); 1105 Object* GetNumberStringCache(Object* number);
1038 1106
1039 // Update the cache with a new number-string pair. 1107 // Update the cache with a new number-string pair.
1040 static void SetNumberStringCache(Object* number, String* str); 1108 void SetNumberStringCache(Object* number, String* str);
1041 1109
1042 // Adjusts the amount of registered external memory. 1110 // Adjusts the amount of registered external memory.
1043 // Returns the adjusted value. 1111 // Returns the adjusted value.
1044 static inline int AdjustAmountOfExternalAllocatedMemory(int change_in_bytes); 1112 inline int AdjustAmountOfExternalAllocatedMemory(int change_in_bytes);
1045 1113
1046 // Allocate uninitialized fixed array. 1114 // Allocate uninitialized fixed array.
1047 MUST_USE_RESULT static MaybeObject* AllocateRawFixedArray(int length); 1115 MUST_USE_RESULT MaybeObject* AllocateRawFixedArray(int length);
1048 MUST_USE_RESULT static MaybeObject* AllocateRawFixedArray( 1116 MUST_USE_RESULT MaybeObject* AllocateRawFixedArray(int length,
1049 int length, 1117 PretenureFlag pretenure);
1050 PretenureFlag pretenure);
1051 1118
1052 // True if we have reached the allocation limit in the old generation that 1119 // True if we have reached the allocation limit in the old generation that
1053 // should force the next GC (caused normally) to be a full one. 1120 // should force the next GC (caused normally) to be a full one.
1054 static bool OldGenerationPromotionLimitReached() { 1121 bool OldGenerationPromotionLimitReached() {
1055 return (PromotedSpaceSize() + PromotedExternalMemorySize()) 1122 return (PromotedSpaceSize() + PromotedExternalMemorySize())
1056 > old_gen_promotion_limit_; 1123 > old_gen_promotion_limit_;
1057 } 1124 }
1058 1125
1059 static intptr_t OldGenerationSpaceAvailable() { 1126 intptr_t OldGenerationSpaceAvailable() {
1060 return old_gen_allocation_limit_ - 1127 return old_gen_allocation_limit_ -
1061 (PromotedSpaceSize() + PromotedExternalMemorySize()); 1128 (PromotedSpaceSize() + PromotedExternalMemorySize());
1062 } 1129 }
1063 1130
1064 // True if we have reached the allocation limit in the old generation that 1131 // True if we have reached the allocation limit in the old generation that
1065 // should artificially cause a GC right now. 1132 // should artificially cause a GC right now.
1066 static bool OldGenerationAllocationLimitReached() { 1133 bool OldGenerationAllocationLimitReached() {
1067 return OldGenerationSpaceAvailable() < 0; 1134 return OldGenerationSpaceAvailable() < 0;
1068 } 1135 }
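To illustrate how the two limits above differ, a hedged sketch of caller-side logic (the real policy lives in SelectGarbageCollector and the allocation paths; this is not what the patch adds):

    if (heap->OldGenerationAllocationLimitReached()) {
      // Hard limit: force a full collection right now.
      heap->CollectAllGarbage(false);
    } else if (heap->OldGenerationPromotionLimitReached()) {
      // Soft limit: the next normally triggered GC should be a major one.
      heap->CollectGarbage(OLD_POINTER_SPACE);
    }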
1069 1136
1070 // Can be called when the embedding application is idle. 1137 // Can be called when the embedding application is idle.
1071 static bool IdleNotification(); 1138 bool IdleNotification();
1072 1139
1073 // Declare all the root indices. 1140 // Declare all the root indices.
1074 enum RootListIndex { 1141 enum RootListIndex {
1075 #define ROOT_INDEX_DECLARATION(type, name, camel_name) k##camel_name##RootIndex, 1142 #define ROOT_INDEX_DECLARATION(type, name, camel_name) k##camel_name##RootIndex,
1076 STRONG_ROOT_LIST(ROOT_INDEX_DECLARATION) 1143 STRONG_ROOT_LIST(ROOT_INDEX_DECLARATION)
1077 #undef ROOT_INDEX_DECLARATION 1144 #undef ROOT_INDEX_DECLARATION
1078 1145
1079 // Utility type maps 1146 // Utility type maps
1080 #define DECLARE_STRUCT_MAP(NAME, Name, name) k##Name##MapRootIndex, 1147 #define DECLARE_STRUCT_MAP(NAME, Name, name) k##Name##MapRootIndex,
1081 STRUCT_LIST(DECLARE_STRUCT_MAP) 1148 STRUCT_LIST(DECLARE_STRUCT_MAP)
1082 #undef DECLARE_STRUCT_MAP 1149 #undef DECLARE_STRUCT_MAP
1083 1150
1084 #define SYMBOL_INDEX_DECLARATION(name, str) k##name##RootIndex, 1151 #define SYMBOL_INDEX_DECLARATION(name, str) k##name##RootIndex,
1085 SYMBOL_LIST(SYMBOL_INDEX_DECLARATION) 1152 SYMBOL_LIST(SYMBOL_INDEX_DECLARATION)
1086 #undef SYMBOL_DECLARATION 1153 #undef SYMBOL_DECLARATION
1087 1154
1088 kSymbolTableRootIndex, 1155 kSymbolTableRootIndex,
1089 kStrongRootListLength = kSymbolTableRootIndex, 1156 kStrongRootListLength = kSymbolTableRootIndex,
1090 kRootListLength 1157 kRootListLength
1091 }; 1158 };
1092 1159
1093 MUST_USE_RESULT static MaybeObject* NumberToString( 1160 MUST_USE_RESULT MaybeObject* NumberToString(
1094 Object* number, 1161 Object* number, bool check_number_string_cache = true);
1095 bool check_number_string_cache = true);
1096 1162
1097 static Map* MapForExternalArrayType(ExternalArrayType array_type); 1163 Map* MapForExternalArrayType(ExternalArrayType array_type);
1098 static RootListIndex RootIndexForExternalArrayType( 1164 RootListIndex RootIndexForExternalArrayType(
1099 ExternalArrayType array_type); 1165 ExternalArrayType array_type);
1100 1166
1101 static void RecordStats(HeapStats* stats, bool take_snapshot = false); 1167 void RecordStats(HeapStats* stats, bool take_snapshot = false);
1102 1168
1103 // Copy block of memory from src to dst. Size of block should be aligned 1169 // Copy block of memory from src to dst. Size of block should be aligned
1104 // by pointer size. 1170 // by pointer size.
1105 static inline void CopyBlock(Address dst, Address src, int byte_size); 1171 static inline void CopyBlock(Address dst, Address src, int byte_size);
1106 1172
1107 static inline void CopyBlockToOldSpaceAndUpdateRegionMarks(Address dst, 1173 inline void CopyBlockToOldSpaceAndUpdateRegionMarks(Address dst,
1108 Address src, 1174 Address src,
1109 int byte_size); 1175 int byte_size);
1110 1176
1111 // Optimized version of memmove for blocks with pointer size aligned sizes and 1177 // Optimized version of memmove for blocks with pointer size aligned sizes and
1112 // pointer size aligned addresses. 1178 // pointer size aligned addresses.
1113 static inline void MoveBlock(Address dst, Address src, int byte_size); 1179 static inline void MoveBlock(Address dst, Address src, int byte_size);
1114 1180
1115 static inline void MoveBlockToOldSpaceAndUpdateRegionMarks(Address dst, 1181 inline void MoveBlockToOldSpaceAndUpdateRegionMarks(Address dst,
1116 Address src, 1182 Address src,
1117 int byte_size); 1183 int byte_size);
1118 1184
1119 // Check new space expansion criteria and expand semispaces if it was hit. 1185 // Check new space expansion criteria and expand semispaces if it was hit.
1120 static void CheckNewSpaceExpansionCriteria(); 1186 void CheckNewSpaceExpansionCriteria();
1121 1187
1122 static inline void IncrementYoungSurvivorsCounter(int survived) { 1188 inline void IncrementYoungSurvivorsCounter(int survived) {
1123 young_survivors_after_last_gc_ = survived; 1189 young_survivors_after_last_gc_ = survived;
1124 survived_since_last_expansion_ += survived; 1190 survived_since_last_expansion_ += survived;
1125 } 1191 }
1126 1192
1127 static void UpdateNewSpaceReferencesInExternalStringTable( 1193 void UpdateNewSpaceReferencesInExternalStringTable(
1128 ExternalStringTableUpdaterCallback updater_func); 1194 ExternalStringTableUpdaterCallback updater_func);
1129 1195
1130 static void ProcessWeakReferences(WeakObjectRetainer* retainer); 1196 void ProcessWeakReferences(WeakObjectRetainer* retainer);
1131 1197
1132 // Helper function that governs the promotion policy from new space to 1198 // Helper function that governs the promotion policy from new space to
1133 // old. If the object's old address lies below the new space's age 1199 // old. If the object's old address lies below the new space's age
1134 // mark or if we've already filled the bottom 1/16th of the to space, 1200 // mark or if we've already filled the bottom 1/16th of the to space,
1135 // we try to promote this object. 1201 // we try to promote this object.
1136 static inline bool ShouldBePromoted(Address old_address, int object_size); 1202 inline bool ShouldBePromoted(Address old_address, int object_size);
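Restating the comment above as a standalone predicate sketch (plain stand-ins for the real NewSpace accessors; not the actual implementation):

    // Promote when the object survived the previous scavenge (it lies below
    // the age mark) or when the bottom 1/16th of to-space is already filled.
    static bool ShouldBePromotedSketch(uintptr_t old_address,
                                       uintptr_t age_mark,
                                       uintptr_t to_space_bottom,
                                       uintptr_t to_space_top,
                                       size_t to_space_capacity) {
      bool survived_last_scavenge = old_address < age_mark;
      bool lower_sixteenth_filled =
          static_cast<size_t>(to_space_top - to_space_bottom) >= to_space_capacity / 16;
      return survived_last_scavenge || lower_sixteenth_filled;
    }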
1137 1203
1138 static int MaxObjectSizeInNewSpace() { return kMaxObjectSizeInNewSpace; } 1204 int MaxObjectSizeInNewSpace() { return kMaxObjectSizeInNewSpace; }
1139 1205
1140 static void ClearJSFunctionResultCaches(); 1206 void ClearJSFunctionResultCaches();
1141 1207
1142 static void ClearNormalizedMapCaches(); 1208 void ClearNormalizedMapCaches();
1143 1209
1144 static GCTracer* tracer() { return tracer_; } 1210 GCTracer* tracer() { return tracer_; }
1145 1211
1146 static void CallGlobalGCPrologueCallback() { 1212 // Returns maximum GC pause.
1213 int get_max_gc_pause() { return max_gc_pause_; }
1214
1215 // Returns maximum size of objects alive after GC.
1216 intptr_t get_max_alive_after_gc() { return max_alive_after_gc_; }
1217
1218 // Returns minimal interval between two subsequent collections.
1219 int get_min_in_mutator() { return min_in_mutator_; }
1220
1221 MarkCompactCollector* mark_compact_collector() {
1222 return &mark_compact_collector_;
1223 }
1224
1225 ExternalStringTable* external_string_table() {
1226 return &external_string_table_;
1227 }
1228
1229 inline Isolate* isolate();
1230 bool is_safe_to_read_maps() { return is_safe_to_read_maps_; }
1231
1232 void CallGlobalGCPrologueCallback() {
1147 if (global_gc_prologue_callback_ != NULL) global_gc_prologue_callback_(); 1233 if (global_gc_prologue_callback_ != NULL) global_gc_prologue_callback_();
1148 } 1234 }
1149 1235
1150 static void CallGlobalGCEpilogueCallback() { 1236 void CallGlobalGCEpilogueCallback() {
1151 if (global_gc_epilogue_callback_ != NULL) global_gc_epilogue_callback_(); 1237 if (global_gc_epilogue_callback_ != NULL) global_gc_epilogue_callback_();
1152 } 1238 }
1153 1239
1154 private: 1240 private:
1155 static int reserved_semispace_size_; 1241 Heap();
1156 static int max_semispace_size_; 1242
1157 static int initial_semispace_size_; 1243 // This can be calculated directly from a pointer to the heap; however, it is
1158 static intptr_t max_old_generation_size_; 1244 // more expedient to get at the isolate directly from within Heap methods.
1159 static intptr_t max_executable_size_; 1245 Isolate* isolate_;
1160 static intptr_t code_range_size_; 1246
1247 int reserved_semispace_size_;
1248 int max_semispace_size_;
1249 int initial_semispace_size_;
1250 intptr_t max_old_generation_size_;
1251 intptr_t max_executable_size_;
1252 intptr_t code_range_size_;
1161 1253
1162 // For keeping track of how much data has survived 1254 // For keeping track of how much data has survived
1163 // scavenge since last new space expansion. 1255 // scavenge since last new space expansion.
1164 static int survived_since_last_expansion_; 1256 int survived_since_last_expansion_;
1165 1257
1166 static int always_allocate_scope_depth_; 1258 int always_allocate_scope_depth_;
1167 static int linear_allocation_scope_depth_; 1259 int linear_allocation_scope_depth_;
1168 1260
1169 // For keeping track of context disposals. 1261 // For keeping track of context disposals.
1170 static int contexts_disposed_; 1262 int contexts_disposed_;
1171 1263
1172 #if defined(V8_TARGET_ARCH_X64) 1264 #if defined(V8_TARGET_ARCH_X64)
1173 static const int kMaxObjectSizeInNewSpace = 1024*KB; 1265 static const int kMaxObjectSizeInNewSpace = 1024*KB;
1174 #else 1266 #else
1175 static const int kMaxObjectSizeInNewSpace = 512*KB; 1267 static const int kMaxObjectSizeInNewSpace = 512*KB;
1176 #endif 1268 #endif
1177 1269
1178 static NewSpace new_space_; 1270 NewSpace new_space_;
1179 static OldSpace* old_pointer_space_; 1271 OldSpace* old_pointer_space_;
1180 static OldSpace* old_data_space_; 1272 OldSpace* old_data_space_;
1181 static OldSpace* code_space_; 1273 OldSpace* code_space_;
1182 static MapSpace* map_space_; 1274 MapSpace* map_space_;
1183 static CellSpace* cell_space_; 1275 CellSpace* cell_space_;
1184 static LargeObjectSpace* lo_space_; 1276 LargeObjectSpace* lo_space_;
1185 static HeapState gc_state_; 1277 HeapState gc_state_;
1186 1278
1188 // Returns the size of objects residing in non-new spaces. 1280 // Returns the size of objects residing in non-new spaces.
1188 static intptr_t PromotedSpaceSize(); 1280 intptr_t PromotedSpaceSize();
1189 1281
1190 // Returns the amount of external memory registered since last global gc. 1282 // Returns the amount of external memory registered since last global gc.
1191 static int PromotedExternalMemorySize(); 1283 int PromotedExternalMemorySize();
1192 1284
1193 static int mc_count_; // how many mark-compact collections happened 1285 int mc_count_; // how many mark-compact collections happened
1194 static int ms_count_; // how many mark-sweep collections happened 1286 int ms_count_; // how many mark-sweep collections happened
1195 static unsigned int gc_count_; // how many GCs have happened 1287 unsigned int gc_count_; // how many GCs have happened
1196 1288
1197 // Total length of the strings we failed to flatten since the last GC. 1289 // Total length of the strings we failed to flatten since the last GC.
1198 static int unflattened_strings_length_; 1290 int unflattened_strings_length_;
1199 1291
1200 #define ROOT_ACCESSOR(type, name, camel_name) \ 1292 #define ROOT_ACCESSOR(type, name, camel_name) \
1201 static inline void set_##name(type* value) { \ 1293 inline void set_##name(type* value) { \
1202 roots_[k##camel_name##RootIndex] = value; \ 1294 roots_[k##camel_name##RootIndex] = value; \
1203 } 1295 }
1204 ROOT_LIST(ROOT_ACCESSOR) 1296 ROOT_LIST(ROOT_ACCESSOR)
1205 #undef ROOT_ACCESSOR 1297 #undef ROOT_ACCESSOR
1206 1298
1207 #ifdef DEBUG 1299 #ifdef DEBUG
1208 static bool allocation_allowed_; 1300 bool allocation_allowed_;
1209 1301
1210 // If the --gc-interval flag is set to a positive value, this 1302 // If the --gc-interval flag is set to a positive value, this
1211 // variable holds the number of allocations remaining until the next 1303 // variable holds the number of allocations remaining until the next
1212 // failure and garbage collection. 1304 // failure and garbage collection.
1213 static int allocation_timeout_; 1305 int allocation_timeout_;
1214 1306
1215 // Do we expect to be able to handle allocation failure at this 1307 // Do we expect to be able to handle allocation failure at this
1216 // time? 1308 // time?
1217 static bool disallow_allocation_failure_; 1309 bool disallow_allocation_failure_;
1310
1311 HeapDebugUtils* debug_utils_;
1218 #endif // DEBUG 1312 #endif // DEBUG
1219 1313
1220 // Limit that triggers a global GC on the next (normally caused) GC. This 1314 // Limit that triggers a global GC on the next (normally caused) GC. This
1221 // is checked when we have already decided to do a GC to help determine 1315 // is checked when we have already decided to do a GC to help determine
1222 // which collector to invoke. 1316 // which collector to invoke.
1223 static intptr_t old_gen_promotion_limit_; 1317 intptr_t old_gen_promotion_limit_;
1224 1318
1225 // Limit that triggers a global GC as soon as is reasonable. This is 1319 // Limit that triggers a global GC as soon as is reasonable. This is
1226 // checked before expanding a paged space in the old generation and on 1320 // checked before expanding a paged space in the old generation and on
1227 // every allocation in large object space. 1321 // every allocation in large object space.
1228 static intptr_t old_gen_allocation_limit_; 1322 intptr_t old_gen_allocation_limit_;
1229 1323
1230 // Limit on the amount of externally allocated memory allowed 1324 // Limit on the amount of externally allocated memory allowed
1231 // between global GCs. If reached a global GC is forced. 1325 // between global GCs. If reached a global GC is forced.
1232 static intptr_t external_allocation_limit_; 1326 intptr_t external_allocation_limit_;
1233 1327
1234 // The amount of external memory registered through the API kept alive 1328 // The amount of external memory registered through the API kept alive
1235 // by global handles 1329 // by global handles
1236 static int amount_of_external_allocated_memory_; 1330 int amount_of_external_allocated_memory_;
1237 1331
1238 // Caches the amount of external memory registered at the last global gc. 1332 // Caches the amount of external memory registered at the last global gc.
1239 static int amount_of_external_allocated_memory_at_last_global_gc_; 1333 int amount_of_external_allocated_memory_at_last_global_gc_;
1240 1334
1241 // Indicates that an allocation has failed in the old generation since the 1335 // Indicates that an allocation has failed in the old generation since the
1242 // last GC. 1336 // last GC.
1243 static int old_gen_exhausted_; 1337 int old_gen_exhausted_;
1244 1338
1245 static Object* roots_[kRootListLength]; 1339 Object* roots_[kRootListLength];
1246 1340
1247 static Object* global_contexts_list_; 1341 Object* global_contexts_list_;
1248 1342
1249 struct StringTypeTable { 1343 struct StringTypeTable {
1250 InstanceType type; 1344 InstanceType type;
1251 int size; 1345 int size;
1252 RootListIndex index; 1346 RootListIndex index;
1253 }; 1347 };
1254 1348
1255 struct ConstantSymbolTable { 1349 struct ConstantSymbolTable {
1256 const char* contents; 1350 const char* contents;
1257 RootListIndex index; 1351 RootListIndex index;
1258 }; 1352 };
1259 1353
1260 struct StructTable { 1354 struct StructTable {
1261 InstanceType type; 1355 InstanceType type;
1262 int size; 1356 int size;
1263 RootListIndex index; 1357 RootListIndex index;
1264 }; 1358 };
1265 1359
1266 static const StringTypeTable string_type_table[]; 1360 static const StringTypeTable string_type_table[];
1267 static const ConstantSymbolTable constant_symbol_table[]; 1361 static const ConstantSymbolTable constant_symbol_table[];
1268 static const StructTable struct_table[]; 1362 static const StructTable struct_table[];
1269 1363
1270 // The special hidden symbol which is an empty string, but does not match 1364 // The special hidden symbol which is an empty string, but does not match
1271 // any string when looked up in properties. 1365 // any string when looked up in properties.
1272 static String* hidden_symbol_; 1366 String* hidden_symbol_;
1273 1367
1274 // GC callback function, called before and after mark-compact GC. 1368 // GC callback function, called before and after mark-compact GC.
1275 // Allocations in the callback function are disallowed. 1369 // Allocations in the callback function are disallowed.
1276 struct GCPrologueCallbackPair { 1370 struct GCPrologueCallbackPair {
1277 GCPrologueCallbackPair(GCPrologueCallback callback, GCType gc_type) 1371 GCPrologueCallbackPair(GCPrologueCallback callback, GCType gc_type)
1278 : callback(callback), gc_type(gc_type) { 1372 : callback(callback), gc_type(gc_type) {
1279 } 1373 }
1280 bool operator==(const GCPrologueCallbackPair& pair) const { 1374 bool operator==(const GCPrologueCallbackPair& pair) const {
1281 return pair.callback == callback; 1375 return pair.callback == callback;
1282 } 1376 }
1283 GCPrologueCallback callback; 1377 GCPrologueCallback callback;
1284 GCType gc_type; 1378 GCType gc_type;
1285 }; 1379 };
1286 static List<GCPrologueCallbackPair> gc_prologue_callbacks_; 1380 List<GCPrologueCallbackPair> gc_prologue_callbacks_;
1287 1381
1288 struct GCEpilogueCallbackPair { 1382 struct GCEpilogueCallbackPair {
1289 GCEpilogueCallbackPair(GCEpilogueCallback callback, GCType gc_type) 1383 GCEpilogueCallbackPair(GCEpilogueCallback callback, GCType gc_type)
1290 : callback(callback), gc_type(gc_type) { 1384 : callback(callback), gc_type(gc_type) {
1291 } 1385 }
1292 bool operator==(const GCEpilogueCallbackPair& pair) const { 1386 bool operator==(const GCEpilogueCallbackPair& pair) const {
1293 return pair.callback == callback; 1387 return pair.callback == callback;
1294 } 1388 }
1295 GCEpilogueCallback callback; 1389 GCEpilogueCallback callback;
1296 GCType gc_type; 1390 GCType gc_type;
1297 }; 1391 };
1298 static List<GCEpilogueCallbackPair> gc_epilogue_callbacks_; 1392 List<GCEpilogueCallbackPair> gc_epilogue_callbacks_;
1299 1393
1300 static GCCallback global_gc_prologue_callback_; 1394 GCCallback global_gc_prologue_callback_;
1301 static GCCallback global_gc_epilogue_callback_; 1395 GCCallback global_gc_epilogue_callback_;
1302 1396
1303 // Support for computing object sizes during GC. 1397 // Support for computing object sizes during GC.
1304 static HeapObjectCallback gc_safe_size_of_old_object_; 1398 HeapObjectCallback gc_safe_size_of_old_object_;
1305 static int GcSafeSizeOfOldObject(HeapObject* object); 1399 static int GcSafeSizeOfOldObject(HeapObject* object);
1306 static int GcSafeSizeOfOldObjectWithEncodedMap(HeapObject* object); 1400 static int GcSafeSizeOfOldObjectWithEncodedMap(HeapObject* object);
1307 1401
1308 // Update the GC state. Called from the mark-compact collector. 1402 // Update the GC state. Called from the mark-compact collector.
1309 static void MarkMapPointersAsEncoded(bool encoded) { 1403 void MarkMapPointersAsEncoded(bool encoded) {
1310 gc_safe_size_of_old_object_ = encoded 1404 gc_safe_size_of_old_object_ = encoded
1311 ? &GcSafeSizeOfOldObjectWithEncodedMap 1405 ? &GcSafeSizeOfOldObjectWithEncodedMap
1312 : &GcSafeSizeOfOldObject; 1406 : &GcSafeSizeOfOldObject;
1313 } 1407 }
1314 1408
1315 // Checks whether a global GC is necessary 1409 // Checks whether a global GC is necessary
1316 static GarbageCollector SelectGarbageCollector(AllocationSpace space); 1410 GarbageCollector SelectGarbageCollector(AllocationSpace space);
1317 1411
1318 // Performs garbage collection 1412 // Performs garbage collection
1319 // Returns whether there is a chance another major GC could 1413 // Returns whether there is a chance another major GC could
1320 // collect more garbage. 1414 // collect more garbage.
1321 static bool PerformGarbageCollection(GarbageCollector collector, 1415 bool PerformGarbageCollection(GarbageCollector collector,
1322 GCTracer* tracer); 1416 GCTracer* tracer);
1417
1418 static const intptr_t kMinimumPromotionLimit = 2 * MB;
1419 static const intptr_t kMinimumAllocationLimit = 8 * MB;
1420
1421 inline void UpdateOldSpaceLimits();
1323 1422
1324 // Allocate an uninitialized object in map space. The behavior is identical 1423 // Allocate an uninitialized object in map space. The behavior is identical
1325 // to Heap::AllocateRaw(size_in_bytes, MAP_SPACE), except that (a) it doesn't 1424 // to Heap::AllocateRaw(size_in_bytes, MAP_SPACE), except that (a) it doesn't
1326 // have to test the allocation space argument and (b) can reduce code size 1425 // have to test the allocation space argument and (b) can reduce code size
1327 // (since both AllocateRaw and AllocateRawMap are inlined). 1426 // (since both AllocateRaw and AllocateRawMap are inlined).
1328 MUST_USE_RESULT static inline MaybeObject* AllocateRawMap(); 1427 MUST_USE_RESULT inline MaybeObject* AllocateRawMap();
1329 1428
1330 // Allocate an uninitialized object in the global property cell space. 1429 // Allocate an uninitialized object in the global property cell space.
1331 MUST_USE_RESULT static inline MaybeObject* AllocateRawCell(); 1430 MUST_USE_RESULT inline MaybeObject* AllocateRawCell();
1332 1431
1333 // Initializes a JSObject based on its map. 1432 // Initializes a JSObject based on its map.
1334 static void InitializeJSObjectFromMap(JSObject* obj, 1433 void InitializeJSObjectFromMap(JSObject* obj,
1335 FixedArray* properties, 1434 FixedArray* properties,
1336 Map* map); 1435 Map* map);
1337 1436
1338 static bool CreateInitialMaps(); 1437 bool CreateInitialMaps();
1339 static bool CreateInitialObjects(); 1438 bool CreateInitialObjects();
1340 1439
1341 // These two Create*EntryStub functions are here and forced to not be inlined 1440 // These five Create*EntryStub functions are here and forced to not be inlined
1342 // because of a gcc-4.4 bug that assigns wrong vtable entries. 1441 // because of a gcc-4.4 bug that assigns wrong vtable entries.
1343 NO_INLINE(static void CreateJSEntryStub()); 1442 NO_INLINE(void CreateJSEntryStub());
1344 NO_INLINE(static void CreateJSConstructEntryStub()); 1443 NO_INLINE(void CreateJSConstructEntryStub());
1345 1444
1346 static void CreateFixedStubs(); 1445 void CreateFixedStubs();
1347 1446
1348 MUST_USE_RESULT static MaybeObject* CreateOddball(const char* to_string, 1447 MaybeObject* CreateOddball(const char* to_string,
1349 Object* to_number); 1448 Object* to_number,
1449 byte kind);
1350 1450
1351 // Allocate empty fixed array. 1451 // Allocate empty fixed array.
1352 MUST_USE_RESULT static MaybeObject* AllocateEmptyFixedArray(); 1452 MUST_USE_RESULT MaybeObject* AllocateEmptyFixedArray();
1353 1453
1354 // Performs a minor collection in new generation. 1454 // Performs a minor collection in new generation.
1355 static void Scavenge(); 1455 void Scavenge();
1356 1456
1357 static String* UpdateNewSpaceReferenceInExternalStringTableEntry( 1457 static String* UpdateNewSpaceReferenceInExternalStringTableEntry(
1458 Heap* heap,
1358 Object** pointer); 1459 Object** pointer);
1359 1460
1360 static Address DoScavenge(ObjectVisitor* scavenge_visitor, 1461 Address DoScavenge(ObjectVisitor* scavenge_visitor, Address new_space_front);
1361 Address new_space_front);
1362 1462
1363 // Performs a major collection in the whole heap. 1463 // Performs a major collection in the whole heap.
1364 static void MarkCompact(GCTracer* tracer); 1464 void MarkCompact(GCTracer* tracer);
1365 1465
1366 // Code to be run before and after mark-compact. 1466 // Code to be run before and after mark-compact.
1367 static void MarkCompactPrologue(bool is_compacting); 1467 void MarkCompactPrologue(bool is_compacting);
1368 1468
1369 // Completely clear the Instanceof cache (to stop it keeping objects alive 1469 // Completely clear the Instanceof cache (to stop it keeping objects alive
1370 // around a GC). 1470 // around a GC).
1371 static void CompletelyClearInstanceofCache() { 1471 inline void CompletelyClearInstanceofCache();
1372 set_instanceof_cache_map(the_hole_value());
1373 set_instanceof_cache_function(the_hole_value());
1374 }
1375 1472
1376 #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING) 1473 #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
1377 // Record statistics before and after garbage collection. 1474 // Record statistics before and after garbage collection.
1378 static void ReportStatisticsBeforeGC(); 1475 void ReportStatisticsBeforeGC();
1379 static void ReportStatisticsAfterGC(); 1476 void ReportStatisticsAfterGC();
1380 #endif 1477 #endif
1381 1478
1382 // Slow part of scavenge object. 1479 // Slow part of scavenge object.
1383 static void ScavengeObjectSlow(HeapObject** p, HeapObject* object); 1480 static void ScavengeObjectSlow(HeapObject** p, HeapObject* object);
1384 1481
1385 // Initializes a function with a shared part and prototype. 1482 // Initializes a function with a shared part and prototype.
1386 // Returns the function. 1483 // Returns the function.
1387 // Note: this code was factored out of AllocateFunction such that 1484 // Note: this code was factored out of AllocateFunction such that
1388 // other parts of the VM could use it. Specifically, a function that creates 1485 // other parts of the VM could use it. Specifically, a function that creates
1389 // instances of type JS_FUNCTION_TYPE benefits from the use of this function. 1486 // instances of type JS_FUNCTION_TYPE benefits from the use of this function.
1390 // Please note this does not perform a garbage collection. 1487 // Please note this does not perform a garbage collection.
1391 MUST_USE_RESULT static inline MaybeObject* InitializeFunction( 1488 MUST_USE_RESULT inline MaybeObject* InitializeFunction(
1392 JSFunction* function, 1489 JSFunction* function,
1393 SharedFunctionInfo* shared, 1490 SharedFunctionInfo* shared,
1394 Object* prototype); 1491 Object* prototype);
1395 1492
1396 static GCTracer* tracer_; 1493 GCTracer* tracer_;
1397 1494
1398 1495
1399 // Initializes the number to string cache based on the max semispace size. 1496 // Initializes the number to string cache based on the max semispace size.
1400 MUST_USE_RESULT static MaybeObject* InitializeNumberStringCache(); 1497 MUST_USE_RESULT MaybeObject* InitializeNumberStringCache();
1401 // Flush the number to string cache. 1498 // Flush the number to string cache.
1402 static void FlushNumberStringCache(); 1499 void FlushNumberStringCache();
1403 1500
1404 static void UpdateSurvivalRateTrend(int start_new_space_size); 1501 void UpdateSurvivalRateTrend(int start_new_space_size);
1405 1502
1406 enum SurvivalRateTrend { INCREASING, STABLE, DECREASING, FLUCTUATING }; 1503 enum SurvivalRateTrend { INCREASING, STABLE, DECREASING, FLUCTUATING };
1407 1504
1408 static const int kYoungSurvivalRateThreshold = 90; 1505 static const int kYoungSurvivalRateThreshold = 90;
1409 static const int kYoungSurvivalRateAllowedDeviation = 15; 1506 static const int kYoungSurvivalRateAllowedDeviation = 15;
1410 1507
1411 static int young_survivors_after_last_gc_; 1508 int young_survivors_after_last_gc_;
1412 static int high_survival_rate_period_length_; 1509 int high_survival_rate_period_length_;
1413 static double survival_rate_; 1510 double survival_rate_;
1414 static SurvivalRateTrend previous_survival_rate_trend_; 1511 SurvivalRateTrend previous_survival_rate_trend_;
1415 static SurvivalRateTrend survival_rate_trend_; 1512 SurvivalRateTrend survival_rate_trend_;
1416 1513
1417 static void set_survival_rate_trend(SurvivalRateTrend survival_rate_trend) { 1514 void set_survival_rate_trend(SurvivalRateTrend survival_rate_trend) {
1418 ASSERT(survival_rate_trend != FLUCTUATING); 1515 ASSERT(survival_rate_trend != FLUCTUATING);
1419 previous_survival_rate_trend_ = survival_rate_trend_; 1516 previous_survival_rate_trend_ = survival_rate_trend_;
1420 survival_rate_trend_ = survival_rate_trend; 1517 survival_rate_trend_ = survival_rate_trend;
1421 } 1518 }
1422 1519
1423 static SurvivalRateTrend survival_rate_trend() { 1520 SurvivalRateTrend survival_rate_trend() {
1424 if (survival_rate_trend_ == STABLE) { 1521 if (survival_rate_trend_ == STABLE) {
1425 return STABLE; 1522 return STABLE;
1426 } else if (previous_survival_rate_trend_ == STABLE) { 1523 } else if (previous_survival_rate_trend_ == STABLE) {
1427 return survival_rate_trend_; 1524 return survival_rate_trend_;
1428 } else if (survival_rate_trend_ != previous_survival_rate_trend_) { 1525 } else if (survival_rate_trend_ != previous_survival_rate_trend_) {
1429 return FLUCTUATING; 1526 return FLUCTUATING;
1430 } else { 1527 } else {
1431 return survival_rate_trend_; 1528 return survival_rate_trend_;
1432 } 1529 }
1433 } 1530 }
1434 1531
1435 static bool IsStableOrIncreasingSurvivalTrend() { 1532 bool IsStableOrIncreasingSurvivalTrend() {
1436 switch (survival_rate_trend()) { 1533 switch (survival_rate_trend()) {
1437 case STABLE: 1534 case STABLE:
1438 case INCREASING: 1535 case INCREASING:
1439 return true; 1536 return true;
1440 default: 1537 default:
1441 return false; 1538 return false;
1442 } 1539 }
1443 } 1540 }
1444 1541
1445 static bool IsIncreasingSurvivalTrend() { 1542 bool IsIncreasingSurvivalTrend() {
1446 return survival_rate_trend() == INCREASING; 1543 return survival_rate_trend() == INCREASING;
1447 } 1544 }
1448 1545
1449 static bool IsHighSurvivalRate() { 1546 bool IsHighSurvivalRate() {
1450 return high_survival_rate_period_length_ > 0; 1547 return high_survival_rate_period_length_ > 0;
1451 } 1548 }
1452 1549
1453 static const int kInitialSymbolTableSize = 2048; 1550 static const int kInitialSymbolTableSize = 2048;
1454 static const int kInitialEvalCacheSize = 64; 1551 static const int kInitialEvalCacheSize = 64;
1455 1552
1553 // Maximum GC pause.
1554 int max_gc_pause_;
1555
1556 // Maximum size of objects alive after GC.
1557 intptr_t max_alive_after_gc_;
1558
1559 // Minimal interval between two subsequent collections.
1560 int min_in_mutator_;
1561
1562 // Size of objects alive after last GC.
1563 intptr_t alive_after_last_gc_;
1564
1565 double last_gc_end_timestamp_;
1566
1567 MarkCompactCollector mark_compact_collector_;
1568
1569 // This field contains the meaning of the WATERMARK_INVALIDATED flag.
1570 // Instead of clearing this flag from all pages we just flip
1571 // its meaning at the beginning of a scavenge.
1572 intptr_t page_watermark_invalidated_mark_;
1573
1574 int number_idle_notifications_;
1575 unsigned int last_idle_notification_gc_count_;
1576 bool last_idle_notification_gc_count_init_;
1577
1578 // Shared state read by the scavenge collector and set by ScavengeObject.
1579 PromotionQueue promotion_queue_;
1580
1581 // Flag is set when the heap has been configured. The heap can be repeatedly
1582 // configured through the API until it is set up.
1583 bool configured_;
1584
1585 ExternalStringTable external_string_table_;
1586
1587 bool is_safe_to_read_maps_;
1588
1456 friend class Factory; 1589 friend class Factory;
1590 friend class GCTracer;
1457 friend class DisallowAllocationFailure; 1591 friend class DisallowAllocationFailure;
1458 friend class AlwaysAllocateScope; 1592 friend class AlwaysAllocateScope;
1459 friend class LinearAllocationScope; 1593 friend class LinearAllocationScope;
1594 friend class Page;
1595 friend class Isolate;
1460 friend class MarkCompactCollector; 1596 friend class MarkCompactCollector;
1597 friend class MapCompact;
1598
1599 DISALLOW_COPY_AND_ASSIGN(Heap);
1461 }; 1600 };
1462 1601
1463 1602
1464 class HeapStats { 1603 class HeapStats {
1465 public: 1604 public:
1466 static const int kStartMarker = 0xDECADE00; 1605 static const int kStartMarker = 0xDECADE00;
1467 static const int kEndMarker = 0xDECADE01; 1606 static const int kEndMarker = 0xDECADE01;
1468 1607
1469 int* start_marker; // 0 1608 int* start_marker; // 0
1470 int* new_space_size; // 1 1609 int* new_space_size; // 1
(...skipping 23 matching lines...)
1494 }; 1633 };
1495 1634
1496 1635
1497 class AlwaysAllocateScope { 1636 class AlwaysAllocateScope {
1498 public: 1637 public:
1499 AlwaysAllocateScope() { 1638 AlwaysAllocateScope() {
1500 // We shouldn't hit any nested scopes, because that requires 1639 // We shouldn't hit any nested scopes, because that requires
1501 // non-handle code to call handle code. The code still works but 1640 // non-handle code to call handle code. The code still works but
1502 // performance will degrade, so we want to catch this situation 1641 // performance will degrade, so we want to catch this situation
1503 // in debug mode. 1642 // in debug mode.
1504 ASSERT(Heap::always_allocate_scope_depth_ == 0); 1643 ASSERT(HEAP->always_allocate_scope_depth_ == 0);
1505 Heap::always_allocate_scope_depth_++; 1644 HEAP->always_allocate_scope_depth_++;
1506 } 1645 }
1507 1646
1508 ~AlwaysAllocateScope() { 1647 ~AlwaysAllocateScope() {
1509 Heap::always_allocate_scope_depth_--; 1648 HEAP->always_allocate_scope_depth_--;
1510 ASSERT(Heap::always_allocate_scope_depth_ == 0); 1649 ASSERT(HEAP->always_allocate_scope_depth_ == 0);
1511 } 1650 }
1512 }; 1651 };
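A hedged usage sketch for the scope above: while it is alive, allocation requests are allowed to grow the heap instead of failing with a retry-after-GC result (the surrounding allocation call is illustrative):

    {
      AlwaysAllocateScope always_allocate;
      // Allocations made here must not observe a RetryAfterGC failure.
      MaybeObject* maybe_array = HEAP->AllocateRawFixedArray(16);
      // ... use maybe_array ...
    }   // scope destructor decrements always_allocate_scope_depth_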
1513 1652
1514 1653
1515 class LinearAllocationScope { 1654 class LinearAllocationScope {
1516 public: 1655 public:
1517 LinearAllocationScope() { 1656 LinearAllocationScope() {
1518 Heap::linear_allocation_scope_depth_++; 1657 HEAP->linear_allocation_scope_depth_++;
1519 } 1658 }
1520 1659
1521 ~LinearAllocationScope() { 1660 ~LinearAllocationScope() {
1522 Heap::linear_allocation_scope_depth_--; 1661 HEAP->linear_allocation_scope_depth_--;
1523 ASSERT(Heap::linear_allocation_scope_depth_ >= 0); 1662 ASSERT(HEAP->linear_allocation_scope_depth_ >= 0);
1524 } 1663 }
1525 }; 1664 };
1526 1665
1527 1666
1528 #ifdef DEBUG 1667 #ifdef DEBUG
1529 // Visitor class to verify interior pointers in spaces that do not contain 1668 // Visitor class to verify interior pointers in spaces that do not contain
1530 // or care about intergenerational references. All heap object pointers have to 1669 // or care about intergenerational references. All heap object pointers have to
1531 // point into the heap to a location that has a map pointer at its first word. 1670 // point into the heap to a location that has a map pointer at its first word.
1532 // Caveat: Heap::Contains is an approximation because it can return true for 1671 // Caveat: Heap::Contains is an approximation because it can return true for
1533 // objects in a heap space but above the allocation pointer. 1672 // objects in a heap space but above the allocation pointer.
1534 class VerifyPointersVisitor: public ObjectVisitor { 1673 class VerifyPointersVisitor: public ObjectVisitor {
1535 public: 1674 public:
1536 void VisitPointers(Object** start, Object** end) { 1675 void VisitPointers(Object** start, Object** end) {
1537 for (Object** current = start; current < end; current++) { 1676 for (Object** current = start; current < end; current++) {
1538 if ((*current)->IsHeapObject()) { 1677 if ((*current)->IsHeapObject()) {
1539 HeapObject* object = HeapObject::cast(*current); 1678 HeapObject* object = HeapObject::cast(*current);
1540 ASSERT(Heap::Contains(object)); 1679 ASSERT(HEAP->Contains(object));
1541 ASSERT(object->map()->IsMap()); 1680 ASSERT(object->map()->IsMap());
1542 } 1681 }
1543 } 1682 }
1544 } 1683 }
1545 }; 1684 };
1546 1685
1547 1686
1548 // Visitor class to verify interior pointers in spaces that use region marks 1687 // Visitor class to verify interior pointers in spaces that use region marks
1549 // to keep track of intergenerational references. 1688 // to keep track of intergenerational references.
1550 // As VerifyPointersVisitor but also checks that dirty marks are set 1689 // As VerifyPointersVisitor but also checks that dirty marks are set
1551 // for regions covering intergenerational references. 1690 // for regions covering intergenerational references.
1552 class VerifyPointersAndDirtyRegionsVisitor: public ObjectVisitor { 1691 class VerifyPointersAndDirtyRegionsVisitor: public ObjectVisitor {
1553 public: 1692 public:
1554 void VisitPointers(Object** start, Object** end) { 1693 void VisitPointers(Object** start, Object** end) {
1555 for (Object** current = start; current < end; current++) { 1694 for (Object** current = start; current < end; current++) {
1556 if ((*current)->IsHeapObject()) { 1695 if ((*current)->IsHeapObject()) {
1557 HeapObject* object = HeapObject::cast(*current); 1696 HeapObject* object = HeapObject::cast(*current);
1558 ASSERT(Heap::Contains(object)); 1697 ASSERT(HEAP->Contains(object));
1559 ASSERT(object->map()->IsMap()); 1698 ASSERT(object->map()->IsMap());
1560 if (Heap::InNewSpace(object)) { 1699 if (HEAP->InNewSpace(object)) {
1561 ASSERT(Heap::InToSpace(object)); 1700 ASSERT(HEAP->InToSpace(object));
1562 Address addr = reinterpret_cast<Address>(current); 1701 Address addr = reinterpret_cast<Address>(current);
1563 ASSERT(Page::FromAddress(addr)->IsRegionDirty(addr)); 1702 ASSERT(Page::FromAddress(addr)->IsRegionDirty(addr));
1564 } 1703 }
1565 } 1704 }
1566 } 1705 }
1567 } 1706 }
1568 }; 1707 };
1569 #endif 1708 #endif
1570 1709
1571 1710
(...skipping 93 matching lines...)
1665 // Object iterator for the space currently being iterated. 1804 // Object iterator for the space currently being iterated.
1666 ObjectIterator* object_iterator_; 1805 ObjectIterator* object_iterator_;
1667 }; 1806 };
1668 1807
1669 1808
1670 // Cache for mapping (map, property name) into field offset. 1809 // Cache for mapping (map, property name) into field offset.
1671 // Cleared at startup and prior to mark sweep collection. 1810 // Cleared at startup and prior to mark sweep collection.
1672 class KeyedLookupCache { 1811 class KeyedLookupCache {
1673 public: 1812 public:
1674 // Lookup field offset for (map, name). If absent, -1 is returned. 1813 // Lookup field offset for (map, name). If absent, -1 is returned.
1675 static int Lookup(Map* map, String* name); 1814 int Lookup(Map* map, String* name);
1676 1815
1677 // Update an element in the cache. 1816 // Update an element in the cache.
1678 static void Update(Map* map, String* name, int field_offset); 1817 void Update(Map* map, String* name, int field_offset);
1679 1818
1680 // Clear the cache. 1819 // Clear the cache.
1681 static void Clear(); 1820 void Clear();
1682 1821
1683 static const int kLength = 64; 1822 static const int kLength = 64;
1684 static const int kCapacityMask = kLength - 1; 1823 static const int kCapacityMask = kLength - 1;
1685 static const int kMapHashShift = 2; 1824 static const int kMapHashShift = 2;
1825 static const int kNotFound = -1;
1686 1826
1687 private: 1827 private:
1828 KeyedLookupCache() {
1829 for (int i = 0; i < kLength; ++i) {
1830 keys_[i].map = NULL;
1831 keys_[i].name = NULL;
1832 field_offsets_[i] = kNotFound;
1833 }
1834 }
1835
1688 static inline int Hash(Map* map, String* name); 1836 static inline int Hash(Map* map, String* name);
1689 1837
1690 // Get the address of the keys and field_offsets arrays. Used in 1838 // Get the address of the keys and field_offsets arrays. Used in
1691 // generated code to perform cache lookups. 1839 // generated code to perform cache lookups.
1692 static Address keys_address() { 1840 Address keys_address() {
1693 return reinterpret_cast<Address>(&keys_); 1841 return reinterpret_cast<Address>(&keys_);
1694 } 1842 }
1695 1843
1696 static Address field_offsets_address() { 1844 Address field_offsets_address() {
1697 return reinterpret_cast<Address>(&field_offsets_); 1845 return reinterpret_cast<Address>(&field_offsets_);
1698 } 1846 }
1699 1847
1700 struct Key { 1848 struct Key {
1701 Map* map; 1849 Map* map;
1702 String* name; 1850 String* name;
1703 }; 1851 };
1704 static Key keys_[kLength]; 1852
1705 static int field_offsets_[kLength]; 1853 Key keys_[kLength];
1854 int field_offsets_[kLength];
1706 1855
1707 friend class ExternalReference; 1856 friend class ExternalReference;
1857 friend class Isolate;
1858 DISALLOW_COPY_AND_ASSIGN(KeyedLookupCache);
1708 }; 1859 };
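With this patch KeyedLookupCache stops being a bag of static members and becomes a per-isolate object (note the private constructor and the Isolate friend declaration). A minimal sketch of the lookup/update protocol; the isolate accessor and the slow-path helper are hypothetical names used only for illustration:

// Sketch only: probe the direct-mapped cache before doing the slow lookup.
KeyedLookupCache* cache = isolate->keyed_lookup_cache();   // assumed accessor
int offset = cache->Lookup(map, name);
if (offset == KeyedLookupCache::kNotFound) {
  offset = LookupFieldOffsetSlow(map, name);   // hypothetical slow path
  cache->Update(map, name, offset);            // prime the cache for next time
}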
1709 1860
1710 1861
1711 // Cache for mapping (array, property name) into descriptor index. 1862 // Cache for mapping (array, property name) into descriptor index.
1712 // The cache contains both positive and negative results. 1863 // The cache contains both positive and negative results.
1713 // A descriptor index equal to kNotFound means the property is absent. 1864 // A descriptor index equal to kNotFound means the property is absent.
1714 // Cleared at startup and prior to any gc. 1865 // Cleared at startup and prior to any gc.
1715 class DescriptorLookupCache { 1866 class DescriptorLookupCache {
1716 public: 1867 public:
1717 // Lookup descriptor index for (map, name). 1868 // Lookup descriptor index for (map, name).
1718 // If absent, kAbsent is returned. 1869 // If absent, kAbsent is returned.
1719 static int Lookup(DescriptorArray* array, String* name) { 1870 int Lookup(DescriptorArray* array, String* name) {
1720 if (!StringShape(name).IsSymbol()) return kAbsent; 1871 if (!StringShape(name).IsSymbol()) return kAbsent;
1721 int index = Hash(array, name); 1872 int index = Hash(array, name);
1722 Key& key = keys_[index]; 1873 Key& key = keys_[index];
1723 if ((key.array == array) && (key.name == name)) return results_[index]; 1874 if ((key.array == array) && (key.name == name)) return results_[index];
1724 return kAbsent; 1875 return kAbsent;
1725 } 1876 }
1726 1877
1727 // Update an element in the cache. 1878 // Update an element in the cache.
1728 static void Update(DescriptorArray* array, String* name, int result) { 1879 void Update(DescriptorArray* array, String* name, int result) {
1729 ASSERT(result != kAbsent); 1880 ASSERT(result != kAbsent);
1730 if (StringShape(name).IsSymbol()) { 1881 if (StringShape(name).IsSymbol()) {
1731 int index = Hash(array, name); 1882 int index = Hash(array, name);
1732 Key& key = keys_[index]; 1883 Key& key = keys_[index];
1733 key.array = array; 1884 key.array = array;
1734 key.name = name; 1885 key.name = name;
1735 results_[index] = result; 1886 results_[index] = result;
1736 } 1887 }
1737 } 1888 }
1738 1889
1739 // Clear the cache. 1890 // Clear the cache.
1740 static void Clear(); 1891 void Clear();
1741 1892
1742 static const int kAbsent = -2; 1893 static const int kAbsent = -2;
1743 private: 1894 private:
1895 DescriptorLookupCache() {
1896 for (int i = 0; i < kLength; ++i) {
1897 keys_[i].array = NULL;
1898 keys_[i].name = NULL;
1899 results_[i] = kAbsent;
1900 }
1901 }
1902
1744 static int Hash(DescriptorArray* array, String* name) { 1903 static int Hash(DescriptorArray* array, String* name) {
1745 // Uses only lower 32 bits if pointers are larger. 1904 // Uses only lower 32 bits if pointers are larger.
1746 uint32_t array_hash = 1905 uint32_t array_hash =
1747 static_cast<uint32_t>(reinterpret_cast<uintptr_t>(array)) >> 2; 1906 static_cast<uint32_t>(reinterpret_cast<uintptr_t>(array)) >> 2;
1748 uint32_t name_hash = 1907 uint32_t name_hash =
1749 static_cast<uint32_t>(reinterpret_cast<uintptr_t>(name)) >> 2; 1908 static_cast<uint32_t>(reinterpret_cast<uintptr_t>(name)) >> 2;
1750 return (array_hash ^ name_hash) % kLength; 1909 return (array_hash ^ name_hash) % kLength;
1751 } 1910 }
1752 1911
1753 static const int kLength = 64; 1912 static const int kLength = 64;
1754 struct Key { 1913 struct Key {
1755 DescriptorArray* array; 1914 DescriptorArray* array;
1756 String* name; 1915 String* name;
1757 }; 1916 };
1758 1917
1759 static Key keys_[kLength]; 1918 Key keys_[kLength];
1760 static int results_[kLength]; 1919 int results_[kLength];
1920
1921 friend class Isolate;
1922 DISALLOW_COPY_AND_ASSIGN(DescriptorLookupCache);
1761 }; 1923 };
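Note the two sentinels: kAbsent (-2) means the cache has no entry for (array, name), while a cached kNotFound result is a valid negative answer meaning the property is absent. A usage sketch; the isolate accessor and the Search call are assumptions for illustration:

// Sketch only: consult the cache, fall back to a descriptor search on a miss.
DescriptorLookupCache* cache = isolate->descriptor_lookup_cache();  // assumed
int number = cache->Lookup(descriptors, name);
if (number == DescriptorLookupCache::kAbsent) {
  number = descriptors->Search(name);          // assumed slow path
  cache->Update(descriptors, name, number);    // caches negative results too
}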
1762 1924
1763 1925
1764 // ----------------------------------------------------------------------------
1765 // Marking stack for tracing live objects.
1766
1767 class MarkingStack {
1768 public:
1769 void Initialize(Address low, Address high) {
1770 top_ = low_ = reinterpret_cast<HeapObject**>(low);
1771 high_ = reinterpret_cast<HeapObject**>(high);
1772 overflowed_ = false;
1773 }
1774
1775 bool is_full() { return top_ >= high_; }
1776
1777 bool is_empty() { return top_ <= low_; }
1778
1779 bool overflowed() { return overflowed_; }
1780
1781 void clear_overflowed() { overflowed_ = false; }
1782
1783 // Push the (marked) object on the marking stack if there is room,
1784 // otherwise mark the object as overflowed and wait for a rescan of the
1785 // heap.
1786 void Push(HeapObject* object) {
1787 CHECK(object->IsHeapObject());
1788 if (is_full()) {
1789 object->SetOverflow();
1790 overflowed_ = true;
1791 } else {
1792 *(top_++) = object;
1793 }
1794 }
1795
1796 HeapObject* Pop() {
1797 ASSERT(!is_empty());
1798 HeapObject* object = *(--top_);
1799 CHECK(object->IsHeapObject());
1800 return object;
1801 }
1802
1803 private:
1804 HeapObject** low_;
1805 HeapObject** top_;
1806 HeapObject** high_;
1807 bool overflowed_;
1808 };
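MarkingStack leaves heap.h in this patch. For reference, the overflow handling its comments describe amounts to the following consumer-side protocol, sketched with hypothetical helper names:

// Sketch of the drain loop implied by Push()/Pop()/overflowed():
while (!marking_stack.is_empty()) {
  HeapObject* object = marking_stack.Pop();
  MarkObjectBody(object, &marking_stack);    // hypothetical: may Push() more work
}
if (marking_stack.overflowed()) {
  // Some pushes were dropped; the marker must rescan the heap for objects
  // carrying the overflow bit and repeat the drain until no overflow remains.
  marking_stack.clear_overflowed();
  RescanHeapForOverflowedObjects();           // hypothetical rescan pass
}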
1809
1810
1811 // A helper class to document/test C++ scopes where we do not 1926 // A helper class to document/test C++ scopes where we do not
1812 // expect a GC. Usage: 1927 // expect a GC. Usage:
1813 // 1928 //
1814 // /* Allocation not allowed: we cannot handle a GC in this scope. */ 1929 // /* Allocation not allowed: we cannot handle a GC in this scope. */
1815 // { AssertNoAllocation nogc; 1930 // { AssertNoAllocation nogc;
1816 // ... 1931 // ...
1817 // } 1932 // }
1818 1933
1819 #ifdef DEBUG 1934 #ifdef DEBUG
1820 1935
1821 class DisallowAllocationFailure { 1936 class DisallowAllocationFailure {
1822 public: 1937 public:
1823 DisallowAllocationFailure() { 1938 DisallowAllocationFailure() {
1824 old_state_ = Heap::disallow_allocation_failure_; 1939 old_state_ = HEAP->disallow_allocation_failure_;
1825 Heap::disallow_allocation_failure_ = true; 1940 HEAP->disallow_allocation_failure_ = true;
1826 } 1941 }
1827 ~DisallowAllocationFailure() { 1942 ~DisallowAllocationFailure() {
1828 Heap::disallow_allocation_failure_ = old_state_; 1943 HEAP->disallow_allocation_failure_ = old_state_;
1829 } 1944 }
1830 private: 1945 private:
1831 bool old_state_; 1946 bool old_state_;
1832 }; 1947 };
1833 1948
1834 class AssertNoAllocation { 1949 class AssertNoAllocation {
1835 public: 1950 public:
1836 AssertNoAllocation() { 1951 AssertNoAllocation() {
1837 old_state_ = Heap::allow_allocation(false); 1952 old_state_ = HEAP->allow_allocation(false);
1838 } 1953 }
1839 1954
1840 ~AssertNoAllocation() { 1955 ~AssertNoAllocation() {
1841 Heap::allow_allocation(old_state_); 1956 HEAP->allow_allocation(old_state_);
1842 } 1957 }
1843 1958
1844 private: 1959 private:
1845 bool old_state_; 1960 bool old_state_;
1846 }; 1961 };
1847 1962
1848 class DisableAssertNoAllocation { 1963 class DisableAssertNoAllocation {
1849 public: 1964 public:
1850 DisableAssertNoAllocation() { 1965 DisableAssertNoAllocation() {
1851 old_state_ = Heap::allow_allocation(true); 1966 old_state_ = HEAP->allow_allocation(true);
1852 } 1967 }
1853 1968
1854 ~DisableAssertNoAllocation() { 1969 ~DisableAssertNoAllocation() {
1855 Heap::allow_allocation(old_state_); 1970 HEAP->allow_allocation(old_state_);
1856 } 1971 }
1857 1972
1858 private: 1973 private:
1859 bool old_state_; 1974 bool old_state_;
1860 }; 1975 };
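DisableAssertNoAllocation is the escape hatch for the scope above: it temporarily re-enables allocation inside a region guarded by AssertNoAllocation. A minimal usage sketch (the work functions are placeholders, not V8 APIs):

{
  AssertNoAllocation no_gc;                // allocation now asserts (DEBUG only)
  TraverseObjectsWithoutAllocating();      // hypothetical work
  {
    DisableAssertNoAllocation allow_gc;    // nested scope: allocation allowed again
    BuildDebugMessage();                   // hypothetical work that may allocate
  }
  // Leaving the inner scope restores the no-allocation assertion.
}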
1861 1976
1862 #else // ndef DEBUG 1977 #else // ndef DEBUG
1863 1978
1864 class AssertNoAllocation { 1979 class AssertNoAllocation {
1865 public: 1980 public:
(...skipping 36 matching lines...)
1902 ASSERT(scope_ < kNumberOfScopes); // scope_ is unsigned. 2017 ASSERT(scope_ < kNumberOfScopes); // scope_ is unsigned.
1903 tracer_->scopes_[scope_] += OS::TimeCurrentMillis() - start_time_; 2018 tracer_->scopes_[scope_] += OS::TimeCurrentMillis() - start_time_;
1904 } 2019 }
1905 2020
1906 private: 2021 private:
1907 GCTracer* tracer_; 2022 GCTracer* tracer_;
1908 ScopeId scope_; 2023 ScopeId scope_;
1909 double start_time_; 2024 double start_time_;
1910 }; 2025 };
1911 2026
1912 GCTracer(); 2027 explicit GCTracer(Heap* heap);
1913 ~GCTracer(); 2028 ~GCTracer();
1914 2029
1915 // Sets the collector. 2030 // Sets the collector.
1916 void set_collector(GarbageCollector collector) { collector_ = collector; } 2031 void set_collector(GarbageCollector collector) { collector_ = collector; }
1917 2032
1918 // Sets the GC count. 2033 // Sets the GC count.
1919 void set_gc_count(unsigned int count) { gc_count_ = count; } 2034 void set_gc_count(unsigned int count) { gc_count_ = count; }
1920 2035
1921 // Sets the full GC count. 2036 // Sets the full GC count.
1922 void set_full_gc_count(int count) { full_gc_count_ = count; } 2037 void set_full_gc_count(int count) { full_gc_count_ = count; }
1923 2038
1924 // Sets the flag that this is a compacting full GC. 2039 // Sets the flag that this is a compacting full GC.
1925 void set_is_compacting() { is_compacting_ = true; } 2040 void set_is_compacting() { is_compacting_ = true; }
1926 bool is_compacting() const { return is_compacting_; } 2041 bool is_compacting() const { return is_compacting_; }
1927 2042
1928 // Increment and decrement the count of marked objects. 2043 // Increment and decrement the count of marked objects.
1929 void increment_marked_count() { ++marked_count_; } 2044 void increment_marked_count() { ++marked_count_; }
1930 void decrement_marked_count() { --marked_count_; } 2045 void decrement_marked_count() { --marked_count_; }
1931 2046
1932 int marked_count() { return marked_count_; } 2047 int marked_count() { return marked_count_; }
1933 2048
1934 void increment_promoted_objects_size(int object_size) { 2049 void increment_promoted_objects_size(int object_size) {
1935 promoted_objects_size_ += object_size; 2050 promoted_objects_size_ += object_size;
1936 } 2051 }
1937 2052
1938 // Returns maximum GC pause.
1939 static int get_max_gc_pause() { return max_gc_pause_; }
1940
1941 // Returns maximum size of objects alive after GC.
1942 static intptr_t get_max_alive_after_gc() { return max_alive_after_gc_; }
1943
1944 // Returns minimal interval between two subsequent collections.
1945 static int get_min_in_mutator() { return min_in_mutator_; }
1946
1947 private: 2053 private:
1948 // Returns a string matching the collector. 2054 // Returns a string matching the collector.
1949 const char* CollectorString(); 2055 const char* CollectorString();
1950 2056
1951 // Returns the size of objects in the heap (in MB). 2057 // Returns the size of objects in the heap (in MB).
1952 double SizeOfHeapObjects() { 2058 double SizeOfHeapObjects() {
1953 return (static_cast<double>(Heap::SizeOfObjects())) / MB; 2059 return (static_cast<double>(HEAP->SizeOfObjects())) / MB;
1954 } 2060 }
1955 2061
1956 double start_time_; // Timestamp set in the constructor. 2062 double start_time_; // Timestamp set in the constructor.
1957 intptr_t start_size_; // Size of objects in heap set in constructor. 2063 intptr_t start_size_; // Size of objects in heap set in constructor.
1958 GarbageCollector collector_; // Type of collector. 2064 GarbageCollector collector_; // Type of collector.
1959 2065
1960 // A count (including this one, e.g., the first collection is 1) of the 2066 // A count (including this one, e.g., the first collection is 1) of the
1961 // number of garbage collections. 2067 // number of garbage collections.
1962 unsigned int gc_count_; 2068 unsigned int gc_count_;
1963 2069
(...skipping 28 matching lines...)
1992 // collection and the end of the previous collection. 2098 // collection and the end of the previous collection.
1993 intptr_t allocated_since_last_gc_; 2099 intptr_t allocated_since_last_gc_;
1994 2100
1995 // Amount of time spent in the mutator, i.e. the time elapsed between the 2101 // Amount of time spent in the mutator, i.e. the time elapsed between the
1996 // end of the previous collection and the beginning of the current one. 2102 // end of the previous collection and the beginning of the current one.
1997 double spent_in_mutator_; 2103 double spent_in_mutator_;
1998 2104
1999 // Size of objects promoted during the current collection. 2105 // Size of objects promoted during the current collection.
2000 intptr_t promoted_objects_size_; 2106 intptr_t promoted_objects_size_;
2001 2107
2002 // Maximum GC pause. 2108 Heap* heap_;
2003 static int max_gc_pause_;
2004
2005 // Maximum size of objects alive after GC.
2006 static intptr_t max_alive_after_gc_;
2007
2008 // Minimal interval between two subsequent collections.
2009 static int min_in_mutator_;
2010
2011 // Size of objects alive after last GC.
2012 static intptr_t alive_after_last_gc_;
2013
2014 static double last_gc_end_timestamp_;
2015 }; 2109 };
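GCTracer is now constructed against a specific Heap, and the aggregate statistics (max pause, max alive size, minimum mutator interval) move off its static members. A sketch of a timed collection phase; the scope id and phase function are assumed names, and the Scope constructor is presumed to take the tracer and an id, matching the destructor shown above:

// Sketch only: RAII timing of one collection phase.
GCTracer tracer(heap);
tracer.set_collector(MARK_COMPACTOR);                         // full GC
{
  GCTracer::Scope scope(&tracer, GCTracer::Scope::MC_MARK);   // assumed id
  MarkLiveObjects();                                          // hypothetical phase
}   // the Scope destructor adds the elapsed time to the tracer's per-phase totals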
2016 2110
2017 2111
2018 class TranscendentalCache { 2112 class TranscendentalCache {
2019 public: 2113 public:
2020 enum Type {ACOS, ASIN, ATAN, COS, EXP, LOG, SIN, TAN, kNumberOfCaches}; 2114 enum Type {ACOS, ASIN, ATAN, COS, EXP, LOG, SIN, TAN, kNumberOfCaches};
2021 static const int kTranscendentalTypeBits = 3; 2115 static const int kTranscendentalTypeBits = 3;
2022 STATIC_ASSERT((1 << kTranscendentalTypeBits) >= kNumberOfCaches); 2116 STATIC_ASSERT((1 << kTranscendentalTypeBits) >= kNumberOfCaches);
2023 2117
2024 explicit TranscendentalCache(Type t);
2025
2026 // Returns a heap number with f(input), where f is a math function specified 2118 // Returns a heap number with f(input), where f is a math function specified
2027 // by the 'type' argument. 2119 // by the 'type' argument.
2028 MUST_USE_RESULT static inline MaybeObject* Get(Type type, double input) { 2120 MUST_USE_RESULT inline MaybeObject* Get(Type type, double input);
2029 TranscendentalCache* cache = caches_[type];
2030 if (cache == NULL) {
2031 caches_[type] = cache = new TranscendentalCache(type);
2032 }
2033 return cache->Get(input);
2034 }
2035 2121
2036 // The cache contains raw Object pointers. This method disposes of 2122 // The cache contains raw Object pointers. This method disposes of
2037 // them before a garbage collection. 2123 // them before a garbage collection.
2038 static void Clear(); 2124 void Clear();
2039 2125
2040 private: 2126 private:
2041 MUST_USE_RESULT inline MaybeObject* Get(double input) { 2127 class SubCache {
2042 Converter c; 2128 static const int kCacheSize = 512;
2043 c.dbl = input; 2129
2044 int hash = Hash(c); 2130 explicit SubCache(Type t);
2045 Element e = elements_[hash]; 2131
2046 if (e.in[0] == c.integers[0] && 2132 MUST_USE_RESULT inline MaybeObject* Get(double input);
2047 e.in[1] == c.integers[1]) { 2133
2048 ASSERT(e.output != NULL); 2134 inline double Calculate(double input);
2049 Counters::transcendental_cache_hit.Increment(); 2135
2050 return e.output; 2136 struct Element {
2137 uint32_t in[2];
2138 Object* output;
2139 };
2140
2141 union Converter {
2142 double dbl;
2143 uint32_t integers[2];
2144 };
2145
2146 inline static int Hash(const Converter& c) {
2147 uint32_t hash = (c.integers[0] ^ c.integers[1]);
2148 hash ^= static_cast<int32_t>(hash) >> 16;
2149 hash ^= static_cast<int32_t>(hash) >> 8;
2150 return (hash & (kCacheSize - 1));
2051 } 2151 }
2052 double answer = Calculate(input); 2152
2053 Counters::transcendental_cache_miss.Increment(); 2153 Element elements_[kCacheSize];
2054 Object* heap_number; 2154 Type type_;
2055 { MaybeObject* maybe_heap_number = Heap::AllocateHeapNumber(answer); 2155 Isolate* isolate_;
2056 if (!maybe_heap_number->ToObject(&heap_number)) return maybe_heap_number; 2156
2057 } 2157 // Allow access to the caches_ array as an ExternalReference.
2058 elements_[hash].in[0] = c.integers[0]; 2158 friend class ExternalReference;
2059 elements_[hash].in[1] = c.integers[1]; 2159 // Inline implementation of the cache.
2060 elements_[hash].output = heap_number; 2160 friend class TranscendentalCacheStub;
2061 return heap_number; 2161 // For evaluating value.
2162 friend class TranscendentalCache;
2163
2164 DISALLOW_COPY_AND_ASSIGN(SubCache);
2165 };
2166
2167 TranscendentalCache() {
2168 for (int i = 0; i < kNumberOfCaches; ++i) caches_[i] = NULL;
2062 } 2169 }
2063 2170
2064 inline double Calculate(double input) { 2171 // Used to create an external reference.
2065 switch (type_) { 2172 inline Address cache_array_address();
2066 case ACOS:
2067 return acos(input);
2068 case ASIN:
2069 return asin(input);
2070 case ATAN:
2071 return atan(input);
2072 case COS:
2073 return cos(input);
2074 case EXP:
2075 return exp(input);
2076 case LOG:
2077 return log(input);
2078 case SIN:
2079 return sin(input);
2080 case TAN:
2081 return tan(input);
2082 default:
2083 return 0.0; // Never happens.
2084 }
2085 }
2086 static const int kCacheSize = 512;
2087 struct Element {
2088 uint32_t in[2];
2089 Object* output;
2090 };
2091 union Converter {
2092 double dbl;
2093 uint32_t integers[2];
2094 };
2095 inline static int Hash(const Converter& c) {
2096 uint32_t hash = (c.integers[0] ^ c.integers[1]);
2097 hash ^= static_cast<int32_t>(hash) >> 16;
2098 hash ^= static_cast<int32_t>(hash) >> 8;
2099 return (hash & (kCacheSize - 1));
2100 }
2101 2173
2102 static Address cache_array_address() { 2174 // Instantiation
2103 // Used to create an external reference. 2175 friend class Isolate;
2104 return reinterpret_cast<Address>(caches_); 2176 // Inline implementation of the caching.
2105 } 2177 friend class TranscendentalCacheStub;
2106
2107 // Allow access to the caches_ array as an ExternalReference. 2178 // Allow access to the caches_ array as an ExternalReference.
2108 friend class ExternalReference; 2179 friend class ExternalReference;
2109 // Inline implementation of the cache.
2110 friend class TranscendentalCacheStub;
2111 2180
2112 static TranscendentalCache* caches_[kNumberOfCaches]; 2181 SubCache* caches_[kNumberOfCaches];
2113 Element elements_[kCacheSize]; 2182 DISALLOW_COPY_AND_ASSIGN(TranscendentalCache);
2114 Type type_;
2115 }; 2183 };
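The transcendental cache is split: what used to be a static array of whole caches is now, per TranscendentalCache instance, an array of lazily created SubCache objects, one per math function. The inline Get(Type, double) declared above presumably keeps the lazy-create-then-probe shape of the removed static code; a sketch reconstructed from those removed lines:

// Sketch (the real definition is out of line and may differ in detail):
MaybeObject* TranscendentalCache::Get(Type type, double input) {
  SubCache* cache = caches_[type];
  if (cache == NULL) {
    caches_[type] = cache = new SubCache(type);
  }
  return cache->Get(input);   // hashes the double; a hit returns the cached
}                             // heap number, a miss recomputes and stores it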
2116 2184
2117 2185
2118 // External strings table is a place where all external strings are
2119 // registered. We need to keep track of such strings to properly
2120 // finalize them.
2121 class ExternalStringTable : public AllStatic {
2122 public:
2123 // Registers an external string.
2124 inline static void AddString(String* string);
2125
2126 inline static void Iterate(ObjectVisitor* v);
2127
2128 // Restores internal invariant and gets rid of collected strings.
2129 // Must be called after each Iterate() that modified the strings.
2130 static void CleanUp();
2131
2132 // Destroys all allocated memory.
2133 static void TearDown();
2134
2135 private:
2136 friend class Heap;
2137
2138 inline static void Verify();
2139
2140 inline static void AddOldString(String* string);
2141
2142 // Notifies the table that only a prefix of the new list is valid.
2143 inline static void ShrinkNewStrings(int position);
2144
2145 // To speed up scavenge collections new space string are kept
2146 // separate from old space strings.
2147 static List<Object*> new_space_strings_;
2148 static List<Object*> old_space_strings_;
2149 };
2150
2151
2152 // Abstract base class for checking whether a weak object should be retained. 2186 // Abstract base class for checking whether a weak object should be retained.
2153 class WeakObjectRetainer { 2187 class WeakObjectRetainer {
2154 public: 2188 public:
2155 virtual ~WeakObjectRetainer() {} 2189 virtual ~WeakObjectRetainer() {}
2156 2190
2157 // Return whether this object should be retained. If NULL is returned the 2191 // Return whether this object should be retained. If NULL is returned the
2158 // object has no references. Otherwise the address of the retained object 2192 // object has no references. Otherwise the address of the retained object
2159 // should be returned as in some GC situations the object has been moved. 2193 // should be returned as in some GC situations the object has been moved.
2160 virtual Object* RetainAs(Object* object) = 0; 2194 virtual Object* RetainAs(Object* object) = 0;
2161 }; 2195 };
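WeakObjectRetainer is a pure interface: during GC each weakly held reference is passed to RetainAs, which either clears it (by returning NULL) or rewrites it to the object's possibly relocated address. An illustrative implementation, not part of this patch:

// Keeps every object alive and simply forwards its current address.
class KeepAllRetainer : public WeakObjectRetainer {
 public:
  virtual Object* RetainAs(Object* object) {
    return object;   // returning NULL would tell the GC to clear the slot
  }
};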
(...skipping 53 matching lines...)
2215 2249
2216 AssertNoAllocation no_alloc; // i.e. no gc allowed. 2250 AssertNoAllocation no_alloc; // i.e. no gc allowed.
2217 2251
2218 DISALLOW_IMPLICIT_CONSTRUCTORS(PathTracer); 2252 DISALLOW_IMPLICIT_CONSTRUCTORS(PathTracer);
2219 }; 2253 };
2220 #endif // DEBUG || LIVE_OBJECT_LIST 2254 #endif // DEBUG || LIVE_OBJECT_LIST
2221 2255
2222 2256
2223 } } // namespace v8::internal 2257 } } // namespace v8::internal
2224 2258
2259 #undef HEAP
2260
2225 #endif // V8_HEAP_H_ 2261 #endif // V8_HEAP_H_