Chromium Code Reviews

Side by Side Diff: src/heap.cc

Issue 7535004: Merge bleeding edge up to 8774 into the GC branch. (Closed) Base URL: http://v8.googlecode.com/svn/branches/experimental/gc/
Patch Set: Created 9 years, 4 months ago
OLD | NEW
1 // Copyright 2011 the V8 project authors. All rights reserved. 1 // Copyright 2011 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 256 matching lines...)
267 return MARK_COMPACTOR; 267 return MARK_COMPACTOR;
268 } 268 }
269 269
270 // Default 270 // Default
271 return SCAVENGER; 271 return SCAVENGER;
272 } 272 }
273 273
274 274
275 // TODO(1238405): Combine the infrastructure for --heap-stats and 275 // TODO(1238405): Combine the infrastructure for --heap-stats and
276 // --log-gc to avoid the complicated preprocessor and flag testing. 276 // --log-gc to avoid the complicated preprocessor and flag testing.
277 #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
278 void Heap::ReportStatisticsBeforeGC() { 277 void Heap::ReportStatisticsBeforeGC() {
279 // Heap::ReportHeapStatistics will also log NewSpace statistics when 278 // Heap::ReportHeapStatistics will also log NewSpace statistics when
280 // compiled with ENABLE_LOGGING_AND_PROFILING and --log-gc is set. The 279 // compiled --log-gc is set. The following logic is used to avoid
281 // following logic is used to avoid double logging. 280 // double logging.
282 #if defined(DEBUG) && defined(ENABLE_LOGGING_AND_PROFILING) 281 #ifdef DEBUG
283 if (FLAG_heap_stats || FLAG_log_gc) new_space_.CollectStatistics(); 282 if (FLAG_heap_stats || FLAG_log_gc) new_space_.CollectStatistics();
284 if (FLAG_heap_stats) { 283 if (FLAG_heap_stats) {
285 ReportHeapStatistics("Before GC"); 284 ReportHeapStatistics("Before GC");
286 } else if (FLAG_log_gc) { 285 } else if (FLAG_log_gc) {
287 new_space_.ReportStatistics(); 286 new_space_.ReportStatistics();
288 } 287 }
289 if (FLAG_heap_stats || FLAG_log_gc) new_space_.ClearHistograms(); 288 if (FLAG_heap_stats || FLAG_log_gc) new_space_.ClearHistograms();
290 #elif defined(DEBUG) 289 #else
291 if (FLAG_heap_stats) {
292 new_space_.CollectStatistics();
293 ReportHeapStatistics("Before GC");
294 new_space_.ClearHistograms();
295 }
296 #elif defined(ENABLE_LOGGING_AND_PROFILING)
297 if (FLAG_log_gc) { 290 if (FLAG_log_gc) {
298 new_space_.CollectStatistics(); 291 new_space_.CollectStatistics();
299 new_space_.ReportStatistics(); 292 new_space_.ReportStatistics();
300 new_space_.ClearHistograms(); 293 new_space_.ClearHistograms();
301 } 294 }
302 #endif 295 #endif // DEBUG
303 } 296 }
304 297
305 298
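
The hunk above collapses what used to be a three-way preprocessor split (DEBUG only, ENABLE_LOGGING_AND_PROFILING only, or both) into a single #ifdef DEBUG / #else pair, since logging support is no longer optional in this branch. Below is a minimal standalone sketch of that resulting shape; the flag variables and report helpers are hypothetical stand-ins for FLAG_heap_stats, FLAG_log_gc and the NewSpace reporting calls, not the V8 APIs themselves.

    #include <cstdio>

    bool flag_heap_stats = false;  // hypothetical stand-in for --heap-stats
    bool flag_log_gc = true;       // hypothetical stand-in for --log-gc

    void CollectStatistics() { std::puts("collect new-space histograms"); }
    void ReportHeapStatistics(const char* phase) {
      std::printf("full heap stats (%s)\n", phase);
    }
    void ReportNewSpaceStatistics() { std::puts("new-space stats only"); }
    void ClearHistograms() { std::puts("clear histograms"); }

    // Only one compile-time axis is left after the change: DEBUG or not.
    void ReportStatisticsBeforeGC() {
    #ifdef DEBUG
      // Debug builds may print either the full heap report or only the
      // new-space report, but statistics are collected and cleared exactly
      // once, hence the shared guards.
      if (flag_heap_stats || flag_log_gc) CollectStatistics();
      if (flag_heap_stats) {
        ReportHeapStatistics("Before GC");
      } else if (flag_log_gc) {
        ReportNewSpaceStatistics();
      }
      if (flag_heap_stats || flag_log_gc) ClearHistograms();
    #else
      // Release builds only honour --log-gc; the full heap report is debug-only.
      if (flag_log_gc) {
        CollectStatistics();
        ReportNewSpaceStatistics();
        ClearHistograms();
      }
    #endif
    }

    int main() { ReportStatisticsBeforeGC(); }
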
306 #if defined(ENABLE_LOGGING_AND_PROFILING)
307 void Heap::PrintShortHeapStatistics() { 299 void Heap::PrintShortHeapStatistics() {
308 if (!FLAG_trace_gc_verbose) return; 300 if (!FLAG_trace_gc_verbose) return;
309 PrintF("Memory allocator, used: %8" V8_PTR_PREFIX "d" 301 PrintF("Memory allocator, used: %8" V8_PTR_PREFIX "d"
310 ", available: %8" V8_PTR_PREFIX "d\n", 302 ", available: %8" V8_PTR_PREFIX "d\n",
311 isolate_->memory_allocator()->Size(), 303 isolate_->memory_allocator()->Size(),
312 isolate_->memory_allocator()->Available()); 304 isolate_->memory_allocator()->Available());
313 PrintF("New space, used: %8" V8_PTR_PREFIX "d" 305 PrintF("New space, used: %8" V8_PTR_PREFIX "d"
314 ", available: %8" V8_PTR_PREFIX "d\n", 306 ", available: %8" V8_PTR_PREFIX "d\n",
315 Heap::new_space_.Size(), 307 Heap::new_space_.Size(),
316 new_space_.Available()); 308 new_space_.Available());
(...skipping 25 matching lines...)
342 ", available: %8" V8_PTR_PREFIX "d" 334 ", available: %8" V8_PTR_PREFIX "d"
343 ", waste: %8" V8_PTR_PREFIX "d\n", 335 ", waste: %8" V8_PTR_PREFIX "d\n",
344 cell_space_->Size(), 336 cell_space_->Size(),
345 cell_space_->Available(), 337 cell_space_->Available(),
346 cell_space_->Waste()); 338 cell_space_->Waste());
347 PrintF("Large object space, used: %8" V8_PTR_PREFIX "d" 339 PrintF("Large object space, used: %8" V8_PTR_PREFIX "d"
348 ", available: %8" V8_PTR_PREFIX "d\n", 340 ", available: %8" V8_PTR_PREFIX "d\n",
349 lo_space_->Size(), 341 lo_space_->Size(),
350 lo_space_->Available()); 342 lo_space_->Available());
351 } 343 }
352 #endif
353 344
354 345
355 // TODO(1238405): Combine the infrastructure for --heap-stats and 346 // TODO(1238405): Combine the infrastructure for --heap-stats and
356 // --log-gc to avoid the complicated preprocessor and flag testing. 347 // --log-gc to avoid the complicated preprocessor and flag testing.
357 void Heap::ReportStatisticsAfterGC() { 348 void Heap::ReportStatisticsAfterGC() {
358 // Similar to the before GC, we use some complicated logic to ensure that 349 // Similar to the before GC, we use some complicated logic to ensure that
359 // NewSpace statistics are logged exactly once when --log-gc is turned on. 350 // NewSpace statistics are logged exactly once when --log-gc is turned on.
360 #if defined(DEBUG) && defined(ENABLE_LOGGING_AND_PROFILING) 351 #if defined(DEBUG)
361 if (FLAG_heap_stats) { 352 if (FLAG_heap_stats) {
362 new_space_.CollectStatistics(); 353 new_space_.CollectStatistics();
363 ReportHeapStatistics("After GC"); 354 ReportHeapStatistics("After GC");
364 } else if (FLAG_log_gc) { 355 } else if (FLAG_log_gc) {
365 new_space_.ReportStatistics(); 356 new_space_.ReportStatistics();
366 } 357 }
367 #elif defined(DEBUG) 358 #else
368 if (FLAG_heap_stats) ReportHeapStatistics("After GC");
369 #elif defined(ENABLE_LOGGING_AND_PROFILING)
370 if (FLAG_log_gc) new_space_.ReportStatistics(); 359 if (FLAG_log_gc) new_space_.ReportStatistics();
371 #endif 360 #endif // DEBUG
372 } 361 }
373 #endif // defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
374 362
375 363
376 void Heap::GarbageCollectionPrologue() { 364 void Heap::GarbageCollectionPrologue() {
377 isolate_->transcendental_cache()->Clear(); 365 isolate_->transcendental_cache()->Clear();
378 ClearJSFunctionResultCaches(); 366 ClearJSFunctionResultCaches();
379 gc_count_++; 367 gc_count_++;
380 unflattened_strings_length_ = 0; 368 unflattened_strings_length_ = 0;
381 #ifdef DEBUG 369 #ifdef DEBUG
382 ASSERT(allocation_allowed_ && gc_state_ == NOT_IN_GC); 370 ASSERT(allocation_allowed_ && gc_state_ == NOT_IN_GC);
383 allow_allocation(false); 371 allow_allocation(false);
384 372
385 if (FLAG_verify_heap) { 373 if (FLAG_verify_heap) {
386 Verify(); 374 Verify();
387 } 375 }
388 376
389 if (FLAG_gc_verbose) Print(); 377 if (FLAG_gc_verbose) Print();
390 #endif 378 #endif // DEBUG
391 379
392 #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING) 380 #if defined(DEBUG)
393 ReportStatisticsBeforeGC(); 381 ReportStatisticsBeforeGC();
394 #endif 382 #endif // DEBUG
395 383
396 LiveObjectList::GCPrologue(); 384 LiveObjectList::GCPrologue();
397 } 385 }
398 386
399 intptr_t Heap::SizeOfObjects() { 387 intptr_t Heap::SizeOfObjects() {
400 intptr_t total = 0; 388 intptr_t total = 0;
401 AllSpaces spaces; 389 AllSpaces spaces;
402 for (Space* space = spaces.next(); space != NULL; space = spaces.next()) { 390 for (Space* space = spaces.next(); space != NULL; space = spaces.next()) {
403 total += space->SizeOfObjects(); 391 total += space->SizeOfObjects();
404 } 392 }
(...skipping 16 matching lines...)
421 if (FLAG_code_stats) ReportCodeStatistics("After GC"); 409 if (FLAG_code_stats) ReportCodeStatistics("After GC");
422 #endif 410 #endif
423 411
424 isolate_->counters()->alive_after_last_gc()->Set( 412 isolate_->counters()->alive_after_last_gc()->Set(
425 static_cast<int>(SizeOfObjects())); 413 static_cast<int>(SizeOfObjects()));
426 414
427 isolate_->counters()->symbol_table_capacity()->Set( 415 isolate_->counters()->symbol_table_capacity()->Set(
428 symbol_table()->Capacity()); 416 symbol_table()->Capacity());
429 isolate_->counters()->number_of_symbols()->Set( 417 isolate_->counters()->number_of_symbols()->Set(
430 symbol_table()->NumberOfElements()); 418 symbol_table()->NumberOfElements());
431 #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING) 419 #if defined(DEBUG)
432 ReportStatisticsAfterGC(); 420 ReportStatisticsAfterGC();
433 #endif 421 #endif // DEBUG
434 #ifdef ENABLE_DEBUGGER_SUPPORT
435 isolate_->debug()->AfterGarbageCollection(); 422 isolate_->debug()->AfterGarbageCollection();
436 #endif
437 } 423 }
438 424
439 425
440 void Heap::CollectAllGarbage(int flags) { 426 void Heap::CollectAllGarbage(int flags) {
441 // Since we are ignoring the return value, the exact choice of space does 427 // Since we are ignoring the return value, the exact choice of space does
442 // not matter, so long as we do not specify NEW_SPACE, which would not 428 // not matter, so long as we do not specify NEW_SPACE, which would not
443 // cause a full GC. 429 // cause a full GC.
444 mark_compact_collector_.SetFlags(flags); 430 mark_compact_collector_.SetFlags(flags);
445 CollectGarbage(OLD_POINTER_SPACE); 431 CollectGarbage(OLD_POINTER_SPACE);
446 mark_compact_collector_.SetFlags(kNoGCFlags); 432 mark_compact_collector_.SetFlags(kNoGCFlags);
(...skipping 934 matching lines...)
1381 } 1367 }
1382 1368
1383 static VisitorDispatchTable<ScavengingCallback>* GetTable() { 1369 static VisitorDispatchTable<ScavengingCallback>* GetTable() {
1384 return &table_; 1370 return &table_;
1385 } 1371 }
1386 1372
1387 private: 1373 private:
1388 enum ObjectContents { DATA_OBJECT, POINTER_OBJECT }; 1374 enum ObjectContents { DATA_OBJECT, POINTER_OBJECT };
1389 enum SizeRestriction { SMALL, UNKNOWN_SIZE }; 1375 enum SizeRestriction { SMALL, UNKNOWN_SIZE };
1390 1376
1391 #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
1392 static void RecordCopiedObject(Heap* heap, HeapObject* obj) { 1377 static void RecordCopiedObject(Heap* heap, HeapObject* obj) {
1393 bool should_record = false; 1378 bool should_record = false;
1394 #ifdef DEBUG 1379 #ifdef DEBUG
1395 should_record = FLAG_heap_stats; 1380 should_record = FLAG_heap_stats;
1396 #endif 1381 #endif
1397 #ifdef ENABLE_LOGGING_AND_PROFILING
1398 should_record = should_record || FLAG_log_gc; 1382 should_record = should_record || FLAG_log_gc;
1399 #endif
1400 if (should_record) { 1383 if (should_record) {
1401 if (heap->new_space()->Contains(obj)) { 1384 if (heap->new_space()->Contains(obj)) {
1402 heap->new_space()->RecordAllocation(obj); 1385 heap->new_space()->RecordAllocation(obj);
1403 } else { 1386 } else {
1404 heap->new_space()->RecordPromotion(obj); 1387 heap->new_space()->RecordPromotion(obj);
1405 } 1388 }
1406 } 1389 }
1407 } 1390 }
1408 #endif // defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
1409 1391
1410 // Helper function used by CopyObject to copy a source object to an 1392 // Helper function used by CopyObject to copy a source object to an
1411 // allocated target object and update the forwarding pointer in the source 1393 // allocated target object and update the forwarding pointer in the source
1412 // object. Returns the target object. 1394 // object. Returns the target object.
1413 INLINE(static HeapObject* MigrateObject(Heap* heap, 1395 INLINE(static HeapObject* MigrateObject(Heap* heap,
1414 HeapObject* source, 1396 HeapObject* source,
1415 HeapObject* target, 1397 HeapObject* target,
1416 int size)) { 1398 int size)) {
1417 // Copy the content of source to target. 1399 // Copy the content of source to target.
1418 heap->CopyBlock(target->address(), source->address(), size); 1400 heap->CopyBlock(target->address(), source->address(), size);
1419 1401
1420 // Set the forwarding address. 1402 // Set the forwarding address.
1421 source->set_map_word(MapWord::FromForwardingAddress(target)); 1403 source->set_map_word(MapWord::FromForwardingAddress(target));
1422 1404
1423 if (logging_and_profiling_mode == LOGGING_AND_PROFILING_ENABLED) { 1405 if (logging_and_profiling_mode == LOGGING_AND_PROFILING_ENABLED) {
1424 #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
1425 // Update NewSpace stats if necessary. 1406 // Update NewSpace stats if necessary.
1426 RecordCopiedObject(heap, target); 1407 RecordCopiedObject(heap, target);
1427 #endif
1428 HEAP_PROFILE(heap, ObjectMoveEvent(source->address(), target->address())); 1408 HEAP_PROFILE(heap, ObjectMoveEvent(source->address(), target->address()));
1429 #if defined(ENABLE_LOGGING_AND_PROFILING)
1430 Isolate* isolate = heap->isolate(); 1409 Isolate* isolate = heap->isolate();
1431 if (isolate->logger()->is_logging() || 1410 if (isolate->logger()->is_logging() ||
1432 CpuProfiler::is_profiling(isolate)) { 1411 CpuProfiler::is_profiling(isolate)) {
1433 if (target->IsSharedFunctionInfo()) { 1412 if (target->IsSharedFunctionInfo()) {
1434 PROFILE(isolate, SharedFunctionInfoMoveEvent( 1413 PROFILE(isolate, SharedFunctionInfoMoveEvent(
1435 source->address(), target->address())); 1414 source->address(), target->address()));
1436 } 1415 }
1437 } 1416 }
1438 #endif
1439 } 1417 }
1440 1418
1441 if (marks_handling == TRANSFER_MARKS) { 1419 if (marks_handling == TRANSFER_MARKS) {
1442 if (Marking::TransferColor(source, target)) { 1420 if (Marking::TransferColor(source, target)) {
1443 MemoryChunk::IncrementLiveBytes(target->address(), size); 1421 MemoryChunk::IncrementLiveBytes(target->address(), size);
1444 } 1422 }
1445 } 1423 }
1446 1424
1447 return target; 1425 return target;
1448 } 1426 }
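
In the MigrateObject hunk above, the #if defined(...) guards around the instrumentation disappear; whether a copied object gets recorded is now decided by the logging_and_profiling_mode template parameter alone, so the disabled instantiation compiles the bookkeeping away. A small self-contained sketch of that technique follows; ObjectMover, RecordCopy and the surrounding names are invented for illustration, not the classes in the diff.

    #include <cstdio>
    #include <cstring>

    enum LoggingAndProfilingMode {
      LOGGING_AND_PROFILING_ENABLED,
      LOGGING_AND_PROFILING_DISABLED
    };

    void RecordCopy(const void* from, const void* to) {
      std::printf("moved %p -> %p\n",
                  const_cast<void*>(from), const_cast<void*>(to));
    }

    template <LoggingAndProfilingMode mode>
    struct ObjectMover {
      // Copies `size` bytes and, only in the ENABLED instantiation, records
      // the move; in the DISABLED instantiation the condition is a
      // compile-time constant and the call is optimized out.
      static void* Migrate(void* target, const void* source, std::size_t size) {
        std::memcpy(target, source, size);
        if (mode == LOGGING_AND_PROFILING_ENABLED) {
          RecordCopy(source, target);
        }
        return target;
      }
    };

    int main() {
      char src[16] = "payload";
      char dst[16];
      ObjectMover<LOGGING_AND_PROFILING_ENABLED>::Migrate(dst, src, sizeof(src));
      ObjectMover<LOGGING_AND_PROFILING_DISABLED>::Migrate(dst, src, sizeof(src));
    }
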
(...skipping 170 matching lines...)
1619 ScavengingVisitor<TRANSFER_MARKS, 1597 ScavengingVisitor<TRANSFER_MARKS,
1620 LOGGING_AND_PROFILING_DISABLED>::Initialize(); 1598 LOGGING_AND_PROFILING_DISABLED>::Initialize();
1621 ScavengingVisitor<IGNORE_MARKS, LOGGING_AND_PROFILING_DISABLED>::Initialize(); 1599 ScavengingVisitor<IGNORE_MARKS, LOGGING_AND_PROFILING_DISABLED>::Initialize();
1622 ScavengingVisitor<TRANSFER_MARKS, 1600 ScavengingVisitor<TRANSFER_MARKS,
1623 LOGGING_AND_PROFILING_ENABLED>::Initialize(); 1601 LOGGING_AND_PROFILING_ENABLED>::Initialize();
1624 ScavengingVisitor<IGNORE_MARKS, LOGGING_AND_PROFILING_ENABLED>::Initialize(); 1602 ScavengingVisitor<IGNORE_MARKS, LOGGING_AND_PROFILING_ENABLED>::Initialize();
1625 } 1603 }
1626 1604
1627 1605
1628 void Heap::SelectScavengingVisitorsTable() { 1606 void Heap::SelectScavengingVisitorsTable() {
1629 #ifdef ENABLE_LOGGING_AND_PROFILING
1630 bool logging_and_profiling = 1607 bool logging_and_profiling =
1631 isolate()->logger()->is_logging() || 1608 isolate()->logger()->is_logging() ||
1632 CpuProfiler::is_profiling(isolate()) || 1609 CpuProfiler::is_profiling(isolate()) ||
1633 (isolate()->heap_profiler() != NULL && 1610 (isolate()->heap_profiler() != NULL &&
1634 isolate()->heap_profiler()->is_profiling()); 1611 isolate()->heap_profiler()->is_profiling());
1635 #else
1636 bool logging_and_profiling = false;
1637 #endif
1638 1612
1639 if (!incremental_marking()->IsMarking()) { 1613 if (!incremental_marking()->IsMarking()) {
1640 if (!logging_and_profiling) { 1614 if (!logging_and_profiling) {
1641 scavenging_visitors_table_.CopyFrom( 1615 scavenging_visitors_table_.CopyFrom(
1642 ScavengingVisitor<IGNORE_MARKS, 1616 ScavengingVisitor<IGNORE_MARKS,
1643 LOGGING_AND_PROFILING_DISABLED>::GetTable()); 1617 LOGGING_AND_PROFILING_DISABLED>::GetTable());
1644 } else { 1618 } else {
1645 scavenging_visitors_table_.CopyFrom( 1619 scavenging_visitors_table_.CopyFrom(
1646 ScavengingVisitor<IGNORE_MARKS, 1620 ScavengingVisitor<IGNORE_MARKS,
1647 LOGGING_AND_PROFILING_ENABLED>::GetTable()); 1621 LOGGING_AND_PROFILING_ENABLED>::GetTable());
(...skipping 1714 matching lines...)
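
The surrounding code builds a ScavengingVisitor dispatch table for every combination of marks handling and logging mode up front, and SelectScavengingVisitorsTable then copies whichever table matches the current runtime state; with ENABLE_LOGGING_AND_PROFILING gone, that state is always derived from the live logger and profiler rather than a build flag. Below is a hedged sketch of the select-a-prebuilt-table idea, using invented names (Table, kTables, SelectTable) and a single entry instead of a per-object-kind table.

    #include <cstdio>

    using Callback = void (*)();

    void FastPath()         { std::puts("fast path (no logging)"); }
    void InstrumentedPath() { std::puts("instrumented path (logging)"); }

    struct Table {
      Callback visit;  // a real table would hold one entry per object kind
    };

    // Built once up front, like the statically instantiated visitor tables.
    const Table kTables[2] = {
      { FastPath },          // logging/profiling disabled
      { InstrumentedPath },  // logging/profiling enabled
    };

    Table current_table;

    // Mirrors the shape of SelectScavengingVisitorsTable: evaluate the
    // runtime condition once per GC, then copy the matching prebuilt table
    // into the active slot so the hot path stays branch-free.
    void SelectTable(bool logging_or_profiling_active) {
      current_table = kTables[logging_or_profiling_active ? 1 : 0];
    }

    int main() {
      SelectTable(false);
      current_table.visit();
      SelectTable(true);
      current_table.visit();
    }
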
3362 3336
3363 3337
3364 MaybeObject* Heap::AllocateJSProxy(Object* handler, Object* prototype) { 3338 MaybeObject* Heap::AllocateJSProxy(Object* handler, Object* prototype) {
3365 // Allocate map. 3339 // Allocate map.
3366 // TODO(rossberg): Once we optimize proxies, think about a scheme to share 3340 // TODO(rossberg): Once we optimize proxies, think about a scheme to share
3367 // maps. Will probably depend on the identity of the handler object, too. 3341 // maps. Will probably depend on the identity of the handler object, too.
3368 Map* map; 3342 Map* map;
3369 MaybeObject* maybe_map_obj = AllocateMap(JS_PROXY_TYPE, JSProxy::kSize); 3343 MaybeObject* maybe_map_obj = AllocateMap(JS_PROXY_TYPE, JSProxy::kSize);
3370 if (!maybe_map_obj->To<Map>(&map)) return maybe_map_obj; 3344 if (!maybe_map_obj->To<Map>(&map)) return maybe_map_obj;
3371 map->set_prototype(prototype); 3345 map->set_prototype(prototype);
3372 map->set_pre_allocated_property_fields(1);
3373 map->set_inobject_properties(1);
3374 3346
3375 // Allocate the proxy object. 3347 // Allocate the proxy object.
3376 Object* result; 3348 Object* result;
3377 MaybeObject* maybe_result = Allocate(map, NEW_SPACE); 3349 MaybeObject* maybe_result = Allocate(map, NEW_SPACE);
3378 if (!maybe_result->ToObject(&result)) return maybe_result; 3350 if (!maybe_result->ToObject(&result)) return maybe_result;
3379 JSProxy::cast(result)->set_handler(handler); 3351 JSProxy::cast(result)->set_handler(handler);
3352 JSProxy::cast(result)->set_padding(Smi::FromInt(0));
3380 return result; 3353 return result;
3381 } 3354 }
3382 3355
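
The allocation code above follows the MaybeObject convention used throughout this file: any allocation can fail (typically to signal that a garbage collection is needed), so each intermediate result is unwrapped with a check and the failure is returned to the caller immediately. A rough standalone analogue of that early-return unwrapping, using a tiny invented Maybe type and toy Map/Proxy structs rather than the real ones:

    #include <cstdio>

    template <typename T>
    struct Maybe {
      bool failed;
      T value;
      bool To(T* out) const {          // returns false on allocation failure
        if (failed) return false;
        *out = value;
        return true;
      }
    };

    struct Map   { int instance_size; };
    struct Proxy { Map map; int handler; };

    Maybe<Map>   AllocateMap(int size)   { return { false, { size } }; }
    Maybe<Proxy> AllocateObject(Map map) { return { false, { map, 0 } }; }

    Maybe<Proxy> AllocateProxy(int handler) {
      Map map;
      Maybe<Map> maybe_map = AllocateMap(16);
      if (!maybe_map.To(&map)) return { true, {} };    // propagate the failure

      Proxy proxy;
      Maybe<Proxy> maybe_proxy = AllocateObject(map);
      if (!maybe_proxy.To(&proxy)) return { true, {} };

      proxy.handler = handler;                         // initialize fields last
      return { false, proxy };
    }

    int main() {
      Proxy p;
      if (AllocateProxy(42).To(&p)) std::printf("handler=%d\n", p.handler);
    }
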
3383 3356
3384 MaybeObject* Heap::AllocateGlobalObject(JSFunction* constructor) { 3357 MaybeObject* Heap::AllocateGlobalObject(JSFunction* constructor) {
3385 ASSERT(constructor->has_initial_map()); 3358 ASSERT(constructor->has_initial_map());
3386 Map* map = constructor->initial_map(); 3359 Map* map = constructor->initial_map();
3387 3360
3388 // Make sure no field properties are described in the initial map. 3361 // Make sure no field properties are described in the initial map.
3389 // This guarantees us that normalizing the properties does not 3362 // This guarantees us that normalizing the properties does not
(...skipping 94 matching lines...)
3484 if (!maybe_clone->ToObject(&clone)) return maybe_clone; 3457 if (!maybe_clone->ToObject(&clone)) return maybe_clone;
3485 } 3458 }
3486 ASSERT(InNewSpace(clone)); 3459 ASSERT(InNewSpace(clone));
3487 // Since we know the clone is allocated in new space, we can copy 3460 // Since we know the clone is allocated in new space, we can copy
3488 // the contents without worrying about updating the write barrier. 3461 // the contents without worrying about updating the write barrier.
3489 CopyBlock(HeapObject::cast(clone)->address(), 3462 CopyBlock(HeapObject::cast(clone)->address(),
3490 source->address(), 3463 source->address(),
3491 object_size); 3464 object_size);
3492 } 3465 }
3493 3466
3494 FixedArray* elements = FixedArray::cast(source->elements()); 3467 FixedArrayBase* elements = FixedArrayBase::cast(source->elements());
3495 FixedArray* properties = FixedArray::cast(source->properties()); 3468 FixedArray* properties = FixedArray::cast(source->properties());
3496 // Update elements if necessary. 3469 // Update elements if necessary.
3497 if (elements->length() > 0) { 3470 if (elements->length() > 0) {
3498 Object* elem; 3471 Object* elem;
3499 { MaybeObject* maybe_elem = 3472 { MaybeObject* maybe_elem;
3500 (elements->map() == fixed_cow_array_map()) ? 3473 if (elements->map() == fixed_cow_array_map()) {
3501 elements : CopyFixedArray(elements); 3474 maybe_elem = FixedArray::cast(elements);
3475 } else if (source->HasFastDoubleElements()) {
3476 maybe_elem = CopyFixedDoubleArray(FixedDoubleArray::cast(elements));
3477 } else {
3478 maybe_elem = CopyFixedArray(FixedArray::cast(elements));
3479 }
3502 if (!maybe_elem->ToObject(&elem)) return maybe_elem; 3480 if (!maybe_elem->ToObject(&elem)) return maybe_elem;
3503 } 3481 }
3504 JSObject::cast(clone)->set_elements(FixedArray::cast(elem)); 3482 JSObject::cast(clone)->set_elements(FixedArrayBase::cast(elem));
3505 } 3483 }
3506 // Update properties if necessary. 3484 // Update properties if necessary.
3507 if (properties->length() > 0) { 3485 if (properties->length() > 0) {
3508 Object* prop; 3486 Object* prop;
3509 { MaybeObject* maybe_prop = CopyFixedArray(properties); 3487 { MaybeObject* maybe_prop = CopyFixedArray(properties);
3510 if (!maybe_prop->ToObject(&prop)) return maybe_prop; 3488 if (!maybe_prop->ToObject(&prop)) return maybe_prop;
3511 } 3489 }
3512 JSObject::cast(clone)->set_properties(FixedArray::cast(prop)); 3490 JSObject::cast(clone)->set_properties(FixedArray::cast(prop));
3513 } 3491 }
3514 // Return the new clone. 3492 // Return the new clone.
3515 return clone; 3493 return clone;
3516 } 3494 }
3517 3495
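
The change to CopyJSObject above adds a third case when cloning the elements backing store: copy-on-write fixed arrays are shared as-is, fast double arrays go through CopyFixedDoubleArray, and everything else takes the ordinary fixed-array copy. A compact standalone sketch of that branch structure follows; the Elements hierarchy and helpers are invented for the example, not the V8 classes.

    #include <memory>
    #include <vector>

    struct Elements {
      virtual ~Elements() = default;
      virtual bool IsCopyOnWrite() const { return false; }
      virtual bool IsDoubleBacked() const { return false; }
    };

    struct TaggedElements : Elements {
      std::vector<void*> slots;
    };

    struct DoubleElements : Elements {
      bool IsDoubleBacked() const override { return true; }
      std::vector<double> values;
    };

    struct CowElements : TaggedElements {
      bool IsCopyOnWrite() const override { return true; }
    };

    std::shared_ptr<Elements> CloneElements(const std::shared_ptr<Elements>& src) {
      if (src->IsCopyOnWrite()) {
        // Copy-on-write stores are shared between the source and the clone.
        return src;
      }
      if (src->IsDoubleBacked()) {
        // Unboxed double arrays need a copier that knows their raw layout.
        auto* d = static_cast<DoubleElements*>(src.get());
        auto copy = std::make_shared<DoubleElements>();
        copy->values = d->values;
        return copy;
      }
      // Ordinary tagged-pointer arrays take the regular copy path.
      auto* t = static_cast<TaggedElements*>(src.get());
      auto copy = std::make_shared<TaggedElements>();
      copy->slots = t->slots;
      return copy;
    }

    int main() {
      auto cow = std::make_shared<CowElements>();
      auto shared_backing = CloneElements(cow);  // same object, not copied
      (void)shared_backing;
    }
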
3518 3496
3497 MaybeObject* Heap::ReinitializeJSProxyAsJSObject(JSProxy* object) {
3498 // Allocate fresh map.
3499 // TODO(rossberg): Once we optimize proxies, cache these maps.
3500 Map* map;
3501 MaybeObject* maybe_map_obj =
3502 AllocateMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
3503 if (!maybe_map_obj->To<Map>(&map)) return maybe_map_obj;
3504
3505 // Check that the receiver has the same size as a fresh object.
3506 ASSERT(map->instance_size() == object->map()->instance_size());
3507
3508 map->set_prototype(object->map()->prototype());
3509
3510 // Allocate the backing storage for the properties.
3511 int prop_size = map->unused_property_fields() - map->inobject_properties();
3512 Object* properties;
3513 { MaybeObject* maybe_properties = AllocateFixedArray(prop_size, TENURED);
3514 if (!maybe_properties->ToObject(&properties)) return maybe_properties;
3515 }
3516
3517 // Reset the map for the object.
3518 object->set_map(map);
3519
3520 // Reinitialize the object from the constructor map.
3521 InitializeJSObjectFromMap(JSObject::cast(object),
3522 FixedArray::cast(properties), map);
3523 return object;
3524 }
3525
3526
3519 MaybeObject* Heap::ReinitializeJSGlobalProxy(JSFunction* constructor, 3527 MaybeObject* Heap::ReinitializeJSGlobalProxy(JSFunction* constructor,
3520 JSGlobalProxy* object) { 3528 JSGlobalProxy* object) {
3521 ASSERT(constructor->has_initial_map()); 3529 ASSERT(constructor->has_initial_map());
3522 Map* map = constructor->initial_map(); 3530 Map* map = constructor->initial_map();
3523 3531
3524 // Check that the already allocated object has the same size and type as 3532 // Check that the already allocated object has the same size and type as
3525 // objects allocated using the constructor. 3533 // objects allocated using the constructor.
3526 ASSERT(map->instance_size() == object->map()->instance_size()); 3534 ASSERT(map->instance_size() == object->map()->instance_size());
3527 ASSERT(map->instance_type() == object->map()->instance_type()); 3535 ASSERT(map->instance_type() == object->map()->instance_type());
3528 3536
(...skipping 294 matching lines...)
3823 result->set_length(len); 3831 result->set_length(len);
3824 3832
3825 // Copy the content 3833 // Copy the content
3826 AssertNoAllocation no_gc; 3834 AssertNoAllocation no_gc;
3827 WriteBarrierMode mode = result->GetWriteBarrierMode(no_gc); 3835 WriteBarrierMode mode = result->GetWriteBarrierMode(no_gc);
3828 for (int i = 0; i < len; i++) result->set(i, src->get(i), mode); 3836 for (int i = 0; i < len; i++) result->set(i, src->get(i), mode);
3829 return result; 3837 return result;
3830 } 3838 }
3831 3839
3832 3840
3841 MaybeObject* Heap::CopyFixedDoubleArrayWithMap(FixedDoubleArray* src,
3842 Map* map) {
3843 int len = src->length();
3844 Object* obj;
3845 { MaybeObject* maybe_obj = AllocateRawFixedDoubleArray(len, NOT_TENURED);
3846 if (!maybe_obj->ToObject(&obj)) return maybe_obj;
3847 }
3848 HeapObject* dst = HeapObject::cast(obj);
3849 dst->set_map(map);
3850 CopyBlock(
3851 dst->address() + FixedDoubleArray::kLengthOffset,
3852 src->address() + FixedDoubleArray::kLengthOffset,
3853 FixedDoubleArray::SizeFor(len) - FixedDoubleArray::kLengthOffset);
3854 return obj;
3855 }
3856
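
The new CopyFixedDoubleArrayWithMap above copies everything from the length field onward as one raw block, which lets the destination carry a different map while sharing the same element layout. Below is a hedged sketch of that offset-based block copy over a plain struct; the DoubleArray layout and names are invented for the example.

    #include <cstddef>
    #include <cstdio>
    #include <cstring>

    struct DoubleArray {
      const char* map;   // header word that is NOT copied from the source
      int length;        // everything from here on is copied as one block
      double values[4];  // element storage (fixed size keeps the sketch simple)
    };

    constexpr std::size_t kLengthOffset = offsetof(DoubleArray, length);

    // Mirrors the shape of the CopyBlock call: give the destination its own
    // map, then copy length + elements in one raw block starting at
    // kLengthOffset.
    void CopyWithMap(DoubleArray* dst, const DoubleArray* src, const char* new_map) {
      dst->map = new_map;
      std::memcpy(reinterpret_cast<char*>(dst) + kLengthOffset,
                  reinterpret_cast<const char*>(src) + kLengthOffset,
                  sizeof(DoubleArray) - kLengthOffset);
    }

    int main() {
      DoubleArray src = { "old map", 3, { 1.5, 2.5, 3.5, 0.0 } };
      DoubleArray dst = {};
      CopyWithMap(&dst, &src, "new map");
      std::printf("%s %d %.1f\n", dst.map, dst.length, dst.values[2]);
    }
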
3857
3833 MaybeObject* Heap::AllocateFixedArray(int length) { 3858 MaybeObject* Heap::AllocateFixedArray(int length) {
3834 ASSERT(length >= 0); 3859 ASSERT(length >= 0);
3835 if (length == 0) return empty_fixed_array(); 3860 if (length == 0) return empty_fixed_array();
3836 Object* result; 3861 Object* result;
3837 { MaybeObject* maybe_result = AllocateRawFixedArray(length); 3862 { MaybeObject* maybe_result = AllocateRawFixedArray(length);
3838 if (!maybe_result->ToObject(&result)) return maybe_result; 3863 if (!maybe_result->ToObject(&result)) return maybe_result;
3839 } 3864 }
3840 // Initialize header. 3865 // Initialize header.
3841 FixedArray* array = reinterpret_cast<FixedArray*>(result); 3866 FixedArray* array = reinterpret_cast<FixedArray*>(result);
3842 array->set_map(fixed_array_map()); 3867 array->set_map(fixed_array_map());
(...skipping 1329 matching lines...)
5172 5197
5173 5198
5174 void Heap::Shrink() { 5199 void Heap::Shrink() {
5175 // Try to shrink all paged spaces. 5200 // Try to shrink all paged spaces.
5176 PagedSpaces spaces; 5201 PagedSpaces spaces;
5177 for (PagedSpace* space = spaces.next(); space != NULL; space = spaces.next()) 5202 for (PagedSpace* space = spaces.next(); space != NULL; space = spaces.next())
5178 space->Shrink(); 5203 space->Shrink();
5179 } 5204 }
5180 5205
5181 5206
5182 #ifdef ENABLE_HEAP_PROTECTION
5183
5184 void Heap::Protect() {
5185 if (HasBeenSetup()) {
5186 AllSpaces spaces;
5187 for (Space* space = spaces.next(); space != NULL; space = spaces.next())
5188 space->Protect();
5189 }
5190 }
5191
5192
5193 void Heap::Unprotect() {
5194 if (HasBeenSetup()) {
5195 AllSpaces spaces;
5196 for (Space* space = spaces.next(); space != NULL; space = spaces.next())
5197 space->Unprotect();
5198 }
5199 }
5200
5201 #endif
5202
5203
5204 void Heap::AddGCPrologueCallback(GCPrologueCallback callback, GCType gc_type) { 5207 void Heap::AddGCPrologueCallback(GCPrologueCallback callback, GCType gc_type) {
5205 ASSERT(callback != NULL); 5208 ASSERT(callback != NULL);
5206 GCPrologueCallbackPair pair(callback, gc_type); 5209 GCPrologueCallbackPair pair(callback, gc_type);
5207 ASSERT(!gc_prologue_callbacks_.Contains(pair)); 5210 ASSERT(!gc_prologue_callbacks_.Contains(pair));
5208 return gc_prologue_callbacks_.Add(pair); 5211 return gc_prologue_callbacks_.Add(pair);
5209 } 5212 }
5210 5213
5211 5214
5212 void Heap::RemoveGCPrologueCallback(GCPrologueCallback callback) { 5215 void Heap::RemoveGCPrologueCallback(GCPrologueCallback callback) {
5213 ASSERT(callback != NULL); 5216 ASSERT(callback != NULL);
(...skipping 623 matching lines...)
5837 PrintF("stepscount=%d ", steps_count_since_last_gc_); 5840 PrintF("stepscount=%d ", steps_count_since_last_gc_);
5838 PrintF("stepstook=%d ", static_cast<int>(steps_took_since_last_gc_)); 5841 PrintF("stepstook=%d ", static_cast<int>(steps_took_since_last_gc_));
5839 } else { 5842 } else {
5840 PrintF("stepscount=%d ", steps_count_); 5843 PrintF("stepscount=%d ", steps_count_);
5841 PrintF("stepstook=%d ", static_cast<int>(steps_took_)); 5844 PrintF("stepstook=%d ", static_cast<int>(steps_took_));
5842 } 5845 }
5843 5846
5844 PrintF("\n"); 5847 PrintF("\n");
5845 } 5848 }
5846 5849
5847 #if defined(ENABLE_LOGGING_AND_PROFILING)
5848 heap_->PrintShortHeapStatistics(); 5850 heap_->PrintShortHeapStatistics();
5849 #endif
5850 } 5851 }
5851 5852
5852 5853
5853 const char* GCTracer::CollectorString() { 5854 const char* GCTracer::CollectorString() {
5854 switch (collector_) { 5855 switch (collector_) {
5855 case SCAVENGER: 5856 case SCAVENGER:
5856 return "Scavenge"; 5857 return "Scavenge";
5857 case MARK_COMPACTOR: 5858 case MARK_COMPACTOR:
5858 return "Mark-sweep"; 5859 return "Mark-sweep";
5859 } 5860 }
(...skipping 143 matching lines...)
6003 } 6004 }
6004 isolate_->heap()->store_buffer()->Filter(MemoryChunk::ABOUT_TO_BE_FREED); 6005 isolate_->heap()->store_buffer()->Filter(MemoryChunk::ABOUT_TO_BE_FREED);
6005 for (chunk = chunks_queued_for_free_; chunk != NULL; chunk = next) { 6006 for (chunk = chunks_queued_for_free_; chunk != NULL; chunk = next) {
6006 next = chunk->next_chunk(); 6007 next = chunk->next_chunk();
6007 isolate_->memory_allocator()->Free(chunk); 6008 isolate_->memory_allocator()->Free(chunk);
6008 } 6009 }
6009 chunks_queued_for_free_ = NULL; 6010 chunks_queued_for_free_ = NULL;
6010 } 6011 }
6011 6012
6012 } } // namespace v8::internal 6013 } } // namespace v8::internal