Chromium Code Reviews

Side by Side Diff: src/heap.cc

Issue 7350014: Remove the ability to compile without logging and profiling (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Addressed review comments Created 9 years, 5 months ago
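This change deletes the ENABLE_LOGGING_AND_PROFILING compile-time switch from heap.cc: logging and profiling support is now always built in, and only DEBUG-only statistics code stays behind a preprocessor guard. A minimal sketch of the recurring before/after pattern in this patch (illustrative shape, not literal lines from the diff):

    // Before: the whole feature vanished unless the macro was defined.
    #ifdef ENABLE_LOGGING_AND_PROFILING
      if (FLAG_log_gc) new_space_.ReportStatistics();
    #endif

    // After: always compiled; the runtime flag alone decides behavior.
    if (FLAG_log_gc) new_space_.ReportStatistics();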
OLD | NEW
1 // Copyright 2011 the V8 project authors. All rights reserved. 1 // Copyright 2011 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 275 matching lines...)
286 return MARK_COMPACTOR; 286 return MARK_COMPACTOR;
287 } 287 }
288 288
289 // Default 289 // Default
290 return SCAVENGER; 290 return SCAVENGER;
291 } 291 }
292 292
293 293
294 // TODO(1238405): Combine the infrastructure for --heap-stats and 294 // TODO(1238405): Combine the infrastructure for --heap-stats and
295 // --log-gc to avoid the complicated preprocessor and flag testing. 295 // --log-gc to avoid the complicated preprocessor and flag testing.
296 #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
297 void Heap::ReportStatisticsBeforeGC() { 296 void Heap::ReportStatisticsBeforeGC() {
298 // Heap::ReportHeapStatistics will also log NewSpace statistics when 297 // Heap::ReportHeapStatistics will also log NewSpace statistics when
299 // compiled with ENABLE_LOGGING_AND_PROFILING and --log-gc is set. The 298 // --log-gc is set. The following logic is used to avoid
300 // following logic is used to avoid double logging. 299 // double logging.
301 #if defined(DEBUG) && defined(ENABLE_LOGGING_AND_PROFILING) 300 #ifdef DEBUG
302 if (FLAG_heap_stats || FLAG_log_gc) new_space_.CollectStatistics(); 301 if (FLAG_heap_stats || FLAG_log_gc) new_space_.CollectStatistics();
303 if (FLAG_heap_stats) { 302 if (FLAG_heap_stats) {
304 ReportHeapStatistics("Before GC"); 303 ReportHeapStatistics("Before GC");
305 } else if (FLAG_log_gc) { 304 } else if (FLAG_log_gc) {
306 new_space_.ReportStatistics(); 305 new_space_.ReportStatistics();
307 } 306 }
308 if (FLAG_heap_stats || FLAG_log_gc) new_space_.ClearHistograms(); 307 if (FLAG_heap_stats || FLAG_log_gc) new_space_.ClearHistograms();
309 #elif defined(DEBUG) 308 #else
310 if (FLAG_heap_stats) {
311 new_space_.CollectStatistics();
312 ReportHeapStatistics("Before GC");
313 new_space_.ClearHistograms();
314 }
315 #elif defined(ENABLE_LOGGING_AND_PROFILING)
316 if (FLAG_log_gc) { 309 if (FLAG_log_gc) {
317 new_space_.CollectStatistics(); 310 new_space_.CollectStatistics();
318 new_space_.ReportStatistics(); 311 new_space_.ReportStatistics();
319 new_space_.ClearHistograms(); 312 new_space_.ClearHistograms();
320 } 313 }
321 #endif 314 #endif // DEBUG
322 } 315 }
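The TODO(1238405) above asks for the --heap-stats and --log-gc paths to be unified. One hypothetical shape for that unification, mirroring the flag logic visible in the function above (the helper name and signature are invented for illustration and are not part of this patch):

    // Hypothetical combined reporter; both flags already guard
    // CollectStatistics/ClearHistograms in the code above.
    void Heap::ReportNewSpaceStatistics(const char* phase) {
      if (!FLAG_heap_stats && !FLAG_log_gc) return;
      new_space_.CollectStatistics();
      if (FLAG_heap_stats) {
        ReportHeapStatistics(phase);    // full report also covers NewSpace
      } else {
        new_space_.ReportStatistics();  // --log-gc alone: NewSpace stats only
      }
      new_space_.ClearHistograms();
    }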
323 316
324 317
325 #if defined(ENABLE_LOGGING_AND_PROFILING)
326 void Heap::PrintShortHeapStatistics() { 318 void Heap::PrintShortHeapStatistics() {
327 if (!FLAG_trace_gc_verbose) return; 319 if (!FLAG_trace_gc_verbose) return;
328 PrintF("Memory allocator, used: %8" V8_PTR_PREFIX "d" 320 PrintF("Memory allocator, used: %8" V8_PTR_PREFIX "d"
329 ", available: %8" V8_PTR_PREFIX "d\n", 321 ", available: %8" V8_PTR_PREFIX "d\n",
330 isolate_->memory_allocator()->Size(), 322 isolate_->memory_allocator()->Size(),
331 isolate_->memory_allocator()->Available()); 323 isolate_->memory_allocator()->Available());
332 PrintF("New space, used: %8" V8_PTR_PREFIX "d" 324 PrintF("New space, used: %8" V8_PTR_PREFIX "d"
333 ", available: %8" V8_PTR_PREFIX "d\n", 325 ", available: %8" V8_PTR_PREFIX "d\n",
334 Heap::new_space_.Size(), 326 Heap::new_space_.Size(),
335 new_space_.Available()); 327 new_space_.Available());
(...skipping 25 matching lines...)
361 ", available: %8" V8_PTR_PREFIX "d" 353 ", available: %8" V8_PTR_PREFIX "d"
362 ", waste: %8" V8_PTR_PREFIX "d\n", 354 ", waste: %8" V8_PTR_PREFIX "d\n",
363 cell_space_->Size(), 355 cell_space_->Size(),
364 cell_space_->Available(), 356 cell_space_->Available(),
365 cell_space_->Waste()); 357 cell_space_->Waste());
366 PrintF("Large object space, used: %8" V8_PTR_PREFIX "d" 358 PrintF("Large object space, used: %8" V8_PTR_PREFIX "d"
367 ", available: %8" V8_PTR_PREFIX "d\n", 359 ", available: %8" V8_PTR_PREFIX "d\n",
368 lo_space_->Size(), 360 lo_space_->Size(),
369 lo_space_->Available()); 361 lo_space_->Available());
370 } 362 }
371 #endif
372 363
373 364
374 // TODO(1238405): Combine the infrastructure for --heap-stats and 365 // TODO(1238405): Combine the infrastructure for --heap-stats and
375 // --log-gc to avoid the complicated preprocessor and flag testing. 366 // --log-gc to avoid the complicated preprocessor and flag testing.
376 void Heap::ReportStatisticsAfterGC() { 367 void Heap::ReportStatisticsAfterGC() {
377 // Similar to the before GC, we use some complicated logic to ensure that 368 // Similar to the before GC, we use some complicated logic to ensure that
378 // NewSpace statistics are logged exactly once when --log-gc is turned on. 369 // NewSpace statistics are logged exactly once when --log-gc is turned on.
379 #if defined(DEBUG) && defined(ENABLE_LOGGING_AND_PROFILING) 370 #if defined(DEBUG)
380 if (FLAG_heap_stats) { 371 if (FLAG_heap_stats) {
381 new_space_.CollectStatistics(); 372 new_space_.CollectStatistics();
382 ReportHeapStatistics("After GC"); 373 ReportHeapStatistics("After GC");
383 } else if (FLAG_log_gc) { 374 } else if (FLAG_log_gc) {
384 new_space_.ReportStatistics(); 375 new_space_.ReportStatistics();
385 } 376 }
386 #elif defined(DEBUG) 377 #else
387 if (FLAG_heap_stats) ReportHeapStatistics("After GC");
388 #elif defined(ENABLE_LOGGING_AND_PROFILING)
389 if (FLAG_log_gc) new_space_.ReportStatistics(); 378 if (FLAG_log_gc) new_space_.ReportStatistics();
390 #endif 379 #endif // DEBUG
391 } 380 }
392 #endif // defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
393 381
394 382
395 void Heap::GarbageCollectionPrologue() { 383 void Heap::GarbageCollectionPrologue() {
396 isolate_->transcendental_cache()->Clear(); 384 isolate_->transcendental_cache()->Clear();
397 ClearJSFunctionResultCaches(); 385 ClearJSFunctionResultCaches();
398 gc_count_++; 386 gc_count_++;
399 unflattened_strings_length_ = 0; 387 unflattened_strings_length_ = 0;
400 #ifdef DEBUG 388 #ifdef DEBUG
401 ASSERT(allocation_allowed_ && gc_state_ == NOT_IN_GC); 389 ASSERT(allocation_allowed_ && gc_state_ == NOT_IN_GC);
402 allow_allocation(false); 390 allow_allocation(false);
403 391
404 if (FLAG_verify_heap) { 392 if (FLAG_verify_heap) {
405 Verify(); 393 Verify();
406 } 394 }
407 395
408 if (FLAG_gc_verbose) Print(); 396 if (FLAG_gc_verbose) Print();
409 #endif 397 #endif // DEBUG
410 398
411 #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING) 399 #if defined(DEBUG)
412 ReportStatisticsBeforeGC(); 400 ReportStatisticsBeforeGC();
413 #endif 401 #endif // DEBUG
414 402
415 LiveObjectList::GCPrologue(); 403 LiveObjectList::GCPrologue();
416 } 404 }
417 405
418 intptr_t Heap::SizeOfObjects() { 406 intptr_t Heap::SizeOfObjects() {
419 intptr_t total = 0; 407 intptr_t total = 0;
420 AllSpaces spaces; 408 AllSpaces spaces;
421 for (Space* space = spaces.next(); space != NULL; space = spaces.next()) { 409 for (Space* space = spaces.next(); space != NULL; space = spaces.next()) {
422 total += space->SizeOfObjects(); 410 total += space->SizeOfObjects();
423 } 411 }
(...skipping 16 matching lines...)
440 if (FLAG_code_stats) ReportCodeStatistics("After GC"); 428 if (FLAG_code_stats) ReportCodeStatistics("After GC");
441 #endif 429 #endif
442 430
443 isolate_->counters()->alive_after_last_gc()->Set( 431 isolate_->counters()->alive_after_last_gc()->Set(
444 static_cast<int>(SizeOfObjects())); 432 static_cast<int>(SizeOfObjects()));
445 433
446 isolate_->counters()->symbol_table_capacity()->Set( 434 isolate_->counters()->symbol_table_capacity()->Set(
447 symbol_table()->Capacity()); 435 symbol_table()->Capacity());
448 isolate_->counters()->number_of_symbols()->Set( 436 isolate_->counters()->number_of_symbols()->Set(
449 symbol_table()->NumberOfElements()); 437 symbol_table()->NumberOfElements());
450 #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING) 438 #if defined(DEBUG)
451 ReportStatisticsAfterGC(); 439 ReportStatisticsAfterGC();
452 #endif 440 #endif // DEBUG
453 #ifdef ENABLE_DEBUGGER_SUPPORT
454 isolate_->debug()->AfterGarbageCollection(); 441 isolate_->debug()->AfterGarbageCollection();
455 #endif
456 } 442 }
457 443
458 444
459 void Heap::CollectAllGarbage(bool force_compaction) { 445 void Heap::CollectAllGarbage(bool force_compaction) {
460 // Since we are ignoring the return value, the exact choice of space does 446 // Since we are ignoring the return value, the exact choice of space does
461 // not matter, so long as we do not specify NEW_SPACE, which would not 447 // not matter, so long as we do not specify NEW_SPACE, which would not
462 // cause a full GC. 448 // cause a full GC.
463 mark_compact_collector_.SetForceCompaction(force_compaction); 449 mark_compact_collector_.SetForceCompaction(force_compaction);
464 CollectGarbage(OLD_POINTER_SPACE); 450 CollectGarbage(OLD_POINTER_SPACE);
465 mark_compact_collector_.SetForceCompaction(false); 451 mark_compact_collector_.SetForceCompaction(false);
(...skipping 862 matching lines...)
1328 } 1314 }
1329 1315
1330 static VisitorDispatchTable<ScavengingCallback>* GetTable() { 1316 static VisitorDispatchTable<ScavengingCallback>* GetTable() {
1331 return &table_; 1317 return &table_;
1332 } 1318 }
1333 1319
1334 private: 1320 private:
1335 enum ObjectContents { DATA_OBJECT, POINTER_OBJECT }; 1321 enum ObjectContents { DATA_OBJECT, POINTER_OBJECT };
1336 enum SizeRestriction { SMALL, UNKNOWN_SIZE }; 1322 enum SizeRestriction { SMALL, UNKNOWN_SIZE };
1337 1323
1338 #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
1339 static void RecordCopiedObject(Heap* heap, HeapObject* obj) { 1324 static void RecordCopiedObject(Heap* heap, HeapObject* obj) {
1340 bool should_record = false; 1325 bool should_record = false;
1341 #ifdef DEBUG 1326 #ifdef DEBUG
1342 should_record = FLAG_heap_stats; 1327 should_record = FLAG_heap_stats;
1343 #endif 1328 #endif
1344 #ifdef ENABLE_LOGGING_AND_PROFILING
1345 should_record = should_record || FLAG_log_gc; 1329 should_record = should_record || FLAG_log_gc;
1346 #endif
1347 if (should_record) { 1330 if (should_record) {
1348 if (heap->new_space()->Contains(obj)) { 1331 if (heap->new_space()->Contains(obj)) {
1349 heap->new_space()->RecordAllocation(obj); 1332 heap->new_space()->RecordAllocation(obj);
1350 } else { 1333 } else {
1351 heap->new_space()->RecordPromotion(obj); 1334 heap->new_space()->RecordPromotion(obj);
1352 } 1335 }
1353 } 1336 }
1354 } 1337 }
1355 #endif // defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
1356 1338
1357 // Helper function used by CopyObject to copy a source object to an 1339 // Helper function used by CopyObject to copy a source object to an
1358 // allocated target object and update the forwarding pointer in the source 1340 // allocated target object and update the forwarding pointer in the source
1359 // object. Returns the target object. 1341 // object. Returns the target object.
1360 INLINE(static HeapObject* MigrateObject(Heap* heap, 1342 INLINE(static HeapObject* MigrateObject(Heap* heap,
1361 HeapObject* source, 1343 HeapObject* source,
1362 HeapObject* target, 1344 HeapObject* target,
1363 int size)) { 1345 int size)) {
1364 // Copy the content of source to target. 1346 // Copy the content of source to target.
1365 heap->CopyBlock(target->address(), source->address(), size); 1347 heap->CopyBlock(target->address(), source->address(), size);
1366 1348
1367 // Set the forwarding address. 1349 // Set the forwarding address.
1368 source->set_map_word(MapWord::FromForwardingAddress(target)); 1350 source->set_map_word(MapWord::FromForwardingAddress(target));
1369 1351
1370 if (logging_and_profiling_mode == LOGGING_AND_PROFILING_ENABLED) { 1352 if (logging_and_profiling_mode == LOGGING_AND_PROFILING_ENABLED) {
1371 #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
1372 // Update NewSpace stats if necessary. 1353 // Update NewSpace stats if necessary.
1373 RecordCopiedObject(heap, target); 1354 RecordCopiedObject(heap, target);
1374 #endif
1375 HEAP_PROFILE(heap, ObjectMoveEvent(source->address(), target->address())); 1355 HEAP_PROFILE(heap, ObjectMoveEvent(source->address(), target->address()));
1376 #if defined(ENABLE_LOGGING_AND_PROFILING)
1377 Isolate* isolate = heap->isolate(); 1356 Isolate* isolate = heap->isolate();
1378 if (isolate->logger()->is_logging() || 1357 if (isolate->logger()->is_logging() ||
1379 CpuProfiler::is_profiling(isolate)) { 1358 CpuProfiler::is_profiling(isolate)) {
1380 if (target->IsSharedFunctionInfo()) { 1359 if (target->IsSharedFunctionInfo()) {
1381 PROFILE(isolate, SharedFunctionInfoMoveEvent( 1360 PROFILE(isolate, SharedFunctionInfoMoveEvent(
1382 source->address(), target->address())); 1361 source->address(), target->address()));
1383 } 1362 }
1384 } 1363 }
1385 #endif
1386 } 1364 }
1387 1365
1388 return target; 1366 return target;
1389 } 1367 }
1390 1368
1391 1369
1392 template<ObjectContents object_contents, SizeRestriction size_restriction> 1370 template<ObjectContents object_contents, SizeRestriction size_restriction>
1393 static inline void EvacuateObject(Map* map, 1371 static inline void EvacuateObject(Map* map,
1394 HeapObject** slot, 1372 HeapObject** slot,
1395 HeapObject* object, 1373 HeapObject* object,
(...skipping 155 matching lines...)
1551 static void InitializeScavengingVisitorsTables() { 1529 static void InitializeScavengingVisitorsTables() {
1552 ScavengingVisitor<LOGGING_AND_PROFILING_DISABLED>::Initialize(); 1530 ScavengingVisitor<LOGGING_AND_PROFILING_DISABLED>::Initialize();
1553 ScavengingVisitor<LOGGING_AND_PROFILING_ENABLED>::Initialize(); 1531 ScavengingVisitor<LOGGING_AND_PROFILING_ENABLED>::Initialize();
1554 scavenging_visitors_table_.CopyFrom( 1532 scavenging_visitors_table_.CopyFrom(
1555 ScavengingVisitor<LOGGING_AND_PROFILING_DISABLED>::GetTable()); 1533 ScavengingVisitor<LOGGING_AND_PROFILING_DISABLED>::GetTable());
1556 scavenging_visitors_table_mode_ = LOGGING_AND_PROFILING_DISABLED; 1534 scavenging_visitors_table_mode_ = LOGGING_AND_PROFILING_DISABLED;
1557 } 1535 }
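The two instantiations above replace a compile-time #ifdef with a template parameter: in the DISABLED variant the logging test is a constant-false condition that the compiler dead-code-eliminates, so the common case pays no runtime cost. A minimal sketch of the idiom, using the mode enum the same way MigrateObject does (the class name here is a hypothetical stand-in for ScavengingVisitor):

    enum LoggingAndProfilingMode {
      LOGGING_AND_PROFILING_ENABLED,
      LOGGING_AND_PROFILING_DISABLED
    };

    template <LoggingAndProfilingMode logging_and_profiling_mode>
    class ScavengeStep {
     public:
      static void Visit() {
        // ... common scavenging work ...
        if (logging_and_profiling_mode == LOGGING_AND_PROFILING_ENABLED) {
          // Record/notify profilers; compiled out of the DISABLED
          // instantiation because the condition is a compile-time constant.
        }
      }
    };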
1558 1536
1559 1537
1560 void Heap::SwitchScavengingVisitorsTableIfProfilingWasEnabled() { 1538 void Heap::SwitchScavengingVisitorsTableIfProfilingWasEnabled() {
1561 #ifdef ENABLE_LOGGING_AND_PROFILING
1562 if (scavenging_visitors_table_mode_ == LOGGING_AND_PROFILING_ENABLED) { 1539 if (scavenging_visitors_table_mode_ == LOGGING_AND_PROFILING_ENABLED) {
1563 // Table was already updated by some isolate. 1540 // Table was already updated by some isolate.
1564 return; 1541 return;
1565 } 1542 }
1566 1543
1567 if (isolate()->logger()->is_logging() || 1544 if (isolate()->logger()->is_logging() ||
1568 CpuProfiler::is_profiling(isolate()) || 1545 CpuProfiler::is_profiling(isolate()) ||
1569 (isolate()->heap_profiler() != NULL && 1546 (isolate()->heap_profiler() != NULL &&
1570 isolate()->heap_profiler()->is_profiling())) { 1547 isolate()->heap_profiler()->is_profiling())) {
1571 // If one of the isolates is doing a scavenge at this moment 1548 // If one of the isolates is doing a scavenge at this moment
1572 // it might see this table in an inconsistent state when 1549 // it might see this table in an inconsistent state when
1573 // some of the callbacks point to 1550 // some of the callbacks point to
1574 // ScavengingVisitor<LOGGING_AND_PROFILING_ENABLED> and others 1551 // ScavengingVisitor<LOGGING_AND_PROFILING_ENABLED> and others
1575 // to ScavengingVisitor<LOGGING_AND_PROFILING_DISABLED>. 1552 // to ScavengingVisitor<LOGGING_AND_PROFILING_DISABLED>.
1576 // However this does not lead to any bugs as such an isolate does 1553 // However this does not lead to any bugs as such an isolate does
1577 // not have profiling enabled, and any isolate with profiling enabled 1554 // not have profiling enabled, and any isolate with profiling enabled
1578 // is guaranteed to see the table in a consistent state. 1555 // is guaranteed to see the table in a consistent state.
1579 scavenging_visitors_table_.CopyFrom( 1556 scavenging_visitors_table_.CopyFrom(
1580 ScavengingVisitor<LOGGING_AND_PROFILING_ENABLED>::GetTable()); 1557 ScavengingVisitor<LOGGING_AND_PROFILING_ENABLED>::GetTable());
1581 1558
1582 // We use Release_Store to prevent reordering of this write before writes 1559 // We use Release_Store to prevent reordering of this write before writes
1583 // to the table. 1560 // to the table.
1584 Release_Store(&scavenging_visitors_table_mode_, 1561 Release_Store(&scavenging_visitors_table_mode_,
1585 LOGGING_AND_PROFILING_ENABLED); 1562 LOGGING_AND_PROFILING_ENABLED);
1586 } 1563 }
1587 #endif
1588 } 1564 }
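The Release_Store comment above describes the publication half of a release/acquire pairing: every write to the dispatch table must become visible before the mode word flips to ENABLED. A minimal sketch of the same pattern in portable C++11 atomics (V8 here uses its own Release_Store wrapper rather than std::atomic):

    #include <atomic>

    static int table[4];               // stand-in for the visitor table
    static std::atomic<int> mode(0);   // 0 = DISABLED, 1 = ENABLED

    void PublishEnabledTable() {
      for (int i = 0; i < 4; ++i) table[i] = i + 1;  // fill the table first
      mode.store(1, std::memory_order_release);      // then publish the flag
    }

    bool TableIsReady() {
      // The matching acquire load guarantees the table writes are visible
      // to any thread that observes mode == 1.
      return mode.load(std::memory_order_acquire) == 1;
    }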
1589 1565
1590 1566
1591 void Heap::ScavengeObjectSlow(HeapObject** p, HeapObject* object) { 1567 void Heap::ScavengeObjectSlow(HeapObject** p, HeapObject* object) {
1592 ASSERT(HEAP->InFromSpace(object)); 1568 ASSERT(HEAP->InFromSpace(object));
1593 MapWord first_word = object->map_word(); 1569 MapWord first_word = object->map_word();
1594 ASSERT(!first_word.IsForwardingAddress()); 1570 ASSERT(!first_word.IsForwardingAddress());
1595 Map* map = first_word.ToMap(); 1571 Map* map = first_word.ToMap();
1596 DoScavengeObject(map, p, object); 1572 DoScavengeObject(map, p, object);
1597 } 1573 }
(...skipping 4303 matching lines...)
5901 PrintF("holes_size_before=%" V8_PTR_PREFIX "d ", 5877 PrintF("holes_size_before=%" V8_PTR_PREFIX "d ",
5902 in_free_list_or_wasted_before_gc_); 5878 in_free_list_or_wasted_before_gc_);
5903 PrintF("holes_size_after=%" V8_PTR_PREFIX "d ", CountTotalHolesSize()); 5879 PrintF("holes_size_after=%" V8_PTR_PREFIX "d ", CountTotalHolesSize());
5904 5880
5905 PrintF("allocated=%" V8_PTR_PREFIX "d ", allocated_since_last_gc_); 5881 PrintF("allocated=%" V8_PTR_PREFIX "d ", allocated_since_last_gc_);
5906 PrintF("promoted=%" V8_PTR_PREFIX "d ", promoted_objects_size_); 5882 PrintF("promoted=%" V8_PTR_PREFIX "d ", promoted_objects_size_);
5907 5883
5908 PrintF("\n"); 5884 PrintF("\n");
5909 } 5885 }
5910 5886
5911 #if defined(ENABLE_LOGGING_AND_PROFILING)
5912 heap_->PrintShortHeapStatistics(); 5887 heap_->PrintShortHeapStatistics();
5913 #endif
5914 } 5888 }
5915 5889
5916 5890
5917 const char* GCTracer::CollectorString() { 5891 const char* GCTracer::CollectorString() {
5918 switch (collector_) { 5892 switch (collector_) {
5919 case SCAVENGER: 5893 case SCAVENGER:
5920 return "Scavenge"; 5894 return "Scavenge";
5921 case MARK_COMPACTOR: 5895 case MARK_COMPACTOR:
5922 return heap_->mark_compact_collector_.HasCompacted() ? "Mark-compact" 5896 return heap_->mark_compact_collector_.HasCompacted() ? "Mark-compact"
5923 : "Mark-sweep"; 5897 : "Mark-sweep";
(...skipping 97 matching lines...)
6021 } 5995 }
6022 5996
6023 5997
6024 void ExternalStringTable::TearDown() { 5998 void ExternalStringTable::TearDown() {
6025 new_space_strings_.Free(); 5999 new_space_strings_.Free();
6026 old_space_strings_.Free(); 6000 old_space_strings_.Free();
6027 } 6001 }
6028 6002
6029 6003
6030 } } // namespace v8::internal 6004 } } // namespace v8::internal