| OLD | NEW |
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 505 matching lines...) |
| 516 ? isolate_->counters()->gc_scavenger() | 516 ? isolate_->counters()->gc_scavenger() |
| 517 : isolate_->counters()->gc_compactor(); | 517 : isolate_->counters()->gc_compactor(); |
| 518 rate->Start(); | 518 rate->Start(); |
| 519 next_gc_likely_to_collect_more = | 519 next_gc_likely_to_collect_more = |
| 520 PerformGarbageCollection(collector, &tracer); | 520 PerformGarbageCollection(collector, &tracer); |
| 521 rate->Stop(); | 521 rate->Stop(); |
| 522 | 522 |
| 523 GarbageCollectionEpilogue(); | 523 GarbageCollectionEpilogue(); |
| 524 } | 524 } |
| 525 | 525 |
| 526 | |
| 527 #ifdef ENABLE_LOGGING_AND_PROFILING | |
| 528 if (FLAG_log_gc) HeapProfiler::WriteSample(); | |
| 529 #endif | |
| 530 | |
| 531 return next_gc_likely_to_collect_more; | 526 return next_gc_likely_to_collect_more; |
| 532 } | 527 } |
| 533 | 528 |
| 534 | 529 |
| 535 void Heap::PerformScavenge() { | 530 void Heap::PerformScavenge() { |
| 536 GCTracer tracer(this); | 531 GCTracer tracer(this); |
| 537 PerformGarbageCollection(SCAVENGER, &tracer); | 532 PerformGarbageCollection(SCAVENGER, &tracer); |
| 538 } | 533 } |
| 539 | 534 |
| 540 | 535 |
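The hunk above leaves the GC timing logic untouched: CollectGarbage picks a rate counter according to which collector runs, brackets the collection with Start()/Stop(), and returns whether the next GC is likely to free more memory. Below is a minimal standalone sketch of that pattern, with a hypothetical RateTimer standing in for V8's real counter machinery (isolate_->counters()->gc_scavenger() and friends); it is an illustration of the shape of the code, not V8's implementation.

    #include <chrono>
    #include <cstdio>

    enum GarbageCollector { SCAVENGER, MARK_COMPACTOR };

    // Hypothetical stand-in for the isolate's gc_scavenger()/gc_compactor()
    // rate counters; only the Start()/Stop() bracketing matters here.
    class RateTimer {
     public:
      explicit RateTimer(const char* name) : name_(name) {}
      void Start() { start_ = std::chrono::steady_clock::now(); }
      void Stop() {
        long long us = std::chrono::duration_cast<std::chrono::microseconds>(
            std::chrono::steady_clock::now() - start_).count();
        std::printf("%s took %lld us\n", name_, us);
      }
     private:
      const char* name_;
      std::chrono::steady_clock::time_point start_;
    };

    // Placeholder for the real collection; the return value mirrors
    // next_gc_likely_to_collect_more in the diff above.
    bool PerformGarbageCollection(GarbageCollector collector) {
      return collector == SCAVENGER;
    }

    bool CollectGarbage(GarbageCollector collector) {
      static RateTimer scavenge_rate("gc_scavenger");
      static RateTimer compact_rate("gc_compactor");
      // Select the counter that matches the collector, then time the GC.
      RateTimer* rate =
          (collector == SCAVENGER) ? &scavenge_rate : &compact_rate;
      rate->Start();
      bool next_gc_likely_to_collect_more = PerformGarbageCollection(collector);
      rate->Stop();
      return next_gc_likely_to_collect_more;
    }

    int main() {
      CollectGarbage(SCAVENGER);
      return 0;
    }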
| (...skipping 2436 matching lines...) |
| 2977 // If allocation failures are disallowed, we may allocate in a different | 2972 // If allocation failures are disallowed, we may allocate in a different |
| 2978 // space when new space is full and the object is not a large object. | 2973 // space when new space is full and the object is not a large object. |
| 2979 AllocationSpace retry_space = | 2974 AllocationSpace retry_space = |
| 2980 (space != NEW_SPACE) ? space : TargetSpaceId(map->instance_type()); | 2975 (space != NEW_SPACE) ? space : TargetSpaceId(map->instance_type()); |
| 2981 Object* result; | 2976 Object* result; |
| 2982 { MaybeObject* maybe_result = | 2977 { MaybeObject* maybe_result = |
| 2983 AllocateRaw(map->instance_size(), space, retry_space); | 2978 AllocateRaw(map->instance_size(), space, retry_space); |
| 2984 if (!maybe_result->ToObject(&result)) return maybe_result; | 2979 if (!maybe_result->ToObject(&result)) return maybe_result; |
| 2985 } | 2980 } |
| 2986 HeapObject::cast(result)->set_map(map); | 2981 HeapObject::cast(result)->set_map(map); |
| 2987 #ifdef ENABLE_LOGGING_AND_PROFILING | |
| 2988 isolate_->producer_heap_profile()->RecordJSObjectAllocation(result); | |
| 2989 #endif | |
| 2990 return result; | 2982 return result; |
| 2991 } | 2983 } |
| 2992 | 2984 |
| 2993 | 2985 |
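Heap::Allocate above shows the MaybeObject idiom used throughout this file: AllocateRaw yields either an object or a failure marker, the caller unwraps it with ToObject() and returns any failure as-is, and a fallback space is chosen up front so that a full new space retries where the object would eventually be promoted. Here is a self-contained sketch of the shape of that idiom, using toy types rather than V8's real MaybeObject API:

    #include <cstddef>
    #include <cstdio>

    enum AllocationSpace { NEW_SPACE, OLD_POINTER_SPACE };

    // Toy stand-in for MaybeObject: a NULL address encodes a failed
    // allocation that the caller must propagate upward unchanged.
    struct MaybeAlloc {
      void* address;
      bool ToObject(void** out) const {
        if (address == NULL) return false;
        *out = address;
        return true;
      }
    };

    // Toy allocator: pretend new space is full, so only a request that
    // names a different retry space can succeed.
    MaybeAlloc AllocateRaw(size_t size, AllocationSpace space,
                           AllocationSpace retry_space) {
      static char arena[1 << 16];
      static size_t top = 0;
      if (space == NEW_SPACE && retry_space == NEW_SPACE) {
        MaybeAlloc failed = { NULL };
        return failed;
      }
      MaybeAlloc ok = { arena + top };
      top += size;
      return ok;
    }

    MaybeAlloc Allocate(size_t size, AllocationSpace space,
                        AllocationSpace target_space) {
      // As in the diff: when the preferred space is NEW_SPACE, retry in
      // the space the object would be promoted to anyway.
      AllocationSpace retry_space =
          (space != NEW_SPACE) ? space : target_space;
      void* result;
      MaybeAlloc maybe_result = AllocateRaw(size, space, retry_space);
      if (!maybe_result.ToObject(&result)) return maybe_result;
      // ... a real heap would install the object's map here ...
      return maybe_result;
    }

    int main() {
      void* obj;
      if (Allocate(32, NEW_SPACE, OLD_POINTER_SPACE).ToObject(&obj))
        std::printf("allocated at %p\n", obj);
      return 0;
    }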
| 2994 MaybeObject* Heap::InitializeFunction(JSFunction* function, | 2986 MaybeObject* Heap::InitializeFunction(JSFunction* function, |
| 2995 SharedFunctionInfo* shared, | 2987 SharedFunctionInfo* shared, |
| 2996 Object* prototype) { | 2988 Object* prototype) { |
| 2997 ASSERT(!prototype->IsMap()); | 2989 ASSERT(!prototype->IsMap()); |
| 2998 function->initialize_properties(); | 2990 function->initialize_properties(); |
| 2999 function->initialize_elements(); | 2991 function->initialize_elements(); |
| (...skipping 428 matching lines...) |
| 3428 } | 3420 } |
| 3429 // Update properties if necessary. | 3421 // Update properties if necessary. |
| 3430 if (properties->length() > 0) { | 3422 if (properties->length() > 0) { |
| 3431 Object* prop; | 3423 Object* prop; |
| 3432 { MaybeObject* maybe_prop = CopyFixedArray(properties); | 3424 { MaybeObject* maybe_prop = CopyFixedArray(properties); |
| 3433 if (!maybe_prop->ToObject(&prop)) return maybe_prop; | 3425 if (!maybe_prop->ToObject(&prop)) return maybe_prop; |
| 3434 } | 3426 } |
| 3435 JSObject::cast(clone)->set_properties(FixedArray::cast(prop)); | 3427 JSObject::cast(clone)->set_properties(FixedArray::cast(prop)); |
| 3436 } | 3428 } |
| 3437 // Return the new clone. | 3429 // Return the new clone. |
| 3438 #ifdef ENABLE_LOGGING_AND_PROFILING | |
| 3439 isolate_->producer_heap_profile()->RecordJSObjectAllocation(clone); | |
| 3440 #endif | |
| 3441 return clone; | 3430 return clone; |
| 3442 } | 3431 } |
| 3443 | 3432 |
| 3444 | 3433 |
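The clone path above copies the properties backing store only when it is non-empty; the likely rationale (an assumption here, though V8 does canonicalize the empty FixedArray as a shared singleton) is that a zero-length array can safely stay shared between source and clone. A toy illustration of why the guard matters:

    #include <vector>

    // Toy stand-ins for FixedArray-backed property storage.
    struct FixedArray {
      std::vector<int> slots;
      int length() const { return static_cast<int>(slots.size()); }
    };

    struct ToyObject {
      FixedArray* properties;
    };

    FixedArray* CopyFixedArray(FixedArray* src) {
      return new FixedArray(*src);  // independent copy of the slots
    }

    // Mirrors the guard in the diff: share when empty, copy when not,
    // so writes to the clone never show through in the source object.
    void CloneProperties(const ToyObject& source, ToyObject* clone) {
      clone->properties = source.properties;
      if (source.properties->length() > 0) {
        clone->properties = CopyFixedArray(source.properties);
      }
    }

    int main() {
      FixedArray props;
      props.slots.push_back(42);
      ToyObject source = { &props };
      ToyObject clone = { 0 };
      CloneProperties(source, &clone);
      clone.properties->slots[0] = 7;  // source's slot still holds 42
      return 0;
    }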
| 3445 MaybeObject* Heap::ReinitializeJSGlobalProxy(JSFunction* constructor, | 3434 MaybeObject* Heap::ReinitializeJSGlobalProxy(JSFunction* constructor, |
| 3446 JSGlobalProxy* object) { | 3435 JSGlobalProxy* object) { |
| 3447 ASSERT(constructor->has_initial_map()); | 3436 ASSERT(constructor->has_initial_map()); |
| 3448 Map* map = constructor->initial_map(); | 3437 Map* map = constructor->initial_map(); |
| 3449 | 3438 |
| 3450 // Check that the already allocated object has the same size and type as | 3439 // Check that the already allocated object has the same size and type as |
| (...skipping 1664 matching lines...) |
| 5115 | 5104 |
| 5116 // Create initial objects | 5105 // Create initial objects |
| 5117 if (!CreateInitialObjects()) return false; | 5106 if (!CreateInitialObjects()) return false; |
| 5118 | 5107 |
| 5119 global_contexts_list_ = undefined_value(); | 5108 global_contexts_list_ = undefined_value(); |
| 5120 } | 5109 } |
| 5121 | 5110 |
| 5122 LOG(isolate_, IntPtrTEvent("heap-capacity", Capacity())); | 5111 LOG(isolate_, IntPtrTEvent("heap-capacity", Capacity())); |
| 5123 LOG(isolate_, IntPtrTEvent("heap-available", Available())); | 5112 LOG(isolate_, IntPtrTEvent("heap-available", Available())); |
| 5124 | 5113 |
| 5125 #ifdef ENABLE_LOGGING_AND_PROFILING | |
| 5126 // This should be called only after initial objects have been created. | |
| 5127 isolate_->producer_heap_profile()->Setup(); | |
| 5128 #endif | |
| 5129 | |
| 5130 return true; | 5114 return true; |
| 5131 } | 5115 } |
| 5132 | 5116 |
| 5133 | 5117 |
| 5134 void Heap::SetStackLimits() { | 5118 void Heap::SetStackLimits() { |
| 5135 ASSERT(isolate_ != NULL); | 5119 ASSERT(isolate_ != NULL); |
| 5136 ASSERT(isolate_ == isolate()); | 5120 ASSERT(isolate_ == isolate()); |
| 5137 // On 64 bit machines, pointers are generally out of range of Smis. We write | 5121 // On 64 bit machines, pointers are generally out of range of Smis. We write |
| 5138 // something that looks like an out of range Smi to the GC. | 5122 // something that looks like an out of range Smi to the GC. |
| 5139 | 5123 |
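The comment above refers to V8's pointer-tagging scheme: a Smi carries tag 0 in its low bit, so clearing the tag bits of a stack address yields a word that the GC's root visitor classifies as a Smi (a non-pointer) and skips, and on 64-bit the value also falls outside the valid Smi range. Below is a sketch of that disguise; the constants match V8's kSmiTag/kSmiTagMask, but the body of SetStackLimits is elided from this diff, so treat the function here as an assumed reconstruction, not the actual code.

    #include <cstdint>
    #include <cstdio>

    const intptr_t kSmiTag = 0;      // tag value carried by small integers
    const intptr_t kSmiTagMask = 1;  // low bit distinguishes Smi vs. pointer

    intptr_t DisguiseAsSmi(intptr_t stack_limit) {
      // Clear the tag bits and stamp in the Smi tag: the result is no
      // longer a usable pointer, but the GC will treat it as a Smi and
      // never try to follow or relocate it.
      return (stack_limit & ~kSmiTagMask) | kSmiTag;
    }

    int main() {
      int on_stack = 0;
      intptr_t limit = reinterpret_cast<intptr_t>(&on_stack);
      std::printf("raw %p, disguised 0x%llx\n",
                  static_cast<void*>(&on_stack),
                  static_cast<unsigned long long>(DisguiseAsSmi(limit)));
      return 0;
    }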
| (...skipping 910 matching lines...) |
| 6050 } | 6034 } |
| 6051 | 6035 |
| 6052 | 6036 |
| 6053 void ExternalStringTable::TearDown() { | 6037 void ExternalStringTable::TearDown() { |
| 6054 new_space_strings_.Free(); | 6038 new_space_strings_.Free(); |
| 6055 old_space_strings_.Free(); | 6039 old_space_strings_.Free(); |
| 6056 } | 6040 } |
| 6057 | 6041 |
| 6058 | 6042 |
| 6059 } } // namespace v8::internal | 6043 } } // namespace v8::internal |