OLD | NEW |
---|---|
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 1181 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
1192 } | 1192 } |
1193 return NULL; | 1193 return NULL; |
1194 } | 1194 } |
1195 | 1195 |
1196 private: | 1196 private: |
1197 Heap* heap_; | 1197 Heap* heap_; |
1198 }; | 1198 }; |
1199 | 1199 |
1200 | 1200 |
1201 void Heap::Scavenge() { | 1201 void Heap::Scavenge() { |
1202 RelocationLock relocation_lock(this); | |
1202 #ifdef DEBUG | 1203 #ifdef DEBUG |
1203 if (FLAG_verify_heap) VerifyNonPointerSpacePointers(); | 1204 if (FLAG_verify_heap) VerifyNonPointerSpacePointers(); |
1204 #endif | 1205 #endif |
1205 | 1206 |
1206 gc_state_ = SCAVENGE; | 1207 gc_state_ = SCAVENGE; |
1207 | 1208 |
1208 // Implements Cheney's copying algorithm | 1209 // Implements Cheney's copying algorithm |
1209 LOG(isolate_, ResourceEvent("scavenge", "begin")); | 1210 LOG(isolate_, ResourceEvent("scavenge", "begin")); |
1210 | 1211 |
1211 // Clear descriptor cache. | 1212 // Clear descriptor cache. |
(...skipping 4942 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
6154 if (!CreateInitialObjects()) return false; | 6155 if (!CreateInitialObjects()) return false; |
6155 | 6156 |
6156 global_contexts_list_ = undefined_value(); | 6157 global_contexts_list_ = undefined_value(); |
6157 } | 6158 } |
6158 | 6159 |
6159 LOG(isolate_, IntPtrTEvent("heap-capacity", Capacity())); | 6160 LOG(isolate_, IntPtrTEvent("heap-capacity", Capacity())); |
6160 LOG(isolate_, IntPtrTEvent("heap-available", Available())); | 6161 LOG(isolate_, IntPtrTEvent("heap-available", Available())); |
6161 | 6162 |
6162 store_buffer()->SetUp(); | 6163 store_buffer()->SetUp(); |
6163 | 6164 |
6165 if (FLAG_parallel_recompilation) relocation_mutex_ = OS::CreateMutex(); | |
6166 | |
6164 return true; | 6167 return true; |
6165 } | 6168 } |
6166 | 6169 |
6167 | 6170 |
6168 void Heap::SetStackLimits() { | 6171 void Heap::SetStackLimits() { |
6169 ASSERT(isolate_ != NULL); | 6172 ASSERT(isolate_ != NULL); |
6170 ASSERT(isolate_ == isolate()); | 6173 ASSERT(isolate_ == isolate()); |
6171 // On 64 bit machines, pointers are generally out of range of Smis. We write | 6174 // On 64 bit machines, pointers are generally out of range of Smis. We write |
6172 // something that looks like an out of range Smi to the GC. | 6175 // something that looks like an out of range Smi to the GC. |
6173 | 6176 |
(...skipping 65 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
6239 lo_space_->TearDown(); | 6242 lo_space_->TearDown(); |
6240 delete lo_space_; | 6243 delete lo_space_; |
6241 lo_space_ = NULL; | 6244 lo_space_ = NULL; |
6242 } | 6245 } |
6243 | 6246 |
6244 store_buffer()->TearDown(); | 6247 store_buffer()->TearDown(); |
6245 incremental_marking()->TearDown(); | 6248 incremental_marking()->TearDown(); |
6246 | 6249 |
6247 isolate_->memory_allocator()->TearDown(); | 6250 isolate_->memory_allocator()->TearDown(); |
6248 | 6251 |
6252 if (FLAG_parallel_recompilation) delete relocation_mutex_; | |
Yang
2012/07/19 12:47:30
I think you can omit checking for the flag since deleting a NULL pointer is a no-op.
sanjoy
2012/07/19 15:06:09
Done.
| |
6253 | |
6249 #ifdef DEBUG | 6254 #ifdef DEBUG |
6250 delete debug_utils_; | 6255 delete debug_utils_; |
6251 debug_utils_ = NULL; | 6256 debug_utils_ = NULL; |
6252 #endif | 6257 #endif |
6253 } | 6258 } |
6254 | 6259 |
6255 | 6260 |
6256 void Heap::Shrink() { | 6261 void Heap::Shrink() { |
6257 // Try to shrink all paged spaces. | 6262 // Try to shrink all paged spaces. |
6258 PagedSpaces spaces; | 6263 PagedSpaces spaces; |
(...skipping 964 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
7223 static_cast<int>(object_sizes_last_time_[index])); | 7228 static_cast<int>(object_sizes_last_time_[index])); |
7224 CODE_KIND_LIST(ADJUST_LAST_TIME_OBJECT_COUNT) | 7229 CODE_KIND_LIST(ADJUST_LAST_TIME_OBJECT_COUNT) |
7225 #undef ADJUST_LAST_TIME_OBJECT_COUNT | 7230 #undef ADJUST_LAST_TIME_OBJECT_COUNT |
7226 | 7231 |
7227 memcpy(object_counts_last_time_, object_counts_, sizeof(object_counts_)); | 7232 memcpy(object_counts_last_time_, object_counts_, sizeof(object_counts_)); |
7228 memcpy(object_sizes_last_time_, object_sizes_, sizeof(object_sizes_)); | 7233 memcpy(object_sizes_last_time_, object_sizes_, sizeof(object_sizes_)); |
7229 ClearObjectStats(); | 7234 ClearObjectStats(); |
7230 } | 7235 } |
7231 | 7236 |
7232 } } // namespace v8::internal | 7237 } } // namespace v8::internal |
OLD | NEW |