OLD | NEW |
1 // Copyright 2006-2010 the V8 project authors. All rights reserved. | 1 // Copyright 2006-2010 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 150 matching lines...)
161 | 161 |
162 void Isolate::PreallocatedMemoryThreadStop() { | 162 void Isolate::PreallocatedMemoryThreadStop() { |
163 if (preallocated_memory_thread_ == NULL) return; | 163 if (preallocated_memory_thread_ == NULL) return; |
164 preallocated_memory_thread_->StopThread(); | 164 preallocated_memory_thread_->StopThread(); |
165 // Done with the thread entirely. | 165 // Done with the thread entirely. |
166 delete preallocated_memory_thread_; | 166 delete preallocated_memory_thread_; |
167 preallocated_memory_thread_ = NULL; | 167 preallocated_memory_thread_ = NULL; |
168 } | 168 } |
169 | 169 |
170 | 170 |
| 171 void Isolate::PreallocatedStorageInit(size_t size) { |
| 172 ASSERT(free_list_.next_ == &free_list_); |
| 173 ASSERT(free_list_.previous_ == &free_list_); |
| 174 PreallocatedStorage* free_chunk = |
| 175 reinterpret_cast<PreallocatedStorage*>(new char[size]); |
| 176 free_list_.next_ = free_list_.previous_ = free_chunk; |
| 177 free_chunk->next_ = free_chunk->previous_ = &free_list_; |
| 178 free_chunk->size_ = size - sizeof(PreallocatedStorage); |
| 179 preallocated_storage_preallocated_ = true; |
| 180 } |
| 181 |
| 182 |
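Note: the two ASSERTs in PreallocatedStorageInit encode the empty-list invariant of a circular doubly-linked list with a sentinel node. free_list_ is not a real chunk; an empty list is one whose next_ and previous_ point back at the sentinel itself. A minimal standalone sketch of the same pattern (Node, InitSentinel, and LinkSingle are hypothetical names for illustration, not V8 API):

    #include <cstddef>

    // Sentinel-based circular doubly-linked list, as used by free_list_ above.
    struct Node {
      Node* next_;
      Node* previous_;
      size_t size_;
    };

    // An empty list is a sentinel pointing at itself.
    void InitSentinel(Node* sentinel) {
      sentinel->next_ = sentinel->previous_ = sentinel;
    }

    // After linking one chunk, sentinel and chunk point at each other,
    // mirroring the two assignments in PreallocatedStorageInit.
    void LinkSingle(Node* sentinel, Node* chunk) {
      sentinel->next_ = sentinel->previous_ = chunk;
      chunk->next_ = chunk->previous_ = sentinel;
    }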
| 183 void* Isolate::PreallocatedStorageNew(size_t size) { |
| 184 if (!preallocated_storage_preallocated_) { |
| 185 return FreeStoreAllocationPolicy::New(size); |
| 186 } |
| 187 ASSERT(free_list_.next_ != &free_list_); |
| 188 ASSERT(free_list_.previous_ != &free_list_); |
| 189 |
| 190 size = (size + kPointerSize - 1) & ~(kPointerSize - 1); |
| 191 // Search for exact fit. |
| 192 for (PreallocatedStorage* storage = free_list_.next_; |
| 193 storage != &free_list_; |
| 194 storage = storage->next_) { |
| 195 if (storage->size_ == size) { |
| 196 storage->Unlink(); |
| 197 storage->LinkTo(&in_use_list_); |
| 198 return reinterpret_cast<void*>(storage + 1); |
| 199 } |
| 200 } |
| 201 // Search for first fit. |
| 202 for (PreallocatedStorage* storage = free_list_.next_; |
| 203 storage != &free_list_; |
| 204 storage = storage->next_) { |
| 205 if (storage->size_ >= size + sizeof(PreallocatedStorage)) { |
| 206 storage->Unlink(); |
| 207 storage->LinkTo(&in_use_list_); |
| 208 PreallocatedStorage* left_over = |
| 209 reinterpret_cast<PreallocatedStorage*>( |
| 210 reinterpret_cast<char*>(storage + 1) + size); |
| 211 left_over->size_ = storage->size_ - size - sizeof(PreallocatedStorage); |
| 212 ASSERT(size + left_over->size_ + sizeof(PreallocatedStorage) == |
| 213 storage->size_); |
| 214 storage->size_ = size; |
| 215 left_over->LinkTo(&free_list_); |
| 216 return reinterpret_cast<void*>(storage + 1); |
| 217 } |
| 218 } |
| 219 // Allocation failure. |
| 220 ASSERT(false); |
| 221 return NULL; |
| 222 } |
| 223 |
| 224 |
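Two details of PreallocatedStorageNew are worth spelling out. First, the round-up `size = (size + kPointerSize - 1) & ~(kPointerSize - 1)` relies on kPointerSize being a power of two: adding kPointerSize - 1 and masking off the low bits rounds size up to the next multiple. Second, the first-fit pass only splits a chunk when it can also fit a fresh PreallocatedStorage header, so the leftover remains a well-formed free chunk, and the ASSERT checks that the split conserves every byte of the original chunk. A small self-checking sketch of the round-up arithmetic (kPointerSize here is a local stand-in for V8's constant):

    #include <cassert>
    #include <cstddef>

    int main() {
      const size_t kPointerSize = 8;  // stand-in; a power of two, as in V8
      // 13 rounds up to 16: (13 + 7) & ~7 == 16.
      size_t size = 13;
      size = (size + kPointerSize - 1) & ~(kPointerSize - 1);
      assert(size == 16);
      // Already-aligned sizes are unchanged: (24 + 7) & ~7 == 24.
      size = (24 + kPointerSize - 1) & ~(kPointerSize - 1);
      assert(size == 24);
      return 0;
    }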
| 225 // We don't attempt to coalesce. |
| 226 void Isolate::PreallocatedStorageDelete(void* p) { |
| 227 if (p == NULL) { |
| 228 return; |
| 229 } |
| 230 if (!preallocated_storage_preallocated_) { |
| 231 FreeStoreAllocationPolicy::Delete(p); |
| 232 return; |
| 233 } |
| 234 PreallocatedStorage* storage = reinterpret_cast<PreallocatedStorage*>(p) - 1; |
| 235 ASSERT(storage->next_->previous_ == storage); |
| 236 ASSERT(storage->previous_->next_ == storage); |
| 237 storage->Unlink(); |
| 238 storage->LinkTo(&free_list_); |
| 239 } |
| 240 |
| 241 |
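As the comment says, PreallocatedStorageDelete never coalesces: a freed block simply moves back to the free list at its current size, so a workload that frees many small blocks can be unable to satisfy one large request even though enough total space is free, landing in the ASSERT(false) path of PreallocatedStorageNew. A hedged sketch of the intended lifecycle (assuming a fully constructed v8::internal::Isolate* named isolate; the sizes are arbitrary):

    // Illustrative only: exercises the three calls added above.
    void UseScratchStorage(Isolate* isolate) {
      isolate->PreallocatedStorageInit(64 * 1024);   // one 64 KB arena
      void* a = isolate->PreallocatedStorageNew(128);
      void* b = isolate->PreallocatedStorageNew(256);
      isolate->PreallocatedStorageDelete(a);  // back on the free list, not merged
      isolate->PreallocatedStorageDelete(b);  // still two separate free chunks
    }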
171 Isolate* Isolate::default_isolate_ = NULL; | 242 Isolate* Isolate::default_isolate_ = NULL; |
172 Thread::LocalStorageKey Isolate::isolate_key_; | 243 Thread::LocalStorageKey Isolate::isolate_key_; |
173 Thread::LocalStorageKey Isolate::thread_id_key_; | 244 Thread::LocalStorageKey Isolate::thread_id_key_; |
174 Thread::LocalStorageKey Isolate::per_isolate_thread_data_key_; | 245 Thread::LocalStorageKey Isolate::per_isolate_thread_data_key_; |
175 Mutex* Isolate::process_wide_mutex_ = OS::CreateMutex(); | 246 Mutex* Isolate::process_wide_mutex_ = OS::CreateMutex(); |
176 Isolate::ThreadDataTable* Isolate::thread_data_table_ = NULL; | 247 Isolate::ThreadDataTable* Isolate::thread_data_table_ = NULL; |
177 Isolate::ThreadId Isolate::highest_thread_id_ = 0; | 248 Isolate::ThreadId Isolate::highest_thread_id_ = 0; |
178 | 249 |
179 | 250 |
180 class IsolateInitializer { | 251 class IsolateInitializer { |
(...skipping 194 matching lines...)
375 result_constant_list_(0) { | 446 result_constant_list_(0) { |
376 TRACE_ISOLATE(constructor); | 447 TRACE_ISOLATE(constructor); |
377 | 448 |
378 memset(isolate_addresses_, 0, | 449 memset(isolate_addresses_, 0, |
379 sizeof(isolate_addresses_[0]) * (k_isolate_address_count + 1)); | 450 sizeof(isolate_addresses_[0]) * (k_isolate_address_count + 1)); |
380 | 451 |
381 heap_.isolate_ = this; | 452 heap_.isolate_ = this; |
382 zone_.isolate_ = this; | 453 zone_.isolate_ = this; |
383 stack_guard_.isolate_ = this; | 454 stack_guard_.isolate_ = this; |
384 | 455 |
385 #if defined(V8_TARGET_ARCH_ARM) && !defined(__arm__) | 456 #if defined(V8_TARGET_ARCH_ARM) && !defined(__arm__) || \ |
| 457 defined(V8_TARGET_ARCH_MIPS) && !defined(__mips__) |
386 simulator_initialized_ = false; | 458 simulator_initialized_ = false; |
387 simulator_i_cache_ = NULL; | 459 simulator_i_cache_ = NULL; |
388 simulator_redirection_ = NULL; | 460 simulator_redirection_ = NULL; |
389 #endif | 461 #endif |
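Since && binds tighter than ||, the widened guard reads as (ARM target and non-ARM host) or (MIPS target and non-MIPS host): exactly the two cross-compile cases where generated code runs under a simulator and the per-isolate simulator fields are live. A fully parenthesized equivalent, for readers who prefer it explicit:

    #if (defined(V8_TARGET_ARCH_ARM) && !defined(__arm__)) || \
        (defined(V8_TARGET_ARCH_MIPS) && !defined(__mips__))
    // Cross build: ARM/MIPS code executes under the simulator.
    #endif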
390 | 462 |
391 #ifdef DEBUG | 463 #ifdef DEBUG |
392 // heap_histograms_ initializes itself. | 464 // heap_histograms_ initializes itself. |
393 memset(&js_spill_information_, 0, sizeof(js_spill_information_)); | 465 memset(&js_spill_information_, 0, sizeof(js_spill_information_)); |
394 memset(code_kind_statistics_, 0, | 466 memset(code_kind_statistics_, 0, |
395 sizeof(code_kind_statistics_[0]) * Code::NUMBER_OF_KINDS); | 467 sizeof(code_kind_statistics_[0]) * Code::NUMBER_OF_KINDS); |
(...skipping 255 matching lines...)
651 logger_->Setup(); | 723 logger_->Setup(); |
652 | 724 |
653 CpuProfiler::Setup(); | 725 CpuProfiler::Setup(); |
654 HeapProfiler::Setup(); | 726 HeapProfiler::Setup(); |
655 | 727 |
656 // Set up the platform OS support. | 728 // Set up the platform OS support. |
657 OS::Setup(); | 729 OS::Setup(); |
658 | 730 |
659 // Initialize other runtime facilities | 731 // Initialize other runtime facilities |
660 #if defined(USE_SIMULATOR) | 732 #if defined(USE_SIMULATOR) |
661 #if defined(V8_TARGET_ARCH_ARM) | 733 #if defined(V8_TARGET_ARCH_ARM) || defined(V8_TARGET_ARCH_MIPS) |
662 Simulator::Initialize(); | 734 Simulator::Initialize(); |
663 #elif defined(V8_TARGET_ARCH_MIPS) | |
664 ::assembler::mips::Simulator::Initialize(); | |
665 #endif | 735 #endif |
666 #endif | 736 #endif |
667 | 737 |
668 { // NOLINT | 738 { // NOLINT |
669 // Ensure that the thread has a valid stack guard. The v8::Locker object | 739 // Ensure that the thread has a valid stack guard. The v8::Locker object |
670 // will ensure this too, but we don't have to use lockers if we are only | 740 // will ensure this too, but we don't have to use lockers if we are only |
671 // using one thread. | 741 // using one thread. |
672 ExecutionAccess lock(this); | 742 ExecutionAccess lock(this); |
673 stack_guard_.InitThread(lock); | 743 stack_guard_.InitThread(lock); |
674 } | 744 } |
(...skipping 141 matching lines...)
816 | 886 |
817 #ifdef DEBUG | 887 #ifdef DEBUG |
818 #define ISOLATE_FIELD_OFFSET(type, name, ignored) \ | 888 #define ISOLATE_FIELD_OFFSET(type, name, ignored) \ |
819 const intptr_t Isolate::name##_debug_offset_ = OFFSET_OF(Isolate, name##_); | 889 const intptr_t Isolate::name##_debug_offset_ = OFFSET_OF(Isolate, name##_); |
820 ISOLATE_INIT_LIST(ISOLATE_FIELD_OFFSET) | 890 ISOLATE_INIT_LIST(ISOLATE_FIELD_OFFSET) |
821 ISOLATE_INIT_ARRAY_LIST(ISOLATE_FIELD_OFFSET) | 891 ISOLATE_INIT_ARRAY_LIST(ISOLATE_FIELD_OFFSET) |
822 #undef ISOLATE_FIELD_OFFSET | 892 #undef ISOLATE_FIELD_OFFSET |
823 #endif | 893 #endif |
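The ISOLATE_FIELD_OFFSET block is the classic X-macro pattern: ISOLATE_INIT_LIST and ISOLATE_INIT_ARRAY_LIST expand their macro argument once per field entry, so a single one-line definition stamps out a _debug_offset_ constant for every Isolate field. A reduced sketch of the pattern (Widget, FIELD_LIST, and the field names are invented for illustration):

    #include <cstddef>

    struct Widget {
      int counter_;
      double ratio_;
    };

    // The list macro applies V to each (type, name) pair it knows about.
    #define FIELD_LIST(V) \
      V(int, counter)     \
      V(double, ratio)

    // One definition generates one offset constant per listed field.
    #define FIELD_OFFSET(type, name) \
      const std::size_t name##_debug_offset_ = offsetof(Widget, name##_);
    FIELD_LIST(FIELD_OFFSET)
    #undef FIELD_OFFSET
    // Expands to: counter_debug_offset_ = offsetof(Widget, counter_); etc.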
824 | 894 |
825 } } // namespace v8::internal | 895 } } // namespace v8::internal |