| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 11 matching lines...) |
| 22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, | 22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, |
| 23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | 23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY |
| 24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | 24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
| 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE |
| 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
| 27 | 27 |
| 28 #include <stdlib.h> | 28 #include <stdlib.h> |
| 29 | 29 |
| 30 #include "v8.h" | 30 #include "v8.h" |
| 31 | 31 |
| 32 #include "allocation-inl.h" | |
| 33 #include "ast.h" | 32 #include "ast.h" |
| 34 #include "bootstrapper.h" | 33 #include "bootstrapper.h" |
| 35 #include "codegen.h" | 34 #include "codegen.h" |
| 36 #include "compilation-cache.h" | 35 #include "compilation-cache.h" |
| 37 #include "cpu-profiler.h" | 36 #include "cpu-profiler.h" |
| 38 #include "debug.h" | 37 #include "debug.h" |
| 39 #include "deoptimizer.h" | 38 #include "deoptimizer.h" |
| 40 #include "heap-profiler.h" | 39 #include "heap-profiler.h" |
| 41 #include "hydrogen.h" | 40 #include "hydrogen.h" |
| 42 #include "isolate-inl.h" | 41 #include "isolate-inl.h" |
| (...skipping 81 matching lines...) |
| 124 #endif | 123 #endif |
| 125 thread_id_ = ThreadId::Current(); | 124 thread_id_ = ThreadId::Current(); |
| 126 } | 125 } |
| 127 | 126 |
| 128 | 127 |
| 129 v8::TryCatch* ThreadLocalTop::TryCatchHandler() { | 128 v8::TryCatch* ThreadLocalTop::TryCatchHandler() { |
| 130 return TRY_CATCH_FROM_ADDRESS(try_catch_handler_address()); | 129 return TRY_CATCH_FROM_ADDRESS(try_catch_handler_address()); |
| 131 } | 130 } |
| 132 | 131 |
| 133 | 132 |
| 134 // Create a dummy thread that will wait forever on a semaphore. The only | |
| 135 // purpose for this thread is to have some stack area to save essential data | |
| 136 // into for use by a stacks only core dump (aka minidump). | |
| 137 class PreallocatedMemoryThread: public Thread { | |
| 138 public: | |
| 139 char* data() { | |
| 140 if (data_ready_semaphore_ != NULL) { | |
| 141 // Initial access is guarded until the data has been published. | |
| 142 data_ready_semaphore_->Wait(); | |
| 143 delete data_ready_semaphore_; | |
| 144 data_ready_semaphore_ = NULL; | |
| 145 } | |
| 146 return data_; | |
| 147 } | |
| 148 | |
| 149 unsigned length() { | |
| 150 if (data_ready_semaphore_ != NULL) { | |
| 151 // Initial access is guarded until the data has been published. | |
| 152 data_ready_semaphore_->Wait(); | |
| 153 delete data_ready_semaphore_; | |
| 154 data_ready_semaphore_ = NULL; | |
| 155 } | |
| 156 return length_; | |
| 157 } | |
| 158 | |
| 159 // Stop the PreallocatedMemoryThread and release its resources. | |
| 160 void StopThread() { | |
| 161 keep_running_ = false; | |
| 162 wait_for_ever_semaphore_->Signal(); | |
| 163 | |
| 164 // Wait for the thread to terminate. | |
| 165 Join(); | |
| 166 | |
| 167 if (data_ready_semaphore_ != NULL) { | |
| 168 delete data_ready_semaphore_; | |
| 169 data_ready_semaphore_ = NULL; | |
| 170 } | |
| 171 | |
| 172 delete wait_for_ever_semaphore_; | |
| 173 wait_for_ever_semaphore_ = NULL; | |
| 174 } | |
| 175 | |
| 176 protected: | |
| 177 // When the thread starts running it will allocate a fixed number of bytes | |
| 178 // on the stack and publish the location of this memory for others to use. | |
| 179 void Run() { | |
| 180 EmbeddedVector<char, 15 * 1024> local_buffer; | |
| 181 | |
| 182 // Initialize the buffer with a known good value. | |
| 183 OS::StrNCpy(local_buffer, "Trace data was not generated.\n", | |
| 184 local_buffer.length()); | |
| 185 | |
| 186 // Publish the local buffer and signal its availability. | |
| 187 data_ = local_buffer.start(); | |
| 188 length_ = local_buffer.length(); | |
| 189 data_ready_semaphore_->Signal(); | |
| 190 | |
| 191 while (keep_running_) { | |
| 192 // This thread will wait here until the end of time. | |
| 193 wait_for_ever_semaphore_->Wait(); | |
| 194 } | |
| 195 | |
| 196 // Make sure we access the buffer after the wait to remove all possibility | |
| 197 // of it being optimized away. | |
| 198 OS::StrNCpy(local_buffer, "PreallocatedMemoryThread shutting down.\n", | |
| 199 local_buffer.length()); | |
| 200 } | |
| 201 | |
| 202 | |
| 203 private: | |
| 204 PreallocatedMemoryThread() | |
| 205 : Thread("v8:PreallocMem"), | |
| 206 keep_running_(true), | |
| 207 wait_for_ever_semaphore_(new Semaphore(0)), | |
| 208 data_ready_semaphore_(new Semaphore(0)), | |
| 209 data_(NULL), | |
| 210 length_(0) { | |
| 211 } | |
| 212 | |
| 213 // Used to make sure that the thread keeps looping even for spurious wakeups. | |
| 214 bool keep_running_; | |
| 215 | |
| 216 // This semaphore is used by the PreallocatedMemoryThread to wait for ever. | |
| 217 Semaphore* wait_for_ever_semaphore_; | |
| 218 // Semaphore to signal that the data has been initialized. | |
| 219 Semaphore* data_ready_semaphore_; | |
| 220 | |
| 221 // Location and size of the preallocated memory block. | |
| 222 char* data_; | |
| 223 unsigned length_; | |
| 224 | |
| 225 friend class Isolate; | |
| 226 | |
| 227 DISALLOW_COPY_AND_ASSIGN(PreallocatedMemoryThread); | |
| 228 }; | |
| 229 | |
| 230 | |
| 231 void Isolate::PreallocatedMemoryThreadStart() { | |
| 232 if (preallocated_memory_thread_ != NULL) return; | |
| 233 preallocated_memory_thread_ = new PreallocatedMemoryThread(); | |
| 234 preallocated_memory_thread_->Start(); | |
| 235 } | |
| 236 | |
| 237 | |
| 238 void Isolate::PreallocatedMemoryThreadStop() { | |
| 239 if (preallocated_memory_thread_ == NULL) return; | |
| 240 preallocated_memory_thread_->StopThread(); | |
| 241 // Done with the thread entirely. | |
| 242 delete preallocated_memory_thread_; | |
| 243 preallocated_memory_thread_ = NULL; | |
| 244 } | |
| 245 | |
| 246 | |
| 247 void Isolate::PreallocatedStorageInit(size_t size) { | |
| 248 ASSERT(free_list_.next_ == &free_list_); | |
| 249 ASSERT(free_list_.previous_ == &free_list_); | |
| 250 PreallocatedStorage* free_chunk = | |
| 251 reinterpret_cast<PreallocatedStorage*>(new char[size]); | |
| 252 free_list_.next_ = free_list_.previous_ = free_chunk; | |
| 253 free_chunk->next_ = free_chunk->previous_ = &free_list_; | |
| 254 free_chunk->size_ = size - sizeof(PreallocatedStorage); | |
| 255 preallocated_storage_preallocated_ = true; | |
| 256 } | |
| 257 | |
| 258 | |
| 259 void* Isolate::PreallocatedStorageNew(size_t size) { | |
| 260 if (!preallocated_storage_preallocated_) { | |
| 261 return FreeStoreAllocationPolicy().New(size); | |
| 262 } | |
| 263 ASSERT(free_list_.next_ != &free_list_); | |
| 264 ASSERT(free_list_.previous_ != &free_list_); | |
| 265 | |
| 266 size = (size + kPointerSize - 1) & ~(kPointerSize - 1); | |
| 267 // Search for exact fit. | |
| 268 for (PreallocatedStorage* storage = free_list_.next_; | |
| 269 storage != &free_list_; | |
| 270 storage = storage->next_) { | |
| 271 if (storage->size_ == size) { | |
| 272 storage->Unlink(); | |
| 273 storage->LinkTo(&in_use_list_); | |
| 274 return reinterpret_cast<void*>(storage + 1); | |
| 275 } | |
| 276 } | |
| 277 // Search for first fit. | |
| 278 for (PreallocatedStorage* storage = free_list_.next_; | |
| 279 storage != &free_list_; | |
| 280 storage = storage->next_) { | |
| 281 if (storage->size_ >= size + sizeof(PreallocatedStorage)) { | |
| 282 storage->Unlink(); | |
| 283 storage->LinkTo(&in_use_list_); | |
| 284 PreallocatedStorage* left_over = | |
| 285 reinterpret_cast<PreallocatedStorage*>( | |
| 286 reinterpret_cast<char*>(storage + 1) + size); | |
| 287 left_over->size_ = storage->size_ - size - sizeof(PreallocatedStorage); | |
| 288 ASSERT(size + left_over->size_ + sizeof(PreallocatedStorage) == | |
| 289 storage->size_); | |
| 290 storage->size_ = size; | |
| 291 left_over->LinkTo(&free_list_); | |
| 292 return reinterpret_cast<void*>(storage + 1); | |
| 293 } | |
| 294 } | |
| 295 // Allocation failure. | |
| 296 ASSERT(false); | |
| 297 return NULL; | |
| 298 } | |
| 299 | |
| 300 | |
| 301 // We don't attempt to coalesce. | |
| 302 void Isolate::PreallocatedStorageDelete(void* p) { | |
| 303 if (p == NULL) { | |
| 304 return; | |
| 305 } | |
| 306 if (!preallocated_storage_preallocated_) { | |
| 307 FreeStoreAllocationPolicy::Delete(p); | |
| 308 return; | |
| 309 } | |
| 310 PreallocatedStorage* storage = reinterpret_cast<PreallocatedStorage*>(p) - 1; | |
| 311 ASSERT(storage->next_->previous_ == storage); | |
| 312 ASSERT(storage->previous_->next_ == storage); | |
| 313 storage->Unlink(); | |
| 314 storage->LinkTo(&free_list_); | |
| 315 } | |
| 316 | |
| 317 Isolate* Isolate::default_isolate_ = NULL; | 133 Isolate* Isolate::default_isolate_ = NULL; |
| 318 Thread::LocalStorageKey Isolate::isolate_key_; | 134 Thread::LocalStorageKey Isolate::isolate_key_; |
| 319 Thread::LocalStorageKey Isolate::thread_id_key_; | 135 Thread::LocalStorageKey Isolate::thread_id_key_; |
| 320 Thread::LocalStorageKey Isolate::per_isolate_thread_data_key_; | 136 Thread::LocalStorageKey Isolate::per_isolate_thread_data_key_; |
| 321 #ifdef DEBUG | 137 #ifdef DEBUG |
| 322 Thread::LocalStorageKey PerThreadAssertScopeBase::thread_local_key; | 138 Thread::LocalStorageKey PerThreadAssertScopeBase::thread_local_key; |
| 323 #endif // DEBUG | 139 #endif // DEBUG |
| 324 Mutex Isolate::process_wide_mutex_; | 140 Mutex Isolate::process_wide_mutex_; |
| 325 // TODO(dcarney): Remove with default isolate. | 141 // TODO(dcarney): Remove with default isolate. |
| 326 enum DefaultIsolateStatus { | 142 enum DefaultIsolateStatus { |
| (...skipping 511 matching lines...) |
| 838 | 654 |
| 839 | 655 |
| 840 void Isolate::PrintStack() { | 656 void Isolate::PrintStack() { |
| 841 PrintStack(stdout); | 657 PrintStack(stdout); |
| 842 } | 658 } |
| 843 | 659 |
| 844 | 660 |
| 845 void Isolate::PrintStack(FILE* out) { | 661 void Isolate::PrintStack(FILE* out) { |
| 846 if (stack_trace_nesting_level_ == 0) { | 662 if (stack_trace_nesting_level_ == 0) { |
| 847 stack_trace_nesting_level_++; | 663 stack_trace_nesting_level_++; |
| 848 | |
| 849 StringAllocator* allocator; | |
| 850 if (preallocated_message_space_ == NULL) { | |
| 851 allocator = new HeapStringAllocator(); | |
| 852 } else { | |
| 853 allocator = preallocated_message_space_; | |
| 854 } | |
| 855 | |
| 856 StringStream::ClearMentionedObjectCache(this); | 664 StringStream::ClearMentionedObjectCache(this); |
| 857 StringStream accumulator(allocator); | 665 HeapStringAllocator allocator; |
| | 666 StringStream accumulator(&allocator); |
| 858 incomplete_message_ = &accumulator; | 667 incomplete_message_ = &accumulator; |
| 859 PrintStack(&accumulator); | 668 PrintStack(&accumulator); |
| 860 accumulator.OutputToFile(out); | 669 accumulator.OutputToFile(out); |
| 861 InitializeLoggingAndCounters(); | 670 InitializeLoggingAndCounters(); |
| 862 accumulator.Log(this); | 671 accumulator.Log(this); |
| 863 incomplete_message_ = NULL; | 672 incomplete_message_ = NULL; |
| 864 stack_trace_nesting_level_ = 0; | 673 stack_trace_nesting_level_ = 0; |
| 865 if (preallocated_message_space_ == NULL) { | |
| 866 // Remove the HeapStringAllocator created above. | |
| 867 delete allocator; | |
| 868 } | |
| 869 } else if (stack_trace_nesting_level_ == 1) { | 674 } else if (stack_trace_nesting_level_ == 1) { |
| 870 stack_trace_nesting_level_++; | 675 stack_trace_nesting_level_++; |
| 871 OS::PrintError( | 676 OS::PrintError( |
| 872 "\n\nAttempt to print stack while printing stack (double fault)\n"); | 677 "\n\nAttempt to print stack while printing stack (double fault)\n"); |
| 873 OS::PrintError( | 678 OS::PrintError( |
| 874 "If you are lucky you may find a partial stack dump on stdout.\n\n"); | 679 "If you are lucky you may find a partial stack dump on stdout.\n\n"); |
| 875 incomplete_message_->OutputToFile(out); | 680 incomplete_message_->OutputToFile(out); |
| 876 } | 681 } |
| 877 } | 682 } |
| 878 | 683 |
| (...skipping 838 matching lines...) |
| 1717 #define TRACE_ISOLATE(tag) | 1522 #define TRACE_ISOLATE(tag) |
| 1718 #endif | 1523 #endif |
| 1719 | 1524 |
| 1720 | 1525 |
| 1721 Isolate::Isolate() | 1526 Isolate::Isolate() |
| 1722 : state_(UNINITIALIZED), | 1527 : state_(UNINITIALIZED), |
| 1723 embedder_data_(NULL), | 1528 embedder_data_(NULL), |
| 1724 entry_stack_(NULL), | 1529 entry_stack_(NULL), |
| 1725 stack_trace_nesting_level_(0), | 1530 stack_trace_nesting_level_(0), |
| 1726 incomplete_message_(NULL), | 1531 incomplete_message_(NULL), |
| 1727 preallocated_memory_thread_(NULL), | |
| 1728 preallocated_message_space_(NULL), | |
| 1729 bootstrapper_(NULL), | 1532 bootstrapper_(NULL), |
| 1730 runtime_profiler_(NULL), | 1533 runtime_profiler_(NULL), |
| 1731 compilation_cache_(NULL), | 1534 compilation_cache_(NULL), |
| 1732 counters_(NULL), | 1535 counters_(NULL), |
| 1733 code_range_(NULL), | 1536 code_range_(NULL), |
| 1734 debugger_initialized_(false), | 1537 debugger_initialized_(false), |
| 1735 logger_(NULL), | 1538 logger_(NULL), |
| 1736 stats_table_(NULL), | 1539 stats_table_(NULL), |
| 1737 stub_cache_(NULL), | 1540 stub_cache_(NULL), |
| 1738 deoptimizer_data_(NULL), | 1541 deoptimizer_data_(NULL), |
| 1739 capture_stack_trace_for_uncaught_exceptions_(false), | 1542 capture_stack_trace_for_uncaught_exceptions_(false), |
| 1740 stack_trace_for_uncaught_exceptions_frame_limit_(0), | 1543 stack_trace_for_uncaught_exceptions_frame_limit_(0), |
| 1741 stack_trace_for_uncaught_exceptions_options_(StackTrace::kOverview), | 1544 stack_trace_for_uncaught_exceptions_options_(StackTrace::kOverview), |
| 1742 transcendental_cache_(NULL), | 1545 transcendental_cache_(NULL), |
| 1743 memory_allocator_(NULL), | 1546 memory_allocator_(NULL), |
| 1744 keyed_lookup_cache_(NULL), | 1547 keyed_lookup_cache_(NULL), |
| 1745 context_slot_cache_(NULL), | 1548 context_slot_cache_(NULL), |
| 1746 descriptor_lookup_cache_(NULL), | 1549 descriptor_lookup_cache_(NULL), |
| 1747 handle_scope_implementer_(NULL), | 1550 handle_scope_implementer_(NULL), |
| 1748 unicode_cache_(NULL), | 1551 unicode_cache_(NULL), |
| 1749 runtime_zone_(this), | 1552 runtime_zone_(this), |
| 1750 in_use_list_(0), | |
| 1751 free_list_(0), | |
| 1752 preallocated_storage_preallocated_(false), | |
| 1753 inner_pointer_to_code_cache_(NULL), | 1553 inner_pointer_to_code_cache_(NULL), |
| 1754 write_iterator_(NULL), | 1554 write_iterator_(NULL), |
| 1755 global_handles_(NULL), | 1555 global_handles_(NULL), |
| 1756 eternal_handles_(NULL), | 1556 eternal_handles_(NULL), |
| 1757 context_switcher_(NULL), | 1557 context_switcher_(NULL), |
| 1758 thread_manager_(NULL), | 1558 thread_manager_(NULL), |
| 1759 fp_stubs_generated_(false), | 1559 fp_stubs_generated_(false), |
| 1760 has_installed_extensions_(false), | 1560 has_installed_extensions_(false), |
| 1761 string_tracker_(NULL), | 1561 string_tracker_(NULL), |
| 1762 regexp_stack_(NULL), | 1562 regexp_stack_(NULL), |
| (...skipping 132 matching lines...) |
| 1895 | 1695 |
| 1896 delete deoptimizer_data_; | 1696 delete deoptimizer_data_; |
| 1897 deoptimizer_data_ = NULL; | 1697 deoptimizer_data_ = NULL; |
| 1898 if (FLAG_preemption) { | 1698 if (FLAG_preemption) { |
| 1899 v8::Locker locker(reinterpret_cast<v8::Isolate*>(this)); | 1699 v8::Locker locker(reinterpret_cast<v8::Isolate*>(this)); |
| 1900 v8::Locker::StopPreemption(reinterpret_cast<v8::Isolate*>(this)); | 1700 v8::Locker::StopPreemption(reinterpret_cast<v8::Isolate*>(this)); |
| 1901 } | 1701 } |
| 1902 builtins_.TearDown(); | 1702 builtins_.TearDown(); |
| 1903 bootstrapper_->TearDown(); | 1703 bootstrapper_->TearDown(); |
| 1904 | 1704 |
| 1905 // Remove the external reference to the preallocated stack memory. | |
| 1906 delete preallocated_message_space_; | |
| 1907 preallocated_message_space_ = NULL; | |
| 1908 PreallocatedMemoryThreadStop(); | |
| 1909 | |
| 1910 if (runtime_profiler_ != NULL) { | 1705 if (runtime_profiler_ != NULL) { |
| 1911 runtime_profiler_->TearDown(); | 1706 runtime_profiler_->TearDown(); |
| 1912 delete runtime_profiler_; | 1707 delete runtime_profiler_; |
| 1913 runtime_profiler_ = NULL; | 1708 runtime_profiler_ = NULL; |
| 1914 } | 1709 } |
| 1915 heap_.TearDown(); | 1710 heap_.TearDown(); |
| 1916 logger_->TearDown(); | 1711 logger_->TearDown(); |
| 1917 | 1712 |
| 1918 delete heap_profiler_; | 1713 delete heap_profiler_; |
| 1919 heap_profiler_ = NULL; | 1714 heap_profiler_ = NULL; |
| (...skipping 321 matching lines...) |
| 2241 } | 2036 } |
| 2242 | 2037 |
| 2243 if (num_sweeper_threads_ > 0) { | 2038 if (num_sweeper_threads_ > 0) { |
| 2244 sweeper_thread_ = new SweeperThread*[num_sweeper_threads_]; | 2039 sweeper_thread_ = new SweeperThread*[num_sweeper_threads_]; |
| 2245 for (int i = 0; i < num_sweeper_threads_; i++) { | 2040 for (int i = 0; i < num_sweeper_threads_; i++) { |
| 2246 sweeper_thread_[i] = new SweeperThread(this); | 2041 sweeper_thread_[i] = new SweeperThread(this); |
| 2247 sweeper_thread_[i]->Start(); | 2042 sweeper_thread_[i]->Start(); |
| 2248 } | 2043 } |
| 2249 } | 2044 } |
| 2250 | 2045 |
| 2251 // Only preallocate on the first initialization. | |
| 2252 if (FLAG_preallocate_message_memory && preallocated_message_space_ == NULL) { | |
| 2253 // Start the thread which will set aside some memory. | |
| 2254 PreallocatedMemoryThreadStart(); | |
| 2255 preallocated_message_space_ = | |
| 2256 new NoAllocationStringAllocator( | |
| 2257 preallocated_memory_thread_->data(), | |
| 2258 preallocated_memory_thread_->length()); | |
| 2259 PreallocatedStorageInit(preallocated_memory_thread_->length() / 4); | |
| 2260 } | |
| 2261 | |
| 2262 if (FLAG_preemption) { | 2046 if (FLAG_preemption) { |
| 2263 v8::Locker locker(reinterpret_cast<v8::Isolate*>(this)); | 2047 v8::Locker locker(reinterpret_cast<v8::Isolate*>(this)); |
| 2264 v8::Locker::StartPreemption(reinterpret_cast<v8::Isolate*>(this), 100); | 2048 v8::Locker::StartPreemption(reinterpret_cast<v8::Isolate*>(this), 100); |
| 2265 } | 2049 } |
| 2266 | 2050 |
| 2267 #ifdef ENABLE_DEBUGGER_SUPPORT | 2051 #ifdef ENABLE_DEBUGGER_SUPPORT |
| 2268 debug_->SetUp(create_heap_objects); | 2052 debug_->SetUp(create_heap_objects); |
| 2269 #endif | 2053 #endif |
| 2270 | 2054 |
| 2271 // If we are deserializing, read the state into the now-empty heap. | 2055 // If we are deserializing, read the state into the now-empty heap. |
| (...skipping 254 matching lines...) |
| 2526 | 2310 |
| 2527 #ifdef DEBUG | 2311 #ifdef DEBUG |
| 2528 #define ISOLATE_FIELD_OFFSET(type, name, ignored) \ | 2312 #define ISOLATE_FIELD_OFFSET(type, name, ignored) \ |
| 2529 const intptr_t Isolate::name##_debug_offset_ = OFFSET_OF(Isolate, name##_); | 2313 const intptr_t Isolate::name##_debug_offset_ = OFFSET_OF(Isolate, name##_); |
| 2530 ISOLATE_INIT_LIST(ISOLATE_FIELD_OFFSET) | 2314 ISOLATE_INIT_LIST(ISOLATE_FIELD_OFFSET) |
| 2531 ISOLATE_INIT_ARRAY_LIST(ISOLATE_FIELD_OFFSET) | 2315 ISOLATE_INIT_ARRAY_LIST(ISOLATE_FIELD_OFFSET) |
| 2532 #undef ISOLATE_FIELD_OFFSET | 2316 #undef ISOLATE_FIELD_OFFSET |
| 2533 #endif | 2317 #endif |
| 2534 | 2318 |
| 2535 } } // namespace v8::internal | 2319 } } // namespace v8::internal |