Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(1)

Side by Side Diff: src/isolate.h

Issue 185653004: Experimental parser: merge to r19637 (Closed) Base URL: https://v8.googlecode.com/svn/branches/experimental/parser
Patch Set: Created 6 years, 9 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« no previous file with comments | « src/interface.cc ('k') | src/isolate.cc » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 10 matching lines...) Expand all
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 27
28 #ifndef V8_ISOLATE_H_ 28 #ifndef V8_ISOLATE_H_
29 #define V8_ISOLATE_H_ 29 #define V8_ISOLATE_H_
30 30
31 #include <set>
32
33 #include "../include/v8-debug.h" 31 #include "../include/v8-debug.h"
34 #include "allocation.h" 32 #include "allocation.h"
35 #include "assert-scope.h" 33 #include "assert-scope.h"
36 #include "atomicops.h" 34 #include "atomicops.h"
37 #include "builtins.h" 35 #include "builtins.h"
38 #include "contexts.h" 36 #include "contexts.h"
39 #include "execution.h" 37 #include "execution.h"
40 #include "frames.h" 38 #include "frames.h"
41 #include "date.h" 39 #include "date.h"
42 #include "global-handles.h" 40 #include "global-handles.h"
43 #include "handles.h" 41 #include "handles.h"
44 #include "hashmap.h" 42 #include "hashmap.h"
45 #include "heap.h" 43 #include "heap.h"
46 #include "optimizing-compiler-thread.h" 44 #include "optimizing-compiler-thread.h"
47 #include "regexp-stack.h" 45 #include "regexp-stack.h"
48 #include "runtime-profiler.h" 46 #include "runtime-profiler.h"
49 #include "runtime.h" 47 #include "runtime.h"
50 #include "zone.h" 48 #include "zone.h"
51 49
52 namespace v8 { 50 namespace v8 {
53 namespace internal { 51 namespace internal {
54 52
55 class Bootstrapper; 53 class Bootstrapper;
54 struct CallInterfaceDescriptor;
56 class CodeGenerator; 55 class CodeGenerator;
57 class CodeRange; 56 class CodeRange;
58 struct CodeStubInterfaceDescriptor; 57 struct CodeStubInterfaceDescriptor;
59 struct CallInterfaceDescriptor;
60 class CodeTracer; 58 class CodeTracer;
61 class CompilationCache; 59 class CompilationCache;
60 class ConsStringIteratorOp;
62 class ContextSlotCache; 61 class ContextSlotCache;
63 class Counters; 62 class Counters;
64 class CpuFeatures; 63 class CpuFeatures;
65 class CpuProfiler; 64 class CpuProfiler;
66 class DeoptimizerData; 65 class DeoptimizerData;
67 class Deserializer; 66 class Deserializer;
68 class EmptyStatement; 67 class EmptyStatement;
69 class ExternalCallbackScope; 68 class ExternalCallbackScope;
70 class ExternalReferenceTable; 69 class ExternalReferenceTable;
71 class Factory; 70 class Factory;
72 class FunctionInfoListener; 71 class FunctionInfoListener;
73 class HandleScopeImplementer; 72 class HandleScopeImplementer;
74 class HeapProfiler; 73 class HeapProfiler;
75 class HStatistics; 74 class HStatistics;
76 class HTracer; 75 class HTracer;
77 class InlineRuntimeFunctionsTable; 76 class InlineRuntimeFunctionsTable;
77 class InnerPointerToCodeCache;
78 class MaterializedObjectStore;
78 class NoAllocationStringAllocator; 79 class NoAllocationStringAllocator;
79 class InnerPointerToCodeCache;
80 class RandomNumberGenerator; 80 class RandomNumberGenerator;
81 class RegExpStack; 81 class RegExpStack;
82 class SaveContext; 82 class SaveContext;
83 class UnicodeCache; 83 class LexerGCHandler;
84 class ConsStringIteratorOp;
85 class ScannerBase;
86 class StringTracker; 84 class StringTracker;
87 class StubCache; 85 class StubCache;
88 class SweeperThread; 86 class SweeperThread;
89 class ThreadManager; 87 class ThreadManager;
90 class ThreadState; 88 class ThreadState;
91 class ThreadVisitor; // Defined in v8threads.h 89 class ThreadVisitor; // Defined in v8threads.h
90 class UnicodeCache;
92 template <StateTag Tag> class VMState; 91 template <StateTag Tag> class VMState;
93 92
94 // 'void function pointer', used to roundtrip the 93 // 'void function pointer', used to roundtrip the
95 // ExternalReference::ExternalReferenceRedirector since we can not include 94 // ExternalReference::ExternalReferenceRedirector since we can not include
96 // assembler.h, where it is defined, here. 95 // assembler.h, where it is defined, here.
97 typedef void* ExternalReferenceRedirectorPointer(); 96 typedef void* ExternalReferenceRedirectorPointer();
98 97
99 98
100 #ifdef ENABLE_DEBUGGER_SUPPORT 99 #ifdef ENABLE_DEBUGGER_SUPPORT
101 class Debug; 100 class Debug;
102 class Debugger; 101 class Debugger;
103 class DebuggerAgent; 102 class DebuggerAgent;
104 #endif 103 #endif
105 104
106 #if !defined(__arm__) && V8_TARGET_ARCH_ARM || \ 105 #if !defined(__arm__) && V8_TARGET_ARCH_ARM || \
106 !defined(__aarch64__) && V8_TARGET_ARCH_A64 || \
107 !defined(__mips__) && V8_TARGET_ARCH_MIPS 107 !defined(__mips__) && V8_TARGET_ARCH_MIPS
108 class Redirection; 108 class Redirection;
109 class Simulator; 109 class Simulator;
110 #endif 110 #endif
111 111
112 112
113 // Static indirection table for handles to constants. If a frame 113 // Static indirection table for handles to constants. If a frame
114 // element represents a constant, the data contains an index into 114 // element represents a constant, the data contains an index into
115 // this table of handles to the actual constants. 115 // this table of handles to the actual constants.
116 // Static indirection table for handles to constants. If a Result 116 // Static indirection table for handles to constants. If a Result
(...skipping 85 matching lines...) Expand 10 before | Expand all | Expand 10 after
202 static int GetCurrentThreadId(); 202 static int GetCurrentThreadId();
203 203
204 int id_; 204 int id_;
205 205
206 static Atomic32 highest_thread_id_; 206 static Atomic32 highest_thread_id_;
207 207
208 friend class Isolate; 208 friend class Isolate;
209 }; 209 };
210 210
211 211
212 #define FIELD_ACCESSOR(type, name) \
213 inline void set_##name(type v) { name##_ = v; } \
214 inline type name() const { return name##_; }
215
216
212 class ThreadLocalTop BASE_EMBEDDED { 217 class ThreadLocalTop BASE_EMBEDDED {
213 public: 218 public:
214 // Does early low-level initialization that does not depend on the 219 // Does early low-level initialization that does not depend on the
215 // isolate being present. 220 // isolate being present.
216 ThreadLocalTop(); 221 ThreadLocalTop();
217 222
218 // Initialize the thread data. 223 // Initialize the thread data.
219 void Initialize(); 224 void Initialize();
220 225
221 // Get the top C++ try catch handler or NULL if none are registered. 226 // Get the top C++ try catch handler or NULL if none are registered.
222 // 227 //
 223 // This method is not guaranteed to return an address that can be 228 // This method is not guaranteed to return an address that can be
224 // used for comparison with addresses into the JS stack. If such an 229 // used for comparison with addresses into the JS stack. If such an
225 // address is needed, use try_catch_handler_address. 230 // address is needed, use try_catch_handler_address.
226 v8::TryCatch* TryCatchHandler(); 231 v8::TryCatch* TryCatchHandler();
227 232
228 // Get the address of the top C++ try catch handler or NULL if 233 // Get the address of the top C++ try catch handler or NULL if
229 // none are registered. 234 // none are registered.
230 // 235 //
231 // This method always returns an address that can be compared to 236 // This method always returns an address that can be compared to
232 // pointers into the JavaScript stack. When running on actual 237 // pointers into the JavaScript stack. When running on actual
233 // hardware, try_catch_handler_address and TryCatchHandler return 238 // hardware, try_catch_handler_address and TryCatchHandler return
234 // the same pointer. When running on a simulator with a separate JS 239 // the same pointer. When running on a simulator with a separate JS
235 // stack, try_catch_handler_address returns a JS stack address that 240 // stack, try_catch_handler_address returns a JS stack address that
236 // corresponds to the place on the JS stack where the C++ handler 241 // corresponds to the place on the JS stack where the C++ handler
237 // would have been if the stack were not separate. 242 // would have been if the stack were not separate.
238 inline Address try_catch_handler_address() { 243 FIELD_ACCESSOR(Address, try_catch_handler_address)
239 return try_catch_handler_address_;
240 }
241
242 // Set the address of the top C++ try catch handler.
243 inline void set_try_catch_handler_address(Address address) {
244 try_catch_handler_address_ = address;
245 }
246 244
247 void Free() { 245 void Free() {
248 ASSERT(!has_pending_message_); 246 ASSERT(!has_pending_message_);
249 ASSERT(!external_caught_exception_); 247 ASSERT(!external_caught_exception_);
250 ASSERT(try_catch_handler_address_ == NULL); 248 ASSERT(try_catch_handler_address_ == NULL);
251 } 249 }
252 250
253 Isolate* isolate_; 251 Isolate* isolate_;
254 // The context where the current execution method is created and for variable 252 // The context where the current execution method is created and for variable
255 // lookups. 253 // lookups.
(...skipping 99 matching lines...) Expand 10 before | Expand all | Expand 10 after
355 V(Relocatable*, relocatable_top, NULL) \ 353 V(Relocatable*, relocatable_top, NULL) \
356 V(DebugObjectCache*, string_stream_debug_object_cache, NULL) \ 354 V(DebugObjectCache*, string_stream_debug_object_cache, NULL) \
357 V(Object*, string_stream_current_security_token, NULL) \ 355 V(Object*, string_stream_current_security_token, NULL) \
358 /* TODO(isolates): Release this on destruction? */ \ 356 /* TODO(isolates): Release this on destruction? */ \
359 V(int*, irregexp_interpreter_backtrack_stack_cache, NULL) \ 357 V(int*, irregexp_interpreter_backtrack_stack_cache, NULL) \
360 /* Serializer state. */ \ 358 /* Serializer state. */ \
361 V(ExternalReferenceTable*, external_reference_table, NULL) \ 359 V(ExternalReferenceTable*, external_reference_table, NULL) \
362 /* AstNode state. */ \ 360 /* AstNode state. */ \
363 V(int, ast_node_id, 0) \ 361 V(int, ast_node_id, 0) \
364 V(unsigned, ast_node_count, 0) \ 362 V(unsigned, ast_node_count, 0) \
365 V(bool, microtask_pending, false) \ 363 V(bool, microtask_pending, false) \
364 V(bool, autorun_microtasks, true) \
366 V(HStatistics*, hstatistics, NULL) \ 365 V(HStatistics*, hstatistics, NULL) \
367 V(HTracer*, htracer, NULL) \ 366 V(HTracer*, htracer, NULL) \
368 V(CodeTracer*, code_tracer, NULL) \ 367 V(CodeTracer*, code_tracer, NULL) \
369 ISOLATE_DEBUGGER_INIT_LIST(V) 368 ISOLATE_DEBUGGER_INIT_LIST(V)
370 369
370 #define THREAD_LOCAL_TOP_ACCESSOR(type, name) \
371 inline void set_##name(type v) { thread_local_top_.name##_ = v; } \
372 inline type name() const { return thread_local_top_.name##_; }
373
374
371 class Isolate { 375 class Isolate {
372 // These forward declarations are required to make the friend declarations in 376 // These forward declarations are required to make the friend declarations in
373 // PerIsolateThreadData work on some older versions of gcc. 377 // PerIsolateThreadData work on some older versions of gcc.
374 class ThreadDataTable; 378 class ThreadDataTable;
375 class EntryStackItem; 379 class EntryStackItem;
376 public: 380 public:
377 ~Isolate(); 381 ~Isolate();
378 382
379 // A thread has a PerIsolateThreadData instance for each isolate that it has 383 // A thread has a PerIsolateThreadData instance for each isolate that it has
380 // entered. That instance is allocated when the isolate is initially entered 384 // entered. That instance is allocated when the isolate is initially entered
381 // and reused on subsequent entries. 385 // and reused on subsequent entries.
382 class PerIsolateThreadData { 386 class PerIsolateThreadData {
383 public: 387 public:
384 PerIsolateThreadData(Isolate* isolate, ThreadId thread_id) 388 PerIsolateThreadData(Isolate* isolate, ThreadId thread_id)
385 : isolate_(isolate), 389 : isolate_(isolate),
386 thread_id_(thread_id), 390 thread_id_(thread_id),
387 stack_limit_(0), 391 stack_limit_(0),
388 thread_state_(NULL), 392 thread_state_(NULL),
389 #if !defined(__arm__) && V8_TARGET_ARCH_ARM || \ 393 #if !defined(__arm__) && V8_TARGET_ARCH_ARM || \
394 !defined(__aarch64__) && V8_TARGET_ARCH_A64 || \
390 !defined(__mips__) && V8_TARGET_ARCH_MIPS 395 !defined(__mips__) && V8_TARGET_ARCH_MIPS
391 simulator_(NULL), 396 simulator_(NULL),
392 #endif 397 #endif
393 next_(NULL), 398 next_(NULL),
394 prev_(NULL) { } 399 prev_(NULL) { }
400 ~PerIsolateThreadData();
395 Isolate* isolate() const { return isolate_; } 401 Isolate* isolate() const { return isolate_; }
396 ThreadId thread_id() const { return thread_id_; } 402 ThreadId thread_id() const { return thread_id_; }
397 void set_stack_limit(uintptr_t value) { stack_limit_ = value; } 403
398 uintptr_t stack_limit() const { return stack_limit_; } 404 FIELD_ACCESSOR(uintptr_t, stack_limit)
399 ThreadState* thread_state() const { return thread_state_; } 405 FIELD_ACCESSOR(ThreadState*, thread_state)
400 void set_thread_state(ThreadState* value) { thread_state_ = value; }
401 406
402 #if !defined(__arm__) && V8_TARGET_ARCH_ARM || \ 407 #if !defined(__arm__) && V8_TARGET_ARCH_ARM || \
408 !defined(__aarch64__) && V8_TARGET_ARCH_A64 || \
403 !defined(__mips__) && V8_TARGET_ARCH_MIPS 409 !defined(__mips__) && V8_TARGET_ARCH_MIPS
404 Simulator* simulator() const { return simulator_; } 410 FIELD_ACCESSOR(Simulator*, simulator)
405 void set_simulator(Simulator* simulator) {
406 simulator_ = simulator;
407 }
408 #endif 411 #endif
409 412
410 bool Matches(Isolate* isolate, ThreadId thread_id) const { 413 bool Matches(Isolate* isolate, ThreadId thread_id) const {
411 return isolate_ == isolate && thread_id_.Equals(thread_id); 414 return isolate_ == isolate && thread_id_.Equals(thread_id);
412 } 415 }
413 416
414 private: 417 private:
415 Isolate* isolate_; 418 Isolate* isolate_;
416 ThreadId thread_id_; 419 ThreadId thread_id_;
417 uintptr_t stack_limit_; 420 uintptr_t stack_limit_;
418 ThreadState* thread_state_; 421 ThreadState* thread_state_;
419 422
420 #if !defined(__arm__) && V8_TARGET_ARCH_ARM || \ 423 #if !defined(__arm__) && V8_TARGET_ARCH_ARM || \
424 !defined(__aarch64__) && V8_TARGET_ARCH_A64 || \
421 !defined(__mips__) && V8_TARGET_ARCH_MIPS 425 !defined(__mips__) && V8_TARGET_ARCH_MIPS
422 Simulator* simulator_; 426 Simulator* simulator_;
423 #endif 427 #endif
424 428
425 PerIsolateThreadData* next_; 429 PerIsolateThreadData* next_;
426 PerIsolateThreadData* prev_; 430 PerIsolateThreadData* prev_;
427 431
428 friend class Isolate; 432 friend class Isolate;
429 friend class ThreadDataTable; 433 friend class ThreadDataTable;
430 friend class EntryStackItem; 434 friend class EntryStackItem;
(...skipping 105 matching lines...) Expand 10 before | Expand all | Expand 10 after
536 Address get_address_from_id(AddressId id); 540 Address get_address_from_id(AddressId id);
537 541
538 // Access to top context (where the current function object was created). 542 // Access to top context (where the current function object was created).
539 Context* context() { return thread_local_top_.context_; } 543 Context* context() { return thread_local_top_.context_; }
540 void set_context(Context* context) { 544 void set_context(Context* context) {
541 ASSERT(context == NULL || context->IsContext()); 545 ASSERT(context == NULL || context->IsContext());
542 thread_local_top_.context_ = context; 546 thread_local_top_.context_ = context;
543 } 547 }
544 Context** context_address() { return &thread_local_top_.context_; } 548 Context** context_address() { return &thread_local_top_.context_; }
545 549
546 SaveContext* save_context() { return thread_local_top_.save_context_; } 550 THREAD_LOCAL_TOP_ACCESSOR(SaveContext*, save_context)
547 void set_save_context(SaveContext* save) {
548 thread_local_top_.save_context_ = save;
549 }
550 551
551 // Access to current thread id. 552 // Access to current thread id.
552 ThreadId thread_id() { return thread_local_top_.thread_id_; } 553 THREAD_LOCAL_TOP_ACCESSOR(ThreadId, thread_id)
553 void set_thread_id(ThreadId id) { thread_local_top_.thread_id_ = id; }
554 554
555 // Interface to pending exception. 555 // Interface to pending exception.
556 MaybeObject* pending_exception() { 556 MaybeObject* pending_exception() {
557 ASSERT(has_pending_exception()); 557 ASSERT(has_pending_exception());
558 return thread_local_top_.pending_exception_; 558 return thread_local_top_.pending_exception_;
559 } 559 }
560 bool external_caught_exception() { 560
561 return thread_local_top_.external_caught_exception_;
562 }
563 void set_external_caught_exception(bool value) {
564 thread_local_top_.external_caught_exception_ = value;
565 }
566 void set_pending_exception(MaybeObject* exception) { 561 void set_pending_exception(MaybeObject* exception) {
567 thread_local_top_.pending_exception_ = exception; 562 thread_local_top_.pending_exception_ = exception;
568 } 563 }
564
569 void clear_pending_exception() { 565 void clear_pending_exception() {
570 thread_local_top_.pending_exception_ = heap_.the_hole_value(); 566 thread_local_top_.pending_exception_ = heap_.the_hole_value();
571 } 567 }
568
572 MaybeObject** pending_exception_address() { 569 MaybeObject** pending_exception_address() {
573 return &thread_local_top_.pending_exception_; 570 return &thread_local_top_.pending_exception_;
574 } 571 }
572
575 bool has_pending_exception() { 573 bool has_pending_exception() {
576 return !thread_local_top_.pending_exception_->IsTheHole(); 574 return !thread_local_top_.pending_exception_->IsTheHole();
577 } 575 }
576
577 THREAD_LOCAL_TOP_ACCESSOR(bool, external_caught_exception)
578
578 void clear_pending_message() { 579 void clear_pending_message() {
579 thread_local_top_.has_pending_message_ = false; 580 thread_local_top_.has_pending_message_ = false;
580 thread_local_top_.pending_message_obj_ = heap_.the_hole_value(); 581 thread_local_top_.pending_message_obj_ = heap_.the_hole_value();
581 thread_local_top_.pending_message_script_ = heap_.the_hole_value(); 582 thread_local_top_.pending_message_script_ = heap_.the_hole_value();
582 } 583 }
583 v8::TryCatch* try_catch_handler() { 584 v8::TryCatch* try_catch_handler() {
584 return thread_local_top_.TryCatchHandler(); 585 return thread_local_top_.TryCatchHandler();
585 } 586 }
586 Address try_catch_handler_address() { 587 Address try_catch_handler_address() {
587 return thread_local_top_.try_catch_handler_address(); 588 return thread_local_top_.try_catch_handler_address();
588 } 589 }
589 bool* external_caught_exception_address() { 590 bool* external_caught_exception_address() {
590 return &thread_local_top_.external_caught_exception_; 591 return &thread_local_top_.external_caught_exception_;
591 } 592 }
592 v8::TryCatch* catcher() { 593
593 return thread_local_top_.catcher_; 594 THREAD_LOCAL_TOP_ACCESSOR(v8::TryCatch*, catcher)
594 }
595 void set_catcher(v8::TryCatch* catcher) {
596 thread_local_top_.catcher_ = catcher;
597 }
598 595
599 MaybeObject** scheduled_exception_address() { 596 MaybeObject** scheduled_exception_address() {
600 return &thread_local_top_.scheduled_exception_; 597 return &thread_local_top_.scheduled_exception_;
601 } 598 }
602 599
603 Address pending_message_obj_address() { 600 Address pending_message_obj_address() {
604 return reinterpret_cast<Address>(&thread_local_top_.pending_message_obj_); 601 return reinterpret_cast<Address>(&thread_local_top_.pending_message_obj_);
605 } 602 }
606 603
607 Address has_pending_message_address() { 604 Address has_pending_message_address() {
(...skipping 95 matching lines...) Expand 10 before | Expand all | Expand 10 after
703 }; 700 };
704 701
705 void SetCaptureStackTraceForUncaughtExceptions( 702 void SetCaptureStackTraceForUncaughtExceptions(
706 bool capture, 703 bool capture,
707 int frame_limit, 704 int frame_limit,
708 StackTrace::StackTraceOptions options); 705 StackTrace::StackTraceOptions options);
709 706
710 // Tells whether the current context has experienced an out of memory 707 // Tells whether the current context has experienced an out of memory
711 // exception. 708 // exception.
712 bool is_out_of_memory(); 709 bool is_out_of_memory();
713 bool ignore_out_of_memory() { 710
714 return thread_local_top_.ignore_out_of_memory_; 711 THREAD_LOCAL_TOP_ACCESSOR(bool, ignore_out_of_memory)
715 }
716 void set_ignore_out_of_memory(bool value) {
717 thread_local_top_.ignore_out_of_memory_ = value;
718 }
719 712
720 void PrintCurrentStackTrace(FILE* out); 713 void PrintCurrentStackTrace(FILE* out);
721 void PrintStack(StringStream* accumulator); 714 void PrintStack(StringStream* accumulator);
722 void PrintStack(FILE* out); 715 void PrintStack(FILE* out);
723 Handle<String> StackTraceString(); 716 Handle<String> StackTraceString();
724 NO_INLINE(void PushStackTraceAndDie(unsigned int magic, 717 NO_INLINE(void PushStackTraceAndDie(unsigned int magic,
725 Object* object, 718 Object* object,
726 Map* map, 719 Map* map,
727 unsigned int magic2)); 720 unsigned int magic2));
728 Handle<JSArray> CaptureCurrentStackTrace( 721 Handle<JSArray> CaptureCurrentStackTrace(
(...skipping 136 matching lines...) Expand 10 before | Expand all | Expand 10 after
865 // the isolate is fully initialized. 858 // the isolate is fully initialized.
866 ASSERT(logger_ != NULL); 859 ASSERT(logger_ != NULL);
867 return logger_; 860 return logger_;
868 } 861 }
869 StackGuard* stack_guard() { return &stack_guard_; } 862 StackGuard* stack_guard() { return &stack_guard_; }
870 Heap* heap() { return &heap_; } 863 Heap* heap() { return &heap_; }
871 StatsTable* stats_table(); 864 StatsTable* stats_table();
872 StubCache* stub_cache() { return stub_cache_; } 865 StubCache* stub_cache() { return stub_cache_; }
873 DeoptimizerData* deoptimizer_data() { return deoptimizer_data_; } 866 DeoptimizerData* deoptimizer_data() { return deoptimizer_data_; }
874 ThreadLocalTop* thread_local_top() { return &thread_local_top_; } 867 ThreadLocalTop* thread_local_top() { return &thread_local_top_; }
868 MaterializedObjectStore* materialized_object_store() {
869 return materialized_object_store_;
870 }
875 871
876 MemoryAllocator* memory_allocator() { 872 MemoryAllocator* memory_allocator() {
877 return memory_allocator_; 873 return memory_allocator_;
878 } 874 }
879 875
880 KeyedLookupCache* keyed_lookup_cache() { 876 KeyedLookupCache* keyed_lookup_cache() {
881 return keyed_lookup_cache_; 877 return keyed_lookup_cache_;
882 } 878 }
883 879
884 ContextSlotCache* context_slot_cache() { 880 ContextSlotCache* context_slot_cache() {
(...skipping 45 matching lines...) Expand 10 before | Expand all | Expand 10 after
930 ConsStringIteratorOp* objects_string_compare_iterator_b() { 926 ConsStringIteratorOp* objects_string_compare_iterator_b() {
931 return &objects_string_compare_iterator_b_; 927 return &objects_string_compare_iterator_b_;
932 } 928 }
933 929
934 StaticResource<ConsStringIteratorOp>* objects_string_iterator() { 930 StaticResource<ConsStringIteratorOp>* objects_string_iterator() {
935 return &objects_string_iterator_; 931 return &objects_string_iterator_;
936 } 932 }
937 933
938 RuntimeState* runtime_state() { return &runtime_state_; } 934 RuntimeState* runtime_state() { return &runtime_state_; }
939 935
940 void set_fp_stubs_generated(bool value) { 936 FIELD_ACCESSOR(bool, fp_stubs_generated);
941 fp_stubs_generated_ = value;
942 }
943
944 bool fp_stubs_generated() { return fp_stubs_generated_; }
945 937
946 Builtins* builtins() { return &builtins_; } 938 Builtins* builtins() { return &builtins_; }
947 939
948 void NotifyExtensionInstalled() { 940 void NotifyExtensionInstalled() {
949 has_installed_extensions_ = true; 941 has_installed_extensions_ = true;
950 } 942 }
951 943
952 bool has_installed_extensions() { return has_installed_extensions_; } 944 bool has_installed_extensions() { return has_installed_extensions_; }
953 945
954 unibrow::Mapping<unibrow::Ecma262Canonicalize>* 946 unibrow::Mapping<unibrow::Ecma262Canonicalize>*
(...skipping 31 matching lines...) Expand 10 before | Expand all | Expand 10 after
986 HistogramInfo* heap_histograms() { return heap_histograms_; } 978 HistogramInfo* heap_histograms() { return heap_histograms_; }
987 979
988 JSObject::SpillInformation* js_spill_information() { 980 JSObject::SpillInformation* js_spill_information() {
989 return &js_spill_information_; 981 return &js_spill_information_;
990 } 982 }
991 983
992 int* code_kind_statistics() { return code_kind_statistics_; } 984 int* code_kind_statistics() { return code_kind_statistics_; }
993 #endif 985 #endif
994 986
995 #if V8_TARGET_ARCH_ARM && !defined(__arm__) || \ 987 #if V8_TARGET_ARCH_ARM && !defined(__arm__) || \
988 V8_TARGET_ARCH_A64 && !defined(__aarch64__) || \
996 V8_TARGET_ARCH_MIPS && !defined(__mips__) 989 V8_TARGET_ARCH_MIPS && !defined(__mips__)
997 bool simulator_initialized() { return simulator_initialized_; } 990 FIELD_ACCESSOR(bool, simulator_initialized)
998 void set_simulator_initialized(bool initialized) { 991 FIELD_ACCESSOR(HashMap*, simulator_i_cache)
999 simulator_initialized_ = initialized; 992 FIELD_ACCESSOR(Redirection*, simulator_redirection)
1000 }
1001
1002 HashMap* simulator_i_cache() { return simulator_i_cache_; }
1003 void set_simulator_i_cache(HashMap* hash_map) {
1004 simulator_i_cache_ = hash_map;
1005 }
1006
1007 Redirection* simulator_redirection() {
1008 return simulator_redirection_;
1009 }
1010 void set_simulator_redirection(Redirection* redirection) {
1011 simulator_redirection_ = redirection;
1012 }
1013 #endif 993 #endif
1014 994
1015 Factory* factory() { return reinterpret_cast<Factory*>(this); } 995 Factory* factory() { return reinterpret_cast<Factory*>(this); }
1016 996
1017 static const int kJSRegexpStaticOffsetsVectorSize = 128; 997 static const int kJSRegexpStaticOffsetsVectorSize = 128;
1018 998
1019 ExternalCallbackScope* external_callback_scope() { 999 THREAD_LOCAL_TOP_ACCESSOR(ExternalCallbackScope*, external_callback_scope)
1020 return thread_local_top_.external_callback_scope_;
1021 }
1022 void set_external_callback_scope(ExternalCallbackScope* scope) {
1023 thread_local_top_.external_callback_scope_ = scope;
1024 }
1025 1000
1026 StateTag current_vm_state() { 1001 THREAD_LOCAL_TOP_ACCESSOR(StateTag, current_vm_state)
1027 return thread_local_top_.current_vm_state_;
1028 }
1029
1030 void set_current_vm_state(StateTag state) {
1031 thread_local_top_.current_vm_state_ = state;
1032 }
1033 1002
1034 void SetData(uint32_t slot, void* data) { 1003 void SetData(uint32_t slot, void* data) {
1035 ASSERT(slot < Internals::kNumIsolateDataSlots); 1004 ASSERT(slot < Internals::kNumIsolateDataSlots);
1036 embedder_data_[slot] = data; 1005 embedder_data_[slot] = data;
1037 } 1006 }
1038 void* GetData(uint32_t slot) { 1007 void* GetData(uint32_t slot) {
1039 ASSERT(slot < Internals::kNumIsolateDataSlots); 1008 ASSERT(slot < Internals::kNumIsolateDataSlots);
1040 return embedder_data_[slot]; 1009 return embedder_data_[slot];
1041 } 1010 }
1042 1011
1043 LookupResult* top_lookup_result() { 1012 THREAD_LOCAL_TOP_ACCESSOR(LookupResult*, top_lookup_result)
1044 return thread_local_top_.top_lookup_result_;
1045 }
1046 void SetTopLookupResult(LookupResult* top) {
1047 thread_local_top_.top_lookup_result_ = top;
1048 }
1049 1013
1050 bool IsDead() { return has_fatal_error_; } 1014 bool IsDead() { return has_fatal_error_; }
1051 void SignalFatalError() { has_fatal_error_ = true; } 1015 void SignalFatalError() { has_fatal_error_ = true; }
1052 1016
1053 bool use_crankshaft() const { return use_crankshaft_; } 1017 bool use_crankshaft() const { return use_crankshaft_; }
1054 1018
1055 bool initialized_from_snapshot() { return initialized_from_snapshot_; } 1019 bool initialized_from_snapshot() { return initialized_from_snapshot_; }
1056 1020
1057 double time_millis_since_init() { 1021 double time_millis_since_init() {
1058 return OS::TimeCurrentMillis() - time_millis_at_init_; 1022 return OS::TimeCurrentMillis() - time_millis_at_init_;
(...skipping 13 matching lines...) Expand all
1072 Map* get_initial_js_array_map(ElementsKind kind); 1036 Map* get_initial_js_array_map(ElementsKind kind);
1073 1037
1074 bool IsFastArrayConstructorPrototypeChainIntact(); 1038 bool IsFastArrayConstructorPrototypeChainIntact();
1075 1039
1076 CodeStubInterfaceDescriptor* 1040 CodeStubInterfaceDescriptor*
1077 code_stub_interface_descriptor(int index); 1041 code_stub_interface_descriptor(int index);
1078 1042
1079 enum CallDescriptorKey { 1043 enum CallDescriptorKey {
1080 KeyedCall, 1044 KeyedCall,
1081 NamedCall, 1045 NamedCall,
1046 CallHandler,
1082 ArgumentAdaptorCall, 1047 ArgumentAdaptorCall,
1048 ApiFunctionCall,
1083 NUMBER_OF_CALL_DESCRIPTORS 1049 NUMBER_OF_CALL_DESCRIPTORS
1084 }; 1050 };
1085 1051
1086 CallInterfaceDescriptor* call_descriptor(CallDescriptorKey index); 1052 CallInterfaceDescriptor* call_descriptor(CallDescriptorKey index);
1087 1053
1088 void IterateDeferredHandles(ObjectVisitor* visitor); 1054 void IterateDeferredHandles(ObjectVisitor* visitor);
1089 void LinkDeferredHandles(DeferredHandles* deferred_handles); 1055 void LinkDeferredHandles(DeferredHandles* deferred_handles);
1090 void UnlinkDeferredHandles(DeferredHandles* deferred_handles); 1056 void UnlinkDeferredHandles(DeferredHandles* deferred_handles);
1091 1057
1092 #ifdef DEBUG 1058 #ifdef DEBUG
1093 bool IsDeferredHandle(Object** location); 1059 bool IsDeferredHandle(Object** location);
1094 #endif // DEBUG 1060 #endif // DEBUG
1095 1061
1096 int max_available_threads() const { 1062 FIELD_ACCESSOR(int, max_available_threads);
1097 return max_available_threads_;
1098 }
1099
1100 void set_max_available_threads(int value) {
1101 max_available_threads_ = value;
1102 }
1103 1063
1104 bool concurrent_recompilation_enabled() { 1064 bool concurrent_recompilation_enabled() {
1105 // Thread is only available with flag enabled. 1065 // Thread is only available with flag enabled.
1106 ASSERT(optimizing_compiler_thread_ == NULL || 1066 ASSERT(optimizing_compiler_thread_ == NULL ||
1107 FLAG_concurrent_recompilation); 1067 FLAG_concurrent_recompilation);
1108 return optimizing_compiler_thread_ != NULL; 1068 return optimizing_compiler_thread_ != NULL;
1109 } 1069 }
1110 1070
1111 bool concurrent_osr_enabled() const { 1071 bool concurrent_osr_enabled() const {
1112 // Thread is only available with flag enabled. 1072 // Thread is only available with flag enabled.
(...skipping 30 matching lines...) Expand all
1143 function_entry_hook_ = function_entry_hook; 1103 function_entry_hook_ = function_entry_hook;
1144 } 1104 }
1145 1105
1146 void* stress_deopt_count_address() { return &stress_deopt_count_; } 1106 void* stress_deopt_count_address() { return &stress_deopt_count_; }
1147 1107
1148 inline RandomNumberGenerator* random_number_generator(); 1108 inline RandomNumberGenerator* random_number_generator();
1149 1109
1150 // Given an address occupied by a live code object, return that object. 1110 // Given an address occupied by a live code object, return that object.
1151 Object* FindCodeObject(Address a); 1111 Object* FindCodeObject(Address a);
1152 1112
1153 void AddScanner(ScannerBase* scanner); 1113 LexerGCHandler* lexer_gc_handler() { return lexer_gc_handler_; }
1154 void RemoveScanner(ScannerBase* scanner); 1114
1115 int NextOptimizationId() {
1116 int id = next_optimization_id_++;
1117 if (!Smi::IsValid(next_optimization_id_)) {
1118 next_optimization_id_ = 0;
1119 }
1120 return id;
1121 }
1155 1122
1156 private: 1123 private:
1157 Isolate(); 1124 Isolate();
1158 1125
1159 friend struct GlobalState; 1126 friend struct GlobalState;
1160 friend struct InitializeGlobalState; 1127 friend struct InitializeGlobalState;
1161 1128
1162 enum State { 1129 enum State {
1163 UNINITIALIZED, // Some components may not have been allocated. 1130 UNINITIALIZED, // Some components may not have been allocated.
1164 INITIALIZED // All components are fully initialized. 1131 INITIALIZED // All components are fully initialized.
(...skipping 90 matching lines...) Expand 10 before | Expand all | Expand 10 after
1255 void FillCache(); 1222 void FillCache();
1256 1223
1257 void PropagatePendingExceptionToExternalTryCatch(); 1224 void PropagatePendingExceptionToExternalTryCatch();
1258 1225
1259 void InitializeDebugger(); 1226 void InitializeDebugger();
1260 1227
1261 // Traverse prototype chain to find out whether the object is derived from 1228 // Traverse prototype chain to find out whether the object is derived from
1262 // the Error object. 1229 // the Error object.
1263 bool IsErrorObject(Handle<Object> obj); 1230 bool IsErrorObject(Handle<Object> obj);
1264 1231
1265 static void UpdateScannersAfterGC(v8::Isolate*, GCType, GCCallbackFlags);
1266 void UpdateScannersAfterGC();
1267
1268 Atomic32 id_; 1232 Atomic32 id_;
1269 EntryStackItem* entry_stack_; 1233 EntryStackItem* entry_stack_;
1270 int stack_trace_nesting_level_; 1234 int stack_trace_nesting_level_;
1271 StringStream* incomplete_message_; 1235 StringStream* incomplete_message_;
1272 Address isolate_addresses_[kIsolateAddressCount + 1]; // NOLINT 1236 Address isolate_addresses_[kIsolateAddressCount + 1]; // NOLINT
1273 Bootstrapper* bootstrapper_; 1237 Bootstrapper* bootstrapper_;
1274 RuntimeProfiler* runtime_profiler_; 1238 RuntimeProfiler* runtime_profiler_;
1275 CompilationCache* compilation_cache_; 1239 CompilationCache* compilation_cache_;
1276 Counters* counters_; 1240 Counters* counters_;
1277 CodeRange* code_range_; 1241 CodeRange* code_range_;
1278 RecursiveMutex break_access_; 1242 RecursiveMutex break_access_;
1279 Atomic32 debugger_initialized_; 1243 Atomic32 debugger_initialized_;
1280 RecursiveMutex debugger_access_; 1244 RecursiveMutex debugger_access_;
1281 Logger* logger_; 1245 Logger* logger_;
1282 StackGuard stack_guard_; 1246 StackGuard stack_guard_;
1283 StatsTable* stats_table_; 1247 StatsTable* stats_table_;
1284 StubCache* stub_cache_; 1248 StubCache* stub_cache_;
1285 DeoptimizerData* deoptimizer_data_; 1249 DeoptimizerData* deoptimizer_data_;
1250 MaterializedObjectStore* materialized_object_store_;
1286 ThreadLocalTop thread_local_top_; 1251 ThreadLocalTop thread_local_top_;
1287 bool capture_stack_trace_for_uncaught_exceptions_; 1252 bool capture_stack_trace_for_uncaught_exceptions_;
1288 int stack_trace_for_uncaught_exceptions_frame_limit_; 1253 int stack_trace_for_uncaught_exceptions_frame_limit_;
1289 StackTrace::StackTraceOptions stack_trace_for_uncaught_exceptions_options_; 1254 StackTrace::StackTraceOptions stack_trace_for_uncaught_exceptions_options_;
1290 MemoryAllocator* memory_allocator_; 1255 MemoryAllocator* memory_allocator_;
1291 KeyedLookupCache* keyed_lookup_cache_; 1256 KeyedLookupCache* keyed_lookup_cache_;
1292 ContextSlotCache* context_slot_cache_; 1257 ContextSlotCache* context_slot_cache_;
1293 DescriptorLookupCache* descriptor_lookup_cache_; 1258 DescriptorLookupCache* descriptor_lookup_cache_;
1294 HandleScopeData handle_scope_data_; 1259 HandleScopeData handle_scope_data_;
1295 HandleScopeImplementer* handle_scope_implementer_; 1260 HandleScopeImplementer* handle_scope_implementer_;
(...skipping 29 matching lines...) Expand all
1325 // True if we are using the Crankshaft optimizing compiler. 1290 // True if we are using the Crankshaft optimizing compiler.
1326 bool use_crankshaft_; 1291 bool use_crankshaft_;
1327 1292
1328 // True if this isolate was initialized from a snapshot. 1293 // True if this isolate was initialized from a snapshot.
1329 bool initialized_from_snapshot_; 1294 bool initialized_from_snapshot_;
1330 1295
1331 // Time stamp at initialization. 1296 // Time stamp at initialization.
1332 double time_millis_at_init_; 1297 double time_millis_at_init_;
1333 1298
1334 #if V8_TARGET_ARCH_ARM && !defined(__arm__) || \ 1299 #if V8_TARGET_ARCH_ARM && !defined(__arm__) || \
1300 V8_TARGET_ARCH_A64 && !defined(__aarch64__) || \
1335 V8_TARGET_ARCH_MIPS && !defined(__mips__) 1301 V8_TARGET_ARCH_MIPS && !defined(__mips__)
1336 bool simulator_initialized_; 1302 bool simulator_initialized_;
1337 HashMap* simulator_i_cache_; 1303 HashMap* simulator_i_cache_;
1338 Redirection* simulator_redirection_; 1304 Redirection* simulator_redirection_;
1339 #endif 1305 #endif
1340 1306
1341 #ifdef DEBUG 1307 #ifdef DEBUG
1342 // A static array of histogram info for each type. 1308 // A static array of histogram info for each type.
1343 HistogramInfo heap_histograms_[LAST_TYPE + 1]; 1309 HistogramInfo heap_histograms_[LAST_TYPE + 1];
1344 JSObject::SpillInformation js_spill_information_; 1310 JSObject::SpillInformation js_spill_information_;
(...skipping 34 matching lines...) Expand 10 before | Expand all | Expand 10 after
1379 SweeperThread** sweeper_thread_; 1345 SweeperThread** sweeper_thread_;
1380 int num_sweeper_threads_; 1346 int num_sweeper_threads_;
1381 1347
1382 // TODO(yangguo): This will become obsolete once ResourceConstraints 1348 // TODO(yangguo): This will become obsolete once ResourceConstraints
1383 // becomes an argument to Isolate constructor. 1349 // becomes an argument to Isolate constructor.
1384 int max_available_threads_; 1350 int max_available_threads_;
1385 1351
1386 // Counts deopt points if deopt_every_n_times is enabled. 1352 // Counts deopt points if deopt_every_n_times is enabled.
1387 unsigned int stress_deopt_count_; 1353 unsigned int stress_deopt_count_;
1388 1354
1389 // Stores information about the ScannerBase objects currently alive, so that 1355 LexerGCHandler* lexer_gc_handler_;
1390 // we can update the raw string pointers they hold after GC. 1356 int next_optimization_id_;
1391 std::set<ScannerBase*> scanners_;
1392 1357
1393 friend class ExecutionAccess; 1358 friend class ExecutionAccess;
1394 friend class HandleScopeImplementer; 1359 friend class HandleScopeImplementer;
1395 friend class IsolateInitializer; 1360 friend class IsolateInitializer;
1396 friend class OptimizingCompilerThread; 1361 friend class OptimizingCompilerThread;
1397 friend class SweeperThread; 1362 friend class SweeperThread;
1398 friend class ThreadManager; 1363 friend class ThreadManager;
1399 friend class Simulator; 1364 friend class Simulator;
1400 friend class StackGuard; 1365 friend class StackGuard;
1401 friend class ThreadId; 1366 friend class ThreadId;
1402 friend class TestMemoryAllocatorScope; 1367 friend class TestMemoryAllocatorScope;
1403 friend class TestCodeRangeScope; 1368 friend class TestCodeRangeScope;
1404 friend class v8::Isolate; 1369 friend class v8::Isolate;
1405 friend class v8::Locker; 1370 friend class v8::Locker;
1406 friend class v8::Unlocker; 1371 friend class v8::Unlocker;
1407 1372
1408 DISALLOW_COPY_AND_ASSIGN(Isolate); 1373 DISALLOW_COPY_AND_ASSIGN(Isolate);
1409 }; 1374 };
1410 1375
1411 1376
1377 #undef FIELD_ACCESSOR
1378 #undef THREAD_LOCAL_TOP_ACCESSOR
1379
1380
1412 // If the GCC version is 4.1.x or 4.2.x an additional field is added to the 1381 // If the GCC version is 4.1.x or 4.2.x an additional field is added to the
1413 // class as a work around for a bug in the generated code found with these 1382 // class as a work around for a bug in the generated code found with these
1414 // versions of GCC. See V8 issue 122 for details. 1383 // versions of GCC. See V8 issue 122 for details.
1415 class SaveContext BASE_EMBEDDED { 1384 class SaveContext BASE_EMBEDDED {
1416 public: 1385 public:
1417 inline explicit SaveContext(Isolate* isolate); 1386 inline explicit SaveContext(Isolate* isolate);
1418 1387
1419 ~SaveContext() { 1388 ~SaveContext() {
1420 isolate_->set_context(context_.is_null() ? NULL : *context_); 1389 isolate_->set_context(context_.is_null() ? NULL : *context_);
1421 isolate_->set_save_context(prev_); 1390 isolate_->set_save_context(prev_);
(...skipping 68 matching lines...) Expand 10 before | Expand all | Expand 10 after
1490 }; 1459 };
1491 1460
1492 1461
1493 // Support for temporarily postponing interrupts. When the outermost 1462 // Support for temporarily postponing interrupts. When the outermost
1494 // postpone scope is left the interrupts will be re-enabled and any 1463 // postpone scope is left the interrupts will be re-enabled and any
1495 // interrupts that occurred while in the scope will be taken into 1464 // interrupts that occurred while in the scope will be taken into
1496 // account. 1465 // account.
1497 class PostponeInterruptsScope BASE_EMBEDDED { 1466 class PostponeInterruptsScope BASE_EMBEDDED {
1498 public: 1467 public:
1499 explicit PostponeInterruptsScope(Isolate* isolate) 1468 explicit PostponeInterruptsScope(Isolate* isolate)
1500 : stack_guard_(isolate->stack_guard()) { 1469 : stack_guard_(isolate->stack_guard()), isolate_(isolate) {
1470 ExecutionAccess access(isolate_);
1501 stack_guard_->thread_local_.postpone_interrupts_nesting_++; 1471 stack_guard_->thread_local_.postpone_interrupts_nesting_++;
1502 stack_guard_->DisableInterrupts(); 1472 stack_guard_->DisableInterrupts();
1503 } 1473 }
1504 1474
1505 ~PostponeInterruptsScope() { 1475 ~PostponeInterruptsScope() {
1476 ExecutionAccess access(isolate_);
1506 if (--stack_guard_->thread_local_.postpone_interrupts_nesting_ == 0) { 1477 if (--stack_guard_->thread_local_.postpone_interrupts_nesting_ == 0) {
1507 stack_guard_->EnableInterrupts(); 1478 stack_guard_->EnableInterrupts();
1508 } 1479 }
1509 } 1480 }
1510 private: 1481 private:
1511 StackGuard* stack_guard_; 1482 StackGuard* stack_guard_;
1483 Isolate* isolate_;
1512 }; 1484 };
1513 1485
1514 1486
1515 // Tells whether the native context is marked with out of memory. 1487 // Tells whether the native context is marked with out of memory.
1516 inline bool Context::has_out_of_memory() { 1488 inline bool Context::has_out_of_memory() {
1517 return native_context()->out_of_memory()->IsTrue(); 1489 return native_context()->out_of_memory()->IsTrue();
1518 } 1490 }
1519 1491
1520 1492
1521 // Mark the native context with out of memory. 1493 // Mark the native context with out of memory.
(...skipping 65 matching lines...) Expand 10 before | Expand all | Expand 10 after
1587 } 1559 }
1588 1560
1589 EmbeddedVector<char, 128> filename_; 1561 EmbeddedVector<char, 128> filename_;
1590 FILE* file_; 1562 FILE* file_;
1591 int scope_depth_; 1563 int scope_depth_;
1592 }; 1564 };
1593 1565
1594 } } // namespace v8::internal 1566 } } // namespace v8::internal
1595 1567
1596 #endif // V8_ISOLATE_H_ 1568 #endif // V8_ISOLATE_H_
OLDNEW
« no previous file with comments | « src/interface.cc ('k') | src/isolate.cc » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698