OLD | NEW |
---|---|
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 35 matching lines...) | |
46 class OptimizingCompilerThread : public Thread { | 46 class OptimizingCompilerThread : public Thread { |
47 public: | 47 public: |
48 explicit OptimizingCompilerThread(Isolate *isolate) : | 48 explicit OptimizingCompilerThread(Isolate *isolate) : |
49 Thread("OptimizingCompilerThread"), | 49 Thread("OptimizingCompilerThread"), |
50 #ifdef DEBUG | 50 #ifdef DEBUG |
51 thread_id_(0), | 51 thread_id_(0), |
52 #endif | 52 #endif |
53 isolate_(isolate), | 53 isolate_(isolate), |
54 stop_semaphore_(0), | 54 stop_semaphore_(0), |
55 input_queue_semaphore_(0), | 55 input_queue_semaphore_(0), |
56 osr_cursor_(0), | 56 input_queue_capacity_(FLAG_concurrent_recompilation_queue_length), |
57 input_queue_length_(0), | |
58 input_queue_shift_(0), | |
59 osr_buffer_capacity_(FLAG_concurrent_recompilation_queue_length + 4), | |
60 osr_buffer_cursor_(0), | |
57 osr_hits_(0), | 61 osr_hits_(0), |
58 osr_attempts_(0) { | 62 osr_attempts_(0) { |
59 NoBarrier_Store(&stop_thread_, static_cast<AtomicWord>(CONTINUE)); | 63 NoBarrier_Store(&stop_thread_, static_cast<AtomicWord>(CONTINUE)); |
60 NoBarrier_Store(&queue_length_, static_cast<AtomicWord>(0)); | 64 input_queue_ = NewArray<RecompileJob*>(input_queue_capacity_); |
61 if (FLAG_concurrent_osr) { | 65 osr_buffer_ = NewArray<RecompileJob*>(osr_buffer_capacity_); |
62 osr_buffer_size_ = FLAG_concurrent_recompilation_queue_length + 4; | 66 // Mark OSR buffer slots as empty. |
63 osr_buffer_ = NewArray<RecompileJob*>(osr_buffer_size_); | 67 for (int i = 0; i < osr_buffer_capacity_; i++) osr_buffer_[i] = NULL; |
64 for (int i = 0; i < osr_buffer_size_; i++) osr_buffer_[i] = NULL; | |
65 } | |
66 } | 68 } |
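
For orientation, here is a small standalone sketch (toy names, not V8 API) of the layout the new constructor sets up: both queues become fixed-capacity arrays allocated up front. The input queue tracks emptiness through input_queue_length_, while the OSR buffer has no length field, so a NULL slot is its "empty" marker, which is the condition the new destructor's CHECK_EQ(NULL, ...) verifies at teardown.

// Hedged illustration only; Job stands in for RecompileJob.
#include <cstddef>

struct Job;

struct CompilerQueues {
  explicit CompilerQueues(int queue_length)
      : input_capacity(queue_length),
        osr_capacity(queue_length + 4),
        input_length(0),                      // input queue emptiness is a count
        input(new Job*[queue_length]),
        osr(new Job*[queue_length + 4]) {
    // Mark OSR buffer slots as empty; NULL is the only "empty" indicator here.
    for (int i = 0; i < osr_capacity; i++) osr[i] = NULL;
  }

  ~CompilerQueues() {
    // By now everything must have been installed, flushed, or aged out.
    delete[] input;
    delete[] osr;
  }

  int input_capacity;
  int osr_capacity;
  int input_length;
  Job** input;
  Job** osr;
};
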
67 | 69 |
68 ~OptimizingCompilerThread() { | 70 ~OptimizingCompilerThread() { |
69 if (FLAG_concurrent_osr) DeleteArray(osr_buffer_); | 71 ASSERT_EQ(0, input_queue_length_); |
72 #ifdef DEBUG | |
73 for (int i = 0; i < osr_buffer_capacity_; i++) { | |
74 CHECK_EQ(NULL, osr_buffer_[i]); | |
75 } | |
76 #endif | |
77 DeleteArray(osr_buffer_); | |
70 } | 78 } |
71 | 79 |
72 void Run(); | 80 void Run(); |
73 void Stop(); | 81 void Stop(); |
74 void Flush(); | 82 void Flush(); |
75 void QueueForOptimization(RecompileJob* optimizing_compiler); | 83 void QueueForOptimization(RecompileJob* optimizing_compiler); |
76 void InstallOptimizedFunctions(); | 84 void InstallOptimizedFunctions(); |
77 RecompileJob* FindReadyOSRCandidate(Handle<JSFunction> function, | 85 RecompileJob* FindReadyOSRCandidate(Handle<JSFunction> function, |
78 uint32_t osr_pc_offset); | 86 uint32_t osr_pc_offset); |
79 bool IsQueuedForOSR(Handle<JSFunction> function, uint32_t osr_pc_offset); | 87 bool IsQueuedForOSR(Handle<JSFunction> function, uint32_t osr_pc_offset); |
80 | 88 |
81 bool IsQueuedForOSR(JSFunction* function); | 89 bool IsQueuedForOSR(JSFunction* function); |
82 | 90 |
83 inline bool IsQueueAvailable() { | 91 inline bool IsQueueAvailable() { |
84 // We don't need a barrier since we have a data dependency right | 92 LockGuard<Mutex> access_input_queue(&input_queue_mutex_); |
85 // after. | 93 return input_queue_length_ < input_queue_capacity_; |
86 Atomic32 current_length = NoBarrier_Load(&queue_length_); | 94 } |
87 | 95 |
88 // This can be queried only from the execution thread. | 96 inline void AgeBufferedOsrJobs() { |
89 ASSERT(!IsOptimizerThread()); | 97 // Advance cursor of the cyclic buffer to next empty slot or stale OSR job. |
90 // Since only the execution thread increments queue_length_ and | 98 // Dispose said OSR job in the latter case. Calling this on every GC |
91 // only one thread can run inside an Isolate at one time, a direct read | 99 // should make sure that we do not hold onto stale jobs indefinitely. |
92 // doesn't introduce a race -- queue_length_ may have decreased in the | 100 AddToOsrBuffer(NULL); |
93 // meantime, but not increased. | |
94 return (current_length < FLAG_concurrent_recompilation_queue_length); | |
95 } | 101 } |
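
The aging scheme is easiest to see with a sketch. AddToOsrBuffer() itself is defined in optimizing-compiler-thread.cc and is not part of this header diff, so the following is only a hedged, self-contained illustration of the behavior described above: the cursor walks the cyclic buffer to the next empty or stale slot, discards the stale job if there is one, and stores the new entry (NULL in the aging case) in that slot.

// Toy illustration; names and the staleness test are assumptions, not V8 API.
#include <cstddef>

struct Job { bool ready_but_unused; };   // stand-in for a "stale" OSR job test

const int kCapacity = 8;                 // real capacity: queue length + 4
Job* osr_buffer[kCapacity] = { NULL };   // NULL marks an empty slot
int osr_cursor = 0;

// Insert |job| at the next empty-or-stale slot, discarding whatever was there.
// Passing NULL (as AgeBufferedOsrJobs does) performs only the eviction step,
// so one call per GC gradually drains jobs that never reached their OSR entry.
void AddToOsrBuffer(Job* job) {
  // The +4 slack in the real capacity presumably guarantees that an empty or
  // stale slot exists, so this scan terminates.
  while (osr_buffer[osr_cursor] != NULL &&
         !osr_buffer[osr_cursor]->ready_but_unused) {
    osr_cursor = (osr_cursor + 1) % kCapacity;
  }
  delete osr_buffer[osr_cursor];         // evicted tasks are simply discarded
  osr_buffer[osr_cursor] = job;
  osr_cursor = (osr_cursor + 1) % kCapacity;
}
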
96 | 102 |
97 #ifdef DEBUG | 103 #ifdef DEBUG |
98 bool IsOptimizerThread(); | 104 bool IsOptimizerThread(); |
99 #endif | 105 #endif |
100 | 106 |
101 private: | 107 private: |
102 enum StopFlag { CONTINUE, STOP, FLUSH }; | 108 enum StopFlag { CONTINUE, STOP, FLUSH }; |
103 | 109 |
104 void FlushInputQueue(bool restore_function_code); | 110 void FlushInputQueue(bool restore_function_code); |
105 void FlushOutputQueue(bool restore_function_code); | 111 void FlushOutputQueue(bool restore_function_code); |
106 void FlushOsrBuffer(bool restore_function_code); | 112 void FlushOsrBuffer(bool restore_function_code); |
107 void CompileNext(); | 113 void CompileNext(); |
114 RecompileJob* NextInput(); | |
108 | 115 |
109 // Add a recompilation task for OSR to the cyclic buffer, awaiting OSR entry. | 116 // Add a recompilation task for OSR to the cyclic buffer, awaiting OSR entry. |
110 // Tasks evicted from the cyclic buffer are discarded. | 117 // Tasks evicted from the cyclic buffer are discarded. |
111 void AddToOsrBuffer(RecompileJob* compiler); | 118 void AddToOsrBuffer(RecompileJob* compiler); |
112 void AdvanceOsrCursor() { | 119 |
113 osr_cursor_ = (osr_cursor_ + 1) % osr_buffer_size_; | 120 int InputQueueIndex(int i) { |
titzer 2013/10/16 14:02:10: inline annotation?
121 int result = (i + input_queue_shift_) % input_queue_capacity_; | |
122 ASSERT_LE(0, result); | |
123 ASSERT_LT(result, input_queue_capacity_); | |
124 return result; | |
114 } | 125 } |
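
In the same vein, the shift/length pair replaces the old atomic queue_length_ counter: the input queue is now a bounded ring buffer whose logical front sits at input_queue_shift_, and InputQueueIndex() maps a logical position to a physical slot. The real producer/consumer code (QueueForOptimization() and NextInput()) lives in the .cc file and also handles the mutex, the semaphore and tracing; the following is just a hedged sketch of how the indexing works.

// Toy illustration of the circular indexing; not the actual V8 implementation.
#include <cassert>
#include <cstddef>

struct Job;

const int kCapacity = 8;      // stands in for FLAG_concurrent_recompilation_queue_length
Job* input_queue[kCapacity];
int input_queue_length = 0;   // number of queued jobs
int input_queue_shift = 0;    // physical index of the logical front

int InputQueueIndex(int i) {  // logical position -> physical slot
  return (i + input_queue_shift) % kCapacity;
}

// Producer side (main thread): append at the logical back, as
// QueueForOptimization would after IsQueueAvailable() returned true.
void Enqueue(Job* job) {
  assert(input_queue_length < kCapacity);
  input_queue[InputQueueIndex(input_queue_length)] = job;
  input_queue_length++;
}

// Consumer side (compiler thread): pop the logical front, roughly NextInput.
Job* Dequeue() {
  if (input_queue_length == 0) return NULL;
  Job* job = input_queue[InputQueueIndex(0)];
  input_queue[InputQueueIndex(0)] = NULL;   // not required, just hygiene
  input_queue_shift = InputQueueIndex(1);   // drop the old front slot
  input_queue_length--;
  return job;
}
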
115 | 126 |
116 #ifdef DEBUG | 127 #ifdef DEBUG |
117 int thread_id_; | 128 int thread_id_; |
118 Mutex thread_id_mutex_; | 129 Mutex thread_id_mutex_; |
119 #endif | 130 #endif |
120 | 131 |
121 Isolate* isolate_; | 132 Isolate* isolate_; |
122 Semaphore stop_semaphore_; | 133 Semaphore stop_semaphore_; |
123 Semaphore input_queue_semaphore_; | 134 Semaphore input_queue_semaphore_; |
124 | 135 |
125 // Queue of incoming recompilation tasks (including OSR). | 136 // Circular queue of incoming recompilation tasks (including OSR). |
126 UnboundQueue<RecompileJob*> input_queue_; | 137 RecompileJob** input_queue_; |
138 int input_queue_capacity_; | |
139 int input_queue_length_; | |
140 int input_queue_shift_; | |
141 Mutex input_queue_mutex_; | |
142 | |
127 // Queue of recompilation tasks ready to be installed (excluding OSR). | 143 // Queue of recompilation tasks ready to be installed (excluding OSR). |
128 UnboundQueue<RecompileJob*> output_queue_; | 144 UnboundQueue<RecompileJob*> output_queue_; |
145 | |
129 // Cyclic buffer of recompilation tasks for OSR. | 146 // Cyclic buffer of recompilation tasks for OSR. |
130 // TODO(yangguo): This may keep zombie tasks indefinitely, holding on to | |
131 // a lot of memory. Fix this. | |
132 RecompileJob** osr_buffer_; | 147 RecompileJob** osr_buffer_; |
133 // Cursor for the cyclic buffer. | 148 int osr_buffer_capacity_; |
134 int osr_cursor_; | 149 int osr_buffer_cursor_; |
135 int osr_buffer_size_; | |
136 | 150 |
137 volatile AtomicWord stop_thread_; | 151 volatile AtomicWord stop_thread_; |
138 volatile Atomic32 queue_length_; | |
139 TimeDelta time_spent_compiling_; | 152 TimeDelta time_spent_compiling_; |
140 TimeDelta time_spent_total_; | 153 TimeDelta time_spent_total_; |
141 | 154 |
142 int osr_hits_; | 155 int osr_hits_; |
143 int osr_attempts_; | 156 int osr_attempts_; |
144 }; | 157 }; |
145 | 158 |
146 } } // namespace v8::internal | 159 } } // namespace v8::internal |
147 | 160 |
148 #endif // V8_OPTIMIZING_COMPILER_THREAD_H_ | 161 #endif // V8_OPTIMIZING_COMPILER_THREAD_H_ |