// Copyright 2010 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_DEOPTIMIZER_H_
#define V8_DEOPTIMIZER_H_

#include "v8.h"

#include "macro-assembler.h"
#include "zone-inl.h"


namespace v8 {
namespace internal {

class FrameDescription;
class TranslationIterator;
class DeoptimizingCodeListNode;


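// Describes an untagged value (a raw int32 or double) in a deoptimized
// frame, identified by its offset from the top of the stack. Such values
// are recorded during deoptimization so they can later be materialized as
// heap numbers in the frame (see Deoptimizer::InsertHeapNumberValues).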
class ValueDescription BASE_EMBEDDED {
 public:
  explicit ValueDescription(int index) : stack_index_(index) { }
  int stack_index() const { return stack_index_; }

 private:
  // Offset relative to the top of the stack.
  int stack_index_;
};


class ValueDescriptionInteger32: public ValueDescription {
 public:
  ValueDescriptionInteger32(int index, int32_t value)
      : ValueDescription(index), int32_value_(value) { }
  int32_t int32_value() const { return int32_value_; }

 private:
  // Raw value.
  int32_t int32_value_;
};


class ValueDescriptionDouble: public ValueDescription {
 public:
  ValueDescriptionDouble(int index, double value)
      : ValueDescription(index), double_value_(value) { }
  double double_value() const { return double_value_; }

 private:
  // Raw value.
  double double_value_;
};


class OptimizedFunctionVisitor BASE_EMBEDDED {
 public:
  virtual ~OptimizedFunctionVisitor() {}

  // Called before iterating over the optimized functions of a given
  // global context.
  virtual void EnterContext(Context* context) = 0;

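  // Called for each optimized function in the context currently being
  // iterated.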
  virtual void VisitFunction(JSFunction* function) = 0;

  // Called after iterating over all optimized functions of a given
  // global context.
  virtual void LeaveContext(Context* context) = 0;
};


class Deoptimizer;


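// Per-isolate state for the deoptimizer: the code chunks holding the
// deoptimization entry points for eager and lazy bailouts, the Deoptimizer
// instance currently being processed (if any), and the list of code
// objects that have been deoptimized but are still referenced from the
// stack.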
class DeoptimizerData {
 public:
  DeoptimizerData();
  ~DeoptimizerData();

 private:
  LargeObjectChunk* eager_deoptimization_entry_code_;
  LargeObjectChunk* lazy_deoptimization_entry_code_;
  Deoptimizer* current_;

  // List of deoptimized code objects that still have references from
  // active stack frames. The deoptimizer needs these code objects when
  // deoptimizing a frame whose function's code object has been changed
  // since the deoptimization was performed.
  DeoptimizingCodeListNode* deoptimizing_code_list_;

  friend class Deoptimizer;

  DISALLOW_COPY_AND_ASSIGN(DeoptimizerData);
};


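// Orchestrates the deoptimization of a single optimized frame: it reads
// the translation recorded for the bailout point, computes the unoptimized
// output frame(s) from the optimized input frame, and owns the statics
// used to generate and look up the deoptimization entry code.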
class Deoptimizer : public Malloced {
 public:
  enum BailoutType {
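    // EAGER: deoptimization happens at the point in the optimized code
    // where the bailout check fails.
    // LAZY: deoptimization happens when execution returns to the
    // optimized code.
    // OSR: used for on-stack replacement, where an active unoptimized
    // frame is translated into an optimized one (see
    // DoComputeOsrOutputFrame).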
    EAGER,
    LAZY,
    OSR
  };

  int output_count() const { return output_count_; }

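  // New creates a deoptimizer and stores it as the isolate's current
  // deoptimizer; Grab retrieves that current deoptimizer again, passing
  // ownership to the caller.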
  static Deoptimizer* New(JSFunction* function,
                          BailoutType type,
                          unsigned bailout_id,
                          Address from,
                          int fp_to_sp_delta,
                          Isolate* isolate);
  static Deoptimizer* Grab(Isolate* isolate);

  // Deoptimize the function now. Its current optimized code will never be
  // run again and any activations of the optimized code will get
  // deoptimized when execution returns.
  static void DeoptimizeFunction(JSFunction* function);

  // Deoptimize all functions in the heap.
  static void DeoptimizeAll();

  static void DeoptimizeGlobalObject(JSObject* object);

  static void VisitAllOptimizedFunctionsForContext(
      Context* context, OptimizedFunctionVisitor* visitor);

  static void VisitAllOptimizedFunctionsForGlobalObject(
      JSObject* object, OptimizedFunctionVisitor* visitor);

  static void VisitAllOptimizedFunctions(OptimizedFunctionVisitor* visitor);

  // Given the relocation info of a call to the stack check stub, patch the
  // code so as to go unconditionally to the on-stack replacement builtin
  // instead.
  static void PatchStackCheckCode(RelocInfo* rinfo, Code* replacement_code);

  // Given the relocation info of a call to the on-stack replacement
  // builtin, patch the code back to the original stack check code.
  static void RevertStackCheckCode(RelocInfo* rinfo, Code* check_code);

  ~Deoptimizer();

  void InsertHeapNumberValues(int index, JavaScriptFrame* frame);

  static void ComputeOutputFrames(Deoptimizer* deoptimizer, Isolate* isolate);

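  // Map a bailout id to the address of the corresponding deoptimization
  // entry, and an entry address back to its id; GetDeoptimizationId
  // returns kNotDeoptimizationEntry if the address is not an entry of the
  // given type.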
  static Address GetDeoptimizationEntry(int id, BailoutType type);
  static int GetDeoptimizationId(Address addr, BailoutType type);
  static unsigned GetOutputInfo(DeoptimizationOutputData* data,
                                unsigned node_id,
                                SharedFunctionInfo* shared);

  // Code generation support.
  static int input_offset() { return OFFSET_OF(Deoptimizer, input_); }
  static int output_count_offset() {
    return OFFSET_OF(Deoptimizer, output_count_);
  }
  static int output_offset() { return OFFSET_OF(Deoptimizer, output_); }

  static int GetDeoptimizedCodeCount(Isolate* isolate);

  static const int kNotDeoptimizationEntry = -1;

  // Generators for the deoptimization entry code.
  class EntryGenerator BASE_EMBEDDED {
   public:
    EntryGenerator(MacroAssembler* masm, BailoutType type)
        : masm_(masm), type_(type) { }
    virtual ~EntryGenerator() { }

    void Generate();

   protected:
    MacroAssembler* masm() const { return masm_; }
    BailoutType type() const { return type_; }

    virtual void GeneratePrologue() { }

   private:
    MacroAssembler* masm_;
    Deoptimizer::BailoutType type_;
  };

  class TableEntryGenerator : public EntryGenerator {
   public:
    TableEntryGenerator(MacroAssembler* masm, BailoutType type, int count)
        : EntryGenerator(masm, type), count_(count) { }

   protected:
    virtual void GeneratePrologue();

   private:
    int count() const { return count_; }

    int count_;
  };

 private:
  static const int kNumberOfEntries = 4096;

  Deoptimizer(Isolate* isolate,
              JSFunction* function,
              BailoutType type,
              unsigned bailout_id,
              Address from,
              int fp_to_sp_delta);
  void DeleteFrameDescriptions();

  void DoComputeOutputFrames();
  void DoComputeOsrOutputFrame();
  void DoComputeFrame(TranslationIterator* iterator, int frame_index);
  void DoTranslateCommand(TranslationIterator* iterator,
                          int frame_index,
                          unsigned output_offset);
  // Translate a command for OSR. Updates the input offset to be used for
  // the next command. Returns false if translation of the command failed
  // (e.g., a number conversion failed) and may or may not have updated the
  // input offset.
  bool DoOsrTranslateCommand(TranslationIterator* iterator,
                             int* input_offset);

  unsigned ComputeInputFrameSize() const;
  unsigned ComputeFixedSize(JSFunction* function) const;

  unsigned ComputeIncomingArgumentSize(JSFunction* function) const;
  unsigned ComputeOutgoingArgumentSize() const;

  Object* ComputeLiteral(int index) const;

  void InsertHeapNumberValue(JavaScriptFrame* frame,
                             int stack_index,
                             double val,
                             int extra_slot_count);

  void AddInteger32Value(int frame_index, int slot_index, int32_t value);
  void AddDoubleValue(int frame_index, int slot_index, double value);

  static LargeObjectChunk* CreateCode(BailoutType type);
  static void GenerateDeoptimizationEntries(
      MacroAssembler* masm, int count, BailoutType type);

  // Weak handle callback for deoptimizing code objects.
  static void HandleWeakDeoptimizedCode(
      v8::Persistent<v8::Value> obj, void* data);
  static Code* FindDeoptimizingCodeFromAddress(Address addr);
  static void RemoveDeoptimizingCode(Code* code);

  Isolate* isolate_;
  JSFunction* function_;
  Code* optimized_code_;
  unsigned bailout_id_;
  BailoutType bailout_type_;
  Address from_;
  int fp_to_sp_delta_;

  // Input frame description.
  FrameDescription* input_;
  // Number of output frames.
  int output_count_;
  // Array of output frame descriptions.
  FrameDescription** output_;

  List<ValueDescriptionInteger32>* integer32_values_;
  List<ValueDescriptionDouble>* double_values_;

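  // Size in bytes of a single entry in the generated deoptimization entry
  // table.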
  static int table_entry_size_;

  friend class FrameDescription;
  friend class DeoptimizingCodeListNode;
};


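// Describes the layout and contents of one stack frame: the input frame
// being deoptimized or an output frame being built. The frame's slot
// contents are stored directly after the object itself, which is why
// operator new takes the frame size as an extra argument.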
class FrameDescription {
 public:
  FrameDescription(uint32_t frame_size,
                   JSFunction* function);

  void* operator new(size_t size, uint32_t frame_size) {
    return malloc(size + frame_size);
  }

  void operator delete(void* description) {
    free(description);
  }

  uint32_t GetFrameSize() const { return frame_size_; }

  JSFunction* GetFunction() const { return function_; }

  unsigned GetOffsetFromSlotIndex(Deoptimizer* deoptimizer, int slot_index);

  uint32_t GetFrameSlot(unsigned offset) {
    return *GetFrameSlotPointer(offset);
  }

  double GetDoubleFrameSlot(unsigned offset) {
    return *reinterpret_cast<double*>(GetFrameSlotPointer(offset));
  }

  void SetFrameSlot(unsigned offset, uint32_t value) {
    *GetFrameSlotPointer(offset) = value;
  }

  uint32_t GetRegister(unsigned n) const {
    ASSERT(n < ARRAY_SIZE(registers_));
    return registers_[n];
  }

  double GetDoubleRegister(unsigned n) const {
    ASSERT(n < ARRAY_SIZE(double_registers_));
    return double_registers_[n];
  }

  void SetRegister(unsigned n, uint32_t value) {
    ASSERT(n < ARRAY_SIZE(registers_));
    registers_[n] = value;
  }

  void SetDoubleRegister(unsigned n, double value) {
    ASSERT(n < ARRAY_SIZE(double_registers_));
    double_registers_[n] = value;
  }

  uint32_t GetTop() const { return top_; }
  void SetTop(uint32_t top) { top_ = top; }

  uint32_t GetPc() const { return pc_; }
  void SetPc(uint32_t pc) { pc_ = pc; }

  uint32_t GetFp() const { return fp_; }
  void SetFp(uint32_t fp) { fp_ = fp; }

  Smi* GetState() const { return state_; }
  void SetState(Smi* state) { state_ = state; }

  void SetContinuation(uint32_t pc) { continuation_ = pc; }

  static int registers_offset() {
    return OFFSET_OF(FrameDescription, registers_);
  }

  static int double_registers_offset() {
    return OFFSET_OF(FrameDescription, double_registers_);
  }

  static int frame_size_offset() {
    return OFFSET_OF(FrameDescription, frame_size_);
  }

  static int pc_offset() {
    return OFFSET_OF(FrameDescription, pc_);
  }

  static int state_offset() {
    return OFFSET_OF(FrameDescription, state_);
  }

  static int continuation_offset() {
    return OFFSET_OF(FrameDescription, continuation_);
  }

  static int frame_content_offset() {
    return sizeof(FrameDescription);
  }

 private:
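  // Zap value used to mark frame slots that have not been initialized.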
  static const uint32_t kZapUint32 = 0xbeeddead;

  uint32_t frame_size_;  // Number of bytes.
  JSFunction* function_;
  uint32_t registers_[Register::kNumRegisters];
  double double_registers_[DoubleRegister::kNumAllocatableRegisters];
  uint32_t top_;
  uint32_t pc_;
  uint32_t fp_;
  Smi* state_;

  // The continuation is the PC where execution continues after
  // deoptimizing.
  uint32_t continuation_;

  uint32_t* GetFrameSlotPointer(unsigned offset) {
    ASSERT(offset < frame_size_);
    return reinterpret_cast<uint32_t*>(
        reinterpret_cast<Address>(this) + frame_content_offset() + offset);
  }
};


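// Accumulates the bytes of an encoded translation. Values are appended
// with Add() and the finished contents are copied into a ByteArray on the
// heap by CreateByteArray().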
class TranslationBuffer BASE_EMBEDDED {
 public:
  TranslationBuffer() : contents_(256) { }

  int CurrentIndex() const { return contents_.length(); }
  void Add(int32_t value);

  Handle<ByteArray> CreateByteArray();

 private:
  ZoneList<uint8_t> contents_;
};


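// Reads the values of a translation back out of a translation byte array,
// starting at the given index.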
class TranslationIterator BASE_EMBEDDED {
 public:
  TranslationIterator(ByteArray* buffer, int index)
      : buffer_(buffer), index_(index) {
    ASSERT(index >= 0 && index < buffer->length());
  }

  int32_t Next();

  bool HasNext() const { return index_ >= 0; }

  void Done() { index_ = -1; }

  void Skip(int n) {
    for (int i = 0; i < n; i++) Next();
  }

 private:
  ByteArray* buffer_;
  int index_;
};


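// Writer for translations. A translation records, opcode by opcode, how
// to reconstruct the unoptimized frame(s) for a deoptimization point from
// the registers, stack slots, and literals of the optimized frame.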
class Translation BASE_EMBEDDED {
 public:
  enum Opcode {
    BEGIN,
    FRAME,
    REGISTER,
    INT32_REGISTER,
    DOUBLE_REGISTER,
    STACK_SLOT,
    INT32_STACK_SLOT,
    DOUBLE_STACK_SLOT,
    LITERAL,
    ARGUMENTS_OBJECT,

    // A prefix indicating that the next command is a duplicate of the one
    // that follows it.
    DUPLICATE
  };

  Translation(TranslationBuffer* buffer, int frame_count)
      : buffer_(buffer),
        index_(buffer->CurrentIndex()) {
    buffer_->Add(BEGIN);
    buffer_->Add(frame_count);
  }

  int index() const { return index_; }

  // Commands.
  void BeginFrame(int node_id, int literal_id, unsigned height);
  void StoreRegister(Register reg);
  void StoreInt32Register(Register reg);
  void StoreDoubleRegister(DoubleRegister reg);
  void StoreStackSlot(int index);
  void StoreInt32StackSlot(int index);
  void StoreDoubleStackSlot(int index);
  void StoreLiteral(int literal_id);
  void StoreArgumentsObject();
  void MarkDuplicate();

  static int NumberOfOperandsFor(Opcode opcode);

#ifdef DEBUG
  static const char* StringFor(Opcode opcode);
#endif

 private:
  TranslationBuffer* buffer_;
  int index_;
};

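// Example of how a translation is typically built (an illustrative sketch;
// ast_node_id, literal_id, frame_height, reg and slot_index are
// placeholders, not names from this header):
//
//   TranslationBuffer buffer;
//   Translation translation(&buffer, 1);  // One output frame.
//   translation.BeginFrame(ast_node_id, literal_id, frame_height);
//   translation.StoreRegister(reg);               // Tagged value in a register.
//   translation.StoreInt32StackSlot(slot_index);  // Untagged int32 stack slot.
//   Handle<ByteArray> data = buffer.CreateByteArray();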

// Linked list holding deoptimizing code objects. The deoptimizing code
// objects are kept as weak handles until there are no more activations of
// them on the stack.
class DeoptimizingCodeListNode : public Malloced {
 public:
  explicit DeoptimizingCodeListNode(Code* code);
  ~DeoptimizingCodeListNode();

  DeoptimizingCodeListNode* next() const { return next_; }
  void set_next(DeoptimizingCodeListNode* next) { next_ = next; }
  Handle<Code> code() const { return code_; }

 private:
  // Global (weak) handle to the deoptimizing code object.
  Handle<Code> code_;

  // Next pointer for linked list.
  DeoptimizingCodeListNode* next_;
};


} }  // namespace v8::internal

#endif  // V8_DEOPTIMIZER_H_