| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 263 matching lines...) |
| 274 return !is_aborted(); | 274 return !is_aborted(); |
| 275 } | 275 } |
| 276 | 276 |
| 277 | 277 |
| 278 bool LCodeGen::GenerateJumpTable() { | 278 bool LCodeGen::GenerateJumpTable() { |
| 279 Label needs_frame_not_call; | 279 Label needs_frame_not_call; |
| 280 Label needs_frame_is_call; | 280 Label needs_frame_is_call; |
| 281 for (int i = 0; i < jump_table_.length(); i++) { | 281 for (int i = 0; i < jump_table_.length(); i++) { |
| 282 __ bind(&jump_table_[i].label); | 282 __ bind(&jump_table_[i].label); |
| 283 Address entry = jump_table_[i].address; | 283 Address entry = jump_table_[i].address; |
| 284 bool is_lazy_deopt = jump_table_[i].is_lazy_deopt; |
| 285 Deoptimizer::BailoutType type = |
| 286 is_lazy_deopt ? Deoptimizer::LAZY : Deoptimizer::EAGER; |
| 287 int id = Deoptimizer::GetDeoptimizationId(entry, type); |
| 288 if (id == Deoptimizer::kNotDeoptimizationEntry) { |
| 289 Comment(";;; jump table entry %d.", i); |
| 290 } else { |
| 291 Comment(";;; jump table entry %d: deoptimization bailout %d.", i, id); |
| 292 } |
| 284 if (jump_table_[i].needs_frame) { | 293 if (jump_table_[i].needs_frame) { |
| 285 __ movq(kScratchRegister, ExternalReference::ForDeoptEntry(entry)); | 294 __ movq(kScratchRegister, ExternalReference::ForDeoptEntry(entry)); |
| 286 if (jump_table_[i].is_lazy_deopt) { | 295 if (is_lazy_deopt) { |
| 287 if (needs_frame_is_call.is_bound()) { | 296 if (needs_frame_is_call.is_bound()) { |
| 288 __ jmp(&needs_frame_is_call); | 297 __ jmp(&needs_frame_is_call); |
| 289 } else { | 298 } else { |
| 290 __ bind(&needs_frame_is_call); | 299 __ bind(&needs_frame_is_call); |
| 291 __ push(rbp); | 300 __ push(rbp); |
| 292 __ movq(rbp, rsp); | 301 __ movq(rbp, rsp); |
| 293 __ push(rsi); | 302 __ push(rsi); |
| 294 // This variant of deopt can only be used with stubs. Since we don't | 303 // This variant of deopt can only be used with stubs. Since we don't |
| 295 // have a function pointer to install in the stack frame that we're | 304 // have a function pointer to install in the stack frame that we're |
| 296 // building, install a special marker there instead. | 305 // building, install a special marker there instead. |
| (...skipping 15 matching lines...) |
| 312 // have a function pointer to install in the stack frame that we're | 321 // have a function pointer to install in the stack frame that we're |
| 313 // building, install a special marker there instead. | 322 // building, install a special marker there instead. |
| 314 ASSERT(info()->IsStub()); | 323 ASSERT(info()->IsStub()); |
| 315 __ Move(rsi, Smi::FromInt(StackFrame::STUB)); | 324 __ Move(rsi, Smi::FromInt(StackFrame::STUB)); |
| 316 __ push(rsi); | 325 __ push(rsi); |
| 317 __ movq(rsi, MemOperand(rsp, kPointerSize)); | 326 __ movq(rsi, MemOperand(rsp, kPointerSize)); |
| 318 __ jmp(kScratchRegister); | 327 __ jmp(kScratchRegister); |
| 319 } | 328 } |
| 320 } | 329 } |
| 321 } else { | 330 } else { |
| 322 if (jump_table_[i].is_lazy_deopt) { | 331 if (is_lazy_deopt) { |
| 323 __ Call(entry, RelocInfo::RUNTIME_ENTRY); | 332 __ Call(entry, RelocInfo::RUNTIME_ENTRY); |
| 324 } else { | 333 } else { |
| 325 __ Jump(entry, RelocInfo::RUNTIME_ENTRY); | 334 __ Jump(entry, RelocInfo::RUNTIME_ENTRY); |
| 326 } | 335 } |
| 327 } | 336 } |
| 328 } | 337 } |
| 329 return !is_aborted(); | 338 return !is_aborted(); |
| 330 } | 339 } |
| 331 | 340 |
| 332 | 341 |
| (...skipping 5234 matching lines...) |
| 5567 FixedArray::kHeaderSize - kPointerSize)); | 5576 FixedArray::kHeaderSize - kPointerSize)); |
| 5568 __ bind(&done); | 5577 __ bind(&done); |
| 5569 } | 5578 } |
| 5570 | 5579 |
| 5571 | 5580 |
| 5572 #undef __ | 5581 #undef __ |
| 5573 | 5582 |
| 5574 } } // namespace v8::internal | 5583 } } // namespace v8::internal |
| 5575 | 5584 |
| 5576 #endif // V8_TARGET_ARCH_X64 | 5585 #endif // V8_TARGET_ARCH_X64 |
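For reviewers who want the control flow of the patched GenerateJumpTable() in one place, the standalone C++ sketch below models the per-entry branching this change introduces: look up a deoptimization id for the jump-table comment, then either build a stub frame first or call/jump to the entry directly, with lazy deopts using a call and eager deopts a jump. The types and helpers here (JumpTableEntry, LookupDeoptId, printf "emission") are simplified stand-ins for illustration only, not V8 API; the real code emits x64 machine code through the macro assembler.

  // Standalone sketch of the per-entry decision logic added in this patch.
  // All types and helpers below are illustrative placeholders, not V8 API.
  #include <cstdio>
  #include <vector>

  struct JumpTableEntry {
    const void* address;   // deoptimization entry address
    bool is_lazy_deopt;    // LAZY vs. EAGER bailout
    bool needs_frame;      // stub deopts must build a frame before jumping
  };

  // Placeholder for Deoptimizer::GetDeoptimizationId(); returns -1 when the
  // address is not a known deoptimization entry (kNotDeoptimizationEntry).
  int LookupDeoptId(const void* address, bool lazy) {
    return (address != nullptr || lazy) ? 7 : -1;  // fake id for the sketch
  }

  void GenerateJumpTableSketch(const std::vector<JumpTableEntry>& table) {
    for (size_t i = 0; i < table.size(); ++i) {
      const JumpTableEntry& entry = table[i];
      int id = LookupDeoptId(entry.address, entry.is_lazy_deopt);
      // Mirrors the new Comment() calls: annotate each entry, adding the
      // bailout id when the address is a known deoptimization entry.
      if (id < 0) {
        std::printf(";;; jump table entry %zu.\n", i);
      } else {
        std::printf(";;; jump table entry %zu: deoptimization bailout %d.\n", i, id);
      }
      if (entry.needs_frame) {
        // Build a stub frame (push rbp, STUB marker, context), then transfer.
        std::printf("  build stub frame, then %s\n",
                    entry.is_lazy_deopt ? "call entry" : "jump to entry");
      } else {
        std::printf("  %s\n",
                    entry.is_lazy_deopt ? "call entry" : "jump to entry");
      }
    }
  }

  int main() {
    GenerateJumpTableSketch({{nullptr, true, false}, {nullptr, false, true}});
  }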