| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 347 matching lines...) |
| 358 __ VerifyX87StackDepth(x87_stack_depth_); | 358 __ VerifyX87StackDepth(x87_stack_depth_); |
| 359 } | 359 } |
| 360 } | 360 } |
| 361 } | 361 } |
| 362 EnsureSpaceForLazyDeopt(); | 362 EnsureSpaceForLazyDeopt(); |
| 363 return !is_aborted(); | 363 return !is_aborted(); |
| 364 } | 364 } |
| 365 | 365 |
| 366 | 366 |
| 367 bool LCodeGen::GenerateJumpTable() { | 367 bool LCodeGen::GenerateJumpTable() { |
| 368 Label needs_frame_not_call; | 368 Label needs_frame; |
| 369 Label needs_frame_is_call; | |
| 370 if (jump_table_.length() > 0) { | 369 if (jump_table_.length() > 0) { |
| 371 Comment(";;; -------------------- Jump table --------------------"); | 370 Comment(";;; -------------------- Jump table --------------------"); |
| 372 } | 371 } |
| 373 for (int i = 0; i < jump_table_.length(); i++) { | 372 for (int i = 0; i < jump_table_.length(); i++) { |
| 374 __ bind(&jump_table_[i].label); | 373 __ bind(&jump_table_[i].label); |
| 375 Address entry = jump_table_[i].address; | 374 Address entry = jump_table_[i].address; |
| 376 Deoptimizer::BailoutType type = jump_table_[i].bailout_type; | 375 Deoptimizer::BailoutType type = jump_table_[i].bailout_type; |
| 377 int id = Deoptimizer::GetDeoptimizationId(isolate(), entry, type); | 376 int id = Deoptimizer::GetDeoptimizationId(isolate(), entry, type); |
| 378 if (id == Deoptimizer::kNotDeoptimizationEntry) { | 377 if (id == Deoptimizer::kNotDeoptimizationEntry) { |
| 379 Comment(";;; jump table entry %d.", i); | 378 Comment(";;; jump table entry %d.", i); |
| 380 } else { | 379 } else { |
| 381 Comment(";;; jump table entry %d: deoptimization bailout %d.", i, id); | 380 Comment(";;; jump table entry %d: deoptimization bailout %d.", i, id); |
| 382 } | 381 } |
| 383 if (jump_table_[i].needs_frame) { | 382 if (jump_table_[i].needs_frame) { |
| 384 __ push(Immediate(ExternalReference::ForDeoptEntry(entry))); | 383 __ push(Immediate(ExternalReference::ForDeoptEntry(entry))); |
| 385 if (type == Deoptimizer::LAZY) { | 384 if (needs_frame.is_bound()) { |
| 386 if (needs_frame_is_call.is_bound()) { | 385 __ jmp(&needs_frame); |
| 387 __ jmp(&needs_frame_is_call); | |
| 388 } else { | |
| 389 __ bind(&needs_frame_is_call); | |
| 390 __ push(MemOperand(ebp, StandardFrameConstants::kContextOffset)); | |
| 391 // This variant of deopt can only be used with stubs. Since we don't | |
| 392 // have a function pointer to install in the stack frame that we're | |
| 393 // building, install a special marker there instead. | |
| 394 ASSERT(info()->IsStub()); | |
| 395 __ push(Immediate(Smi::FromInt(StackFrame::STUB))); | |
| 396 // Push a PC inside the function so that the deopt code can find where | |
| 397 // the deopt comes from. It doesn't have to be the precise return | |
| 398 // address of a "calling" LAZY deopt, it only has to be somewhere | |
| 399 // inside the code body. | |
| 400 Label push_approx_pc; | |
| 401 __ call(&push_approx_pc); | |
| 402 __ bind(&push_approx_pc); | |
| 403 // Push the continuation which was stashed where the ebp should | |
| 404 // be. Replace it with the saved ebp. | |
| 405 __ push(MemOperand(esp, 3 * kPointerSize)); | |
| 406 __ mov(MemOperand(esp, 4 * kPointerSize), ebp); | |
| 407 __ lea(ebp, MemOperand(esp, 4 * kPointerSize)); | |
| 408 __ ret(0); // Call the continuation without clobbering registers. | |
| 409 } | |
| 410 } else { | 386 } else { |
| 411 if (needs_frame_not_call.is_bound()) { | 387 __ bind(&needs_frame); |
| 412 __ jmp(&needs_frame_not_call); | 388 __ push(MemOperand(ebp, StandardFrameConstants::kContextOffset)); |
| 413 } else { | 389 // This variant of deopt can only be used with stubs. Since we don't |
| 414 __ bind(&needs_frame_not_call); | 390 // have a function pointer to install in the stack frame that we're |
| 415 __ push(MemOperand(ebp, StandardFrameConstants::kContextOffset)); | 391 // building, install a special marker there instead. |
| 416 // This variant of deopt can only be used with stubs. Since we don't | 392 ASSERT(info()->IsStub()); |
| 417 // have a function pointer to install in the stack frame that we're | 393 __ push(Immediate(Smi::FromInt(StackFrame::STUB))); |
| 418 // building, install a special marker there instead. | 394 // Push a PC inside the function so that the deopt code can find where |
| 419 ASSERT(info()->IsStub()); | 395 // the deopt comes from. It doesn't have to be the precise return |
| 420 __ push(Immediate(Smi::FromInt(StackFrame::STUB))); | 396 // address of a "calling" LAZY deopt, it only has to be somewhere |
| 421 // Push the continuation which was stashed where the ebp should | 398 Label push_approx_pc; |
| 422 // be. Replace it with the saved ebp. | 398 Label push_approx_pc; |
| 423 __ push(MemOperand(esp, 2 * kPointerSize)); | 399 __ call(&push_approx_pc); |
| 424 __ mov(MemOperand(esp, 3 * kPointerSize), ebp); | 400 __ bind(&push_approx_pc); |
| 425 __ lea(ebp, MemOperand(esp, 3 * kPointerSize)); | 401 // Push the continuation which was stashed where the ebp should |
| 426 __ ret(0); // Call the continuation without clobbering registers. | 402 // be. Replace it with the saved ebp. |
| 427 } | 403 __ push(MemOperand(esp, 3 * kPointerSize)); |
| 404 __ mov(MemOperand(esp, 4 * kPointerSize), ebp); |
| 405 __ lea(ebp, MemOperand(esp, 4 * kPointerSize)); |
| 406 __ ret(0); // Call the continuation without clobbering registers. |
| 428 } | 407 } |
| 429 } else { | 408 } else { |
| 430 if (type == Deoptimizer::LAZY) { | 409 __ call(entry, RelocInfo::RUNTIME_ENTRY); |
| 431 __ call(entry, RelocInfo::RUNTIME_ENTRY); | |
| 432 } else { | |
| 433 __ jmp(entry, RelocInfo::RUNTIME_ENTRY); | |
| 434 } | |
| 435 } | 410 } |
| 436 } | 411 } |
| 437 return !is_aborted(); | 412 return !is_aborted(); |
| 438 } | 413 } |
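The needs_frame path above is easiest to follow as a stack shuffle: four words get pushed (the deopt entry, the context, the STUB marker, and an approximate pc left by the call), the entry is then duplicated on top, the original entry slot is overwritten with the caller's ebp, ebp is pointed at that slot, and the ret consumes the duplicate. Below is a minimal hand simulation of that sequence, assuming one array slot per pointer-sized word; the tag values and the array-as-stack setup are invented for illustration and are not V8 code.

```cpp
#include <cstdio>

int main() {
  const int kP = 1;                // one array slot per pointer-sized word
  long stack[16] = {0};
  int esp = 16;                    // the stack grows toward lower indices
  const long caller_ebp = 0xCA11;  // tag standing in for the caller's ebp

  enum { kEntry = 1, kContext = 2, kStubMarker = 3, kApproxPc = 4 };

  stack[--esp] = kEntry;       // push Immediate(ForDeoptEntry(entry))
  stack[--esp] = kContext;     // push MemOperand(ebp, kContextOffset)
  stack[--esp] = kStubMarker;  // push Immediate(Smi::FromInt(StackFrame::STUB))
  stack[--esp] = kApproxPc;    // the call to push_approx_pc pushes a pc

  long dup = stack[esp + 3 * kP];    // push MemOperand(esp, 3 * kPointerSize)
  stack[--esp] = dup;                // duplicates the deopt entry on top
  stack[esp + 4 * kP] = caller_ebp;  // mov MemOperand(esp, 4 * kP), ebp
  const int ebp = esp + 4 * kP;      // lea ebp, MemOperand(esp, 4 * kP)
  long target = stack[esp];          // ret pops the duplicate and
  esp++;                             // jumps to the deopt entry

  std::printf("ret target   : tag %ld (the deopt entry)\n", target);
  std::printf("[ebp + 0*kP] : %lx (caller's ebp)\n", stack[ebp]);
  std::printf("[ebp - 1*kP] : tag %ld (context)\n", stack[ebp - 1 * kP]);
  std::printf("[ebp - 2*kP] : tag %ld (STUB marker)\n", stack[ebp - 2 * kP]);
  std::printf("[ebp - 3*kP] : tag %ld (approx pc)\n", stack[ebp - 3 * kP]);
  return 0;
}
```

The printout shows the point of the shuffle: after the ret, the context and STUB marker sit at the ebp-relative slots the code treats as a standard stub frame, and the approximate pc is inside the frame where the deopt code can find it.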
| 439 | 414 |
| 440 | 415 |
| 441 bool LCodeGen::GenerateDeferredCode() { | 416 bool LCodeGen::GenerateDeferredCode() { |
| 442 ASSERT(is_generating()); | 417 ASSERT(is_generating()); |
| 443 if (deferred_.length() > 0) { | 418 if (deferred_.length() > 0) { |
| 444 for (int i = 0; !is_aborted() && i < deferred_.length(); i++) { | 419 for (int i = 0; !is_aborted() && i < deferred_.length(); i++) { |
| (...skipping 433 matching lines...) |
| 878 Label done; | 853 Label done; |
| 879 if (cc != no_condition) { | 854 if (cc != no_condition) { |
| 880 __ j(NegateCondition(cc), &done, Label::kNear); | 855 __ j(NegateCondition(cc), &done, Label::kNear); |
| 881 } | 856 } |
| 882 __ int3(); | 857 __ int3(); |
| 883 __ bind(&done); | 858 __ bind(&done); |
| 884 } | 859 } |
| 885 | 860 |
| 886 ASSERT(info()->IsStub() || frame_is_built_); | 861 ASSERT(info()->IsStub() || frame_is_built_); |
| 887 if (cc == no_condition && frame_is_built_) { | 862 if (cc == no_condition && frame_is_built_) { |
| 888 if (bailout_type == Deoptimizer::LAZY) { | 863 __ call(entry, RelocInfo::RUNTIME_ENTRY); |
| 889 __ call(entry, RelocInfo::RUNTIME_ENTRY); | |
| 890 } else { | |
| 891 __ jmp(entry, RelocInfo::RUNTIME_ENTRY); | |
| 892 } | |
| 893 } else { | 864 } else { |
| 894 // We often have several deopts to the same entry, reuse the last | 865 // We often have several deopts to the same entry, reuse the last |
| 895 // jump entry if this is the case. | 866 // jump entry if this is the case. |
| 896 if (jump_table_.is_empty() || | 867 if (jump_table_.is_empty() || |
| 897 jump_table_.last().address != entry || | 868 jump_table_.last().address != entry || |
| 898 jump_table_.last().needs_frame != !frame_is_built_ || | 869 jump_table_.last().needs_frame != !frame_is_built_ || |
| 899 jump_table_.last().bailout_type != bailout_type) { | 870 jump_table_.last().bailout_type != bailout_type) { |
| 900 Deoptimizer::JumpTableEntry table_entry(entry, | 871 Deoptimizer::JumpTableEntry table_entry(entry, |
| 901 bailout_type, | 872 bailout_type, |
| 902 !frame_is_built_); | 873 !frame_is_built_); |
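Two separate simplifications meet in this hunk. First, the LAZY/eager split disappears: both the direct path and the table-driven path now reach the deopt entry with a call rather than choosing between call and jmp by bailout type. Second, consecutive deopts to the same target share one jump-table entry via the last-entry comparison. A self-contained restatement of that reuse check is below; `EntryFor` and the stand-in types (`Address` as a raw pointer, a local `BailoutType`) are invented for the sketch and are not V8's declarations.

```cpp
#include <cassert>
#include <vector>

// Stand-ins for V8's Address, Deoptimizer::BailoutType and JumpTableEntry.
using Address = const void*;
enum class BailoutType { EAGER, LAZY, SOFT };

struct JumpTableEntry {
  Address address;
  BailoutType bailout_type;
  bool needs_frame;
};

// Mirrors the condition in the hunk: append a new entry only when the last
// one differs in target, frame requirement, or bailout type, so runs of
// identical deopts -- the common case the comment mentions -- share a slot.
size_t EntryFor(std::vector<JumpTableEntry>& table, Address address,
                BailoutType type, bool needs_frame) {
  if (table.empty() || table.back().address != address ||
      table.back().needs_frame != needs_frame ||
      table.back().bailout_type != type) {
    table.push_back({address, type, needs_frame});
  }
  return table.size() - 1;
}

int main() {
  std::vector<JumpTableEntry> table;
  int dummy = 0;
  size_t a = EntryFor(table, &dummy, BailoutType::EAGER, true);
  size_t b = EntryFor(table, &dummy, BailoutType::EAGER, true);  // reused
  size_t c = EntryFor(table, &dummy, BailoutType::LAZY, true);   // new slot
  assert(a == b && c == a + 1 && table.size() == 2);
  return 0;
}
```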
| (...skipping 5638 matching lines...) |
| 6541 FixedArray::kHeaderSize - kPointerSize)); | 6512 FixedArray::kHeaderSize - kPointerSize)); |
| 6542 __ bind(&done); | 6513 __ bind(&done); |
| 6543 } | 6514 } |
| 6544 | 6515 |
| 6545 | 6516 |
| 6546 #undef __ | 6517 #undef __ |
| 6547 | 6518 |
| 6548 } } // namespace v8::internal | 6519 } } // namespace v8::internal |
| 6549 | 6520 |
| 6550 #endif // V8_TARGET_ARCH_IA32 | 6521 #endif // V8_TARGET_ARCH_IA32 |