OLD | NEW |
---|---|
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 341 matching lines...) | |
352 instr->hydrogen_value()->id(), | 352 instr->hydrogen_value()->id(), |
353 instr->Mnemonic()); | 353 instr->Mnemonic()); |
354 } | 354 } |
355 | 355 |
356 if (!CpuFeatures::IsSupported(SSE2)) FlushX87StackIfNecessary(instr); | 356 if (!CpuFeatures::IsSupported(SSE2)) FlushX87StackIfNecessary(instr); |
357 | 357 |
358 RecordAndUpdatePosition(instr->position()); | 358 RecordAndUpdatePosition(instr->position()); |
359 | 359 |
360 instr->CompileToNative(this); | 360 instr->CompileToNative(this); |
361 | 361 |
362 if (!CpuFeatures::IsSupported(SSE2) && | 362 if (!CpuFeatures::IsSupported(SSE2)) { |
363 FLAG_debug_code && FLAG_enable_slow_asserts) { | 363 if (instr->IsGoto()) { |
364 x87_stack_.LeavingBlock(current_block_, LGoto::cast(instr)); | |
365 } else if (FLAG_debug_code && FLAG_enable_slow_asserts && | |
366 !instr->IsGap() && !instr->IsReturn()) { | |
364 __ VerifyX87StackDepth(x87_stack_.depth()); | 367 __ VerifyX87StackDepth(x87_stack_.depth()); |
368 } | |
365 } | 369 } |
366 } | 370 } |
367 EnsureSpaceForLazyDeopt(); | 371 EnsureSpaceForLazyDeopt(); |
368 return !is_aborted(); | 372 return !is_aborted(); |
369 } | 373 } |
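A minimal sketch of the non-SSE2 per-instruction bookkeeping introduced in the hunk above, using hypothetical names (AfterInstruction, InstrKind, VerifyDepth) rather than LCodeGen's real interface: a goto hands the block-exit decision to the x87 stack tracker, while every other instruction except gaps and returns only gets the slow-assert depth check.

enum class InstrKind { kGoto, kGap, kReturn, kOther };

// Called after an instruction has been compiled on non-SSE2 hardware.
template <typename Stack>
void AfterInstruction(InstrKind kind, bool slow_asserts, Stack* x87_stack,
                      int current_block_id, int goto_target_id) {
  if (kind == InstrKind::kGoto) {
    // Let the tracker decide whether the pending x87 value survives the jump.
    x87_stack->LeavingBlock(current_block_id, goto_target_id);
  } else if (slow_asserts && kind != InstrKind::kGap &&
             kind != InstrKind::kReturn) {
    x87_stack->VerifyDepth();  // stands in for __ VerifyX87StackDepth(...)
  }
}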
370 | 374 |
371 | 375 |
372 bool LCodeGen::GenerateJumpTable() { | 376 bool LCodeGen::GenerateJumpTable() { |
373 Label needs_frame; | 377 Label needs_frame; |
374 if (jump_table_.length() > 0) { | 378 if (jump_table_.length() > 0) { |
(...skipping 277 matching lines...) | |
652 } | 656 } |
653 Free(reg); | 657 Free(reg); |
654 if (i < stack_depth_-1) i++; | 658 if (i < stack_depth_-1) i++; |
655 } | 659 } |
656 } | 660 } |
657 if (instr->IsReturn()) { | 661 if (instr->IsReturn()) { |
658 while (stack_depth_ > 0) { | 662 while (stack_depth_ > 0) { |
659 __ fstp(0); | 663 __ fstp(0); |
660 stack_depth_--; | 664 stack_depth_--; |
661 } | 665 } |
666 __ VerifyX87StackDepth(0); | |
662 } | 667 } |
663 } | 668 } |
664 | 669 |
670 | |
671 void LCodeGen::X87Stack::LeavingBlock(int current_block_id, LGoto* goto_instr) { | |
672 ASSERT(stack_depth_ <= 1); | |
Toon Verwaest 2013/09/11 12:28:30: Add comment as discussed. If ever used for other s
| |
673 if (current_block_id + 1 != goto_instr->block_id()) { | |
674 // If we have a value on the x87 stack on leaving a block, it must be a | |
675 // phi input. If the next block we compile is not the join block, we have | |
676 // to discard the stack state. | |
677 stack_depth_ = 0; | |
678 } | |
679 } | |
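A self-contained sketch of the block-boundary rule the new comment describes, using an illustrative X87StackModel class (not V8's actual X87Stack): at most one value, a pending phi input, may survive a block exit, and it is kept only when the block emitted next is the join block that consumes it.

#include <cassert>

class X87StackModel {
 public:
  void Push() { ++depth_; }
  int depth() const { return depth_; }

  void LeavingBlock(int current_block_id, int target_block_id) {
    assert(depth_ <= 1);  // only a single pending phi input is allowed
    if (current_block_id + 1 != target_block_id) {
      // The next block emitted is not the join block, so the value on the
      // simulated x87 stack cannot be consumed there; discard the state.
      depth_ = 0;
    }
  }

 private:
  int depth_ = 0;
};

int main() {
  X87StackModel stack;
  stack.Push();              // a phi input is left on the x87 stack
  stack.LeavingBlock(3, 4);  // falling through to the join block: kept
  assert(stack.depth() == 1);
  stack.LeavingBlock(3, 7);  // jumping elsewhere: state is discarded
  assert(stack.depth() == 0);
  return 0;
}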
680 | |
665 | 681 |
666 void LCodeGen::EmitFlushX87ForDeopt() { | 682 void LCodeGen::EmitFlushX87ForDeopt() { |
667 // The deoptimizer does not support X87 Registers. But as long as we | 683 // The deoptimizer does not support X87 Registers. But as long as we |
668 // deopt from a stub its not a problem, since we will re-materialize the | 684 // deopt from a stub its not a problem, since we will re-materialize the |
669 // original stub inputs, which can't be double registers. | 685 // original stub inputs, which can't be double registers. |
670 ASSERT(info()->IsStub()); | 686 ASSERT(info()->IsStub()); |
671 if (FLAG_debug_code && FLAG_enable_slow_asserts) { | 687 if (FLAG_debug_code && FLAG_enable_slow_asserts) { |
672 __ pushfd(); | 688 __ pushfd(); |
673 __ VerifyX87StackDepth(x87_stack_.depth()); | 689 __ VerifyX87StackDepth(x87_stack_.depth()); |
674 __ popfd(); | 690 __ popfd(); |
(...skipping 1757 matching lines...) | |
2432 } | 2448 } |
2433 | 2449 |
2434 | 2450 |
2435 void LCodeGen::EmitGoto(int block) { | 2451 void LCodeGen::EmitGoto(int block) { |
2436 if (!IsNextEmittedBlock(block)) { | 2452 if (!IsNextEmittedBlock(block)) { |
2437 __ jmp(chunk_->GetAssemblyLabel(LookupDestination(block))); | 2453 __ jmp(chunk_->GetAssemblyLabel(LookupDestination(block))); |
2438 } | 2454 } |
2439 } | 2455 } |
2440 | 2456 |
2441 | 2457 |
2458 void LCodeGen::DoClobberDoubles(LClobberDoubles* instr) { | |
2459 } | |
2460 | |
2461 | |
2442 void LCodeGen::DoGoto(LGoto* instr) { | 2462 void LCodeGen::DoGoto(LGoto* instr) { |
2443 EmitGoto(instr->block_id()); | 2463 EmitGoto(instr->block_id()); |
2444 } | 2464 } |
2445 | 2465 |
2446 | 2466 |
2447 Condition LCodeGen::TokenToCondition(Token::Value op, bool is_unsigned) { | 2467 Condition LCodeGen::TokenToCondition(Token::Value op, bool is_unsigned) { |
2448 Condition cond = no_condition; | 2468 Condition cond = no_condition; |
2449 switch (op) { | 2469 switch (op) { |
2450 case Token::EQ: | 2470 case Token::EQ: |
2451 case Token::EQ_STRICT: | 2471 case Token::EQ_STRICT: |
(...skipping 3853 matching lines...) | |
6305 FixedArray::kHeaderSize - kPointerSize)); | 6325 FixedArray::kHeaderSize - kPointerSize)); |
6306 __ bind(&done); | 6326 __ bind(&done); |
6307 } | 6327 } |
6308 | 6328 |
6309 | 6329 |
6310 #undef __ | 6330 #undef __ |
6311 | 6331 |
6312 } } // namespace v8::internal | 6332 } } // namespace v8::internal |
6313 | 6333 |
6314 #endif // V8_TARGET_ARCH_IA32 | 6334 #endif // V8_TARGET_ARCH_IA32 |