| OLD | NEW |
| 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file |
| 2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
| 3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
| 4 | 4 |
| 5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_ARM. | 5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_ARM. |
| 6 #if defined(TARGET_ARCH_ARM) | 6 #if defined(TARGET_ARCH_ARM) |
| 7 | 7 |
| 8 #include "vm/intermediate_language.h" | 8 #include "vm/intermediate_language.h" |
| 9 | 9 |
| 10 #include "vm/dart_entry.h" | 10 #include "vm/dart_entry.h" |
| (...skipping 2555 matching lines...) |
| 2566 return summary; | 2566 return summary; |
| 2567 } | 2567 } |
| 2568 | 2568 |
| 2569 | 2569 |
| 2570 class CheckStackOverflowSlowPath : public SlowPathCode { | 2570 class CheckStackOverflowSlowPath : public SlowPathCode { |
| 2571 public: | 2571 public: |
| 2572 explicit CheckStackOverflowSlowPath(CheckStackOverflowInstr* instruction) | 2572 explicit CheckStackOverflowSlowPath(CheckStackOverflowInstr* instruction) |
| 2573 : instruction_(instruction) { } | 2573 : instruction_(instruction) { } |
| 2574 | 2574 |
| 2575 virtual void EmitNativeCode(FlowGraphCompiler* compiler) { | 2575 virtual void EmitNativeCode(FlowGraphCompiler* compiler) { |
| 2576 if (FLAG_use_osr) { |
| 2577 uword flags_address = Isolate::Current()->stack_overflow_flags_address(); |
| 2578 Register value = instruction_->locs()->temp(0).reg(); |
| 2579 __ Comment("CheckStackOverflowSlowPathOsr"); |
| 2580 __ Bind(osr_entry_label()); |
| 2581 __ LoadImmediate(IP, flags_address); |
| 2582 __ LoadImmediate(value, Isolate::kOsrRequest); |
| 2583 __ str(value, Address(IP)); |
| 2584 } |
| 2576 __ Comment("CheckStackOverflowSlowPath"); | 2585 __ Comment("CheckStackOverflowSlowPath"); |
| 2577 __ Bind(entry_label()); | 2586 __ Bind(entry_label()); |
| 2578 compiler->SaveLiveRegisters(instruction_->locs()); | 2587 compiler->SaveLiveRegisters(instruction_->locs()); |
| 2579 // pending_deoptimization_env_ is needed to generate a runtime call that | 2588 // pending_deoptimization_env_ is needed to generate a runtime call that |
| 2580 // may throw an exception. | 2589 // may throw an exception. |
| 2581 ASSERT(compiler->pending_deoptimization_env_ == NULL); | 2590 ASSERT(compiler->pending_deoptimization_env_ == NULL); |
| 2582 Environment* env = compiler->SlowPathEnvironmentFor(instruction_); | 2591 Environment* env = compiler->SlowPathEnvironmentFor(instruction_); |
| 2583 compiler->pending_deoptimization_env_ = env; | 2592 compiler->pending_deoptimization_env_ = env; |
| 2584 compiler->GenerateRuntimeCall(instruction_->token_pos(), | 2593 compiler->GenerateRuntimeCall(instruction_->token_pos(), |
| 2585 instruction_->deopt_id(), | 2594 instruction_->deopt_id(), |
| 2586 kStackOverflowRuntimeEntry, | 2595 kStackOverflowRuntimeEntry, |
| 2587 0, | 2596 0, |
| 2588 instruction_->locs()); | 2597 instruction_->locs()); |
| 2589 | 2598 |
| 2590 if (FLAG_use_osr && !compiler->is_optimizing() && instruction_->in_loop()) { | 2599 if (FLAG_use_osr && !compiler->is_optimizing() && instruction_->in_loop()) { |
| 2591 // In unoptimized code, record loop stack checks as possible OSR entries. | 2600 // In unoptimized code, record loop stack checks as possible OSR entries. |
| 2592 compiler->AddCurrentDescriptor(PcDescriptors::kOsrEntry, | 2601 compiler->AddCurrentDescriptor(PcDescriptors::kOsrEntry, |
| 2593 instruction_->deopt_id(), | 2602 instruction_->deopt_id(), |
| 2594 0); // No token position. | 2603 0); // No token position. |
| 2595 } | 2604 } |
| 2596 compiler->pending_deoptimization_env_ = NULL; | 2605 compiler->pending_deoptimization_env_ = NULL; |
| 2597 compiler->RestoreLiveRegisters(instruction_->locs()); | 2606 compiler->RestoreLiveRegisters(instruction_->locs()); |
| 2598 __ b(exit_label()); | 2607 __ b(exit_label()); |
| 2599 } | 2608 } |
| 2600 | 2609 |
| 2610 Label* osr_entry_label() { |
| 2611 ASSERT(FLAG_use_osr); |
| 2612 return &osr_entry_label_; |
| 2613 } |
| 2614 |
| 2601 private: | 2615 private: |
| 2602 CheckStackOverflowInstr* instruction_; | 2616 CheckStackOverflowInstr* instruction_; |
| 2617 Label osr_entry_label_; |
| 2603 }; | 2618 }; |
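Context for the new slow-path rows above: when FLAG_use_osr is set, the slow path gains a dedicated OSR entry that stores Isolate::kOsrRequest into the word at Isolate::stack_overflow_flags_address() and then falls through to the ordinary stack-overflow runtime call, which is where the runtime notices the request. A minimal C++ sketch of that signaling pattern follows; only kOsrRequest and the flags word mirror this patch, the consumer function and everything else here are illustrative placeholders, not code from the CL.

#include <cstdint>
#include <iostream>

// Illustrative flag value; in the patch this is Isolate::kOsrRequest.
constexpr uintptr_t kOsrRequest = 1;

// Stands in for the word at Isolate::stack_overflow_flags_address().
static uintptr_t stack_overflow_flags = 0;

// What the generated OSR entry effectively does: a plain store of the
// request value (the emitted code is a str through IP, not an OR).
void RequestOsr() {
  stack_overflow_flags = kOsrRequest;
}

// Hypothetical runtime-side consumer: reads and clears the request when the
// stack-overflow runtime entry runs.
bool ConsumeOsrRequest() {
  bool requested = (stack_overflow_flags & kOsrRequest) != 0;
  stack_overflow_flags = 0;
  return requested;
}

int main() {
  RequestOsr();
  std::cout << "OSR requested: " << std::boolalpha << ConsumeOsrRequest() << "\n";
  std::cout << "after consume: " << ConsumeOsrRequest() << "\n";
  return 0;
}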
| 2604 | 2619 |
| 2605 | 2620 |
| 2606 void CheckStackOverflowInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 2621 void CheckStackOverflowInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
| 2607 CheckStackOverflowSlowPath* slow_path = new CheckStackOverflowSlowPath(this); | 2622 CheckStackOverflowSlowPath* slow_path = new CheckStackOverflowSlowPath(this); |
| 2608 compiler->AddSlowPathCode(slow_path); | 2623 compiler->AddSlowPathCode(slow_path); |
| 2609 | 2624 |
| 2610 __ LoadImmediate(IP, Isolate::Current()->stack_limit_address()); | 2625 __ LoadImmediate(IP, Isolate::Current()->stack_limit_address()); |
| 2611 __ ldr(IP, Address(IP)); | 2626 __ ldr(IP, Address(IP)); |
| 2612 __ cmp(SP, ShifterOperand(IP)); | 2627 __ cmp(SP, ShifterOperand(IP)); |
| 2613 __ b(slow_path->entry_label(), LS); | 2628 __ b(slow_path->entry_label(), LS); |
| 2614 if (compiler->CanOSRFunction() && in_loop()) { | 2629 if (compiler->CanOSRFunction() && in_loop()) { |
| 2615 Register temp = locs()->temp(0).reg(); | 2630 Register temp = locs()->temp(0).reg(); |
| 2616 // In unoptimized code check the usage counter to trigger OSR at loop | 2631 // In unoptimized code check the usage counter to trigger OSR at loop |
| 2617 // stack checks. Use progressively higher thresholds for more deeply | 2632 // stack checks. Use progressively higher thresholds for more deeply |
| 2618 // nested loops to attempt to hit outer loops with OSR when possible. | 2633 // nested loops to attempt to hit outer loops with OSR when possible. |
| 2619 __ LoadObject(temp, compiler->parsed_function().function()); | 2634 __ LoadObject(temp, compiler->parsed_function().function()); |
| 2620 intptr_t threshold = | 2635 intptr_t threshold = |
| 2621 FLAG_optimization_counter_threshold * (loop_depth() + 1); | 2636 FLAG_optimization_counter_threshold * (loop_depth() + 1); |
| 2622 __ ldr(temp, FieldAddress(temp, Function::usage_counter_offset())); | 2637 __ ldr(temp, FieldAddress(temp, Function::usage_counter_offset())); |
| 2623 __ CompareImmediate(temp, threshold); | 2638 __ CompareImmediate(temp, threshold); |
| 2624 __ b(slow_path->entry_label(), GE); | 2639 __ b(slow_path->osr_entry_label(), GE); |
| 2640 } |
| 2641 if (compiler->ForceSlowPathForStackOverflow()) { |
| 2642 __ b(slow_path->entry_label()); |
| 2625 } | 2643 } |
| 2626 __ Bind(slow_path->exit_label()); | 2644 __ Bind(slow_path->exit_label()); |
| 2627 } | 2645 } |
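For context on the depth-scaled trigger in the function above: the unoptimized stack check compares the function's usage counter against FLAG_optimization_counter_threshold * (loop_depth() + 1) and branches to the OSR entry label on GE, so more deeply nested loops need proportionally larger counts and outer loops tend to reach OSR first. A small sketch of just that arithmetic; the threshold constant used here is an assumed placeholder, not a value taken from this patch.

#include <cstdint>
#include <iostream>

// Assumed stand-in for FLAG_optimization_counter_threshold.
constexpr intptr_t kOptimizationCounterThreshold = 30000;

// Mirrors the threshold computed in CheckStackOverflowInstr::EmitNativeCode:
// the usage counter must reach threshold * (loop_depth + 1) before the loop's
// stack check takes the OSR entry.
intptr_t OsrThresholdForDepth(intptr_t loop_depth) {
  return kOptimizationCounterThreshold * (loop_depth + 1);
}

int main() {
  for (intptr_t depth = 0; depth < 3; ++depth) {
    std::cout << "loop_depth " << depth << " -> threshold "
              << OsrThresholdForDepth(depth) << "\n";
  }
  return 0;
}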
| 2628 | 2646 |
| 2629 | 2647 |
| 2630 static void EmitSmiShiftLeft(FlowGraphCompiler* compiler, | 2648 static void EmitSmiShiftLeft(FlowGraphCompiler* compiler, |
| 2631 BinarySmiOpInstr* shift_left) { | 2649 BinarySmiOpInstr* shift_left) { |
| 2632 const bool is_truncating = shift_left->is_truncating(); | 2650 const bool is_truncating = shift_left->is_truncating(); |
| 2633 const LocationSummary& locs = *shift_left->locs(); | 2651 const LocationSummary& locs = *shift_left->locs(); |
| 2634 Register left = locs.in(0).reg(); | 2652 Register left = locs.in(0).reg(); |
| (...skipping 3227 matching lines...) |
| 5862 compiler->GenerateCall(token_pos(), | 5880 compiler->GenerateCall(token_pos(), |
| 5863 &label, | 5881 &label, |
| 5864 PcDescriptors::kOther, | 5882 PcDescriptors::kOther, |
| 5865 locs()); | 5883 locs()); |
| 5866 __ Drop(ArgumentCount()); // Discard arguments. | 5884 __ Drop(ArgumentCount()); // Discard arguments. |
| 5867 } | 5885 } |
| 5868 | 5886 |
| 5869 } // namespace dart | 5887 } // namespace dart |
| 5870 | 5888 |
| 5871 #endif // defined TARGET_ARCH_ARM | 5889 #endif // defined TARGET_ARCH_ARM |