| OLD | NEW |
| 1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 752 matching lines...) |
| 763 | 763 |
| 764 // Adjust the frame size, subsuming the unoptimized frame into the | 764 // Adjust the frame size, subsuming the unoptimized frame into the |
| 765 // optimized frame. | 765 // optimized frame. |
| 766 int slots = GetStackSlotCount() - graph()->osr()->UnoptimizedFrameSlots(); | 766 int slots = GetStackSlotCount() - graph()->osr()->UnoptimizedFrameSlots(); |
| 767 ASSERT(slots >= 0); | 767 ASSERT(slots >= 0); |
| 768 __ Claim(slots); | 768 __ Claim(slots); |
| 769 } | 769 } |
| 770 | 770 |
| 771 | 771 |
| 772 void LCodeGen::GenerateBodyInstructionPre(LInstruction* instr) { | 772 void LCodeGen::GenerateBodyInstructionPre(LInstruction* instr) { |
| 773 if (instr->IsCall()) { |
| 774 EnsureSpaceForLazyDeopt(Deoptimizer::patch_size()); |
| 775 } |
| 773 if (!instr->IsLazyBailout() && !instr->IsGap()) { | 776 if (!instr->IsLazyBailout() && !instr->IsGap()) { |
| 774 safepoints_.BumpLastLazySafepointIndex(); | 777 safepoints_.BumpLastLazySafepointIndex(); |
| 775 } | 778 } |
| 776 } | 779 } |
| 777 | 780 |
| 778 | 781 |
| 779 bool LCodeGen::GenerateDeferredCode() { | 782 bool LCodeGen::GenerateDeferredCode() { |
| 780 ASSERT(is_generating()); | 783 ASSERT(is_generating()); |
| 781 if (deferred_.length() > 0) { | 784 if (deferred_.length() > 0) { |
| 782 for (int i = 0; !is_aborted() && (i < deferred_.length()); i++) { | 785 for (int i = 0; !is_aborted() && (i < deferred_.length()); i++) { |
| (...skipping 1759 matching lines...) |
| 2542 __ Ldr(temp, FieldMemOperand(temp, Cell::kValueOffset)); | 2545 __ Ldr(temp, FieldMemOperand(temp, Cell::kValueOffset)); |
| 2543 __ Cmp(reg, temp); | 2546 __ Cmp(reg, temp); |
| 2544 } else { | 2547 } else { |
| 2545 __ Cmp(reg, Operand(object)); | 2548 __ Cmp(reg, Operand(object)); |
| 2546 } | 2549 } |
| 2547 DeoptimizeIf(ne, instr->environment()); | 2550 DeoptimizeIf(ne, instr->environment()); |
| 2548 } | 2551 } |
| 2549 | 2552 |
| 2550 | 2553 |
| 2551 void LCodeGen::DoLazyBailout(LLazyBailout* instr) { | 2554 void LCodeGen::DoLazyBailout(LLazyBailout* instr) { |
| 2552 EnsureSpaceForLazyDeopt(Deoptimizer::patch_size()); | 2555 last_lazy_deopt_pc_ = masm()->pc_offset(); |
| 2553 ASSERT(instr->HasEnvironment()); | 2556 ASSERT(instr->HasEnvironment()); |
| 2554 LEnvironment* env = instr->environment(); | 2557 LEnvironment* env = instr->environment(); |
| 2555 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); | 2558 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); |
| 2556 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); | 2559 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); |
| 2557 } | 2560 } |
| 2558 | 2561 |
| 2559 | 2562 |
| 2560 void LCodeGen::DoDateField(LDateField* instr) { | 2563 void LCodeGen::DoDateField(LDateField* instr) { |
| 2561 Register object = ToRegister(instr->date()); | 2564 Register object = ToRegister(instr->date()); |
| 2562 Register result = ToRegister(instr->result()); | 2565 Register result = ToRegister(instr->result()); |
| (...skipping 2372 matching lines...) |
| 4935 __ CompareRoot(masm()->StackPointer(), Heap::kStackLimitRootIndex); | 4938 __ CompareRoot(masm()->StackPointer(), Heap::kStackLimitRootIndex); |
| 4936 __ B(hs, &done); | 4939 __ B(hs, &done); |
| 4937 | 4940 |
| 4938 PredictableCodeSizeScope predictable(masm_, | 4941 PredictableCodeSizeScope predictable(masm_, |
| 4939 Assembler::kCallSizeWithRelocation); | 4942 Assembler::kCallSizeWithRelocation); |
| 4940 ASSERT(instr->context()->IsRegister()); | 4943 ASSERT(instr->context()->IsRegister()); |
| 4941 ASSERT(ToRegister(instr->context()).is(cp)); | 4944 ASSERT(ToRegister(instr->context()).is(cp)); |
| 4942 CallCode(isolate()->builtins()->StackCheck(), | 4945 CallCode(isolate()->builtins()->StackCheck(), |
| 4943 RelocInfo::CODE_TARGET, | 4946 RelocInfo::CODE_TARGET, |
| 4944 instr); | 4947 instr); |
| 4945 EnsureSpaceForLazyDeopt(Deoptimizer::patch_size()); | |
| 4946 | |
| 4947 __ Bind(&done); | 4948 __ Bind(&done); |
| 4948 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); | |
| 4949 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); | |
| 4950 } else { | 4949 } else { |
| 4951 ASSERT(instr->hydrogen()->is_backwards_branch()); | 4950 ASSERT(instr->hydrogen()->is_backwards_branch()); |
| 4952 // Perform stack overflow check if this goto needs it before jumping. | 4951 // Perform stack overflow check if this goto needs it before jumping. |
| 4953 DeferredStackCheck* deferred_stack_check = | 4952 DeferredStackCheck* deferred_stack_check = |
| 4954 new(zone()) DeferredStackCheck(this, instr); | 4953 new(zone()) DeferredStackCheck(this, instr); |
| 4955 __ CompareRoot(masm()->StackPointer(), Heap::kStackLimitRootIndex); | 4954 __ CompareRoot(masm()->StackPointer(), Heap::kStackLimitRootIndex); |
| 4956 __ B(lo, deferred_stack_check->entry()); | 4955 __ B(lo, deferred_stack_check->entry()); |
| 4957 | 4956 |
| 4958 EnsureSpaceForLazyDeopt(Deoptimizer::patch_size()); | 4957 EnsureSpaceForLazyDeopt(Deoptimizer::patch_size()); |
| 4959 __ Bind(instr->done_label()); | 4958 __ Bind(instr->done_label()); |
| (...skipping 895 matching lines...) |
| 5855 __ Bind(&out_of_object); | 5854 __ Bind(&out_of_object); |
| 5856 __ Ldr(result, FieldMemOperand(object, JSObject::kPropertiesOffset)); | 5855 __ Ldr(result, FieldMemOperand(object, JSObject::kPropertiesOffset)); |
| 5857 // Index is equal to negated out of object property index plus 1. | 5856 // Index is equal to negated out of object property index plus 1. |
| 5858 __ Sub(result, result, Operand::UntagSmiAndScale(index, kPointerSizeLog2)); | 5857 __ Sub(result, result, Operand::UntagSmiAndScale(index, kPointerSizeLog2)); |
| 5859 __ Ldr(result, FieldMemOperand(result, | 5858 __ Ldr(result, FieldMemOperand(result, |
| 5860 FixedArray::kHeaderSize - kPointerSize)); | 5859 FixedArray::kHeaderSize - kPointerSize)); |
| 5861 __ Bind(&done); | 5860 __ Bind(&done); |
| 5862 } | 5861 } |
| 5863 | 5862 |
| 5864 } } // namespace v8::internal | 5863 } } // namespace v8::internal |
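
A note for readers of the hunks above (not part of the CL itself): the change moves the EnsureSpaceForLazyDeopt(Deoptimizer::patch_size()) call out of the individual emitters (DoLazyBailout, DoStackCheck) and into GenerateBodyInstructionPre, so the patchable gap is reserved before every call instruction rather than after the call has been emitted; DoLazyBailout now only records last_lazy_deopt_pc_ = masm()->pc_offset(). Below is a minimal sketch of how such a helper is typically written in this port, assuming the last_lazy_deopt_pc_, kInstructionSize, and InstructionAccurateScope names from the A64 code base; the padding loop is an illustration of the technique, not the exact body from this CL:

    void LCodeGen::EnsureSpaceForLazyDeopt(int space_needed) {
      // Pad the instruction stream with nops until there are at least
      // space_needed bytes after the last recorded lazy-deopt point, so
      // the deoptimizer can later patch a call over that gap safely.
      intptr_t current_pc = masm()->pc_offset();
      if (current_pc < (last_lazy_deopt_pc_ + space_needed)) {
        intptr_t padding_size = last_lazy_deopt_pc_ + space_needed - current_pc;
        ASSERT((padding_size % kInstructionSize) == 0);
        InstructionAccurateScope scope(masm(),
                                       padding_size / kInstructionSize);
        while (padding_size > 0) {
          __ nop();  // Assumes the usual '#define __ masm()->' macro.
          padding_size -= kInstructionSize;
        }
      }
    }

Reserving the gap in GenerateBodyInstructionPre keeps the invariant in one place instead of relying on each call-emitting instruction to remember it.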