OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 696 matching lines...) |
707 } else if (op->IsConstantOperand()) { | 707 } else if (op->IsConstantOperand()) { |
708 HConstant* constant = chunk()->LookupConstant(LConstantOperand::cast(op)); | 708 HConstant* constant = chunk()->LookupConstant(LConstantOperand::cast(op)); |
709 int src_index = DefineDeoptimizationLiteral(constant->handle(isolate())); | 709 int src_index = DefineDeoptimizationLiteral(constant->handle(isolate())); |
710 translation->StoreLiteral(src_index); | 710 translation->StoreLiteral(src_index); |
711 } else { | 711 } else { |
712 UNREACHABLE(); | 712 UNREACHABLE(); |
713 } | 713 } |
714 } | 714 } |
715 | 715 |
716 | 716 |
| 717 int LCodeGen::CallCodeSize(Handle<Code> code, RelocInfo::Mode mode) { |
| 718 int size = masm()->CallSize(code, mode); |
| 719 if (code->kind() == Code::BINARY_OP_IC || |
| 720 code->kind() == Code::COMPARE_IC) { |
| 721 size += Assembler::kInstrSize; // extra nop() added in CallCodeGeneric. |
| 722 } |
| 723 return size; |
| 724 } |
| 725 |
| 726 |
717 void LCodeGen::CallCode(Handle<Code> code, | 727 void LCodeGen::CallCode(Handle<Code> code, |
718 RelocInfo::Mode mode, | 728 RelocInfo::Mode mode, |
719 LInstruction* instr, | 729 LInstruction* instr, |
720 TargetAddressStorageMode storage_mode) { | 730 TargetAddressStorageMode storage_mode) { |
721 CallCodeGeneric(code, mode, instr, RECORD_SIMPLE_SAFEPOINT, storage_mode); | 731 CallCodeGeneric(code, mode, instr, RECORD_SIMPLE_SAFEPOINT, storage_mode); |
722 } | 732 } |
723 | 733 |
724 | 734 |
725 void LCodeGen::CallCodeGeneric(Handle<Code> code, | 735 void LCodeGen::CallCodeGeneric(Handle<Code> code, |
726 RelocInfo::Mode mode, | 736 RelocInfo::Mode mode, |
(...skipping 4938 matching lines...) |
5665 ASSERT(instr->HasEnvironment()); | 5675 ASSERT(instr->HasEnvironment()); |
5666 LEnvironment* env = instr->environment(); | 5676 LEnvironment* env = instr->environment(); |
5667 // There is no LLazyBailout instruction for stack-checks. We have to | 5677 // There is no LLazyBailout instruction for stack-checks. We have to |
5668 // prepare for lazy deoptimization explicitly here. | 5678 // prepare for lazy deoptimization explicitly here. |
5669 if (instr->hydrogen()->is_function_entry()) { | 5679 if (instr->hydrogen()->is_function_entry()) { |
5670 // Perform stack overflow check. | 5680 // Perform stack overflow check. |
5671 Label done; | 5681 Label done; |
5672 __ LoadRoot(ip, Heap::kStackLimitRootIndex); | 5682 __ LoadRoot(ip, Heap::kStackLimitRootIndex); |
5673 __ cmp(sp, Operand(ip)); | 5683 __ cmp(sp, Operand(ip)); |
5674 __ b(hs, &done); | 5684 __ b(hs, &done); |
5675 PredictableCodeSizeScope predictable(masm_, 2 * Assembler::kInstrSize); | 5685 Handle<Code> stack_check = isolate()->builtins()->StackCheck(); |
| 5686 PredictableCodeSizeScope predictable(masm(), |
| 5687 CallCodeSize(stack_check, RelocInfo::CODE_TARGET)); |
5676 ASSERT(instr->context()->IsRegister()); | 5688 ASSERT(instr->context()->IsRegister()); |
5677 ASSERT(ToRegister(instr->context()).is(cp)); | 5689 ASSERT(ToRegister(instr->context()).is(cp)); |
5678 CallCode(isolate()->builtins()->StackCheck(), | 5690 CallCode(stack_check, RelocInfo::CODE_TARGET, instr); |
5679 RelocInfo::CODE_TARGET, | |
5680 instr); | |
5681 __ bind(&done); | 5691 __ bind(&done); |
5682 } else { | 5692 } else { |
5683 ASSERT(instr->hydrogen()->is_backwards_branch()); | 5693 ASSERT(instr->hydrogen()->is_backwards_branch()); |
5684 // Perform stack overflow check if this goto needs it before jumping. | 5694 // Perform stack overflow check if this goto needs it before jumping. |
5685 DeferredStackCheck* deferred_stack_check = | 5695 DeferredStackCheck* deferred_stack_check = |
5686 new(zone()) DeferredStackCheck(this, instr); | 5696 new(zone()) DeferredStackCheck(this, instr); |
5687 __ LoadRoot(ip, Heap::kStackLimitRootIndex); | 5697 __ LoadRoot(ip, Heap::kStackLimitRootIndex); |
5688 __ cmp(sp, Operand(ip)); | 5698 __ cmp(sp, Operand(ip)); |
5689 __ b(lo, deferred_stack_check->entry()); | 5699 __ b(lo, deferred_stack_check->entry()); |
5690 EnsureSpaceForLazyDeopt(Deoptimizer::patch_size()); | 5700 EnsureSpaceForLazyDeopt(Deoptimizer::patch_size()); |
(...skipping 161 matching lines...) |
5852 __ ldr(result, FieldMemOperand(scratch, | 5862 __ ldr(result, FieldMemOperand(scratch, |
5853 FixedArray::kHeaderSize - kPointerSize)); | 5863 FixedArray::kHeaderSize - kPointerSize)); |
5854 __ bind(deferred->exit()); | 5864 __ bind(deferred->exit()); |
5855 __ bind(&done); | 5865 __ bind(&done); |
5856 } | 5866 } |
5857 | 5867 |
5858 | 5868 |
5859 #undef __ | 5869 #undef __ |
5860 | 5870 |
5861 } } // namespace v8::internal | 5871 } } // namespace v8::internal |