Chromium Code Reviews

Unified Diff: src/arm/lithium-codegen-arm.cc

Issue 11037023: Use movw/movt instead of constant pool on ARMv7 (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Fix nits | Created 8 years, 2 months ago
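Background for this change: ARMv7 can materialize a full 32-bit value in two fixed-size instructions, movw (low 16 bits) and movt (high 16 bits), instead of emitting an ldr that loads the value from an out-of-line constant pool. A minimal sketch of how a constant splits across the pair (the helper below is illustrative, not V8 code):

  #include <cstdint>

  // Illustrative helper: the 16-bit immediates a movw/movt pair would
  // carry for a given 32-bit constant.
  struct MovwMovtImmediates {
    uint16_t movw_imm;  // low half; movw also clears the upper half
    uint16_t movt_imm;  // high half, written by movt
  };

  inline MovwMovtImmediates SplitForMovwMovt(uint32_t value) {
    return { static_cast<uint16_t>(value & 0xFFFFu),
             static_cast<uint16_t>(value >> 16) };
  }

Because the pair always occupies exactly eight bytes, code that embeds values this way has a fixed layout, which is why the patch also has to care about predictable code size at the patching sites below.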
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 590 matching lines...)
     int src_index = DefineDeoptimizationLiteral(constant->handle());
     translation->StoreLiteral(src_index);
   } else {
     UNREACHABLE();
   }
 }


 void LCodeGen::CallCode(Handle<Code> code,
                         RelocInfo::Mode mode,
-                        LInstruction* instr) {
-  CallCodeGeneric(code, mode, instr, RECORD_SIMPLE_SAFEPOINT);
+                        LInstruction* instr,
+                        TargetAddressStorageMode storage_mode) {
+  CallCodeGeneric(code, mode, instr, RECORD_SIMPLE_SAFEPOINT, storage_mode);
 }


 void LCodeGen::CallCodeGeneric(Handle<Code> code,
                                RelocInfo::Mode mode,
                                LInstruction* instr,
-                               SafepointMode safepoint_mode) {
+                               SafepointMode safepoint_mode,
+                               TargetAddressStorageMode storage_mode) {
   ASSERT(instr != NULL);
   // Block literal pool emission to ensure the nop indicating no inlined smi
   // code is in the correct position.
   Assembler::BlockConstPoolScope block_const_pool(masm());
   LPointerMap* pointers = instr->pointer_map();
   RecordPosition(pointers->position());
-  __ Call(code, mode);
+  __ Call(code, mode, TypeFeedbackId::None(), al, storage_mode);
   RecordSafepointWithLazyDeopt(instr, safepoint_mode);

   // Signal that we don't inline smi code before these stubs in the
   // optimizing code generator.
   if (code->kind() == Code::BINARY_OP_IC ||
       code->kind() == Code::COMPARE_IC) {
     __ nop();
   }
 }
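The BlockConstPoolScope in this hunk is load-bearing: the nop emitted after BINARY_OP_IC/COMPARE_IC calls is a marker that later code inspects, so the assembler must not interleave a constant pool dump that would shift it. A rough sketch of the assumed scope semantics (simplified names, not the real assembler):

  #include <cassert>

  // Simplified model: while at least one scope is alive the assembler
  // must not flush its pending constant pool, so every instruction
  // emitted inside the scope keeps its expected position.
  class ConstPoolBlockerSketch {
   public:
    void Enter() { ++nesting_; }
    void Leave() { assert(nesting_ > 0); --nesting_; }
    bool CanEmitConstPool() const { return nesting_ == 0; }
   private:
    int nesting_ = 0;
  };

  class BlockConstPoolScopeSketch {
   public:
    explicit BlockConstPoolScopeSketch(ConstPoolBlockerSketch* blocker)
        : blocker_(blocker) { blocker_->Enter(); }
    ~BlockConstPoolScopeSketch() { blocker_->Leave(); }
   private:
    ConstPoolBlockerSketch* blocker_;
  };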
(...skipping 1827 matching lines...)
   Register map = temp;
   __ ldr(map, FieldMemOperand(object, HeapObject::kMapOffset));
   {
     // Block constant pool emission to ensure the positions of instructions are
     // as expected by the patcher. See InstanceofStub::Generate().
     Assembler::BlockConstPoolScope block_const_pool(masm());
     __ bind(deferred->map_check());  // Label for calculating code patching.
     // We use Factory::the_hole_value() on purpose instead of loading from the
     // root array to force relocation to be able to later patch with
     // the cached map.
+    PredictableCodeSizeScope predictable(masm_);
     Handle<JSGlobalPropertyCell> cell =
         factory()->NewJSGlobalPropertyCell(factory()->the_hole_value());
     __ mov(ip, Operand(Handle<Object>(cell)));
     __ ldr(ip, FieldMemOperand(ip, JSGlobalPropertyCell::kValueOffset));
     __ cmp(map, Operand(ip));
     __ b(ne, &cache_miss);
     // We use Factory::the_hole_value() on purpose instead of loading from the
     // root array to force relocation to be able to later patch
     // with true or false.
     __ mov(result, Operand(factory()->the_hole_value()));
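The new PredictableCodeSizeScope line is the interesting addition in this hunk. With movw/movt enabled, a mov of an immediate can shrink to a single instruction, which would shift the offsets the InstanceofStub patcher relies on. A sketch of the semantics we understand the scope to have (illustrative names; the real class lives in the macro assembler):

  // Sketch: an RAII flag the assembler would consult when choosing
  // between short and fixed-size instruction sequences. While set,
  // size-varying encodings (e.g. movw alone instead of movw+movt) are
  // avoided so byte offsets computed for patching stay valid.
  struct AssemblerFlagsSketch { bool predictable_code_size = false; };

  class PredictableCodeSizeScopeSketch {
   public:
    explicit PredictableCodeSizeScopeSketch(AssemblerFlagsSketch* flags)
        : flags_(flags), old_value_(flags->predictable_code_size) {
      flags_->predictable_code_size = true;
    }
    ~PredictableCodeSizeScopeSketch() {
      flags_->predictable_code_size = old_value_;
    }
   private:
    AssemblerFlagsSketch* flags_;
    bool old_value_;
  };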
(...skipping 41 matching lines...)

   PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);

   // Get the temp register reserved by the instruction. This needs to be r4
   // as its slot in the pushed safepoint registers is used to communicate
   // the offset to the location of the map check.
   Register temp = ToRegister(instr->temp());
   ASSERT(temp.is(r4));
   __ LoadHeapObject(InstanceofStub::right(), instr->function());
   static const int kAdditionalDelta = 5;
+  // Make sure that code size is predictable, since we use specific constant
+  // offsets in the code to find embedded values.
+  PredictableCodeSizeScope predictable(masm_);
   int delta = masm_->InstructionsGeneratedSince(map_check) + kAdditionalDelta;
   Label before_push_delta;
   __ bind(&before_push_delta);
   __ BlockConstPoolFor(kAdditionalDelta);
   __ mov(temp, Operand(delta * kPointerSize));
   // The mov above can generate one or two instructions. The delta was computed
   // for two instructions, so we need to pad here in case of one instruction.
   if (masm_->InstructionsGeneratedSince(&before_push_delta) != 2) {
     ASSERT_EQ(1, masm_->InstructionsGeneratedSince(&before_push_delta));
     __ nop();
   }
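The padding logic above deserves a closer look: with movw/movt, mov(temp, Operand(delta * kPointerSize)) emits one instruction when the value fits in 16 bits and two otherwise, but delta was computed assuming two. A toy model of the invariant (names and the string encoding are ours):

  #include <cstdint>
  #include <string>
  #include <vector>

  // Toy emission model: materialize `value`, then pad with a nop when
  // only one instruction was needed, so the sequence always fills exactly
  // two instruction slots (eight bytes), matching the precomputed delta.
  inline std::vector<std::string> EmitPaddedMov(uint32_t value) {
    std::vector<std::string> instrs;
    instrs.push_back("movw");                         // low 16 bits
    if (value > 0xFFFFu) instrs.push_back("movt");    // high 16 bits if needed
    if (instrs.size() == 1) instrs.push_back("nop");  // pad to two slots
    return instrs;  // always exactly two entries
  }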
(...skipping 243 matching lines...)
       __ bind(&check_passed);
       EmitLoadFieldOrConstantFunction(
           result, object, map, name, instr->environment());
       __ b(&done);
       __ bind(&next);
     }
   }
   if (need_generic) {
     __ mov(r2, Operand(name));
     Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
-    CallCode(ic, RelocInfo::CODE_TARGET, instr);
+    CallCode(ic, RelocInfo::CODE_TARGET, instr, NEVER_INLINE_TARGET_ADDRESS);
   }
   __ bind(&done);
 }


 void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) {
   ASSERT(ToRegister(instr->object()).is(r0));
   ASSERT(ToRegister(instr->result()).is(r0));

   // Name is always in r2.
   __ mov(r2, Operand(instr->name()));
   Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
-  CallCode(ic, RelocInfo::CODE_TARGET, instr);
+  CallCode(ic, RelocInfo::CODE_TARGET, instr, NEVER_INLINE_TARGET_ADDRESS);
 }


 void LCodeGen::DoLoadFunctionPrototype(LLoadFunctionPrototype* instr) {
   Register scratch = scratch0();
   Register function = ToRegister(instr->function());
   Register result = ToRegister(instr->result());

   // Check that the function really is a function. Load map into the
   // result register.
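From here on, every IC and stub-cache call site gains a NEVER_INLINE_TARGET_ADDRESS argument. Our understanding of the rationale (hedged; the patch itself does not spell it out): IC call targets are rewritten repeatedly as the IC changes state, and repatching a constant-pool load means rewriting one data word, whereas an inlined movw/movt pair means rewriting two instructions and flushing their icache lines. The sketch below restates that choice; only NEVER_INLINE_TARGET_ADDRESS is taken verbatim from the patch, the rest is illustrative:

  // Illustrative restatement of the storage-mode choice.
  enum class TargetAddressStorageModeSketch {
    kCanInlineTargetAddress,   // movw/movt pair in the instruction stream
    kNeverInlineTargetAddress  // always an ldr from the constant pool
  };

  inline TargetAddressStorageModeSketch ChooseStorageMode(
      bool target_is_patched_often) {
    // Frequently patched targets (ICs) keep their address in the pool,
    // where repatching is a single data-word store; stable targets may
    // be inlined for fewer data loads.
    return target_is_patched_often
               ? TargetAddressStorageModeSketch::kNeverInlineTargetAddress
               : TargetAddressStorageModeSketch::kCanInlineTargetAddress;
  }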
(...skipping 290 matching lines...)
     }
   }
 }


 void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) {
   ASSERT(ToRegister(instr->object()).is(r1));
   ASSERT(ToRegister(instr->key()).is(r0));

   Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
-  CallCode(ic, RelocInfo::CODE_TARGET, instr);
+  CallCode(ic, RelocInfo::CODE_TARGET, instr, NEVER_INLINE_TARGET_ADDRESS);
 }


 void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) {
   Register scratch = scratch0();
   Register result = ToRegister(instr->result());

   if (instr->hydrogen()->from_inlined()) {
     __ sub(result, sp, Operand(2 * kPointerSize));
   } else {
(...skipping 674 matching lines...)
   }
 }


 void LCodeGen::DoCallKeyed(LCallKeyed* instr) {
   ASSERT(ToRegister(instr->result()).is(r0));

   int arity = instr->arity();
   Handle<Code> ic =
       isolate()->stub_cache()->ComputeKeyedCallInitialize(arity);
-  CallCode(ic, RelocInfo::CODE_TARGET, instr);
+  CallCode(ic, RelocInfo::CODE_TARGET, instr, NEVER_INLINE_TARGET_ADDRESS);
   __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
 }


 void LCodeGen::DoCallNamed(LCallNamed* instr) {
   ASSERT(ToRegister(instr->result()).is(r0));

   int arity = instr->arity();
   RelocInfo::Mode mode = RelocInfo::CODE_TARGET;
   Handle<Code> ic =
       isolate()->stub_cache()->ComputeCallInitialize(arity, mode);
   __ mov(r2, Operand(instr->name()));
-  CallCode(ic, mode, instr);
+  CallCode(ic, mode, instr, NEVER_INLINE_TARGET_ADDRESS);
   // Restore context register.
   __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
 }


 void LCodeGen::DoCallFunction(LCallFunction* instr) {
   ASSERT(ToRegister(instr->function()).is(r1));
   ASSERT(ToRegister(instr->result()).is(r0));

   int arity = instr->arity();
   CallFunctionStub stub(arity, NO_CALL_FUNCTION_FLAGS);
   CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
   __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
 }


 void LCodeGen::DoCallGlobal(LCallGlobal* instr) {
   ASSERT(ToRegister(instr->result()).is(r0));

   int arity = instr->arity();
   RelocInfo::Mode mode = RelocInfo::CODE_TARGET_CONTEXT;
   Handle<Code> ic =
       isolate()->stub_cache()->ComputeCallInitialize(arity, mode);
   __ mov(r2, Operand(instr->name()));
-  CallCode(ic, mode, instr);
+  CallCode(ic, mode, instr, NEVER_INLINE_TARGET_ADDRESS);
   __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
 }


 void LCodeGen::DoCallKnownGlobal(LCallKnownGlobal* instr) {
   ASSERT(ToRegister(instr->result()).is(r0));
   CallKnownFunction(instr->target(),
                     instr->arity(),
                     instr,
                     CALL_AS_FUNCTION,
(...skipping 79 matching lines...)

 void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) {
   ASSERT(ToRegister(instr->object()).is(r1));
   ASSERT(ToRegister(instr->value()).is(r0));

   // Name is always in r2.
   __ mov(r2, Operand(instr->name()));
   Handle<Code> ic = (instr->strict_mode_flag() == kStrictMode)
       ? isolate()->builtins()->StoreIC_Initialize_Strict()
       : isolate()->builtins()->StoreIC_Initialize();
-  CallCode(ic, RelocInfo::CODE_TARGET, instr);
+  CallCode(ic, RelocInfo::CODE_TARGET, instr, NEVER_INLINE_TARGET_ADDRESS);
 }


 void LCodeGen::DeoptIfTaggedButNotSmi(LEnvironment* environment,
                                       HValue* value,
                                       LOperand* operand) {
   if (value->representation().IsTagged() && !value->type().IsSmi()) {
     if (operand->IsRegister()) {
       __ tst(ToRegister(operand), Operand(kSmiTagMask));
     } else {
(...skipping 193 matching lines...)


 void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) {
   ASSERT(ToRegister(instr->object()).is(r2));
   ASSERT(ToRegister(instr->key()).is(r1));
   ASSERT(ToRegister(instr->value()).is(r0));

   Handle<Code> ic = (instr->strict_mode_flag() == kStrictMode)
       ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
       : isolate()->builtins()->KeyedStoreIC_Initialize();
-  CallCode(ic, RelocInfo::CODE_TARGET, instr);
+  CallCode(ic, RelocInfo::CODE_TARGET, instr, NEVER_INLINE_TARGET_ADDRESS);
 }


 void LCodeGen::DoTransitionElementsKind(LTransitionElementsKind* instr) {
   Register object_reg = ToRegister(instr->object());
   Register new_map_reg = ToRegister(instr->new_map_temp());
   Register scratch = scratch0();

   Handle<Map> from_map = instr->original_map();
   Handle<Map> to_map = instr->transitioned_map();
(...skipping 1354 matching lines...)
   LEnvironment* env = instr->environment();
   // There is no LLazyBailout instruction for stack-checks. We have to
   // prepare for lazy deoptimization explicitly here.
   if (instr->hydrogen()->is_function_entry()) {
     // Perform stack overflow check.
     Label done;
     __ LoadRoot(ip, Heap::kStackLimitRootIndex);
     __ cmp(sp, Operand(ip));
     __ b(hs, &done);
     StackCheckStub stub;
+    PredictableCodeSizeScope predictable(masm_);
     CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
     EnsureSpaceForLazyDeopt();
     __ bind(&done);
     RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
     safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
   } else {
     ASSERT(instr->hydrogen()->is_backwards_branch());
     // Perform stack overflow check if this goto needs it before jumping.
     DeferredStackCheck* deferred_stack_check =
         new(zone()) DeferredStackCheck(this, instr);
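For readers less fluent in ARM condition codes, the three instructions at the top of this hunk implement the guard below; hs is an unsigned greater-or-equal, so the stub is reached only on overflow (a pseudo-C paraphrase, not actual V8 code):

  #include <cstdint>

  // Paraphrase of: LoadRoot(ip, kStackLimitRootIndex); cmp(sp, ip);
  // b(hs, &done);
  inline bool StackCheckCallsStub(uintptr_t sp, uintptr_t stack_limit) {
    // b(hs, &done) branches past the stub when sp >= stack_limit
    // (unsigned), i.e. while there is still headroom; the stub runs
    // only on overflow.
    return sp < stack_limit;
  }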
(...skipping 119 matching lines...)
   __ sub(scratch, result, Operand(index, LSL, kPointerSizeLog2 - kSmiTagSize));
   __ ldr(result, FieldMemOperand(scratch,
                                  FixedArray::kHeaderSize - kPointerSize));
   __ bind(&done);
 }


 #undef __

 } }  // namespace v8::internal