OLD | NEW |
---|---|
1 // Copyright 2010 the V8 project authors. All rights reserved. | 1 // Copyright 2010 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 552 matching lines...) | |
563 // A value is loaded on all paths reaching this point. | 563 // A value is loaded on all paths reaching this point. |
564 loaded.Bind(); | 564 loaded.Bind(); |
565 } | 565 } |
566 ASSERT(has_valid_frame()); | 566 ASSERT(has_valid_frame()); |
567 ASSERT(!has_cc()); | 567 ASSERT(!has_cc()); |
568 ASSERT(frame_->height() == original_height + 1); | 568 ASSERT(frame_->height() == original_height + 1); |
569 } | 569 } |
570 | 570 |
571 | 571 |
572 void CodeGenerator::LoadGlobal() { | 572 void CodeGenerator::LoadGlobal() { |
573 VirtualFrame::SpilledScope spilled_scope(frame_); | 573 Register reg = frame_->GetTOSRegister(); |
574 __ ldr(r0, GlobalObject()); | 574 __ ldr(reg, GlobalObject()); |
575 frame_->EmitPush(r0); | 575 frame_->EmitPush(reg); |
576 } | 576 } |
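The LoadGlobal() change above swaps the spilled-frame idiom (a SpilledScope plus a hard-coded r0) for the virtual frame's register allocation: GetTOSRegister() hands back a register that can directly become the new top-of-stack element, so unrelated cached values stay in registers. A minimal sketch of the two styles, using only the VirtualFrame interface visible in this patch:

    // Before: force the whole frame to memory, then use r0 unconditionally.
    VirtualFrame::SpilledScope spilled_scope(frame_);
    __ ldr(r0, GlobalObject());
    frame_->EmitPush(r0);

    // After: ask the frame for a register suitable as the new TOS element;
    // no spill of unrelated frame elements is required.
    Register reg = frame_->GetTOSRegister();
    __ ldr(reg, GlobalObject());
    frame_->EmitPush(reg);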
577 | 577 |
578 | 578 |
579 void CodeGenerator::LoadGlobalReceiver(Register scratch) { | 579 void CodeGenerator::LoadGlobalReceiver(Register scratch) { |
580 VirtualFrame::SpilledScope spilled_scope(frame_); | 580 VirtualFrame::SpilledScope spilled_scope(frame_); |
581 __ ldr(scratch, ContextOperand(cp, Context::GLOBAL_INDEX)); | 581 __ ldr(scratch, ContextOperand(cp, Context::GLOBAL_INDEX)); |
582 __ ldr(scratch, | 582 __ ldr(scratch, |
583 FieldMemOperand(scratch, GlobalObject::kGlobalReceiverOffset)); | 583 FieldMemOperand(scratch, GlobalObject::kGlobalReceiverOffset)); |
584 frame_->EmitPush(scratch); | 584 frame_->EmitPush(scratch); |
585 } | 585 } |
(...skipping 94 matching lines...) | |
680 cgen->LoadReference(this); | 680 cgen->LoadReference(this); |
681 } | 681 } |
682 | 682 |
683 | 683 |
684 Reference::~Reference() { | 684 Reference::~Reference() { |
685 ASSERT(is_unloaded() || is_illegal()); | 685 ASSERT(is_unloaded() || is_illegal()); |
686 } | 686 } |
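Reference is an RAII helper: the constructor calls LoadReference() to put the pieces of the reference on the frame, and the destructor asserts that the reference was either consumed or illegal. A typical use, matching VisitProperty() further down:

    { Reference property(this, node);  // loads the reference onto the frame
      property.GetValue();             // replaces it with the value; marks it unloaded
    }                                  // ~Reference(): ASSERT(is_unloaded() || is_illegal())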
687 | 687 |
688 | 688 |
689 void CodeGenerator::LoadReference(Reference* ref) { | 689 void CodeGenerator::LoadReference(Reference* ref) { |
690 VirtualFrame::SpilledScope spilled_scope(frame_); | |
691 Comment cmnt(masm_, "[ LoadReference"); | 690 Comment cmnt(masm_, "[ LoadReference"); |
692 Expression* e = ref->expression(); | 691 Expression* e = ref->expression(); |
693 Property* property = e->AsProperty(); | 692 Property* property = e->AsProperty(); |
694 Variable* var = e->AsVariableProxy()->AsVariable(); | 693 Variable* var = e->AsVariableProxy()->AsVariable(); |
695 | 694 |
696 if (property != NULL) { | 695 if (property != NULL) { |
697 // The expression is either a property or a variable proxy that rewrites | 696 // The expression is either a property or a variable proxy that rewrites |
698 // to a property. | 697 // to a property. |
699 LoadAndSpill(property->obj()); | 698 Load(property->obj()); |
700 if (property->key()->IsPropertyName()) { | 699 if (property->key()->IsPropertyName()) { |
701 ref->set_type(Reference::NAMED); | 700 ref->set_type(Reference::NAMED); |
702 } else { | 701 } else { |
703 LoadAndSpill(property->key()); | 702 Load(property->key()); |
704 ref->set_type(Reference::KEYED); | 703 ref->set_type(Reference::KEYED); |
705 } | 704 } |
706 } else if (var != NULL) { | 705 } else if (var != NULL) { |
707 // The expression is a variable proxy that does not rewrite to a | 706 // The expression is a variable proxy that does not rewrite to a |
708 // property. Global variables are treated as named property references. | 707 // property. Global variables are treated as named property references. |
709 if (var->is_global()) { | 708 if (var->is_global()) { |
710 LoadGlobal(); | 709 LoadGlobal(); |
711 ref->set_type(Reference::NAMED); | 710 ref->set_type(Reference::NAMED); |
712 } else { | 711 } else { |
713 ASSERT(var->slot() != NULL); | 712 ASSERT(var->slot() != NULL); |
714 ref->set_type(Reference::SLOT); | 713 ref->set_type(Reference::SLOT); |
715 } | 714 } |
716 } else { | 715 } else { |
717 // Anything else is a runtime error. | 716 // Anything else is a runtime error. |
717 VirtualFrame::SpilledScope spilled_scope(frame_); | |
718 LoadAndSpill(e); | 718 LoadAndSpill(e); |
719 frame_->CallRuntime(Runtime::kThrowReferenceError, 1); | 719 frame_->CallRuntime(Runtime::kThrowReferenceError, 1); |
720 } | 720 } |
721 } | 721 } |
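The LoadReference() change shows the recurring pattern of this patch: the function-wide SpilledScope is dropped and Load() replaces LoadAndSpill(), so only the error branch, which must make a runtime call on a spilled frame, still creates a SpilledScope. Condensed, the discipline looks like this (the condition is illustrative, not from the patch):

    if (is_property_or_variable) {  // fast cases
      Load(expr);                   // frame may keep values in registers
    } else {
      // Slow case only: runtime calls require a fully spilled frame.
      VirtualFrame::SpilledScope spilled_scope(frame_);
      LoadAndSpill(expr);
      frame_->CallRuntime(Runtime::kThrowReferenceError, 1);
    }  // The spill constraint ends with the scope.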
722 | 722 |
723 | 723 |
724 void CodeGenerator::UnloadReference(Reference* ref) { | 724 void CodeGenerator::UnloadReference(Reference* ref) { |
725 int size = ref->size(); | 725 int size = ref->size(); |
726 ref->set_unloaded(); | 726 ref->set_unloaded(); |
727 if (size == 0) return; | 727 if (size == 0) return; |
(...skipping 792 matching lines...) | |
1520 VirtualFrame::SpilledScope spilled_scope(frame_); | 1520 VirtualFrame::SpilledScope spilled_scope(frame_); |
1521 | 1521 |
1522 ASSERT(ArgumentsMode() == LAZY_ARGUMENTS_ALLOCATION); | 1522 ASSERT(ArgumentsMode() == LAZY_ARGUMENTS_ALLOCATION); |
1523 ASSERT(arguments->IsArguments()); | 1523 ASSERT(arguments->IsArguments()); |
1524 | 1524 |
1525 // Load applicand.apply onto the stack. This will usually | 1525 // Load applicand.apply onto the stack. This will usually |
1526 // give us a megamorphic load site. Not super, but it works. | 1526 // give us a megamorphic load site. Not super, but it works. |
1527 LoadAndSpill(applicand); | 1527 LoadAndSpill(applicand); |
1528 Handle<String> name = Factory::LookupAsciiSymbol("apply"); | 1528 Handle<String> name = Factory::LookupAsciiSymbol("apply"); |
1529 __ mov(r2, Operand(name)); | 1529 __ mov(r2, Operand(name)); |
1530 __ ldr(r0, MemOperand(sp, 0)); | |
1530 frame_->CallLoadIC(RelocInfo::CODE_TARGET); | 1531 frame_->CallLoadIC(RelocInfo::CODE_TARGET); |
1531 frame_->EmitPush(r0); | 1532 frame_->EmitPush(r0); |
1532 | 1533 |
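The explicit receiver load ("ldr r0, [sp, #0]") that preceded CallLoadIC is gone on the new side. The hedged reading, consistent with the rest of this patch: responsibility for satisfying the load IC's calling convention (receiver in r0, name in r2) has moved into the frame, which elsewhere uses SpillAllButCopyTOSToR0() before CallLoadIC. Assumed contract at this call site:

    // Assumed: the receiver is the top frame element, and CallLoadIC (or a
    // preceding SpillAllButCopyTOSToR0()) puts a copy of it in r0.
    __ mov(r2, Operand(name));                   // name register for the IC
    frame_->CallLoadIC(RelocInfo::CODE_TARGET);  // result is left in r0
    frame_->EmitPush(r0);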
1533 // Load the receiver and the existing arguments object onto the | 1534 // Load the receiver and the existing arguments object onto the |
1534 // expression stack. Avoid allocating the arguments object here. | 1535 // expression stack. Avoid allocating the arguments object here. |
1535 LoadAndSpill(receiver); | 1536 LoadAndSpill(receiver); |
1536 LoadFromSlot(scope()->arguments()->var()->slot(), NOT_INSIDE_TYPEOF); | 1537 LoadFromSlot(scope()->arguments()->var()->slot(), NOT_INSIDE_TYPEOF); |
1537 | 1538 |
1538 // Emit the source position information after having loaded the | 1539 // Emit the source position information after having loaded the |
1539 // receiver and the arguments. | 1540 // receiver and the arguments. |
(...skipping 1401 matching lines...) | |
2941 else_.Bind(); | 2942 else_.Bind(); |
2942 LoadAndSpill(node->else_expression()); | 2943 LoadAndSpill(node->else_expression()); |
2943 if (exit.is_linked()) exit.Bind(); | 2944 if (exit.is_linked()) exit.Bind(); |
2944 } | 2945 } |
2945 ASSERT(frame_->height() == original_height + 1); | 2946 ASSERT(frame_->height() == original_height + 1); |
2946 } | 2947 } |
2947 | 2948 |
2948 | 2949 |
2949 void CodeGenerator::LoadFromSlot(Slot* slot, TypeofState typeof_state) { | 2950 void CodeGenerator::LoadFromSlot(Slot* slot, TypeofState typeof_state) { |
2950 if (slot->type() == Slot::LOOKUP) { | 2951 if (slot->type() == Slot::LOOKUP) { |
2951 VirtualFrame::SpilledScope spilled_scope(frame_); | |
2952 ASSERT(slot->var()->is_dynamic()); | 2952 ASSERT(slot->var()->is_dynamic()); |
2953 | 2953 |
2954 // JumpTargets do not yet support merging frames so the frame must be | |
2955 // spilled when jumping to these targets. | |
2954 JumpTarget slow; | 2956 JumpTarget slow; |
2955 JumpTarget done; | 2957 JumpTarget done; |
2956 | 2958 |
2957 // Generate fast-case code for variables that might be shadowed by | 2959 // Generate fast-case code for variables that might be shadowed by |
2958 // eval-introduced variables. Eval is used a lot without | 2960 // eval-introduced variables. Eval is used a lot without |
2959 // introducing variables. In those cases, we do not want to | 2961 // introducing variables. In those cases, we do not want to |
2960 // perform a runtime call for all variables in the scope | 2962 // perform a runtime call for all variables in the scope |
2961 // containing the eval. | 2963 // containing the eval. |
2962 if (slot->var()->mode() == Variable::DYNAMIC_GLOBAL) { | 2964 if (slot->var()->mode() == Variable::DYNAMIC_GLOBAL) { |
2963 LoadFromGlobalSlotCheckExtensions(slot, typeof_state, r1, r2, &slow); | 2965 LoadFromGlobalSlotCheckExtensions(slot, typeof_state, &slow); |
2964 // If there was no control flow to slow, we can exit early. | 2966 // If there was no control flow to slow, we can exit early. |
2965 if (!slow.is_linked()) { | 2967 if (!slow.is_linked()) { |
2966 frame_->EmitPush(r0); | 2968 frame_->EmitPush(r0); |
2967 return; | 2969 return; |
2968 } | 2970 } |
2971 frame_->SpillAll(); | |
2969 | 2972 |
2970 done.Jump(); | 2973 done.Jump(); |
2971 | 2974 |
2972 } else if (slot->var()->mode() == Variable::DYNAMIC_LOCAL) { | 2975 } else if (slot->var()->mode() == Variable::DYNAMIC_LOCAL) { |
2976 frame_->SpillAll(); | |
2973 Slot* potential_slot = slot->var()->local_if_not_shadowed()->slot(); | 2977 Slot* potential_slot = slot->var()->local_if_not_shadowed()->slot(); |
2974 // Only generate the fast case for locals that rewrite to slots. | 2978 // Only generate the fast case for locals that rewrite to slots. |
2975 // This rules out argument loads. | 2979 // This rules out argument loads. |
2976 if (potential_slot != NULL) { | 2980 if (potential_slot != NULL) { |
2977 __ ldr(r0, | 2981 __ ldr(r0, |
2978 ContextSlotOperandCheckExtensions(potential_slot, | 2982 ContextSlotOperandCheckExtensions(potential_slot, |
Søren Thygesen Gjesse
2010/04/27 06:55:40
Would it be possible to change this like LoadFromG
2979 r1, | 2983 r1, |
2980 r2, | 2984 r2, |
2981 &slow)); | 2985 &slow)); |
2982 if (potential_slot->var()->mode() == Variable::CONST) { | 2986 if (potential_slot->var()->mode() == Variable::CONST) { |
2983 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); | 2987 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); |
2984 __ cmp(r0, ip); | 2988 __ cmp(r0, ip); |
2985 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq); | 2989 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq); |
2986 } | 2990 } |
2987 // There is always control flow to slow from | 2991 // There is always control flow to slow from |
2988 // ContextSlotOperandCheckExtensions so we have to jump around | 2992 // ContextSlotOperandCheckExtensions so we have to jump around |
2989 // it. | 2993 // it. |
2990 done.Jump(); | 2994 done.Jump(); |
2991 } | 2995 } |
2992 } | 2996 } |
2993 | 2997 |
2994 slow.Bind(); | 2998 slow.Bind(); |
2999 VirtualFrame::SpilledScope spilled_scope(frame_); | |
2995 frame_->EmitPush(cp); | 3000 frame_->EmitPush(cp); |
2996 __ mov(r0, Operand(slot->var()->name())); | 3001 __ mov(r0, Operand(slot->var()->name())); |
2997 frame_->EmitPush(r0); | 3002 frame_->EmitPush(r0); |
2998 | 3003 |
2999 if (typeof_state == INSIDE_TYPEOF) { | 3004 if (typeof_state == INSIDE_TYPEOF) { |
3000 frame_->CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2); | 3005 frame_->CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2); |
3001 } else { | 3006 } else { |
3002 frame_->CallRuntime(Runtime::kLoadContextSlot, 2); | 3007 frame_->CallRuntime(Runtime::kLoadContextSlot, 2); |
3003 } | 3008 } |
3004 | 3009 |
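The lookup path above is the usual fast/slow JumpTarget split, with the spills now pushed down to where they are needed: the DYNAMIC_GLOBAL fast case runs on an unspilled frame and only calls SpillAll() before jumping to done (JumpTargets here do not merge frames, so both predecessors of done must arrive with the same spilled layout), while the slow path installs its SpilledScope at slow.Bind(). A condensed sketch of the shape (helper names are illustrative):

    JumpTarget slow, done;
    EmitFastCase(&slow);   // may branch to slow; leaves the result in r0
    frame_->SpillAll();    // match the frame state of the slow path
    done.Jump();

    slow.Bind();
    VirtualFrame::SpilledScope spilled_scope(frame_);  // slow path only
    EmitRuntimeLookup();   // Runtime::kLoadContextSlot[NoReferenceError]
    done.Bind();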
(...skipping 131 matching lines...) | |
3136 if (init_state == CONST_INIT || slot->type() == Slot::CONTEXT) { | 3141 if (init_state == CONST_INIT || slot->type() == Slot::CONTEXT) { |
3137 frame_->SpillAll(); | 3142 frame_->SpillAll(); |
3138 exit.Bind(); | 3143 exit.Bind(); |
3139 } | 3144 } |
3140 } | 3145 } |
3141 } | 3146 } |
3142 | 3147 |
3143 | 3148 |
3144 void CodeGenerator::LoadFromGlobalSlotCheckExtensions(Slot* slot, | 3149 void CodeGenerator::LoadFromGlobalSlotCheckExtensions(Slot* slot, |
3145 TypeofState typeof_state, | 3150 TypeofState typeof_state, |
3146 Register tmp, | |
3147 Register tmp2, | |
3148 JumpTarget* slow) { | 3151 JumpTarget* slow) { |
3149 // Check that no extension objects have been created by calls to | 3152 // Check that no extension objects have been created by calls to |
3150 // eval from the current scope to the global scope. | 3153 // eval from the current scope to the global scope. |
3154 Register tmp = frame_->scratch0(); | |
3155 Register tmp2 = frame_->scratch1(); | |
3151 Register context = cp; | 3156 Register context = cp; |
3152 Scope* s = scope(); | 3157 Scope* s = scope(); |
3153 while (s != NULL) { | 3158 while (s != NULL) { |
3154 if (s->num_heap_slots() > 0) { | 3159 if (s->num_heap_slots() > 0) { |
3155 if (s->calls_eval()) { | 3160 if (s->calls_eval()) { |
3161 frame_->SpillAll(); | |
3156 // Check that extension is NULL. | 3162 // Check that extension is NULL. |
3157 __ ldr(tmp2, ContextOperand(context, Context::EXTENSION_INDEX)); | 3163 __ ldr(tmp2, ContextOperand(context, Context::EXTENSION_INDEX)); |
3158 __ tst(tmp2, tmp2); | 3164 __ tst(tmp2, tmp2); |
3159 slow->Branch(ne); | 3165 slow->Branch(ne); |
3160 } | 3166 } |
3161 // Load next context in chain. | 3167 // Load next context in chain. |
3162 __ ldr(tmp, ContextOperand(context, Context::CLOSURE_INDEX)); | 3168 __ ldr(tmp, ContextOperand(context, Context::CLOSURE_INDEX)); |
3163 __ ldr(tmp, FieldMemOperand(tmp, JSFunction::kContextOffset)); | 3169 __ ldr(tmp, FieldMemOperand(tmp, JSFunction::kContextOffset)); |
3164 context = tmp; | 3170 context = tmp; |
3165 } | 3171 } |
3166 // If no outer scope calls eval, we do not need to check more | 3172 // If no outer scope calls eval, we do not need to check more |
3167 // context extensions. | 3173 // context extensions. |
3168 if (!s->outer_scope_calls_eval() || s->is_eval_scope()) break; | 3174 if (!s->outer_scope_calls_eval() || s->is_eval_scope()) break; |
3169 s = s->outer_scope(); | 3175 s = s->outer_scope(); |
3170 } | 3176 } |
3171 | 3177 |
3172 if (s->is_eval_scope()) { | 3178 if (s->is_eval_scope()) { |
3179 frame_->SpillAll(); | |
3173 Label next, fast; | 3180 Label next, fast; |
3174 __ Move(tmp, context); | 3181 __ Move(tmp, context); |
3175 __ bind(&next); | 3182 __ bind(&next); |
3176 // Terminate at global context. | 3183 // Terminate at global context. |
3177 __ ldr(tmp2, FieldMemOperand(tmp, HeapObject::kMapOffset)); | 3184 __ ldr(tmp2, FieldMemOperand(tmp, HeapObject::kMapOffset)); |
3178 __ LoadRoot(ip, Heap::kGlobalContextMapRootIndex); | 3185 __ LoadRoot(ip, Heap::kGlobalContextMapRootIndex); |
3179 __ cmp(tmp2, ip); | 3186 __ cmp(tmp2, ip); |
3180 __ b(eq, &fast); | 3187 __ b(eq, &fast); |
3181 // Check that extension is NULL. | 3188 // Check that extension is NULL. |
3182 __ ldr(tmp2, ContextOperand(tmp, Context::EXTENSION_INDEX)); | 3189 __ ldr(tmp2, ContextOperand(tmp, Context::EXTENSION_INDEX)); |
3183 __ tst(tmp2, tmp2); | 3190 __ tst(tmp2, tmp2); |
3184 slow->Branch(ne); | 3191 slow->Branch(ne); |
3185 // Load next context in chain. | 3192 // Load next context in chain. |
3186 __ ldr(tmp, ContextOperand(tmp, Context::CLOSURE_INDEX)); | 3193 __ ldr(tmp, ContextOperand(tmp, Context::CLOSURE_INDEX)); |
3187 __ ldr(tmp, FieldMemOperand(tmp, JSFunction::kContextOffset)); | 3194 __ ldr(tmp, FieldMemOperand(tmp, JSFunction::kContextOffset)); |
3188 __ b(&next); | 3195 __ b(&next); |
3189 __ bind(&fast); | 3196 __ bind(&fast); |
3190 } | 3197 } |
3191 | 3198 |
3192 // Load the global object. | 3199 // Load the global object. |
3193 LoadGlobal(); | 3200 LoadGlobal(); |
3194 // Set up the name register and call load IC. | 3201 // Set up the name register and call load IC. |
3202 frame_->SpillAllButCopyTOSToR0(); | |
3195 __ mov(r2, Operand(slot->var()->name())); | 3203 __ mov(r2, Operand(slot->var()->name())); |
3196 frame_->CallLoadIC(typeof_state == INSIDE_TYPEOF | 3204 frame_->CallLoadIC(typeof_state == INSIDE_TYPEOF |
3197 ? RelocInfo::CODE_TARGET | 3205 ? RelocInfo::CODE_TARGET |
3198 : RelocInfo::CODE_TARGET_CONTEXT); | 3206 : RelocInfo::CODE_TARGET_CONTEXT); |
3199 // Drop the global object. The result is in r0. | 3207 // Drop the global object. The result is in r0. |
3200 frame_->Drop(); | 3208 frame_->Drop(); |
3201 } | 3209 } |
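SpillAllButCopyTOSToR0() is the frame primitive this patch leans on before load-IC calls: all frame elements end up in memory, and the top of stack, here the global object pushed by LoadGlobal(), is additionally left in r0, which the IC expects to hold the receiver. A hedged sketch of the required effect (assumed implementation; the real one lives in the virtual frame):

    void VirtualFrame::SpillAllButCopyTOSToR0() {
      SpillAll();                      // flush every cached element to memory
      __ ldr(r0, MemOperand(sp, 0));   // copy (not pop) the TOS into r0
    }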
3202 | 3210 |
3203 | 3211 |
3204 void CodeGenerator::VisitSlot(Slot* node) { | 3212 void CodeGenerator::VisitSlot(Slot* node) { |
(...skipping 312 matching lines...) | |
3517 frame_->CallRuntime(Runtime::kThrow, 1); | 3525 frame_->CallRuntime(Runtime::kThrow, 1); |
3518 frame_->EmitPush(r0); | 3526 frame_->EmitPush(r0); |
3519 ASSERT(frame_->height() == original_height + 1); | 3527 ASSERT(frame_->height() == original_height + 1); |
3520 } | 3528 } |
3521 | 3529 |
3522 | 3530 |
3523 void CodeGenerator::VisitProperty(Property* node) { | 3531 void CodeGenerator::VisitProperty(Property* node) { |
3524 #ifdef DEBUG | 3532 #ifdef DEBUG |
3525 int original_height = frame_->height(); | 3533 int original_height = frame_->height(); |
3526 #endif | 3534 #endif |
3527 VirtualFrame::SpilledScope spilled_scope(frame_); | |
3528 Comment cmnt(masm_, "[ Property"); | 3535 Comment cmnt(masm_, "[ Property"); |
3529 | 3536 |
3530 { Reference property(this, node); | 3537 { Reference property(this, node); |
3531 property.GetValue(); | 3538 property.GetValue(); |
3532 } | 3539 } |
3533 ASSERT(frame_->height() == original_height + 1); | 3540 ASSERT(frame_->height() == original_height + 1); |
3534 } | 3541 } |
3535 | 3542 |
3536 | 3543 |
3537 void CodeGenerator::VisitCall(Call* node) { | 3544 void CodeGenerator::VisitCall(Call* node) { |
(...skipping 1701 matching lines...) | |
5239 }; | 5246 }; |
5240 | 5247 |
5241 | 5248 |
5242 void DeferredReferenceGetNamedValue::BeforeGenerate() { | 5249 void DeferredReferenceGetNamedValue::BeforeGenerate() { |
5243 __ StartBlockConstPool(); | 5250 __ StartBlockConstPool(); |
5244 } | 5251 } |
5245 | 5252 |
5246 | 5253 |
5247 void DeferredReferenceGetNamedValue::Generate() { | 5254 void DeferredReferenceGetNamedValue::Generate() { |
5248 __ IncrementCounter(&Counters::named_load_inline_miss, 1, r1, r2); | 5255 __ IncrementCounter(&Counters::named_load_inline_miss, 1, r1, r2); |
5249 // Set up the name register and call load IC. | 5256 // Set up the registers and call load IC. |
5257 // On entry to this deferred code, r0 is assumed to already contain the | |
5258 // receiver from the top of the stack. | |
5250 __ mov(r2, Operand(name_)); | 5259 __ mov(r2, Operand(name_)); |
5251 Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize)); | 5260 Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize)); |
5252 __ Call(ic, RelocInfo::CODE_TARGET); | 5261 __ Call(ic, RelocInfo::CODE_TARGET); |
5253 // The call must be followed by a nop(1) instruction to indicate that the | 5262 // The call must be followed by a nop(1) instruction to indicate that the |
5254 // in-object load has been inlined. | 5263 // in-object load has been inlined. |
5255 __ nop(NAMED_PROPERTY_LOAD_INLINED); | 5264 __ nop(NAMED_PROPERTY_LOAD_INLINED); |
5256 } | 5265 } |
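The nop(NAMED_PROPERTY_LOAD_INLINED) after the IC call is a marker: when the load IC misses, the patching machinery inspects the instruction following the call's return address, and this specially encoded nop identifies a patchable inlined load site (the map literal and load offset emitted in EmitNamedLoad below). An illustrative check with assumed helper names; the real logic lives in the IC code, not in this file:

    // Hypothetical sketch of the marker test performed on an IC miss.
    bool LooksLikeInlinedLoadSite(Address return_address) {
      Instr instr = Assembler::instr_at(return_address);
      return Assembler::IsNop(instr, NAMED_PROPERTY_LOAD_INLINED);
    }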
5257 | 5266 |
5258 | 5267 |
5259 void DeferredReferenceGetNamedValue::AfterGenerate() { | 5268 void DeferredReferenceGetNamedValue::AfterGenerate() { |
5260 __ EndBlockConstPool(); | 5269 __ EndBlockConstPool(); |
5261 } | 5270 } |
5262 | 5271 |
5263 | 5272 |
5264 void CodeGenerator::EmitNamedLoad(Handle<String> name, bool is_contextual) { | 5273 void CodeGenerator::EmitNamedLoad(Handle<String> name, bool is_contextual) { |
5265 if (is_contextual || scope()->is_global_scope() || loop_nesting() == 0) { | 5274 if (is_contextual || scope()->is_global_scope() || loop_nesting() == 0) { |
5266 Comment cmnt(masm(), "[ Load from named Property"); | 5275 Comment cmnt(masm(), "[ Load from named Property"); |
5267 // Set up the name register and call load IC. | 5276 // Set up the name register and call load IC. |
5277 frame_->SpillAllButCopyTOSToR0(); | |
5268 __ mov(r2, Operand(name)); | 5278 __ mov(r2, Operand(name)); |
5269 frame_->CallLoadIC(is_contextual | 5279 frame_->CallLoadIC(is_contextual |
5270 ? RelocInfo::CODE_TARGET_CONTEXT | 5280 ? RelocInfo::CODE_TARGET_CONTEXT |
5271 : RelocInfo::CODE_TARGET); | 5281 : RelocInfo::CODE_TARGET); |
5272 } else { | 5282 } else { |
5273 // Inline the inobject property case. | 5283 // Inline the inobject property case. |
5274 Comment cmnt(masm(), "[ Inlined named property load"); | 5284 Comment cmnt(masm(), "[ Inlined named property load"); |
5275 | 5285 |
5276 DeferredReferenceGetNamedValue* deferred = | |
5277 new DeferredReferenceGetNamedValue(name); | |
5278 | |
5279 // The following instructions are the inlined load of an in-object property. | 5286 // The following instructions are the inlined load of an in-object property. |
5280 // Parts of this code are patched, so the exact instructions generated need | 5287 // Parts of this code are patched, so the exact instructions generated need |
5281 // to be fixed. Therefore the constant pool is blocked when generating | 5288 // to be fixed. Therefore the constant pool is blocked when generating |
5282 // this code. | 5289 // this code. |
5290 | |
5291 // Load the receiver from the stack. | |
5292 frame_->SpillAllButCopyTOSToR0(); | |
5293 | |
5294 DeferredReferenceGetNamedValue* deferred = | |
5295 new DeferredReferenceGetNamedValue(name); | |
5296 | |
5283 #ifdef DEBUG | 5297 #ifdef DEBUG |
5284 int kInlinedNamedLoadInstructions = 8; | 5298 int kInlinedNamedLoadInstructions = 7; |
5285 Label check_inlined_codesize; | 5299 Label check_inlined_codesize; |
5286 masm_->bind(&check_inlined_codesize); | 5300 masm_->bind(&check_inlined_codesize); |
5287 #endif | 5301 #endif |
5302 | |
5288 { Assembler::BlockConstPoolScope block_const_pool(masm_); | 5303 { Assembler::BlockConstPoolScope block_const_pool(masm_); |
5289 // Load the receiver from the stack. | |
5290 __ ldr(r1, MemOperand(sp, 0)); | |
5291 | |
5292 // Check that the receiver is a heap object. | 5304 // Check that the receiver is a heap object. |
5293 __ tst(r1, Operand(kSmiTagMask)); | 5305 __ tst(r0, Operand(kSmiTagMask)); |
5294 deferred->Branch(eq); | 5306 deferred->Branch(eq); |
5295 | 5307 |
5296 // Check the map. The null map used below is patched by the inline cache | 5308 // Check the map. The null map used below is patched by the inline cache |
5297 // code. | 5309 // code. |
5298 __ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset)); | 5310 __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset)); |
5299 __ mov(r3, Operand(Factory::null_value())); | 5311 __ mov(r3, Operand(Factory::null_value())); |
5300 __ cmp(r2, r3); | 5312 __ cmp(r2, r3); |
5301 deferred->Branch(ne); | 5313 deferred->Branch(ne); |
5302 | 5314 |
5303 // Initially use an invalid index. The index will be patched by the | 5315 // Initially use an invalid index. The index will be patched by the |
5304 // inline cache code. | 5316 // inline cache code. |
5305 __ ldr(r0, MemOperand(r1, 0)); | 5317 __ ldr(r0, MemOperand(r0, 0)); |
5306 } | 5318 } |
5307 | 5319 |
5308 // Make sure that the expected number of instructions are generated. | 5320 // Make sure that the expected number of instructions are generated. |
5309 ASSERT_EQ(kInlinedNamedLoadInstructions, | 5321 ASSERT_EQ(kInlinedNamedLoadInstructions, |
5310 masm_->InstructionsGeneratedSince(&check_inlined_codesize)); | 5322 masm_->InstructionsGeneratedSince(&check_inlined_codesize)); |
5311 | 5323 |
5312 __ IncrementCounter(&Counters::named_load_inline, 1, r1, r2); | 5324 __ IncrementCounter(&Counters::named_load_inline, 1, r3, r4); |
Søren Thygesen Gjesse
2010/04/27 06:55:40
Wouldn't it be better to use frame_->scratch0() an
5313 deferred->BindExit(); | 5325 deferred->BindExit(); |
5314 } | 5326 } |
5315 } | 5327 } |
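With the receiver now arriving in r0 via SpillAllButCopyTOSToR0() instead of being re-loaded from the stack into r1, the patchable region shrinks by one instruction, which is why kInlinedNamedLoadInstructions drops from 8 to 7. Reconstructed from the code above, the blocked-const-pool region is:

    //   tst r0, #kSmiTagMask       ; smi check on the receiver
    //   beq <deferred>             ; deferred->Branch(eq)
    //   ldr r2, [r0, #map]         ; load the receiver's map
    //   ldr r3, [pc, #...]         ; null_value placeholder, patched with the map
    //   cmp r2, r3
    //   bne <deferred>             ; deferred->Branch(ne)
    //   ldr r0, [r0, #0]           ; invalid offset, patched by the IC

The IncrementCounter scratch registers also change from r1/r2 to r3/r4; the review comment above asks whether frame_->scratch0() and, presumably, scratch1() would be the better choice.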
5316 | 5328 |
5317 | 5329 |
5318 void CodeGenerator::EmitKeyedLoad(bool is_global) { | 5330 void CodeGenerator::EmitKeyedLoad(bool is_global) { |
5319 Comment cmnt(masm_, "[ Load from keyed Property"); | 5331 Comment cmnt(masm_, "[ Load from keyed Property"); |
5320 Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize)); | 5332 Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize)); |
5321 RelocInfo::Mode rmode = is_global | 5333 RelocInfo::Mode rmode = is_global |
5322 ? RelocInfo::CODE_TARGET_CONTEXT | 5334 ? RelocInfo::CODE_TARGET_CONTEXT |
(...skipping 4022 matching lines...) | |
9345 | 9357 |
9346 // Just jump to runtime to add the two strings. | 9358 // Just jump to runtime to add the two strings. |
9347 __ bind(&string_add_runtime); | 9359 __ bind(&string_add_runtime); |
9348 __ TailCallRuntime(Runtime::kStringAdd, 2, 1); | 9360 __ TailCallRuntime(Runtime::kStringAdd, 2, 1); |
9349 } | 9361 } |
9350 | 9362 |
9351 | 9363 |
9352 #undef __ | 9364 #undef __ |
9353 | 9365 |
9354 } } // namespace v8::internal | 9366 } } // namespace v8::internal |